blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2 values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23 values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 213 values | src_encoding stringclasses 30 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 2 10.3M | extension stringclasses 246 values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4c7231a8e14c10703fc26a8640339469a4b4e25f | 8b3bc4efea5663b356acbabec231d1d647891805 | /835/Solution.py | 64c84a730e0de88937de14c75b1eadc26c283e47 | [] | no_license | FawneLu/leetcode | 9a982b97122074d3a8488adec2039b67e709af08 | 03020fb9b721a1c345e32bbe04f9b2189bfc3ac7 | refs/heads/master | 2021-06-18T20:13:34.108057 | 2021-03-03T05:14:13 | 2021-03-03T05:14:13 | 177,454,524 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 388 | py | ```python
class Solution:
def largestOverlap(self, A: List[List[int]], B: List[List[int]]) -> int:
N=len(A)
LA=[(xi,yi) for xi in range(N) for yi in range(N) if A[xi][yi]]
LB=[(xi,yi) for xi in range(N) for yi in range(N) if B[xi][yi]]
d=collections.Counter([(x1-x2,y1-y2) for (x1,y1) in LA for (x2,y2) in LB])
return max(d.values() or [0])
``` | [
"tracylu1996@gmail.com"
] | tracylu1996@gmail.com |
3e6fd81d4b9238c22cf01b6edbd2d6b1dcd44b92 | 17886364a482cafdbe376cc96af80bafd824238a | /ci.local | c525088256bf8f92f458386de199f84f76414cc3 | [
"Apache-2.0"
] | permissive | theishshah/xi-editor | 75ed0cbf06e6f255ecfb3625a5d2d0f15c36db93 | bbc7db09fbba2e35cf9e4b970b6d1b3337a95053 | refs/heads/master | 2020-03-29T07:53:04.174730 | 2018-09-19T21:18:01 | 2018-09-20T19:40:40 | 149,683,581 | 0 | 0 | Apache-2.0 | 2018-09-20T23:42:14 | 2018-09-20T23:42:14 | null | UTF-8 | Python | false | false | 1,633 | local | #!/usr/bin/env python
import glob
import os
import signal
import subprocess
import sys
import tempfile
from multiprocessing import cpu_count, Pool
SCRIPT_DIR = os.path.dirname(__file__)
MANIFESTS = [os.path.join(SCRIPT_DIR, "rust", "Cargo.toml")] + glob.glob(os.path.join(SCRIPT_DIR, "rust", "*", "Cargo.toml"))
def run_test(manifest):
print >> sys.stderr, "Starting test {}".format(manifest)
with tempfile.TemporaryFile("rw+b", prefix="xi-test", suffix=".log") as log:
result = subprocess.call(
["cargo", "test", "--manifest-path", manifest], stdout=log,
stderr=subprocess.STDOUT)
test_output = None
if result != 0:
log.seek(0)
test_output = log.read()
return (manifest, result, test_output)
def run_tests(test_pool, manifests):
results = test_pool.imap_unordered(run_test, MANIFESTS, chunksize=1)
for manifest, result, test_output in results:
relative_manifest = os.path.relpath(manifest, SCRIPT_DIR)
if result == 0:
print >> sys.stderr, "Test for {} passed".format(relative_manifest)
else:
print >> sys.stderr, "Test for {} failed with exit code {}".format(
manifest, result)
if test_output:
print >> sys.stderr, test_output
if __name__ == "__main__":
original_sigint_handler = signal.signal(signal.SIGINT, signal.SIG_IGN)
test_pool = Pool(cpu_count())
signal.signal(signal.SIGINT, original_sigint_handler)
try:
run_tests(test_pool, MANIFESTS)
except KeyboardInterrupt:
print >> sys.stderr, "Terminating tests"
test_pool.terminate()
raise
else:
test_pool.close()
finally:
test_pool.join()
| [
"vlovich@google.com"
] | vlovich@google.com |
082cf2b3d96dbd75f123de59ed7b96b84bacd95e | f18d14f21c9a06162804751a3fcc01c86e8e3fc8 | /onnx_chainer/functions/activation/leaky_relu.py | e83ff2b91ea2f286788675cbe39170a4101c5085 | [
"MIT"
] | permissive | Hakuyume/onnx-chainer | 8d6cc61a06ada52aba73b3bf526cd16c4e277a15 | 3c46bd692ef38a7c0f45a2a09795d2023364e12b | refs/heads/master | 2020-03-22T05:19:12.619953 | 2018-04-26T04:59:56 | 2018-04-26T05:12:13 | 139,557,461 | 0 | 0 | MIT | 2018-07-03T09:08:18 | 2018-07-03T09:08:17 | null | UTF-8 | Python | false | false | 508 | py | from onnx import helper
from onnx_chainer import mapping
def convert_LeakyReLU(
func, input_names, param_names, parameters, input_tensors):
for i, input_name in enumerate(input_names):
if type(input_name) is not str:
input_names[i] = str(input_name)
layer_name = mapping.operators[func.__class__.__name__]
out_names = [str(id(out())) for out in func.outputs]
return helper.make_node(
layer_name, input_names, out_names,
alpha=func.slope
),
| [
"shunta.saito@gmail.com"
] | shunta.saito@gmail.com |
330f23f0483991fcec12cce2a2bc68f2a98d9c82 | 8717bc60344a2efbe731fe236846b7fd403f2b2d | /Loops/loop1.py | d76e2864ea158a083ce66f86180ece21abc1a121 | [] | no_license | intro-programming/Class-Examples | b02413ecf000d76bf51ee81fd93c4a74d590aa97 | 016ee06489b1f6e2d85da7f87fe4fe4fa7e65996 | refs/heads/master | 2020-04-09T15:51:10.074986 | 2018-12-20T02:34:37 | 2018-12-20T02:34:37 | 160,437,482 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 94 | py | # Doing something a number of times in a row
for looper in [7,8,2,1]:
print "Hello World" | [
"denris97@gmail.com"
] | denris97@gmail.com |
a7a7dbe76a16863f1314ea9878fc13649cab1508 | 099ca663d2e6c1ddbce550e7e8e90310f084101c | /SkinLesionPrediction/urls.py | 1c2fc06e844e20b2971fed1bd0888d9b19da3523 | [] | no_license | hassan-rehan/skin_lesion_prediction | c3916ed8cffc2fd6bf948b37495964c34b2e65eb | 32cd6c488bfcbcd098838c805721a8a5a06fc7ed | refs/heads/main | 2023-08-18T20:43:49.117903 | 2021-02-21T14:13:14 | 2021-02-21T14:13:14 | 340,915,480 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,548 | py | """SkinLesionPrediction URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.urls import path,include
from django.contrib import admin
from django.contrib.staticfiles.urls import static
from django.conf import settings
from messaging import views as messaging_views
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
urlpatterns = [
path('admin/',admin.site.urls),
path('',include('landing.urls')),
path('signup/',include('signup.urls')),
path('signin/',include('signin.urls')),
path('patient/<int:id>/',include('patientHome.urls')),
path('doctor/<int:id>/',include('doctorHome.urls')),
path('patient/<int:id>/models/skin-lesion/',include('skinLesionModel.urls')),
path('fcm/',include('messaging.urls')),
path('firebase-messaging-sw.js',messaging_views.FM_js)
]
urlpatterns += staticfiles_urlpatterns()
urlpatterns+=static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| [
"jutthassan30@gmail.com"
] | jutthassan30@gmail.com |
94a5b614b8befada3ca809196db00f567a8a79b6 | 4dedd946a44ee62ab6a93db76e2ce3bf42deaad1 | /test/turian_crf_with_embeddings/file.py | 28899fd585bb29abe98b03cf69d6588a9a780835 | [] | no_license | lqrz/medical_information_extraction | 5cde8edc7d72a3e597f8ae60307f3b4e7372556b | d092af57ea8d6202acf55fc6204f7ab19b1be030 | refs/heads/master | 2021-03-27T12:02:59.410557 | 2017-03-07T09:47:08 | 2017-03-07T09:47:08 | 53,079,784 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,314 | py | import os.path
import gzip
try:
import bz2
except:
import sys
print >> sys.stderr, "COULD NOT IMPORT bz2 !"
import os, os.path, sys
def myopen(filename, mode="r", bufsize=-1):
"""
open(), detecting .gz and .bz2 file suffixes
"""
filename = os.path.expanduser(filename)
if filename[-3:] == ".gz":
if mode == "r" or mode == "rt": mode = "rb"
elif mode == "w" or mode == "wt": mode = "wb"
return gzip.open(filename, mode, bufsize)
elif filename[-4:] == ".bz2":
if mode == "r" or mode == "rt": mode = "rb"
elif mode == "w" or mode == "wt": mode = "wb"
return bz2.open(filename, mode, bufsize)
else:
return open(filename, mode, bufsize)
def find_files(dir, shuffle=False):
"""
Find all files in dir by recursively directory walking.
@param shuffle: Randomly shuffle the files before returning them.
"""
all = []
assert os.path.isdir(dir)
for root, dirs, files in os.walk(dir):
#sys.stderr.write("Walking %s...\n" % root)
for f in files:
all.append(os.path.join(root, f))
if shuffle:
import random
random.shuffle(all)
return all
def ensuredir(dir):
"""
Create dir if it does not exist (including all parents).
Do nothing if it does.
"""
if not os.path.exists(dir):
sys.stderr.write("Creating directory: %s\n" % dir)
os.makedirs(dir)
assert os.path.isdir(dir)
def ascend_find_help(name, dir):
"""
Crawl up the directory hierarchy from dir towards root, and return
the first time we find a file 'name'.
"""
p = os.path.join(dir, name)
if os.path.exists(p): return p
updir = os.path.dirname(dir)
assert updir != dir
return ascend_find(name, dir=updir)
def ascend_find(name, dir=os.getcwd()):
"""
Crawl up the directory hierarchy from dir towards root, and return
the first time we find a file 'name'.
Try first from os.getcwd, and then from os.dirname(sys.argv[0])
"""
try:
return ascend_find_help(name, dir)
except:
print >> sys.stderr, "WARNING: ascend_find(%s, %s) failed. Trying ascend_find(%s, %s)." % (name, dir, name, os.path.dirname(sys.argv[0]))
return ascend_find_help(name, os.path.dirname(sys.argv[0]))
| [
"lautaro.quiroz@gmail.com"
] | lautaro.quiroz@gmail.com |
4c82162d32b614c2ee03bdacc86b398d7b43afa8 | f35bb610514e0e8cc954303f41d8b675a67c4d12 | /test.py | 11284de57a06b4e03fc90a0823ea8e8f900ac446 | [
"MIT"
] | permissive | pwaller/inf | d9a2aad35c7c8c6f9bdefb38f8d508f21c7e2846 | 2af6d7c33fb33470a6bcd7b555d01618839ff77b | refs/heads/master | 2020-04-12T17:03:47.066841 | 2018-03-12T16:38:42 | 2018-03-12T16:43:46 | 13,465,281 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 754 | py | # Because infinity needs testing.
from nose.tools import (raises, assert_equal)
import inf
@raises(ZeroDivisionError)
def test_infdiv_zero_zero():
inf.div(0.0, 0.0)
def test_infdiv_normal():
cases = [(1.0, 2.0, 0.5),
(0.0, 2.0, 0.0),
(1, 2, 0.5),
(0, 2, 0.0)]
def one(p, q, expected):
assert_equal(inf.div(p, q), expected)
for p, q, expected in cases:
yield one, p, q, expected
def test_infdiv_infinity():
cases = [
(1.0, 0.0, inf),
(-1.0, 0.0, -inf),
(-1.0, -0.0, inf),
(1.0, -0.0, -inf),
]
def one(p, q, expected):
assert_equal(inf.div(p, q), expected)
for p, q, expected in cases:
yield one, p, q, expected
| [
"peter@scraperwiki.com"
] | peter@scraperwiki.com |
47c1964ce2e1dd26a07d811460d0f7448641d484 | 6181fcd4a266d963a0ee85971768c97922ca77cd | /src/garage/tf/baselines/gaussian_cnn_baseline.py | 20fd54724e8593a6be0dcd34f639c3b50ebd710a | [
"MIT"
] | permissive | rlworkgroup/garage | 5d215bbecb3a4e74b504988d6684a7b04df69a80 | 2d594803636e341660cab0e81343abbe9a325353 | refs/heads/master | 2023-08-21T22:58:49.338034 | 2023-01-04T06:06:27 | 2023-01-04T06:06:27 | 136,846,372 | 1,832 | 363 | MIT | 2023-09-11T11:36:40 | 2018-06-10T21:31:23 | Python | UTF-8 | Python | false | false | 17,048 | py | """A baseline based on a GaussianCNN model."""
import akro
from dowel import tabular
import numpy as np
import tensorflow as tf
from garage import make_optimizer
from garage.experiment import deterministic
from garage.np.baselines.baseline import Baseline
from garage.tf import compile_function
from garage.tf.baselines.gaussian_cnn_baseline_model import (
GaussianCNNBaselineModel)
from garage.tf.optimizers import LBFGSOptimizer, PenaltyLBFGSOptimizer
# pylint: disable=too-many-ancestors
class GaussianCNNBaseline(GaussianCNNBaselineModel, Baseline):
"""Fits a Gaussian distribution to the outputs of a CNN.
Args:
env_spec (garage.envs.env_spec.EnvSpec): Environment specification.
filters (Tuple[Tuple[int, Tuple[int, int]], ...]): Number and dimension
of filters. For example, ((3, (3, 5)), (32, (3, 3))) means there
are two convolutional layers. The filter for the first layer have 3
channels and its shape is (3 x 5), while the filter for the second
layer have 32 channels and its shape is (3 x 3).
strides(tuple[int]): The stride of the sliding window. For example,
(1, 2) means there are two convolutional layers. The stride of the
filter for first layer is 1 and that of the second layer is 2.
padding (str): The type of padding algorithm to use,
either 'SAME' or 'VALID'.
name (str): Model name, also the variable scope.
hidden_sizes (list[int]): Output dimension of dense layer(s) for
the Convolutional model for mean. For example, (32, 32) means the
network consists of two dense layers, each with 32 hidden units.
hidden_nonlinearity (Callable): Activation function for intermediate
dense layer(s). It should return a tf.Tensor. Set it to
None to maintain a linear activation.
hidden_w_init (Callable): Initializer function for the weight
of intermediate dense layer(s). The function should return a
tf.Tensor.
hidden_b_init (Callable): Initializer function for the bias
of intermediate dense layer(s). The function should return a
tf.Tensor.
output_nonlinearity (Callable): Activation function for output dense
layer. It should return a tf.Tensor. Set it to None to
maintain a linear activation.
output_w_init (Callable): Initializer function for the weight
of output dense layer(s). The function should return a
tf.Tensor.
output_b_init (Callable): Initializer function for the bias
of output dense layer(s). The function should return a
tf.Tensor.
name (str): Name of this model (also used as its scope).
learn_std (bool): Whether to train the standard deviation parameter of
the Gaussian distribution.
init_std (float): Initial standard deviation for the Gaussian
distribution.
adaptive_std (bool): Whether to use a neural network to learn the
standard deviation of the Gaussian distribution. Unless True, the
standard deviation is learned as a parameter which is not
conditioned on the inputs.
std_share_network (bool): Boolean for whether the mean and standard
deviation models share a CNN network. If True, each is a head from
a single body network. Otherwise, the parameters are estimated
using the outputs of two indepedent networks.
std_filters (Tuple[Tuple[int, Tuple[int, int]], ...]): Number and
dimension of filters. For example, ((3, (3, 5)), (32, (3, 3)))
means there are two convolutional layers. The filter for the first
layer have 3 channels and its shape is (3 x 5), while the filter
for the second layer have 32 channels and its shape is (3 x 3).
std_strides(tuple[int]): The stride of the sliding window. For example,
(1, 2) means there are two convolutional layers. The stride of the
filter for first layer is 1 and that of the second layer is 2.
std_padding (str): The type of padding algorithm to use in std network,
either 'SAME' or 'VALID'.
std_hidden_sizes (list[int]): Output dimension of dense layer(s) for
the Conv for std. For example, (32, 32) means the Conv consists
of two hidden layers, each with 32 hidden units.
std_hidden_nonlinearity (callable): Nonlinearity for each hidden layer
in the std network.
std_output_nonlinearity (Callable): Activation function for output
dense layer in the std network. It should return a tf.Tensor. Set
it to None to maintain a linear activation.
layer_normalization (bool): Bool for using layer normalization or not.
normalize_inputs (bool): Bool for normalizing inputs or not.
normalize_outputs (bool): Bool for normalizing outputs or not.
subsample_factor (float): The factor to subsample the data. By default
it is 1.0, which means using all the data.
optimizer (garage.tf.Optimizer): Optimizer used for fitting the model.
optimizer_args (dict): Arguments for the optimizer. Default is None,
which means no arguments.
use_trust_region (bool): Whether to use a KL-divergence constraint.
max_kl_step (float): KL divergence constraint for each iteration, if
`use_trust_region` is active.
"""
def __init__(self,
env_spec,
filters,
strides,
padding,
hidden_sizes,
hidden_nonlinearity=tf.nn.tanh,
hidden_w_init=tf.initializers.glorot_uniform(
seed=deterministic.get_tf_seed_stream()),
hidden_b_init=tf.zeros_initializer(),
output_nonlinearity=None,
output_w_init=tf.initializers.glorot_uniform(
seed=deterministic.get_tf_seed_stream()),
output_b_init=tf.zeros_initializer(),
name='GaussianCNNBaseline',
learn_std=True,
init_std=1.0,
adaptive_std=False,
std_share_network=False,
std_filters=(),
std_strides=(),
std_padding='SAME',
std_hidden_sizes=(),
std_hidden_nonlinearity=None,
std_output_nonlinearity=None,
layer_normalization=False,
normalize_inputs=True,
normalize_outputs=True,
subsample_factor=1.,
optimizer=None,
optimizer_args=None,
use_trust_region=True,
max_kl_step=0.01):
if not isinstance(env_spec.observation_space, akro.Box) or \
not len(env_spec.observation_space.shape) in (2, 3):
raise ValueError(
'{} can only process 2D, 3D akro.Image or'
' akro.Box observations, but received an env_spec with '
'observation_space of type {} and shape {}'.format(
type(self).__name__,
type(env_spec.observation_space).__name__,
env_spec.observation_space.shape))
self._env_spec = env_spec
self._use_trust_region = use_trust_region
self._subsample_factor = subsample_factor
self._max_kl_step = max_kl_step
self._normalize_inputs = normalize_inputs
self._normalize_outputs = normalize_outputs
if optimizer_args is None:
optimizer_args = dict()
if optimizer is None:
if use_trust_region:
self._optimizer = make_optimizer(PenaltyLBFGSOptimizer,
**optimizer_args)
else:
self._optimizer = make_optimizer(LBFGSOptimizer,
**optimizer_args)
else:
self._optimizer = make_optimizer(optimizer, **optimizer_args)
super().__init__(input_dim=env_spec.observation_space.shape,
output_dim=1,
filters=filters,
strides=strides,
padding=padding,
hidden_sizes=hidden_sizes,
hidden_nonlinearity=hidden_nonlinearity,
hidden_w_init=hidden_w_init,
hidden_b_init=hidden_b_init,
output_nonlinearity=output_nonlinearity,
output_w_init=output_w_init,
output_b_init=output_b_init,
learn_std=learn_std,
adaptive_std=adaptive_std,
std_share_network=std_share_network,
init_std=init_std,
min_std=None,
max_std=None,
std_filters=std_filters,
std_strides=std_strides,
std_padding=std_padding,
std_hidden_sizes=std_hidden_sizes,
std_hidden_nonlinearity=std_hidden_nonlinearity,
std_output_nonlinearity=std_output_nonlinearity,
std_parameterization='exp',
layer_normalization=layer_normalization,
name=name)
# model for old distribution, used when trusted region is on
self._old_model = self.clone_model(name=name + '_old_model')
self._old_network = None
self._x_mean = None
self._x_std = None
self._y_mean = None
self._y_std = None
self._initialize()
def _initialize(self):
input_var = tf.compat.v1.placeholder(tf.float32,
shape=(None, ) +
self._input_shape)
if isinstance(self.env_spec.observation_space, akro.Image):
input_var = tf.cast(input_var, tf.float32) / 255.0
ys_var = tf.compat.v1.placeholder(dtype=tf.float32,
name='ys',
shape=(None, self._output_dim))
self._old_network = self._old_model.build(input_var)
(_, _, norm_dist, norm_mean, norm_log_std, _, mean, _, self._x_mean,
self._x_std, self._y_mean,
self._y_std) = self.build(input_var).outputs
normalized_ys_var = (ys_var - self._y_mean) / self._y_std
old_normalized_dist = self._old_network.normalized_dist
mean_k1 = tf.reduce_mean(old_normalized_dist.kl_divergence(norm_dist))
loss = -tf.reduce_mean(norm_dist.log_prob(normalized_ys_var))
self._f_predict = compile_function([input_var], mean)
optimizer_args = dict(
loss=loss,
target=self,
network_outputs=[norm_mean, norm_log_std],
)
if self._use_trust_region:
optimizer_args['leq_constraint'] = (mean_k1, self._max_kl_step)
optimizer_args['inputs'] = [input_var, ys_var]
else:
optimizer_args['inputs'] = [input_var, ys_var]
with tf.name_scope('update_opt'):
self._optimizer.update_opt(**optimizer_args)
def fit(self, paths):
"""Fit regressor based on paths.
Args:
paths (dict[numpy.ndarray]): Sample paths.
"""
xs = np.concatenate([p['observations'] for p in paths])
if isinstance(self._env_spec.observation_space, akro.Image) and \
len(xs[0].shape) < \
len(self._env_spec.observation_space.shape):
xs = self._env_spec.observation_space.unflatten_n(xs)
ys = np.concatenate([p['returns'] for p in paths])
ys = ys.reshape((-1, 1))
if self._subsample_factor < 1:
num_samples_tot = xs.shape[0]
idx = np.random.randint(
0, num_samples_tot,
int(num_samples_tot * self._subsample_factor))
xs, ys = xs[idx], ys[idx]
if self._normalize_inputs:
# recompute normalizing constants for inputs
self._x_mean.load(np.mean(xs, axis=0, keepdims=True))
self._x_std.load(np.std(xs, axis=0, keepdims=True) + 1e-8)
self._old_network.x_mean.load(np.mean(xs, axis=0, keepdims=True))
self._old_network.x_std.load(
np.std(xs, axis=0, keepdims=True) + 1e-8)
if self._normalize_outputs:
# recompute normalizing constants for outputs
self._y_mean.load(np.mean(ys, axis=0, keepdims=True))
self._y_std.load(np.std(ys, axis=0, keepdims=True) + 1e-8)
self._old_network.y_mean.load(np.mean(ys, axis=0, keepdims=True))
self._old_network.y_std.load(
np.std(ys, axis=0, keepdims=True) + 1e-8)
inputs = [xs, ys]
loss_before = self._optimizer.loss(inputs)
tabular.record('{}/LossBefore'.format(self._name), loss_before)
self._optimizer.optimize(inputs)
loss_after = self._optimizer.loss(inputs)
tabular.record('{}/LossAfter'.format(self._name), loss_after)
if self._use_trust_region:
tabular.record('{}/MeanKL'.format(self._name),
self._optimizer.constraint_val(inputs))
tabular.record('{}/dLoss'.format(self._name), loss_before - loss_after)
self._old_model.parameters = self.parameters
def predict(self, paths):
"""Predict ys based on input xs.
Args:
paths (dict[numpy.ndarray]): Sample paths.
Return:
numpy.ndarray: The predicted ys.
"""
xs = paths['observations']
if isinstance(self._env_spec.observation_space, akro.Image) and \
len(xs[0].shape) < \
len(self._env_spec.observation_space.shape):
xs = self._env_spec.observation_space.unflatten_n(xs)
return self._f_predict(xs).flatten()
def clone_model(self, name):
"""Return a clone of the GaussianCNNBaselineModel.
It copies the configuration of the primitive and also the parameters.
Args:
name (str): Name of the newly created model. It has to be
different from source policy if cloned under the same
computational graph.
Returns:
garage.tf.baselines.GaussianCNNBaselineModel: Newly cloned model.
"""
new_baseline = GaussianCNNBaselineModel(
name=name,
input_dim=self._env_spec.observation_space.shape,
output_dim=1,
filters=self._filters,
strides=self._strides,
padding=self._padding,
hidden_sizes=self._hidden_sizes,
hidden_nonlinearity=self._hidden_nonlinearity,
hidden_w_init=self._hidden_w_init,
hidden_b_init=self._hidden_b_init,
output_nonlinearity=self._output_nonlinearity,
output_w_init=self._output_w_init,
output_b_init=self._output_b_init,
learn_std=self._learn_std,
adaptive_std=self._adaptive_std,
std_share_network=self._std_share_network,
init_std=self._init_std,
min_std=None,
max_std=None,
std_filters=self._std_filters,
std_strides=self._std_strides,
std_padding=self._std_padding,
std_hidden_sizes=self._std_hidden_sizes,
std_hidden_nonlinearity=self._std_hidden_nonlinearity,
std_output_nonlinearity=None,
std_parameterization='exp',
layer_normalization=self._layer_normalization)
new_baseline.parameters = self.parameters
return new_baseline
@property
def recurrent(self):
"""bool: If this module has a hidden state."""
return False
@property
def env_spec(self):
"""Policy environment specification.
Returns:
garage.EnvSpec: Environment specification.
"""
return self._env_spec
def __getstate__(self):
"""Object.__getstate__.
Returns:
dict: The state to be pickled for the instance.
"""
new_dict = super().__getstate__()
del new_dict['_f_predict']
del new_dict['_old_network']
del new_dict['_x_mean']
del new_dict['_x_std']
del new_dict['_y_mean']
del new_dict['_y_std']
return new_dict
def __setstate__(self, state):
"""Object.__setstate__.
Args:
state (dict): Unpickled state.
"""
super().__setstate__(state)
self._initialize()
| [
"noreply@github.com"
] | rlworkgroup.noreply@github.com |
fc9711a5c699b94b2817448ca4cf6bcf6a65e04f | 5a376385688d7b8d98a04a4d18ac865d18950393 | /eurobot_sim/scripts/stm_node_secondary.py | 2622d0671782c55513aefde2c6b424f4db6995c4 | [] | no_license | akpp213/ros-eurobot-2018 | 66d50ce99e2ab943c21e105519e4375e160bdea2 | 2c64fe607e2e912cfbf9ab26e81c3c8d8baad01c | refs/heads/master | 2020-03-19T06:48:40.524813 | 2018-07-10T09:42:16 | 2018-07-10T09:42:16 | 136,057,327 | 0 | 0 | null | 2018-07-03T14:34:19 | 2018-06-04T17:02:30 | Python | UTF-8 | Python | false | false | 7,504 | py | #!/usr/bin/env python
import rospy
from std_msgs.msg import String
import numpy as np
import tf
from nav_msgs.msg import Odometry
from geometry_msgs.msg import Twist
class stm_node_secondary():
def __init__(self):
# ROS
rospy.init_node('stm_node_secondary', anonymous=True)
rospy.Subscriber("stm_command", String, self.stm_command_callback)
rospy.Subscriber("cmd_vel", Twist, self.set_twist)
self.pub_response = rospy.Publisher("response", String, queue_size=10)
self.pub_odom = rospy.Publisher("odom", Odometry, queue_size=1)
self.robot_name = rospy.get_param('robot_name')
self.br = tf.TransformBroadcaster()
# high-level commands info (for handling response)
self.actions_in_progress = [''] # action_names, indexing corresponds to types indexing
self.action_types = [] # list of high-level action types only
self.actions_with_response = [0x0E]
self.pack_format = {
0x01: "=BBBB",
0x03: "=Bf",
0x04: "=B",
0x05: "=B",
0x08: "=fff",
0x09: "=",
0x0a: "=",
0x0b: "=BH",
0x0c: "=B",
0x0d: "=B",
0xa0: "=fff",
0xa1: "=fff",
0xb0: "=B",
0xc0: "=BB",
0xb1: "=B",
0x0e: "=fff",
0x0f: "=",
}
self.unpack_format = {
0x01: "=BBBB",
0x03: "=BB",
0x04: "=BB",
0x05: "=BB",
0x08: "=BB",
0x09: "=fff",
0x0a: "=fff",
0x0b: "=BB",
0x0c: "=f",
0x0d: "=BB",
0xa0: "=Bfff",
0xa1: "=BB",
0xb0: "=BB",
0xc0: "=BB",
0xb1: "=BB",
0x0e: "=BB",
0x0f: "=fff",
}
self.freq = 400
self.rate = rospy.Rate(self.freq) # 100Hz
self.color = rospy.get_param("/field/color")
self.coords = np.array(rospy.get_param('start_' + self.color))
self.coords[:2] /= 1000.0
self.laser_coords = (rospy.get_param('lidar_x') / 1000.0, rospy.get_param('lidar_y') / 1000.0, 0.41)
self.vel = np.array([0.0, 0.0, 0.0])
self.last_integration_time = rospy.get_time()
rospy.Timer(rospy.Duration(1. / 80), self.pub_timer_callback)
def set_twist(self, twist):
vel = np.zeros(3)
vel[0] = twist.linear.x
vel[1] = twist.linear.y
vel[2] = twist.angular.z
self.set_vel(vel)
def parse_data(self, data):
data_splitted = data.data.split()
action_name = data_splitted[0]
action_type = int(data_splitted[1])
args_str = data_splitted[2:]
# TBD: split any chars in Strings like 'ECHO'->['E','C','H','O']
action_args_dict = {'B': ord, 'H': int, 'f': float}
args = [action_args_dict[t](s) for t, s in zip(self.pack_format[action_type][1:], args_str)]
return action_name, action_type, args
def set_vel(self, vel):
self.vel = vel
def set_coords(self, coords):
self.coords = coords
def stm_command_callback(self, data):
# parse data
action_name, action_type, args = self.parse_data(data)
rospy.loginfo(str(action_type))
# simulate STM32 response
successfuly = True
args_response = "Ok"
if action_type == 0x08:
self.set_vel(np.array(args))
elif action_type == 0x09:
args_response = self.vel
elif action_type == 0x0E:
self.set_coords(np.array(args))
elif action_type == 0x0F:
args_response = self.coords
# high-level commands handling
if action_type in self.action_types:
# store action_name
# rospy.loginfo(str(action_type))
self.actions_in_progress[self.action_types[action_type]] = action_name
# low-level commands handling
elif action_type in self.actions_with_response:
rospy.loginfo(action_name + " finished")
def delayed_cb(e):
self.pub_response.publish(action_name + " finished")
rospy.Timer(rospy.Duration(0.2), delayed_cb, oneshot=True)
def handle_response(self, status):
"""Handles response for high-lvl commands (only)."""
l = len(status)
for i in range(l):
# mind that indeces in status[] correspond to indeces in actions_in_progress[]
rospy.loginfo(status[i] + ' ' + str(self.action_in_progress[i]))
if status[i] == '0' and len(self.actions_in_progress[i]) > 0:
self.actions_in_progress[i] = '' # stop storing this action_name
self.pub_response.publish(self.actions_in_progress[i] + " done") # publish responce
self.rate.sleep()
def integrate(self):
while not rospy.is_shutdown():
# time between iterations
now = rospy.get_time()
dt = now - self.last_integration_time
self.last_integration_time = now
# matrix for conveting into world frame
a = self.coords[2]
M = np.array([[np.cos(a), -np.sin(a)],
[np.sin(a), np.cos(a)]]) # TODO
vel = self.vel.copy()
vel[:2] = np.matmul(M, vel[:2].reshape((2, 1))).reshape((2,))
# process noise
noise = np.random.normal(size=3) * 0.1 * vel
# noise *= 0.96 # simulate bad estimation of wheel size, etc.
# add dx and noise to coords
self.coords += (vel + noise) * dt
self.coords[2] = self.coords[2] % (2 * np.pi)
self.rate.sleep()
def pub_timer_callback(self, event):
# localization noise
coords = self.coords.copy()
coords[:2] += np.random.normal(size=2, scale=0.0005)
odom = Odometry()
odom.header.frame_id = 'odom'
odom.child_frame_id = self.robot_name
odom.pose.pose.position.x = coords[0]
odom.pose.pose.position.y = coords[1]
quat = tf.transformations.quaternion_from_euler(0, 0, coords[2])
odom.pose.pose.orientation.z = quat[2]
odom.pose.pose.orientation.w = quat[3]
odom.twist.twist.linear.x = self.vel[0]
odom.twist.twist.linear.x = self.vel[1]
odom.twist.twist.angular.z = self.vel[2]
self.pub_odom.publish(odom)
self.br.sendTransform((coords[0], coords[1], 0),
tf.transformations.quaternion_from_euler(0, 0, coords[2]),
rospy.Time.now(),
self.robot_name,
"%s_odom" % self.robot_name)
self.br.sendTransform(self.laser_coords,
tf.transformations.quaternion_from_euler(0, 0, 1.570796),
rospy.Time.now(),
'%s_laser' % self.robot_name,
self.robot_name)
self.br.sendTransform((0, 0, 0),
tf.transformations.quaternion_from_euler(0, 0, 0),
rospy.Time.now(),
'map',
"%s_odom" % self.robot_name)
if __name__ == '__main__':
try:
stm = stm_node_secondary()
stm.integrate()
except rospy.ROSInterruptException:
pass
| [
"knapping@mit.edu"
] | knapping@mit.edu |
97643dcf7a8ea11bd4c4a65ddf0fd23e7f4c0fc3 | 1d690d72ad7465c1a90cf0fb4d8d4b67d9a35a8b | /Writer.py | 3cd1ae98ee2d4635c486b33eb0967440537bb7e5 | [] | no_license | AlphaBetaGamma96/LogHarmonicNet | ee57f907d14613f59b654215a25658c7679c26f9 | 98d177cf9ce83e2396f428cb5ae914df7a6bd0a8 | refs/heads/main | 2023-04-12T23:03:24.322794 | 2021-04-02T14:31:59 | 2021-04-02T14:31:59 | 353,356,190 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 757 | py | import pandas as pd
class WriteToFile(object):
"""
Class to write a dict to a Pandas' dataframe and saved to a .csv
"""
def __init__(self, load, filename):
self.dataframe=None
if(isinstance(load, str)):
self._load(load)
self._filename=filename
def _load(self, filepath):
self.dataframe = pd.read_csv(filepath, index_col[0])
def _write_to_file(self, filename):
self.dataframe.to_csv(filename)
def __call__(self, dic):
if(self.dataframe is None):
self.dataframe = pd.DataFrame.from_dict(dic)
else:
row = pd.DataFrame.from_dict(dic)
frames = [self.dataframe, row]
self.dataframe = pd.concat(frames, axis=0, ignore_index=True)
self._write_to_file(self._filename)
| [
"jameswtkeeble@gmail.com"
] | jameswtkeeble@gmail.com |
4a868e656d4c079a39cb55f5a666878a4b75f71d | 4f92cc4e6072017909e4bb362e98f3bb85460952 | /python/src/print_to_pmod.py | ee8657d9057772783060c828bbd9007fd706435f | [] | no_license | wscotten/Actioniks | 7b3c0f851ee265e2c7b5458d7e14269604e93366 | d6f19906d8a47f37a9466f3dbf0a6dab163ec351 | refs/heads/master | 2020-12-02T11:33:39.008433 | 2017-06-07T07:40:29 | 2017-06-07T07:40:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 521 | py | from pynq import Overlay
from pynq.iop import Pmod_OLED
from pynq.iop import PMODA
from pynq.board import Button
import time
ol = Overlay("base.bit")
ol.download()
pmod_oled = Pmod_OLED(PMODA)
def print_to_pmod_solution(solution):
for i in range(len(solution)):
pmod_oled.clear()
pmod_oled.write(solution[i])
while True:
if Button(0).read():
time.sleep(1)
break
pmod_oled.clear()
pmod_oled.write('Congratulations!\nYour cube is\n solved!')
| [
"wscotten@uci.edu"
] | wscotten@uci.edu |
5d83e02d2dd7ddd993c793737d4b12113fe040b4 | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /FnyAGdwgcH4whynjR_20.py | b54ebb927a0a13a75b73876e6a6c573f4cdb22ca | [] | no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 289 | py |
from itertools import *
def get_subsets(lst, n):
comb = []
for i in range(1, len(lst) + 1):
comb.append(list([list(x) for x in combinations([x for x in range(len(lst))], i)]))
return [[lst[i] for i in j] for k in comb for j in k if sum([lst[i] for i in j]) == n]
| [
"daniel.reich@danielreichs-MacBook-Pro.local"
] | daniel.reich@danielreichs-MacBook-Pro.local |
4226a49e6e67734940640f9964357c2e25d2691e | 62d55e4ea01c9475b77a4ba3a78d5c9b39f78419 | /algorithms/popularity.py | 35717ec957923dc43d0ab56b7591c2fbd0fa7130 | [] | no_license | hangvane/serviceCachingEdge | d942a55a711d6afb3fdc5e870b360e7954fe10bd | 3e0d3e4112544f32cbda8b5e686cee5b99432a76 | refs/heads/main | 2023-03-28T18:46:09.853927 | 2021-03-31T02:15:04 | 2021-03-31T02:15:04 | 353,171,421 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,564 | py | import config.config_sys as cfg_sys
from algorithms.utils import totalCost
from code_utils.utils import getLogger, shutdown
logger = getLogger()
def alg(userSet, dcSet, nodeSet, edgeList, serviceSet, delayMat, budget):
"""
return assignment: {
reqName: locationIp
}
"""
logger.warning('Popularity algorithm beginning')
costMat = totalCost(userSet, dcSet, nodeSet, edgeList, serviceSet)
logger.info('Popularity alg: costMat: %s', str(costMat))
residualCapacity = {
k: v['capacity'] for k, v in nodeSet.items()
if v['typee'] == cfg_sys.TYPEE_CL_USER or
v['typee'] == cfg_sys.TYPEE_DC
}
popularitySet = {
serviceName: {
nodeIp: 0
for nodeIp, node in nodeSet.items()
if node['typee'] == cfg_sys.TYPEE_CL_USER
}
for serviceName in serviceSet
}
assignment = {}
for userIp, user in userSet.items():
for reqName, req in user['reqSet'].items():
serviceName = req['service']
capacity = req['speed'] / serviceSet[serviceName]['miu']
maxDelay = max(delayMat[userIp].values())
popularitySubset = {
clIp: pop - delayMat[userIp][clIp] / maxDelay / 2
for clIp, pop in popularitySet[serviceName].items()
}
popularityList = sorted(
zip(popularitySubset.keys(), popularitySubset.values()),
key=lambda x: x[1],
reverse=True,
)
for clIp, _ in popularityList:
if residualCapacity[clIp] < capacity:
continue
popularitySet[serviceName][clIp] += 1
assignment[reqName] = clIp
residualCapacity[clIp] -= capacity
break
if reqName not in assignment:
for dcIp in dcSet:
if serviceName not in dcSet[dcIp]['serviceList']:
continue
assignment[reqName] = dcIp
residualCapacity[dcIp] -= capacity
break
if reqName not in assignment:
logger.error(
'request not assigned: reqName: %s, userIp: %s, assignment: %s', reqName, userIp, str(assignment)
)
shutdown()
logger.warning('Popularity algorithm finished, assignment: %s', str(assignment))
return assignment
| [
"noreply@github.com"
] | hangvane.noreply@github.com |
df863b036a7ff71d6894898a8b9e3248ceec66cf | 1dec956d45cb832e6194f4427a95e07f7b33149c | /deep_nets/keras/feature_extract_mobilenet.py | 9513361a265d410f467d349c150a113cefd0a0e5 | [] | no_license | pawanon61/extract-features | 8b41646e1d1713d36858e0b327adeb86084ab332 | 8d5da22ad1e7ee292a5efbc3925af8be6441aca7 | refs/heads/master | 2021-01-01T20:43:27.927215 | 2017-07-31T19:18:30 | 2017-07-31T19:18:30 | 98,918,511 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,959 | py | from keras.applications.mobilenet import MobileNet
from keras.preprocessing import image_new
from keras.applications.mobilenet import preprocess_input
from keras.models import Model
import tensorflow as tf
import numpy as np
import sys, os
from tqdm import tqdm
import scipy.misc
sys.path.insert(0, '/braintree/home/pgaire/deep_nets/keras') #to add/import the python module
sys.path.insert(1, '/braintree/home/pgaire/softwares/streams') #to add/import the python module
from streams.envs import hvm
from streams.metrics.neural_cons import NeuralFitAllSites
sys.path.insert(2, '/braintree/home/pgaire/softwares/tools')
from get_imagenet_images import get_imagenet_images
from sklearn.decomposition import PCA
import argparse
def get_args():
# assign description to help doc
parser = argparse.ArgumentParser()
# add arguments
parser.add_argument('-variation', '--variation', type=int, help='which variation of image to take', required=True)
parser.add_argument('-gpu', '--gpu', type=int, help='which gpu to use in the current hode', required=True)
parser.add_argument('-whichlayer', '--whichlayer', type=str, help='from which layer do you want to extract the feature', required=True)
parser.add_argument('-model', '--model_name', type=str, help='name of model to run', required=True)
# Array for all arguments passed to script
args = parser.parse_args()
variation_arg = args.variation
gpu_arg = args.gpu
whichlayer_arg = args.whichlayer
model_name_arg = args.model_name
# return all variable values
return variation_arg, gpu_arg, whichlayer_arg, model_name_arg
# match values returned from get_args() to assign to their respective variables
variation, gpu, feature_extraction_layer, net = get_args()
os.environ['CUDA_VISIBLE_DEVICES'] = '%d'%gpu #use gpu in the node specified by user
def extract_features(list_of_images):
features = []
for number_of_images, image in enumerate(tqdm(list_of_images)):
# image = scipy.misc.imresize(image, [229,229]) #xception takes image input of size 229x229
image = image_new.array_to_img(image)
image = image_new.load_img(image, target_size=(224, 224))
image = image_new.img_to_array(image)
image = np.expand_dims(image, axis=0)
image = preprocess_input(image)
feature_of_this_layer = model.predict(image)
feature_of_this_layer = feature_of_this_layer.flatten()
features.append(feature_of_this_layer)
features = np.asarray(features)
return features
#after each block after residual features has been added
if feature_extraction_layer == 'block1':
tensor_name = 'conv_pw_1_relu'
elif feature_extraction_layer == 'block2':
tensor_name = 'conv_pw_2_relu'
elif feature_extraction_layer == 'block3':
tensor_name = 'conv_pw_3_relu'
elif feature_extraction_layer == 'block4':
tensor_name = 'conv_pw_4_relu'
elif feature_extraction_layer == 'block5':
tensor_name = 'conv_pw_5_relu'
elif feature_extraction_layer == 'block6':
tensor_name = 'conv_pw_6_relu'
elif feature_extraction_layer == 'block7':
tensor_name = 'conv_pw_7_relu'
elif feature_extraction_layer == 'block8':
tensor_name = 'conv_pw_8_relu'
elif feature_extraction_layer == 'block9':
tensor_name = 'conv_pw_9_relu'
elif feature_extraction_layer == 'block10':
tensor_name = 'conv_pw_10_relu'
elif feature_extraction_layer == 'block11':
tensor_name = 'conv_pw_11_relu'
elif feature_extraction_layer == 'block12':
tensor_name = 'conv_pw_12_relu'
elif feature_extraction_layer == 'block13':
tensor_name = 'conv_pw_13_relu'
elif feature_extraction_layer == 'last_conv':
tensor_name = 'conv_preds'
base_model = MobileNet(weights='imagenet')
# for op in tf.get_default_graph().get_operations():
# print op.name
model = Model(inputs=base_model.input, outputs=base_model.get_layer(tensor_name).output)
hvmit = hvm.HvM(var=variation)
list_of_images = hvmit.images #get all the image for a variation
print ('*******getting var%d images and extracting feature from them*******' %variation)
total_features = extract_features(list_of_images)
if feature_extraction_layer == 'last_conv': # because the output of last_conv is already 1000 features
np.save('/braintree/home/pgaire/data/features_extracted/features_pretrained/%s_%s_features_for_var%d_images.npy'%(net, feature_extraction_layer, variation), total_features)
else:
print ("\n******getting 1000 imagenet images and extracting features from them to calculate PCA transform matrix********")
print ('because the layer you selected has way too many features. so, reducing the features to 1000 per image')
imagenet_images = get_imagenet_images(nimg = 1000)
imagenet_features = extract_features(imagenet_images)
reduced_total_features = PCA(n_components=1000).fit(imagenet_features).transform(total_features)
np.save('/braintree/home/pgaire/data/features_extracted/features_pretrained/%s_%s_features_for_var%d_images.npy'%(net, feature_extraction_layer, variation), reduced_total_features)
| [
"noreply@github.com"
] | pawanon61.noreply@github.com |
50759067858d9935dbdf8bd8cce1c9eaa02479d5 | 52ff7e90f6991a25fb0d6d34bf74e3d4e6ba8050 | /Tasks/t_5_3.py | ef9536b5f9baaa13333090378964fdb8e2eabb00 | [] | no_license | eugenmorok/egm001 | 73d03e6113457b6e8d9a413c9e754725fdc24ac4 | 3280510c6706b05a29c88105eda770dfe459d461 | refs/heads/master | 2023-01-15T10:15:40.736583 | 2020-11-10T10:16:45 | 2020-11-10T10:16:45 | 259,863,219 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 100 | py | n = 9
for i in range(1, n + 1):
for j in range(1, i + 1):
print(j, end="")
print()
| [
"64523467+eugenmorok@users.noreply.github.com"
] | 64523467+eugenmorok@users.noreply.github.com |
a1066204a1c376f77d4902f78fa503b027b7ff04 | 1713f42943617e5153ba339ef99e2ea913b40c77 | /main.py | 05954943f279122ff8d8b2ee4c3f23be6ddc263b | [] | no_license | BusyBoBo/laoba | ead69fac62b8d9f325999e09e73f5150f49607e4 | be6ca5c3bbe0377e4fca8e397a2d0c7fb8c69ab8 | refs/heads/master | 2021-05-06T03:22:25.313297 | 2017-12-19T01:40:08 | 2017-12-19T01:40:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 373 | py | # #coding:utf-8
import sys
import logging
from PyQt4 import QtGui
from myutil.MainWindow import MainWindow
from myutil.MyLogUtil import MyLogUtil
if __name__ == '__main__':
MyLogUtil.init_logging()
try:
app = QtGui.QApplication(sys.argv)
MainWindow()
sys.exit(app.exec_())
except Exception, ex:
logging.error(ex, exc_info=1)
| [
"xihuanjianguo@gmail.com"
] | xihuanjianguo@gmail.com |
5f1eac55f319e241a1503b5bdedd4f2be24610c4 | 4e9520ecd9d976b3673827003cc16e5b41077b15 | /ThaiVectors/datasets/constants.py | b1f9ad7a57424edebdd19621d122068ce34bc825 | [] | no_license | sathachao/elm-for-thai-nlp | 37f4304529e2e970f467b8b8ec2cbf4c7a485525 | 31511d0e362456ca7bef5a187df8f735306e62ef | refs/heads/master | 2020-03-24T23:53:11.371245 | 2017-04-24T14:56:53 | 2017-04-24T14:56:53 | 143,158,673 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 33 | py | NEW_LINE = "<NL>"
BLANK = "<BL>"
| [
"sathachao@gmail.com"
] | sathachao@gmail.com |
64deaf96035529aa07486d03f970d0b8fa49cf54 | 6c86a607dd1fc3c66213f13fe237b5c95942163f | /Documents/main.py | 9429123b088cd35e93b139666ad1cd8ba5f664fe | [] | no_license | miusan00/stc | 8d6cdf9446c3305bf39efc31dcab8ee48626e4a9 | 42c6ef22ddd67c42c317f14ac4cce6267e0a6500 | refs/heads/master | 2020-04-06T09:15:29.962113 | 2018-11-20T07:01:03 | 2018-11-20T07:01:03 | 157,334,739 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 536 | py | ReplyEndpoint ='https://api.line.me/v2/bot/message/reply'
def reply_text(reply_token,text):
header={
"Content-Type": "application/json",
"Authorization": "Bearer{ENTER_ACCESS_TOKEN}"
}
payload = {
"replyToken":reply_token,
"message":[
{
"type": "text"
"text": text
}
]
}
requests.post(ReplyEndpoint,headers=header,data=jason.dumps(payload))
| [
"haruki19990615@gmail.com"
] | haruki19990615@gmail.com |
58f49bd7654563565a044d29af1655320a91297b | 3c7a3b244df064e4bfe871a13851e358de98f85e | /System/Structures/Graphs/Graph.py | 4907c26884fffa78d5b39fb9c7939f846cc0ef5d | [] | no_license | AalizzweII/Autoit-Obfuscator | 85bec29a4e3d5dcff26d79ef152f234ad85e6873 | 3d5e88834708821458a811f5d3aa1922402e594e | refs/heads/master | 2021-06-05T21:41:33.718119 | 2016-09-14T13:24:49 | 2016-09-14T13:24:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 747 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from Vertex import Vertex
class Graph:
def __init__(self):
self.vert_dict = {}
self.num_vertices = 0
def __iter__(self):
return iter(self.vert_dict.values())
def add_vertex(self, node):
self.num_vertices = self.num_vertices + 1
new_vertex = Vertex(node)
self.vert_dict[node] = new_vertex
return new_vertex
def get_vertex(self, n):
return self.vert_dict[n] if n in self.vert_dict else None
def add_edge(self, frm, to):
self.vert_dict[frm].add_neighbor(self.vert_dict[to])
self.vert_dict[to].add_neighbor(self.vert_dict[frm])
def get_vertices(self):
return self.vert_dict.keys()
| [
"jogonba2@inf.upv.es"
] | jogonba2@inf.upv.es |
c387e3b8c8a59eac91ba03e36e1f53c8e0e6dd7c | 082c6d8f248257c8442bbef7412f9915ac4c33bd | /mlrun/projects/pipelines.py | 0e464f7e8a422a342ccda4a4a42acdcbdb947b4d | [
"Apache-2.0"
] | permissive | eran-nussbaum/mlrun | 24e7db989b4eb03548f127ff26d36f77b1c82250 | 97209b27ccf3daf8f202a1a2bb1b01abd537ad70 | refs/heads/master | 2023-08-26T01:35:02.797712 | 2021-10-21T10:18:24 | 2021-10-21T10:18:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,933 | py | # Copyright 2018 Iguazio
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
import builtins
import importlib.util as imputil
import os
import tempfile
import traceback
import uuid
from kfp.compiler import compiler
import mlrun
from mlrun.utils import logger, new_pipe_meta, parse_versioned_object_uri
from ..config import config
from ..run import run_pipeline, wait_for_pipeline_completion
from ..runtimes.pod import AutoMountType
def get_workflow_engine(engine_kind, local=False):
if local:
if engine_kind == "kfp":
logger.warning(
"running kubeflow pipeline locally, note some ops may not run locally!"
)
return _LocalRunner
if not engine_kind or engine_kind == "kfp":
return _KFPRunner
if engine_kind == "local":
return _LocalRunner
raise mlrun.errors.MLRunInvalidArgumentError(
f"Provided workflow engine is not supported. engine_kind={engine_kind}"
)
class WorkflowSpec(mlrun.model.ModelObj):
"""workflow spec and helpers"""
def __init__(
self,
engine=None,
code=None,
path=None,
args=None,
name=None,
handler=None,
ttl=None,
):
self.engine = engine
self.code = code
self.path = path
self.args = args
self.name = name
self.handler = handler
self.ttl = ttl
self.run_local = False
self._tmp_path = None
def get_source_file(self, context=""):
if not self.code and not self.path:
raise mlrun.errors.MLRunInvalidArgumentError(
"workflow must have code or path properties"
)
if self.code:
with tempfile.NamedTemporaryFile(
mode="w", suffix=".py", delete=False
) as workflow_fh:
workflow_fh.write(self.code)
self._tmp_path = workflow_path = workflow_fh.name
else:
workflow_path = self.path or ""
if context and not workflow_path.startswith("/"):
workflow_path = os.path.join(context, workflow_path)
return workflow_path
def merge_args(self, extra_args):
self.args = self.args or {}
if extra_args:
for k, v in extra_args.items():
self.args[k] = v
def clear_tmp(self):
if self._tmp_path:
os.remove(self._tmp_path)
class FunctionsDict:
"""Virtual dictionary hosting the project functions, cached or in the DB"""
def __init__(self, project, decorator=None):
self.project = project
self._decorator = decorator
@property
def _functions(self):
return self.project.spec._function_objects
def enrich(self, function, key):
enriched_function = enrich_function_object(
self.project, function, self._decorator
)
self._functions[key] = enriched_function # update the cache
return self._functions[key]
def load_or_set_function(self, key, default=None) -> mlrun.runtimes.BaseRuntime:
try:
function = self.project.get_function(key)
except Exception as e:
if not default:
raise e
function = default
return self.enrich(function, key)
def get(self, key, default=None) -> mlrun.runtimes.BaseRuntime:
return self.load_or_set_function(key, default)
def __getitem__(self, key) -> mlrun.runtimes.BaseRuntime:
return self.load_or_set_function(key)
def __setitem__(self, key, val):
self._functions[key] = val
def values(self):
return [self.enrich(function, key) for key, function in self._functions.items()]
def keys(self):
return self._functions.keys()
def items(self):
return {
key: self.enrich(function, key) for key, function in self._functions.items()
}
def __len__(self):
return len(self._functions)
def __iter__(self):
yield from self._functions.keys()
def __delitem__(self, key):
del self._functions[key]
class _PipelineContext:
"""current (running) pipeline context"""
def __init__(self):
self.project = None
self.workflow = None
self.functions = FunctionsDict(None)
self.workflow_id = None
self.workflow_artifact_path = None
self.runs_map = {}
def set(self, project, workflow=None):
self.project = project
self.workflow = workflow
self.functions.project = project
self.runs_map = {}
def clear(self, with_project=False):
if with_project:
self.project = None
self.functions.project = None
self.workflow = None
self.runs_map = {}
self.workflow_id = None
self.workflow_artifact_path = None
def is_initialized(self, raise_exception=False):
if self.project:
return True
if raise_exception:
raise ValueError(
"pipeline context is not initialized, must be used inside a pipeline"
)
return False
pipeline_context = _PipelineContext()
def get_db_function(project, key) -> mlrun.runtimes.BaseRuntime:
project_instance, name, tag, hash_key = parse_versioned_object_uri(
key, project.metadata.name
)
runtime = mlrun.get_run_db().get_function(name, project_instance, tag, hash_key)
return mlrun.new_function(runtime=runtime)
def enrich_function_object(
project, function, decorator=None
) -> mlrun.runtimes.BaseRuntime:
if hasattr(function, "_enriched"):
return function
f = function.copy()
f.metadata.project = project.metadata.name
setattr(f, "_enriched", True)
src = f.spec.build.source
if src and src in [".", "./"]:
if not project.spec.source:
raise ValueError(
"project source must be specified when cloning context to a function"
)
if project.spec.mountdir:
f.spec.workdir = project.spec.mountdir
f.spec.build.source = ""
else:
f.spec.build.source = project.spec.source
f.spec.build.load_source_on_run = project.spec.load_source_on_run
if decorator:
decorator(f)
if (
decorator and AutoMountType.is_auto_modifier(decorator)
) or project.spec.disable_auto_mount:
f.spec.disable_auto_mount = True
f.try_auto_mount_based_on_config()
return f
class _PipelineRunStatus:
"""pipeline run result (status)"""
def __init__(self, run_id, engine, project, workflow=None, state=""):
self.run_id = run_id
self.project = project
self.workflow = workflow
self._engine = engine
self._state = state
@property
def state(self):
if self._state not in mlrun.run.RunStatuses.stable_statuses():
self._state = self._engine.get_state(self.run_id, self.project)
return self._state
def wait_for_completion(self, timeout=None, expected_statuses=None):
self._state = self._engine.wait_for_completion(
self.run_id,
project=self.project,
timeout=timeout,
expected_statuses=expected_statuses,
)
return self._state
def __str__(self):
return str(self.run_id)
def __repr__(self):
return str(self.run_id)
class _PipelineRunner(abc.ABC):
"""abstract pipeline runner class"""
engine = ""
@classmethod
@abc.abstractmethod
def save(cls, project, workflow_spec: WorkflowSpec, target, artifact_path=None):
raise NotImplementedError(
f"save operation not supported in {cls.engine} pipeline engine"
)
@classmethod
@abc.abstractmethod
def run(
cls,
project,
workflow_spec: WorkflowSpec,
name=None,
workflow_handler=None,
secrets=None,
artifact_path=None,
namespace=None,
) -> _PipelineRunStatus:
return None
@staticmethod
@abc.abstractmethod
def wait_for_completion(run_id, project=None, timeout=None, expected_statuses=None):
return ""
@staticmethod
@abc.abstractmethod
def get_state(run_id, project=None):
return ""
@staticmethod
def _get_handler(workflow_handler, workflow_spec, project, secrets):
if not (workflow_handler and callable(workflow_handler)):
workflow_file = workflow_spec.get_source_file(project.spec.context)
workflow_handler = create_pipeline(
project,
workflow_file,
pipeline_context.functions,
secrets,
handler=workflow_handler or workflow_spec.handler,
)
else:
builtins.funcs = pipeline_context.functions
return workflow_handler
class _KFPRunner(_PipelineRunner):
"""Kubeflow pipelines runner"""
engine = "kfp"
@classmethod
def save(cls, project, workflow_spec: WorkflowSpec, target, artifact_path=None):
workflow_file = workflow_spec.get_source_file(project.spec.context)
functions = FunctionsDict(project)
pipeline = create_pipeline(
project, workflow_file, functions, secrets=project._secrets,
)
artifact_path = artifact_path or project.spec.artifact_path
conf = new_pipe_meta(artifact_path, ttl=workflow_spec.ttl)
compiler.Compiler().compile(pipeline, target, pipeline_conf=conf)
workflow_spec.clear_tmp()
@classmethod
def run(
cls,
project,
workflow_spec: WorkflowSpec,
name=None,
workflow_handler=None,
secrets=None,
artifact_path=None,
namespace=None,
) -> _PipelineRunStatus:
pipeline_context.set(project, workflow_spec)
workflow_handler = _PipelineRunner._get_handler(
workflow_handler, workflow_spec, project, secrets
)
namespace = namespace or config.namespace
id = run_pipeline(
workflow_handler,
project=project.metadata.name,
arguments=workflow_spec.args,
experiment=name or workflow_spec.name,
namespace=namespace,
artifact_path=artifact_path,
ttl=workflow_spec.ttl,
)
project.notifiers.push_start_message(
project.metadata.name, project.get_param("commit_id", None), id
)
pipeline_context.clear()
return _PipelineRunStatus(id, cls, project=project, workflow=workflow_spec)
@staticmethod
def wait_for_completion(run_id, project=None, timeout=None, expected_statuses=None):
project_name = project.metadata.name if project else ""
run_info = wait_for_pipeline_completion(
run_id,
timeout=timeout,
expected_statuses=expected_statuses,
project=project_name,
)
status = ""
if run_info:
status = run_info["run"].get("status")
return status
@staticmethod
def get_state(run_id, project=None):
project_name = project.metadata.name if project else ""
resp = mlrun.run.get_pipeline(run_id, project=project_name)
if resp:
return resp["run"].get("status", "")
return ""
class _LocalRunner(_PipelineRunner):
"""local pipelines runner"""
engine = "local"
@classmethod
def run(
cls,
project,
workflow_spec: WorkflowSpec,
name=None,
workflow_handler=None,
secrets=None,
artifact_path=None,
namespace=None,
) -> _PipelineRunStatus:
pipeline_context.set(project, workflow_spec)
workflow_handler = _PipelineRunner._get_handler(
workflow_handler, workflow_spec, project, secrets
)
workflow_id = uuid.uuid4().hex
pipeline_context.workflow_id = workflow_id
pipeline_context.workflow_artifact_path = artifact_path
project.notifiers.push_start_message(project.metadata.name, id=workflow_id)
try:
workflow_handler(**workflow_spec.args)
state = mlrun.run.RunStatuses.succeeded
except Exception as e:
trace = traceback.format_exc()
logger.error(trace)
project.notifiers.push(
f"Workflow {workflow_id} run failed!, error: {e}\n{trace}"
)
state = mlrun.run.RunStatuses.failed
mlrun.run.wait_for_runs_completion(pipeline_context.runs_map.values())
project.notifiers.push_run_results(
pipeline_context.runs_map.values(), state=state
)
pipeline_context.clear()
return _PipelineRunStatus(
workflow_id, cls, project=project, workflow=workflow_spec, state=state
)
@staticmethod
def get_state(run_id, project=None):
return ""
def create_pipeline(project, pipeline, functions, secrets=None, handler=None):
spec = imputil.spec_from_file_location("workflow", pipeline)
if spec is None:
raise ImportError(f"cannot import workflow {pipeline}")
mod = imputil.module_from_spec(spec)
spec.loader.exec_module(mod)
setattr(mod, "funcs", functions) # should be replaced with "functions" in future
setattr(mod, "functions", functions)
setattr(mod, "this_project", project)
if hasattr(mod, "init_functions"):
getattr(mod, "init_functions")(functions, project, secrets)
# verify all functions are in this project (init_functions may add new functions)
for f in functions.values():
f.metadata.project = project.metadata.name
if not handler and hasattr(mod, "kfpipeline"):
handler = "kfpipeline"
if not handler and hasattr(mod, "pipeline"):
handler = "pipeline"
if not handler or not hasattr(mod, handler):
raise ValueError(f"pipeline function ({handler or 'pipeline'}) not found")
return getattr(mod, handler)
def github_webhook(request):
signature = request.headers.get("X-Hub-Signature")
data = request.data
print("sig:", signature)
print("headers:", request.headers)
print("data:", data)
print("json:", request.get_json())
if request.headers.get("X-GitHub-Event") == "ping":
return {"msg": "Ok"}
return {"msg": "pushed"}
| [
"noreply@github.com"
] | eran-nussbaum.noreply@github.com |
6ce2ccada352505e599aa287935709208778fe9e | 96f3701c13d1f4b6d1ee4b71962bd8f12d861db0 | /platform.py | 5862c78dbc16a72c2deb22ac5bc815fd9bed499d | [] | no_license | m00n33r/DoodleJump_PyGame | b64e6fd7ae74f07c8eaa42fe03f11ee38f854be5 | aaa606d5bbee8c1aca40cec6f2332e3422ceb999 | refs/heads/master | 2023-02-26T03:00:30.872228 | 2021-01-26T18:12:33 | 2021-01-26T18:12:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,148 | py | from config import *
from load_image import load_image
from random import choice
class Platform(pygame.sprite.Sprite):
    """A single jumpable platform sprite for the Doodle-Jump clone."""

    # Shared sprite image, loaded and scaled once for all platform instances.
    platform_img = pygame.transform.scale(load_image('game\\platform.png'), (59, 17))

    def __init__(self, group, x, y):
        """Create a platform at ``(x, y)`` and register it in *group*."""
        super().__init__(group)
        self.x = x
        self.y = y
        self.image = Platform.platform_img
        self.rect = pygame.Rect(x, y, 59, 17)

    def updating(self, screen):
        """Draw the platform, recycling it to the top once it scrolls off-screen."""
        if self.rect.y >= HEIGHT - 20:
            # Respawn just above the visible area at a random horizontal position.
            self.rect.x = choice(range(0, WIDTH - 60))
            self.rect.y = -20
        screen.blit(self.image, self.rect)

    def top_rect(self):
        """Return the platform's collision rectangle, trimmed 3 px on the left."""
        return pygame.Rect(
            self.rect.left + 3,
            self.rect.top,
            self.rect.width - 3,
            self.rect.height,
        )
| [
"muyio0905@gmail.com"
] | muyio0905@gmail.com |
f25c8db4101f29619cb8f07900669ba1c024a289 | 1c17aaf0f96c2ba2e2eaf6b552017fb1393a5eaf | /day_1/015.py | 976ea6ac2d148af443f51e875b7a4b88e4d70271 | [] | no_license | ljwon0312/academy_python | b5372387ce317f487e7b498f5f875058661878d3 | b87c3ba38616f3785a1ba587472a3407e113d57a | refs/heads/master | 2020-06-17T03:33:29.989733 | 2019-07-10T07:53:31 | 2019-07-10T07:53:31 | 195,782,134 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 78 | py | import sys
print('\"{0}\" is the thing you entered.'.format(sys.argv[1]))
| [
"jjww-0312@naver.com"
] | jjww-0312@naver.com |
2f33ffe4057054e28387dce8d78b3eda992a6bb3 | 04803c70bb97012b7d500a177ac0240fb2ddbe38 | /3heptane_pdep/pdep/network487_1.py | 3254df27bf284aa0be30602446ec71249f452594 | [] | no_license | shenghuiqin/chpd | 735e0415f6688d88579fc935459c1b0f53596d1d | 396ba54629036e3f2be0b3fabe09b78c90d56939 | refs/heads/master | 2023-03-01T23:29:02.118150 | 2019-10-05T04:02:23 | 2019-10-05T04:02:23 | 192,084,217 | 0 | 0 | null | 2019-06-18T18:33:13 | 2019-06-15T13:52:28 | HTML | UTF-8 | Python | false | false | 38,878 | py | species(
label = 'C=[C]OC[CH]C(698)',
structure = SMILES('C=[C]OC[CH]C'),
E0 = (230.359,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1685,370,2750,2850,1437.5,1250,1305,750,350,2950,3100,1380,975,1025,1650,2750,2800,2850,1350,1500,750,1050,1375,1000,3025,407.5,1350,352.5,458.08,458.185,458.229,458.289],'cm^-1')),
HinderedRotor(inertia=(0.000803108,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0983281,'amu*angstrom^2'), symmetry=1, barrier=(14.6245,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.098134,'amu*angstrom^2'), symmetry=1, barrier=(14.6238,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0980971,'amu*angstrom^2'), symmetry=1, barrier=(14.617,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (84.1164,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.21694,0.0500502,-1.31876e-05,-2.25714e-08,1.34008e-11,27816,26.5423], Tmin=(100,'K'), Tmax=(965.293,'K')), NASAPolynomial(coeffs=[14.9903,0.0193723,-6.53438e-06,1.16226e-09,-8.26723e-14,23927.2,-45.7896], Tmin=(965.293,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(230.359,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + radical(CCJCO) + radical(C=CJO)"""),
)
species(
label = 'CH2CO(27)',
structure = SMILES('C=C=O'),
E0 = (-60.8183,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2950,3100,1380,975,1025,1650,2120,512.5,787.5],'cm^-1')),
],
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (42.0367,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(3625.12,'J/mol'), sigma=(3.97,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=2.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.13241,0.0181319,-1.74093e-05,9.35336e-09,-2.01725e-12,-7148.09,13.3808], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[5.75871,0.00635124,-2.25955e-06,3.62322e-10,-2.15856e-14,-8085.33,-4.9649], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(-60.8183,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(108.088,'J/(mol*K)'), label="""CH2CO""", comment="""Thermo library: FFCM1(-)"""),
)
species(
label = 'C3H6(59)',
structure = SMILES('C=CC'),
E0 = (5.9763,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2950,3100,1380,975,1025,1650,2750,2800,2850,1350,1500,750,1050,1375,1000,3010,987.5,1337.5,450,1655],'cm^-1')),
HinderedRotor(inertia=(0.497558,'amu*angstrom^2'), symmetry=1, barrier=(11.4398,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (42.0797,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(2218.31,'J/mol'), sigma=(4.982,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=1.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.31912,0.00817957,3.34737e-05,-4.36194e-08,1.58214e-11,749.325,9.54025], Tmin=(100,'K'), Tmax=(983.754,'K')), NASAPolynomial(coeffs=[5.36755,0.0170743,-6.35108e-06,1.1662e-09,-8.27621e-14,-487.137,-4.54465], Tmin=(983.754,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(5.9763,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(203.705,'J/(mol*K)'), label="""C3H6""", comment="""Thermo library: DFT_QCI_thermo"""),
)
species(
label = 'H(3)',
structure = SMILES('[H]'),
E0 = (211.792,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (1.00794,'amu'),
collisionModel = TransportData(shapeIndex=0, epsilon=(1205.6,'J/mol'), sigma=(2.05,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,-2.38914e-13,3.12709e-16,-1.33367e-19,1.7499e-23,25472.7,-0.459566], Tmin=(100,'K'), Tmax=(4383.16,'K')), NASAPolynomial(coeffs=[2.50003,-3.04997e-08,1.01101e-11,-1.48797e-15,8.20356e-20,25472.7,-0.459785], Tmin=(4383.16,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(211.792,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""H""", comment="""Thermo library: BurkeH2O2"""),
)
species(
label = 'C=[C]OC=CC(2031)',
structure = SMILES('C=[C]OC=CC'),
E0 = (173.532,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1685,370,2950,3100,1380,975,1025,1650,2750,2800,2850,1350,1500,750,1050,1375,1000,2995,3025,975,1000,1300,1375,400,500,1630,1680,329.639,329.641,329.644],'cm^-1')),
HinderedRotor(inertia=(0.182158,'amu*angstrom^2'), symmetry=1, barrier=(14.0461,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.182153,'amu*angstrom^2'), symmetry=1, barrier=(14.0461,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.182157,'amu*angstrom^2'), symmetry=1, barrier=(14.0462,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (83.1085,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.50643,0.0478119,-2.6622e-05,1.53467e-09,2.61338e-12,20966.9,24.9254], Tmin=(100,'K'), Tmax=(1051.94,'K')), NASAPolynomial(coeffs=[11.4777,0.0225422,-8.62121e-06,1.55449e-09,-1.07197e-13,18169.4,-27.0108], Tmin=(1051.94,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(173.532,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(295.164,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-Cd)(Cds-Cd)) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsH) + group(Cds-CdsOsH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + radical(C=CJO)"""),
)
species(
label = 'C=[C]OCC=C(2032)',
structure = SMILES('C=[C]OCC=C'),
E0 = (163.796,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1685,370,2750,2850,1437.5,1250,1305,750,350,3010,987.5,1337.5,450,1655,2950,3000,3050,3100,1330,1430,900,1050,1000,1050,1600,1700,394.297,394.642,394.877,395.664],'cm^-1')),
HinderedRotor(inertia=(0.146731,'amu*angstrom^2'), symmetry=1, barrier=(16.2309,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.147058,'amu*angstrom^2'), symmetry=1, barrier=(16.226,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.14668,'amu*angstrom^2'), symmetry=1, barrier=(16.2285,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (83.1085,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.44,0.0460217,-1.19253e-05,-2.03973e-08,1.19596e-11,19801.5,24.1031], Tmin=(100,'K'), Tmax=(973.112,'K')), NASAPolynomial(coeffs=[13.9703,0.018489,-6.43871e-06,1.1603e-09,-8.27083e-14,16227.8,-41.8388], Tmin=(973.112,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(163.796,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(295.164,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(Cs-(Cds-Cds)OsHH) + group(Cds-CdsCsH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + group(Cds-CdsHH) + radical(C=CJO)"""),
)
species(
label = 'C#COC[CH]C(2033)',
structure = SMILES('C#COC[CH]C'),
E0 = (199.941,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2850,1437.5,1250,1305,750,350,750,770,3400,2100,2175,525,2750,2800,2850,1350,1500,750,1050,1375,1000,3025,407.5,1350,352.5,386.648,386.715,386.79],'cm^-1')),
HinderedRotor(inertia=(0.192981,'amu*angstrom^2'), symmetry=1, barrier=(20.4485,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.192668,'amu*angstrom^2'), symmetry=1, barrier=(20.4502,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.192809,'amu*angstrom^2'), symmetry=1, barrier=(20.4508,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.191625,'amu*angstrom^2'), symmetry=1, barrier=(20.4501,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (83.1085,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.01103,0.0538878,-2.22728e-05,-1.78021e-08,1.2922e-11,24165.7,21.6093], Tmin=(100,'K'), Tmax=(956.492,'K')), NASAPolynomial(coeffs=[17.3543,0.0139845,-4.30096e-06,7.61444e-10,-5.59705e-14,19738.2,-63.3119], Tmin=(956.492,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(199.941,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(291.007,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Ct-CtOs) + group(Ct-CtH) + radical(CCJCO)"""),
)
species(
label = 'C3H6(T)(82)',
structure = SMILES('[CH2][CH]C'),
E0 = (284.865,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3025,407.5,1350,352.5,2750,2800,2850,1350,1500,750,1050,1375,1000,3000,3100,440,815,1455,1000],'cm^-1')),
HinderedRotor(inertia=(0.238388,'amu*angstrom^2'), symmetry=1, barrier=(5.48102,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.00909639,'amu*angstrom^2'), symmetry=1, barrier=(22.1004,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (42.0797,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.93778,0.0190991,4.26859e-06,-1.44876e-08,5.7495e-12,34303.2,12.9695], Tmin=(100,'K'), Tmax=(1046.8,'K')), NASAPolynomial(coeffs=[5.93907,0.0171892,-6.69154e-06,1.21547e-09,-8.39798e-14,33151.2,-4.14876], Tmin=(1046.8,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(284.865,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(199.547,'J/(mol*K)'), label="""C3H6(T)""", comment="""Thermo library: DFT_QCI_thermo"""),
)
species(
label = 'C=[C]O[CH]CC(2029)',
structure = SMILES('C=[C]O[CH]CC'),
E0 = (224.384,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (84.1164,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.65424,0.0646709,-4.87239e-05,9.58118e-09,3.13206e-12,27115.7,25.539], Tmin=(100,'K'), Tmax=(969.077,'K')), NASAPolynomial(coeffs=[16.9401,0.0171675,-5.71706e-06,9.92424e-10,-6.90693e-14,23033.3,-57.2967], Tmin=(969.077,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(224.384,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + radical(CCsJOC(O)) + radical(C=CJO)"""),
)
species(
label = '[CH2]CCO[C]=C(2034)',
structure = SMILES('[CH2]CCO[C]=C'),
E0 = (235.703,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1685,370,2750,2783.33,2816.67,2850,1425,1450,1225,1275,1270,1340,700,800,300,400,3000,3100,440,815,1455,1000,2950,3100,1380,975,1025,1650,487.143,487.143,487.147,487.15],'cm^-1')),
HinderedRotor(inertia=(0.0204382,'amu*angstrom^2'), symmetry=1, barrier=(3.44185,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0907616,'amu*angstrom^2'), symmetry=1, barrier=(15.2838,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0907566,'amu*angstrom^2'), symmetry=1, barrier=(15.2838,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.664747,'amu*angstrom^2'), symmetry=1, barrier=(15.2838,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (84.1164,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.04867,0.0561203,-3.26547e-05,-3.98954e-10,4.9755e-12,28462.6,26.4366], Tmin=(100,'K'), Tmax=(997.468,'K')), NASAPolynomial(coeffs=[14.3448,0.0211639,-7.70163e-06,1.37998e-09,-9.62081e-14,24896.6,-42.255], Tmin=(997.468,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(235.703,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + radical(C=CJO) + radical(RCCJ)"""),
)
species(
label = '[CH]=COC[CH]C(2035)',
structure = SMILES('[CH]=COC[CH]C'),
E0 = (237.711,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3120,650,792.5,1650,2750,2850,1437.5,1250,1305,750,350,3010,987.5,1337.5,450,1655,2750,2800,2850,1350,1500,750,1050,1375,1000,3025,407.5,1350,352.5,407.076,407.076,407.076],'cm^-1')),
HinderedRotor(inertia=(0.146213,'amu*angstrom^2'), symmetry=1, barrier=(17.1935,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.146213,'amu*angstrom^2'), symmetry=1, barrier=(17.1935,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.146213,'amu*angstrom^2'), symmetry=1, barrier=(17.1935,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.146213,'amu*angstrom^2'), symmetry=1, barrier=(17.1935,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (84.1164,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.943686,0.0516915,-2.52501e-06,-4.45484e-08,2.39555e-11,28714.3,24.7978], Tmin=(100,'K'), Tmax=(935.956,'K')), NASAPolynomial(coeffs=[19.2936,0.0125439,-2.72816e-06,4.29345e-10,-3.35337e-14,23559.2,-71.705], Tmin=(935.956,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(237.711,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + radical(Cds_P) + radical(CCJCO)"""),
)
species(
label = 'C=CO[CH][CH]C(2036)',
structure = SMILES('C=CO[CH][CH]C'),
E0 = (184.542,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (84.1164,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.652412,0.0575331,-1.25986e-05,-3.95519e-08,2.35965e-11,22330.7,24.914], Tmin=(100,'K'), Tmax=(928.203,'K')), NASAPolynomial(coeffs=[21.5252,0.00894848,-9.31265e-07,7.97704e-11,-9.15975e-15,16673.9,-83.8289], Tmin=(928.203,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(184.542,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + radical(CCsJOC(O)) + radical(CCJCO)"""),
)
species(
label = '[CH]=[C]OCCC(2037)',
structure = SMILES('[CH]=[C]OCCC'),
E0 = (277.553,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3120,650,792.5,1650,2750,2783.33,2816.67,2850,1425,1450,1225,1275,1270,1340,700,800,300,400,2750,2800,2850,1350,1500,750,1050,1375,1000,1685,370,256.441,256.465,256.504],'cm^-1')),
HinderedRotor(inertia=(0.00256172,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.352676,'amu*angstrom^2'), symmetry=1, barrier=(16.463,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.352703,'amu*angstrom^2'), symmetry=1, barrier=(16.4638,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.35259,'amu*angstrom^2'), symmetry=1, barrier=(16.4634,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (84.1164,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.934154,0.0589529,-3.9022e-05,4.95417e-09,3.39468e-12,33499.8,25.4641], Tmin=(100,'K'), Tmax=(995.893,'K')), NASAPolynomial(coeffs=[14.8128,0.0205887,-7.41486e-06,1.31882e-09,-9.15356e-14,29873.6,-45.7623], Tmin=(995.893,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(277.553,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + radical(C=CJO) + radical(Cds_P)"""),
)
species(
label = '[CH2][CH]COC=C(2038)',
structure = SMILES('[CH2][CH]COC=C'),
E0 = (195.861,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (84.1164,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.05655,0.0488815,3.74655e-06,-4.97496e-08,2.54576e-11,23677.2,25.776], Tmin=(100,'K'), Tmax=(938.133,'K')), NASAPolynomial(coeffs=[18.8198,0.0131281,-3.01978e-06,4.91588e-10,-3.8293e-14,18584.8,-68.1647], Tmin=(938.133,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(195.861,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + radical(RCCJ) + radical(CCJCO)"""),
)
species(
label = '[CH2][C]=O(266)',
structure = SMILES('[CH2][C]=O'),
E0 = (160.185,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3000,3100,440,815,1455,1000,539.612,539.669],'cm^-1')),
HinderedRotor(inertia=(0.000578908,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (42.0367,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.39563,0.0101365,2.30741e-06,-8.97566e-09,3.68242e-12,19290.3,10.0703], Tmin=(100,'K'), Tmax=(1068.9,'K')), NASAPolynomial(coeffs=[6.35055,0.00638951,-2.69368e-06,5.4221e-10,-4.02476e-14,18240.9,-6.33602], Tmin=(1068.9,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(160.185,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(153.818,'J/(mol*K)'), comment="""Thermo library: FFCM1(-) + radical(CJC=O) + radical(CsCJ=O)"""),
)
species(
label = 'C=COC=CC(2039)',
structure = SMILES('C=COC=CC'),
E0 = (-66.2118,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (84.1164,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.33928,0.0455803,1.74877e-06,-3.88798e-08,1.93743e-11,-7855.93,22.3951], Tmin=(100,'K'), Tmax=(956.621,'K')), NASAPolynomial(coeffs=[15.2214,0.019075,-6.14769e-06,1.08988e-09,-7.86297e-14,-11955.1,-51.5057], Tmin=(956.621,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-66.2118,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(320.107,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-Cd)(Cds-Cd)) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsH) + group(Cds-CdsOsH) + group(Cds-CdsOsH) + group(Cds-CdsHH)"""),
)
species(
label = 'C=CCOC=C(2040)',
structure = SMILES('C=CCOC=C'),
E0 = (-75.948,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (84.1164,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.23944,0.0441573,1.5312e-05,-5.96091e-08,2.83462e-11,-9019.9,21.6943], Tmin=(100,'K'), Tmax=(942.434,'K')), NASAPolynomial(coeffs=[18.0156,0.0145226,-3.68295e-06,6.29973e-10,-4.875e-14,-14028,-68.0403], Tmin=(942.434,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-75.948,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(320.107,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(Cs-(Cds-Cds)OsHH) + group(Cds-CdsCsH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + group(Cds-CdsHH)"""),
)
species(
label = 'C=C1OCC1C(2014)',
structure = SMILES('C=C1OCC1C'),
E0 = (-86.9896,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (84.1164,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.7939,0.0259972,7.19672e-05,-1.24486e-07,5.40811e-11,-10361.9,14.835], Tmin=(100,'K'), Tmax=(906.535,'K')), NASAPolynomial(coeffs=[18.8975,0.00966496,1.1421e-06,-4.4231e-10,2.86377e-14,-15892.8,-79.4038], Tmin=(906.535,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-86.9896,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(328.422,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(Cs-(Cds-Cds)CsCsH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsCsOs) + group(Cds-CdsHH) + ring(2methyleneoxetane)"""),
)
species(
label = 'H2CC(T)(265)',
structure = SMILES('[C]=C'),
E0 = (600.324,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2950,3100,1380,975,1025,1650],'cm^-1')),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (26.0373,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.9483,-0.00181644,2.07122e-05,-2.36686e-08,8.4544e-12,72206.8,5.31552], Tmin=(100,'K'), Tmax=(953.673,'K')), NASAPolynomial(coeffs=[4.20276,0.00473416,-1.573e-06,2.85895e-10,-2.08634e-14,71811.9,2.28369], Tmin=(953.673,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(600.324,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(83.1447,'J/(mol*K)'), label="""H2CC(T)""", comment="""Thermo library: DFT_QCI_thermo"""),
)
species(
label = 'C[CH]C[O](1252)',
structure = SMILES('C[CH]C[O]'),
E0 = (152.731,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2850,1437.5,1250,1305,750,350,2750,2800,2850,1350,1500,750,1050,1375,1000,3025,407.5,1350,352.5,1354.16,2181.58],'cm^-1')),
HinderedRotor(inertia=(0.00393814,'amu*angstrom^2'), symmetry=1, barrier=(5.12701,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.222485,'amu*angstrom^2'), symmetry=1, barrier=(5.11536,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (58.0791,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.11,0.0164267,1.17442e-05,-1.70313e-08,5.13386e-12,18403.8,17.5214], Tmin=(100,'K'), Tmax=(1242.42,'K')), NASAPolynomial(coeffs=[4.47899,0.0232162,-9.97115e-06,1.8746e-09,-1.29968e-13,17199.4,7.14185], Tmin=(1242.42,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(152.731,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(270.22,'J/(mol*K)'), comment="""Thermo library: DFT_QCI_thermo + radical(CCOJ) + radical(CCJCO)"""),
)
species(
label = 'CHCH3(T)(80)',
structure = SMILES('[CH]C'),
E0 = (343.893,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2800,2850,1350,1500,750,1050,1375,1000,592.415,4000],'cm^-1')),
HinderedRotor(inertia=(0.00438701,'amu*angstrom^2'), symmetry=1, barrier=(26.7686,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (28.0532,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.8236,-0.000909219,3.21369e-05,-3.73465e-08,1.33084e-11,41371.4,7.10957], Tmin=(100,'K'), Tmax=(960.825,'K')), NASAPolynomial(coeffs=[4.30495,0.00943054,-3.2755e-06,5.95101e-10,-4.2729e-14,40709.1,1.84155], Tmin=(960.825,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(343.893,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(128.874,'J/(mol*K)'), label="""CHCH3(T)""", comment="""Thermo library: DFT_QCI_thermo"""),
)
species(
label = '[CH2]O[C]=C(1770)',
structure = SMILES('[CH2]O[C]=C'),
E0 = (283.793,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2950,3100,1380,975,1025,1650,3000,3100,440,815,1455,1000,1685,370,607.819,607.836],'cm^-1')),
HinderedRotor(inertia=(0.0541333,'amu*angstrom^2'), symmetry=1, barrier=(14.1933,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0541477,'amu*angstrom^2'), symmetry=1, barrier=(14.1935,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (56.0633,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.71895,0.0199743,1.11333e-05,-3.28564e-08,1.53118e-11,34186,17.0659], Tmin=(100,'K'), Tmax=(932.117,'K')), NASAPolynomial(coeffs=[10.6429,0.00688603,-1.46285e-06,2.25575e-10,-1.75172e-14,31800.2,-25.4793], Tmin=(932.117,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(283.793,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(174.604,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(Cs-OsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + radical(C=CJO) + radical(C=COCJ)"""),
)
species(
label = 'N2',
structure = SMILES('N#N'),
E0 = (-8.69489,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (28.0135,'amu'),
collisionModel = TransportData(shapeIndex=1, epsilon=(810.913,'J/mol'), sigma=(3.621,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(1.76,'angstroms^3'), rotrelaxcollnum=4.0, comment="""PrimaryTransportLibrary"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.61263,-0.00100893,2.49898e-06,-1.43375e-09,2.58635e-13,-1051.1,2.6527], Tmin=(100,'K'), Tmax=(1817.04,'K')), NASAPolynomial(coeffs=[2.97591,0.0016414,-7.19719e-07,1.25377e-10,-7.91522e-15,-1025.85,5.53754], Tmin=(1817.04,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-8.69489,'kJ/mol'), Cp0=(29.1007,'J/(mol*K)'), CpInf=(37.4151,'J/(mol*K)'), label="""N2""", comment="""Thermo library: BurkeH2O2"""),
)
species(
label = 'Ne',
structure = SMILES('[Ne]'),
E0 = (-6.19738,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (20.1797,'amu'),
collisionModel = TransportData(shapeIndex=0, epsilon=(1235.53,'J/mol'), sigma=(3.758e-10,'m'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with fixed Lennard Jones Parameters. This is the fallback method! Try improving transport databases!"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,3.35532], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,3.35532], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(-6.19738,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""Ne""", comment="""Thermo library: primaryThermoLibrary"""),
)
transitionState(
label = 'TS1',
E0 = (230.359,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS2',
E0 = (392.094,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS3',
E0 = (378.768,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS4',
E0 = (426.843,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS5',
E0 = (253.93,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS6',
E0 = (339.143,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS7',
E0 = (387.582,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS8',
E0 = (343.148,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS9',
E0 = (372.336,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS10',
E0 = (422.738,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS11',
E0 = (372.929,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS12',
E0 = (445.05,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS13',
E0 = (330.049,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS14',
E0 = (269.584,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS15',
E0 = (238.643,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS16',
E0 = (753.055,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS17',
E0 = (627.685,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
reaction(
label = 'reaction1',
reactants = ['C=[C]OC[CH]C(698)'],
products = ['CH2CO(27)', 'C3H6(59)'],
transitionState = 'TS1',
kinetics = Arrhenius(A=(5e+12,'s^-1'), n=0, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Exact match found for rate rule [RJJ]
Euclidian distance = 0
family: 1,4_Linear_birad_scission"""),
)
reaction(
label = 'reaction2',
reactants = ['H(3)', 'C=[C]OC=CC(2031)'],
products = ['C=[C]OC[CH]C(698)'],
transitionState = 'TS2',
kinetics = Arrhenius(A=(2.182e+10,'cm^3/(mol*s)'), n=0.859, Ea=(6.76971,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(2000,'K'), comment="""From training reaction 2821 used for Cds-OsH_Cds-CsH;HJ
Exact match found for rate rule [Cds-OsH_Cds-CsH;HJ]
Euclidian distance = 0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction3',
reactants = ['H(3)', 'C=[C]OCC=C(2032)'],
products = ['C=[C]OC[CH]C(698)'],
transitionState = 'TS3',
kinetics = Arrhenius(A=(5.014e+08,'cm^3/(mol*s)'), n=1.733, Ea=(3.17984,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(2000,'K'), comment="""From training reaction 2823 used for Cds-HH_Cds-Cs\O2s/H;HJ
Exact match found for rate rule [Cds-HH_Cds-Cs\O2s/H;HJ]
Euclidian distance = 0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction4',
reactants = ['H(3)', 'C#COC[CH]C(2033)'],
products = ['C=[C]OC[CH]C(698)'],
transitionState = 'TS4',
kinetics = Arrhenius(A=(4278.27,'m^3/(mol*s)'), n=1.383, Ea=(15.1097,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [Ct_Ct;HJ]
Euclidian distance = 0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction5',
reactants = ['CH2CO(27)', 'C3H6(T)(82)'],
products = ['C=[C]OC[CH]C(698)'],
transitionState = 'TS5',
kinetics = Arrhenius(A=(11.6997,'m^3/(mol*s)'), n=2.021, Ea=(29.883,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [Od_R;YJ] for rate rule [Od_Cdd;CJ]
Euclidian distance = 1.41421356237
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction6',
reactants = ['C=[C]OC[CH]C(698)'],
products = ['C=[C]O[CH]CC(2029)'],
transitionState = 'TS6',
kinetics = Arrhenius(A=(5.4e-20,'s^-1'), n=9.13, Ea=(108.784,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(2500,'K'), comment="""From training reaction 341 used for R2H_S;C_rad_out_H/NonDeC;Cs_H_out_H/NonDeO
Exact match found for rate rule [R2H_S;C_rad_out_H/NonDeC;Cs_H_out_H/NonDeO]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction7',
reactants = ['[CH2]CCO[C]=C(2034)'],
products = ['C=[C]OC[CH]C(698)'],
transitionState = 'TS7',
kinetics = Arrhenius(A=(718000,'s^-1'), n=2.05, Ea=(151.879,'kJ/mol'), T0=(1,'K'), Tmin=(500,'K'), Tmax=(2000,'K'), comment="""From training reaction 147 used for R2H_S;C_rad_out_2H;Cs_H_out_H/NonDeC
Exact match found for rate rule [R2H_S;C_rad_out_2H;Cs_H_out_H/NonDeC]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction8',
reactants = ['[CH]=COC[CH]C(2035)'],
products = ['C=[C]OC[CH]C(698)'],
transitionState = 'TS8',
kinetics = Arrhenius(A=(1.08e+06,'s^-1'), n=1.99, Ea=(105.437,'kJ/mol'), T0=(1,'K'), comment="""From training reaction 17 used for R2H_D;Cd_rad_out_singleH;Cd_H_out_singleNd
Exact match found for rate rule [R2H_D;Cd_rad_out_singleH;Cd_H_out_singleNd]
Euclidian distance = 0
family: intra_H_migration"""),
)
reaction(
label = 'reaction9',
reactants = ['C=[C]OC[CH]C(698)'],
products = ['C=CO[CH][CH]C(2036)'],
transitionState = 'TS9',
kinetics = Arrhenius(A=(4.823e+09,'s^-1'), n=1.00333, Ea=(141.977,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R3H_SS;Cd_rad_out_Cd;XH_out] for rate rule [R3H_SS_O;Cd_rad_out_Cd;XH_out]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction10',
reactants = ['[CH]=[C]OCCC(2037)'],
products = ['C=[C]OC[CH]C(698)'],
transitionState = 'TS10',
kinetics = Arrhenius(A=(1.846e+10,'s^-1'), n=0.74, Ea=(145.185,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Estimated using template [RnH;Cd_rad_out_singleH;Cs_H_out_H/NonDeC] for rate rule [R5HJ_1;Cd_rad_out_singleH;Cs_H_out_H/NonDeC]
Euclidian distance = 2.0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction11',
reactants = ['C=[C]OC[CH]C(698)'],
products = ['[CH2][CH]COC=C(2038)'],
transitionState = 'TS11',
kinetics = Arrhenius(A=(5.59786e+07,'s^-1'), n=1.58088, Ea=(142.57,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [RnH;Cd_rad_out_Cd;Cs_H_out_2H] for rate rule [R5HJ_3;Cd_rad_out_Cd;Cs_H_out_2H]
Euclidian distance = 2.0
Multiplied by reaction path degeneracy 3.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction12',
reactants = ['C3H6(T)(82)', '[CH2][C]=O(266)'],
products = ['C=[C]OC[CH]C(698)'],
transitionState = 'TS12',
kinetics = Arrhenius(A=(7.35017e+06,'m^3/(mol*s)'), n=0.0284742, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [Y_rad;Y_rad]
Euclidian distance = 0
family: R_Recombination
Ea raised from -14.4 to 0 kJ/mol."""),
)
reaction(
label = 'reaction13',
reactants = ['C=[C]OC[CH]C(698)'],
products = ['C=COC=CC(2039)'],
transitionState = 'TS13',
kinetics = Arrhenius(A=(6.94203e+09,'s^-1'), n=0.37, Ea=(99.6901,'kJ/mol'), T0=(1,'K'), comment="""Estimated using average of templates [R3;Y_rad_De;XH_Rrad_NDe] + [R3radExo;Y_rad;XH_Rrad_NDe] for rate rule [R3radExo;Y_rad_De;XH_Rrad_NDe]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 2.0
family: Intra_Disproportionation"""),
)
reaction(
label = 'reaction14',
reactants = ['C=[C]OC[CH]C(698)'],
products = ['C=CCOC=C(2040)'],
transitionState = 'TS14',
kinetics = Arrhenius(A=(5.55988e+09,'s^-1'), n=0.137, Ea=(39.225,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R5;Y_rad_De;XH_Rrad] for rate rule [R5radEndo;Y_rad_De;XH_Rrad]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 3.0
family: Intra_Disproportionation"""),
)
reaction(
label = 'reaction15',
reactants = ['C=[C]OC[CH]C(698)'],
products = ['C=C1OCC1C(2014)'],
transitionState = 'TS15',
kinetics = Arrhenius(A=(1.62e+12,'s^-1'), n=-0.305, Ea=(8.28432,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R4_SSS;Y_rad_out;Cpri_rad_out_single] for rate rule [R4_SSS;Y_rad_out;Cpri_rad_out_H/NonDeC]
Euclidian distance = 2.0
family: Birad_recombination"""),
)
reaction(
label = 'reaction16',
reactants = ['H2CC(T)(265)', 'C[CH]C[O](1252)'],
products = ['C=[C]OC[CH]C(698)'],
transitionState = 'TS16',
kinetics = Arrhenius(A=(54738.4,'m^3/(mol*s)'), n=0.884925, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(303.03,'K'), Tmax=(2000,'K'), comment="""Estimated using an average for rate rule [O_rad/NonDe;Birad]
Euclidian distance = 0
family: Birad_R_Recombination
Ea raised from -2.9 to 0 kJ/mol."""),
)
reaction(
label = 'reaction17',
reactants = ['CHCH3(T)(80)', '[CH2]O[C]=C(1770)'],
products = ['C=[C]OC[CH]C(698)'],
transitionState = 'TS17',
kinetics = Arrhenius(A=(2.23625e+06,'m^3/(mol*s)'), n=0.36814, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [Y_rad;Birad] for rate rule [C_rad/H2/O;Birad]
Euclidian distance = 3.0
family: Birad_R_Recombination
Ea raised from -1.7 to 0 kJ/mol."""),
)
network(
label = '487',
isomers = [
'C=[C]OC[CH]C(698)',
],
reactants = [
('CH2CO(27)', 'C3H6(59)'),
],
bathGas = {
'N2': 0.5,
'Ne': 0.5,
},
)
pressureDependence(
label = '487',
Tmin = (300,'K'),
Tmax = (2000,'K'),
Tcount = 8,
Tlist = ([302.47,323.145,369.86,455.987,609.649,885.262,1353.64,1896.74],'K'),
Pmin = (0.01,'bar'),
Pmax = (100,'bar'),
Pcount = 5,
Plist = ([0.0125282,0.0667467,1,14.982,79.8202],'bar'),
maximumGrainSize = (0.5,'kcal/mol'),
minimumGrainCount = 250,
method = 'modified strong collision',
interpolationModel = ('Chebyshev', 6, 4),
activeKRotor = True,
activeJRotor = True,
rmgmode = True,
)
| [
"qin.she@husky.neu.edu"
] | qin.she@husky.neu.edu |
dd1155253217fcb07f905b3d55b98b634de3a0e6 | 3ba81dd28f6f94fd01c92cda2ea03681cb265adb | /apps/chat_app/models.py | 073445666b2537eb62cd9287e66eab5065b66011 | [] | no_license | zhongweili2010/chattychat | a87e5b890c2e93ae266bf2e78e904c5c261060f6 | 2ba900032250304551dcab2c3e3a2a0be0cff99b | refs/heads/master | 2022-12-12T23:12:19.139605 | 2019-07-12T18:54:27 | 2019-07-12T18:54:27 | 183,483,396 | 0 | 1 | null | 2022-12-08T05:52:08 | 2019-04-25T17:49:39 | HTML | UTF-8 | Python | false | false | 1,185 | py | from django.db import models
from ..user_app.models import User
class Message(models.Model):
    """A single chat message posted by a user."""

    content = models.TextField()
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    # SET_NULL keeps the message row even after its author is deleted.
    sender = models.ForeignKey(User, on_delete=models.SET_NULL, null=True, related_name='messages')
class Client(models.Model):
    """One-to-one record of the channel name currently assigned to a user."""

    channel_name = models.CharField(max_length=50)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    user = models.OneToOneField(User, on_delete=models.CASCADE, related_name='client_of')
class ChatGroup(models.Model):
    """A (possibly unnamed) chat group with a many-to-many user membership."""

    name = models.CharField(max_length=30, null=True)
    users = models.ManyToManyField(User, related_name="groups_of")
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
# class messages(models.Model):
# message = models.TextField()
# sender = models.ForeignKey(User,on_delete=SET_NULL)
# created_at = models.DateTimeField(auto_now_add=True)
# updated_at = models.DateTimeField(auto_now=True)
# reciever = models.ForeignKey(User,on_delete=SET_NULL)
# Create your models here.
| [
"zhongweili2010@gmail.com"
] | zhongweili2010@gmail.com |
298f726e82b6f397afccd86fed437e725ace18e8 | 78fd03f8c59b27133d46bcfc923df57f24b41081 | /oop/clase1.py | 5678658b10cf09d0d36b839f5cbda143258a3e39 | [] | no_license | Jhongesell/curso-python-igp | f7595f4093a935b02d53ee08aaf6bc0ad47a30c1 | 7519cf716fd6d45d7519be6c5f3a44b8f77b1869 | refs/heads/master | 2020-04-12T20:23:56.620484 | 2015-12-11T02:57:06 | 2015-12-11T02:57:06 | 162,734,898 | 1 | 0 | null | 2018-12-21T16:22:10 | 2018-12-21T16:22:09 | null | UTF-8 | Python | false | false | 661 | py | # -*- coding: utf8 -*-
# NOTE: in Python 2 an empty-parens class with no explicit base is a
# "classic" (old-style) class; kept as-is for the tutorial comparison.
class Alumno0():
    pass
class Alumno(object):
    """Empty new-style class (explicit `object` base)."""
    pass
class Alumno2(object):
    # Class attribute shared by all instances unless shadowed per-instance.
    nombre = 'Juan'
    def saludar(self):
        # Python 2 print statement (this file targets Python 2).
        print "Hola, me llamo %s" % self.nombre
class Alumno3(object):
    """Student built from explicit name/surname constructor arguments."""
    def __init__(self, n, a):
        # Bug fix: the original assigned the undefined globals `nombre` and
        # `apellido` instead of the parameters, raising NameError on every call.
        self.nombre = n
        self.apellido = a
class Alumno4(object):
    """Student whose attributes come from arbitrary keyword arguments;
    `edad` defaults to 10 when not supplied."""
    def __init__(self, **kwargs):
        kwargs.setdefault('edad', 10)
        for attr_name, attr_value in kwargs.items():
            setattr(self, attr_name, attr_value)
class Alumno5(object):
    """Student with a default name ('Pepe') and a fixed age of 10."""
    def __init__(self, nombre='Pepe'):
        self.edad = 10
        self.nombre = nombre
if __name__ == '__main__':
    # Manual demo: greets using Alumno2's class-level default name 'Juan'.
    obj = Alumno2()
    obj.saludar()
| [
"antonio@ognio.com"
] | antonio@ognio.com |
4db4f4bde2cf7a0b91998d8f3928490c5685df1b | 5ba1d5958e16898153fc9a11e3d880131a0be81e | /app.py | 34ed4b296a024bc6f6fe55138e0d01d84f6d8f08 | [] | no_license | KeiSoto/Profeco | 0c2f7c980966629633043b88afb3164b2fb87ada | d8b9113a38d582c7353554ecc8bdf93b2a1e215c | refs/heads/master | 2020-12-10T03:58:34.809473 | 2016-11-11T18:24:16 | 2016-11-11T18:24:16 | 73,501,011 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 425 | py | # -*- coding: utf-8 -*-
import web
import json
# Template renderer rooted at the views/ directory.
render = web.template.render("views/")
# Routing table: URL regex -> handler class name; the (.*) capture is
# passed to the handler's GET as its positional argument.
urls = (
    "/index(.*)", "index"
)
class index:
    """Handler for the "/index(.*)" route: serves the bundled sample results."""
    def GET(self, data_list):
        # `data_list` is the regex capture from the URL route and is not used;
        # the original shadowed it (and the `file` builtin) immediately, so use
        # distinct local names instead of reusing the parameter.
        with open('200datos.json', 'r') as json_file:
            payload = json.load(json_file)
        return render.index(payload['results'])
if __name__ == "__main__":
    # Build the web.py application from the routing table and serve it.
    app = web.application(urls, globals())
    web.config.debug = True
    app.run()
| [
"keila soto"
] | keila soto |
01d0ed786036fa64499469ddced51bd6acd03276 | a9e77635cba311cc2f94902aae34b7112260d40b | /src/py_ask_sdk_test/validators/speech_validator.py | b0fd9cb907ef5945bc25eb462c963a1b87b3e2bd | [
"MIT"
] | permissive | utmostzl/py_ask_sdk_test | 3e4a0bdb7cc1b5c5ab5d47091d012d11f481f1a5 | 9ec432ca5992e1ec09d6657e2392ec17092ee8c6 | refs/heads/master | 2022-11-14T10:48:25.690789 | 2019-11-04T19:08:12 | 2019-11-04T19:08:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,479 | py | import re
from py_ask_sdk_test.validators.abstract_response_validator import AbstractResponseValidator
class SpeechValidator(AbstractResponseValidator):
    """
    Validates a skill response against the expected output speech and reprompt.
    Expected values may be plain strings or (pattern, is_regex) tuples.
    """
    def validate(self, test_item, response):
        """
        Validates the given response for the given test_item against the
        expected speech and reprompt (attribute spelled `expected_repromt`).
        Args:
            test_item(TestItem): The TestItem the response was given for
            response(ResponseEnvelope): The response
        Returns: True if the validation was successful, otherwise raises AssertionError
        """
        if not response.response:
            assert False, "No response given"
        if test_item.expected_speech is not None:
            expected_speech = test_item.expected_speech
            self._assert_output_speech(response.response.output_speech, expected_speech,
                                       "Not the expected speech output")
        if test_item.expected_repromt is not None:
            # A reprompt may be absent; its speech lives one level deeper.
            actual_repromt = response.response.reprompt
            actual_repromt = actual_repromt.output_speech if actual_repromt is not None else None
            expected_repromt = test_item.expected_repromt
            self._assert_output_speech(actual_repromt, expected_repromt,
                                       "Not the expected repromt output")
        return True
    @staticmethod
    def _assert_output_speech(output_speech, expected_speech, msg):
        # expected_speech may be a (pattern, is_regex) tuple; unpack it.
        is_regex = False
        if type(expected_speech) is tuple:
            is_regex = expected_speech[1]
            expected_speech = expected_speech[0]
        assert expected_speech is not None
        if output_speech is None:
            # No speech in the response: acceptable only if none was expected.
            assert len(expected_speech) == 0, msg
            return
        speech_type = output_speech.object_type
        actual_speech = None
        if speech_type == 'SSML':
            # Strip the surrounding <speak>...</speak> wrapper (7/8 chars).
            actual_speech = output_speech.ssml[7:-8]
        elif speech_type == 'PlainText':
            actual_speech = output_speech.text
        if len(expected_speech) == 0:
            # Expected silence: any empty/absent actual speech passes.
            assert actual_speech is None or len(actual_speech) == 0
        elif is_regex:
            # Full-string regex match against the spoken text.
            match = re.fullmatch(expected_speech, actual_speech)
            assert match is not None, msg + ": {} instead of {}".format(actual_speech, expected_speech)
        else:
            assert expected_speech == actual_speech, msg + ": '{}' instead of '{}'".format(actual_speech, expected_speech)
| [
"jonathan.loos@web.de"
] | jonathan.loos@web.de |
9ae75a368a4da72828b2e0c960271d481cc2e07d | 4103722ef93b4567c7fcc4c152c8295428109c68 | /bapsflib/_hdf/maps/digitizers/tests/test_map_digis.py | d4f370cf9b03e30466e84cf790bda7603fb1307e | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | StanczakDominik/bapsflib | 46d3b5caf3296116987d93d58df40f3fa7d19410 | a14e372eb1558cb6f4fe378b043ec05215e6f5f9 | refs/heads/master | 2023-04-11T05:02:07.690574 | 2020-09-15T00:20:32 | 2020-09-15T00:20:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,194 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# This file is part of the bapsflib package, a Python toolkit for the
# BaPSF group at UCLA.
#
# http://plasma.physics.ucla.edu/
#
# Copyright 2017-2018 Erik T. Everson and contributors
#
# License: Standard 3-clause BSD; see "LICENSES/LICENSE.txt" for full
# license terms and contributor agreement.
#
import h5py
import numpy as np
import unittest as ut
from bapsflib._hdf.maps import FauxHDFBuilder
from ..map_digis import HDFMapDigitizers
from ..templates import HDFMapDigiTemplate
class TestHDFMapDigitizers(ut.TestCase):
    """Test case for HDFMapDigitizers"""
    f = NotImplemented # type: FauxHDFBuilder
    # Name of the HDF5 group that holds the digitizer groups.
    DIGI_ROOT = 'Raw data + config'
    MAP_CLASS = HDFMapDigitizers
    @classmethod
    def setUpClass(cls):
        # create HDF5 file
        super().setUpClass()
        cls.f = FauxHDFBuilder()
    def tearDown(self):
        super().tearDown()
        self.f.remove_all_modules()
    @classmethod
    def tearDownClass(cls):
        """Cleanup temporary HDF5 file"""
        # cleanup and close HDF5 file
        super().tearDownClass()
        cls.f.cleanup()
    @property
    def group(self) -> h5py.Group:
        """Data group holding digitizer groups."""
        return self.f[self.DIGI_ROOT]
    @property
    def map(self):
        """Map of group holding all digitizers."""
        return self.map_digis(self.group)
    def map_digis(self,
                  group: h5py.Group):
        """Mapping function."""
        return self.MAP_CLASS(group)
    def test_not_h5py_group(self):
        """Test error if object to map is not h5py.Group"""
        with self.assertRaises(TypeError):
            self.map_digis(None)
    def test_digi_scenarios(self):
        """
        Test various scenarios of mappable and non-mappable digitizer
        device groups.
        """
        # -- data group has no digitizer devices ----
        _map = self.map
        self.assertBasics(_map)
        self.assertEqual(_map, {})
        # -- data group has all mappable devices ----
        self.f.add_module('SIS 3301', {})
        self.f.add_module('SIS crate', {})
        _map = self.map
        self.assertBasics(_map)
        # check all controls were mapped
        self.assertEqual(len(_map), 2)
        self.assertIn('SIS 3301', _map)
        self.assertIn('SIS crate', _map)
        # the data group has mappable and unknown digitizers ----
        self.f.remove_all_modules()
        self.f.add_module('SIS 3301', {})
        self.f['Raw data + config'].create_group('Not known')
        _map = self.map
        self.assertBasics(_map)
        # check correct diagnostics were mapped
        self.assertEqual(len(_map), 1)
        self.assertIn('SIS 3301', _map)
        self.assertNotIn('Not known', _map)
        # delete unknown group
        del self.f['Raw data + config/Not known']
        # the data group has a dataset ----
        self.f.remove_all_modules()
        self.f.add_module('SIS crate', {})
        data = np.empty((2, 100), dtype=np.float32)
        self.f['Raw data + config'].create_dataset('A dataset',
                                                   data=data)
        _map = self.map
        self.assertBasics(_map)
        # check correct diagnostics were mapped
        self.assertEqual(len(_map), 1)
        self.assertIn('SIS crate', _map)
        self.assertNotIn('A dataset', _map)
        # delete dataset
        del self.f['Raw data + config/A dataset']
        # the data group has a mappable digitizer but ----
        # mapping fails ----
        self.f.remove_all_modules()
        self.f.add_module('SIS 3301', {})
        self.f.add_module('SIS crate', {})
        # remove a dataset from 'SIS 3301'
        # - this will cause mapping of 'Waveform' to fail
        #
        sis_group = self.f['Raw data + config/SIS 3301']
        # Fix: snapshot the member names before deleting — mutating an h5py
        # Group while iterating it (like a dict) is unsafe.
        for name in list(sis_group):
            if isinstance(sis_group[name], h5py.Dataset):
                del sis_group[name]
        # check map
        _map = self.map
        self.assertBasics(_map)
        # check correct controls were mapped
        self.assertEqual(len(_map), 1)
        self.assertIn('SIS crate', _map)
        self.assertNotIn('SIS 3301', _map)
    def assertBasics(self, _map: HDFMapDigitizers):
        # mapped object is a dictionary
        self.assertIsInstance(_map, dict)
        # all dict items are a mapping class
        for key, val in _map.items():
            self.assertIsInstance(key, str)
            self.assertIsInstance(val, HDFMapDigiTemplate)
        # look for map attributes
        self.assertTrue(hasattr(_map, '_defined_mapping_classes'))
        self.assertTrue(hasattr(_map, 'mappable_devices'))
        # check attribute types
        self.assertIsInstance(_map.mappable_devices, tuple)
        self.assertIsInstance(type(_map).mappable_devices, property)
        self.assertEqual(sorted(list(_map.mappable_devices)),
                         sorted(list(_map._defined_mapping_classes)))
if __name__ == '__main__':
    # Allow running this test module directly with the unittest runner.
    ut.main()
| [
"eteverson@gmail.com"
] | eteverson@gmail.com |
0c458fbfc43deedc1ff6cacc0268e0fbb085ec90 | 79eadcf837fe233e7902c9793dcc4fc1f84c9e52 | /TP6_modules.py | 36cdd3ff57874303712cb4a194fbdb22f528fe8e | [] | no_license | Fabezio/py_start | 6c6123aa099b95ce6917c097cebf497a83477d49 | 45bfadf4996db1b7bf43842f927341a592fe932d | refs/heads/main | 2023-02-24T02:41:30.534416 | 2021-01-27T12:35:27 | 2021-01-27T12:35:27 | 333,363,436 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 37 | py | import glob
print(glob.glob("*.py")) | [
"fabezio@outlook.fr"
] | fabezio@outlook.fr |
85a7f50676e8c253aa893c4e83e0316368bd6b80 | 10a59cf4987b300b2e13c74cf35c3d8e8f7c0527 | /kw_webapp/tests/views/test_level.py | 9e9c030b61a084da3ca62de89da8d6da68de6dfc | [] | no_license | ddaws/kw-backend | 25e7fbc3b6a9d0243e58e2345810a9056cfea245 | e03d2cc0b0cc7c12ef05c78ed51eb9995c08dc90 | refs/heads/master | 2020-03-23T14:56:05.584806 | 2018-07-10T00:10:23 | 2018-07-10T00:10:23 | 141,709,057 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,449 | py | from unittest import mock
from rest_framework.reverse import reverse
from rest_framework.test import APITestCase
from kw_webapp.models import Level
from kw_webapp.tests.utils import setupTestFixture
from kw_webapp.utils import one_time_orphaned_level_clear
class TestLevel(APITestCase):
def setUp(self):
setupTestFixture(self)
def test_locking_current_level_disables_following_setting(self):
self.client.force_login(user=self.user)
self.user.profile.follow_me = True
self.user.profile.level = 5
self.user.save()
self.client.post(reverse("api:level-lock", args=(self.user.profile.level,)))
response = self.client.get(reverse("api:user-me"))
self.assertFalse(response.data["profile"]["follow_me"])
def test_locking_a_level_locks_successfully(self):
self.client.force_login(user=self.user)
response = self.client.post(reverse("api:level-lock", args=(self.user.profile.level,)))
self.assertEqual(response.data["locked"], 1)
def test_user_unlocking_too_high_level_fails(self):
self.client.force_login(user=self.user)
self.user.profile.level = 5
self.user.save()
level_too_high = 20
response = self.client.post(reverse("api:level-unlock", args=(level_too_high,)))
self.assertEqual(response.status_code, 403)
@mock.patch("api.views.unlock_eligible_vocab_from_levels", side_effect=lambda x, y: [1, 0, 0])
def test_unlocking_a_level_unlocks_all_vocab(self, garbage):
self.client.force_login(user=self.user)
self.user.profile.api_valid = True
self.user.profile.save()
s1 = reverse("api:level-unlock", args=(5,))
response = self.client.post(s1)
self.assertEqual(response.data['unlocked_now'], 1)
def test_locking_a_level_successfully_clears_the_level_object(self):
self.client.force_login(user=self.user)
level = Level.objects.get(profile=self.user.profile, level=5)
self.assertTrue(level is not None)
self.client.post(reverse("api:level-lock", args=(self.user.profile.level,)))
levels = Level.objects.filter(profile=self.user.profile, level=5)
self.assertEqual(levels.count(), 0)
def test_one_time_orphan_clear_deletes_orphaned_levels(self):
l5 = self.user.profile.unlocked_levels.get_or_create(level=5)[0]
l6 = self.user.profile.unlocked_levels.get_or_create(level=6)[0]
l7 = self.user.profile.unlocked_levels.get_or_create(level=7)[0]
l8 = self.user.profile.unlocked_levels.get_or_create(level=8)[0]
l9 = self.user.profile.unlocked_levels.get_or_create(level=9)[0]
level_count = Level.objects.filter(profile=self.user.profile).count()
self.assertEqual(level_count, 5)
self.user.profile.unlocked_levels.remove(l6)
self.user.profile.unlocked_levels.remove(l7)
#Oh no two orphaned levels.
level_count = Level.objects.filter(profile=None).count()
self.assertEqual(level_count, 2)
one_time_orphaned_level_clear()
# Our user has the correct amount of levels associated..
level_count = Level.objects.filter(profile=self.user.profile).count()
self.assertEqual(len(self.user.profile.unlocked_levels_list()), 3)
# No more orphaned levels!
level_count = Level.objects.filter(profile=None).count()
self.assertEqual(level_count, 0)
| [
"noreply@github.com"
] | ddaws.noreply@github.com |
3cbdccfeb61342f5ebfdb47f21713cc8f78809ee | cdad738a7085a997b5349a94aedb4db8da78da8f | /CumulantAnalysis/test/crab/crab.py | 651feaa7a95127ab9744a9310a2c606677463214 | [
"MIT"
] | permissive | tuos/DirectLoopAnalysis | 4851d122d4723e498705c1d2cb100cbf3eda8d43 | 6f5f02538454d2240d0232665b9b17d07eb79854 | refs/heads/master | 2020-06-12T22:24:01.081755 | 2020-01-21T17:49:37 | 2020-01-21T17:49:37 | 194,446,479 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 893 | py | from CRABClient.UserUtilities import config, getUsernameFromSiteDB
# --- CRAB3 task configuration for the PYTHIA8 cumulant production ---
config = config()
# Task identification and local project (work/log) directory.
config.General.requestName = 'PYTHIA8_gap08v4_OCT13TeV500M'
config.General.workArea = 'project_PYTHIA8_gap08v4_OCT13TeV500M'
config.General.transferOutputs = True
config.General.transferLogs = False
# Job type: run the CMSSW analysis config below over the input dataset.
config.JobType.pluginName = 'Analysis'
config.JobType.psetName = 'ConfFile_cfg.py'
config.JobType.allowUndistributedCMSSW = True
# Input: user-published (phys03 DBS) MinBias dataset, one file per job.
config.Data.inputDataset = '/MinBias/tuos-PYTHIA8_OCT13TeV200M_v1-b760962cb2794e34f66b7129f2502618/USER'
config.Data.inputDBS = 'phys03'
config.Data.splitting = 'FileBased'
#config.Data.splitting = 'Automatic'
config.Data.unitsPerJob = 1
# Output destination; publication to DBS is disabled.
config.Data.outLFNDirBase = '/store/user/tuos/loops/cumulants/pythia/PYTHIA8_gap08v4_OCT13TeV500M'
config.Data.publication = False
config.Data.outputDatasetTag = 'PYTHIA8_gap08v4_OCT13TeV500M'
config.Site.storageSite = 'T2_US_Vanderbilt'
| [
"shengquan.tuo@cern.ch"
] | shengquan.tuo@cern.ch |
432d3160831f7f926b41f2dc0dd647e4bf5b83c3 | fa25ba0783d1ebb489b2a2c320295a1e1d3e34ba | /hellowTensorFlow/ch_test_with_tensorflow.py | f6729900e912f137a70fc39b9c6a4bd0f86d419a | [] | no_license | chenzhe3701/PWD | 103649ad4da866d168c3b7f44f2216eb6a30088d | e9e6f3e58606159895e5e3080bfb187ee5cb6c77 | refs/heads/master | 2021-11-15T20:24:26.445504 | 2021-09-23T21:40:35 | 2021-09-23T21:40:35 | 114,500,300 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 163 | py | import tensorflow as tf
import numpy as np
# 2x2 test matrix (np.matrix is a legacy API; fine for this smoke test).
mat = np.matrix([[1., 2], [3, 4]])
# Row-wise softmax (axis=1) built with the TF1 graph API.
t = tf.nn.softmax(mat, 1)
# InteractiveSession installs itself as the default session.
sess = tf.InteractiveSession()
print(sess.run(t))
t.eval() | [
"chenzhezju@gmail.com"
] | chenzhezju@gmail.com |
70ff2b7a43aa1a4f7e7950ec9730b1ec544751da | 971e0efcc68b8f7cfb1040c38008426f7bcf9d2e | /tests/perf/test_long_cycles_nbrows_cycle_length_21000_440.py | 6c61e0872dee66a3d086ba51c5d757e29cca171a | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | antoinecarme/pyaf | a105d172c2e7544f8d580d75f28b751351dd83b6 | b12db77cb3fa9292e774b2b33db8ce732647c35e | refs/heads/master | 2023-09-01T09:30:59.967219 | 2023-07-28T20:15:53 | 2023-07-28T20:15:53 | 70,790,978 | 457 | 77 | BSD-3-Clause | 2023-03-08T21:45:40 | 2016-10-13T09:30:30 | Python | UTF-8 | Python | false | false | 89 | py | import tests.perf.test_cycles_full_long_long as gen
# Generated perf case: 21000-row series with a cycle of length 440.
gen.test_nbrows_cycle(21000 , 440)
| [
"antoine.carme@laposte.net"
] | antoine.carme@laposte.net |
7cc1316df32fd28ca111f58b906a3e70cd026c40 | 708e6a5d97c5f8d1519852bc6866612b587a234c | /LevelModel/mobile_model2.py | c36d70055643b6e7aab46ee7b5bbe1ff0bea32e6 | [] | no_license | younglalala/handwrite_TOF | e3638ec1e1072e620d4916fd41a4815c25e5026c | 8afde14d8073038c0c62dfe8576cbdc3c5e8c06a | refs/heads/master | 2020-04-10T04:40:15.436265 | 2019-01-17T05:37:04 | 2019-01-17T05:37:04 | 160,805,354 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,105 | py | import tensorflow as tf
import numpy as np
from scipy import misc
import matplotlib.pyplot as plt
from LevelModel.train_sample import *
from LevelModel.test_sample import *
import tempfile
import subprocess
tf.contrib.lite.tempfile = tempfile
tf.contrib.lite.subprocess = subprocess
from box_model.one_hott import one_hot,one_hot2,one_hot3
class Modle:
    """MobileNet-style CNN classifier: 64x64x1 input -> 5-class logits,
    built with the TF1 graph API (placeholders + tf.Variable weights).
    forward() wires the layers; backward() adds loss/optimizer/accuracy.
    NOTE(review): the class name 'Modle' is a typo for 'Model' but is kept
    since exported graphs/checkpoints were built from it.
    """
    def __init__(self):
        # Input batch of 64x64 grayscale images; tensor name 'input' is the
        # handle used when exporting the frozen .pb / .tflite graph.
        self.x=tf.placeholder(dtype=tf.float32,shape=[None,64,64,1],name='input')
        # One-hot labels over 5 classes (the training script maps 0-4 to 'ABCDX').
        self.y_=tf.placeholder(dtype=tf.float32,shape=[None,5])
        # self.dp=tf.placeholder(dtype=tf.float32,name='dp')
        # Backbone weights: each stage is a pointwise (1x1 or 3x3) conv followed
        # by a stride-2 depthwise 3x3 conv (depthwise-separable blocks).
        self.conv1_w = tf.Variable(tf.random_normal([3, 3, 1, 32],dtype=tf.float32,stddev=tf.sqrt(1 / 32)))
        self.conv1_b = tf.Variable(tf.zeros([32]))
        self.conv1d_w = tf.Variable(tf.random_normal([3, 3, 32, 1], dtype=tf.float32, stddev=tf.sqrt(1 / 32)))
        self.conv1d_b = tf.Variable(tf.zeros([32]))
        self.conv2_w = tf.Variable(tf.random_normal([1, 1, 32, 16],dtype=tf.float32,stddev=tf.sqrt(1 / 16)))
        self.conv2_b = tf.Variable(tf.zeros([16]))
        self.conv2d_w = tf.Variable(tf.random_normal([3, 3, 16, 1], dtype=tf.float32, stddev=tf.sqrt(1 / 16)))
        self.conv2d_b = tf.Variable(tf.zeros([16]))
        self.conv3_w = tf.Variable(tf.random_normal([1, 1, 16, 32], dtype=tf.float32, stddev=tf.sqrt(1 / 32)))
        self.conv3_b = tf.Variable(tf.zeros([32]))
        self.conv3d_w = tf.Variable(tf.random_normal([3, 3, 32, 1], dtype=tf.float32, stddev=tf.sqrt(1 / 32)))
        self.conv3d_b = tf.Variable(tf.zeros([32]))
        self.conv4_w = tf.Variable(tf.random_normal([1, 1, 32, 64], dtype=tf.float32, stddev=tf.sqrt(1 / 64)))
        self.conv4_b = tf.Variable(tf.zeros([64]))
        self.conv4d_w = tf.Variable(tf.random_normal([3, 3, 64, 1], dtype=tf.float32, stddev=tf.sqrt(1 / 64)))
        self.conv4d_b = tf.Variable(tf.zeros([64]))
        self.conv5_w = tf.Variable(tf.random_normal([1, 1, 64, 128], dtype=tf.float32, stddev=tf.sqrt(1 / 128)))
        self.conv5_b = tf.Variable(tf.zeros([128]))
        self.conv5d_w = tf.Variable(tf.random_normal([3, 3, 128, 1], dtype=tf.float32, stddev=tf.sqrt(1 / 128)))
        self.conv5d_b = tf.Variable(tf.zeros([128]))
        # Classifier head: flatten 2x2x128 -> 128-unit FC -> 5 logits.
        self.fc_w=tf.Variable(tf.random_normal([2*2*128,128],dtype=tf.float32,stddev=tf.sqrt(1 / 128)))
        self.fc_b=tf.Variable(tf.zeros([128]))
        self.out_w=tf.Variable(tf.random_normal([128,5],dtype=tf.float32,stddev=tf.sqrt(1 / 5)))
        self.out_b=tf.Variable(tf.zeros([5]))
    def forward(self):
        """Builds the inference graph. The inline `#H,W` comments note the
        spatial size after each stride-2 depthwise convolution."""
        self.conv1=tf.nn.relu(tf.layers.batch_normalization(
            tf.nn.conv2d(self.x,self.conv1_w,strides=[1,1,1,1],padding='SAME')+self.conv1_b))
        self.conv1d=tf.nn.relu(tf.layers.batch_normalization(
            tf.nn.depthwise_conv2d(self.conv1,self.conv1d_w,strides=[1,2,2,1],padding="SAME")+self.conv1d_b)) #32,32
        self.conv2 = tf.nn.relu(tf.layers.batch_normalization(
            tf.nn.conv2d(self.conv1d, self.conv2_w, strides=[1, 1, 1, 1], padding='SAME') + self.conv2_b))
        self.conv2d = tf.nn.relu(tf.layers.batch_normalization(
            tf.nn.depthwise_conv2d(self.conv2, self.conv2d_w, strides=[1, 2, 2, 1], padding="SAME") + self.conv2d_b)) #16,16
        self.conv3 = tf.nn.relu(tf.layers.batch_normalization(
            tf.nn.conv2d(self.conv2d, self.conv3_w, strides=[1, 1, 1, 1], padding='SAME') + self.conv3_b))
        self.conv3d = tf.nn.relu(tf.layers.batch_normalization(
            tf.nn.depthwise_conv2d(self.conv3, self.conv3d_w, strides=[1, 2, 2, 1], padding="SAME") + self.conv3d_b)) #8,8
        self.conv4 = tf.nn.relu(tf.layers.batch_normalization(
            tf.nn.conv2d(self.conv3d, self.conv4_w, strides=[1, 1, 1, 1], padding='SAME') + self.conv4_b))
        self.conv4d = tf.nn.relu(tf.layers.batch_normalization(
            tf.nn.depthwise_conv2d(self.conv4, self.conv4d_w, strides=[1, 2, 2, 1], padding="SAME") + self.conv4d_b)) #4,4
        self.conv5 = tf.nn.relu(tf.layers.batch_normalization(
            tf.nn.conv2d(self.conv4d, self.conv5_w, strides=[1, 1, 1, 1], padding='SAME') + self.conv5_b))
        # self.conv5=tf.nn.dropout(self.conv5,keep_prob=self.dp)
        self.conv5d = tf.nn.relu(tf.layers.batch_normalization(
            tf.nn.depthwise_conv2d(self.conv5, self.conv5d_w, strides=[1, 2, 2, 1], padding="SAME") + self.conv5d_b)) #2,2,128
        # self.avg_pool = tf.nn.avg_pool(self.conv5d,ksize=[1,2,2,1],strides=[1,2,2,1],padding='VALID')
        # print(self.avg_pool)
        self.flat = tf.reshape(self.conv5d,[-1,2*2*128])
        self.fc = tf.nn.relu(tf.matmul(self.flat,self.fc_w)+self.fc_b)
        # self.fc = tf.nn.dropout(self.fc,keep_prob=self.dp)
        self.out = tf.matmul(self.fc,self.out_w)+self.out_b
    def backward(self):
        """Adds softmax cross-entropy loss, an Adam (lr=1e-5) training op and
        accuracy; 'output' names the predicted-class tensor for graph export."""
        self.loss=tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits_v2(labels=self.y_,logits=self.out))
        self.opt=tf.train.AdamOptimizer(1e-5).minimize(self.loss)
        self.argamx_label=tf.argmax(self.y_,axis=1)
        self.argamx_out= tf.reshape(tf.argmax(self.out,axis=1),[-1],name='output')
        self.acc=tf.reduce_mean(tf.cast(tf.equal(self.argamx_label,self.argamx_out),'float'))
if __name__=='__main__':
    # One-off evaluation/export loop using the TF1 queue-runner input pipeline.
    net=Modle()
    net.forward()
    net.backward()
    train_data = train_shuffle_batch(train_filename, [64, 64, 1], 128)
    test_data = test_shuffle_batch(test_filename, [64, 64, 1], 1000)
    init = tf.global_variables_initializer()
    saver = tf.train.Saver()
    # x, y are unused leftovers; `chioce_dict` (sic) maps class index -> letter.
    x = []
    y = []
    chioce_dict=dict(zip([0,1,2,3,4],list('ABCDX')))
    with tf.Session() as sess:
        sess.run(init)
        coord = tf.train.Coordinator()
        threads = tf.train.start_queue_runners(coord=coord, sess=sess)
        # Resume from the last checkpoint (overwrites the fresh init above).
        saver.restore(sess,'./save/m_4.dpk')
        for i in range(100000):
            # The training step below is currently disabled; only evaluation
            # and export run (every 100 iterations).
            # train_img,train_label,tarin_file=sess.run(train_data)
            # train_img1=train_img/255-0.5
            # train_label=one_hot(train_label.tolist(),5)
            #
            # _,train_loss,train_acc,train_out,train_l=sess.run(
            #     [net.opt,net.loss,net.acc,net.argamx_out,net.argamx_label],
            #     feed_dict={net.x:train_img1,net.y_:train_label,net.dp:0.8})
            #
            #
            #
            # tr_acc = []
            # for train_index in range(len(train_out)):
            #     if train_out[train_index] == train_l[train_index]:
            #         tr_acc.append(1)
            #     else:
            #         tr_acc.append(0)
            #     # misc.imsave('/Users/wywy/Desktop/train_e1'+'/'+str(chioce_dict.get(train_out[train_index]))+'_'+bytes.decode(tarin_file[train_index]),train_img.reshape([-1,64,64]) [train_index])
            #
            # train_acc = np.mean(np.array(tr_acc))
            #
            # print('train iter :{}, train loss:{}, train acc:{}'.format(i,train_loss,train_acc))
            # # #
            if i%100==0:
                # Evaluate on a 1000-image batch, normalized to [-0.5, 0.5].
                test_img, test_label1, test_name = sess.run(test_data)
                test_label = one_hot(test_label1.tolist(),5)
                test_img1 = test_img / 255 - 0.5
                test_loss ,test_acc,test_out,test_l= sess.run(
                    [net.loss,net.acc,net.argamx_out,net.argamx_label],
                    feed_dict={net.x: test_img1, net.y_: test_label})
                # Recompute accuracy per-sample (same value as net.acc).
                tes_acc = []
                for test_index in range(len(test_out)):
                    if test_out[test_index] == test_l[test_index]:
                        tes_acc.append(1)
                    else:
                        tes_acc.append(0)
                    # misc.imsave('/Users/wywy/Desktop/test_e'+'/'+str(chioce_dict.get(test_out[test_index])) +'_'+bytes.decode(test_name[test_index]),test_img.reshape([-1,64,64])[test_index])
                test_acc = np.mean(np.array(tes_acc))
                #
                # Freeze variables into constants and export the graph as .pb.
                graph_def = tf.get_default_graph().as_graph_def()
                output_graph_def = tf.graph_util.convert_variables_to_constants(sess, graph_def,
                                                                                ['output'])
                with tf.gfile.GFile("./m_55.pb", 'wb') as f:
                    f.write(output_graph_def.SerializeToString())
                # save as tflite (disabled)
                # frozen_graphdef = tf.graph_util.convert_variables_to_constants(sess, sess.graph_def,
                #                                                                ['output'])  # ['output'] is the name of the output tensor
                # tflite_model = tf.contrib.lite.toco_convert(frozen_graphdef, [net.x],
                #                                             [net.argamx_out])  # [net.x], [net.argamx_out] are the input/output tensor collections (actual tensors, not names)
                # open("level_mobile5.tflite", "wb").write(tflite_model)
                saver.save(sess,'./save/m_4.dpk')
                print('------------test iter :{}, test loss:{}, test acc:{}----------'.format(i,test_loss,test_acc))
                # plt.show()
| [
"1428867223@qq.com"
] | 1428867223@qq.com |
cd6cdfe1a60ef95bdd7224edfda4e62ba8d8413c | 19585a907ab1e1dafb00e53cce4f1803e805f4a6 | /src/ai/connect4/c4_state.py | ca2ca00d9b3ca436cd78e2f6d97d232af6822438 | [] | no_license | Angeall/pyConnect4NAO | 00ad27370746f36480b42cb49690ce6d3dadfece | 700d84b0cde2cb8e3e7d5ff2c5ca7858679b4e8d | refs/heads/master | 2020-12-01T11:38:46.776337 | 2018-09-02T12:41:57 | 2018-09-02T12:41:57 | 43,453,113 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 8,742 | py | import numpy as np
import disc
from utils.ai.game_state import GameState
__author__ = 'Anthony Rouneau'
class C4State(GameState):
    """
    Represents a Connect 4 state, with the positions of the played discs and the next color to play.
    """
    def __init__(self, _next_color=disc.RED, copied_state=None):
        """
        :param _next_color: the next color that will play
        :type _next_color: int
        :param copied_state: the state from which this new state is created (used to simulate actions)
        :type copied_state: C4State
        """
        super(C4State, self).__init__()
        self.next_color = _next_color
        if copied_state is None:
            # Fresh game: a 6 rows x 7 columns grid, every slot empty.
            self.board = np.array(np.zeros((6, 7)), np.int8)
            self.board[:] = disc.EMPTY
            self.actions = range(7)
            self.terminal = False, False, False
            self.hash = self.compute_hash()
            self.empty = True
        else:  # If this C4State is created from another one
            self.board = copied_state.board.copy()
            self.empty = copied_state.empty
            self.terminal = copied_state.terminal
            self.hash = copied_state.hash
            self.actions = copied_state.actions

    def computePossibleActions(self):
        """
        Refresh with a list of hole indices for which the last slot is still empty.
        """
        # Row 0 is the top of the grid: a column is playable while its top slot is empty.
        return np.dstack(np.where(self.board[0] == disc.EMPTY))[0][:, 0].ravel().tolist()

    # @Override
    def possibleActions(self):
        """
        :return: the indices of the _holes that can be used
        """
        return self.actions

    # @Override
    def performAction(self, column_no):
        """
        :param column_no: the number of the column where the disc will be placed if possible
        :type column_no: int
        """
        self.empty = False
        if column_no not in self.actions:
            # NOTE(review): ValueError would be more conventional than
            # AttributeError here; kept as-is for callers that catch it.
            raise AttributeError("This column is full")
        line_no = self.getTopSlotNumber(column_no)
        self.board[line_no][column_no] = self.next_color
        color_played = self.next_color
        # Now, it's the other player's turn
        self.next_color = disc.get_opposite_color(self.next_color)
        # We refresh the vars
        self.actions = self.computePossibleActions()
        self.hash = self.compute_hash()
        self.terminal = self.computeTerminalStateLocally(line_no, column_no, color_played)

    # @Override
    def terminalTest(self):
        """
        :return: a tuple containing three booleans : (red_won, green_won, draw).
                 red_won, green_won is True if there is 4 discs of that color in a row
                 and draw is True if there is a draw
        :rtype: tuple
        """
        return self.terminal

    def computeTerminalStateLocally(self, line, column, color):
        """
        :param line: The line of the last played disc
        :param column: The column of the last played disc
        :param color: The color of the last disc played
        :return: a tuple containing three booleans : (red_won, green_won, draw).
                 red_won, green_won is True if there is 4 discs of that color in a row
                 and draw is True if there is a draw
        :rtype: tuple

        Assume that the game was not terminal before the disc at (line, column) was placed.
        Check if the game is terminated due to the (line, column) disc.
        """
        if len(self.actions) == 0:
            return False, False, True
        red_won = False
        green_won = False
        rows = self.enumerateLocalRows(line, column)
        for row in rows:
            # Slide a window of 4 slots over each candidate row.
            i = 0
            j = 4
            while j <= len(row) and not red_won and not green_won:
                slack = row[i:j]
                if (slack == color).all():
                    if color == disc.RED:
                        red_won = True
                    elif color == disc.GREEN:
                        green_won = True
                    break
                i += 1
                j += 1
            if red_won or green_won:
                break
        # Results are reported relative to the player about to move.
        if self.next_color == disc.RED:
            return red_won, green_won, False
        else:
            return green_won, red_won, False

    def computeTerminalStateGlobally(self):
        """
        :return: a tuple containing three booleans : (red_won, green_won, draw).
                 red_won, green_won is True if there is 4 discs of that color in a row
                 and draw is True if there is a draw
        :rtype: tuple

        Check if the game is terminated for every combination possible on the board
        """
        win_possible = False
        draw = False
        if len(self.actions) == 0:
            return False, False, True
        red_won = False
        green_won = False
        rows = self.enumerateGlobalRows()
        for row in rows:
            i = 0
            j = 4
            while j <= len(row) and not red_won and not green_won:
                slack = row[i:j]
                if np.logical_or(slack == disc.EMPTY, slack == disc.RED).all():
                    win_possible = True  # If 4 _holes can be filled with red discs, a win can happen
                    if (slack == disc.RED).all():
                        red_won = True
                        break
                if np.logical_or(slack == disc.EMPTY, slack == disc.GREEN).all():
                    win_possible = True  # If 4 _holes can be filled with green discs, a win can happen
                    if (slack == disc.GREEN).all():
                        green_won = True
                        break
                i += 1
                j += 1
            if red_won or green_won:
                break
        if not red_won and not green_won and not win_possible:
            draw = True
        # Sanity check: at most one winner, and a draw excludes any winner.
        assert not (red_won and green_won) and not ((draw and red_won) or (draw and green_won))
        if self.next_color == disc.RED:
            return red_won, green_won, draw
        else:
            return green_won, red_won, draw

    def enumerateLocalRows(self, line, column):
        """
        :param line: the line of the last disc placed
        :param column: the column of the last disc placed
        :return: a list of rows in which the disc in (line, column) could have changed
        """
        rows = []
        # Diagonals
        inverted_board = np.fliplr(self.board)
        rows.append(self.board.diagonal(column - line))
        rows.append(inverted_board.diagonal(6 - column - line))
        # Line
        rows.append(self.board[line])
        # Column
        rows.append(self.board[:, column][line:])
        return rows

    def enumerateGlobalRows(self):
        """
        :return: a list of rows in which there could be 4 discs aligned
        """
        rows = []
        # Diagonals
        inverted_board = np.fliplr(self.board)
        for i in range(-2, 4):  # Getting the diagonals where there might be 4 in a row
            rows.append(self.board.diagonal(i))
            rows.append(inverted_board.diagonal(i))
        # Lines
        for i in range(6):
            rows.append(self.board[i])
        # Columns
        for i in range(7):
            rows.append(self.board[:, i])
        return rows

    # @Override
    def copy(self):
        """
        :return: A copy of this GameState
        """
        return C4State(self.next_color, self)

    # @Override
    def __hash__(self):
        return self.hash

    def compute_hash(self):
        """
        :return: the hash code of this GameState
        """
        # Flatten the board into a tuple so it is hashable alongside the color.
        res = (tuple(self.board.ravel().tolist()), self.next_color).__hash__()
        return res

    def getTopSlotNumber(self, column_no):
        """
        :param column_no: the column in which we want the first available slot
        :return: the number of the line in which is located the first available slot in the column column_no
        """
        column = self.board[:, column_no]
        # has_disc is a vector of bool in which a slot is True if it contains a disc
        has_disc = column > disc.EMPTY
        # We take the lowest slot available by looking at the slot just before the first disc
        # (for an empty column argmax is 0, and (0 - 1) % 6 wraps to the bottom row 5).
        return (np.argmax(has_disc) - 1) % 6

    def checkTopColumn(self, line_no, column_no):
        """
        :param line_no: the number of the line to check
        :param column_no: the number of the column in which we want to check
        :return: true if the slot at (line_no, column_no) is the first one available in the column.
        """
        # We check if line_no is the lowest slot available by looking at the slot just before the first disc
        return line_no == self.getTopSlotNumber(column_no)
| [
"angeal1105@gmail.com"
] | angeal1105@gmail.com |
b56d3d0b6224b8807bca8490a1261a9d66b66571 | c133aefa3b7a0c4988cbe80703aaf310f14e126b | /busdata1.py | 187a8107b91959990e47f0c047f8fddef78aadc0 | [] | no_license | tej1996nitrr/LiveBus-Map | 624192dcfba52400be952e9eefe0d0bd3ce18674 | b73f01900edfa95c18c8667b5828075c4c5f3285 | refs/heads/master | 2022-06-29T16:46:37.106810 | 2020-05-11T15:37:25 | 2020-05-11T15:37:25 | 263,058,262 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,181 | py |
from pykafka import KafkaClient
import json
import uuid
from datetime import datetime
import time
# Connect to the local Kafka broker and get a synchronous producer for
# the 'geoData' topic.
client = KafkaClient(hosts="localhost:9092")
topic = client.topics['geoData']
producer = topic.get_sync_producer()

# Load the bus route; GeoJSON 'coordinates' are [longitude, latitude] pairs.
with open('data/bus1.json') as f:
    data = json.load(f)
coordinates = data['features'][0]['geometry']['coordinates']

def generate_uuid():
    """Return a random UUID, used to make each message key unique."""
    return uuid.uuid4()

# Seed message payload — note this rebinds `data` from the GeoJSON dict
# to the outgoing message dict.
data={}
data['busline']='0001'
data['key'] = data['busline']+str(generate_uuid())
data['timestamp'] = str(datetime.utcnow())
data['latitude'] = coordinates[0][1]
data['longitude'] = coordinates[0][0]
def generate_checkpoint(coordinates):
    """Stream the bus position along *coordinates* to Kafka forever.

    Emits one JSON message per second using the module-level `data`
    payload and `producer`; after the last coordinate it wraps around to
    the first one, so the loop never terminates for a non-empty route.
    """
    i = 0
    while i < len(coordinates):
        data['key'] = data['busline'] + '_' + str(generate_uuid())
        data['timestamp'] = str(datetime.utcnow())
        data['latitude'] = coordinates[i][1]
        data['longitude'] = coordinates[i][0]
        message = json.dumps(data)
        print(message)
        producer.produce(message.encode('ascii'))
        time.sleep(1)
        # If the bus reached the last coordinate, start from the beginning.
        # Bug fix: the old code set i = 0 and then still ran i += 1, so
        # coordinate 0 was skipped on every lap after the first.
        i = (i + 1) % len(coordinates)
# Kick off the endless stream; the final print is only reached when the
# route list is empty.
generate_checkpoint(coordinates)
print(data)
| [
"tejaskanikdaley1996@gmail.com"
] | tejaskanikdaley1996@gmail.com |
81003b8f530c2ccbbf5340af444d48c5e9365bfe | 8872b6448a660137a1b01e4e662554bc28aef81c | /setup.py | 35c706354527c88a751d7cd8a81f031c3fd6d40e | [
"MIT"
] | permissive | honzajavorek/tit | 3c40276ffbfb512b4feaf6c744581e534fcb901b | 57445ffc45b8a76e1227ec86438fd682d57e9da3 | refs/heads/master | 2021-01-10T10:04:05.865940 | 2016-01-28T15:18:27 | 2016-01-28T15:18:27 | 50,044,425 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 962 | py |
from setuptools import setup
def readme():
    """Return the contents of README.rst (used as the long description)."""
    # Read explicitly as UTF-8 — the platform default encoding may differ
    # (e.g. cp1250 on Windows) and would mangle non-ASCII characters.
    with open('README.rst', encoding='utf-8') as f:
        return f.read()
setup(
name='tit',
description='Better titulky.com search',
long_description=readme(),
version='0.0.3',
url='http://github.com/honzajavorek/tit',
author='Honza Javorek',
author_email='mail@honzajavorek.cz',
license='MIT',
py_modules=['tit'],
install_requires=[
'click',
'cssselect',
'requests',
'lxml',
'sh',
],
entry_points='''
[console_scripts]
tit=tit:cli
''',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Topic :: Multimedia :: Video',
],
keywords='subtitles titulky.com csfd.cz',
)
| [
"mail@honzajavorek.cz"
] | mail@honzajavorek.cz |
451cf5b762ff80c500db8f9a27811fedd6610a49 | 2a3e908bf532750a1d2af7a4eb69a45e5ee1ce54 | /main.py | 09a6d4112745809c00359f53dd926ed43c705cd2 | [] | no_license | S-Newcomb/Chess | 03ae659b124e44bbab69f0d78f9c1720c3a235d7 | 547112000f78682cbf1f91470d0515bcb5226746 | refs/heads/main | 2023-02-18T00:59:18.885577 | 2021-01-20T04:52:46 | 2021-01-20T04:52:46 | 326,856,876 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 20,987 | py | #Hopefully a fully working chess game built from scratch in python
import random
#Returns whether int1 and int2 are strictly both positive or both negative
#(zero never matches either sign)
def samePolarity(int1, int2):
    if int1 > 0 and int2 > 0:
        return True
    if int1 < 0 and int2 < 0:
        return True
    return False

#Returns whether each matching element of pos1 and pos2 shares its sign
def samePolarityPos(pos1, pos2):
    if not samePolarity(pos1[0], pos2[0]):
        return False
    return samePolarity(pos1[1], pos2[1])
class Board:
    """The 8x8 chess board.

    Squares are stored in a dict keyed by x * 8 + y. `players` holds the
    two Player objects as [white, black] (set by startGame before
    populateSquares) so setup can hand each piece to its owner.
    """
    squares = {}
    #Probably a better way for board to have access to players
    players = []

    def populateSquares(self):
        """Create all 64 squares, placing the back-rank pieces for both sides."""
        for x in range(8):
            for y in range(8):
                number = x * 8 + y
                pos = [x, y]
                if (y == 0):
                    occ = self.populateKingRow(0, pos, "White")
                    self.players[0].pieces.append(occ)
                elif (y == 7):
                    occ = self.populateKingRow(7, pos, "Black")
                    self.players[1].pieces.append(occ)
                else:
                    occ = None
                #Even rows have black - white pattern
                if (x % 2 == 0):
                    if (y % 2 == 0):
                        color = "Black"
                    else:
                        color = "White"
                #Odd rows have white - black pattern
                else:
                    if (y % 2 == 0):
                        color = "White"
                    else:
                        color = "Black"
                self.squares[number] = Square(pos, occ, color)

    def populateKingRow(self, row, pos, color):
        """Return the classic back-rank piece (no pawns) for *pos* on *row*."""
        if (pos[1] == row):
            if (pos[0] == 0 or pos[0] == 7):
                return Rook(pos, color)
            elif (pos[0] == 1 or pos[0] == 6):
                return Knight(pos, color)
            elif (pos[0] == 2 or pos[0] == 5):
                return Bishop(pos, color)
            elif (pos[0] == 3):
                return Queen(pos, color)
            else:
                return King(pos, color)

    def getSquareAtPos(self, pos):
        """Return the Square at pos == [x, y], or None when off the board.

        Bug fix: both coordinates are now range-checked individually. The
        old code only rejected x * 8 + y outside 0..63, so an out-of-range
        coordinate such as [0, 8] silently wrapped onto a different
        column's square.
        """
        if (pos[0] < 0 or pos[0] > 7 or pos[1] < 0 or pos[1] > 7):
            return None
        return self.squares[pos[0] * 8 + pos[1]]

    def getPieceAtPos(self, pos):
        """Return the Piece standing on *pos* (None for an empty square)."""
        square = self.getSquareAtPos(pos)
        return square.occupied

    def getPlayer(self, color):
        """Return the Player playing *color* ("White" or "Black")."""
        if color == "White":
            return self.players[0]
        return self.players[1]
class Square:
    """A single board square: its [x, y] position, the Piece standing on
    it (or None) and its display color ("White"/"Black")."""
    # NOTE(review): these class attributes hold the *type objects* int/str
    # as placeholders, not real defaults; instances shadow position,
    # occupied and color in __init__ (number is never set per-instance).
    number = int
    position = [int, int]
    occupied = None
    color = str

    def __init__(self, pos, occ, color):
        # pos: [x, y]; occ: Piece or None; color: "White" or "Black".
        self.position = pos
        self.occupied = occ
        self.color = color
class Piece:
    """Base class for all chess pieces.

    Subclasses set name/position/color in __init__ and implement
    getValidMoves(board); the shared movement and self-check helpers
    live here.
    """
    # NOTE(review): placeholder class attributes holding type objects;
    # always shadowed by instance attributes set in subclass __init__.
    name = str
    position = [int, int]
    color = str

    #Returns if moving to square will put you in check in dir incX incY
    def selfCheckLine(self, board, pos, incX, incY):
        """Walk from *pos* in direction (incX, incY) and report whether an
        enemy Queen (or Rook on straights / Bishop on diagonals) attacks
        along that line, skipping this piece's own current square."""
        newPos = pos.copy()
        newSquare = board.getSquareAtPos(newPos)
        # NOTE(review): an enemy piece that is not a Queen/Rook/Bishop does
        # not stop the scan, so attackers behind it are still "seen" —
        # looks like a bug; confirm intended blocking rules.
        while newSquare != None:
            piece = newSquare.occupied
            if piece != None: #if it has a piece on it
                if newPos == self.position: #if this is this piece's pos skip
                    pass
                elif piece.color == self.color: #if another of players pieces is blocking King
                    return False
                else:
                    if piece.name == "Queen": #if it is an enemy Queen
                        return True
                    if incX == 0 or incY == 0: #if this is a column or row
                        if piece.name == "Rook":
                            return True
                    else: #if this is a diagonal
                        if piece.name == "Bishop":
                            return True
            newPos[0] += incX
            newPos[1] += incY
            newSquare = board.getSquareAtPos(newPos)
        return False

    #Returns whether moving King to this square will put yourself in check
    def kingSelfCheck(self, board, square):
        """Check every line, knight jump and pawn square around *square*
        for an enemy attacker before letting the King move there."""
        kingPos = self.position
        newPos = square.position.copy()
        #check columns
        if self.selfCheckLine(board, newPos, 0, 1):
            return True
        if self.selfCheckLine(board, newPos, 0, -1):
            return True
        #check rows
        if self.selfCheckLine(board, newPos, 1, 0):
            return True
        if self.selfCheckLine(board, newPos, -1, 0):
            return True
        #check diagonals
        if self.selfCheckLine(board, newPos, 1, 1):
            return True
        if self.selfCheckLine(board, newPos, -1, -1):
            return True
        if self.selfCheckLine(board, newPos, -1, 1):
            return True
        if self.selfCheckLine(board, newPos, 1, -1):
            return True
        #check knights
        xPos = newPos[0]
        yPos = newPos[1]
        possibleKnights = [[xPos + 2, yPos + 1],
                           [xPos + 2, yPos - 1],
                           [xPos - 2, yPos + 1],
                           [xPos - 2, yPos - 1],
                           [xPos + 1, yPos + 2],
                           [xPos - 1, yPos + 2],
                           [xPos + 1, yPos - 2],
                           [xPos - 1, yPos - 2]]
        for k in possibleKnights:
            if board.getSquareAtPos(k) != None:
                # NOTE(review): compares a Piece object (or None) to the
                # string "Knight" — always False; probably meant
                # .occupied is not None and .occupied.name == "Knight".
                if board.getSquareAtPos(k).occupied == "Knight":
                    return True
        #check pawns
        if self.color == "White": #Pawns can only attack in 1 direction so only check
            pawnY = 1 # the spots they can hit the square from
        else:
            pawnY = -1
        # NOTE(review): pawnY is an absolute rank (1 or -1) rather than an
        # offset from yPos, the same string-vs-Piece comparison occurs,
        # and .occupied is read without a None check — confirm and fix.
        if board.getSquareAtPos([xPos+1, pawnY]).occupied == "Pawn":
            return True
        if board.getSquareAtPos([xPos-1, pawnY]).occupied == "Pawn":
            return True
        return False

    #Returns whether moving to this square will put yourself in check
    def selfCheck(self, board, square):
        """Return True when moving this piece to *square* would expose its
        own King to attack along the line the piece currently shields."""
        player = board.getPlayer(self.color)
        opponent = player.getOpponent(board)  # NOTE(review): unused
        #If this piece is players King
        if self.name == "King":
            return self.kingSelfCheck(board, square)
        else:
            #Get the players King
            king = None
            for piece in player.pieces:
                if piece.name == "King":
                    king = piece
                    break
            posDif = [self.position[0] - king.position[0],
                      self.position[1] - king.position[1]]
            #in the same column as King
            if self.position[0] == king.position[0]:
                if square.position[0] == self.position[0]: #if square is in same column, king is still safe
                    return False
                if posDif[1] > 0: #if piece is above King
                    increment = 1
                else: #piece is below King
                    increment = -1
                newPos = self.position.copy()
                return self.selfCheckLine(board, newPos, 0, increment)
            #in the same row as King
            if self.position[1] == king.position[1]:
                if square.position[1] == self.position[1]: #if move is in same row, king is still safe
                    return False
                if posDif[0] > 0: #if piece is right of King
                    increment = 1
                else: #piece is left of King
                    increment = -1
                newPos = self.position.copy()
                return self.selfCheckLine(board, newPos, increment, 0)
            #diagonal to King
            if abs(posDif[0]) == abs(posDif[1]):
                squareDif = [square.position[0] - king.position[0],
                             square.position[1] - king.position[1]]
                #If square in the same diagonal
                if abs(squareDif[0]) == abs(squareDif[1]) and samePolarityPos(posDif, squareDif):
                    return False
                if posDif[0] > 0: #if piece is right of King
                    incrementX = 1
                else: #piece is left of King
                    incrementX = -1
                if posDif[1] > 0: #if piece is above King
                    incrementY = 1
                else: #piece is below King
                    incrementY = -1
                newPos = self.position.copy()
                return self.selfCheckLine(board, newPos, incrementX, incrementY)
            return False

    #Checks that the square reached by pos is a valid square for this piece
    def isSquareValid(self, board, square):
        """Return False for off-board squares, self-check moves, friendly
        occupation and illegal pawn pushes/captures; True otherwise."""
        if square == None:
            return False
        if self.selfCheck(board, square):
            print("Cannot put yourself in check")
            return False
        #Is that square occupied by another of your pieces?
        piece = square.occupied
        if (piece != None and piece.color == self.color):
            return False
        #Pawns cannot attack up
        if (piece != None and self.name == "Pawn" and piece.position[0] == self.position[0]):
            return False
        #Pawns cannot move diagonally unless capturing
        if (piece == None and self.name == "Pawn" and
                (square.position[0] == self.position[0] + 1 or square.position[0] == self.position[0] - 1)):
            return False
        return True

    """ Checks whether every square in a line determined by change in x and y is valid """
    def isLineValid(self, board, changeInX, changeInY, range):
        # range == 0 means "unlimited", i.e. the board-wide maximum of 7.
        # NOTE(review): the parameter name shadows the builtin range().
        if (range == 0): range = 7
        validSquares = []
        newPos = self.position.copy()
        newPos[0] += changeInX
        newPos[1] += changeInY
        count = 0
        while self.isSquareValid(board, board.getSquareAtPos(newPos)) and count < range:
            square = board.getSquareAtPos(newPos)
            #if current square has an enemy on it break
            if (square.occupied != None):
                validSquares.append(square)
                return validSquares
            else:
                validSquares.append(square)
                newPos[0] += changeInX
                newPos[1] += changeInY
                count += 1
        return validSquares

    #Moves the piece to designated square if possible
    # returns True if move was valid, False if not
    def move(self, board, square, player):
        """Move this piece to *square* for *player*, capturing any enemy
        piece there; returns True on success, False for illegal moves."""
        if (self.color != player.color):
            print("Not your piece")
            return False
        validMoves = self.getValidMoves(board)
        if square in validMoves:
            #Remove Piece from current square
            board.getSquareAtPos(self.position).occupied = None
            #If square has an enemy piece on it, capture it
            if (square.occupied != None):
                player.capturedPieces.append(square.occupied)
            #Move Piece to new square
            self.position = square.position
            #Mark that square is occupied by this piece
            square.occupied = self
            return True
        else:
            print("Not a valid move")
            return False
class Pawn(Piece):
    """A pawn: moves one square forward (two from its starting row) and
    captures one square diagonally forward."""

    def __init__(self, pos, col):
        self.position = pos
        self.color = col
        self.name = "Pawn"

    def getValidMoves(self, board):
        """Return every square this pawn may legally move to."""
        forward = 1 if self.color == "White" else -1
        start_row = 1 if self.color == "White" else 6
        # From its starting row a pawn may advance two squares.
        reach = 2 if self.position[1] == start_row else 1
        moves = list(super().isLineValid(board, 0, forward, reach))
        # Diagonal steps; isSquareValid filters out non-captures.
        moves.extend(super().isLineValid(board, 1, forward, 1))
        moves.extend(super().isLineValid(board, -1, forward, 1))
        return moves
class Rook(Piece):
    """A rook: slides any number of squares along ranks and files."""

    def __init__(self, pos, col):
        self.position = pos
        self.color = col
        self.name = "Rook"

    def getValidMoves(self, board):
        """Return every square this rook may legally move to."""
        moves = []
        # Scan outward in each of the four straight directions.
        for dx, dy in ((1, 0), (-1, 0), (0, 1), (0, -1)):
            moves.extend(super().isLineValid(board, dx, dy, 0))
        return moves
class Knight(Piece):
    """A knight: jumps in an L-shape, ignoring intervening pieces."""

    def __init__(self, pos, col):
        self.position = pos
        self.color = col
        self.name = "Knight"

    def getValidMoves(self, board):
        """Return every square this knight may legally jump to."""
        x, y = self.position
        jumps = [[x + dx, y + dy]
                 for dx, dy in ((2, 1), (2, -1), (-2, 1), (-2, -1),
                                (1, 2), (-1, 2), (1, -2), (-1, -2))]
        valid = []
        for target in jumps:
            sq = board.getSquareAtPos(target)
            if self.isSquareValid(board, sq):
                valid.append(sq)
        return valid
class Bishop(Piece):
    """A bishop: slides any number of squares along diagonals."""

    def __init__(self, pos, col):
        self.position = pos
        self.color = col
        self.name = "Bishop"

    def getValidMoves(self, board):
        """Return every square this bishop may legally move to."""
        moves = []
        # Scan outward along each of the four diagonals.
        for dx, dy in ((1, 1), (-1, 1), (1, -1), (-1, -1)):
            moves.extend(super().isLineValid(board, dx, dy, 0))
        return moves
class Queen(Piece):
    """A queen: slides any distance along ranks, files and diagonals."""

    def __init__(self, pos, col):
        self.position = pos
        self.color = col
        self.name = "Queen"

    def getValidMoves(self, board):
        """Return every square this queen may legally move to."""
        moves = []
        # Straight directions first, then the diagonals.
        for dx, dy in ((1, 0), (-1, 0), (0, 1), (0, -1),
                       (1, 1), (-1, 1), (1, -1), (-1, -1)):
            moves.extend(super().isLineValid(board, dx, dy, 0))
        return moves
class King(Piece):
    """The king: moves a single square in any of the eight directions."""

    def __init__(self, pos, col):
        self.position = pos
        self.color = col
        self.name = "King"

    def getValidMoves(self, board):
        """Return every square this king may legally step to."""
        moves = []
        # One step in each straight direction, then each diagonal.
        for dx, dy in ((1, 0), (-1, 0), (0, 1), (0, -1),
                       (1, 1), (-1, 1), (1, -1), (-1, -1)):
            moves.extend(super().isLineValid(board, dx, dy, 1))
        return moves
class Player:
    """One side of the game: its color, its live pieces and the enemy
    pieces it has captured so far."""
    color = str
    pieces = []
    capturedPieces = []

    def __init__(self, color):
        self.color = color
        self.pieces = []
        self.capturedPieces = []

    def getOpponent(self, board):
        """Return the other Player registered on *board*."""
        first, second = board.players
        return second if first == self else first
#Draws the current board state to the terminal: Black's captures on top,
#the 8x8 grid with rank numbers / file letters as guides, and White's
#captures underneath. Empty squares show their color initial; occupied
#squares show the first four letters of the piece name.
def drawBoard(board):
    print(" ")
    print(" ")
    print("Black Captured: ", end = "")
    for captured in board.players[1].capturedPieces:
        print(captured.name[:4], end = " ")
    print(" ")
    print(" ")
    for y in range(9):
        #print guide numbers before each row
        if (y < 8):
            print(8-y, end = " ")
        else:
            print(" ", end = " ")
        for x in range(8):
            #If on the last row print guide letters
            if (y==8):
                print (" " +numToLetter(x) + " ", end = "")
            else:
                #The board is printed top-down, so invert the row index
                invrow = 7-y
                square = board.getSquareAtPos([x, invrow])
                if (square.occupied == None):
                    print("| " + square.color[0] + ' |', end = "")
                else:
                    print("|" + square.occupied.name[0:4] + ' |', end = "")
            if (x == 7):
                print(" ")
    print(" ")
    print(" ")
    print("White captured: ", end = "")
    for captured in board.players[0].capturedPieces:
        print(captured.name[:4], end = " ")
    print(" ")
    print(" ")
#Converts number of row to a letter
def numToLetter(num):
    """Map a file index 0-7 to its letter "A"-"H".

    Prints a message and returns None for an out-of-range index.
    """
    letters = {0: "A", 1: "B", 2: "C", 3: "D",
               4: "E", 5: "F", 6: "G", 7: "H"}
    if num in letters:
        return letters[num]
    # Bug fix: the old error path concatenated the int directly onto a
    # string, which raised TypeError instead of printing the message.
    print("Number: " + str(num) + " is not valid")
    return
#Converts letter of a row to a number
def letterToNum(letter):
    """Map a file letter "A"-"H" to its 0-based index; None if invalid."""
    for index, name in enumerate("ABCDEFGH"):
        if letter == name:
            return index
    print("Letter: " + letter + " is not valid")
    return
#Converts traditional chess positions (i.e A8) to positions
def alphaNumMoveToPos(text):
    """Convert algebraic notation such as "A8" into an [x, y] position
    (both 0-indexed); returns None when either coordinate is invalid."""
    letter = text[0].capitalize()
    number = text[1]
    #convert letters to x
    letter = letterToNum(letter)
    # Bug fix: an invalid letter used to leak through as [None, y] and
    # crash later in Board.getPieceAtPos; bail out like the number path.
    if letter is None:
        return
    #convert numbers from 1-8 to 0-7
    if (number.isnumeric()):
        number = int(number) - 1
        if (number > 7 or number < 0):
            # Bug fix: str() needed — concatenating the int raised TypeError.
            print("Number: " + str(number) + " is not valid")
            return
    else:
        print("Number: " + number + " is not valid")
        return
    return [letter, number]
#Parses the text into a move and moves the pieces
#Returns whether the move was valid
def parseMove(text, board, player):
    """Parse a move like "A2 A4" and perform it for *player*.

    Returns True when the move was legal and executed, False otherwise.
    """
    try:
        trimText = text.strip()
        piecePos = alphaNumMoveToPos(trimText[0:2])
        targetSquare = alphaNumMoveToPos(trimText[3:])
    # Bug fix: the bare `except:` also swallowed KeyboardInterrupt and
    # SystemExit, making the input loop impossible to interrupt cleanly.
    except Exception:
        print("Move not Valid")
        return False
    if (piecePos == None or targetSquare == None):
        return False
    piece = board.getPieceAtPos(piecePos)
    if piece == None:
        return False
    square = board.getSquareAtPos(targetSquare)
    validMove = piece.move(board, square, player)
    return validMove
#Sets up a new game (random color assignment) and runs the interactive
#move loop until a player types "end".
def startGame():
    #Create the board
    board = Board()
    #Create Players and assign them random color
    colors = ["White", "Black"]
    playerColor = random.randint(0, 1)
    player1 = Player(colors[playerColor])
    player2 = Player(colors[1-playerColor])
    #Probably a cleaner way to pass players into move function, Fine for now
    if (player1.color == "White"):
        whitePlayer = player1
        blackPlayer = player2
    else:
        whitePlayer = player2
        blackPlayer = player1
    board.players = [whitePlayer, blackPlayer]
    board.populateSquares()
    drawBoard(board)
    gameOver = False
    whiteTurn = True
    while not gameOver:
        if (whiteTurn):
            move = input("White's move:")
            if (move == "end"):
                gameOver = True
                return
            validMove = parseMove(move, board, whitePlayer)
        else:
            move = input("Black's move:")
            if (move == "end"):
                gameOver = True
                return
            validMove = parseMove(move, board, blackPlayer)
        #Only pass the turn when the attempted move was legal
        if (validMove):
            whiteTurn = not whiteTurn
        drawBoard(board)
startGame()
"stephen@newcombnet.com"
] | stephen@newcombnet.com |
6dda590fd31be17f57b17fb5c28c7e0bb50f245f | fa50869bb134a32fc2b7910900e4a76ab411530a | /onlineshoppingwebsite/shop/models.py | bb5d4c30c5c144e497dc21bd56a6a4bc8d9833d7 | [] | no_license | Rahul2706/Shopping-Cart | 9822bc91e86d5930e0769a75f83caf3517b1a2c3 | 33707378443a4aa2c8b1e0a147ad0b1f938faf4d | refs/heads/master | 2022-12-07T09:20:26.254857 | 2020-08-11T13:20:10 | 2020-08-11T13:20:10 | 286,726,454 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,653 | py | from django.db import models
class Product(models.Model):
    """A catalogue item offered for sale in the shop."""
    # NOTE(review): missing parentheses — this binds the AutoField *class*
    # instead of declaring a field, so Django falls back to the implicit
    # `id` primary key. Probably meant models.AutoField(primary_key=True);
    # confirm before changing, since it would alter the schema.
    product_id = models.AutoField
    product_name = models.CharField(max_length=50)
    category = models.CharField(max_length=50,default="")
    subcategory = models.CharField(max_length=50,default="")
    price = models.IntegerField(default=0)
    desc = models.CharField(max_length=1000)
    pub_date = models.DateField()
    img = models.ImageField(upload_to="shop/images",default="")

    def __str__(self):
        # Shown in the admin and anywhere the object is stringified.
        return self.product_name
class Contact(models.Model):
    """A message submitted through the site's contact form."""
    msg_id = models.AutoField(primary_key=True)
    name = models.CharField(max_length=50)
    email = models.CharField(max_length=70, default="")
    phone = models.CharField(max_length=70, default="")
    desc = models.CharField(max_length=500, default="")

    def __str__(self):
        # Shown in the admin and anywhere the object is stringified.
        return self.name
class Orders(models.Model):
    """A placed order: the cart contents (as JSON text) plus the buyer's
    contact and shipping details."""
    order_id = models.AutoField(primary_key=True)
    # Serialized cart; stored as text rather than a related table.
    items_json = models.CharField(max_length=5000)
    amount= models.IntegerField(default=0)
    name = models.CharField(max_length=90)
    email = models.CharField(max_length=111)
    address = models.CharField(max_length=111)
    city = models.CharField(max_length=111)
    state = models.CharField(max_length=111)
    zip_code = models.CharField(max_length=111)
    phone = models.CharField(max_length=111, default="")
class OrderUpdate(models.Model):
    """A tracking/status entry attached to an order by its integer id."""
    update_id = models.AutoField(primary_key=True)
    # Bug fix: an IntegerField default must be an int — the old default=""
    # fails model validation/migration; 0 keeps the "no order" semantics.
    order_id = models.IntegerField(default=0)
    update_desc = models.CharField(max_length=5000)
    timestamp = models.DateField(auto_now_add=True)

    def __str__(self):
        # Short preview of the update text.
        return self.update_desc[0:7] + "..."
| [
"51708827+Rahul2706@users.noreply.github.com"
] | 51708827+Rahul2706@users.noreply.github.com |
8a8ae1f5a51641cfd46a1edfbdc8e068dba546e3 | dddaa0b722e7ff191224ae3b212d1774babf8d79 | /App.py | c0e11e6bfa54c835f7f0d9ee429a2ca1933c1db3 | [
"MIT"
] | permissive | furukawaTakumi/SnakeGameByPython | 7312f9ce0a907f128f4fd1e36d18affdd1705d4f | 4fe768f524ef5b657ed2716db41f1af7abc79771 | refs/heads/master | 2020-08-12T17:22:48.503680 | 2019-11-27T10:46:13 | 2019-11-27T10:46:13 | 214,808,333 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,128 | py | import pyxel
from screen.ScreenCreator import CreateStartScreen
from screen.ScreenCreator import CreateGameOverScreen
from GameStatus import GameStatus
from game_contents.GameController import GameController
# Path of the pyxel resource bundle (sprites/sounds) loaded at startup.
resourse_path = "assets/course.pyxres"

class App:
    """Top-level pyxel application: owns the start/game-over screens and
    the game controller, and dispatches update/draw on GameStatus."""

    def __init__(self):
        # 130x90 window at 30 fps; App() blocks here because pyxel.run
        # enters the main loop and never returns.
        pyxel.init(130, 90,border_width=4, fps=30)
        pyxel.mouse(True)
        pyxel.load(resourse_path)
        self.now_status = GameStatus.START
        self.startScreen = CreateStartScreen()
        self.startScreen.PrepareDisplay()
        self.gameoverScreen = CreateGameOverScreen()
        self.gameController = GameController()
        pyxel.run(self.update, self.draw)
        pass

    def update(self):
        """Advance one frame: handle input and state transitions.

        The mouse cursor is shown on menu screens and hidden in-game.
        """
        if self.now_status == GameStatus.START:
            if self.startScreen.isBtnClicked("Start!"):
                self.now_status = GameStatus.GAME
                pyxel.mouse(False)
        elif self.now_status == GameStatus.GAME:
            self.gameController.UpdateData()
            if self.gameController.CheckGameOver():
                self.gameoverScreen.PrepareDisplay()
                self.now_status = GameStatus.GAMEOVER
        elif self.now_status == GameStatus.GAMEOVER:
            pyxel.mouse(True)
            if self.gameoverScreen.isBtnClicked("Restart?"):
                self.gameController = GameController()
                self.now_status = GameStatus.GAME
                pyxel.mouse(False)
            elif self.gameoverScreen.isBtnClicked("toTitle"):
                # Rebuild the controller and both screens, back to title.
                self.gameController = GameController()
                self.gameoverScreen.PrepareDisplay()
                self.startScreen.PrepareDisplay()
                self.now_status = GameStatus.START
                pyxel.mouse(True)
        pass

    def draw(self):
        """Render whichever screen matches the current status."""
        pyxel.cls(0)
        if self.now_status == GameStatus.START:
            self.startScreen.ScreenUpdate()
        elif self.now_status == GameStatus.GAME:
            self.gameController.UpdateDisplay()
        elif self.now_status == GameStatus.GAMEOVER:
            self.gameoverScreen.ScreenUpdate()
# Entry point: constructing App starts the pyxel main loop immediately.
if __name__ == "__main__":
    App()
| [
"black3moon88@gmail.com"
] | black3moon88@gmail.com |
76cb125716e94cf19110dbb8f86729e290579884 | 85d0f57a25d94d0fe0842958da75187bee2957a1 | /boto-2.8.0/boto/s3/key.py | cf8a092f15c0b87c4b3e722cd0d1c3f5e5f8d2d5 | [] | no_license | bopopescu/Boto-4 | 06c7f070cd57b775061654dab400f2c4a48119e7 | 8152d03878bd324c4d2c6523ac3f257678465c56 | refs/heads/master | 2022-11-24T19:24:43.687681 | 2013-04-09T08:16:34 | 2013-04-09T08:16:34 | 282,525,330 | 0 | 0 | null | 2020-07-25T21:09:43 | 2020-07-25T21:09:43 | null | UTF-8 | Python | false | false | 74,083 | py | # Copyright (c) 2006-2012 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2011, Nexenta Systems Inc.
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import mimetypes
import os
import re
import rfc822
import StringIO
import base64
import binascii
import math
import urllib
import boto.utils
from boto.exception import BotoClientError
from boto.provider import Provider
from boto.s3.keyfile import KeyFile
from boto.s3.user import User
from boto import UserAgent
from boto.utils import compute_md5
try:
from hashlib import md5
except ImportError:
from md5 import md5
class Key(object):
    """
    Represents a key (object) in an S3 bucket.
    :ivar bucket: The parent :class:`boto.s3.bucket.Bucket`.
    :ivar name: The name of this Key object.
    :ivar metadata: A dictionary containing user metadata that you
        wish to store with the object or that has been retrieved from
        an existing object.
    :ivar cache_control: The value of the `Cache-Control` HTTP header.
    :ivar content_type: The value of the `Content-Type` HTTP header.
    :ivar content_encoding: The value of the `Content-Encoding` HTTP header.
    :ivar content_disposition: The value of the `Content-Disposition` HTTP
        header.
    :ivar content_language: The value of the `Content-Language` HTTP header.
    :ivar etag: The `etag` associated with this object.
    :ivar last_modified: The string timestamp representing the last
        time this object was modified in S3.
    :ivar owner: The ID of the owner of this object.
    :ivar storage_class: The storage class of the object. Currently, one of:
        STANDARD | REDUCED_REDUNDANCY | GLACIER
    :ivar md5: The MD5 hash of the contents of the object.
    :ivar size: The size, in bytes, of the object.
    :ivar version_id: The version ID of this object, if it is a versioned
        object.
    :ivar encrypted: Whether the object is encrypted while at rest on
        the server.
    """
    # Content-Type sent when the caller does not supply one explicitly.
    DefaultContentType = 'application/octet-stream'
    # XML body template for restore requests; %s is replaced with the
    # number of days to keep the restored copy available.
    RestoreBody = """<?xml version="1.0" encoding="UTF-8"?>
    <RestoreRequest xmlns="http://s3.amazonaws.com/doc/2006-03-01">
      <Days>%s</Days>
    </RestoreRequest>"""
    # Chunk size (bytes) used by next()/read() and by send_file().
    BufferSize = 8192
    # The object metadata fields a user can set, other than custom metadata
    # fields (i.e., those beginning with a provider-specific prefix like
    # x-amz-meta).
    base_user_settable_fields = set(["cache-control", "content-disposition",
                                     "content-encoding", "content-language",
                                     "content-md5", "content-type"])
    # Same field names with '-' replaced by '_' (attribute-style spelling).
    _underscore_base_user_settable_fields = set()
    for f in base_user_settable_fields:
        _underscore_base_user_settable_fields.add(f.replace('-', '_'))
    def __init__(self, bucket=None, name=None):
        """
        :type bucket: :class:`boto.s3.bucket.Bucket`
        :param bucket: The bucket that contains (or will contain) this key.
        :type name: string
        :param name: The name (key) of the object within the bucket.
        """
        self.bucket = bucket
        self.name = name
        self.metadata = {}
        self.cache_control = None
        self.content_type = self.DefaultContentType
        self.content_encoding = None
        self.content_disposition = None
        self.content_language = None
        self.filename = None
        self.etag = None
        self.is_latest = False
        self.last_modified = None
        self.owner = None
        self.storage_class = 'STANDARD'
        self.md5 = None
        self.base64md5 = None
        self.path = None
        self.resp = None  # open HTTP response while the key is open for read
        self.mode = None  # 'r' while open for read, 'w' for write
        self.size = None
        self.version_id = None
        self.source_version_id = None
        self.delete_marker = False
        self.encrypted = None
        # If the object is being restored, this attribute will be set to True.
        # If the object is restored, it will be set to False. Otherwise this
        # value will be None. If the restore is completed (ongoing_restore =
        # False), the expiry_date will be populated with the expiry date of the
        # restored object.
        self.ongoing_restore = None
        self.expiry_date = None
def __repr__(self):
if self.bucket:
return '<Key: %s,%s>' % (self.bucket.name, self.name)
else:
return '<Key: None,%s>' % self.name
def __getattr__(self, name):
if name == 'key':
return self.name
else:
raise AttributeError
def __setattr__(self, name, value):
if name == 'key':
self.__dict__['name'] = value
else:
self.__dict__[name] = value
    def __iter__(self):
        # The Key is its own iterator; see next() for the chunked reads.
        return self
@property
def provider(self):
provider = None
if self.bucket and self.bucket.connection:
provider = self.bucket.connection.provider
return provider
def get_md5_from_hexdigest(self, md5_hexdigest):
"""
A utility function to create the 2-tuple (md5hexdigest, base64md5)
from just having a precalculated md5_hexdigest.
"""
digest = binascii.unhexlify(md5_hexdigest)
base64md5 = base64.encodestring(digest)
if base64md5[-1] == '\n':
base64md5 = base64md5[0:-1]
return (md5_hexdigest, base64md5)
    def handle_encryption_headers(self, resp):
        # Record the server-side-encryption state from the response, if the
        # provider defines an SSE header; otherwise mark it unknown (None).
        provider = self.bucket.connection.provider
        if provider.server_side_encryption_header:
            self.encrypted = resp.getheader(provider.server_side_encryption_header, None)
        else:
            self.encrypted = None
    def handle_version_headers(self, resp, force=False):
        # Capture versioning-related response headers (version id, copy
        # source version, delete marker) onto this Key.  Pass force=True to
        # overwrite an explicitly-set version_id (e.g. after a PUT).
        provider = self.bucket.connection.provider
        # If the Key object already has a version_id attribute value, it
        # means that it represents an explicit version and the user is
        # doing a get_contents_*(version_id=<foo>) to retrieve another
        # version of the Key. In that case, we don't really want to
        # overwrite the version_id in this Key object. Comprende?
        if self.version_id is None or force:
            self.version_id = resp.getheader(provider.version_id, None)
        self.source_version_id = resp.getheader(provider.copy_source_version_id,
                                                None)
        if resp.getheader(provider.delete_marker, 'false') == 'true':
            self.delete_marker = True
        else:
            self.delete_marker = False
def handle_restore_headers(self, response):
header = response.getheader('x-amz-restore')
if header is None:
return
parts = header.split(',', 1)
for part in parts:
key, val = [i.strip() for i in part.split('=')]
val = val.replace('"', '')
if key == 'ongoing-request':
self.ongoing_restore = True if val.lower() == 'true' else False
elif key == 'expiry-date':
self.expiry_date = val
    def open_read(self, headers=None, query_args='',
                  override_num_retries=None, response_headers=None):
        """
        Open this key for reading
        :type headers: dict
        :param headers: Headers to pass in the web request
        :type query_args: string
        :param query_args: Arguments to pass in the query string
            (ie, 'torrent')
        :type override_num_retries: int
        :param override_num_retries: If not None will override configured
            num_retries parameter for underlying GET.
        :type response_headers: dict
        :param response_headers: A dictionary containing HTTP
            headers/values that will override any headers associated
            with the stored object in the response. See
            http://goo.gl/EWOPb for details.
        """
        # No-op if the key is already open (self.resp set).
        if self.resp == None:
            self.mode = 'r'
            provider = self.bucket.connection.provider
            self.resp = self.bucket.connection.make_request(
                'GET', self.bucket.name, self.name, headers,
                query_args=query_args,
                override_num_retries=override_num_retries)
            if self.resp.status < 199 or self.resp.status > 299:
                body = self.resp.read()
                raise provider.storage_response_error(self.resp.status,
                                                      self.resp.reason, body)
            # NOTE(review): the response_headers *parameter* is never passed
            # to make_request above and is immediately rebound here to the
            # actual response message -- it appears unused; confirm callers.
            response_headers = self.resp.msg
            self.metadata = boto.utils.get_aws_metadata(response_headers,
                                                        provider)
            for name, value in response_headers.items():
                # To get correct size for Range GETs, use Content-Range
                # header if one was returned. If not, use Content-Length
                # header.
                if (name.lower() == 'content-length' and
                    'Content-Range' not in response_headers):
                    self.size = int(value)
                elif name.lower() == 'content-range':
                    # Total size is the part after the '/' in e.g.
                    # "bytes 0-499/1234".
                    end_range = re.sub('.*/(.*)', '\\1', value)
                    self.size = int(end_range)
                elif name.lower() == 'etag':
                    self.etag = value
                elif name.lower() == 'content-type':
                    self.content_type = value
                elif name.lower() == 'content-encoding':
                    self.content_encoding = value
                elif name.lower() == 'content-language':
                    self.content_language = value
                elif name.lower() == 'last-modified':
                    self.last_modified = value
                elif name.lower() == 'cache-control':
                    self.cache_control = value
                elif name.lower() == 'content-disposition':
                    self.content_disposition = value
            self.handle_version_headers(self.resp)
            self.handle_encryption_headers(self.resp)
    def open_write(self, headers=None, override_num_retries=None):
        """
        Open this key for writing.
        Not yet implemented
        :type headers: dict
        :param headers: Headers to pass in the write request
        :type override_num_retries: int
        :param override_num_retries: If not None will override configured
            num_retries parameter for underlying PUT.
        """
        # Uploads go through send_file()/set_contents_from_* instead.
        raise BotoClientError('Not Implemented')
def open(self, mode='r', headers=None, query_args=None,
override_num_retries=None):
if mode == 'r':
self.mode = 'r'
self.open_read(headers=headers, query_args=query_args,
override_num_retries=override_num_retries)
elif mode == 'w':
self.mode = 'w'
self.open_write(headers=headers,
override_num_retries=override_num_retries)
else:
raise BotoClientError('Invalid mode: %s' % mode)
    # True once close() has been called on an open key.
    closed = False

    def close(self):
        """Drain and release the open HTTP response, if any."""
        if self.resp:
            # Drain the body so the underlying connection can be reused.
            self.resp.read()
        self.resp = None
        self.mode = None
        self.closed = True
    def next(self):
        """
        By providing a next method, the key object supports use as an iterator.
        For example, you can now say:
        for bytes in key:
            write bytes to a file or whatever
        All of the HTTP connection stuff is handled for you.
        """
        # Lazily opens the key on first call; returns BufferSize chunks
        # until the response is exhausted.
        self.open_read()
        data = self.resp.read(self.BufferSize)
        if not data:
            self.close()
            raise StopIteration
        return data
    def read(self, size=0):
        """Read up to *size* bytes from the key (all remaining bytes when
        size == 0).  Opens the key lazily; closes it at EOF."""
        self.open_read()
        if size == 0:
            data = self.resp.read()
        else:
            data = self.resp.read(size)
        if not data:
            self.close()
        return data
def change_storage_class(self, new_storage_class, dst_bucket=None,
validate_dst_bucket=True):
"""
Change the storage class of an existing key.
Depending on whether a different destination bucket is supplied
or not, this will either move the item within the bucket, preserving
all metadata and ACL info bucket changing the storage class or it
will copy the item to the provided destination bucket, also
preserving metadata and ACL info.
:type new_storage_class: string
:param new_storage_class: The new storage class for the Key.
Possible values are:
* STANDARD
* REDUCED_REDUNDANCY
:type dst_bucket: string
:param dst_bucket: The name of a destination bucket. If not
provided the current bucket of the key will be used.
:type validate_dst_bucket: bool
:param validate_dst_bucket: If True, will validate the dst_bucket
by using an extra list request.
"""
if new_storage_class == 'STANDARD':
return self.copy(self.bucket.name, self.name,
reduced_redundancy=False, preserve_acl=True,
validate_dst_bucket=validate_dst_bucket)
elif new_storage_class == 'REDUCED_REDUNDANCY':
return self.copy(self.bucket.name, self.name,
reduced_redundancy=True, preserve_acl=True,
validate_dst_bucket=validate_dst_bucket)
else:
raise BotoClientError('Invalid storage class: %s' %
new_storage_class)
    def copy(self, dst_bucket, dst_key, metadata=None,
             reduced_redundancy=False, preserve_acl=False,
             encrypt_key=False, validate_dst_bucket=True):
        """
        Copy this Key to another bucket.
        :type dst_bucket: string
        :param dst_bucket: The name of the destination bucket
        :type dst_key: string
        :param dst_key: The name of the destination key
        :type metadata: dict
        :param metadata: Metadata to be associated with new key. If
            metadata is supplied, it will replace the metadata of the
            source key being copied. If no metadata is supplied, the
            source key's metadata will be copied to the new key.
        :type reduced_redundancy: bool
        :param reduced_redundancy: If True, this will force the
            storage class of the new Key to be REDUCED_REDUNDANCY
            regardless of the storage class of the key being copied.
            The Reduced Redundancy Storage (RRS) feature of S3,
            provides lower redundancy at lower storage cost.
        :type preserve_acl: bool
        :param preserve_acl: If True, the ACL from the source key will
            be copied to the destination key. If False, the
            destination key will have the default ACL. Note that
            preserving the ACL in the new key object will require two
            additional API calls to S3, one to retrieve the current
            ACL and one to set that ACL on the new object. If you
            don't care about the ACL, a value of False will be
            significantly more efficient.
        :type encrypt_key: bool
        :param encrypt_key: If True, the new copy of the object will
            be encrypted on the server-side by S3 and will be stored
            in an encrypted form while at rest in S3.
        :type validate_dst_bucket: bool
        :param validate_dst_bucket: If True, will validate the dst_bucket
            by using an extra list request.
        :rtype: :class:`boto.s3.key.Key` or subclass
        :returns: An instance of the newly created key object
        """
        dst_bucket = self.bucket.connection.lookup(dst_bucket,
                                                   validate_dst_bucket)
        if reduced_redundancy:
            storage_class = 'REDUCED_REDUNDANCY'
        else:
            # Otherwise keep the source key's storage class.
            storage_class = self.storage_class
        return dst_bucket.copy_key(dst_key, self.bucket.name,
                                   self.name, metadata,
                                   storage_class=storage_class,
                                   preserve_acl=preserve_acl,
                                   encrypt_key=encrypt_key)
def startElement(self, name, attrs, connection):
if name == 'Owner':
self.owner = User(self)
return self.owner
else:
return None
def endElement(self, name, value, connection):
if name == 'Key':
self.name = value
elif name == 'ETag':
self.etag = value
elif name == 'IsLatest':
if value == 'true':
self.is_latest = True
else:
self.is_latest = False
elif name == 'LastModified':
self.last_modified = value
elif name == 'Size':
self.size = int(value)
elif name == 'StorageClass':
self.storage_class = value
elif name == 'Owner':
pass
elif name == 'VersionId':
self.version_id = value
else:
setattr(self, name, value)
def exists(self):
"""
Returns True if the key exists
:rtype: bool
:return: Whether the key exists on S3
"""
return bool(self.bucket.lookup(self.name))
    def delete(self):
        """
        Delete this key from S3
        """
        # Pass along version_id so that, in a versioned bucket, the exact
        # version this Key represents is the one deleted.
        return self.bucket.delete_key(self.name, version_id=self.version_id)
    def get_metadata(self, name):
        """Return the user-metadata value stored under *name*, or None."""
        return self.metadata.get(name)

    def set_metadata(self, name, value):
        """Set one entry in this key's local user-metadata dict (no request
        is issued here)."""
        self.metadata[name] = value

    def update_metadata(self, d):
        """Merge the dict *d* into this key's local user-metadata dict (no
        request is issued here)."""
        self.metadata.update(d)
    # convenience methods for setting/getting ACL
    def set_acl(self, acl_str, headers=None):
        # Delegates to the bucket; silently a no-op when unattached.
        if self.bucket != None:
            self.bucket.set_acl(acl_str, self.name, headers=headers)

    def get_acl(self, headers=None):
        if self.bucket != None:
            return self.bucket.get_acl(self.name, headers=headers)

    def get_xml_acl(self, headers=None):
        if self.bucket != None:
            return self.bucket.get_xml_acl(self.name, headers=headers)

    def set_xml_acl(self, acl_str, headers=None):
        if self.bucket != None:
            return self.bucket.set_xml_acl(acl_str, self.name, headers=headers)

    def set_canned_acl(self, acl_str, headers=None):
        # NOTE: unlike the wrappers above, this does not guard against a
        # missing bucket.
        return self.bucket.set_canned_acl(acl_str, self.name, headers)
    def get_redirect(self):
        """Return the redirect location configured for this key.
        If no redirect is configured (via set_redirect), then None
        will be returned.
        """
        response = self.bucket.connection.make_request(
            'HEAD', self.bucket.name, self.name)
        if response.status == 200:
            # getheader() returns None when the header is absent, i.e.
            # when no redirect is configured.
            return response.getheader('x-amz-website-redirect-location')
        else:
            raise self.provider.storage_response_error(
                response.status, response.reason, response.read())
    def set_redirect(self, redirect_location):
        """Configure this key to redirect to another location.
        When the bucket associated with this key is accessed from the website
        endpoint, a 301 redirect will be issued to the specified
        `redirect_location`.
        :type redirect_location: string
        :param redirect_location: The location to redirect.
        """
        headers = {'x-amz-website-redirect-location': redirect_location}
        # NOTE(review): this PUT sends no body, so it appears to replace the
        # object with an empty one -- confirm before using on keys that
        # already hold content.
        response = self.bucket.connection.make_request('PUT', self.bucket.name,
                                                       self.name, headers)
        if response.status == 200:
            return True
        else:
            raise self.provider.storage_response_error(
                response.status, response.reason, response.read())
    def make_public(self, headers=None):
        """Apply the 'public-read' canned ACL to this key."""
        return self.bucket.set_canned_acl('public-read', self.name, headers)
    def generate_url(self, expires_in, method='GET', headers=None,
                     query_auth=True, force_http=False, response_headers=None,
                     expires_in_absolute=False, version_id=None,
                     policy=None, reduced_redundancy=False, encrypt_key=False):
        """
        Generate a URL to access this key.
        :type expires_in: int
        :param expires_in: How long the url is valid for, in seconds
        :type method: string
        :param method: The method to use for retrieving the file
            (default is GET)
        :type headers: dict
        :param headers: Any headers to pass along in the request
        :type query_auth: bool
        :param query_auth:
        :type force_http: bool
        :param force_http: If True, http will be used instead of https.
        :type response_headers: dict
        :param response_headers: A dictionary containing HTTP
            headers/values that will override any headers associated
            with the stored object in the response. See
            http://goo.gl/EWOPb for details.
        :type expires_in_absolute: bool
        :param expires_in_absolute:
        :type version_id: string
        :param version_id: The version_id of the object to GET. If specified
            this overrides any value in the key.
        :type policy: :class:`boto.s3.acl.CannedACLStrings`
        :param policy: A canned ACL policy that will be applied to the
            new key in S3.
        :type reduced_redundancy: bool
        :param reduced_redundancy: If True, this will set the storage
            class of the new Key to be REDUCED_REDUNDANCY. The Reduced
            Redundancy Storage (RRS) feature of S3, provides lower
            redundancy at lower storage cost.
        :type encrypt_key: bool
        :param encrypt_key: If True, the new copy of the object will
            be encrypted on the server-side by S3 and will be stored
            in an encrypted form while at rest in S3.
        :rtype: string
        :return: The URL to access the key
        """
        provider = self.bucket.connection.provider
        version_id = version_id or self.version_id
        if headers is None:
            headers = {}
        else:
            # Work on a copy so the caller's dict is not mutated below.
            headers = headers.copy()
        # add headers accordingly (usually PUT case)
        if policy:
            headers[provider.acl_header] = policy
        if reduced_redundancy:
            self.storage_class = 'REDUCED_REDUNDANCY'
            if provider.storage_class_header:
                headers[provider.storage_class_header] = self.storage_class
        if encrypt_key:
            headers[provider.server_side_encryption_header] = 'AES256'
        # Merge the key's user metadata into the headers handed to the
        # connection's URL generator.
        headers = boto.utils.merge_meta(headers, self.metadata, provider)
        return self.bucket.connection.generate_url(expires_in, method,
                                                   self.bucket.name, self.name,
                                                   headers, query_auth,
                                                   force_http,
                                                   response_headers,
                                                   expires_in_absolute,
                                                   version_id)
def send_file(self, fp, headers=None, cb=None, num_cb=10,
query_args=None, chunked_transfer=False, size=None):
"""
Upload a file to a key into a bucket on S3.
:type fp: file
:param fp: The file pointer to upload. The file pointer must
point point at the offset from which you wish to upload.
ie. if uploading the full file, it should point at the
start of the file. Normally when a file is opened for
reading, the fp will point at the first byte. See the
bytes parameter below for more info.
:type headers: dict
:param headers: The headers to pass along with the PUT request
:type cb: function
:param cb: a callback function that will be called to report
progress on the upload. The callback should accept two
integer parameters, the first representing the number of
bytes that have been successfully transmitted to S3 and
the second representing the size of the to be transmitted
object.
:type num_cb: int
:param num_cb: (optional) If a callback is specified with the
cb parameter this parameter determines the granularity of
the callback by defining the maximum number of times the
callback will be called during the file
transfer. Providing a negative integer will cause your
callback to be called with each buffer read.
:type size: int
:param size: (optional) The Maximum number of bytes to read
from the file pointer (fp). This is useful when uploading
a file in multiple parts where you are splitting the file
up into different ranges to be uploaded. If not specified,
the default behaviour is to read all bytes from the file
pointer. Less bytes may be available.
"""
provider = self.bucket.connection.provider
try:
spos = fp.tell()
except IOError:
spos = None
self.read_from_stream = False
def sender(http_conn, method, path, data, headers):
# This function is called repeatedly for temporary retries
# so we must be sure the file pointer is pointing at the
# start of the data.
if spos is not None and spos != fp.tell():
fp.seek(spos)
elif spos is None and self.read_from_stream:
# if seek is not supported, and we've read from this
# stream already, then we need to abort retries to
# avoid setting bad data.
raise provider.storage_data_error(
'Cannot retry failed request. fp does not support seeking.')
http_conn.putrequest(method, path)
for key in headers:
http_conn.putheader(key, headers[key])
http_conn.endheaders()
# Calculate all MD5 checksums on the fly, if not already computed
if not self.base64md5:
m = md5()
else:
m = None
save_debug = self.bucket.connection.debug
self.bucket.connection.debug = 0
# If the debuglevel < 3 we don't want to show connection
# payload, so turn off HTTP connection-level debug output (to
# be restored below).
# Use the getattr approach to allow this to work in AppEngine.
if getattr(http_conn, 'debuglevel', 0) < 3:
http_conn.set_debuglevel(0)
data_len = 0
if cb:
if size:
cb_size = size
elif self.size:
cb_size = self.size
else:
cb_size = 0
if chunked_transfer and cb_size == 0:
# For chunked Transfer, we call the cb for every 1MB
# of data transferred, except when we know size.
cb_count = (1024 * 1024) / self.BufferSize
elif num_cb > 1:
cb_count = int(math.ceil(cb_size / self.BufferSize / (num_cb - 1.0)))
elif num_cb < 0:
cb_count = -1
else:
cb_count = 0
i = 0
cb(data_len, cb_size)
bytes_togo = size
if bytes_togo and bytes_togo < self.BufferSize:
chunk = fp.read(bytes_togo)
else:
chunk = fp.read(self.BufferSize)
if spos is None:
# read at least something from a non-seekable fp.
self.read_from_stream = True
while chunk:
chunk_len = len(chunk)
data_len += chunk_len
if chunked_transfer:
http_conn.send('%x;\r\n' % chunk_len)
http_conn.send(chunk)
http_conn.send('\r\n')
else:
http_conn.send(chunk)
if m:
m.update(chunk)
if bytes_togo:
bytes_togo -= chunk_len
if bytes_togo <= 0:
break
if cb:
i += 1
if i == cb_count or cb_count == -1:
cb(data_len, cb_size)
i = 0
if bytes_togo and bytes_togo < self.BufferSize:
chunk = fp.read(bytes_togo)
else:
chunk = fp.read(self.BufferSize)
self.size = data_len
if m:
# Use the chunked trailer for the digest
hd = m.hexdigest()
self.md5, self.base64md5 = self.get_md5_from_hexdigest(hd)
if chunked_transfer:
http_conn.send('0\r\n')
# http_conn.send("Content-MD5: %s\r\n" % self.base64md5)
http_conn.send('\r\n')
if cb and (cb_count <= 1 or i > 0) and data_len > 0:
cb(data_len, cb_size)
response = http_conn.getresponse()
body = response.read()
http_conn.set_debuglevel(save_debug)
self.bucket.connection.debug = save_debug
if ((response.status == 500 or response.status == 503 or
response.getheader('location')) and not chunked_transfer):
# we'll try again.
return response
elif response.status >= 200 and response.status <= 299:
self.etag = response.getheader('etag')
if self.etag != '"%s"' % self.md5:
raise provider.storage_data_error(
'ETag from S3 did not match computed MD5')
return response
else:
raise provider.storage_response_error(
response.status, response.reason, body)
if not headers:
headers = {}
else:
headers = headers.copy()
headers['User-Agent'] = UserAgent
if self.storage_class != 'STANDARD':
headers[provider.storage_class_header] = self.storage_class
if 'Content-Encoding' in headers:
self.content_encoding = headers['Content-Encoding']
if 'Content-Language' in headers:
self.content_encoding = headers['Content-Language']
if 'Content-Type' in headers:
# Some use cases need to suppress sending of the Content-Type
# header and depend on the receiving server to set the content
# type. This can be achieved by setting headers['Content-Type']
# to None when calling this method.
if headers['Content-Type'] is None:
# Delete null Content-Type value to skip sending that header.
del headers['Content-Type']
else:
self.content_type = headers['Content-Type']
elif self.path:
self.content_type = mimetypes.guess_type(self.path)[0]
if self.content_type == None:
self.content_type = self.DefaultContentType
headers['Content-Type'] = self.content_type
else:
headers['Content-Type'] = self.content_type
if self.base64md5:
headers['Content-MD5'] = self.base64md5
if chunked_transfer:
headers['Transfer-Encoding'] = 'chunked'
#if not self.base64md5:
# headers['Trailer'] = "Content-MD5"
else:
headers['Content-Length'] = str(self.size)
headers['Expect'] = '100-Continue'
headers = boto.utils.merge_meta(headers, self.metadata, provider)
resp = self.bucket.connection.make_request('PUT', self.bucket.name,
self.name, headers,
sender=sender,
query_args=query_args)
self.handle_version_headers(resp, force=True)
def compute_md5(self, fp, size=None):
"""
:type fp: file
:param fp: File pointer to the file to MD5 hash. The file
pointer will be reset to the same position before the
method returns.
:type size: int
:param size: (optional) The Maximum number of bytes to read
from the file pointer (fp). This is useful when uploading
a file in multiple parts where the file is being split
inplace into different parts. Less bytes may be available.
:rtype: tuple
:return: A tuple containing the hex digest version of the MD5
hash as the first element and the base64 encoded version
of the plain digest as the second element.
"""
tup = compute_md5(fp, size=size)
# Returned values are MD5 hash, base64 encoded MD5 hash, and data size.
# The internal implementation of compute_md5() needs to return the
# data size but we don't want to return that value to the external
# caller because it changes the class interface (i.e. it might
# break some code) so we consume the third tuple value here and
# return the remainder of the tuple to the caller, thereby preserving
# the existing interface.
self.size = tup[2]
return tup[0:2]
    def set_contents_from_stream(self, fp, headers=None, replace=True,
                                 cb=None, num_cb=10, policy=None,
                                 reduced_redundancy=False, query_args=None,
                                 size=None):
        """
        Store an object using the name of the Key object as the key in
        cloud and the contents of the data stream pointed to by 'fp' as
        the contents.
        The stream object is not seekable and total size is not known.
        This has the implication that we can't specify the
        Content-Size and Content-MD5 in the header. So for huge
        uploads, the delay in calculating MD5 is avoided but with a
        penalty of inability to verify the integrity of the uploaded
        data.
        :type fp: file
        :param fp: the file whose contents are to be uploaded
        :type headers: dict
        :param headers: additional HTTP headers to be sent with the
            PUT request.
        :type replace: bool
        :param replace: If this parameter is False, the method will first check
            to see if an object exists in the bucket with the same key. If it
            does, it won't overwrite it. The default value is True which will
            overwrite the object.
        :type cb: function
        :param cb: a callback function that will be called to report
            progress on the upload. The callback should accept two integer
            parameters, the first representing the number of bytes that have
            been successfully transmitted to GS and the second representing the
            total number of bytes that need to be transmitted.
        :type num_cb: int
        :param num_cb: (optional) If a callback is specified with the
            cb parameter, this parameter determines the granularity of
            the callback by defining the maximum number of times the
            callback will be called during the file transfer.
        :type policy: :class:`boto.gs.acl.CannedACLStrings`
        :param policy: A canned ACL policy that will be applied to the new key
            in GS.
        :type reduced_redundancy: bool
        :param reduced_redundancy: If True, this will set the storage
            class of the new Key to be REDUCED_REDUNDANCY. The Reduced
            Redundancy Storage (RRS) feature of S3, provides lower
            redundancy at lower storage cost.
        :type size: int
        :param size: (optional) The Maximum number of bytes to read from
            the file pointer (fp). This is useful when uploading a
            file in multiple parts where you are splitting the file up
            into different ranges to be uploaded. If not specified,
            the default behaviour is to read all bytes from the file
            pointer. Less bytes may be available.
        """
        provider = self.bucket.connection.provider
        if not provider.supports_chunked_transfer():
            raise BotoClientError('%s does not support chunked transfer'
                % provider.get_provider_name())
        # Name of the Object should be specified explicitly for Streams.
        if not self.name or self.name == '':
            raise BotoClientError('Cannot determine the destination '
                                'object name for the given stream')
        if headers is None:
            headers = {}
        if policy:
            headers[provider.acl_header] = policy
        if reduced_redundancy:
            self.storage_class = 'REDUCED_REDUNDANCY'
            if provider.storage_class_header:
                headers[provider.storage_class_header] = self.storage_class
        if self.bucket != None:
            if not replace:
                # Skip the upload entirely if the key already exists.
                if self.bucket.lookup(self.name):
                    return
            # Chunked transfer: size/MD5 are not known up front.
            self.send_file(fp, headers, cb, num_cb, query_args,
                           chunked_transfer=True, size=size)
def set_contents_from_file(self, fp, headers=None, replace=True,
cb=None, num_cb=10, policy=None, md5=None,
reduced_redundancy=False, query_args=None,
encrypt_key=False, size=None, rewind=False):
"""
Store an object in S3 using the name of the Key object as the
key in S3 and the contents of the file pointed to by 'fp' as the
contents. The data is read from 'fp' from its current position until
'size' bytes have been read or EOF.
:type fp: file
:param fp: the file whose contents to upload
:type headers: dict
:param headers: Additional HTTP headers that will be sent with
the PUT request.
:type replace: bool
:param replace: If this parameter is False, the method will
first check to see if an object exists in the bucket with
the same key. If it does, it won't overwrite it. The
default value is True which will overwrite the object.
:type cb: function
:param cb: a callback function that will be called to report
progress on the upload. The callback should accept two
integer parameters, the first representing the number of
bytes that have been successfully transmitted to S3 and
the second representing the size of the to be transmitted
object.
:type cb: int
:param num_cb: (optional) If a callback is specified with the
cb parameter this parameter determines the granularity of
the callback by defining the maximum number of times the
callback will be called during the file transfer.
:type policy: :class:`boto.s3.acl.CannedACLStrings`
:param policy: A canned ACL policy that will be applied to the
new key in S3.
:type md5: A tuple containing the hexdigest version of the MD5
checksum of the file as the first element and the
Base64-encoded version of the plain checksum as the second
element. This is the same format returned by the
compute_md5 method.
:param md5: If you need to compute the MD5 for any reason
prior to upload, it's silly to have to do it twice so this
param, if present, will be used as the MD5 values of the
file. Otherwise, the checksum will be computed.
:type reduced_redundancy: bool
:param reduced_redundancy: If True, this will set the storage
class of the new Key to be REDUCED_REDUNDANCY. The Reduced
Redundancy Storage (RRS) feature of S3, provides lower
redundancy at lower storage cost.
:type encrypt_key: bool
:param encrypt_key: If True, the new copy of the object will
be encrypted on the server-side by S3 and will be stored
in an encrypted form while at rest in S3.
:type size: int
:param size: (optional) The Maximum number of bytes to read
from the file pointer (fp). This is useful when uploading
a file in multiple parts where you are splitting the file
up into different ranges to be uploaded. If not specified,
the default behaviour is to read all bytes from the file
pointer. Less bytes may be available.
:type rewind: bool
:param rewind: (optional) If True, the file pointer (fp) will
be rewound to the start before any bytes are read from
it. The default behaviour is False which reads from the
current position of the file pointer (fp).
:rtype: int
:return: The number of bytes written to the key.
"""
provider = self.bucket.connection.provider
headers = headers or {}
if policy:
headers[provider.acl_header] = policy
if encrypt_key:
headers[provider.server_side_encryption_header] = 'AES256'
if rewind:
# caller requests reading from beginning of fp.
fp.seek(0, os.SEEK_SET)
else:
# The following seek/tell/seek logic is intended
# to detect applications using the older interface to
# set_contents_from_file(), which automatically rewound the
# file each time the Key was reused. This changed with commit
# 14ee2d03f4665fe20d19a85286f78d39d924237e, to support uploads
# split into multiple parts and uploaded in parallel, and at
# the time of that commit this check was added because otherwise
# older programs would get a success status and upload an empty
# object. Unfortuantely, it's very inefficient for fp's implemented
# by KeyFile (used, for example, by gsutil when copying between
# providers). So, we skip the check for the KeyFile case.
# TODO: At some point consider removing this seek/tell/seek
# logic, after enough time has passed that it's unlikely any
# programs remain that assume the older auto-rewind interface.
if not isinstance(fp, KeyFile):
spos = fp.tell()
fp.seek(0, os.SEEK_END)
if fp.tell() == spos:
fp.seek(0, os.SEEK_SET)
if fp.tell() != spos:
# Raise an exception as this is likely a programming
# error whereby there is data before the fp but nothing
# after it.
fp.seek(spos)
raise AttributeError('fp is at EOF. Use rewind option '
'or seek() to data start.')
# seek back to the correct position.
fp.seek(spos)
if reduced_redundancy:
self.storage_class = 'REDUCED_REDUNDANCY'
if provider.storage_class_header:
headers[provider.storage_class_header] = self.storage_class
# TODO - What if provider doesn't support reduced reduncancy?
# What if different providers provide different classes?
if hasattr(fp, 'name'):
self.path = fp.name
if self.bucket != None:
if not md5 and provider.supports_chunked_transfer():
# defer md5 calculation to on the fly and
# we don't know anything about size yet.
chunked_transfer = True
self.size = None
else:
chunked_transfer = False
if isinstance(fp, KeyFile):
# Avoid EOF seek for KeyFile case as it's very inefficient.
key = fp.getkey()
size = key.size - fp.tell()
self.size = size
# At present both GCS and S3 use MD5 for the etag for
# non-multipart-uploaded objects. If the etag is 32 hex
# chars use it as an MD5, to avoid having to read the file
# twice while transferring.
if (re.match('^"[a-fA-F0-9]{32}"$', key.etag)):
etag = key.etag.strip('"')
md5 = (etag, base64.b64encode(binascii.unhexlify(etag)))
if not md5:
# compute_md5() and also set self.size to actual
# size of the bytes read computing the md5.
md5 = self.compute_md5(fp, size)
# adjust size if required
size = self.size
elif size:
self.size = size
else:
# If md5 is provided, still need to size so
# calculate based on bytes to end of content
spos = fp.tell()
fp.seek(0, os.SEEK_END)
self.size = fp.tell() - spos
fp.seek(spos)
size = self.size
self.md5 = md5[0]
self.base64md5 = md5[1]
if self.name == None:
self.name = self.md5
if not replace:
if self.bucket.lookup(self.name):
return
self.send_file(fp, headers=headers, cb=cb, num_cb=num_cb,
query_args=query_args,
chunked_transfer=chunked_transfer, size=size)
# return number of bytes written.
return self.size
def set_contents_from_filename(self, filename, headers=None, replace=True,
                               cb=None, num_cb=10, policy=None, md5=None,
                               reduced_redundancy=False,
                               encrypt_key=False):
    """
    Store an object in S3 using the name of the Key object as the
    key in S3 and the contents of the file named by 'filename'.
    See set_contents_from_file method for details about the
    parameters.

    :type filename: string
    :param filename: The name of the file that you want to put onto S3

    :type headers: dict
    :param headers: Additional headers to pass along with the
        request to AWS.

    :type replace: bool
    :param replace: If True, replaces the contents of the file
        if it already exists.

    :type cb: function
    :param cb: a callback function that will be called to report
        progress on the upload.  The callback should accept two
        integer parameters: bytes transmitted so far and total size.

    :type num_cb: int
    :param num_cb: (optional) Maximum number of times the callback
        will be called during the file transfer.

    :type policy: :class:`boto.s3.acl.CannedACLStrings`
    :param policy: A canned ACL policy that will be applied to the
        new key in S3.

    :type md5: tuple
    :param md5: (hexdigest, base64) MD5 pair as returned by
        compute_md5; supplied to avoid computing the checksum twice.

    :type reduced_redundancy: bool
    :param reduced_redundancy: If True, store the new Key with the
        REDUCED_REDUNDANCY storage class (lower redundancy, lower cost).

    :type encrypt_key: bool
    :param encrypt_key: If True, the new copy of the object will be
        encrypted on the server-side by S3 and will be stored in an
        encrypted form while at rest in S3.

    :rtype: int
    :return: The number of bytes written to the key.
    """
    fp = open(filename, 'rb')
    try:
        # Propagate the byte count from set_contents_from_file for
        # consistency with set_contents_from_string (previously the
        # result was silently discarded and None returned).
        return self.set_contents_from_file(fp, headers, replace, cb, num_cb,
                                           policy, md5, reduced_redundancy,
                                           encrypt_key=encrypt_key)
    finally:
        fp.close()
def set_contents_from_string(self, s, headers=None, replace=True,
                             cb=None, num_cb=10, policy=None, md5=None,
                             reduced_redundancy=False,
                             encrypt_key=False):
    """
    Store an object in S3 using the name of the Key object as the
    key in S3 and the string 's' as the contents.
    See set_contents_from_file method for details about the
    parameters.

    :type s: string or unicode
    :param s: Data to store.  Unicode input is encoded as UTF-8
        before upload.

    :type headers: dict
    :param headers: Additional headers to pass along with the
        request to AWS.

    :type replace: bool
    :param replace: If True, replaces the contents of the key if
        it already exists.

    :type cb: function
    :param cb: a callback function that will be called to report
        progress on the upload.  The callback should accept two
        integer parameters: bytes transmitted so far and total size.

    :type num_cb: int
    :param num_cb: (optional) Maximum number of times the callback
        will be called during the file transfer.

    :type policy: :class:`boto.s3.acl.CannedACLStrings`
    :param policy: A canned ACL policy that will be applied to the
        new key in S3.

    :type md5: tuple
    :param md5: (hexdigest, base64) MD5 pair as returned by
        compute_md5; supplied to avoid computing the checksum twice.

    :type reduced_redundancy: bool
    :param reduced_redundancy: If True, store the new Key with the
        REDUCED_REDUNDANCY storage class.

    :type encrypt_key: bool
    :param encrypt_key: If True, the new copy of the object will be
        encrypted on the server-side by S3 and will be stored in an
        encrypted form while at rest in S3.

    :rtype: int
    :return: The number of bytes written to the key.
    """
    if isinstance(s, unicode):
        s = s.encode("utf-8")
    fp = StringIO.StringIO(s)
    try:
        # Close the in-memory buffer even if the upload raises
        # (previously fp.close() was skipped on error).
        return self.set_contents_from_file(fp, headers, replace, cb, num_cb,
                                           policy, md5, reduced_redundancy,
                                           encrypt_key=encrypt_key)
    finally:
        fp.close()
def get_file(self, fp, headers=None, cb=None, num_cb=10,
             torrent=False, version_id=None, override_num_retries=None,
             response_headers=None):
    """
    Retrieve this key's contents from S3 and stream them into ``fp``.

    :type fp: file
    :param fp: File pointer the downloaded data is written to.

    :type headers: dict
    :param headers: Headers to send when retrieving the object.

    :type cb: function
    :param cb: Progress callback accepting (bytes transferred,
        total size).

    :type num_cb: int
    :param num_cb: (optional) Maximum number of times ``cb`` is
        invoked during the transfer.

    :type torrent: bool
    :param torrent: Whether to fetch the .torrent for the object
        instead of the object itself.

    :type override_num_retries: int
    :param override_num_retries: If not None, overrides the
        configured num_retries for the underlying GET.

    :type response_headers: dict
    :param response_headers: HTTP headers/values that override the
        stored object's headers in the response.  See
        http://goo.gl/EWOPb for details.
    """
    # Plain downloads add no extra query-string fragments of their
    # own; the internal helper appends torrent/version params itself.
    forwarded = dict(headers=headers, cb=cb, num_cb=num_cb,
                     torrent=torrent, version_id=version_id,
                     override_num_retries=override_num_retries,
                     response_headers=response_headers,
                     query_args=None)
    self._get_file_internal(fp, **forwarded)
def _get_file_internal(self, fp, headers=None, cb=None, num_cb=10,
                       torrent=False, version_id=None,
                       override_num_retries=None,
                       response_headers=None, query_args=None):
    """
    Worker behind get_file(): issue the GET and stream the object's
    bytes into ``fp``, optionally reporting progress via ``cb`` and
    accumulating an MD5 of the payload (skipped for torrents).

    See get_file() for parameter documentation.  ``query_args`` is a
    list of pre-formatted query-string fragments to add to the URL.
    """
    if headers is None:
        headers = {}
    # Temporarily silence verbose HTTP debug output during the
    # (potentially chatty) streaming transfer; restored at the end.
    save_debug = self.bucket.connection.debug
    if self.bucket.connection.debug == 1:
        self.bucket.connection.debug = 0

    # Copy the caller's list: the appends below must not mutate an
    # argument the caller still owns (the old ``query_args or []``
    # appended directly into the caller's list).
    query_args = list(query_args) if query_args else []
    if torrent:
        query_args.append('torrent')
        m = None
    else:
        m = md5()
    # If a version_id is passed in, use that.  If not, check to see
    # if the Key object has an explicit version_id and, if so, use that.
    # Otherwise, don't pass a version_id query param.
    if version_id is None:
        version_id = self.version_id
    if version_id:
        query_args.append('versionId=%s' % version_id)
    if response_headers:
        for key in response_headers:
            query_args.append('%s=%s' % (
                key, urllib.quote(response_headers[key])))
    query_args = '&'.join(query_args)
    self.open('r', headers, query_args=query_args,
              override_num_retries=override_num_retries)

    data_len = 0
    if cb:
        if self.size is None:
            cb_size = 0
        else:
            cb_size = self.size
        if self.size is None and num_cb != -1:
            # If size is not available due to chunked transfer for example,
            # we'll call the cb for every 1MB of data transferred.
            cb_count = (1024 * 1024) / self.BufferSize
        elif num_cb > 1:
            cb_count = int(math.ceil(cb_size / self.BufferSize / (num_cb - 1.0)))
        elif num_cb < 0:
            cb_count = -1
        else:
            cb_count = 0
        i = 0
        cb(data_len, cb_size)
    for bytes in self:
        fp.write(bytes)
        data_len += len(bytes)
        if m:
            m.update(bytes)
        if cb:
            if cb_size > 0 and data_len >= cb_size:
                break
            i += 1
            if i == cb_count or cb_count == -1:
                cb(data_len, cb_size)
                i = 0
    # Final progress callback for any residual bytes since the last one.
    if cb and (cb_count <= 1 or i > 0) and data_len > 0:
        cb(data_len, cb_size)
    if m:
        self.md5 = m.hexdigest()
    # For chunked transfers the size is only known after the fact, and
    # only when the whole (non-torrent, non-ranged) object was read.
    if self.size is None and not torrent and "Range" not in headers:
        self.size = data_len
    self.close()
    self.bucket.connection.debug = save_debug
def get_torrent_file(self, fp, headers=None, cb=None, num_cb=10):
    """
    Download the .torrent metadata for this key into ``fp``.

    Thin wrapper around :meth:`get_file` with ``torrent=True``; see
    get_file() for the meaning of the progress-callback parameters.

    :type fp: file
    :param fp: Destination file pointer for the torrent data.

    :type headers: dict
    :param headers: Headers to be passed with the request.

    :type cb: function
    :param cb: Progress callback accepting (bytes transferred,
        total size).

    :type num_cb: int
    :param num_cb: (optional) Maximum number of callback invocations
        during the transfer.
    """
    return self.get_file(fp, headers, cb=cb, num_cb=num_cb, torrent=True)
def get_contents_to_file(self, fp, headers=None,
                         cb=None, num_cb=10,
                         torrent=False,
                         version_id=None,
                         res_download_handler=None,
                         response_headers=None):
    """
    Retrieve an object from S3 using the name of the Key object as
    the key in S3, writing the object's contents to ``fp``.

    :type fp: File-like object
    :param fp: Destination for the downloaded bytes.

    :type headers: dict
    :param headers: Additional HTTP headers sent with the GET request.

    :type cb: function
    :param cb: Progress callback accepting (bytes transferred,
        total size).

    :type num_cb: int
    :param num_cb: (optional) Maximum number of callback invocations
        during the transfer.

    :type torrent: bool
    :param torrent: If True, fetch the object's .torrent instead.

    :type res_download_handler: ResumableDownloadHandler
    :param res_download_handler: If provided, this handler performs
        the download instead of get_file().

    :type response_headers: dict
    :param response_headers: HTTP headers/values that override the
        stored object's headers in the response.  See
        http://goo.gl/EWOPb for details.
    """
    if self.bucket == None:
        # Not attached to a bucket yet: nothing to download from.
        return
    if res_download_handler:
        res_download_handler.get_file(self, fp, headers, cb, num_cb,
                                      torrent=torrent,
                                      version_id=version_id)
    else:
        self.get_file(fp, headers, cb, num_cb, torrent=torrent,
                      version_id=version_id,
                      response_headers=response_headers)
def get_contents_to_filename(self, filename, headers=None,
                             cb=None, num_cb=10,
                             torrent=False,
                             version_id=None,
                             res_download_handler=None,
                             response_headers=None):
    """
    Retrieve an object from S3 using the name of the Key object as
    the key in S3 and store its contents in the file named
    ``filename``.  On failure the partially written file is removed.
    See get_contents_to_file method for details about the parameters.

    :type filename: string
    :param filename: The filename of where to put the file contents

    :type headers: dict
    :param headers: Any additional headers to send in the request

    :type cb: function
    :param cb: Progress callback accepting (bytes transferred,
        total size).

    :type num_cb: int
    :param num_cb: (optional) Maximum number of callback invocations
        during the transfer.

    :type torrent: bool
    :param torrent: If True, fetch the object's .torrent instead.

    :type res_download_handler: ResumableDownloadHandler
    :param res_download_handler: If provided, this handler performs
        the download.

    :type response_headers: dict
    :param response_headers: HTTP headers/values that override the
        stored object's headers in the response.  See
        http://goo.gl/EWOPb for details.
    """
    fp = open(filename, 'wb')
    try:
        self.get_contents_to_file(fp, headers, cb, num_cb, torrent=torrent,
                                  version_id=version_id,
                                  res_download_handler=res_download_handler,
                                  response_headers=response_headers)
    except Exception:
        # Close before unlinking: Windows cannot remove an open file
        # (the old code removed first and closed in ``finally``).
        fp.close()
        os.remove(filename)
        raise
    fp.close()
    # If S3 reported a Last-Modified date, mirror it onto the local
    # file's access/modification timestamps (best-effort only).
    if self.last_modified is not None:
        try:
            modified_tuple = rfc822.parsedate_tz(self.last_modified)
            modified_stamp = int(rfc822.mktime_tz(modified_tuple))
            os.utime(fp.name, (modified_stamp, modified_stamp))
        except Exception:
            # An unparsable date must not fail an otherwise good download.
            pass
def get_contents_as_string(self, headers=None,
                           cb=None, num_cb=10,
                           torrent=False,
                           version_id=None,
                           response_headers=None):
    """
    Retrieve an object from S3 using the name of the Key object as
    the key in S3 and return its contents as a string.
    See get_contents_to_file method for details about the parameters.

    :type headers: dict
    :param headers: Any additional headers to send in the request

    :type cb: function
    :param cb: Progress callback accepting (bytes transferred,
        total size).

    :type num_cb: int
    :param num_cb: (optional) Maximum number of callback invocations
        during the transfer.

    :type torrent: bool
    :param torrent: If True, returns the contents of the object's
        torrent file as a string.

    :type response_headers: dict
    :param response_headers: HTTP headers/values that override the
        stored object's headers in the response.  See
        http://goo.gl/EWOPb for details.

    :rtype: string
    :returns: The contents of the file as a string
    """
    # Download into an in-memory buffer and hand back its contents.
    buf = StringIO.StringIO()
    self.get_contents_to_file(buf, headers, cb, num_cb, torrent=torrent,
                              version_id=version_id,
                              response_headers=response_headers)
    return buf.getvalue()
def add_email_grant(self, permission, email_address, headers=None):
    """
    Convenience method that adds an email grant to this key: fetch
    the current ACL, append a grant built from the parameters, and
    PUT the updated ACL back to S3.

    :type permission: string
    :param permission: The permission being granted. Should be one of:
        (READ, WRITE, READ_ACP, WRITE_ACP, FULL_CONTROL).

    :type email_address: string
    :param email_address: The email address associated with the AWS
        account you are granting the permission to.

    :type headers: dict
    :param headers: Additional headers to pass along with the request.
    """
    # NOTE: the previous docstring documented a ``recursive`` parameter
    # that this method never accepted (copied from the bucket-level
    # helper); it has been removed.
    policy = self.get_acl(headers=headers)
    policy.acl.add_email_grant(permission, email_address)
    self.set_acl(policy, headers=headers)
def add_user_grant(self, permission, user_id, headers=None,
                   display_name=None):
    """
    Add a canonical-user grant to this key's ACL.

    The current ACL is fetched, a grant for ``user_id`` is appended,
    and the updated ACL is written back to S3.

    :type permission: string
    :param permission: The permission being granted. Should be one of:
        (READ, WRITE, READ_ACP, WRITE_ACP, FULL_CONTROL).

    :type user_id: string
    :param user_id: The canonical user id of the AWS account being
        granted the permission.

    :type display_name: string
    :param display_name: An optional string containing the user's
        Display Name.  Only required on Walrus.
    """
    current_policy = self.get_acl(headers=headers)
    current_policy.acl.add_user_grant(permission, user_id,
                                      display_name=display_name)
    self.set_acl(current_policy, headers=headers)
def _normalize_metadata(self, metadata):
if type(metadata) == set:
norm_metadata = set()
for k in metadata:
norm_metadata.add(k.lower())
else:
norm_metadata = {}
for k in metadata:
norm_metadata[k.lower()] = metadata[k]
return norm_metadata
def _get_remote_metadata(self, headers=None):
"""
Extracts metadata from existing URI into a dict, so we can
overwrite/delete from it to form the new set of metadata to apply to a
key.
"""
metadata = {}
for underscore_name in self._underscore_base_user_settable_fields:
if hasattr(self, underscore_name):
value = getattr(self, underscore_name)
if value:
# Generate HTTP field name corresponding to "_" named field.
field_name = underscore_name.replace('_', '-')
metadata[field_name.lower()] = value
# self.metadata contains custom metadata, which are all user-settable.
prefix = self.provider.metadata_prefix
for underscore_name in self.metadata:
field_name = underscore_name.replace('_', '-')
metadata['%s%s' % (prefix, field_name.lower())] = (
self.metadata[underscore_name])
return metadata
def set_remote_metadata(self, metadata_plus, metadata_minus, preserve_acl,
                        headers=None):
    """
    Replace the stored object's metadata in place via a copy-to-self.

    ``metadata_plus`` entries are added/overwritten on the object's
    current metadata and ``metadata_minus`` names are removed; the
    result is applied by copying the key onto itself.

    :param metadata_plus: dict of header -> value entries to add.
    :param metadata_minus: set of header names to delete.
    :param preserve_acl: passed through to copy_key().
    """
    additions = self._normalize_metadata(metadata_plus)
    removals = self._normalize_metadata(metadata_minus)
    merged = self._get_remote_metadata()
    merged.update(additions)
    for name in removals:
        merged.pop(name, None)
    # Boto prepends the provider meta prefix when adding headers, so
    # strip any existing prefix before handing back to copy_key() to
    # avoid doubling it up.
    stripped = {}
    for name in merged:
        if name.startswith('x-goog-meta-') or name.startswith('x-amz-meta-'):
            bare = (name.replace('x-goog-meta-', '')
                    .replace('x-amz-meta-', ''))
        else:
            bare = name
        stripped[bare] = merged[name]
    self.bucket.copy_key(self.name, self.bucket.name, self.name,
                         metadata=stripped, preserve_acl=preserve_acl)
def restore(self, days, headers=None):
    """Restore an object from an archive.

    :type days: int
    :param days: The lifetime of the restored object (must
        be at least 1 day).  If the object is already restored
        then this parameter can be used to readjust the lifetime
        of the restored object.  In this case, the days
        param is with respect to the initial time of the request.
        If the object has not been restored, this param is with
        respect to the completion time of the request.
    """
    response = self.bucket.connection.make_request(
        'POST', self.bucket.name, self.name,
        data=self.RestoreBody % days,
        headers=headers, query_args='restore')
    # 200 (already restored / adjusted) and 202 (restore accepted)
    # are both success statuses; anything else is a provider error.
    if response.status in (200, 202):
        return
    provider = self.bucket.connection.provider
    raise provider.storage_response_error(response.status,
                                          response.reason,
                                          response.read())
| [
"martheeswaran.m@cognizant.com"
] | martheeswaran.m@cognizant.com |
71b4e536f4120398df1b3c0cc53820ac8108799e | 1fe692e8dcdc68231f35aebbf894696ff3ca5329 | /Python-Standard-Library/InternationalizationAndLocalization/locale_delocalize.py | 9d22f1df34a04288db62450ccb153d33199a5fe5 | [
"MIT"
] | permissive | gaufung/CodeBase | 301ec33197771a937f75ce6ffb04b61443fc8f43 | 0292b06cfe002b3ad0299e43bb51192816a02c74 | refs/heads/master | 2021-07-19T15:00:47.000077 | 2018-02-28T11:39:28 | 2018-02-28T11:39:28 | 123,276,169 | 0 | 1 | MIT | 2021-07-14T05:41:46 | 2018-02-28T11:33:58 | Jupyter Notebook | UTF-8 | Python | false | false | 487 | py | import locale
import locale
# Demo of locale.delocalize(): round-trip a number through each locale's
# grouped formatting and back to a machine-parseable numeric string.
sample_locales = [
    ('USA', 'en_US'),
    ('France', 'fr_FR'),
    ('Spain', 'es_ES'),
    ('Portugal', 'pt_PT'),
    ('Poland', 'pl_PL'),
    ('China', 'zh_CN')
]
for name, loc in sample_locales:
    # NOTE(review): assumes these locales are installed on the host;
    # setlocale() raises locale.Error otherwise.
    locale.setlocale(locale.LC_ALL, loc)
    # Format with locale-specific grouping and decimal characters...
    localized = locale.format('%0.2f', 123456.78, grouping=True)
    # ...then normalize back to a plain numeric string.
    delocalized = locale.delocalize(localized)
    print('{:>10}: {:>10} {:>10}'.format(
        name,
        localized,
        delocalized,
    ))
"gaufung@outlook.com"
] | gaufung@outlook.com |
949524b41d7cf28af18cd24696ad573eb6ac7c96 | a9deaec7a219c4fa79e801c9ee61347f643ffe44 | /example_1.py | bb175aecd9c1bba0b6162d9464fb5bfe8eef0d13 | [] | no_license | qyn6/HackMIT-2016 | 0dca0115cb213a70ba3a1e5b7ab78062c4bac7c0 | 4bbb0db8ea97bead56829bd480b029d3aef29f64 | refs/heads/master | 2021-01-21T06:21:11.411986 | 2016-09-18T14:10:14 | 2016-09-18T14:10:14 | 68,525,379 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 709 | py | import midi
# Minimal one-track MIDI file built with the third-party ``midi``
# package (Python 2 syntax: note the bare ``print`` statement below).
# Instantiate a MIDI Pattern (contains a list of tracks)
pattern = midi.Pattern()
# Instantiate a MIDI Track (contains a list of MIDI events)
track = midi.Track()
# Append the track to the pattern
pattern.append(track)
# Instantiate a MIDI note on event, append it to the track
on = midi.NoteOnEvent(tick=0, velocity=20, pitch=midi.G_3)
track.append(on)
# Instantiate a MIDI note off event, append it to the track
off = midi.NoteOffEvent(tick=100, pitch=midi.G_3)
track.append(off)
# Add the end of track event, append it to the track
eot = midi.EndOfTrackEvent(tick=1)
track.append(eot)
# Print out the pattern
print pattern
# Save the pattern to disk
# NOTE(review): the output name has a .txt extension but the data is
# binary MIDI; confirm whether "example.mid" was intended.
midi.write_midifile("example.txt", pattern)
| [
"noreply@github.com"
] | qyn6.noreply@github.com |
1293440ff6afb7f2806715d8e701f07f48c0a721 | 7e1d9d3a8a650814cbcdcce4efe667aebe933166 | /abstractionAndEncapsulation.py | bca421daa18144bc76b63c4ad25a03388612286d | [] | no_license | pragya5833/pythonAndGoovy | 390a937225af29b62c62fa25baf8a9ab510e8207 | 15b1341cdd6b4816760a55f36a7e0617a76c58f3 | refs/heads/master | 2023-07-04T03:10:32.454390 | 2021-08-05T12:50:03 | 2021-08-05T12:50:03 | 392,803,037 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,324 | py | class Library:
# display,lend,add books
def __init__(self,listOfbooks):
    """Create a library stocked with ``listOfbooks`` (list of title strings)."""
    self.listOfbooks=listOfbooks
def displayBooks(self):
    """Print every title currently held by the library."""
    for title in self.listOfbooks:
        print(title)
def lendbooks(self, requestedbook):
    """Lend ``requestedbook`` if in stock: confirm and remove it from the shelf."""
    if requestedbook not in self.listOfbooks:
        print("book unavailable")
        return
    print("you have now borrowed the book")
    self.listOfbooks.remove(requestedbook)
def addbooks(self,returnedbook):
    """Accept ``returnedbook`` back into the library's collection."""
    self.listOfbooks.append(returnedbook)
class Customer:
    """Interactive patron: prompts on stdin to request and return books."""

    def borrow(self):
        """Ask which title the customer wants and return it."""
        self.borrowing = input("Enter the book you want to borrow")
        return self.borrowing

    def returned(self):
        """Ask which title is being returned and return it."""
        self.returning = input("Name of book returning")
        return self.returning
# Interactive menu driving the Library/Customer demo.
# Seed the library with a few titles.
l1=Library(['art of the day','day to remember','subtle art'])
c1=Customer()
print("1 to display books")
print("2 to request a book")
print("3 to return a book")
print("4 to exit")
while True:
    # NOTE(review): int() raises ValueError on non-numeric input;
    # the loop does not guard against it.
    userChoice=int(input("Enter your choice"))
    if userChoice==1:
        l1.displayBooks()
    elif userChoice==2:
        borr=c1.borrow()
        l1.lendbooks(borr)
    elif userChoice==3:
        ret=c1.returned()
        l1.addbooks(ret)
    elif userChoice==4:
        # quit() is intended for interactive sessions; sys.exit() is
        # the conventional choice in scripts.
        quit()
| [
"pragyabharti@pragyas-MacBook-Air.local"
] | pragyabharti@pragyas-MacBook-Air.local |
414341dee4ca533d432b69f131ef10bbd93b47fd | 38b7b357449db4ef659e4268cbb29dd30b0b2cb1 | /exe3/assignment-3-6.py | 1bd3aa7723b1678f3524cfad5f4d38ca042e71a3 | [] | no_license | kasrafallah/python_course | b19dc5a7e0d4c8228a9f427cb18b664aeb07a46a | 4f7313dd3d9a558e1ef2e0a833d72ed3a6381366 | refs/heads/main | 2023-06-18T22:06:38.700437 | 2021-07-23T22:23:46 | 2021-07-23T22:23:46 | 388,939,545 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 397 | py | def printPascal(n):
# Body of printPascal(n): print rows 0..n-1 of Pascal's triangle;
# entry i of row ``line`` is binomialCoeff(line, i).
for line in range(0, n):
    for i in range(0, line + 1):
        print(binomialCoeff(line, i),
              " ", end="")
    # Newline terminates the current row.
    print()
def binomialCoeff(n, k):
    """Return C(n, k) via the multiplicative formula (exact integer math)."""
    # Exploit the symmetry C(n, k) == C(n, n - k) to shorten the loop.
    k = min(k, n - k)
    result = 1
    for i in range(k):
        # Multiply before dividing: each prefix product is divisible.
        result = result * (n - i) // (i + 1)
    return result
# Read the number of rows from stdin and print the triangle.
n = int(input())
printPascal(n)
| [
"noreply@github.com"
] | kasrafallah.noreply@github.com |
39452873fe562acdac01d398e0eab744b4ab98f0 | e6284222b47a3109ba7a77350196af74bd676d84 | /examples/example_date_range_list.py | f68679f84f73cd4ce1efb17a947f572fb4ef6d41 | [
"MIT"
] | permissive | jeff00seattle/pyfortified-dateutil | 33b7b2141c806697157e85d90c0df48697bc70c8 | 374ce46787e24fa2e29e6a16bb64fe0f93a545ae | refs/heads/master | 2020-03-17T10:48:04.383392 | 2018-05-15T16:30:18 | 2018-05-15T16:30:18 | 133,525,809 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 346 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
from pprintpp import pprint
import datetime as dt
import pyfortified_dateutil
# Demo of pyfortified_dateutil.date_range_list(): iterate every date
# between the two endpoints.
start_dt = dt.date(2015, 12, 20)
end_dt = dt.date(2016, 1, 11)
# Show the helper's return type before iterating it.
print(type(pyfortified_dateutil.date_range_list(start_dt, end_dt)))
for day in pyfortified_dateutil.date_range_list(start_dt, end_dt):
    pprint(day)
| [
"jeff00seattle@gmail.com"
] | jeff00seattle@gmail.com |
42c3958dbc7a37064ca7777728dee812665dc21f | 3b504a983f1807ae7c5af51078bfab8c187fc82d | /client/Helpers/html/__init__.py | 8b0c9427d27064815cfd2bd299b92cbc041d448f | [] | no_license | SEA-group/wowp_scripts | 7d35fd213db95ea6b3dbd1ec6d3e0f13de86ba58 | 2fe54a44df34f2dcaa6860a23b835dcd8dd21402 | refs/heads/master | 2021-09-07T23:10:13.706605 | 2018-03-02T17:23:48 | 2018-03-02T17:23:48 | 117,280,141 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 639 | py | # Embedded file name: scripts/client/Helpers/html/__init__.py
from debug_utils import LOG_CURRENT_EXCEPTION
from Helpers import i18n
import re
_getText_re = re.compile('\\_\\(([^)]+)\\)', re.U | re.M)
def _search(match):
    """Regex-substitution callback: localize the captured ``_(...)`` token.

    Returns the i18n string for group(1), or '' when the group is empty.
    """
    token = match.group(1)
    if token:
        return i18n.makeString(token)
    return ''
def escape(text):
return text.replace('&', '&').replace('<', '<').replace('>', '>').replace('"', '"').replace("'", ''')
def translation(text):
    """Expand every ``_(token)`` macro in ``text`` via the i18n lookup.

    On a regular-expression failure the error is logged and the input
    is returned unchanged.

    :param text: string possibly containing ``_(...)`` macros.
    :return: text with each macro replaced by its localized string.
    """
    # The old implementation used ``finally: return result``, which
    # silently swallows *every* exception (including KeyboardInterrupt)
    # raised inside the try block.  Return from the normal paths instead.
    try:
        return _getText_re.sub(_search, text)
    except re.error:
        LOG_CURRENT_EXCEPTION()
        return text
"55k@outlook.com"
] | 55k@outlook.com |
97380b01e145adc803dbf9609008fa4b30efeff5 | 55173732ce1f2537a4fd8a6137b2a813f594b250 | /azure-mgmt-web/azure/mgmt/web/operations/managed_hosting_environments_operations.py | 9aeec4531eb407ac2cae4a8fc46ef0b8a1fc5927 | [
"Apache-2.0"
] | permissive | dipple/azure-sdk-for-python | ea6e93b84bfa8f2c3e642aecdeab9329658bd27d | 9d746cb673c39bee8bd3010738c37f26ba6603a4 | refs/heads/master | 2020-02-26T15:32:39.178116 | 2016-03-01T19:25:05 | 2016-03-01T19:25:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 27,979 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft and contributors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from msrestazure.azure_operation import AzureOperationPoller
import uuid
from .. import models
class ManagedHostingEnvironmentsOperations(object):
"""ManagedHostingEnvironmentsOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An objec model deserializer.
"""
def __init__(self, client, config, serializer, deserializer):
    # Service client used to issue the HTTP requests.
    self._client = client
    # Object-model serializer for request bodies and URL parameters.
    self._serialize = serializer
    # Object-model deserializer for response bodies.
    self._deserialize = deserializer
    # Client configuration (api_version, subscription_id, ...).
    self.config = config
def get_managed_hosting_environment(
self, resource_group_name, name, custom_headers={}, raw=False, **operation_config):
"""
Get properties of a managed hosting environment.
:param resource_group_name: Name of resource group
:type resource_group_name: str
:param name: Name of managed hosting environment
:type name: str
:param dict custom_headers: headers that will be added to the request
:param boolean raw: returns the direct response alongside the
deserialized response
:rtype: ManagedHostingEnvironment
:rtype: msrest.pipeline.ClientRawResponse if raw=True
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/managedHostingEnvironments/{name}'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = url.format(**path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ManagedHostingEnvironment', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def create_or_update_managed_hosting_environment(
self, resource_group_name, name, managed_hosting_environment_envelope, custom_headers={}, raw=False, **operation_config):
"""
Create or update a managed hosting environment.
:param resource_group_name: Name of resource group
:type resource_group_name: str
:param name: Name of managed hosting environment
:type name: str
:param managed_hosting_environment_envelope: Properties of managed
hosting environment
:type managed_hosting_environment_envelope: HostingEnvironment
:param dict custom_headers: headers that will be added to the request
:param boolean raw: returns the direct response alongside the
deserialized response
:rtype: HostingEnvironment
:rtype: msrest.pipeline.ClientRawResponse if raw=True
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/managedHostingEnvironments/{name}'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = url.format(**path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(managed_hosting_environment_envelope, 'HostingEnvironment')
# Construct and send request
def long_running_send():
request = self._client.put(url, query_parameters)
return self._client.send(
request, header_parameters, body_content, **operation_config)
def get_long_running_status(status_link, headers={}):
request = self._client.get(status_link)
request.headers.update(headers)
return self._client.send(
request, header_parameters, **operation_config)
def get_long_running_output(response):
if response.status_code not in [202, 400, 404, 409]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 202:
deserialized = self._deserialize('HostingEnvironment', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
long_running_operation_timeout = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
return AzureOperationPoller(
long_running_send, get_long_running_output,
get_long_running_status, long_running_operation_timeout)
def delete_managed_hosting_environment(
self, resource_group_name, name, force_delete=None, custom_headers={}, raw=False, **operation_config):
"""
Delete a managed hosting environment.
:param resource_group_name: Name of resource group
:type resource_group_name: str
:param name: Name of managed hosting environment
:type name: str
:param force_delete: Delete even if the managed hosting environment
contains resources
:type force_delete: bool
:param dict custom_headers: headers that will be added to the request
:param boolean raw: returns the direct response alongside the
deserialized response
:rtype: object
:rtype: msrest.pipeline.ClientRawResponse if raw=True
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/managedHostingEnvironments/{name}'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = url.format(**path_format_arguments)
# Construct parameters
query_parameters = {}
if force_delete is not None:
query_parameters['forceDelete'] = self._serialize.query("force_delete", force_delete, 'bool')
query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
def long_running_send():
request = self._client.delete(url, query_parameters)
return self._client.send(request, header_parameters, **operation_config)
def get_long_running_status(status_link, headers={}):
request = self._client.get(status_link)
request.headers.update(headers)
return self._client.send(
request, header_parameters, **operation_config)
def get_long_running_output(response):
if response.status_code not in [202, 400, 404, 409]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 202:
deserialized = self._deserialize('object', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
long_running_operation_timeout = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
return AzureOperationPoller(
long_running_send, get_long_running_output,
get_long_running_status, long_running_operation_timeout)
def get_managed_hosting_environments(
self, resource_group_name, custom_headers={}, raw=False, **operation_config):
"""
Get all managed hosting environments in a resource group.
:param resource_group_name: Name of resource group
:type resource_group_name: str
:param dict custom_headers: headers that will be added to the request
:param boolean raw: returns the direct response alongside the
deserialized response
:rtype: HostingEnvironmentCollection
:rtype: msrest.pipeline.ClientRawResponse if raw=True
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/managedHostingEnvironments'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = url.format(**path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('HostingEnvironmentCollection', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_managed_hosting_environment_vips(
self, resource_group_name, name, custom_headers={}, raw=False, **operation_config):
"""
Get list of ip addresses assigned to a managed hosting environment
:param resource_group_name: Name of resource group
:type resource_group_name: str
:param name: Name of managed hosting environment
:type name: str
:param dict custom_headers: headers that will be added to the request
:param boolean raw: returns the direct response alongside the
deserialized response
:rtype: AddressResponse
:rtype: msrest.pipeline.ClientRawResponse if raw=True
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/managedHostingEnvironments/{name}/capacities/virtualip'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = url.format(**path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('AddressResponse', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_managed_hosting_environment_operation(
self, resource_group_name, name, operation_id, custom_headers={}, raw=False, **operation_config):
"""
Get status of an operation on a managed hosting environment.
:param resource_group_name: Name of resource group
:type resource_group_name: str
:param name: Name of managed hosting environment
:type name: str
:param operation_id: operation identifier GUID
:type operation_id: str
:param dict custom_headers: headers that will be added to the request
:param boolean raw: returns the direct response alongside the
deserialized response
:rtype: object
:rtype: msrest.pipeline.ClientRawResponse if raw=True
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/managedHostingEnvironments/{name}/operations/{operationId}'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'name': self._serialize.url("name", name, 'str'),
'operationId': self._serialize.url("operation_id", operation_id, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = url.format(**path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200, 202, 404, 500]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('object', response)
if response.status_code == 202:
deserialized = self._deserialize('object', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_managed_hosting_environment_sites(
self, resource_group_name, name, properties_to_include=None, custom_headers={}, raw=False, **operation_config):
"""
Get all sites on the managed hosting environment.
:param resource_group_name: Name of resource group
:type resource_group_name: str
:param name: Name of managed hosting environment
:type name: str
:param properties_to_include: Comma separated list of site properties
to include
:type properties_to_include: str
:param dict custom_headers: headers that will be added to the request
:param boolean raw: returns the direct response alongside the
deserialized response
:rtype: SiteCollection
:rtype: msrest.pipeline.ClientRawResponse if raw=True
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/managedHostingEnvironments/{name}/sites'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = url.format(**path_format_arguments)
# Construct parameters
query_parameters = {}
if properties_to_include is not None:
query_parameters['propertiesToInclude'] = self._serialize.query("properties_to_include", properties_to_include, 'str')
query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('SiteCollection', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_managed_hosting_environment_web_hosting_plans(
self, resource_group_name, name, custom_headers={}, raw=False, **operation_config):
"""
Get all serverfarms (App Service Plans) on the managed hosting
environment.
:param resource_group_name: Name of resource group
:type resource_group_name: str
:param name: Name of managed hosting environment
:type name: str
:param dict custom_headers: headers that will be added to the request
:param boolean raw: returns the direct response alongside the
deserialized response
:rtype: ServerFarmCollection
:rtype: msrest.pipeline.ClientRawResponse if raw=True
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/managedHostingEnvironments/{name}/webhostingplans'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = url.format(**path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ServerFarmCollection', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def get_managed_hosting_environment_server_farms(
self, resource_group_name, name, custom_headers={}, raw=False, **operation_config):
"""
Get all serverfarms (App Service Plans) on the managed hosting
environment.
:param resource_group_name: Name of resource group
:type resource_group_name: str
:param name: Name of managed hosting environment
:type name: str
:param dict custom_headers: headers that will be added to the request
:param boolean raw: returns the direct response alongside the
deserialized response
:rtype: ServerFarmCollection
:rtype: msrest.pipeline.ClientRawResponse if raw=True
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/managedHostingEnvironments/{name}/serverfarms'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'name': self._serialize.url("name", name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = url.format(**path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.config.api_version", self.config.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ServerFarmCollection', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
| [
"lmazuel@microsoft.com"
] | lmazuel@microsoft.com |
2b795e3874b712d22f0169eb14d4d445083b7004 | 682de6ff278f5347145956a52d5405b3e42a31bf | /Pegasus/service/auth.py | 1589144e8c6d6e2d4310c75bffd103dd08834a80 | [] | no_license | ahnitz/pegasus-wms-python3 | c53ebfa10496671ec1648540e0de329e4652aa71 | 137b262c6f0695b7179d2456b5284c578cab4de5 | refs/heads/master | 2021-06-18T17:21:29.911166 | 2020-03-12T11:51:58 | 2020-03-12T11:51:58 | 190,265,125 | 0 | 0 | null | 2021-03-20T01:06:30 | 2019-06-04T19:19:25 | Python | UTF-8 | Python | false | false | 6,462 | py | import logging
import os
import pam
from flask import Response, abort, g, make_response, request, url_for
from Pegasus import user
from Pegasus.service import app
log = logging.getLogger(__name__)
class BaseAuthentication(object):
    """Abstract base class for the authentication back ends below.

    Subclasses must implement :meth:`authenticate` and :meth:`get_user`.
    """

    def __init__(self, username, password):
        self.username = username
        self.password = password

    def authenticate(self):
        """Return True if the stored credentials are valid."""
        # NotImplementedError is the idiomatic "abstract method" signal; it is
        # still an Exception subclass, so existing broad handlers keep working.
        raise NotImplementedError("Not implemented")

    def get_user(self):
        """Return the user record for the authenticated username."""
        raise NotImplementedError("Not implemented")
class NoAuthentication(BaseAuthentication):
    """Authentication backend that accepts every request unconditionally."""

    def __init__(self, *args):
        # Credentials are irrelevant when authentication is disabled, so the
        # base-class constructor is deliberately not called.
        pass

    def authenticate(self):
        # Every caller is treated as authenticated.
        return True

    def get_user(self):
        # Prefer the username already resolved for this request; otherwise
        # fall back to the account that owns the running service process.
        if 'username' in g:
            return user.get_user_by_username(g.username)
        return user.get_user_by_uid(os.getuid())
class PAMAuthentication(BaseAuthentication):
    """Authentication backend that checks credentials against system PAM."""

    def authenticate(self):
        # Any failure inside the PAM stack is logged and reported as a
        # failed login rather than propagated to the request handler.
        try:
            result = pam.authenticate(self.username, self.password)
        except Exception as e:
            log.exception(e)
            return False
        return result

    def get_user(self):
        return user.get_user_by_username(self.username)
def basic_auth_response():
    """Build a 401 response challenging the client for HTTP Basic credentials."""
    challenge = {'WWW-Authenticate': 'Basic realm="Pegasus Service"'}
    return Response('Basic Auth Required', 401, challenge)
def is_user_an_admin(username):
    """
    Check if user is a valid admin user.

    ADMIN_USERS may be falsy (no admins), the wildcard '*' (everyone), or an
    iterable of admin usernames.

    :param username: username to check
    :returns: True if the user is configured as an administrator
    """
    admin_users = app.config['ADMIN_USERS']

    # BUGFIX: the original also tested isinstance(admin_users, unicode),
    # which raises NameError on Python 3 whenever ADMIN_USERS is not a str
    # (e.g. a list of usernames). On Python 3, str covers all text.
    if isinstance(admin_users, str):
        admin_users = admin_users.strip()

    if admin_users is None or admin_users is False or admin_users == '':
        return False
    elif admin_users == '*':
        return True
    elif hasattr(admin_users, '__iter__'):
        # NOTE(review): a plain str also has __iter__, so a non-'*' string
        # setting performs a substring test here — confirm that is intended.
        return username in admin_users
    else:
        log.error('Invalid configuration: ADMIN_USERS is invalid.')
        abort(500)
@app.url_defaults
def add_username(endpoint, values):
    """
    If the endpoint expects a variable username, then set its value to g.username.
    This is done so as not to provide g.username as a parameter to every call to url_for.
    """
    #
    # Route does not expect a value for username
    #
    if not app.url_map.is_endpoint_expecting(endpoint, 'username'):
        return

    #
    # Route expects a value for username
    #

    # Value for username has already been provided, or g.username is present
    # but empty — in either case do not inject one.
    if 'username' in values or ('username' in g and not g.username):
        return

    values['username'] = g.username
@app.url_value_preprocessor
def pull_username(endpoint, values):
    """Copy the ``username`` URL variable, when present, into ``flask.g``."""
    if not values:
        return
    if 'username' in values:
        g.username = values['username']
@app.before_request
def before():
    """Authenticate and authorize every request before it reaches a view.

    Order matters here: static files are exempted first, then the configured
    authentication backend is run, then authorization checks, and finally
    (optionally) the worker process switches uid/gid to the target user.

    NOTE(review): indentation was reconstructed from a whitespace-mangled
    source; the Pegasus-directory and master-DB sections are placed at
    function level per upstream convention — confirm against the original.
    """
    # Static files do not need to be authenticated.
    if (request.script_root + request.path).startswith(
        url_for('static', filename='')
    ):
        return

    #
    # Authentication
    #

    cred = request.authorization
    username = cred.username if cred else None
    password = cred.password if cred else None

    # The backend class is looked up by name in this module's globals
    # (e.g. "NoAuthentication" or "PAMAuthentication").
    authclass = app.config["AUTHENTICATION"]
    if authclass not in globals():
        log.error("Unknown authentication method: %s", authclass)
        return make_response("Invalid server configuration", 500)

    Authentication = globals()[authclass]
    auth = Authentication(username, password)
    if not auth.authenticate():
        log.error("Invalid login: %s", username)
        return basic_auth_response()

    try:
        g.user = auth.get_user()
    except user.NoSuchUser as e:
        log.error("No such user: %s" % username)
        return basic_auth_response()

    log.info('Authenticated user %s', g.user.username)

    # If a username is not specified in the requested URI, then set username to the logged in user?
    if 'username' not in g:
        g.username = g.user.username

    #
    # Authorization
    #

    # Root user is off limits.
    if g.username == 'root':
        log.error('Accessing root user info. is not allowed')
        # If the user has logged in as root, then ask user to login as a regular user.
        # If the non-root logged in user is attempting to access root user's data, then return 403 FORBIDDEN
        if g.user.username == 'root':
            return basic_auth_response()
        else:
            abort(403)

    user_info = g.user

    # Requesting another user's data is only allowed for administrators.
    if g.username != g.user.username:
        # Is user (g.user.username) allowed to view user (g.username) runs?
        if not is_user_an_admin(g.user.username):
            log.error(
                "User %s is accessing user %s's runs" %
                (g.user.username, g.username)
            )
            abort(403)

        # Is user a valid system user?
        try:
            user_info = user.get_user_by_username(g.username)
        except user.NoSuchUser as e:
            log.error('User %s is not a valid user' % g.username)
            abort(400)

    if app.config["PROCESS_SWITCHING"]:
        # If required, set uid and gid of handler process. Dropping privileges
        # this way requires the service itself to run as root.
        if os.getuid() != user_info.uid:
            if os.getuid() != 0:
                log.error(
                    "Pegasus service must run as root to enable process switching"
                )
                return make_response(
                    "Pegasus service must run as root to enable process switching",
                    500
                )
            # gid must be set before uid, or the setgid call would be denied.
            os.setgid(user_info.gid)
            os.setuid(user_info.uid)

    # Does the user have a Pegasus home directory?
    user_pegasus_dir = user_info.get_pegasus_dir()
    if not os.path.isdir(user_pegasus_dir):
        log.info("User's pegasus directory does not exist. Creating one...")
        try:
            os.makedirs(user_pegasus_dir, mode=0o744)
        except OSError:
            log.info(
                "Invalid Permissions: Could not create user's pegasus directory."
            )
            return make_response(
                "Could not find user's Pegasus directory", 404
            )

    # Set master DB URL for the dashboard
    # For testing master_db_url would be pre-populated, so let's not overwrite it here.
    if 'master_db_url' not in g:
        g.master_db_url = user_info.get_master_db_url()
| [
"alex.nitz@gmail.com"
] | alex.nitz@gmail.com |
3bfafd0020c815995057fce3d4d091516f9ee1d8 | 415282d3d38d2d19a8fceff6ad098e8a5cd881bd | /src/lastfm/driver.py | f85eaa0194a26e37be12b979426b33b60bd133f0 | [
"MIT"
] | permissive | lnunno/python | fc9c9cf8495eb6b6bd5335eb7a5351415efa35a9 | 8fb9e9c39621a8bc0196a5346e42d4349c8d965d | refs/heads/master | 2020-05-02T12:34:07.875386 | 2013-09-20T18:25:37 | 2013-09-20T18:25:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 790 | py | '''
Created on Aug 19, 2013
@author: lnunno
'''
from album_images import save_url, top_album_image_urls,convert_img_dir, artist_image_urls
import os
def main():
    """Download top-album cover images for a Last.fm user into ``output/``.

    Files are named with a zero-padded sequential index (0001.jpg, ...),
    keeping each image's original extension so they sort correctly.
    """
    # NOTE(review): the API key is hard-coded; it should live in config or
    # an environment variable rather than in source.
    image_links = top_album_image_urls('GrokThis', '423f4d46b119a24a89e91a7d6f945f9a')
    # enumerate(start=1) replaces the original manual counter.
    for i, link in enumerate(image_links, start=1):
        filename = str(i).zfill(4) + os.path.splitext(link)[1]
        local_path = os.path.join('output', filename)
        save_url(link, local_path)
if __name__ == '__main__':
    # Earlier entry points, kept commented out for reference:
    # main()
    # convert_img_dir('output/', ['.png','.jpg'], '.ppm','output/ppms')
    # print artist_image_urls('423f4d46b119a24a89e91a7d6f945f9a', 'Cake')

    # Current task: convert the coursework images from PGM/PPM to PNG in place.
    indir = '/home/lnunno/Dropbox/UNM/Fall13/CS522_Digital_Image_Processing/hw/hw2/images'
    convert_img_dir(indir, ['.pgm','.ppm'], '.png', indir)
convert_img_dir(indir, ['.pgm','.ppm'], '.png', indir) | [
"lucasnunno@gmail.com"
] | lucasnunno@gmail.com |
6e792582555f1ca0406a9e0a94fdc3dde54d0f15 | 215d52a4107354ef814ca2d9d2e5074b41127195 | /polyaxon/api/activitylogs/serializers.py | ceccf0085891b2d8508dcbbdea0bea79bc6e295e | [
"MIT"
] | permissive | GarnettWang/polyaxon | e13296d3df614d18061f303c7b7d5aa100cf1fff | 90d107aad132f94a0f4acfda7c58c90b20f923fc | refs/heads/master | 2020-03-23T14:43:09.591539 | 2018-07-17T23:05:03 | 2018-07-17T23:05:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 705 | py | from rest_framework import serializers
from db.models.activitylogs import ActivityLog
class ActivityLogsSerializer(serializers.ModelSerializer):
    """Serializer for activity-log entries with a derived target-object name."""

    object_name = serializers.SerializerMethodField()

    class Meta:
        model = ActivityLog
        fields = ['event_type', 'actor', 'created_at', 'object_id', 'object_name']

    def get_object_name(self, obj):
        """Return a display name for the logged object, or None if it was deleted."""
        target = obj.content_object
        # Deleted objects don't have a content object any more.
        if not target:
            return None
        # Prefer the fully-qualified unique_name, falling back to name.
        for attr in ('unique_name', 'name'):
            if hasattr(target, attr):
                return getattr(target, attr)
        return None
| [
"mouradmourafiq@gmail.com"
] | mouradmourafiq@gmail.com |
d7b91b0476a1f2e00408ce1f1501bf98d4c06e4e | 692b2bc4846fc8437969835b445756b452a7f057 | /manage.py | a1434acfb0f3447feb703e619d758e9d31e875ec | [] | no_license | chelis/events | 9f4aa9cb6d4aadd09e20f586b542932ee7c79c14 | 03376be9501f0bff7be8fa5333a2fb9c35d63f16 | refs/heads/master | 2021-08-11T04:46:49.733501 | 2016-06-10T14:17:38 | 2016-06-10T14:17:38 | 58,487,708 | 0 | 0 | null | 2021-08-03T07:42:08 | 2016-05-10T19:22:38 | JavaScript | UTF-8 | Python | false | false | 483 | py | # -*- coding: utf-8 -*-
# @Author: Marcela Campo
# @Date: 2016-05-06 18:56:47
# @Last Modified by: Marcela Campo
# @Last Modified time: 2016-05-06 19:03:21
import os
from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand
from server import app, db
# Force the development configuration for this management shell.
app.config.from_object('config.DevelopmentConfig')

# NOTE(review): the flask.ext.* import namespace used above was removed in
# Flask 1.0 — migrating to flask_script / flask_migrate imports is needed for
# modern Flask versions.
migrate = Migrate(app, db)
manager = Manager(app)

# Expose Alembic migration commands as "python manage.py db <command>".
manager.add_command('db', MigrateCommand)

if __name__ == '__main__':
    manager.run()
| [
"marcelacampo@gmail.com"
] | marcelacampo@gmail.com |
f6954f9e4c3afcbe84f45e922d7e3d83fc76d10e | 8151e38f79c379f13ed74efd57bb7cd35726b22b | /bin/jupyter-trust | 90821353f65749e54c54736e851c19ce56088388 | [
"MIT"
] | permissive | Fernando-Montes/TimeSeries | c3cdd49e815f2585bf9790ff4dbb642bda860ee5 | 45e882b33f4a7e1fed6d0de491f32cfb31278cb5 | refs/heads/master | 2020-04-23T13:05:52.572797 | 2019-02-18T01:31:01 | 2019-02-18T01:31:01 | 171,191,492 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 317 | #!/Users/fernandomontes/Dropbox/Courses/Python/Templates/timeseries/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from nbformat.sign import TrustNotebookApp
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(TrustNotebookApp.launch_instance())
| [
"fernandoamontes@gmail.com"
] | fernandoamontes@gmail.com | |
17ada3e3523df390cc3e0347c250e6841ee2a3c5 | 06476bc4cb7fc3ce378beb357fac7d5aacb87b3b | /Prototype/env/lib/python3.8/site-packages/scipy/sparse/coo.py | 1b6f078ec8e1903ca9126d72020e33f6732ff50c | [
"MIT",
"GPL-3.0-or-later",
"BSD-3-Clause",
"GPL-3.0-only",
"BSD-3-Clause-Open-MPI",
"BSD-2-Clause",
"GCC-exception-3.1",
"Python-2.0",
"Qhull",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | marc-ortuno/VOPEC | 44d3a74d3e0686474dd57fcb21e845fd5fd48897 | e7ed1f13cc1868a824f4036dd08ec6bed4266c08 | refs/heads/main | 2023-06-12T19:15:18.060897 | 2021-07-01T17:15:03 | 2021-07-01T17:15:03 | 344,433,646 | 0 | 0 | MIT | 2021-06-14T19:15:47 | 2021-03-04T10:22:05 | Python | UTF-8 | Python | false | false | 22,207 | py | """ A sparse matrix in COOrdinate or 'triplet' format"""
__docformat__ = "restructuredtext en"
__all__ = ['coo_matrix', 'isspmatrix_coo']
from warnings import warn
import numpy as np
from ._sparsetools import coo_tocsr, coo_todense, coo_matvec
from .base import isspmatrix, SparseEfficiencyWarning, spmatrix
from .data import _data_matrix, _minmax_mixin
from .sputils import (upcast, upcast_char, to_native, isshape, getdtype,
get_index_dtype, downcast_intp_index, check_shape,
check_reshape_kwargs, matrix)
import operator
class coo_matrix(_data_matrix, _minmax_mixin):
"""
A sparse matrix in COOrdinate format.
Also known as the 'ijv' or 'triplet' format.
This can be instantiated in several ways:
coo_matrix(D)
with a dense matrix D
coo_matrix(S)
with another sparse matrix S (equivalent to S.tocoo())
coo_matrix((M, N), [dtype])
to construct an empty matrix with shape (M, N)
dtype is optional, defaulting to dtype='d'.
coo_matrix((data, (i, j)), [shape=(M, N)])
to construct from three arrays:
1. data[:] the entries of the matrix, in any order
2. i[:] the row indices of the matrix entries
3. j[:] the column indices of the matrix entries
Where ``A[i[k], j[k]] = data[k]``. When shape is not
specified, it is inferred from the index arrays
Attributes
----------
dtype : dtype
Data type of the matrix
shape : 2-tuple
Shape of the matrix
ndim : int
Number of dimensions (this is always 2)
nnz
Number of stored values, including explicit zeros
data
COO format data array of the matrix
row
COO format row index array of the matrix
col
COO format column index array of the matrix
Notes
-----
Sparse matrices can be used in arithmetic operations: they support
addition, subtraction, multiplication, division, and matrix power.
Advantages of the COO format
- facilitates fast conversion among sparse formats
- permits duplicate entries (see example)
- very fast conversion to and from CSR/CSC formats
Disadvantages of the COO format
- does not directly support:
+ arithmetic operations
+ slicing
Intended Usage
- COO is a fast format for constructing sparse matrices
- Once a matrix has been constructed, convert to CSR or
CSC format for fast arithmetic and matrix vector operations
- By default when converting to CSR or CSC format, duplicate (i,j)
entries will be summed together. This facilitates efficient
construction of finite element matrices and the like. (see example)
Examples
--------
>>> # Constructing an empty matrix
>>> from scipy.sparse import coo_matrix
>>> coo_matrix((3, 4), dtype=np.int8).toarray()
array([[0, 0, 0, 0],
[0, 0, 0, 0],
[0, 0, 0, 0]], dtype=int8)
>>> # Constructing a matrix using ijv format
>>> row = np.array([0, 3, 1, 0])
>>> col = np.array([0, 3, 1, 2])
>>> data = np.array([4, 5, 7, 9])
>>> coo_matrix((data, (row, col)), shape=(4, 4)).toarray()
array([[4, 0, 9, 0],
[0, 7, 0, 0],
[0, 0, 0, 0],
[0, 0, 0, 5]])
>>> # Constructing a matrix with duplicate indices
>>> row = np.array([0, 0, 1, 3, 1, 0, 0])
>>> col = np.array([0, 2, 1, 3, 1, 0, 0])
>>> data = np.array([1, 1, 1, 1, 1, 1, 1])
>>> coo = coo_matrix((data, (row, col)), shape=(4, 4))
>>> # Duplicate indices are maintained until implicitly or explicitly summed
>>> np.max(coo.data)
1
>>> coo.toarray()
array([[3, 0, 1, 0],
[0, 2, 0, 0],
[0, 0, 0, 0],
[0, 0, 0, 1]])
"""
format = 'coo'
def __init__(self, arg1, shape=None, dtype=None, copy=False):
_data_matrix.__init__(self)
if isinstance(arg1, tuple):
if isshape(arg1):
M, N = arg1
self._shape = check_shape((M, N))
idx_dtype = get_index_dtype(maxval=max(M, N))
data_dtype = getdtype(dtype, default=float)
self.row = np.array([], dtype=idx_dtype)
self.col = np.array([], dtype=idx_dtype)
self.data = np.array([], dtype=data_dtype)
self.has_canonical_format = True
else:
try:
obj, (row, col) = arg1
except (TypeError, ValueError) as e:
raise TypeError('invalid input format') from e
if shape is None:
if len(row) == 0 or len(col) == 0:
raise ValueError('cannot infer dimensions from zero '
'sized index arrays')
M = operator.index(np.max(row)) + 1
N = operator.index(np.max(col)) + 1
self._shape = check_shape((M, N))
else:
# Use 2 steps to ensure shape has length 2.
M, N = shape
self._shape = check_shape((M, N))
idx_dtype = get_index_dtype(maxval=max(self.shape))
data_dtype = getdtype(dtype, obj, default=float)
self.row = np.array(row, copy=copy, dtype=idx_dtype)
self.col = np.array(col, copy=copy, dtype=idx_dtype)
self.data = np.array(obj, copy=copy, dtype=data_dtype)
self.has_canonical_format = False
else:
if isspmatrix(arg1):
if isspmatrix_coo(arg1) and copy:
self.row = arg1.row.copy()
self.col = arg1.col.copy()
self.data = arg1.data.copy()
self._shape = check_shape(arg1.shape)
else:
coo = arg1.tocoo()
self.row = coo.row
self.col = coo.col
self.data = coo.data
self._shape = check_shape(coo.shape)
self.has_canonical_format = False
else:
#dense argument
M = np.atleast_2d(np.asarray(arg1))
if M.ndim != 2:
raise TypeError('expected dimension <= 2 array or matrix')
self._shape = check_shape(M.shape)
if shape is not None:
if check_shape(shape) != self._shape:
raise ValueError('inconsistent shapes: %s != %s' %
(shape, self._shape))
self.row, self.col = M.nonzero()
self.data = M[self.row, self.col]
self.has_canonical_format = True
if dtype is not None:
self.data = self.data.astype(dtype, copy=False)
self._check()
def reshape(self, *args, **kwargs):
shape = check_shape(args, self.shape)
order, copy = check_reshape_kwargs(kwargs)
# Return early if reshape is not required
if shape == self.shape:
if copy:
return self.copy()
else:
return self
nrows, ncols = self.shape
if order == 'C':
# Upcast to avoid overflows: the coo_matrix constructor
# below will downcast the results to a smaller dtype, if
# possible.
dtype = get_index_dtype(maxval=(ncols * max(0, nrows - 1) + max(0, ncols - 1)))
flat_indices = np.multiply(ncols, self.row, dtype=dtype) + self.col
new_row, new_col = divmod(flat_indices, shape[1])
elif order == 'F':
dtype = get_index_dtype(maxval=(nrows * max(0, ncols - 1) + max(0, nrows - 1)))
flat_indices = np.multiply(nrows, self.col, dtype=dtype) + self.row
new_col, new_row = divmod(flat_indices, shape[0])
else:
raise ValueError("'order' must be 'C' or 'F'")
# Handle copy here rather than passing on to the constructor so that no
# copy will be made of new_row and new_col regardless
if copy:
new_data = self.data.copy()
else:
new_data = self.data
return coo_matrix((new_data, (new_row, new_col)),
shape=shape, copy=False)
reshape.__doc__ = spmatrix.reshape.__doc__
def getnnz(self, axis=None):
if axis is None:
nnz = len(self.data)
if nnz != len(self.row) or nnz != len(self.col):
raise ValueError('row, column, and data array must all be the '
'same length')
if self.data.ndim != 1 or self.row.ndim != 1 or \
self.col.ndim != 1:
raise ValueError('row, column, and data arrays must be 1-D')
return int(nnz)
if axis < 0:
axis += 2
if axis == 0:
return np.bincount(downcast_intp_index(self.col),
minlength=self.shape[1])
elif axis == 1:
return np.bincount(downcast_intp_index(self.row),
minlength=self.shape[0])
else:
raise ValueError('axis out of bounds')
getnnz.__doc__ = spmatrix.getnnz.__doc__
def _check(self):
""" Checks data structure for consistency """
# index arrays should have integer data types
if self.row.dtype.kind != 'i':
warn("row index array has non-integer dtype (%s) "
% self.row.dtype.name)
if self.col.dtype.kind != 'i':
warn("col index array has non-integer dtype (%s) "
% self.col.dtype.name)
idx_dtype = get_index_dtype(maxval=max(self.shape))
self.row = np.asarray(self.row, dtype=idx_dtype)
self.col = np.asarray(self.col, dtype=idx_dtype)
self.data = to_native(self.data)
if self.nnz > 0:
if self.row.max() >= self.shape[0]:
raise ValueError('row index exceeds matrix dimensions')
if self.col.max() >= self.shape[1]:
raise ValueError('column index exceeds matrix dimensions')
if self.row.min() < 0:
raise ValueError('negative row index found')
if self.col.min() < 0:
raise ValueError('negative column index found')
def transpose(self, axes=None, copy=False):
if axes is not None:
raise ValueError(("Sparse matrices do not support "
"an 'axes' parameter because swapping "
"dimensions is the only logical permutation."))
M, N = self.shape
return coo_matrix((self.data, (self.col, self.row)),
shape=(N, M), copy=copy)
transpose.__doc__ = spmatrix.transpose.__doc__
def resize(self, *shape):
shape = check_shape(shape)
new_M, new_N = shape
M, N = self.shape
if new_M < M or new_N < N:
mask = np.logical_and(self.row < new_M, self.col < new_N)
if not mask.all():
self.row = self.row[mask]
self.col = self.col[mask]
self.data = self.data[mask]
self._shape = shape
resize.__doc__ = spmatrix.resize.__doc__
def toarray(self, order=None, out=None):
"""See the docstring for `spmatrix.toarray`."""
B = self._process_toarray_args(order, out)
fortran = int(B.flags.f_contiguous)
if not fortran and not B.flags.c_contiguous:
raise ValueError("Output array must be C or F contiguous")
M,N = self.shape
coo_todense(M, N, self.nnz, self.row, self.col, self.data,
B.ravel('A'), fortran)
return B
def tocsc(self, copy=False):
"""Convert this matrix to Compressed Sparse Column format
Duplicate entries will be summed together.
Examples
--------
>>> from numpy import array
>>> from scipy.sparse import coo_matrix
>>> row = array([0, 0, 1, 3, 1, 0, 0])
>>> col = array([0, 2, 1, 3, 1, 0, 0])
>>> data = array([1, 1, 1, 1, 1, 1, 1])
>>> A = coo_matrix((data, (row, col)), shape=(4, 4)).tocsc()
>>> A.toarray()
array([[3, 0, 1, 0],
[0, 2, 0, 0],
[0, 0, 0, 0],
[0, 0, 0, 1]])
"""
from .csc import csc_matrix
if self.nnz == 0:
return csc_matrix(self.shape, dtype=self.dtype)
else:
M,N = self.shape
idx_dtype = get_index_dtype((self.col, self.row),
maxval=max(self.nnz, M))
row = self.row.astype(idx_dtype, copy=False)
col = self.col.astype(idx_dtype, copy=False)
indptr = np.empty(N + 1, dtype=idx_dtype)
indices = np.empty_like(row, dtype=idx_dtype)
data = np.empty_like(self.data, dtype=upcast(self.dtype))
coo_tocsr(N, M, self.nnz, col, row, self.data,
indptr, indices, data)
x = csc_matrix((data, indices, indptr), shape=self.shape)
if not self.has_canonical_format:
x.sum_duplicates()
return x
def tocsr(self, copy=False):
"""Convert this matrix to Compressed Sparse Row format
Duplicate entries will be summed together.
Examples
--------
>>> from numpy import array
>>> from scipy.sparse import coo_matrix
>>> row = array([0, 0, 1, 3, 1, 0, 0])
>>> col = array([0, 2, 1, 3, 1, 0, 0])
>>> data = array([1, 1, 1, 1, 1, 1, 1])
>>> A = coo_matrix((data, (row, col)), shape=(4, 4)).tocsr()
>>> A.toarray()
array([[3, 0, 1, 0],
[0, 2, 0, 0],
[0, 0, 0, 0],
[0, 0, 0, 1]])
"""
from .csr import csr_matrix
if self.nnz == 0:
return csr_matrix(self.shape, dtype=self.dtype)
else:
M,N = self.shape
idx_dtype = get_index_dtype((self.row, self.col),
maxval=max(self.nnz, N))
row = self.row.astype(idx_dtype, copy=False)
col = self.col.astype(idx_dtype, copy=False)
indptr = np.empty(M + 1, dtype=idx_dtype)
indices = np.empty_like(col, dtype=idx_dtype)
data = np.empty_like(self.data, dtype=upcast(self.dtype))
coo_tocsr(M, N, self.nnz, row, col, self.data,
indptr, indices, data)
x = csr_matrix((data, indices, indptr), shape=self.shape)
if not self.has_canonical_format:
x.sum_duplicates()
return x
def tocoo(self, copy=False):
    # Already in COO format: return self (or a copy) with no conversion.
    if copy:
        return self.copy()
    else:
        return self

tocoo.__doc__ = spmatrix.tocoo.__doc__
def todia(self, copy=False):
from .dia import dia_matrix
self.sum_duplicates()
ks = self.col - self.row # the diagonal for each nonzero
diags, diag_idx = np.unique(ks, return_inverse=True)
if len(diags) > 100:
# probably undesired, should todia() have a maxdiags parameter?
warn("Constructing a DIA matrix with %d diagonals "
"is inefficient" % len(diags), SparseEfficiencyWarning)
#initialize and fill in data array
if self.data.size == 0:
data = np.zeros((0, 0), dtype=self.dtype)
else:
data = np.zeros((len(diags), self.col.max()+1), dtype=self.dtype)
data[diag_idx, self.col] = self.data
return dia_matrix((data,diags), shape=self.shape)
todia.__doc__ = spmatrix.todia.__doc__
def todok(self, copy=False):
from .dok import dok_matrix
self.sum_duplicates()
dok = dok_matrix((self.shape), dtype=self.dtype)
dok._update(zip(zip(self.row,self.col),self.data))
return dok
todok.__doc__ = spmatrix.todok.__doc__
def diagonal(self, k=0):
rows, cols = self.shape
if k <= -rows or k >= cols:
return np.empty(0, dtype=self.data.dtype)
diag = np.zeros(min(rows + min(k, 0), cols - max(k, 0)),
dtype=self.dtype)
diag_mask = (self.row + k) == self.col
if self.has_canonical_format:
row = self.row[diag_mask]
data = self.data[diag_mask]
else:
row, _, data = self._sum_duplicates(self.row[diag_mask],
self.col[diag_mask],
self.data[diag_mask])
diag[row + min(k, 0)] = data
return diag
diagonal.__doc__ = _data_matrix.diagonal.__doc__
def _setdiag(self, values, k):
M, N = self.shape
if values.ndim and not len(values):
return
idx_dtype = self.row.dtype
# Determine which triples to keep and where to put the new ones.
full_keep = self.col - self.row != k
if k < 0:
max_index = min(M+k, N)
if values.ndim:
max_index = min(max_index, len(values))
keep = np.logical_or(full_keep, self.col >= max_index)
new_row = np.arange(-k, -k + max_index, dtype=idx_dtype)
new_col = np.arange(max_index, dtype=idx_dtype)
else:
max_index = min(M, N-k)
if values.ndim:
max_index = min(max_index, len(values))
keep = np.logical_or(full_keep, self.row >= max_index)
new_row = np.arange(max_index, dtype=idx_dtype)
new_col = np.arange(k, k + max_index, dtype=idx_dtype)
# Define the array of data consisting of the entries to be added.
if values.ndim:
new_data = values[:max_index]
else:
new_data = np.empty(max_index, dtype=self.dtype)
new_data[:] = values
# Update the internal structure.
self.row = np.concatenate((self.row[keep], new_row))
self.col = np.concatenate((self.col[keep], new_col))
self.data = np.concatenate((self.data[keep], new_data))
self.has_canonical_format = False
# needed by _data_matrix
def _with_data(self,data,copy=True):
    """Returns a matrix with the same sparsity structure as self,
    but with different data.  By default the index arrays
    (i.e. .row and .col) are copied.
    """
    if copy:
        # Copy the coordinate arrays so the new matrix is fully
        # independent of this one.
        return coo_matrix((data, (self.row.copy(), self.col.copy())),
                          shape=self.shape, dtype=data.dtype)
    else:
        # Share the coordinate arrays; mutations of one matrix's
        # indices would then be visible in the other.
        return coo_matrix((data, (self.row, self.col)),
                          shape=self.shape, dtype=data.dtype)
def sum_duplicates(self):
    """Eliminate duplicate matrix entries by adding them together

    This is an *in place* operation
    """
    if self.has_canonical_format:
        # Already de-duplicated (and sorted); nothing to do.
        return
    summed = self._sum_duplicates(self.row, self.col, self.data)
    self.row, self.col, self.data = summed
    self.has_canonical_format = True
def _sum_duplicates(self, row, col, data):
    """Return ``(row, col, data)`` with duplicate coordinates summed.

    Helper for :meth:`sum_duplicates`; does not modify ``self``.
    """
    # Assumes (data, row, col) not in canonical format.
    if len(data) == 0:
        return row, col, data
    # Sort entries so duplicates become adjacent. np.lexsort uses the
    # LAST key as primary, so this orders by col, then row.
    order = np.lexsort((row, col))
    row = row[order]
    col = col[order]
    data = data[order]
    # True at the first occurrence of each distinct (row, col) pair.
    unique_mask = ((row[1:] != row[:-1]) |
                   (col[1:] != col[:-1]))
    unique_mask = np.append(True, unique_mask)
    row = row[unique_mask]
    col = col[unique_mask]
    # Sum each run of duplicates in a single vectorized pass.
    unique_inds, = np.nonzero(unique_mask)
    data = np.add.reduceat(data, unique_inds, dtype=self.dtype)
    return row, col, data
def eliminate_zeros(self):
    """Remove zero entries from the matrix

    This is an *in place* operation
    """
    # Keep only the coordinates whose stored value is non-zero.
    nonzero_mask = self.data != 0
    self.row = self.row[nonzero_mask]
    self.col = self.col[nonzero_mask]
    self.data = self.data[nonzero_mask]
#######################
# Arithmetic handlers #
#######################
def _add_dense(self, other):
if other.shape != self.shape:
raise ValueError('Incompatible shapes ({} and {})'
.format(self.shape, other.shape))
dtype = upcast_char(self.dtype.char, other.dtype.char)
result = np.array(other, dtype=dtype, copy=True)
fortran = int(result.flags.f_contiguous)
M, N = self.shape
coo_todense(M, N, self.nnz, self.row, self.col, self.data,
result.ravel('A'), fortran)
return matrix(result, copy=False)
def _mul_vector(self, other):
#output array
result = np.zeros(self.shape[0], dtype=upcast_char(self.dtype.char,
other.dtype.char))
coo_matvec(self.nnz, self.row, self.col, self.data, other, result)
return result
def _mul_multivector(self, other):
result = np.zeros((other.shape[1], self.shape[0]),
dtype=upcast_char(self.dtype.char, other.dtype.char))
for i, col in enumerate(other.T):
coo_matvec(self.nnz, self.row, self.col, self.data, col, result[i])
return result.T.view(type=type(other))
def isspmatrix_coo(x):
    """Is x of coo_matrix type?

    Parameters
    ----------
    x
        object to check for being a coo matrix

    Returns
    -------
    bool
        True if x is a coo matrix, False otherwise

    Examples
    --------
    >>> from scipy.sparse import coo_matrix, isspmatrix_coo
    >>> isspmatrix_coo(coo_matrix([[5]]))
    True

    >>> from scipy.sparse import coo_matrix, csr_matrix, isspmatrix_coo
    >>> isspmatrix_coo(csr_matrix([[5]]))
    False
    """
    # isinstance also accepts subclasses of coo_matrix.
    return isinstance(x, coo_matrix)
| [
"you@example.com"
] | you@example.com |
07fefdde58f203a7265d07ddce327fb4501b8591 | ab3352c00a60b1cabe0fa5d113ee9621ca7eeec2 | /Scrapy/tutorial/tutorial/spiders/quotes_spider.py | 397fcacb051aad2937f9547d0e118c75b4be7fc6 | [] | no_license | eholowko/bootcamp-jupyter-web_scraping | 5965034f672bf48269f8adacc2c004977e582f49 | 28291f1d6cc45b42ea4c13c028e6a2a8f65f9399 | refs/heads/master | 2020-03-15T06:01:53.613467 | 2018-05-03T13:34:06 | 2018-05-03T13:34:06 | 131,998,892 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 971 | py | import scrapy
class QuotesSpider(scrapy.Spider):
    """Crawl quotes.toscrape.com and yield one item dict per quote."""

    name = "quotes"
    start_urls = [
        'http://quotes.toscrape.com/page/1/',
    ]

    def parse(self, response):
        """Extract the quotes on one listing page, then follow pagination."""
        for quote in response.css('div.quote'):
            # Each quote becomes a plain dict item (text, author, tag list).
            yield {
                'text': quote.css('span.text::text').extract_first(),
                'author': quote.css('small.author::text').extract_first(),
                'tags': quote.css('div.tags a.tag::text').extract(),
            }

        next_page = response.css('li.next a::attr(href)').extract_first()
        if next_page is not None:
            # response.follow accepts relative URLs directly, so no explicit
            # response.urljoin + scrapy.Request construction is needed.
            yield response.follow(next_page, callback=self.parse)
| [
"elwirabk@gmail"
] | elwirabk@gmail |
5f9d7b7eacbb75109638f37ebd1059148c35c96e | 84c4ed9c5accb96ec4d2a15b3c076fdf5e76c320 | /backend/api/urls.py | 0170c6528dca0d2ced1fb5903e56caa9f59589e0 | [] | no_license | langdon0003/react-in-django | bdbc22e6f3a640097cbca0dcee453a245c75c9ac | be7068cf69a120268e94d1ad69ffdad0bed2ebb1 | refs/heads/master | 2022-12-09T07:15:11.978646 | 2020-08-22T20:15:13 | 2020-08-22T20:15:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 177 | py | from rest_framework import routers
from backend.api.views import IdeaViewSet
router = routers.DefaultRouter()
router.register('ideas', IdeaViewSet)
urlpatterns = router.urls
| [
"hello@victoria.dev"
] | hello@victoria.dev |
f6c61f388cbadceaa279cf9da02ddc3f889ccb83 | ecec28b5ac91198f8e8d70aee39fd900b0cd158a | /Python Codes/parameters.py | cfc1d78cbc3fe2100cbba13e2e695dca28a87ac3 | [] | no_license | minoot7/Lane-Detection-Transfer-Learning | 1864272327966519e109922e6ba812ad64ab9ede | 639f1f70e3aa170a9f89ac8329b69cc89b856e68 | refs/heads/main | 2023-01-10T22:34:16.325873 | 2020-11-18T16:32:23 | 2020-11-18T16:32:23 | 312,137,710 | 8 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,057 | py | import numpy as np
class Parameters():
    """Static configuration for lane-detection training and inference."""
    # -- training schedule / checkpoints --
    n_epoch = 1000
    l_rate = 0.00001
    weight_decay=1e-5
    save_path = "savefile/"
    model_path = "savefile/"
    batch_size = 6
    # -- input geometry: network input and the coarse output grid --
    x_size = 512
    y_size = 256
    resize_ratio = 8
    grid_x = x_size//resize_ratio #64
    grid_y = y_size//resize_ratio #32
    feature_size = 4
    regression_size = 110
    mode = 2
    # Confidence thresholds for key-point / instance acceptance.
    threshold_point = 0.35 #0.5 #0.57 #0.64 #0.35
    threshold_instance = 0.08
    #loss function parameter
    K1 = 1.0
    K2 = 2.0
    constant_offset = 0.2
    constant_exist = 1.0
    constant_nonexist = 1.0
    constant_angle = 1.0
    constant_similarity = 1.0
    constant_attention = 0.1
    constant_alpha = 0.5
    constant_beta = 0.5
    constant_l = 1.0
    constant_lane_loss = 1.0
    constant_instance_loss = 1.0
    #data loader parameter (augmentation probabilities)
    flip_ratio=0.6
    translation_ratio=0.6
    rotate_ratio=0.6
    noise_ratio=0.6
    intensity_ratio=0.6
    shadow_ratio=0.6
    scaling_ratio=0.2
    # Index pairs swapped when an annotation is mirrored horizontally.
    flip_indices=[(0,34),(1,35),(2,36),(3,37),(4,38),(5,39),(6,40),(7,41),(8,42),(9,43),(10,44),(11,45),(12,46),(13,47),(14,48),(15,49),(16,50),(17,51)
                ,(18,52),(19,53),(20,54),(21,55),(22,56),(23,57),(24,58),(25,59),(26,60),(27,61),(28,62),(29,63),(30,64),(31,65)
                ,(32,66),(33,67),(68,68),(69,69),(70,72),(71,73)]
    # NOTE(review): absolute, machine-specific dataset paths — should be
    # externalized to a config file or environment variables.
    train_root_url="/Users/minootaghavi/Desktop/GA/tusimple-trained model/minoo/Deep Neural Networks/data/train_set/"
    test_root_url="/Users/minootaghavi/Desktop/GA/tusimple-trained model/minoo/Deep Neural Networks/data/test_set/"
    # test parameter
    # BGR colors used when drawing each detected lane instance.
    color = [(0,0,0), (255,0,0), (0,255,0),(0,0,255),(255,255,0),(255,0,255),(0,255,255),(255,255,255),(100,255,0),(100,0,255),(255,100,0),(0,100,255),(255,0,100),(0,255,100)]
    # Precomputed (x, y) coordinate of every cell in the output grid.
    grid_location = np.zeros((grid_y, grid_x, 2))
    for y in range(grid_y):
        for x in range(grid_x):
            grid_location[y][x][0] = x
            grid_location[y][x][1] = y
    # RANSAC post-processing parameters.
    num_iter = 30
    threshold_RANSAC = 0.1
    ratio_inliers = 0.1
| [
"minoo.tgh1991@gmail.com"
] | minoo.tgh1991@gmail.com |
1a213c55c3efbd558591d41b2cb2823defa46e4d | 3851d5eafcc5fd240a06a7d95a925518412cafa0 | /Django_Code/gs117/manage.py | 4ac5af2d90a1091f294d5fe1334098bb13f6c4b8 | [] | no_license | Ikshansaleem/DjangoandRest | c0fafaecde13570ffd1d5f08019e04e1212cc2f3 | 0ccc620ca609b4ab99a9efa650b5893ba65de3c5 | refs/heads/master | 2023-01-31T04:37:57.746016 | 2020-12-10T06:27:24 | 2020-12-10T06:27:24 | 320,180,735 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 646 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Run Django's command-line utility with this project's settings."""
    # Fall back to the project settings module unless the caller set one.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'gs117.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        message = (
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        )
        raise ImportError(message) from exc
    execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| [
"ikshan3108@gmail.com"
] | ikshan3108@gmail.com |
9286c7c752d3b78d0b21c51aa4fb6441c7f13b0f | 9df6873ee1c161b55209aefbea3aae093c637b4c | /api/__init__.py | ef3864351a21f0a7ac3a3012dd284290e4ee1b0b | [] | no_license | sunjieyu/test_laoke | dc5c43da8c964e8c02afd6ea64defdda3f8acd13 | 5720bdb8af65f33f426669e509055574f6b8829b | refs/heads/master | 2022-04-27T09:08:34.928748 | 2020-04-30T01:27:08 | 2020-04-30T01:27:08 | 260,084,872 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 61 | py | import apiConfig
# 初始化日志配置
apiConfig.ini_log() | [
"email@example.com"
] | email@example.com |
4359697daa111c7793ff8b1a89c137d782a065f4 | 5b05fec3d07edc740389fc35705bb8b6e35b52db | /ex6.py | d59778255efc0d0940da86a825df185c8142d14c | [] | no_license | Adhunik13/Learning-Python | cc03e21da6091fdef090613c38170307f703b923 | 06a3ba3ca6eb466e72b924f006d21a03d1fbdf9d | refs/heads/master | 2020-06-27T01:18:13.773607 | 2017-07-13T17:50:59 | 2017-07-13T17:50:59 | 97,039,837 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 358 | py | x = "there are %d types of people." % 10
binary = "binary"
not_binary = "don't"
y = "those who know %s and those who %s" %(binary,not_binary)
print x
print y
print "i said %s ." %x
print "i also said %s." %y
hilarious = False
joke = "Isn't that joke so funny? %r"
print joke % hilarious
w = "this is left side of..... "
e = "of this string "
print w+e | [
"adhunik13@yahoo.com"
] | adhunik13@yahoo.com |
9c6058f9792698c4d54840aba99ca2b319f7a185 | 5f047d6f8985457c3d4eb5624f8e4be112447976 | /Python_Course_01/Week_04/solution_c01_w04_t01_magic.py | 2170d9b73259ce15ef63492bf7e471ddca13a697 | [] | no_license | Fianketto/python_coursera | 2c81c1585dc8dc6602f5d3f962cfab36c3d58646 | 925d744cf7640a3da6a2dd27c282b8c582be7902 | refs/heads/main | 2023-05-30T23:47:51.484264 | 2021-06-27T15:59:44 | 2021-06-27T15:59:44 | 380,779,972 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,380 | py | import os
import tempfile
class File:
    """A small text-file wrapper that caches its lines for iteration.

    The wrapped file is created (empty) if it does not exist yet.
    Adding two ``File`` objects concatenates their contents into a new
    temporary file and returns a ``File`` wrapping it.
    """

    # Process-wide counter used to name temp files produced by ``+``.
    # NOTE(review): the counter restarts every run, so names may collide
    # with leftovers from a previous process (behavior kept for
    # backward compatibility).
    file_count = 0

    def __init__(self, file_path: str):
        self.file_path = file_path
        self.lines = []
        if not os.path.exists(file_path):
            # Create an empty file so subsequent reads succeed.
            with open(file_path, 'w'):
                pass
        self.update_iter_items()

    def __str__(self):
        return self.file_path

    def __add__(self, other_file):
        """Concatenate two files into a fresh file in the temp directory."""
        File.file_count += 1
        new_file_path = os.path.join(
            tempfile.gettempdir(), str(File.file_count) + "_my_file.txt")
        new_content = self.read() + other_file.read()
        new_file = File(new_file_path)
        new_file.write(new_content)
        return new_file

    def __iter__(self):
        """Iterate over the lines cached by the last read/write."""
        return iter(self.lines)

    def update_iter_items(self):
        """Refresh the cached line list from disk."""
        with open(self.file_path, 'r') as f:
            self.lines = f.readlines()

    def read(self):
        """Return the whole file contents, or "" if the file is unreadable."""
        # BUG FIX: previously caught BaseException, which also swallowed
        # KeyboardInterrupt/SystemExit; only I/O failures are expected here.
        try:
            with open(self.file_path, 'r') as f:
                return f.read()
        except OSError:
            return ""

    def write(self, new_content: str):
        """Replace the file contents; return the number of characters written."""
        with open(self.file_path, 'w') as f:
            # f.write is the idiomatic way to emit raw text
            # (clearer than print(..., file=f, end="")).
            f.write(new_content)
        self.update_iter_items()
        return len(new_content)
| [
"noreply@github.com"
] | Fianketto.noreply@github.com |
79c6385f17b6b94d8615694578fba264ece03859 | 0e355b61d9696df27fd6e5725ed602e132c83cb4 | /pname/users/tests/test_models.py | f9fcd8883fa1322132805a9305ee3484977c8ccd | [] | no_license | shahwan42/django-project | dc15adaf1170fa53358389efcac118c55fe7af84 | 3f01358d21650f720ea53eac3895103b88f524b8 | refs/heads/dev | 2023-01-22T11:48:37.767988 | 2020-11-25T21:34:30 | 2020-11-25T21:34:30 | 302,995,071 | 1 | 1 | null | 2020-11-08T02:10:52 | 2020-10-10T21:45:46 | Python | UTF-8 | Python | false | false | 1,281 | py | from django.test import TestCase
from django.contrib.auth import get_user_model
from django.core.exceptions import ValidationError
class UserTests(TestCase):
    """Behavioral checks for the custom user model's manager."""

    def test_create_user(self):
        """A regular user is active but carries no staff/superuser flags."""
        User = get_user_model()
        user = User.objects.create_user(username="new_user", password="foo")
        self.assertEqual(user.username, "new_user")
        self.assertTrue(user.is_active)
        self.assertFalse(user.is_staff)
        self.assertFalse(user.is_superuser)
        # Missing or empty credentials must be rejected.
        with self.assertRaises(TypeError):
            User.objects.create_user()
        with self.assertRaises(TypeError):
            User.objects.create_user(username="")
        with self.assertRaises(ValidationError):
            User.objects.create_user(username="", password="foo")

    def test_create_superuser(self):
        """A superuser gets both staff and superuser flags set."""
        User = get_user_model()
        admin_user = User.objects.create_superuser("new_super", "foo")
        self.assertEqual(admin_user.username, "new_super")
        self.assertTrue(admin_user.is_active)
        self.assertTrue(admin_user.is_staff)
        self.assertTrue(admin_user.is_superuser)
        # Explicitly passing is_superuser=False must be refused.
        with self.assertRaises(ValueError):
            User.objects.create_superuser(
                username="new_super", password="foo", is_superuser=False
            )
| [
"ahmed@shahwan.me"
] | ahmed@shahwan.me |
ece1f517f1f9a4ec35a11a6739994bc6335ea3a3 | d214b72b3ae340d288c683afe356de6846a9b09d | /数学/求1+2+...+n.py | 2205c92b921d0270ae1f9bd41283b6a7544fc58f | [] | no_license | Xiaoctw/LeetCode1_python | 540af6402e82b3221dad8648bbdcce44954a9832 | b2228230c90d7c91b0a40399fa631520c290b61d | refs/heads/master | 2021-08-29T15:02:37.786181 | 2021-08-22T11:12:07 | 2021-08-22T11:12:07 | 168,444,276 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 421 | py | class Solution:
def __init__(self):
self.res = 0
def sumNums(self, n: int) -> int:
'''
巧妙利用短路方法解决问题
短路可以代替一部分条件判断语句功能
:param n:
:return:
'''
n > 1 and self.sumNums(n - 1)
self.res += n
return self.res
if __name__ == '__main__':
sol=Solution()
print(sol.sumNums(30)) | [
"m18846183092@163.com"
] | m18846183092@163.com |
aa5b33beb5ea335a34540520c6ee6dc5d8f33808 | 8eb66c7d7c4f1a2fa1ea74c990269b6df809ca99 | /sight_words.py | 5ba01a31f4be889949e535919b54237093a3aa22 | [] | no_license | UYasher/mew2math | b742cf4e6538548d230ef5828be39f97eebc9991 | 5ca31ff830425a7be28d0f89dc7318ad7568e02a | refs/heads/master | 2020-08-21T17:05:17.282726 | 2019-11-05T21:24:35 | 2019-11-05T21:24:35 | 216,204,713 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,872 | py | import pyttsx3
engine = pyttsx3.init()
from problems import Problem, ProblemGenerator
import random
import pyttsx3
class SightWordProblem(Problem):
    """A spelling problem for one sight word, optionally shown in the
    context of an example sentence."""

    def __init__(self, word, sentence=None):
        self.word = word
        self.sentence = sentence

    def display_problem(self):
        """Return the spoken/printed prompt for this word."""
        if self.sentence is not None:
            return "How do you spell the word " + self.word + " in the sentence " + self.sentence + "?"
        return "How do you spell the word " + self.word + "?"

    def display_answer(self):
        """Return the answer with the word spelled out letter by letter."""
        return "It is spelled" + ' '.join(' '*1 + c for c in self.word)

    def check_answer(self, x):
        """Case-insensitive comparison of the attempt against the word."""
        return x.lower() == self.word.lower()

    def __str__(self):
        # BUG FIX: previously printed the undefined global ``word`` and
        # returned None; __str__ must return a string built from self.word.
        return "Problem: spell " + self.word
class SightWordProblemGenerator(ProblemGenerator):
    """Serves randomly chosen :class:`SightWordProblem` instances."""

    def __init__(self, problems):
        # isinstance (rather than an exact type() comparison) also
        # accepts subclasses of SightWordProblem.
        for problem in problems:
            if not isinstance(problem, SightWordProblem):
                raise TypeError(
                    "expected SightWordProblem, got %r" % type(problem))
        self.problems = problems

    def get_problem(self):
        """Return one problem drawn uniformly at random."""
        return random.choice(self.problems)
# TODO: (Suggestions from Krish)
# Make it so he can write instead of me typing it for him
# (Maybe try speech to text input also)
# Add a timer
sight_words = [
["are"],
["easy"],
["listen"],
["second"],
["they"],
["answer"],
["enough"],
["make"],
["since"],
["thing"],
["above"],
["first"],
["made"],
["sometimes"],
["usually"],
["another"],
["found"],
["more"],
["said"],
["use"],
["about"],
["float"],
["many"],
["saw"],
["very"],
["after"],
["friends"],
["new"],
["sure"],
["with"],
["again"],
["favorite"],
["nice"],
["school"],
["went"],
["before"],
["girl"],
["one"],
["small"],
["won"],
["because"],
["have"],
["our"],
["thank"],
["won’t"],
["best"],
["how"],
["other"],
["those"],
["where"],
["being"],
["hear"],
["off"],
["that’s"],
["were"],
["body"],
["house"],
["often"],
["talking"],
["wanted"],
["beautiful"],
["however"],
["outside"],
["them"],
["who"],
["brothers"],
["heard"],
["people"],
["to"],
["wrong"],
["could"],
["its"],
["phone"],
["two"],
["when"],
["can’t"],
["into"],
["pretty"],
["too"],
["what"],
["city"],
["idea"],
["piece"],
["tell"],
["will"],
["clock"],
["joke"],
["quit"],
["there"],
["write"],
["crash"],
["jump"],
["question"],
["they’re"],
["watch"],
["caught"],
["junk"],
["ride"],
["their"],
["why"],
["children"],
["knew"],
["right"],
["thought"],
["was"],
["don’t"],
["kicked"],
["rain"],
["through"],
["whole"],
["didn’t"],
["low"],
["really"],
["than"],
["we"],
["drink"],
["line"],
["sister"],
["then"],
["young"],
["eating"],
["little"]
]
problems = []
for word in sight_words:
if len(word) == 1:
new_problem = SightWordProblem(word[0])
else:
new_problem = SightWordProblem(word[0], word[1])
problems.append(new_problem)
generator = SightWordProblemGenerator(problems)
engine = pyttsx3.init()
while True:
engine.setProperty('rate', 150)
current_problem = generator.get_problem()
engine.say(current_problem.display_problem())
engine.runAndWait()
x = input("I think it is spelled: ")
if current_problem.check_answer(x):
engine.say("Correct!")
else:
engine.say("That's not quite right.")
print("The correct spelling is: " + current_problem.display_answer())
engine.say(current_problem.display_answer())
engine.setProperty('rate', 100)
engine.runAndWait() | [
"shriyash.upadhyaya@gmail.com"
] | shriyash.upadhyaya@gmail.com |
cb631c0caaebefdf3ea70fcbcc6f99968da792b9 | 39d40b7f65dd57307d9792b5ed9d410cc1f06ebc | /multithreading1.py | aee6a8d00ac1cc4a8f7540d80de85469f8445daa | [] | no_license | fanliugen/multiprocessing | 1f7131b1b21ddd31f09b14c4fd62bd9505cf7259 | a3b7bed61d18ba8b3867fcd22d7f8d6f27185495 | refs/heads/master | 2020-04-07T14:52:55.654408 | 2018-11-20T23:40:29 | 2018-11-20T23:40:29 | 158,464,820 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 491 | py | import threading
import time
def coding():
    """Worker task: announce three times that this thread is writing code."""
    for _ in range(3):
        worker = threading.current_thread()
        print("%s正在写代码" % worker)
        time.sleep(1)
def drawing():
    """Worker task: announce three times that this thread is drawing."""
    for _ in range(3):
        worker = threading.current_thread()
        print("%s正在画图" % worker)
        time.sleep(1)
def multi_thread():
    """Start the coding and drawing workers concurrently, then list live threads."""
    workers = [threading.Thread(target=coding), threading.Thread(target=drawing)]
    for worker in workers:
        worker.start()
    # Shows every thread alive at this point (main thread plus the two workers).
    print(threading.enumerate())

if __name__ == '__main__':
    multi_thread()
"fanliugen@gmail.com"
] | fanliugen@gmail.com |
b6f3593606f86f3e322e5b00c48981d9e86dad18 | 7bbe2060385e1d732bb125b881562c64618075e0 | /Python/GaeaPipeline/workflow/H_merge_vcf-v2.py | 714fe74b114470956fd083c9d880ae8f9c743814 | [] | no_license | jjmini/dmcade | 7071cefd451946de14ee3817568b8d7c6ce5bf7e | 8c621126c3b18e4d4f0daed27fe77db35e16fd31 | refs/heads/master | 2022-12-08T13:55:53.355357 | 2019-07-07T06:09:33 | 2019-07-07T06:09:33 | 195,595,960 | 0 | 0 | null | 2022-07-07T23:10:18 | 2019-07-07T00:58:48 | Java | UTF-8 | Python | false | false | 2,344 | py | # encoding: utf-8
import os
from gaeautils.bundle import bundle
from gaeautils.workflow import Workflow
__updated__ = '2018-05-31'
class merge_vcf(Workflow):
""" merge_vcf """
INIT = bundle(merge_vcf=bundle())
INIT.merge_vcf.program = "gaeatools.jar"
INIT.merge_vcf.bcftools = ""
INIT.merge_vcf.bcftools_param = "-t"
INIT.merge_vcf.parameter = ""
INIT.merge_vcf.uploadvcf = False
INIT.merge_vcf.check_param = ""
INIT.merge_vcf.bed_list = ""
def run(self, impl, dependList):
impl.log.info("step: merge_vcf!")
inputInfo = self.results[dependList[0]].output
result = bundle(output=bundle(), script=bundle())
# extend program path
self.merge_vcf.program = self.expath('merge_vcf.program')
self.merge_vcf.bcftools = self.expath('merge_vcf.bcftools', False)
# global param
hadoop_parameter = ''
if self.hadoop.get('queue'):
hadoop_parameter += ' -D mapreduce.job.queuename={} '.format(self.hadoop.queue)
ParamDict = {
"PROGRAM": "%s jar %s" % (self.hadoop.bin, self.merge_vcf.program),
"HADOOPPARAM": hadoop_parameter
}
JobParamList = []
for sampleName in inputInfo:
scriptsdir = impl.mkdir(self.gaeaScriptsDir, sampleName)
outputPath = impl.mkdir(self.option.workdir, "variation", sampleName)
result.output[sampleName] = os.path.join(outputPath, "{}.hc.vcf.gz".format(sampleName))
# global param
JobParamList.append({
"SAMPLE": sampleName,
"SCRDIR": scriptsdir,
"INPUT": inputInfo[sampleName],
"VCF": result.output[sampleName]
})
cmd = ["%s ${INPUT}/_*" % self.fs_cmd.delete,
'${PROGRAM} SortVcf ${HADOOPPARAM} -input ${INPUT} -output file://${VCF}\n']
if self.merge_vcf.bcftools:
cmd.append("%s index %s ${VCF}" % (self.merge_vcf.bcftools, self.merge_vcf.bcftools_param))
# write script
scriptPath = \
impl.write_scripts(
name='merge_vcf',
commands=cmd,
JobParamList=JobParamList,
paramDict=ParamDict)
# result
result.script.update(scriptPath)
return result
| [
"https://jjmini@github.com"
] | https://jjmini@github.com |
3caeaa43091c7f062072ca9397e23e095ecee16e | 9bb3509de1bd721237a58793df31c67415518f8b | /sql_dbase_build.py | ce157e2bb4440c69ec61ea3bd666c06cd9e1f1e6 | [] | no_license | PolMacUilliam/IABA_boxing_scoring_patterns | ec7d5c62fbd813397cd1bfadb1491628c751cd42 | baf22e763fc3f2710712ff17ef0effa67eb2c7ca | refs/heads/master | 2020-05-17T10:36:05.685598 | 2019-04-26T21:04:41 | 2019-04-26T21:04:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 26,740 | py | import sys
import datetime
from datetime import datetime
import pyodbc
import socket
import tkinter
from tkinter import filedialog
hostname=(socket.gethostname())
print(hostname)
###############################################################################################################
# FUNCTION DEFINITIONS
###############################################################################################################
def check_date(input_date):
for fmt in ('%Y-%m-%d','%d/%m/%Y','%d/%m/%y'): # ALLOWED DATE FORMATS
try:
return datetime.strptime(input_date, fmt)
except ValueError:
pass # IGNORE & DO NOTHING YET...
raise ValueError('Invalid date format found: '+str(input_date))
###############################################################################################################
###############################################################################################################
# PICK DATALOG FOLDER
###############################################################################################################
###############################################################################################################
# Get user to pick dbase folder i.e. folder holding database files to be written/inserted into sql server db
# Ask the user for the DBASE folder (the directory holding the *_dbase_table.csv
# exports that are loaded into SQL Server below).
root = tkinter.Tk()
root.withdraw()  # hide the empty root window; only the folder dialog should show
datapath=filedialog.askdirectory(title="Select DBASE folder with dbase_table files...")
root.destroy()
if not datapath:
    sys.exit("You cancelled!!!") # handle dialog cancel event
###############################################################################################################
###############################################################################################################
# CREATE CONNECTION TO SQL SERVER AND DATABASE
###############################################################################################################
###############################################################################################################
# Known machines and their local SQL Server instances; anywhere else we abort,
# because the Trusted_Connection strings below are machine-specific.
if hostname == 'PAUL-PC':
    connection_str = """Driver={SQL Server}; Server=PAUL-PC\SQL12EXPRESS; Database=master; Trusted_Connection=yes; """
elif hostname == 'Cenit-PC':
    connection_str = """Driver={SQL Server}; Server=CENIT-PC; Database=master; Trusted_Connection=yes; """
else:
    sys.exit("ERROR: Cannot connect to server!!!")
print(connection_str)
db_connection = pyodbc.connect(connection_str)
# Autocommit: every CREATE/INSERT below is committed as it executes.
db_connection.autocommit = True
db_cursor = db_connection.cursor()
# CHECK IF DATABASE ALREADY EXISTS, IF NOT CREATE IT
# CHECK IF TABLES ALREADY EXIST, IF NOT CREATE THEM
# ATTEMPT TO WRITE ALL RECORDS INTO THE DATABASE, IF RECORD ALREADY EXISTS SKIP, OTHERWISE WRITE
# NOTE(review): the triple-quoted block below is deliberately disabled; it was
# the one-off statement used to create the database and is kept for reference.
'''
sql_command = """ CREATE DATABASE ASwissTiming_WinBPM3163_IABA_Analytics """
try: db_cursor.execute(sql_command)
except pyodbc.ProgrammingError:
    print("ERROR: CREATE Database 'ASwissTiming_WinBPM314_IABA_Analytics' failed")
    sys.exit()
'''
# Switch the session to the analytics database; abort if that fails.
sql_command = """ USE ASwissTiming_WinBPM3163_IABA_Analytics """
try: db_cursor.execute(sql_command)
except pyodbc.ProgrammingError:
    print("ERROR: USE DATABASE 'ASwissTiming_WinBPM3163_IABA_Analytics' failed")
    sys.exit()
print("*"*100+"\n") # WHITESPACE
###############################################################################################################
###############################################################################################################
# CREATE SQL SERVER DATABASE TABLE "BOUTS_DBASE_TABLE"
###############################################################################################################
###############################################################################################################
# On first run create the bouts table; afterwards just load the CSV export.
if db_cursor.tables(table='bouts_dbase_table', tableType='TABLE').fetchone():
    print("\nBouts table exists already, reading dbase files...")
else:
    print("Bouts table doesn't exist yet, creating now...", end="")
    sql_command = """ CREATE TABLE bouts_dbase_table (
    pk_bout_index VARCHAR(255) PRIMARY KEY,
    competition_name VARCHAR(255),
    competition_session_num INTEGER, competition_session_name VARCHAR(255),
    competition_bout_date DATE, competition_bout_time TIME(0), competition_bout_number INTEGER,
    competition_bout_weight_class VARCHAR(255), competition_bout_weight_KG VARCHAR(255),
    competition_bout_gender VARCHAR(255), competition_bout_groupname VARCHAR(255),
    red_boxer_name VARCHAR(255), red_club VARCHAR(255),
    blue_boxer_name VARCHAR(255), blue_club VARCHAR(255),
    referee_name VARCHAR(255), referee_nat VARCHAR(255),
    judge1_name VARCHAR(255), judge1_nat VARCHAR(255),
    judge2_name VARCHAR(255), judge2_nat VARCHAR(255),
    judge3_name VARCHAR(255), judge3_nat VARCHAR(255),
    judge4_name VARCHAR(255), judge4_nat VARCHAR(255),
    judge5_name VARCHAR(255), judge5_nat VARCHAR(255),
    data_format VARCHAR(255), number_of_rounds INTEGER,
    result_text VARCHAR(255), winner VARCHAR(255) ) """
    try:
        db_cursor.execute(sql_command)
        print("Done")
    except pyodbc.ProgrammingError:
        print("\nERROR: CREATE TABLE failed")
        sys.exit()
# READ BOUTS_DBASE_TABLE FILE (semicolon-delimited export).
with open(datapath+"/bouts_dbase_table.csv","r") as bouts_dbase_table:
    bouts_dbase_table_lines_read = bouts_dbase_table.readlines()
# Row [0] holds the column headings and row [1] a dummy record -- skip both.
bouts_dbase_table_lines_read = bouts_dbase_table_lines_read[2:]
print("Number of db lines read: ",len(bouts_dbase_table_lines_read))
print("Now attempting to insert values into bouts table...", end="")
# The CSV fields appear in exactly this column order, so each parsed row can be
# bound directly as the parameter list of the INSERT statement below.
BOUTS_COLUMNS = (
    "pk_bout_index", "competition_name", "competition_session_num",
    "competition_session_name", "competition_bout_date", "competition_bout_time",
    "competition_bout_number", "competition_bout_weight_class",
    "competition_bout_weight_KG", "competition_bout_gender",
    "competition_bout_groupname", "red_boxer_name", "red_club",
    "blue_boxer_name", "blue_club", "referee_name", "referee_nat",
    "judge1_name", "judge1_nat", "judge2_name", "judge2_nat",
    "judge3_name", "judge3_nat", "judge4_name", "judge4_nat",
    "judge5_name", "judge5_nat", "data_format", "number_of_rounds",
    "result_text", "winner")
bouts_insert_sql = "INSERT into bouts_dbase_table (%s) values (%s)" % (
    ", ".join(BOUTS_COLUMNS), ", ".join("?" * len(BOUTS_COLUMNS)))
for line in bouts_dbase_table_lines_read:
    # BUGFIX: the old "'" -> "''" doubling was removed.  Values are bound as
    # pyodbc parameters, which need no SQL quoting, so the doubling corrupted
    # stored text (e.g. names such as O'Neill).  Empty cells become SQL NULLs.
    fields = [value if value != "" else None for value in line.rstrip("\n").split(";")]
    # Field [4] (competition_bout_date) is normalised to YYYY/MM/DD for the DATE column.
    if fields[4] is not None:
        fields[4] = datetime.strftime(check_date(fields[4]), '%Y/%m/%d')
    try:
        db_cursor.execute(bouts_insert_sql, fields[:len(BOUTS_COLUMNS)])
    except pyodbc.IntegrityError:
        pass  # bout already loaded on a previous run (duplicate primary key) -- skip it
    except pyodbc.ProgrammingError:
        print("ERROR: INSERT failed")
        sys.exit()
print("Done")
###############################################################################################################
###############################################################################################################
# CREATE SQL SERVER DATABASE TABLE "COMPETITION_DBASE_TABLE"
###############################################################################################################
###############################################################################################################
# On first run create the competitions table; afterwards just load the export.
if db_cursor.tables(table='competition_dbase_table', tableType='TABLE').fetchone():
    print("\nCompetition table exists already, reading dbase files...")
else:
    print("\nCompetition table doesn't exist yet, creating now...", end="")
    sql_command = """ CREATE TABLE competition_dbase_table (
    pk_competition_index VARCHAR(255) PRIMARY KEY,
    competition_name VARCHAR(255), competition_venue VARCHAR(255),
    competition_firstday DATE, competition_lastday DATE,
    competition_type VARCHAR(255) ) """
    try:
        db_cursor.execute(sql_command)
        print("Done")
        print("Now reading dbase files...")
    except pyodbc.ProgrammingError:
        print("\nERROR: CREATE TABLE failed")
        sys.exit()
# READ COMPETITION_DBASE_TABLE FILE; rows [0]/[1] are headings/dummy data.
with open(datapath+"/competition_dbase_table.csv","r") as competition_dbase_table:
    competition_dbase_table_lines_read = competition_dbase_table.readlines()
competition_dbase_table_lines_read = competition_dbase_table_lines_read[2:]
print("Number of db lines read: ",len(competition_dbase_table_lines_read))
print("Now attempting to insert values into competitions table...", end="")
for line in competition_dbase_table_lines_read:
    # BUGFIX: the old "'" -> "''" doubling was removed; pyodbc parameter
    # binding needs no SQL quoting, so it corrupted stored text.
    # Empty cells become SQL NULLs.
    fields = [value if value != "" else None for value in line.rstrip("\n").split(";")]
    # Fields [3]/[4] are first/last day: normalise to YYYY/MM/DD for DATE columns.
    for date_index in (3, 4):
        if fields[date_index] is not None:
            fields[date_index] = datetime.strftime(check_date(fields[date_index]), '%Y/%m/%d')
    # competition_type is not present in the export yet, so it is stored as NULL.
    try:
        db_cursor.execute(""" INSERT into competition_dbase_table (pk_competition_index, competition_name,\
        competition_venue, competition_firstday,competition_lastday,competition_type)\
        values (?,?,?,?,?,?)""",
                          (fields[0], fields[1], fields[2], fields[3], fields[4], None))
    except pyodbc.IntegrityError:
        pass  # competition already loaded (duplicate primary key) -- skip it
    except pyodbc.ProgrammingError:
        print("\nERROR: INSERT failed")
        sys.exit()
print("Done")
###############################################################################################################
###############################################################################################################
# CREATE SQL SERVER DATABASE TABLE "BOXERS_DBASE_TABLE"
###############################################################################################################
###############################################################################################################
# On first run create the boxers table; afterwards just load the export.
if db_cursor.tables(table='boxers_dbase_table', tableType='TABLE').fetchone():
    print("\nBoxers table exists already, reading dbase files...")
else:
    print("\nBoxers table doesn't exist yet, creating now...", end="")
    sql_command = """ CREATE TABLE boxers_dbase_table (
    pk_boxers_index VARCHAR(255) PRIMARY KEY,
    boxers_name VARCHAR(255), boxers_club VARCHAR(255),
    boxers_dob DATE ) """
    try:
        db_cursor.execute(sql_command)
        print("Done")
        print("Now reading dbase files...")
    except pyodbc.ProgrammingError:
        print("\nERROR: CREATE TABLE failed")
        sys.exit()
# READ BOXERS_DBASE_TABLE FILE; rows [0]/[1] are headings/dummy data.
with open(datapath+"/boxers_dbase_table.csv","r") as boxers_dbase_table:
    boxers_dbase_table_lines_read = boxers_dbase_table.readlines()
boxers_dbase_table_lines_read = boxers_dbase_table_lines_read[2:]
print("Number of db lines read: ",len(boxers_dbase_table_lines_read))
print("Now attempting to insert values into boxers table...", end="")
for line in boxers_dbase_table_lines_read:
    # BUGFIX: the old "'" -> "''" doubling was removed; pyodbc parameter
    # binding needs no SQL quoting, so it corrupted stored names.
    # Empty cells become SQL NULLs.
    fields = [value if value != "" else None for value in line.rstrip("\n").split(";")]
    # boxers_dob is not present in the export yet (the original read of field
    # [3] was disabled), so date of birth is stored as NULL for every boxer.
    try:
        db_cursor.execute(""" INSERT into boxers_dbase_table (pk_boxers_index, boxers_name,\
        boxers_club, boxers_dob)\
        values (?,?,?,?)""", (fields[0], fields[1], fields[2], None))
    except pyodbc.IntegrityError:
        pass  # boxer already loaded (duplicate primary key) -- skip it
    except pyodbc.ProgrammingError:
        print("\nERROR: INSERT failed")
        sys.exit()
print("Done")
###############################################################################################################
###############################################################################################################
# CREATE SQL SERVER DATABASE TABLE "OFFICIALS_DBASE_TABLE"
###############################################################################################################
###############################################################################################################
# On first run create the officials table; afterwards just load the export.
if db_cursor.tables(table='officials_dbase_table', tableType='TABLE').fetchone():
    print("\nOfficials table exists already, reading dbase files...")
else:
    print("\nOfficials table doesn't exist yet, creating now...", end="")
    sql_command = """ CREATE TABLE officials_dbase_table (
    pk_officials_index VARCHAR(255) PRIMARY KEY,
    officials_name VARCHAR(255), officials_club VARCHAR(255),
    officials_dob DATE ) """
    try:
        db_cursor.execute(sql_command)
        print("Done")
        print("Now reading dbase files...")
    except pyodbc.ProgrammingError:
        print("\nERROR: CREATE TABLE failed")
        sys.exit()
# READ OFFICIALS_DBASE_TABLE FILE; rows [0]/[1] are headings/dummy data.
with open(datapath+"/officials_dbase_table.csv","r") as officials_dbase_table:
    officials_dbase_table_lines_read = officials_dbase_table.readlines()
officials_dbase_table_lines_read = officials_dbase_table_lines_read[2:]
print("Number of db lines read: ",len(officials_dbase_table_lines_read))
print("Now attempting to insert values into officials table...", end="")
for line in officials_dbase_table_lines_read:
    # BUGFIX: the old "'" -> "''" doubling was removed; pyodbc parameter
    # binding needs no SQL quoting, so it corrupted stored names.
    # Empty cells become SQL NULLs.
    fields = [value if value != "" else None for value in line.rstrip("\n").split(";")]
    # officials_dob is not present in the export yet (the original read of
    # field [3] was disabled), so date of birth is stored as NULL throughout.
    try:
        db_cursor.execute(""" INSERT into officials_dbase_table (pk_officials_index, officials_name,\
        officials_club, officials_dob)\
        values (?,?,?,?)""", (fields[0], fields[1], fields[2], None))
    except pyodbc.IntegrityError:
        pass  # official already loaded (duplicate primary key) -- skip it
    except pyodbc.ProgrammingError:
        print("\nERROR: INSERT failed")
        sys.exit()
print("Done")
###############################################################################################################
###############################################################################################################
# CREATE SQL SERVER DATABASE TABLE "BOUTSCORES_DBASE_TABLE"
###############################################################################################################
###############################################################################################################
# On first run create the bout-scores table; afterwards just load the export.
if db_cursor.tables(table='boutscores_dbase_table', tableType='TABLE').fetchone():
    print("\nBoutscores table exists already, reading dbase files...")
else:
    print("\nBoutscores table doesn't exist yet, creating now...", end="")
    sql_command = """ CREATE TABLE boutscores_dbase_table (
    pk_boutscores_index VARCHAR(255) PRIMARY KEY, J1r1 INTEGER, J1b1 INTEGER, J2r1 INTEGER, J2b1 INTEGER, J3r1\
    INTEGER, J3b1 INTEGER, J4r1 INTEGER, J4b1 INTEGER, J5r1 INTEGER, J5b1 INTEGER, kdr1 INTEGER, kdb1 INTEGER, wr1\
    INTEGER, wb1 INTEGER, ix1r1 INTEGER, ix1b1 INTEGER, ix2r1 INTEGER, ix2b1 INTEGER, ix3r1 INTEGER, ix3b1\
    INTEGER, ix4r1 INTEGER, ix4b1 INTEGER, ix5r1 INTEGER, ix5b1 INTEGER, J1r2 INTEGER, J1b2 INTEGER, J2r2 INTEGER,\
    J2b2 INTEGER, J3r2 INTEGER, J3b2 INTEGER, J4r2 INTEGER, J4b2 INTEGER, J5r2 INTEGER, J5b2 INTEGER, kdr2\
    INTEGER, kdb2 INTEGER, wr2 INTEGER, wb2 INTEGER, ix1r2 INTEGER, ix1b2 INTEGER, ix2r2 INTEGER, ix2b2 INTEGER,\
    ix3r2 INTEGER, ix3b2 INTEGER, ix4r2 INTEGER, ix4b2 INTEGER, ix5r2 INTEGER, ix5b2 INTEGER, J1r3 INTEGER, J1b3\
    INTEGER, J2r3 INTEGER, J2b3 INTEGER, J3r3 INTEGER, J3b3 INTEGER, J4r3 INTEGER, J4b3 INTEGER, J5r3 INTEGER,\
    J5b3 INTEGER, kdr3 INTEGER, kdb3 INTEGER, wr3 INTEGER, wb3 INTEGER, ix1r3 INTEGER, ix1b3 INTEGER, ix2r3\
    INTEGER, ix2b3 INTEGER, ix3r3 INTEGER, ix3b3 INTEGER, ix4r3 INTEGER, ix4b3 INTEGER, ix5r3 INTEGER, ix5b3\
    INTEGER, J1r4 INTEGER, J1b4 INTEGER, J2r4 INTEGER, J2b4 INTEGER, J3r4 INTEGER, J3b4 INTEGER, J4r4 INTEGER,\
    J4b4 INTEGER, J5r4 INTEGER, J5b4 INTEGER, kdr4 INTEGER, kdb4 INTEGER, wr4 INTEGER, wb4 INTEGER, ix1r4 INTEGER,\
    ix1b4 INTEGER, ix2r4 INTEGER, ix2b4 INTEGER, ix3r4 INTEGER, ix3b4 INTEGER, ix4r4 INTEGER, ix4b4 INTEGER, ix5r4\
    INTEGER, ix5b4 INTEGER) """
    try:
        db_cursor.execute(sql_command)
        print("Done")
        print("Now reading dbase files...")
    except pyodbc.ProgrammingError:
        print("\nERROR: CREATE TABLE failed")
        sys.exit()
# READ BOUTSCORES_DBASE_TABLE FILE; rows [0]/[1] are headings/dummy data.
with open(datapath+"/boutscores_dbase_table.csv","r") as boutscores_dbase_table:
    boutscores_dbase_table_lines_read = boutscores_dbase_table.readlines()
boutscores_dbase_table_lines_read = boutscores_dbase_table_lines_read[2:]
print("Number of db lines read: ",len(boutscores_dbase_table_lines_read))
print("Now attempting to insert values into boutscores table...", end="")
# The CSV field order matches the table's column order: the primary key, then
# for each round 1-4 the 24 per-round columns J1r..J5b, kdr, kdb, wr, wb and
# ix1r..ix5b.  Generate that list instead of naming 97 local variables by hand.
_score_bases = ["J%d%s" % (judge, side) for judge in range(1, 6) for side in ("r", "b")]
_score_bases += ["kdr", "kdb", "wr", "wb"]
_score_bases += ["ix%d%s" % (index, side) for index in range(1, 6) for side in ("r", "b")]
boutscores_columns = ["pk_boutscores_index"] + \
    ["%s%d" % (base, round_num) for round_num in range(1, 5) for base in _score_bases]
boutscores_insert_sql = "INSERT into boutscores_dbase_table (%s) values (%s)" % (
    ", ".join(boutscores_columns), ", ".join("?" * len(boutscores_columns)))
for line in boutscores_dbase_table_lines_read:
    # Empty cells become SQL NULLs; all values are bound as pyodbc parameters.
    fields = [value if value != "" else None for value in line.rstrip("\n").split(";")]
    try:
        db_cursor.execute(boutscores_insert_sql, fields[:len(boutscores_columns)])
    except pyodbc.IntegrityError:
        pass  # scores for this bout already loaded (duplicate primary key) -- skip
    except pyodbc.ProgrammingError:
        print("\nERROR: INSERT failed")
        sys.exit()
print("Done")
###############################################################################################################
# CLOSE DATABASE CONNECTION
###############################################################################################################
# Restore default transaction behaviour, then release the cursor and connection.
db_connection.autocommit = False
db_cursor.close()
del db_cursor
db_connection.close()
| [
"noreply@github.com"
] | PolMacUilliam.noreply@github.com |
a135e2f2aae95a77d00418ce22ebc8afe0190498 | 2207901d6018c5af9a430ea0504d2312117d8071 | /25-python-paint-app.py | ebf32252bdca165869743a6aabbc0a5b475dbec9 | [] | no_license | tsanhan/learn_python | 07a6a3742e74668309a349a2d73b33ea6074eb8b | e5abb1c15479470603b01d4835003e2ff7caa94a | refs/heads/master | 2023-02-13T19:40:51.790206 | 2021-01-12T23:34:59 | 2021-01-12T23:34:59 | 319,448,623 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,275 | py | from tkinter import *
import tkinter.font
class PaintApp:
# Stores current drawing tool used
drawing_tool = "line"
# Tracks whether left mouse is down
left_but = "up"
# x and y positions for drawing with pencil
x_pos, y_pos = None, None
# Tracks x & y when the mouse is clicked and released
x1_line_pt, y1_line_pt, x2_line_pt, y2_line_pt = None, None, None, None
# ---------- CATCH MOUSE UP ----------
def left_but_down(self, event=None):
self.left_but = "down"
# Set x & y when mouse is clicked
self.x1_line_pt = event.x
self.y1_line_pt = event.y
# ---------- CATCH MOUSE UP ----------
def left_but_up(self, event=None):
self.left_but = "up"
# Reset the line
self.x_pos = None
self.y_pos = None
# Set x & y when mouse is released
self.x2_line_pt = event.x
self.y2_line_pt = event.y
# If mouse is released and line tool is selected
# draw the line
if self.drawing_tool == "line":
self.line_draw(event)
elif self.drawing_tool == "arc":
self.arc_draw(event)
elif self.drawing_tool == "oval":
self.oval_draw(event)
elif self.drawing_tool == "rectangle":
self.rectangle_draw(event)
elif self.drawing_tool == "text":
self.text_draw(event)
# ---------- CATCH MOUSE MOVEMENT ----------
def motion(self, event=None):
if self.drawing_tool == "pencil":
self.pencil_draw(event)
# ---------- DRAW PENCIL ----------
def pencil_draw(self, event=None):
if self.left_but == "down":
# Make sure x and y have a value
if self.x_pos is not None and self.y_pos is not None:
event.widget.create_line(self.x_pos, self.y_pos, event.x, event.y, smooth=TRUE)
self.x_pos = event.x
self.y_pos = event.y
# ---------- DRAW LINE ----------
def line_draw(self, event=None):
# Shortcut way to check if none of these values contain None
if None not in (self.x1_line_pt, self.y1_line_pt, self.x2_line_pt, self.y2_line_pt):
event.widget.create_line(self.x1_line_pt, self.y1_line_pt, self.x2_line_pt, self.y2_line_pt, smooth=TRUE, fill="green")
# ---------- DRAW ARC ----------
def arc_draw(self, event=None):
# Shortcut way to check if none of these values contain None
if None not in (self.x1_line_pt, self.y1_line_pt, self.x2_line_pt, self.y2_line_pt):
coords = self.x1_line_pt, self.y1_line_pt, self.x2_line_pt, self.y2_line_pt
# start : starting angle for the slice in degrees
# extent : width of the slice in degrees
# fill : fill color if needed
# style : can be ARC, PIESLICE, or CHORD
event.widget.create_arc(coords, start=0, extent=150,
style=ARC)
# ---------- DRAW OVAL ----------
def oval_draw(self, event=None):
if None not in (self.x1_line_pt, self.y1_line_pt, self.x2_line_pt, self.y2_line_pt):
# fill : Color option names are here http://wiki.tcl.tk/37701
# outline : border color
# width : width of border in pixels
event.widget.create_oval(self.x1_line_pt, self.y1_line_pt, self.x2_line_pt, self.y2_line_pt,
fill="midnight blue",
outline="yellow",
width=2)
# ---------- DRAW RECTANGLE ----------
def rectangle_draw(self, event=None):
if None not in (self.x1_line_pt, self.y1_line_pt, self.x2_line_pt, self.y2_line_pt):
# fill : Color option names are here http://wiki.tcl.tk/37701
# outline : border color
# width : width of border in pixels
event.widget.create_rectangle(self.x1_line_pt, self.y1_line_pt, self.x2_line_pt, self.y2_line_pt,
fill="midnight blue",
outline="yellow",
width=2)
# ---------- DRAW TEXT ----------
def text_draw(self, event=None):
if None not in (self.x1_line_pt, self.y1_line_pt):
# Show all fonts available
print(tkinter.font.families())
text_font = tkinter.font.Font(family='Helvetica',
size=20, weight='bold', slant='italic')
event.widget.create_text(self.x1_line_pt, self.y1_line_pt,
fill="green",
font=text_font,
text="WOW")
def __init__(self, root):
    """Create the drawing canvas inside ``root`` and wire up mouse events.

    NOTE(review): self.motion, self.left_but_down and self.left_but_up are
    defined elsewhere in this class (outside this excerpt) — confirm they
    set the x1/y1/x2/y2 point attributes the draw methods read.
    """
    drawing_area = Canvas(root)
    drawing_area.pack()
    drawing_area.bind("<Motion>", self.motion)
    drawing_area.bind("<ButtonPress-1>", self.left_but_down)
    drawing_area.bind("<ButtonRelease-1>", self.left_but_up)
# Script entry point: build the Tk root window, attach the paint
# application, and run the Tk event loop until the window is closed.
root = Tk()
paint_app = PaintApp(root)
root.mainloop()
"you@example.com"
] | you@example.com |
f4e3f15f8943d2802aa19855414293b485be8f05 | 66eb4f6725900f233659b53a7dbfced5e26bb614 | /test/functional/test_framework/mininode.py | 3814b1aad34cb33cbe042ce24649249b0a6546a4 | [
"MIT"
] | permissive | hackersdotmu/litecoincash | 9c85579e9b45c994c8f984ed6850f23b007fd0ad | 3c382d1da5867180b74998885294f9172dcce43e | refs/heads/master | 2020-03-21T01:04:13.131687 | 2018-06-19T17:22:32 | 2018-06-19T17:22:32 | 137,922,815 | 0 | 0 | null | 2018-06-19T17:19:15 | 2018-06-19T17:19:14 | null | UTF-8 | Python | false | false | 57,928 | py | #!/usr/bin/env python3
# Copyright (c) 2010 ArtForz -- public domain half-a-node
# Copyright (c) 2012 Jeff Garzik
# Copyright (c) 2010-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Bitcoin P2P network half-a-node.
This python code was modified from ArtForz' public domain half-a-node, as
found in the mini-node branch of http://github.com/jgarzik/pynode.
NodeConn: an object which manages p2p connectivity to a bitcoin node
NodeConnCB: a base class that describes the interface for receiving
callbacks with network messages from a NodeConn
CBlock, CTransaction, CBlockHeader, CTxIn, CTxOut, etc....:
data structures that should map to corresponding structures in
bitcoin/primitives
msg_block, msg_tx, msg_headers, etc.:
data structures that represent network messages
ser_*, deser_*: functions that handle serialization/deserialization
"""
import asyncore
from codecs import encode
from collections import defaultdict
import copy
import hashlib
from io import BytesIO
import logging
import random
import socket
import struct
import sys
import time
from threading import RLock, Thread
import litecoincash_scrypt
from test_framework.siphash import siphash256
from test_framework.util import hex_str_to_bytes, bytes_to_hex_str
BIP0031_VERSION = 60000
MY_VERSION = 80014 # past bip-31 for ping/pong
MY_SUBVERSION = b"/python-mininode-tester:0.0.3/"
MY_RELAY = 1 # from version 70001 onwards, fRelay should be appended to version messages (BIP37)
MAX_INV_SZ = 50000
MAX_BLOCK_BASE_SIZE = 1000000
COIN = 100000000 # 1 btc in satoshis
NODE_NETWORK = (1 << 0)
NODE_GETUTXO = (1 << 1)
NODE_BLOOM = (1 << 2)
NODE_WITNESS = (1 << 3)
NODE_UNSUPPORTED_SERVICE_BIT_5 = (1 << 5)
NODE_UNSUPPORTED_SERVICE_BIT_7 = (1 << 7)
logger = logging.getLogger("TestFramework.mininode")
# Keep our own socket map for asyncore, so that we can track disconnects
# ourselves (to workaround an issue with closing an asyncore socket when
# using select)
mininode_socket_map = dict()
# One lock for synchronizing all data access between the networking thread (see
# NetworkThread below) and the thread running the test logic. For simplicity,
# NodeConn acquires this lock whenever delivering a message to a NodeConnCB,
# and whenever adding anything to the send buffer (in send_message()). This
# lock should be acquired in the thread running the test logic to synchronize
# access to any data shared with the NodeConnCB or NodeConn.
mininode_lock = RLock()
# Serialization/deserialization tools
def sha256(s):
    """Return the SHA-256 digest of the byte string ``s``."""
    return hashlib.sha256(s).digest()
def ripemd160(s):
    """Return the RIPEMD-160 digest of ``s``.

    NOTE(review): relies on the underlying OpenSSL providing 'ripemd160';
    some newer OpenSSL builds omit it, in which case this raises ValueError.
    """
    return hashlib.new('ripemd160', s).digest()
def hash256(s):
    """Return Bitcoin's double-SHA256 of ``s``."""
    first_round = hashlib.sha256(s).digest()
    return hashlib.sha256(first_round).digest()
def ser_compact_size(l):
    """Serialize the integer ``l`` in Bitcoin's CompactSize (varint) format."""
    if l < 253:
        return struct.pack("B", l)
    if l < 0x10000:
        return struct.pack("<BH", 253, l)
    if l < 0x100000000:
        return struct.pack("<BI", 254, l)
    return struct.pack("<BQ", 255, l)
def deser_compact_size(f):
    """Read a Bitcoin CompactSize (varint) from the file-like object ``f``."""
    marker = struct.unpack("<B", f.read(1))[0]
    if marker == 253:
        return struct.unpack("<H", f.read(2))[0]
    if marker == 254:
        return struct.unpack("<I", f.read(4))[0]
    if marker == 255:
        return struct.unpack("<Q", f.read(8))[0]
    return marker
def deser_string(f):
    """Read a CompactSize-length-prefixed byte string from ``f``."""
    length = deser_compact_size(f)
    return f.read(length)
def ser_string(s):
    """Serialize ``s`` as a CompactSize length prefix followed by the raw bytes."""
    prefix = ser_compact_size(len(s))
    return prefix + s
def deser_uint256(f):
    """Read a 256-bit little-endian unsigned integer from ``f``."""
    value = 0
    for word_index, word in enumerate(struct.unpack("<IIIIIIII", f.read(32))):
        value |= word << (32 * word_index)
    return value
def ser_uint256(u):
    """Serialize ``u`` as a 256-bit little-endian integer (lowest word first)."""
    out = b""
    for shift in range(0, 256, 32):
        out += struct.pack("<I", (u >> shift) & 0xFFFFFFFF)
    return out
def uint256_from_str(s):
    """Interpret the first 32 bytes of ``s`` as a little-endian integer."""
    words = struct.unpack("<IIIIIIII", s[:32])
    return sum(word << (32 * idx) for idx, word in enumerate(words))
def uint256_from_compact(c):
    """Expand Bitcoin's 32-bit 'compact' (nBits) encoding into a full target."""
    exponent = (c >> 24) & 0xFF
    mantissa = c & 0xFFFFFF
    return mantissa << (8 * (exponent - 3))
def deser_vector(f, c):
    """Read a CompactSize-prefixed vector of ``c`` instances from ``f``."""
    count = deser_compact_size(f)
    items = []
    for _ in range(count):
        obj = c()
        obj.deserialize(f)
        items.append(obj)
    return items
# ser_function_name: Allow for an alternate serialization function on the
# entries in the vector (we use this for serializing the vector of transactions
# for a witness block).
def ser_vector(l, ser_function_name=None):
    """Serialize list ``l`` with a CompactSize count prefix.

    ``ser_function_name`` optionally names an alternate serialization method
    on each element (used e.g. to witness-serialize transactions).
    """
    out = ser_compact_size(len(l))
    for item in l:
        serializer = getattr(item, ser_function_name) if ser_function_name else item.serialize
        out += serializer()
    return out
def deser_uint256_vector(f):
    """Read a CompactSize-prefixed vector of uint256 values from ``f``."""
    count = deser_compact_size(f)
    return [deser_uint256(f) for _ in range(count)]
def ser_uint256_vector(l):
    """Serialize a list of uint256 values with a CompactSize count prefix."""
    out = ser_compact_size(len(l))
    for value in l:
        out += ser_uint256(value)
    return out
def deser_string_vector(f):
    """Read a CompactSize-prefixed vector of byte strings from ``f``."""
    count = deser_compact_size(f)
    return [deser_string(f) for _ in range(count)]
def ser_string_vector(l):
    """Serialize a list of byte strings with a CompactSize count prefix."""
    out = ser_compact_size(len(l))
    for item in l:
        out += ser_string(item)
    return out
def deser_int_vector(f):
    """Read a CompactSize-prefixed vector of signed 32-bit ints from ``f``."""
    count = deser_compact_size(f)
    return [struct.unpack("<i", f.read(4))[0] for _ in range(count)]
def ser_int_vector(l):
    """Serialize a list of signed 32-bit ints with a CompactSize count prefix."""
    out = ser_compact_size(len(l))
    for value in l:
        out += struct.pack("<i", value)
    return out
# Deserialize from a hex string representation (eg from RPC)
def FromHex(obj, hex_string):
    """Deserialize ``obj`` in place from a hex string (e.g. RPC output) and return it."""
    stream = BytesIO(hex_str_to_bytes(hex_string))
    obj.deserialize(stream)
    return obj
# Convert a binary-serializable object to hex (eg for submission via RPC)
def ToHex(obj):
    """Serialize ``obj`` and return the encoding as a hex string (e.g. for RPC)."""
    raw = obj.serialize()
    return bytes_to_hex_str(raw)
# Objects that map to bitcoind objects, which can be serialized/deserialized
class CAddress(object):
    """Network address record as carried in addr/version messages."""
    def __init__(self):
        self.nServices = 1
        # 10 zero bytes + 0xffff: the IPv4-mapped-IPv6 prefix.
        self.pchReserved = b"\x00" * 10 + b"\xff" * 2
        self.ip = "0.0.0.0"
        self.port = 0
    def deserialize(self, f):
        """Read services, reserved prefix, IPv4 address and big-endian port."""
        (self.nServices,) = struct.unpack("<Q", f.read(8))
        self.pchReserved = f.read(12)
        self.ip = socket.inet_ntoa(f.read(4))
        (self.port,) = struct.unpack(">H", f.read(2))
    def serialize(self):
        """Return the wire encoding mirroring deserialize()."""
        parts = [
            struct.pack("<Q", self.nServices),
            self.pchReserved,
            socket.inet_aton(self.ip),
            struct.pack(">H", self.port),
        ]
        return b"".join(parts)
    def __repr__(self):
        return "CAddress(nServices=%i ip=%s port=%i)" % (self.nServices,
                                                         self.ip, self.port)
MSG_WITNESS_FLAG = 1<<30
class CInv(object):
    """Inventory vector entry: an object type plus the object's hash."""
    # Human-readable names for the wire type codes (used only by __repr__).
    typemap = {
        0: "Error",
        1: "TX",
        2: "Block",
        1 | MSG_WITNESS_FLAG: "WitnessTx",
        2 | MSG_WITNESS_FLAG: "WitnessBlock",
        4: "CompactBlock"
    }
    def __init__(self, t=0, h=0):
        self.type = t
        self.hash = h
    def deserialize(self, f):
        (self.type,) = struct.unpack("<i", f.read(4))
        self.hash = deser_uint256(f)
    def serialize(self):
        return struct.pack("<i", self.type) + ser_uint256(self.hash)
    def __repr__(self):
        return "CInv(type=%s hash=%064x)" \
            % (self.typemap[self.type], self.hash)
class CBlockLocator(object):
    """Block locator: protocol version plus a list of known block hashes."""
    def __init__(self):
        self.nVersion = MY_VERSION
        self.vHave = []
    def deserialize(self, f):
        (self.nVersion,) = struct.unpack("<i", f.read(4))
        self.vHave = deser_uint256_vector(f)
    def serialize(self):
        return struct.pack("<i", self.nVersion) + ser_uint256_vector(self.vHave)
    def __repr__(self):
        return "CBlockLocator(nVersion=%i vHave=%s)" \
            % (self.nVersion, repr(self.vHave))
class COutPoint(object):
    """Reference to a transaction output: (txid, output index)."""
    def __init__(self, hash=0, n=0):
        self.hash = hash
        self.n = n
    def deserialize(self, f):
        self.hash = deser_uint256(f)
        (self.n,) = struct.unpack("<I", f.read(4))
    def serialize(self):
        return ser_uint256(self.hash) + struct.pack("<I", self.n)
    def __repr__(self):
        return "COutPoint(hash=%064x n=%i)" % (self.hash, self.n)
class CTxIn(object):
    """Transaction input: previous outpoint, scriptSig and sequence number."""
    def __init__(self, outpoint=None, scriptSig=b"", nSequence=0):
        self.prevout = COutPoint() if outpoint is None else outpoint
        self.scriptSig = scriptSig
        self.nSequence = nSequence
    def deserialize(self, f):
        self.prevout = COutPoint()
        self.prevout.deserialize(f)
        self.scriptSig = deser_string(f)
        (self.nSequence,) = struct.unpack("<I", f.read(4))
    def serialize(self):
        parts = [
            self.prevout.serialize(),
            ser_string(self.scriptSig),
            struct.pack("<I", self.nSequence),
        ]
        return b"".join(parts)
    def __repr__(self):
        return "CTxIn(prevout=%s scriptSig=%s nSequence=%i)" \
            % (repr(self.prevout), bytes_to_hex_str(self.scriptSig),
               self.nSequence)
class CTxOut(object):
    """Transaction output: value in satoshis and the locking script."""
    def __init__(self, nValue=0, scriptPubKey=b""):
        self.nValue = nValue
        self.scriptPubKey = scriptPubKey
    def deserialize(self, f):
        (self.nValue,) = struct.unpack("<q", f.read(8))
        self.scriptPubKey = deser_string(f)
    def serialize(self):
        return struct.pack("<q", self.nValue) + ser_string(self.scriptPubKey)
    def __repr__(self):
        return "CTxOut(nValue=%i.%08i scriptPubKey=%s)" \
            % (self.nValue // COIN, self.nValue % COIN,
               bytes_to_hex_str(self.scriptPubKey))
class CScriptWitness(object):
    """Witness stack for a single transaction input."""
    def __init__(self):
        # stack is a vector of strings
        self.stack = []
    def __repr__(self):
        return "CScriptWitness(%s)" % \
            (",".join([bytes_to_hex_str(x) for x in self.stack]))
    def is_null(self):
        """Return True when the witness stack is empty."""
        return not self.stack
class CTxInWitness(object):
    """Per-input witness: a thin wrapper around one CScriptWitness."""
    def __init__(self):
        self.scriptWitness = CScriptWitness()
    def deserialize(self, f):
        self.scriptWitness.stack = deser_string_vector(f)
    def serialize(self):
        return ser_string_vector(self.scriptWitness.stack)
    def __repr__(self):
        return repr(self.scriptWitness)
    def is_null(self):
        return self.scriptWitness.is_null()
class CTxWitness(object):
    """Witness data for every input of a transaction."""
    def __init__(self):
        self.vtxinwit = []
    def deserialize(self, f):
        # Caller must pre-size vtxinwit to match the transaction's vin.
        for witness in self.vtxinwit:
            witness.deserialize(f)
    def serialize(self):
        # Unlike ordinary vectors, no length prefix is written: the count
        # is implied by the transaction's vin length.
        return b"".join(witness.serialize() for witness in self.vtxinwit)
    def __repr__(self):
        return "CTxWitness(%s)" % \
            (';'.join([repr(x) for x in self.vtxinwit]))
    def is_null(self):
        """True when every per-input witness is empty."""
        return all(witness.is_null() for witness in self.vtxinwit)
class CTransaction(object):
def __init__(self, tx=None):
if tx is None:
self.nVersion = 1
self.vin = []
self.vout = []
self.wit = CTxWitness()
self.nLockTime = 0
self.sha256 = None
self.hash = None
else:
self.nVersion = tx.nVersion
self.vin = copy.deepcopy(tx.vin)
self.vout = copy.deepcopy(tx.vout)
self.nLockTime = tx.nLockTime
self.sha256 = tx.sha256
self.hash = tx.hash
self.wit = copy.deepcopy(tx.wit)
def deserialize(self, f):
self.nVersion = struct.unpack("<i", f.read(4))[0]
self.vin = deser_vector(f, CTxIn)
flags = 0
if len(self.vin) == 0:
flags = struct.unpack("<B", f.read(1))[0]
# Not sure why flags can't be zero, but this
# matches the implementation in bitcoind
if (flags != 0):
self.vin = deser_vector(f, CTxIn)
self.vout = deser_vector(f, CTxOut)
else:
self.vout = deser_vector(f, CTxOut)
if flags != 0:
self.wit.vtxinwit = [CTxInWitness() for i in range(len(self.vin))]
self.wit.deserialize(f)
self.nLockTime = struct.unpack("<I", f.read(4))[0]
self.sha256 = None
self.hash = None
def serialize_without_witness(self):
r = b""
r += struct.pack("<i", self.nVersion)
r += ser_vector(self.vin)
r += ser_vector(self.vout)
r += struct.pack("<I", self.nLockTime)
return r
# Only serialize with witness when explicitly called for
def serialize_with_witness(self):
flags = 0
if not self.wit.is_null():
flags |= 1
r = b""
r += struct.pack("<i", self.nVersion)
if flags:
dummy = []
r += ser_vector(dummy)
r += struct.pack("<B", flags)
r += ser_vector(self.vin)
r += ser_vector(self.vout)
if flags & 1:
if (len(self.wit.vtxinwit) != len(self.vin)):
# vtxinwit must have the same length as vin
self.wit.vtxinwit = self.wit.vtxinwit[:len(self.vin)]
for i in range(len(self.wit.vtxinwit), len(self.vin)):
self.wit.vtxinwit.append(CTxInWitness())
r += self.wit.serialize()
r += struct.pack("<I", self.nLockTime)
return r
# Regular serialization is without witness -- must explicitly
# call serialize_with_witness to include witness data.
def serialize(self):
return self.serialize_without_witness()
# Recalculate the txid (transaction hash without witness)
def rehash(self):
self.sha256 = None
self.calc_sha256()
# We will only cache the serialization without witness in
# self.sha256 and self.hash -- those are expected to be the txid.
def calc_sha256(self, with_witness=False):
if with_witness:
# Don't cache the result, just return it
return uint256_from_str(hash256(self.serialize_with_witness()))
if self.sha256 is None:
self.sha256 = uint256_from_str(hash256(self.serialize_without_witness()))
self.hash = encode(hash256(self.serialize())[::-1], 'hex_codec').decode('ascii')
def is_valid(self):
self.calc_sha256()
for tout in self.vout:
if tout.nValue < 0 or tout.nValue > 21000000 * COIN:
return False
return True
def __repr__(self):
return "CTransaction(nVersion=%i vin=%s vout=%s wit=%s nLockTime=%i)" \
% (self.nVersion, repr(self.vin), repr(self.vout), repr(self.wit), self.nLockTime)
class CBlockHeader(object):
    """Block header with three cached derived values:

    sha256    - header hash as an integer (the block id)
    hash      - same hash, byte-reversed, as a hex string
    scrypt256 - scrypt proof-of-work hash (Litecoin-family PoW)
    """
    def __init__(self, header=None):
        if header is None:
            self.set_null()
        else:
            # Copy constructor: duplicate fields and the cached hashes,
            # then ensure the caches are populated.
            self.nVersion = header.nVersion
            self.hashPrevBlock = header.hashPrevBlock
            self.hashMerkleRoot = header.hashMerkleRoot
            self.nTime = header.nTime
            self.nBits = header.nBits
            self.nNonce = header.nNonce
            self.sha256 = header.sha256
            self.hash = header.hash
            self.scrypt256 = header.scrypt256
            self.calc_sha256()
    def set_null(self):
        """Reset all fields to defaults and clear the cached hashes."""
        self.nVersion = 1
        self.hashPrevBlock = 0
        self.hashMerkleRoot = 0
        self.nTime = 0
        self.nBits = 0
        self.nNonce = 0
        self.sha256 = None
        self.hash = None
        self.scrypt256 = None
    def deserialize(self, f):
        """Read the 80-byte header from ``f`` and invalidate the caches."""
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        self.hashPrevBlock = deser_uint256(f)
        self.hashMerkleRoot = deser_uint256(f)
        self.nTime = struct.unpack("<I", f.read(4))[0]
        self.nBits = struct.unpack("<I", f.read(4))[0]
        self.nNonce = struct.unpack("<I", f.read(4))[0]
        self.sha256 = None
        self.hash = None
        self.scrypt256 = None
    def serialize(self):
        r = b""
        r += struct.pack("<i", self.nVersion)
        r += ser_uint256(self.hashPrevBlock)
        r += ser_uint256(self.hashMerkleRoot)
        r += struct.pack("<I", self.nTime)
        r += struct.pack("<I", self.nBits)
        r += struct.pack("<I", self.nNonce)
        return r
    def calc_sha256(self):
        """Populate sha256 / hash / scrypt256 from the header bytes.

        No-op when sha256 is already cached.
        """
        if self.sha256 is None:
            r = b""
            r += struct.pack("<i", self.nVersion)
            r += ser_uint256(self.hashPrevBlock)
            r += ser_uint256(self.hashMerkleRoot)
            r += struct.pack("<I", self.nTime)
            r += struct.pack("<I", self.nBits)
            r += struct.pack("<I", self.nNonce)
            self.sha256 = uint256_from_str(hash256(r))
            self.hash = encode(hash256(r)[::-1], 'hex_codec').decode('ascii')
            self.scrypt256 = uint256_from_str(litecoincash_scrypt.getPoWHash(r))
    def rehash(self):
        """Force recomputation of the cached hashes; return the new sha256."""
        self.sha256 = None
        self.scrypt256 = None
        self.calc_sha256()
        return self.sha256
    def __repr__(self):
        return "CBlockHeader(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x)" \
            % (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot,
               time.ctime(self.nTime), self.nBits, self.nNonce)
class CBlock(CBlockHeader):
def __init__(self, header=None):
super(CBlock, self).__init__(header)
self.vtx = []
def deserialize(self, f):
super(CBlock, self).deserialize(f)
self.vtx = deser_vector(f, CTransaction)
def serialize(self, with_witness=False):
r = b""
r += super(CBlock, self).serialize()
if with_witness:
r += ser_vector(self.vtx, "serialize_with_witness")
else:
r += ser_vector(self.vtx)
return r
# Calculate the merkle root given a vector of transaction hashes
@classmethod
def get_merkle_root(cls, hashes):
while len(hashes) > 1:
newhashes = []
for i in range(0, len(hashes), 2):
i2 = min(i+1, len(hashes)-1)
newhashes.append(hash256(hashes[i] + hashes[i2]))
hashes = newhashes
return uint256_from_str(hashes[0])
def calc_merkle_root(self):
hashes = []
for tx in self.vtx:
tx.calc_sha256()
hashes.append(ser_uint256(tx.sha256))
return self.get_merkle_root(hashes)
def calc_witness_merkle_root(self):
# For witness root purposes, the hash of the
# coinbase, with witness, is defined to be 0...0
hashes = [ser_uint256(0)]
for tx in self.vtx[1:]:
# Calculate the hashes with witness data
hashes.append(ser_uint256(tx.calc_sha256(True)))
return self.get_merkle_root(hashes)
def is_valid(self):
self.calc_sha256()
target = uint256_from_compact(self.nBits)
if self.scrypt256 > target:
return False
for tx in self.vtx:
if not tx.is_valid():
return False
if self.calc_merkle_root() != self.hashMerkleRoot:
return False
return True
def solve(self):
self.rehash()
target = uint256_from_compact(self.nBits)
while self.scrypt256 > target:
self.nNonce += 1
self.rehash()
def __repr__(self):
return "CBlock(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x vtx=%s)" \
% (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot,
time.ctime(self.nTime), self.nBits, self.nNonce, repr(self.vtx))
class CUnsignedAlert(object):
def __init__(self):
self.nVersion = 1
self.nRelayUntil = 0
self.nExpiration = 0
self.nID = 0
self.nCancel = 0
self.setCancel = []
self.nMinVer = 0
self.nMaxVer = 0
self.setSubVer = []
self.nPriority = 0
self.strComment = b""
self.strStatusBar = b""
self.strReserved = b""
def deserialize(self, f):
self.nVersion = struct.unpack("<i", f.read(4))[0]
self.nRelayUntil = struct.unpack("<q", f.read(8))[0]
self.nExpiration = struct.unpack("<q", f.read(8))[0]
self.nID = struct.unpack("<i", f.read(4))[0]
self.nCancel = struct.unpack("<i", f.read(4))[0]
self.setCancel = deser_int_vector(f)
self.nMinVer = struct.unpack("<i", f.read(4))[0]
self.nMaxVer = struct.unpack("<i", f.read(4))[0]
self.setSubVer = deser_string_vector(f)
self.nPriority = struct.unpack("<i", f.read(4))[0]
self.strComment = deser_string(f)
self.strStatusBar = deser_string(f)
self.strReserved = deser_string(f)
def serialize(self):
r = b""
r += struct.pack("<i", self.nVersion)
r += struct.pack("<q", self.nRelayUntil)
r += struct.pack("<q", self.nExpiration)
r += struct.pack("<i", self.nID)
r += struct.pack("<i", self.nCancel)
r += ser_int_vector(self.setCancel)
r += struct.pack("<i", self.nMinVer)
r += struct.pack("<i", self.nMaxVer)
r += ser_string_vector(self.setSubVer)
r += struct.pack("<i", self.nPriority)
r += ser_string(self.strComment)
r += ser_string(self.strStatusBar)
r += ser_string(self.strReserved)
return r
def __repr__(self):
return "CUnsignedAlert(nVersion %d, nRelayUntil %d, nExpiration %d, nID %d, nCancel %d, nMinVer %d, nMaxVer %d, nPriority %d, strComment %s, strStatusBar %s, strReserved %s)" \
% (self.nVersion, self.nRelayUntil, self.nExpiration, self.nID,
self.nCancel, self.nMinVer, self.nMaxVer, self.nPriority,
self.strComment, self.strStatusBar, self.strReserved)
class CAlert(object):
    """Signed alert: serialized CUnsignedAlert payload plus signature blob."""
    def __init__(self):
        self.vchMsg = b""
        self.vchSig = b""
    def deserialize(self, f):
        self.vchMsg = deser_string(f)
        self.vchSig = deser_string(f)
    def serialize(self):
        return ser_string(self.vchMsg) + ser_string(self.vchSig)
    def __repr__(self):
        return "CAlert(vchMsg.sz %d, vchSig.sz %d)" \
            % (len(self.vchMsg), len(self.vchSig))
class PrefilledTransaction(object):
    """BIP152: a transaction sent in full inside a compact block, together
    with its (differentially encoded, on the wire) index."""
    def __init__(self, index=0, tx = None):
        self.index = index
        self.tx = tx
    def deserialize(self, f):
        self.index = deser_compact_size(f)
        self.tx = CTransaction()
        self.tx.deserialize(f)
    def serialize(self, with_witness=False):
        r = b""
        r += ser_compact_size(self.index)
        if with_witness:
            r += self.tx.serialize_with_witness()
        else:
            r += self.tx.serialize_without_witness()
        return r
    def serialize_with_witness(self):
        # Convenience wrapper so this can be used with ser_vector's
        # ser_function_name mechanism.
        return self.serialize(with_witness=True)
    def __repr__(self):
        return "PrefilledTransaction(index=%d, tx=%s)" % (self.index, repr(self.tx))
# This is what we send on the wire, in a cmpctblock message.
class P2PHeaderAndShortIDs(object):
def __init__(self):
self.header = CBlockHeader()
self.nonce = 0
self.shortids_length = 0
self.shortids = []
self.prefilled_txn_length = 0
self.prefilled_txn = []
def deserialize(self, f):
self.header.deserialize(f)
self.nonce = struct.unpack("<Q", f.read(8))[0]
self.shortids_length = deser_compact_size(f)
for i in range(self.shortids_length):
# shortids are defined to be 6 bytes in the spec, so append
# two zero bytes and read it in as an 8-byte number
self.shortids.append(struct.unpack("<Q", f.read(6) + b'\x00\x00')[0])
self.prefilled_txn = deser_vector(f, PrefilledTransaction)
self.prefilled_txn_length = len(self.prefilled_txn)
# When using version 2 compact blocks, we must serialize with_witness.
def serialize(self, with_witness=False):
r = b""
r += self.header.serialize()
r += struct.pack("<Q", self.nonce)
r += ser_compact_size(self.shortids_length)
for x in self.shortids:
# We only want the first 6 bytes
r += struct.pack("<Q", x)[0:6]
if with_witness:
r += ser_vector(self.prefilled_txn, "serialize_with_witness")
else:
r += ser_vector(self.prefilled_txn)
return r
def __repr__(self):
return "P2PHeaderAndShortIDs(header=%s, nonce=%d, shortids_length=%d, shortids=%s, prefilled_txn_length=%d, prefilledtxn=%s" % (repr(self.header), self.nonce, self.shortids_length, repr(self.shortids), self.prefilled_txn_length, repr(self.prefilled_txn))
# P2P version of the above that will use witness serialization (for compact
# block version 2)
class P2PHeaderAndShortWitnessIDs(P2PHeaderAndShortIDs):
    """Version-2 compact block wire format: prefilled txs carry witness data."""
    def serialize(self):
        return super(P2PHeaderAndShortWitnessIDs, self).serialize(with_witness=True)
# Calculate the BIP 152-compact blocks shortid for a given transaction hash
def calculate_shortid(k0, k1, tx_hash):
    """Compute the BIP152 6-byte short transaction id for ``tx_hash``
    under the siphash keys ``k0``/``k1``."""
    return siphash256(k0, k1, tx_hash) & 0x0000ffffffffffff
# This version gets rid of the array lengths, and reinterprets the differential
# encoding into indices that can be used for lookup.
class HeaderAndShortIDs(object):
def __init__(self, p2pheaders_and_shortids = None):
self.header = CBlockHeader()
self.nonce = 0
self.shortids = []
self.prefilled_txn = []
self.use_witness = False
if p2pheaders_and_shortids != None:
self.header = p2pheaders_and_shortids.header
self.nonce = p2pheaders_and_shortids.nonce
self.shortids = p2pheaders_and_shortids.shortids
last_index = -1
for x in p2pheaders_and_shortids.prefilled_txn:
self.prefilled_txn.append(PrefilledTransaction(x.index + last_index + 1, x.tx))
last_index = self.prefilled_txn[-1].index
def to_p2p(self):
if self.use_witness:
ret = P2PHeaderAndShortWitnessIDs()
else:
ret = P2PHeaderAndShortIDs()
ret.header = self.header
ret.nonce = self.nonce
ret.shortids_length = len(self.shortids)
ret.shortids = self.shortids
ret.prefilled_txn_length = len(self.prefilled_txn)
ret.prefilled_txn = []
last_index = -1
for x in self.prefilled_txn:
ret.prefilled_txn.append(PrefilledTransaction(x.index - last_index - 1, x.tx))
last_index = x.index
return ret
def get_siphash_keys(self):
header_nonce = self.header.serialize()
header_nonce += struct.pack("<Q", self.nonce)
hash_header_nonce_as_str = sha256(header_nonce)
key0 = struct.unpack("<Q", hash_header_nonce_as_str[0:8])[0]
key1 = struct.unpack("<Q", hash_header_nonce_as_str[8:16])[0]
return [ key0, key1 ]
# Version 2 compact blocks use wtxid in shortids (rather than txid)
def initialize_from_block(self, block, nonce=0, prefill_list = [0], use_witness = False):
self.header = CBlockHeader(block)
self.nonce = nonce
self.prefilled_txn = [ PrefilledTransaction(i, block.vtx[i]) for i in prefill_list ]
self.shortids = []
self.use_witness = use_witness
[k0, k1] = self.get_siphash_keys()
for i in range(len(block.vtx)):
if i not in prefill_list:
tx_hash = block.vtx[i].sha256
if use_witness:
tx_hash = block.vtx[i].calc_sha256(with_witness=True)
self.shortids.append(calculate_shortid(k0, k1, tx_hash))
def __repr__(self):
return "HeaderAndShortIDs(header=%s, nonce=%d, shortids=%s, prefilledtxn=%s" % (repr(self.header), self.nonce, repr(self.shortids), repr(self.prefilled_txn))
class BlockTransactionsRequest(object):
    """BIP152 getblocktxn payload: block hash plus differentially encoded
    transaction indexes (each entry stores the gap to the previous index)."""
    def __init__(self, blockhash=0, indexes = None):
        self.blockhash = blockhash
        self.indexes = indexes if indexes != None else []
    def deserialize(self, f):
        self.blockhash = deser_uint256(f)
        count = deser_compact_size(f)
        for _ in range(count):
            self.indexes.append(deser_compact_size(f))
    def serialize(self):
        parts = [ser_uint256(self.blockhash),
                 ser_compact_size(len(self.indexes))]
        parts.extend(ser_compact_size(i) for i in self.indexes)
        return b"".join(parts)
    # helper to set the differentially encoded indexes from absolute ones
    def from_absolute(self, absolute_indexes):
        self.indexes = []
        prev = -1
        for idx in absolute_indexes:
            self.indexes.append(idx - prev - 1)
            prev = idx
    def to_absolute(self):
        """Decode the differential indexes back into absolute positions."""
        absolute = []
        prev = -1
        for delta in self.indexes:
            prev = delta + prev + 1
            absolute.append(prev)
        return absolute
    def __repr__(self):
        return "BlockTransactionsRequest(hash=%064x indexes=%s)" % (self.blockhash, repr(self.indexes))
class BlockTransactions(object):
    """BIP152 blocktxn payload: block hash plus the requested transactions."""
    def __init__(self, blockhash=0, transactions = None):
        self.blockhash = blockhash
        self.transactions = transactions if transactions != None else []
    def deserialize(self, f):
        self.blockhash = deser_uint256(f)
        self.transactions = deser_vector(f, CTransaction)
    def serialize(self, with_witness=False):
        r = b""
        r += ser_uint256(self.blockhash)
        if with_witness:
            r += ser_vector(self.transactions, "serialize_with_witness")
        else:
            r += ser_vector(self.transactions)
        return r
    def __repr__(self):
        return "BlockTransactions(hash=%064x transactions=%s)" % (self.blockhash, repr(self.transactions))
# Objects that correspond to messages on the wire
class msg_version(object):
command = b"version"
def __init__(self):
self.nVersion = MY_VERSION
self.nServices = 1
self.nTime = int(time.time())
self.addrTo = CAddress()
self.addrFrom = CAddress()
self.nNonce = random.getrandbits(64)
self.strSubVer = MY_SUBVERSION
self.nStartingHeight = -1
self.nRelay = MY_RELAY
def deserialize(self, f):
self.nVersion = struct.unpack("<i", f.read(4))[0]
if self.nVersion == 10300:
self.nVersion = 300
self.nServices = struct.unpack("<Q", f.read(8))[0]
self.nTime = struct.unpack("<q", f.read(8))[0]
self.addrTo = CAddress()
self.addrTo.deserialize(f)
if self.nVersion >= 106:
self.addrFrom = CAddress()
self.addrFrom.deserialize(f)
self.nNonce = struct.unpack("<Q", f.read(8))[0]
self.strSubVer = deser_string(f)
else:
self.addrFrom = None
self.nNonce = None
self.strSubVer = None
self.nStartingHeight = None
if self.nVersion >= 209:
self.nStartingHeight = struct.unpack("<i", f.read(4))[0]
else:
self.nStartingHeight = None
if self.nVersion >= 70001:
# Relay field is optional for version 70001 onwards
try:
self.nRelay = struct.unpack("<b", f.read(1))[0]
except:
self.nRelay = 0
else:
self.nRelay = 0
def serialize(self):
r = b""
r += struct.pack("<i", self.nVersion)
r += struct.pack("<Q", self.nServices)
r += struct.pack("<q", self.nTime)
r += self.addrTo.serialize()
r += self.addrFrom.serialize()
r += struct.pack("<Q", self.nNonce)
r += ser_string(self.strSubVer)
r += struct.pack("<i", self.nStartingHeight)
r += struct.pack("<b", self.nRelay)
return r
def __repr__(self):
return 'msg_version(nVersion=%i nServices=%i nTime=%s addrTo=%s addrFrom=%s nNonce=0x%016X strSubVer=%s nStartingHeight=%i nRelay=%i)' \
% (self.nVersion, self.nServices, time.ctime(self.nTime),
repr(self.addrTo), repr(self.addrFrom), self.nNonce,
self.strSubVer, self.nStartingHeight, self.nRelay)
class msg_verack(object):
    """verack message: empty-payload acknowledgement of a version message."""
    command = b"verack"
    def __init__(self):
        pass
    def deserialize(self, f):
        pass
    def serialize(self):
        return b""
    def __repr__(self):
        return "msg_verack()"
class msg_addr(object):
    """addr message: relays a list of CAddress peer records."""
    command = b"addr"
    def __init__(self):
        self.addrs = []
    def deserialize(self, f):
        self.addrs = deser_vector(f, CAddress)
    def serialize(self):
        return ser_vector(self.addrs)
    def __repr__(self):
        return "msg_addr(addrs=%s)" % (repr(self.addrs))
class msg_alert(object):
    """alert message wrapping one signed CAlert payload."""
    command = b"alert"
    def __init__(self):
        self.alert = CAlert()
    def deserialize(self, f):
        self.alert = CAlert()
        self.alert.deserialize(f)
    def serialize(self):
        return self.alert.serialize()
    def __repr__(self):
        return "msg_alert(alert=%s)" % (repr(self.alert), )
class msg_inv(object):
    """inv message: announces a list of inventory (CInv) items."""
    command = b"inv"
    def __init__(self, inv=None):
        self.inv = [] if inv is None else inv
    def deserialize(self, f):
        self.inv = deser_vector(f, CInv)
    def serialize(self):
        return ser_vector(self.inv)
    def __repr__(self):
        return "msg_inv(inv=%s)" % (repr(self.inv))
class msg_getdata(object):
    """getdata message: requests the objects named by a list of CInvs."""
    command = b"getdata"
    def __init__(self, inv=None):
        self.inv = [] if inv is None else inv
    def deserialize(self, f):
        self.inv = deser_vector(f, CInv)
    def serialize(self):
        return ser_vector(self.inv)
    def __repr__(self):
        return "msg_getdata(inv=%s)" % (repr(self.inv))
class msg_getblocks(object):
    """getblocks message: block locator plus a stop hash (0 = no limit)."""
    command = b"getblocks"
    def __init__(self):
        self.locator = CBlockLocator()
        self.hashstop = 0
    def deserialize(self, f):
        self.locator = CBlockLocator()
        self.locator.deserialize(f)
        self.hashstop = deser_uint256(f)
    def serialize(self):
        return self.locator.serialize() + ser_uint256(self.hashstop)
    def __repr__(self):
        return "msg_getblocks(locator=%s hashstop=%064x)" \
            % (repr(self.locator), self.hashstop)
class msg_tx(object):
    """tx message carrying a single transaction (non-witness serialization)."""
    command = b"tx"
    def __init__(self, tx=None):
        # Fix: the original default ``tx=CTransaction()`` was a mutable
        # default argument — one shared CTransaction instance across every
        # default-constructed msg_tx, silently mutated by deserialize().
        self.tx = CTransaction() if tx is None else tx
    def deserialize(self, f):
        self.tx.deserialize(f)
    def serialize(self):
        return self.tx.serialize_without_witness()
    def __repr__(self):
        return "msg_tx(tx=%s)" % (repr(self.tx))
class msg_witness_tx(msg_tx):
    """tx message using witness (segwit) serialization."""
    def serialize(self):
        return self.tx.serialize_with_witness()
class msg_block(object):
    """block message carrying a full block (non-witness serialization)."""
    command = b"block"
    def __init__(self, block=None):
        self.block = CBlock() if block is None else block
    def deserialize(self, f):
        self.block.deserialize(f)
    def serialize(self):
        return self.block.serialize()
    def __repr__(self):
        return "msg_block(block=%s)" % (repr(self.block))
# for cases where a user needs tighter control over what is sent over the wire
# note that the user must supply the name of the command, and the data
class msg_generic(object):
    """Escape hatch for tests: send an arbitrary command with a
    caller-supplied raw payload."""
    def __init__(self, command, data=None):
        self.command = command
        self.data = data
    def serialize(self):
        return self.data
    def __repr__(self):
        return "msg_generic()"
class msg_witness_block(msg_block):
    """block message using witness (segwit) serialization."""
    def serialize(self):
        r = self.block.serialize(with_witness=True)
        return r
class msg_getaddr(object):
    """getaddr message (empty payload)."""
    command = b"getaddr"
    def __init__(self):
        pass
    def deserialize(self, f):
        pass
    def serialize(self):
        return b""
    def __repr__(self):
        return "msg_getaddr()"
class msg_ping_prebip31(object):
    """Legacy ping message (pre-BIP31): carries no nonce payload."""
    command = b"ping"
    def __init__(self):
        pass
    def deserialize(self, f):
        pass
    def serialize(self):
        return b""
    def __repr__(self):
        return "msg_ping() (pre-bip31)"
class msg_ping(object):
    """ping message carrying a 64-bit nonce (BIP31)."""
    command = b"ping"
    def __init__(self, nonce=0):
        self.nonce = nonce
    def deserialize(self, f):
        (self.nonce,) = struct.unpack("<Q", f.read(8))
    def serialize(self):
        return struct.pack("<Q", self.nonce)
    def __repr__(self):
        return "msg_ping(nonce=%08x)" % self.nonce
class msg_pong(object):
    """pong message echoing the 64-bit nonce of a received ping (BIP31)."""
    command = b"pong"
    def __init__(self, nonce=0):
        self.nonce = nonce
    def deserialize(self, f):
        (self.nonce,) = struct.unpack("<Q", f.read(8))
    def serialize(self):
        return struct.pack("<Q", self.nonce)
    def __repr__(self):
        return "msg_pong(nonce=%08x)" % self.nonce
class msg_mempool(object):
    """mempool message (empty payload)."""
    command = b"mempool"
    def __init__(self):
        pass
    def deserialize(self, f):
        pass
    def serialize(self):
        return b""
    def __repr__(self):
        return "msg_mempool()"
class msg_sendheaders(object):
    """sendheaders message (empty payload, BIP130)."""
    command = b"sendheaders"
    def __init__(self):
        pass
    def deserialize(self, f):
        pass
    def serialize(self):
        return b""
    def __repr__(self):
        return "msg_sendheaders()"
# getheaders message has
# number of entries
# vector of hashes
# hash_stop (hash of last desired block header, 0 to get as many as possible)
class msg_getheaders(object):
    """getheaders message: block locator plus a stop hash
    (0 requests as many headers as the peer will give)."""
    command = b"getheaders"
    def __init__(self):
        self.locator = CBlockLocator()
        self.hashstop = 0
    def deserialize(self, f):
        self.locator = CBlockLocator()
        self.locator.deserialize(f)
        self.hashstop = deser_uint256(f)
    def serialize(self):
        return self.locator.serialize() + ser_uint256(self.hashstop)
    def __repr__(self):
        return "msg_getheaders(locator=%s, stop=%064x)" \
            % (repr(self.locator), self.hashstop)
# headers message has
# <count> <vector of block headers>
class msg_headers(object):
    """``headers`` message: a vector of block headers."""
    command = b"headers"
    def __init__(self):
        self.headers = []
    def deserialize(self, f):
        # comment in bitcoind indicates these should be deserialized as blocks
        blocks = deser_vector(f, CBlock)
        for x in blocks:
            self.headers.append(CBlockHeader(x))
    def serialize(self):
        # Headers go out serialized as blocks (their tx vectors are empty).
        blocks = [CBlock(x) for x in self.headers]
        return ser_vector(blocks)
    def __repr__(self):
        return "msg_headers(headers=%s)" % repr(self.headers)
class msg_reject(object):
    """``reject`` message: the peer rejected one of our messages.

    Wire layout: rejected command (var-length string), 1-byte reject code,
    reason string, and -- only when the rejected message was a "block" or
    "tx" and the code is not REJECT_MALFORMED -- the 32-byte hash of the
    offending object.
    """
    command = b"reject"
    REJECT_MALFORMED = 1
    def __init__(self):
        self.message = b""
        self.code = 0
        self.reason = b""
        self.data = 0
    def deserialize(self, f):
        self.message = deser_string(f)
        self.code = struct.unpack("<B", f.read(1))[0]
        self.reason = deser_string(f)
        # The trailing hash field is only present for block/tx rejections.
        if (self.code != self.REJECT_MALFORMED and
                (self.message == b"block" or self.message == b"tx")):
            self.data = deser_uint256(f)
    def serialize(self):
        r = ser_string(self.message)
        r += struct.pack("<B", self.code)
        r += ser_string(self.reason)
        if (self.code != self.REJECT_MALFORMED and
                (self.message == b"block" or self.message == b"tx")):
            r += ser_uint256(self.data)
        return r
    def __repr__(self):
        return "msg_reject: %s %d %s [%064x]" \
            % (self.message, self.code, self.reason, self.data)
# Helper function
def wait_until(predicate, *, attempts=float('inf'), timeout=float('inf')):
    """Poll *predicate* under mininode_lock until it returns True.

    Returns True as soon as the predicate holds; returns False once either
    *attempts* polls have been made or *timeout* seconds of wall-clock time
    have elapsed.  When neither bound is supplied a 60 second timeout is
    applied by default.
    """
    if attempts == float('inf') and timeout == float('inf'):
        timeout = 60
    attempt = 0
    # Measure real elapsed time instead of adding a fixed 0.05s per loop:
    # the old "elapsed += 0.05" accounting ignored however long the
    # predicate itself (and lock acquisition) took, so a slow predicate
    # could stretch the effective timeout far beyond what was requested.
    start = time.monotonic()
    while attempt < attempts and time.monotonic() - start < timeout:
        with mininode_lock:
            if predicate():
                return True
        attempt += 1
        time.sleep(0.05)
    return False
class msg_feefilter(object):
    """BIP133 ``feefilter``: minimum fee rate (as a 64-bit LE integer)."""
    command = b"feefilter"
    def __init__(self, feerate=0):
        self.feerate = feerate
    def deserialize(self, f):
        (self.feerate,) = struct.unpack("<Q", f.read(8))
    def serialize(self):
        return struct.pack("<Q", self.feerate)
    def __repr__(self):
        return "msg_feefilter(feerate=%08x)" % self.feerate
class msg_sendcmpct(object):
    """BIP152 ``sendcmpct``: announce-flag byte plus 64-bit protocol version."""
    command = b"sendcmpct"
    def __init__(self):
        self.announce = False
        self.version = 1
    def deserialize(self, f):
        (self.announce,) = struct.unpack("<?", f.read(1))
        (self.version,) = struct.unpack("<Q", f.read(8))
    def serialize(self):
        return struct.pack("<?", self.announce) + struct.pack("<Q", self.version)
    def __repr__(self):
        return "msg_sendcmpct(announce=%s, version=%lu)" % (self.announce, self.version)
class msg_cmpctblock(object):
    """Compact-block relay ``cmpctblock`` message (header + short tx IDs)."""
    command = b"cmpctblock"
    def __init__(self, header_and_shortids = None):
        # P2PHeaderAndShortIDs instance (or None until deserialized).
        self.header_and_shortids = header_and_shortids
    def deserialize(self, f):
        self.header_and_shortids = P2PHeaderAndShortIDs()
        self.header_and_shortids.deserialize(f)
    def serialize(self):
        r = b""
        r += self.header_and_shortids.serialize()
        return r
    def __repr__(self):
        return "msg_cmpctblock(HeaderAndShortIDs=%s)" % repr(self.header_and_shortids)
class msg_getblocktxn(object):
    """Compact-block ``getblocktxn``: request missing transactions of a block."""
    command = b"getblocktxn"
    def __init__(self):
        # BlockTransactionsRequest instance (None until deserialized/assigned).
        self.block_txn_request = None
    def deserialize(self, f):
        self.block_txn_request = BlockTransactionsRequest()
        self.block_txn_request.deserialize(f)
    def serialize(self):
        r = b""
        r += self.block_txn_request.serialize()
        return r
    def __repr__(self):
        return "msg_getblocktxn(block_txn_request=%s)" % (repr(self.block_txn_request))
class msg_blocktxn(object):
    """Compact-block ``blocktxn``: the transactions a peer asked for."""
    command = b"blocktxn"
    def __init__(self):
        self.block_transactions = BlockTransactions()
    def deserialize(self, f):
        self.block_transactions.deserialize(f)
    def serialize(self):
        r = b""
        r += self.block_transactions.serialize()
        return r
    def __repr__(self):
        return "msg_blocktxn(block_transactions=%s)" % (repr(self.block_transactions))
class msg_witness_blocktxn(msg_blocktxn):
    """``blocktxn`` variant that serializes transactions with witness data."""
    def serialize(self):
        r = b""
        r += self.block_transactions.serialize(with_witness=True)
        return r
class NodeConnCB(object):
    """Callback and helper functions for P2P connection to a bitcoind node.
    Individual testcases should subclass this and override the on_* methods
    if they want to alter message handling behaviour.
    """
    def __init__(self):
        # Track whether we have a P2P connection open to the node
        self.connected = False
        self.connection = None
        # Track number of messages of each type received and the most recent
        # message of each type
        self.message_count = defaultdict(int)
        self.last_message = {}
        # A count of the number of ping messages we've sent to the node
        self.ping_counter = 1
        # Set by on_verack once the version handshake completes.  Initialised
        # here so that reading it before the handshake finishes yields False
        # instead of raising AttributeError.
        self.verack_received = False
        # deliver_sleep_time is helpful for debugging race conditions in p2p
        # tests; it causes message delivery to sleep for the specified time
        # before acquiring the global lock and delivering the next message.
        self.deliver_sleep_time = None
        # Remember the services our peer has advertised
        self.peer_services = None
    # Message receiving methods
    def deliver(self, conn, message):
        """Receive message and dispatch message to appropriate callback.
        We keep a count of how many of each message type has been received
        and the most recent message of each type.
        Optionally waits for deliver_sleep_time before dispatching message.
        """
        deliver_sleep = self.get_deliver_sleep_time()
        if deliver_sleep is not None:
            time.sleep(deliver_sleep)
        with mininode_lock:
            try:
                command = message.command.decode('ascii')
                self.message_count[command] += 1
                self.last_message[command] = message
                getattr(self, 'on_' + command)(conn, message)
            except Exception:
                # Was a bare "except:", which also swallowed SystemExit and
                # KeyboardInterrupt; only handler failures belong here.
                print("ERROR delivering %s (%s)" % (repr(message),
                                                    sys.exc_info()[0]))
    def set_deliver_sleep_time(self, value):
        with mininode_lock:
            self.deliver_sleep_time = value
    def get_deliver_sleep_time(self):
        with mininode_lock:
            return self.deliver_sleep_time
    # Callback methods. Can be overridden by subclasses in individual test
    # cases to provide custom message handling behaviour.
    def on_open(self, conn):
        self.connected = True
    def on_close(self, conn):
        self.connected = False
        self.connection = None
    def on_addr(self, conn, message): pass
    def on_alert(self, conn, message): pass
    def on_block(self, conn, message): pass
    def on_blocktxn(self, conn, message): pass
    def on_cmpctblock(self, conn, message): pass
    def on_feefilter(self, conn, message): pass
    def on_getaddr(self, conn, message): pass
    def on_getblocks(self, conn, message): pass
    def on_getblocktxn(self, conn, message): pass
    def on_getdata(self, conn, message): pass
    def on_getheaders(self, conn, message): pass
    def on_headers(self, conn, message): pass
    # deliver() always invokes handlers as on_<cmd>(conn, message); the old
    # two-argument signature made every received "mempool" message raise a
    # TypeError that deliver()'s exception handler then swallowed.
    def on_mempool(self, conn, message): pass
    def on_pong(self, conn, message): pass
    def on_reject(self, conn, message): pass
    def on_sendcmpct(self, conn, message): pass
    def on_sendheaders(self, conn, message): pass
    def on_tx(self, conn, message): pass
    def on_inv(self, conn, message):
        # Ask for the data behind every announced inventory entry.
        want = msg_getdata()
        for i in message.inv:
            if i.type != 0:
                want.inv.append(i)
        if len(want.inv):
            conn.send_message(want)
    def on_ping(self, conn, message):
        # BIP31 pings carry a nonce that must be echoed back in a pong.
        if conn.ver_send > BIP0031_VERSION:
            conn.send_message(msg_pong(message.nonce))
    def on_verack(self, conn, message):
        conn.ver_recv = conn.ver_send
        self.verack_received = True
    def on_version(self, conn, message):
        if message.nVersion >= 209:
            conn.send_message(msg_verack())
        conn.ver_send = min(MY_VERSION, message.nVersion)
        if message.nVersion < 209:
            conn.ver_recv = conn.ver_send
        conn.nServices = message.nServices
    # Connection helper methods
    def add_connection(self, conn):
        self.connection = conn
    def wait_for_disconnect(self, timeout=60):
        test_function = lambda: not self.connected
        assert wait_until(test_function, timeout=timeout)
    # Message receiving helper methods
    def wait_for_block(self, blockhash, timeout=60):
        test_function = lambda: self.last_message.get("block") and self.last_message["block"].block.rehash() == blockhash
        assert wait_until(test_function, timeout=timeout)
    def wait_for_getdata(self, timeout=60):
        test_function = lambda: self.last_message.get("getdata")
        assert wait_until(test_function, timeout=timeout)
    def wait_for_getheaders(self, timeout=60):
        test_function = lambda: self.last_message.get("getheaders")
        assert wait_until(test_function, timeout=timeout)
    def wait_for_inv(self, expected_inv, timeout=60):
        """Waits for an INV message and checks that the first inv object in the message was as expected."""
        if len(expected_inv) > 1:
            raise NotImplementedError("wait_for_inv() will only verify the first inv object")
        test_function = lambda: self.last_message.get("inv") and \
                self.last_message["inv"].inv[0].type == expected_inv[0].type and \
                self.last_message["inv"].inv[0].hash == expected_inv[0].hash
        assert wait_until(test_function, timeout=timeout)
    def wait_for_verack(self, timeout=60):
        test_function = lambda: self.message_count["verack"]
        assert wait_until(test_function, timeout=timeout)
    # Message sending helper functions
    def send_message(self, message):
        if self.connection:
            self.connection.send_message(message)
        else:
            logger.error("Cannot send message. No connection to node!")
    def send_and_ping(self, message):
        self.send_message(message)
        self.sync_with_ping()
    # Sync up with the node
    def sync_with_ping(self, timeout=60):
        """Send a ping with a fresh nonce and wait for the matching pong."""
        self.send_message(msg_ping(nonce=self.ping_counter))
        test_function = lambda: self.last_message.get("pong") and self.last_message["pong"].nonce == self.ping_counter
        assert wait_until(test_function, timeout=timeout)
        self.ping_counter += 1
        return True
# The actual NodeConn class
# This class provides an interface for a p2p connection to a specified node
class NodeConn(asyncore.dispatcher):
    """Asyncore-based P2P connection to a node.

    Owns the raw socket, performs message framing (magic / command /
    length / checksum) in both directions, and hands fully deserialized
    messages to the callback object supplied at construction.  The
    asyncore loop runs on its own thread, so send-buffer access is
    guarded by mininode_lock.
    """
    # Wire command name -> message class used to deserialize it.
    messagemap = {
        b"version": msg_version,
        b"verack": msg_verack,
        b"addr": msg_addr,
        b"alert": msg_alert,
        b"inv": msg_inv,
        b"getdata": msg_getdata,
        b"getblocks": msg_getblocks,
        b"tx": msg_tx,
        b"block": msg_block,
        b"getaddr": msg_getaddr,
        b"ping": msg_ping,
        b"pong": msg_pong,
        b"headers": msg_headers,
        b"getheaders": msg_getheaders,
        b"reject": msg_reject,
        b"mempool": msg_mempool,
        b"feefilter": msg_feefilter,
        b"sendheaders": msg_sendheaders,
        b"sendcmpct": msg_sendcmpct,
        b"cmpctblock": msg_cmpctblock,
        b"getblocktxn": msg_getblocktxn,
        b"blocktxn": msg_blocktxn
    }
    # Network magic prefixes used to frame every message.
    MAGIC_BYTES = {
        "mainnet": b"\xfb\xc0\xb6\xdb",   # mainnet
        "testnet3": b"\xfc\xc1\xb7\xdc",  # testnet3
        "regtest": b"\xfa\xbf\xb5\xda",   # regtest
    }
    def __init__(self, dstaddr, dstport, rpc, callback, net="regtest", services=NODE_NETWORK, send_version=True):
        asyncore.dispatcher.__init__(self, map=mininode_socket_map)
        self.dstaddr = dstaddr
        self.dstport = dstport
        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        self.sendbuf = b""
        self.recvbuf = b""
        # Protocol version negotiated for sending/receiving; 209 is the
        # minimum framing version (checksummed messages).
        self.ver_send = 209
        self.ver_recv = 209
        self.last_sent = 0
        self.state = "connecting"
        self.network = net
        self.cb = callback
        self.disconnect = False
        self.nServices = 0
        if send_version:
            # stuff version msg into sendbuf
            vt = msg_version()
            vt.nServices = services
            vt.addrTo.ip = self.dstaddr
            vt.addrTo.port = self.dstport
            vt.addrFrom.ip = "0.0.0.0"
            vt.addrFrom.port = 0
            self.send_message(vt, True)
        logger.info('Connecting to LitecoinCash Node: %s:%d' % (self.dstaddr, self.dstport))
        try:
            self.connect((dstaddr, dstport))
        except Exception:
            self.handle_close()
        self.rpc = rpc
    def handle_connect(self):
        if self.state != "connected":
            logger.debug("Connected & Listening: %s:%d" % (self.dstaddr, self.dstport))
            self.state = "connected"
            self.cb.on_open(self)
    def handle_close(self):
        """Tear down the connection and notify the callback object."""
        logger.debug("Closing connection to: %s:%d" % (self.dstaddr, self.dstport))
        self.state = "closed"
        self.recvbuf = b""
        self.sendbuf = b""
        try:
            self.close()
        except Exception:
            pass
        self.cb.on_close(self)
    def handle_read(self):
        try:
            t = self.recv(8192)
            if len(t) > 0:
                self.recvbuf += t
                self.got_data()
        except Exception:
            # Read errors are tolerated here; a dead socket will surface
            # through the asyncore loop / handle_close path.
            pass
    def readable(self):
        return True
    def writable(self):
        with mininode_lock:
            pre_connection = self.state == "connecting"
            length = len(self.sendbuf)
        return (length > 0 or pre_connection)
    def handle_write(self):
        with mininode_lock:
            # asyncore does not expose socket connection, only the first read/write
            # event, thus we must check connection manually here to know when we
            # actually connect
            if self.state == "connecting":
                self.handle_connect()
            if not self.writable():
                return
            try:
                sent = self.send(self.sendbuf)
            except Exception:
                self.handle_close()
                return
            self.sendbuf = self.sendbuf[sent:]
    def got_data(self):
        """Parse as many complete framed messages as the receive buffer holds."""
        try:
            while True:
                if len(self.recvbuf) < 4:
                    return
                if self.recvbuf[:4] != self.MAGIC_BYTES[self.network]:
                    raise ValueError("got garbage %s" % repr(self.recvbuf))
                if self.ver_recv < 209:
                    # Pre-209 framing has no checksum field.
                    if len(self.recvbuf) < 4 + 12 + 4:
                        return
                    command = self.recvbuf[4:4+12].split(b"\x00", 1)[0]
                    msglen = struct.unpack("<i", self.recvbuf[4+12:4+12+4])[0]
                    checksum = None
                    if len(self.recvbuf) < 4 + 12 + 4 + msglen:
                        return
                    msg = self.recvbuf[4+12+4:4+12+4+msglen]
                    self.recvbuf = self.recvbuf[4+12+4+msglen:]
                else:
                    if len(self.recvbuf) < 4 + 12 + 4 + 4:
                        return
                    command = self.recvbuf[4:4+12].split(b"\x00", 1)[0]
                    msglen = struct.unpack("<i", self.recvbuf[4+12:4+12+4])[0]
                    checksum = self.recvbuf[4+12+4:4+12+4+4]
                    if len(self.recvbuf) < 4 + 12 + 4 + 4 + msglen:
                        return
                    msg = self.recvbuf[4+12+4+4:4+12+4+4+msglen]
                    # Checksum is the first 4 bytes of double-SHA256(payload).
                    th = sha256(msg)
                    h = sha256(th)
                    if checksum != h[:4]:
                        raise ValueError("got bad checksum " + repr(self.recvbuf))
                    self.recvbuf = self.recvbuf[4+12+4+4+msglen:]
                if command in self.messagemap:
                    f = BytesIO(msg)
                    t = self.messagemap[command]()
                    t.deserialize(f)
                    self.got_message(t)
                else:
                    logger.warning("Received unknown command from %s:%d: '%s' %s" % (self.dstaddr, self.dstport, command, repr(msg)))
        except Exception as e:
            # The old call was logger.exception('got_data:', repr(e)):
            # repr(e) was passed as a %-format argument to a message with no
            # placeholder, which made the logging module raise internally and
            # lose the real error.  Use a proper format string instead.
            logger.exception('got_data: %s', repr(e))
    def send_message(self, message, pushbuf=False):
        """Frame *message* and append it to the send buffer.

        With pushbuf=True the message may be queued before the socket has
        connected (used for the initial version message).
        """
        if self.state != "connected" and not pushbuf:
            raise IOError('Not connected, no pushbuf')
        self._log_message("send", message)
        command = message.command
        data = message.serialize()
        tmsg = self.MAGIC_BYTES[self.network]
        tmsg += command
        tmsg += b"\x00" * (12 - len(command))
        tmsg += struct.pack("<I", len(data))
        if self.ver_send >= 209:
            th = sha256(data)
            h = sha256(th)
            tmsg += h[:4]
        tmsg += data
        with mininode_lock:
            self.sendbuf += tmsg
            self.last_sent = time.time()
    def got_message(self, message):
        # A pre-BIP31 peer expects nonce-less pings, so swap the parser.
        if message.command == b"version":
            if message.nVersion <= BIP0031_VERSION:
                self.messagemap[b'ping'] = msg_ping_prebip31
        # Keep the link alive if we have been idle for 30 minutes.
        if self.last_sent + 30 * 60 < time.time():
            self.send_message(self.messagemap[b'ping']())
        self._log_message("receive", message)
        self.cb.deliver(self, message)
    def _log_message(self, direction, msg):
        """Debug-log a sent/received message, truncating huge payloads."""
        if direction == "send":
            log_message = "Send message to "
        elif direction == "receive":
            log_message = "Received message from "
        log_message += "%s:%d: %s" % (self.dstaddr, self.dstport, repr(msg)[:500])
        if len(log_message) > 500:
            log_message += "... (msg truncated)"
        logger.debug(log_message)
    def disconnect_node(self):
        # Flag only; the NetworkThread performs the actual close outside
        # the asyncore callbacks.
        self.disconnect = True
class NetworkThread(Thread):
    """Background thread that drives the asyncore event loop.

    Runs until every connection has been removed from mininode_socket_map.
    """
    def run(self):
        while mininode_socket_map:
            # We check for whether to disconnect outside of the asyncore
            # loop to workaround the behavior of asyncore when using
            # select
            disconnected = []
            for fd, obj in mininode_socket_map.items():
                if obj.disconnect:
                    disconnected.append(obj)
            # handle_close() mutates the socket map, so collect first and
            # close after the iteration has finished.
            [ obj.handle_close() for obj in disconnected ]
            asyncore.loop(0.1, use_poll=True, map=mininode_socket_map, count=1)
# An exception we can raise if we detect a potential disconnect
# (p2p or rpc) before the test is complete
class EarlyDisconnectError(Exception):
    """Signals that a p2p/rpc connection dropped before the test finished."""
    def __init__(self, value):
        # Keep the offending value around so callers can inspect it.
        self.value = value
    def __str__(self):
        return repr(self.value)
| [
"coldcity@gmail.com"
] | coldcity@gmail.com |
ef083b1cb301271ef664791c5da0ce5cc371be90 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_032/ch147_2020_06_21_14_18_56_736810.py | 94fb5801794571b5ac0ec8335c9c86bcc9bd9fb4 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 367 | py | def mais_frequente(lista):
dicionario = {}
k = dicionario.keys()
for i in range (0, len(lista)):
if lista[i] not in k:
contador = lista.count(lista[i])
dicionario[lista[i]]=contador
n = 0
palavra = ' '
for c,v in dicionario.items():
if v > n:
n = v
palavra = c
return palavra | [
"you@example.com"
] | you@example.com |
3b1e66a3382ddde9f8df5db81b317421536c038d | b9ca293db6ec392a6c4c68bfa3625661bfe8ceef | /com/anjie/spider/house_db.py | 6d3785543aa9afa1f5baba91b5d7d5806da1ee8b | [
"Apache-2.0"
] | permissive | anzaizai/EasySpider | fb0840b12cb2686d73beb64be94540f3a6eaa683 | 1d24c02bd9879a4c7eb5d801b7b0d3fa4efb2a28 | refs/heads/master | 2021-07-06T15:47:00.522215 | 2017-09-29T00:33:53 | 2017-09-29T00:33:53 | 104,687,419 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,259 | py | import sqlite3
# Title of the listing
title = "";
# URL of the house listing page
url = "";
# House type
house_type = ""
# Sale or rental type
sale_type = ""
# Fit-out level (e.g. fully furnished)
level = ""
# Floor number
floor_number = ""
# District the house is located in
area_name = ""
# Street address
addr = ""
# Contact person
user = ""
# Additional notes
supplement = ""
price = "";
unit = "";
def createTable():
    """Create the user_table in anjie.db if it does not already exist.

    The original CREATE TABLE statement was invalid SQL: it declared the
    ``title`` column three times and left a trailing comma before the
    closing parenthesis, so every call raised sqlite3.OperationalError.
    It also lacked the ``name`` column that insertOneData/insertManyData
    and selectData rely on; the schema below matches that usage.
    """
    conn = sqlite3.connect('anjie.db');
    curs = conn.cursor();
    curs.execute('''
    CREATE TABLE IF NOT EXISTS user_table(
    id VARCHAR(255) PRIMARY KEY,
    name VARCHAR(255),
    title VARCHAR(255),
    url VARCHAR(255),
    house_type VARCHAR(255),
    level VARCHAR(255),
    floor_number VARCHAR(255)
    )
    ''');
    # Close the cursor:
    curs.close()
    # Commit the transaction:
    conn.commit()
    # Close the connection:
    conn.close()
def insertOneData():
    """Insert a single hard-coded row (id=-1, name='anjie') into user_table."""
    # NOTE(review): ``User`` is not defined anywhere in this module, so
    # calling this function raises NameError -- confirm where the User
    # class was meant to come from.
    conn = sqlite3.connect('anjie.db');
    curs = conn.cursor();
    u = User();
    u.id = -1;
    u.name = "anjie";
    curs.execute('INSERT INTO user_table (id, name) VALUES (?, ?)', [u.id, u.name]);
    curs.close()
    # Use rowcount to report how many rows were inserted:
    print('插入了%d 条数据' % curs.rowcount)
    conn.commit()
    conn.close()
def insertManyData():
    """Insert ten hard-coded rows (id=0..9, name='anjie<i>') into user_table."""
    # NOTE(review): like insertOneData(), this references the undefined
    # ``User`` class and raises NameError when called -- verify its origin.
    conn = sqlite3.connect('anjie.db');
    curs = conn.cursor();
    u = None;
    for i in range(10):
        u = User();
        u.id = i;
        u.name = "anjie"+str(i);
        curs.execute('INSERT INTO user_table (id, name) VALUES (?, ?)', [u.id, u.name]);
    curs.close()
    # Use rowcount to get the number of inserted rows:
    conn.commit()
    conn.close()
def selectData():
    """Print every row currently stored in user_table."""
    conn = sqlite3.connect('anjie.db');
    curs = conn.cursor();
    curs.execute('SELECT * FROM user_table');
    print(curs.fetchall())
    conn.commit()
    conn.close()
if __name__ == '__main__':
    # Development scratchpad: earlier test calls kept for reference.
    # createTable();
    # insertOneData()
    # insertManyData();
    # selectData();
    # Demonstrate the different cursor fetch APIs against user_table.
    conn = sqlite3.connect('anjie.db');
    curs = conn.cursor();
    curs.execute('SELECT * FROM user_table');
    print(curs.fetchone())
    print(curs.fetchmany())
    print(curs.fetchmany(size=2))
    # arraysize controls how many rows a plain fetchmany() returns.
    curs.arraysize=1
    print(curs.fetchmany())
    print(curs.fetchall())
    conn.commit()
    conn.close()
| [
"angelswy163@163.com"
] | angelswy163@163.com |
346af0faa70b3dd33c353011b73262b7dcdd4b7e | 78c1d3020b827b9ab7527404983986685bbb79ae | /Additional challenges/challenge 34.py | ed71c4fce43b273d8779d7b292f5241f0112b534 | [] | no_license | Yakobo-UG/Python-by-example-challenges | eb3e52601c9546a4984b8da4cf752e59f13af50e | 5bee4ec3e7619e826a27cde9e6a9a841393f8ccb | refs/heads/master | 2023-08-27T03:01:39.448703 | 2021-10-14T15:43:45 | 2021-10-14T15:43:45 | 401,747,080 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 455 | py | '''You are going to write a program that calculates the highest score from a List of scores.
e.g. student_scores = [78, 65, 89, 86, 55, 91, 64, 89]
Important you are not allowed to use the max or min functions. The output words must match the example. i.e
The highest score in the class is: x
'''
student_scores = [78, 65, 89, 86, 55, 91, 64, 89]
# The challenge explicitly forbids max()/min(), but the original called
# max() anyway -- and did so once per element inside a loop whose index
# was never used.  Track the running maximum by hand instead.
high = student_scores[0]
for score in student_scores:
    if score > high:
        high = score
print ("The highest score in the class is ", high)
"65670517+Yakobo-UG@users.noreply.github.com"
] | 65670517+Yakobo-UG@users.noreply.github.com |
84f64dc5955273a83d2d0466f6c208f271ca7897 | 767389d7689167958a2b42d6f4805f1f0d64fb09 | /venv/Scripts/pip-script.py | 009dc0e48d39f7739fcb5e532b105856c3b2d7f2 | [] | no_license | XinyuJiang/CodeHouse | 7feb07c9cdf7622bb53360fbfea646c37f0d7cba | 0c3160414427b27506d97fcbba6f3a0f30865da4 | refs/heads/master | 2020-04-15T11:30:40.128656 | 2019-02-08T06:19:44 | 2019-02-08T06:19:44 | 164,633,580 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 395 | py | #!E:\study\Web\Codehouse\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip'
# NOTE: setuptools-generated console-script wrapper for a virtualenv.
# It is regenerated on (re)install and should not be edited by hand.
__requires__ = 'pip==10.0.1'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
    # Strip the "-script.py"/".exe" suffix so argv[0] looks like "pip".
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('pip==10.0.1', 'console_scripts', 'pip')()
    )
| [
"503273921@qq.com"
] | 503273921@qq.com |
9241cd77e34c56ac2884cec50059f2be5495bc27 | 7e793f01dbbcf13e559d963c4f37166c00a4ace6 | /Cloud_Computing/Pies/readProducts.py | 0c0d0caf91b50c85b91515d7094a3d2ba094aa9e | [] | no_license | cmccormick1/CS-course-projects | 451dc7309465bad04c05489ab506ce5c587c6dd7 | 4f666d7dd81ddcf10b4c820fbb6b7bc9aa444bc2 | refs/heads/master | 2021-04-24T12:27:16.962911 | 2021-01-23T23:50:28 | 2021-01-23T23:50:28 | 250,118,312 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,153 | py | #! /usr/bin/python3
import cgi
import MySQLdb
import passwords
# NOTE(review): the cgi import appears unused in this script (no form data
# is read) -- confirm before removing.
# Connect to database and select entire products table
conn = MySQLdb.connect(host = passwords.SQL_HOST, user = passwords.SQL_USER, \
	passwd = passwords.SQL_PASSWD, db = "projects_database")
cursor = conn.cursor()
cursor.execute("SELECT * FROM products;")
results = cursor.fetchall()
cursor.close()
# Start HTML page
print("Content-Type: text/html")
print("Status: 200 OK")
print()
print("<html>")
print("<head><title>View Products</title></head>")
print("<body>")
print("<center>")
# Display a table of products in the database
print("<p>Current Products:")
print("<table border=1>")
print("<tr><th>Product ID</th><th>Pie Name</th><th>Price ($)</th><th>Calories (entire pie)</th></tr>")
for i in range(len(results)):
	print("<tr>")
	for j in range(len(results[i])):
		print("<td style='text-align:center'>" + str(results[i][j]) + "</td>")
	print("</tr>")
print("</table>")
# Link back to home page
print("<p><a href='/index.html'>Go back to the home page</a>")
print("</center>")
print("</body>")
print("</html>")
# Close database connection
conn.close()
| [
"noreply@github.com"
] | cmccormick1.noreply@github.com |
0754a01304f1cefe57de7a8532e81db47587d19a | c16c4e8e72a3f858283b74508cd69d0230e10f34 | /testCases/test_Login_DDT.py | c7fe9c811fbaf22ccc2d975ace47b04423e32811 | [] | no_license | d2bhatt/HybridFramework | 8a921643d7ef46bbb357bfbf88504783bf1c5d43 | d4f131b457c24898a31b08e5baa4db12b7839921 | refs/heads/master | 2022-12-11T11:26:40.507054 | 2020-09-07T10:55:11 | 2020-09-07T10:55:11 | 293,463,223 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,423 | py | from selenium import webdriver
import pytest
from pageObjects.LoginPage import LoginPage
from Utilities.readProperties import ReadConfigData
from Utilities.customLogger import LogGen
from Utilities import XLUtils
import time
class Test_002_DDT_Login:
    """Data-driven login test: replays credential rows from an Excel sheet."""
    # below three lines contain data which is common so we will create a separate config.ini(initialization)
    # file under config folder where we will keep these values.This is not test data it is just prerequisite.
    # Now to read common data we would create readProperties.py utility file
    # url = "https://admin-demo.nopcommerce.com/"
    # email = "admin@yourstore.com"
    # password = "admin"
    url = ReadConfigData.getApplicationUrl()
    file_path = ".//TestData/Login_Data.xlsx"
    logger = LogGen.loggen()
    @pytest.mark.regression
    def test_login(self, setup):
        """Try every (username, password, expected-status) row from the sheet
        and pass only if each row's actual outcome matches its expectation."""
        self.logger.info("*********************Test_002_DDT_Login***********************")
        self.logger.info("*******************verifying login Page*********")
        self.driver = setup
        # self.driver = webdriver.Chrome()
        self.driver.get(self.url)
        self.driver.maximize_window()
        self.lp = LoginPage(self.driver)
        self.rows = XLUtils.getRowCount(self.file_path, 'Sheet1')
        print("number of rows:", self.rows)
        list_status = []
        # Row 1 holds the headers, so data starts at row 2.
        for r in range(2, self.rows+1):
            self.username = XLUtils.readData(self.file_path, 'Sheet1', r, 1)
            self.password = XLUtils.readData(self.file_path, 'Sheet1', r, 2)
            self.status = XLUtils.readData(self.file_path, 'Sheet1', r, 3)
            self.lp.setUserName(self.username)
            self.lp.setPassword(self.password)
            self.lp.clickLogin()
            time.sleep(5)
            actual_title = self.driver.title
            expected_title = "Dashboard / nopCommerce administration"
            # 1 scenario:- invalid credentials user is able to login and actual title would be equal to expected title
            # so in this case expected column would be fail but in actual it is pass
            # 2 scenario:- with valid credentials user is not able to login
            # in below case with valid credentials expected == actual title, and status in excel would be equivalent to pass
            # also if we have a list which contains values which stores the results as pass when all negative/positive
            # combinations are tried list=[pass,pass,pass] ; expected=[pass,fail ,fail]. suppose if any value is fail in list
            # than whole list is fail, so every value should be pass in list
            # case1: valid credentials
            if actual_title == expected_title:
                if self.status == "pass":
                    self.logger.info("******passed_1*********")
                    self.lp.clickLogout()
                    list_status.append("pass")
                # case2:-with invalid credentials user is able to login
                elif self.status == "fail":  # when titles are matching but according to excel it should have failed
                    self.logger.info("************failed_1***********")
                    self.lp.clickLogout()
                    list_status.append("fail")
            # case3:- with valid credentials user is not able to login
            elif actual_title != expected_title:
                if self.status == "pass":  # here self.status means expected in excel matches pass value
                    self.logger.info("****failed_2*****")
                    list_status.append("fail")
                # case4:- with invalid credentials user is not able to login
                elif self.status == "fail":
                    self.logger.info("*****passed_2******")
                    list_status.append("pass")
        # The test passes only if every row behaved as its expected status.
        if "fail" not in list_status:
            self.logger.info("***********passed_3*************")
            self.driver.close()
            assert True
        else:
            self.logger.info("*****failed_3***********")
            self.driver.close()
            assert False
        self.logger.info("***************END OF DATA DRIVEN TESTING**********")
        self.logger.info("*************end of tc002************")
# to run pytest -v -s testCases/test_Login_DDT.py
# pytest -v -s -n=1 testCases/test_Login_DDT.py
# pytest -v -s testCases/test_Login_DDT.py --browser chrome
# pytest -v -s -n=1 --html=Reports/report.html testCases/test_Login_DDT.py --browser chrome
| [
"d2bhatt@gmail.com"
] | d2bhatt@gmail.com |
128696c22c9cb7724103465fb75e9782eaa74c75 | 4892c4d6f79bdd22b3aca291b597a766f7a7ef0b | /KeyCode/AcaFinder/AcaFinder/settings.py | 9ed6bfa809239214ce6f00fc3ce29d8c666eaab7 | [
"MIT"
] | permissive | LelandYan/ResearchRelationshipNetwork_KnowledgeGraph | 0ab0776859285f83caf8c39bc818b16630b166b2 | 01119bbd2e02a0e3fbbd2cec4c9a4799c189e78e | refs/heads/main | 2023-06-26T04:19:24.130657 | 2021-07-17T03:47:03 | 2021-07-17T03:47:03 | 344,984,094 | 9 | 0 | MIT | 2021-07-16T13:53:57 | 2021-03-06T02:11:18 | Jupyter Notebook | UTF-8 | Python | false | false | 3,092 | py | """
Django settings for AcaFinder project.
Generated by 'django-admin startproject' using Django 2.1.7.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): '123456' is a trivially guessable key; load it from an
# environment variable before any deployment.
SECRET_KEY = '123456'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# NOTE(review): DEBUG=True together with ALLOWED_HOSTS=['*'] is only safe
# for local development.
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
    'web',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'rest_framework',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    # NOTE(review): CSRF protection is disabled by the commented-out line
    # below -- confirm this is intentional (e.g. a pure-API deployment).
    # 'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'AcaFinder.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'AcaFinder.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
| [
"2721599586@qq.com"
] | 2721599586@qq.com |
eac9dc814f9ff849b9b9964d99f4157b5f4f361e | 3872438b8c6f6b7d1b921feec5fdaaa4d7b71c1e | /service_scripts/pastefs.py | bd015f3bf823d2641b92942e02288df557cdc093 | [] | no_license | h4cklife/SpiderWasp | e3b7c058512f21da40fbe007e42d2b316caf0655 | 77001a4df979c78ae97e60ec9f2ce25b0f8161c4 | refs/heads/master | 2022-12-11T09:13:45.442903 | 2019-10-08T03:35:42 | 2019-10-08T03:35:42 | 212,494,915 | 0 | 0 | null | 2022-12-08T06:41:17 | 2019-10-03T04:10:41 | Python | UTF-8 | Python | false | false | 456 | py | """
PasteFS
"""
import sys
import random
from time import time, sleep
# NOTE(review): random, time and sleep are not referenced below -- confirm
# before removing these imports.
sys.path.insert(0, '../')
from libs.PasteSiteParser import PasteSiteParser
# Scrape configuration for pastefs.com: list pages to poll, the base URLs
# for listings and individual pastes, and the markers used to extract
# paste IDs from the listing HTML.
service = 'Pastefs'
pages = ['/recent.php', '/trends.php']
viewall = "https://www.pastefs.com"
viewpaste = "https://www.pastefs.com/pid/"
tag = "href=\'https://www.pastefs.com/pid/"
tag_end = "\'"
sw = PasteSiteParser(service, pages, viewall, viewpaste, tag, tag_end)
sw.begin()
| [
"="
] | = |
e733855dc262c0e1a75f739d268db0838df5990c | 5bdcdee52da02ade014697cd1e9a9474dccfdf6a | /result_disp_ws.py | f11df8eb8d312973a19c5aee20835cf39dd7623b | [] | no_license | Sapphirine/Cost_efficiency_analysis_of_NBA_players | a2aa975147441bfee830ee40b43bc6213c02cc4a | b246e306beb087d6af272d4f8a0391294d4882fd | refs/heads/master | 2021-01-12T05:47:17.373344 | 2016-12-23T04:59:06 | 2016-12-23T04:59:06 | 77,198,153 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,068 | py | import sys
def calculation():
    """Read player rows from stdin and print salary estimates for the player
    named in sys.argv[1].

    Each input line is stripped of quote/backslash/space noise and split on
    commas; only rows with exactly 10 fields are considered.
    (Python 2 script -- note the print statement at the end.)
    """
    for line in sys.stdin:
        data = line.replace("'","").replace('*','').replace('"','').replace('\\',',').replace(' ','').split(",")
        avg_salary = 0
        max_salary = 0
        if len(data) == 10:
            fullname = data[0]
            if fullname == sys.argv[1]:
                # Offensive/defensive win shares: career averages and
                # single-season bests.
                avg_OWS = float(data[3])
                avg_DWS = float(data[4])
                max_OWS = float(data[6])
                max_DWS = float(data[7])
                seasons = int(data[9])
                # Coefficients presumably come from a linear regression of
                # salary on win shares -- TODO confirm their provenance.
                avg_salary = 1485168.37294 * avg_OWS + 1794189.0838 * avg_DWS + 1842699.5473
                max_salary = 1485168.37294 * max_OWS + 1794189.0838 * max_DWS + 1842699.5473
                int_avg_salary = int(avg_salary)
                int_max_salary = int(max_salary)
                print "\n\nPlayer Name: %s\n\nAssume him to be a current player:\n\nCareer Average Level: $%s\n\nBest Season: $%s\n\nActive Seasons: %s\n\n" % (fullname, int_avg_salary, int_max_salary, seasons)
calculation() | [
"noreply@github.com"
] | Sapphirine.noreply@github.com |
1ba84ead4bb8fc96bc14d6bd612dc84ec7b4768a | 83514f49859020207267e4ce24b9f1d4092b7a4d | /capstone/migrations/0014_profile_photo.py | 6438f7cdc6a412a59867604d402edef64aaed740 | [
"MIT"
] | permissive | josshartmann/Capstone | 9bbaf7c931928b714a44e15f9e7502b274bc279c | 741a56619f6a5d73b4c190c9396108f8ae1ae8a5 | refs/heads/main | 2023-04-21T11:32:40.016183 | 2021-05-18T22:18:39 | 2021-05-18T22:18:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 501 | py | # Generated by Django 3.1.7 on 2021-05-10 18:58
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('capstone', '0013_auto_20210428_2128'),
]
operations = [
migrations.AddField(
model_name='profile',
name='photo',
field=models.URLField(default='https://icons-for-free.com/iconfiles/png/512/free+outline+people+profile+ui+icon-1320196081912311498.png', max_length=300),
),
]
| [
"josshartmann@gmail.com"
] | josshartmann@gmail.com |
a0ad6c532cd261526354cb984c30f722913bc083 | cb7251a262097e5ddf981163a8e012064b085f86 | /unwanted_words_lister.py | edfb3ccdaf4bb4e281a4a5832ec0e2029f0f81c8 | [] | no_license | jsMRSoL/CEM_vocab_lister | 1fa976cfc828fffcd5fbfc952d9600de7d0c3cf6 | c0b3f3e5fb536c543c4516b32d3951604255ece3 | refs/heads/master | 2022-11-15T10:20:12.731479 | 2020-07-10T21:03:13 | 2020-07-10T21:03:13 | 278,732,862 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 451 | py | #!/usr/bin/env python3
import csv
i = 0
unwanted_words = dict()
with open('corpus-data/cleaned1.txt', 'r', encoding="ISO-8859-1") as f:
words_list = csv.DictReader(f, delimiter='\t')
for item in words_list:
if item['Freq'] is not None:
unwanted_words[int(item['Freq'])] = item['Word']
for k in sorted(unwanted_words.keys()):
print(f"Word:\t{unwanted_words[k]}\t\tRank:{k}")
i += 1
if i == 10:
break
| [
"si_pitt@yahoo.com"
] | si_pitt@yahoo.com |
606011c8ab57fa296d72e54135fa3dc2047bf114 | 121f67f8ed2d88a6b8a30a1e1b02127923e42e64 | /sdk/linux/EigerAPI/test/eiger-simulator/setup.py | 39aa75187ff5f3c1f95df0b01acfdc43716643ce | [] | no_license | soleil-ica/Lima-camera-eiger | c8940eb85b26f17793e60b227cfcd50b2907490b | d36efc92cca34a06f4a975fc04ccd97eeb2951a0 | refs/heads/master | 2023-08-14T02:23:53.886342 | 2023-05-10T14:31:29 | 2023-05-10T14:31:29 | 24,136,346 | 3 | 4 | null | 2017-12-12T09:46:42 | 2014-09-17T08:28:33 | C++ | UTF-8 | Python | false | false | 1,747 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""The setup script."""
import sys
from setuptools import setup, find_packages
TESTING = any(x in sys.argv for x in ["test", "pytest"])
requirements = ['fastapi', 'uvicorn', 'bootstrap4', 'aiofiles', 'jinja2',
'pyzmq>=17', 'click', 'h5py', 'lz4']#, 'bitshuffle']
setup_requirements = []
if TESTING:
setup_requirements += ['pytest-runner']
test_requirements = ['pytest', 'pytest-cov']
extras_requirements = {
'client' : ['requests'],
'lima': ['pint', 'prompt_toolkit>=3.0.3']
}
with open('README.md') as f:
description = f.read()
setup(
author="ALBA controls team",
author_email='controls@cells.es',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8'
],
description="ALBA Eiger simulator",
entry_points={
'console_scripts': [
'eiger-simulator=eigersim.server:main',
]
},
install_requires=requirements,
license="GPL",
long_description=description,
long_description_content_type='text/markdown',
include_package_data=True,
keywords='alba, dectris, eiger, simulator',
name='eiger-simulator',
packages=find_packages(),
package_data={
'eigersim': ['static/js/*', 'templates/*']
},
setup_requires=setup_requirements,
test_suite='tests',
tests_require=test_requirements,
python_requires='>=3.7',
extras_require=extras_requirements,
url='https://git.cells.es/controls/eiger-simulator',
version='0.5.0'
)
| [
"langlois@synchrotron-soleil.fr"
] | langlois@synchrotron-soleil.fr |
c713032121ca6cb675597f037881644696f6999c | ad1700714e804dd559204984403baa52c8378efc | /practice/130226/refer.py | 15870fab2478147073d883e4bdf182b8094578bf | [] | no_license | cocagolau/NEXT_13-00_Preschool | e7fb5e59dd4afabea0ab8a70ddb90c999293e888 | 696eac44ef7349e873f9632b848e05ed04316666 | refs/heads/master | 2020-12-24T13:27:59.411641 | 2014-07-07T11:26:54 | 2014-07-07T11:26:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 124 | py | import copy
a = [1,2,[3,4]]
b = copy.deepcopy(a)
print b is a
b.append(100)
print a
print b
b[2][0] = -100
print a
print b | [
"cocagolau@gmail.com"
] | cocagolau@gmail.com |
ab996058b96570c6009f7b8e8304849f7e3b2787 | 169f2f3fe4b7a248b022743800b604fb26483952 | /scripts/test.py | 78a05f9d11a4cc8d65e7d7c314c5422234348320 | [] | no_license | colinbrust/crop_map | 29b5403de9477f13f96cba8c51872160c337e0ba | c72281123119b2af3a1e148b0250e5c251f500eb | refs/heads/master | 2021-06-01T10:34:40.761746 | 2021-02-19T20:53:36 | 2021-02-19T20:53:36 | 141,490,979 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 180 | py | import numpy as np
import glob
import rasterio as rio
import rasterstats
import pandas as pd
import requests
import datetime
import os
from scipy import signal
print("It worked!")
| [
"colin.brust@gmail.com"
] | colin.brust@gmail.com |
55c7e6bc08a4698a9c5c34f7cee388aaddee4fdb | 697c5d66109db02b5271634ddfb4b69c99fa16e8 | /TodoListWeb/todolistweb/todoapp/migrations/0004_auto_20200406_1955.py | 1e33fc06a144b803d44dadd8b11a1bbc3243e752 | [] | no_license | ebookleader/My-Web-Project | 094816c20e9e74a7c8a6dc7394c480b68d3b2b6f | 746b66296cf4f70f87fe049cf35cb21807107a86 | refs/heads/master | 2020-12-09T05:15:07.027078 | 2020-05-21T07:02:56 | 2020-05-21T07:02:56 | 233,201,954 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 370 | py | # Generated by Django 3.0.2 on 2020-04-06 10:55
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('todoapp', '0003_auto_20200406_1751'),
]
operations = [
migrations.RenameField(
model_name='customuser',
old_name='is_active',
new_name='active',
),
]
| [
"hemslje@naver.com"
] | hemslje@naver.com |
9552cab6a71f6334417c0e4337ada4d059242bcc | b6d55370dd3c0d00ffb4bc46df82a0fa105073e8 | /bugs/forms.py | aad5c9a739bcc27d43fa47f2148a5da713f3e0b7 | [] | no_license | DeanFlint/unitracker | 4ad77a5a3c53de4a4544d6fa0161004eccfaff1a | f1129880bbe52896f00ac6c2a91d829903197c21 | refs/heads/master | 2023-01-10T21:35:35.146210 | 2018-06-09T17:16:34 | 2018-06-09T17:16:34 | 133,188,856 | 0 | 2 | null | 2022-12-26T20:14:51 | 2018-05-12T22:49:39 | Python | UTF-8 | Python | false | false | 596 | py | from django import forms
from .models import Bug, BugComment
class CreateBugForm(forms.ModelForm):
class Meta:
model = Bug
fields = ('name', 'desc')
class CreateBugCommentForm(forms.ModelForm):
class Meta:
model = BugComment
fields = ('comment',)
class FilterView(forms.Form):
ORDER_BY_CHOICES = [
('name_az', 'Name: A - Z'),
('name_za', 'Name: Z - A'),
('status_az', 'Status: A - Z'),
('status_za', 'Status: Z - A')
]
order_by = forms.ChoiceField(choices=ORDER_BY_CHOICES, label='')
| [
"dean.p.flint@gmail.com"
] | dean.p.flint@gmail.com |
610c20802c5e1b878f3f325fb649767495a05dc7 | 0568b01e7575127181f0aa69f98d4e3fb69b5aab | /database.py | 22dda3cdaa6d2a57eea494b97efc419531ec14a8 | [] | no_license | bigchriskelly/barpi | 071ef3375329598389f32db4d9c683dda8797e1a | 05ae2e5748f0d2c638270d5b3cb4a548d6ec19eb | refs/heads/master | 2023-03-11T22:30:42.363401 | 2021-03-01T14:20:30 | 2021-03-01T14:20:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,953 | py | #!/usr/bin/python
#--------------------------------------
#
#
#
#
#--------------------------------------
def get_is_crash():
mydb = connect_db()
sql = "SELECT isCrash FROM keg where isCrash is not null order by time desc limit 1"
mycursor = mydb.cursor()
mycursor.execute(sql)
records = mycursor.fetchone()
print(records[0])
return records[0]
def update_temp(sensor_index, temp):
import mysql.connector
mydb = connect_db()
mycursor = mydb.cursor()
sql = "INSERT INTO measurements (keg, temp, time) VALUES (" + str(sensor_index + 1) + ", " + str(temp) + ", now())"
mycursor.execute(sql)
mydb.commit()
print(mycursor.rowcount, "record inserted.")
def connect_db():
import mysql.connector
mydb = mysql.connector.connect(
host="localhost",
user="root",
password="monday",
database="ratarsedberrypi"
)
return mydb
def get_goal_temp():
mydb = connect_db()
mycursor = mydb.cursor()
sql = "SELECT goalTemp from keg where goalTemp is not null order by time desc limit 1"
mycursor.execute(sql)
records = mycursor.fetchone()
return records[0]
def get_wiggle_temp():
mydb = connect_db()
mycursor =mydb.cursor()
sql = "select wiggleTemp from keg where wiggleTemp is not null order by time desc limit 1"
mycursor.execute(sql)
records = mycursor.fetchone()
return records[0]
def get_keg_temp(keg):
mydb = connect_db()
mycursor = mydb.cursor()
sql = "select temp from measurements where temp is not null and keg = " + str(keg) + " order by time desc limit 1"
mycursor.execute(sql)
records = mycursor.fetchone()
return records[0]
def update_relay_DB(state):
mydb = connect_db()
mycursor = mydb.cursor()
sql = "insert into ratarsedberrypi.relay (time, position) values (now(), " + str(state) + ")"
print(sql)
mycursor.execute(sql)
mydb.commit()
| [
"bigchriskelly@hotmail.com"
] | bigchriskelly@hotmail.com |
0ac9abcfcea2a8cbc8e8767ba40564733a3cfde0 | 5cb45be18d88aeb02f35eedb78f57764b56c6d09 | /test/functional/resendwallettransactions.py | 96f487bd0a0375e7293d88c757233c2c1b8d3a18 | [
"MIT"
] | permissive | bellaj/Rcoin | 1db7a44d952845a4ebfb989f30ace329c64728d4 | bbcf2f528a668ae9b1642effc8644ef8dca0f1a2 | refs/heads/master | 2021-07-13T02:34:42.143678 | 2017-10-13T23:06:08 | 2017-10-13T23:06:08 | 106,748,183 | 0 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,333 | py | #!/usr/bin/env python3
# Copyright (c) 2017 The Readercoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test resendwallettransactions RPC."""
from test_framework.test_framework import ReadercoinTestFramework
from test_framework.util import assert_equal, assert_raises_jsonrpc
class ResendWalletTransactionsTest(ReadercoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.extra_args = [['--walletbroadcast=false']]
def run_test(self):
# Should raise RPC_WALLET_ERROR (-4) if walletbroadcast is disabled.
assert_raises_jsonrpc(-4, "Error: Wallet transaction broadcasting is disabled with -walletbroadcast", self.nodes[0].resendwallettransactions)
# Should return an empty array if there aren't unconfirmed wallet transactions.
self.stop_node(0)
self.start_node(0, extra_args=[])
assert_equal(self.nodes[0].resendwallettransactions(), [])
# Should return an array with the unconfirmed wallet transaction.
txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)
assert_equal(self.nodes[0].resendwallettransactions(), [txid])
if __name__ == '__main__':
ResendWalletTransactionsTest().main()
| [
"bellaj.badr@gmail.com"
] | bellaj.badr@gmail.com |
ee639418ec0a7270beb2a0393f294f8661560709 | 2a1b9c3ca8a48c283c5261ff9c283a8172de750f | /test/contrib/streaming_test.py | 7210de28a42e3625e13fa39ce3d234d3b0a83dbe | [
"Apache-2.0"
] | permissive | edx/luigi | e0c08b28ea9e2ba32d3f5dc9d6b7aa4b3024fd94 | 5c135bd2b5d8a9027640a5eaa5698ec8e3c06ed5 | refs/heads/master | 2023-01-20T14:39:38.656382 | 2022-03-29T18:57:59 | 2022-03-29T18:57:59 | 23,933,599 | 5 | 10 | Apache-2.0 | 2023-01-06T13:14:56 | 2014-09-11T19:59:48 | Python | UTF-8 | Python | false | false | 3,086 | py | import mock
import os
import unittest
from luigi import Parameter
from luigi.contrib import mrrunner
from luigi.contrib.hadoop import HadoopJobRunner, JobTask
from luigi.contrib.hdfs import HdfsTarget
class MockStreamingJob(JobTask):
package_binary = Parameter(default=None)
def output(self):
rv = mock.MagicMock(HdfsTarget)
rv.path = 'test_path'
return rv
class MockStreamingJobWithExtraArguments(JobTask):
package_binary = Parameter(default=None)
def extra_streaming_arguments(self):
return [('myargument', '/path/to/coolvalue')]
def extra_archives(self):
return ['/path/to/myarchive.zip', '/path/to/other_archive.zip']
def output(self):
rv = mock.MagicMock(HdfsTarget)
rv.path = 'test_path'
return rv
class StreamingRunTest(unittest.TestCase):
@mock.patch('luigi.contrib.hadoop.shutil')
@mock.patch('luigi.contrib.hadoop.run_and_track_hadoop_job')
def test_package_binary_run(self, rath_job, shutil):
job_runner = HadoopJobRunner('jar_path', end_job_with_atomic_move_dir=False)
job_runner.run_job(MockStreamingJob(package_binary='test_bin.pex'))
self.assertEqual(1, shutil.copy.call_count)
pex_src, pex_dest = shutil.copy.call_args[0]
runner_fname = os.path.basename(pex_dest)
self.assertEqual('test_bin.pex', pex_src)
self.assertEqual('mrrunner.pex', runner_fname)
self.assertEqual(1, rath_job.call_count)
mr_args = rath_job.call_args[0][0]
mr_args_pairs = zip(mr_args, mr_args[1:])
self.assertIn(('-mapper', 'python mrrunner.pex map'), mr_args_pairs)
self.assertIn(('-file', pex_dest), mr_args_pairs)
@mock.patch('luigi.contrib.hadoop.create_packages_archive')
@mock.patch('luigi.contrib.hadoop.run_and_track_hadoop_job')
def test_standard_run(self, rath_job, cpa):
job_runner = HadoopJobRunner('jar_path', end_job_with_atomic_move_dir=False)
job_runner.run_job(MockStreamingJob())
self.assertEqual(1, cpa.call_count)
self.assertEqual(1, rath_job.call_count)
mr_args = rath_job.call_args[0][0]
mr_args_pairs = zip(mr_args, mr_args[1:])
self.assertIn(('-mapper', 'python mrrunner.py map'), mr_args_pairs)
self.assertIn(('-file', mrrunner.__file__.rstrip('c')), mr_args_pairs)
@mock.patch('luigi.contrib.hadoop.create_packages_archive')
@mock.patch('luigi.contrib.hadoop.run_and_track_hadoop_job')
def test_run_with_extra_arguments(self, rath_job, cpa):
job_runner = HadoopJobRunner('jar_path', end_job_with_atomic_move_dir=False)
job_runner.run_job(MockStreamingJobWithExtraArguments())
self.assertEqual(1, cpa.call_count)
self.assertEqual(1, rath_job.call_count)
mr_args = rath_job.call_args[0][0]
mr_args_pairs = list(zip(mr_args, mr_args[1:]))
self.assertIn(('-myargument', '/path/to/coolvalue'), mr_args_pairs)
self.assertIn(('-archives', '/path/to/myarchive.zip,/path/to/other_archive.zip'), mr_args_pairs)
| [
"miffoljud@gmail.com"
] | miffoljud@gmail.com |
2546b5eaaf77600c9038b04ba61738f774d52356 | 9da8754002fa402ad8e6f25659978bd269bbcec8 | /src/10A/test_cdf_10A.py | 461fddc7a5bf57efe5b29747a8b0acf1993ce6a1 | [
"MIT"
] | permissive | kopok2/CodeforcesSolutionsPython | a00f706dbf368ba0846c8ae86d4145b5dd3e1613 | 35bec0dbcff47765b123b5fe60476014376153df | refs/heads/master | 2023-02-02T03:08:22.097651 | 2020-12-17T22:00:50 | 2020-12-17T22:00:50 | 196,035,812 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 952 | py | import unittest
from unittest.mock import patch
from cdf_10A import CodeforcesTask10ASolution
class TestCDF10A(unittest.TestCase):
def test_10A_acceptance_1(self):
mock_input = ['1 3 2 1 5 10', '0 10']
expected = '30'
with patch('builtins.input', side_effect=mock_input):
Solution = CodeforcesTask10ASolution()
Solution.read_input()
Solution.process_task()
actual = Solution.get_result()
self.assertEquals(expected, actual)
def test_10A_acceptance_2(self):
mock_input = ['2 8 4 2 5 10', '20 30', '50 100']
expected = '570'
with patch('builtins.input', side_effect=mock_input):
Solution = CodeforcesTask10ASolution()
Solution.read_input()
Solution.process_task()
actual = Solution.get_result()
self.assertEquals(expected, actual)
if __name__ == "__main__":
unittest.main()
| [
"oleszek.karol@gmail.com"
] | oleszek.karol@gmail.com |
a3900968b452cb53d2db969d69e41f9433b341ca | bc0a8d69db48160c71d5075f1069ebd4a37e94b4 | /tools/format/formatters/protobuf.py | 9b7a1c4835df27b5aacd925d719baf340d8ba8b5 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | ChrisCummins/format | 4cf3fef91740ce8bc05d03d4cf4b6480731b06c1 | d42b4dafcd7c4b187311473f1b446e0ca1988b12 | refs/heads/master | 2020-12-09T18:32:16.192867 | 2020-01-21T18:43:55 | 2020-01-21T18:43:55 | 233,383,946 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,444 | py | # Copyright 2020 Chris Cummins <chrisc.101@gmail.com>.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module defines a formatter for protocol buffers."""
import os
import pathlib
import subprocess
import sys
from labm8.py import bazelutil
from tools.format.formatters.base import file_formatter
class FormatProtobuf(file_formatter.FileFormatter):
"""Format protocol buffer sources.
This uses prototool's format command to automatically format proto files.
Although the prototool recommends switching to `buf`, buf does not yet have
a formatter, see: https://buf.build/docs/lint-checkers#formatting.
Currently I also run a pass of prototool's lint command to enforce linting
rules. In the future I would like to switch to buf's linter as this seems to
have a nicer and broader rule set, but I'm deferring this task for now.
"""
assumed_filename = "input.proto"
def __init__(self, *args, **kwargs):
super(FormatProtobuf, self).__init__(*args, **kwargs)
self.prototool = bazelutil.DataPath(
f"prototool_{sys.platform}/file/prototool"
)
# Make a local cache for prototool, since otherwise it will try to write to
# $HOME.
self.prototool_cache = self.cache_path / "prototool"
self.prototool_cache.mkdir(exist_ok=True)
def RunOne(self, path: pathlib.Path) -> None:
# Run prototool in the same directory as the proto file being formatted.
previous_wd = os.getcwd()
os.chdir(path.parent)
try:
self._Exec(
[
self.prototool,
"--cache-path",
self.prototool_cache,
'--config-data={"lint": {"group": "google"}}',
"lint",
path.name,
]
)
self._Exec(
[
self.prototool,
"--cache-path",
self.prototool_cache,
"format",
"-w",
"-f",
path.name,
]
)
finally:
os.chdir(previous_wd)
| [
"chrisc.101@gmail.com"
] | chrisc.101@gmail.com |
a7cc3848892697d46713455a0a9b050f8d23535e | 3efe2059de4c7efd1f58a385656d19098b7efd63 | /deepiu/seq2seq/encoder.py | 162d924c0a43fff4a1ba916edb438716b7d6a6f4 | [] | no_license | yangyaoyunshu/image-caption-ai-challenger2017 | 5d2e82b2f8d70ac6d4eb7a0e70f6b406e551189b | 7f2c556587ea1e5c4583fe3b12b8d40c5a2aa2cc | refs/heads/master | 2021-08-31T22:48:55.886186 | 2017-12-23T07:27:04 | 2017-12-23T07:27:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 656 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# ==============================================================================
# \file encoder.py
# \author chenghuige
# \date 2016-12-23 23:59:26.165659
# \Description
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
flags = tf.app.flags
import gflags as flags
FLAGS = flags.FLAGS
class Encoder(object):
def __init__(self):
self.emb = None
def set_embedding(self, emb):
self.emb = emb | [
"29109317@qq.com"
] | 29109317@qq.com |
b6ec982d63362772815255bf4fe9446d93128b92 | 85abaca853c825ee6b293a26a7b5f7144616a4ae | /hello.py | 8e3b6bda59d590efa2c8589b9d74e8e492d6a64e | [
"MIT"
] | permissive | KeanF/cs3240-labdemo | f4a3a1dc836dae4b02ca93b472afab9386f7fdd9 | d81474ca654e4dc50e5168f776ced9728f4b70e4 | refs/heads/master | 2021-01-19T13:36:15.369454 | 2017-02-20T22:30:14 | 2017-02-20T22:30:14 | 82,425,747 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 360 | py | """
Kean Finucane (kf9zy)
Lab 5 - Git and Github
CS 3240 - Smith
16 February 2017
"""
__author__ = 'Kean Finucane'
from helper import greeting
from helper2 import excited_greeting
def example():
print('This is an example')
if __name__ == '__main__':
greeting('hello')
<<<<<<< HEAD
example()
=======
excited_greeting('hello')
>>>>>>> develop
| [
"kf9zy@virginia.edu"
] | kf9zy@virginia.edu |
299950f696f7f07d440ccf0389360a268c8fb93c | ba9ebe750b6169b290fcb8cdd777420f06a17155 | /application2/app.py | c1bacd0526a78d8820ab5ed5e60c0a40215851c8 | [] | no_license | group2gmca/groupProject | f40279fb85d3d247db9be07fd2ca48a2b5441638 | 65f843bda4c468dad7c29cba755a9af9e577cb68 | refs/heads/master | 2023-05-12T22:55:55.025821 | 2020-06-11T13:01:10 | 2020-06-11T13:01:10 | 254,064,585 | 0 | 1 | null | 2023-05-01T21:24:12 | 2020-04-08T11:09:21 | Python | UTF-8 | Python | false | false | 126 | py | from application import app
if __name__== '__main__':
app.run(port=5001, host='0.0.0.0')
#service 2 runing on port 5001
| [
"zedz65@hotmail.com"
] | zedz65@hotmail.com |
453773ca6cc20013306d3315221015c2d4beb273 | 77ae39a4e38dc53ed50e3943a0049fc4c72af735 | /Leetcode/Reverse_Integer.py | f0792c94e23888b50df22647371be82765788b05 | [
"MIT"
] | permissive | harrifeng/Python-Study | 41ab870a31213d414f08c5753d22e8463bb3f102 | d8158e33392a322830244594405cae7e9d7f6fb4 | refs/heads/master | 2021-01-18T10:48:23.215569 | 2016-02-04T02:06:22 | 2016-02-04T02:06:22 | 51,045,556 | 1 | 0 | null | 2016-02-04T02:05:39 | 2016-02-04T02:05:38 | null | UTF-8 | Python | false | false | 930 | py | """
Reverse digits of an integer.
Example1: x = 123, return 321
Example2: x = -123, return -321
click to show spoilers.
Have you thought about this?
Here are some good questions to ask before coding. Bonus points for you if you have already thought through this!
If the integer's last digit is 0, what should the output be? ie, cases such as 10, 100.
Did you notice that the reversed integer might overflow? Assume the input is a 32-bit integer, then the reverse of 1000000003 overflows. How should you handle such cases?
Throw an exception? Good, but what if throwing an exception is not an option? You would then have to re-design the function (ie, add an extra parameter).
"""
class Solution:
# @return an integer
def reverse(self, x):
if x < 0:
return (-1) * self.reverse( (-1) * x)
res = 0
while x > 0:
res = res*10 + x%10
x /= 10
return res
| [
"cyandterry@hotmail.com"
] | cyandterry@hotmail.com |
3a1172fe783516fa01b2376d351a784d388be5ce | 5525efd4158915d828bb37736583d1e3eaed1881 | /tests/test_p4_exception_handling.py | 360fbb2221ca7773c2fc9493da286c6b9137dbb7 | [
"BSD-2-Clause"
] | permissive | P1119r1m/Universum | 8388f2ecc8019a9bf514fcf6561a27f928d5e826 | 71c9494f59dbac58a378d29eb31f8724964c8067 | refs/heads/master | 2023-08-02T11:03:16.933454 | 2021-08-20T11:02:37 | 2021-08-20T11:02:37 | 398,252,781 | 0 | 0 | BSD-2-Clause | 2021-10-01T15:49:07 | 2021-08-20T11:24:08 | null | UTF-8 | Python | false | false | 4,724 | py | # pylint: disable = redefined-outer-name
import pytest
from universum import __main__
from .perforce_utils import P4Environment
@pytest.fixture()
def perforce_environment(perforce_workspace, tmpdir):
yield P4Environment(perforce_workspace, tmpdir, test_type="main")
def test_p4_forbidden_local_revert(perforce_environment, stdout_checker):
p4 = perforce_environment.p4
p4_file = perforce_environment.repo_file
config = """
from universum.configuration_support import Configuration
configs = Configuration([dict(name="Restrict changes", command=["chmod", "-R", "555", "."]),
dict(name="Check", command=["ls", "-la"])])
"""
p4.run_edit(perforce_environment.depot)
p4_file.write(config)
change = p4.fetch_change()
change["Description"] = "CL for shelving"
shelve_cl = p4.save_change(change)[0].split()[1]
p4.run_shelve("-fc", shelve_cl)
settings = perforce_environment.settings
settings.PerforceMainVcs.shelve_cls = [shelve_cl]
settings.Launcher.config_path = p4_file.basename
result = __main__.run(settings)
# Clean up the directory at once to make sure it doesn't remain non-writable even if some assert fails
perforce_environment.temp_dir.chmod(0o0777, rec=1)
perforce_environment.temp_dir.remove(rec=1)
assert result == 0
stdout_checker.assert_has_calls_with_param("[Errno 13] Permission denied")
# make sure there are no pending CLs in the workspace
assert not p4.run_changes("-c", perforce_environment.client_name, "-s", "pending")
# make sure there are no pending changes in default CL
assert not p4.run_opened("-C", perforce_environment.client_name)
def test_p4_print_exception_before_run(perforce_environment, stdout_checker):
p4 = perforce_environment.p4
client = p4.fetch_client(perforce_environment.client_name)
client["Options"] = "noallwrite noclobber nocompress locked nomodtime normdir"
p4.save_client(client)
settings = perforce_environment.settings
result = __main__.run(settings)
# Update client at once to make sure it doesn't remain locked even if some assert fails
client = p4.fetch_client(perforce_environment.client_name)
client["Options"] = "noallwrite noclobber nocompress unlocked nomodtime normdir"
p4.save_client(client)
assert result != 0
stdout_checker.assert_has_calls_with_param(
"Errors during command execution( \"p4 client -d {}\" )".format(perforce_environment.client_name))
def test_p4_print_exception_in_finalize(perforce_environment, stdout_checker):
p4 = perforce_environment.p4
client = p4.fetch_client(perforce_environment.client_name)
client["Options"] = "noallwrite noclobber nocompress locked nomodtime normdir"
p4.save_client(client)
settings = perforce_environment.settings
settings.Main.finalize_only = True
result = __main__.run(settings)
# Update client at once to make sure it doesn't remain locked even if some assert fails
client = p4.fetch_client(perforce_environment.client_name)
client["Options"] = "noallwrite noclobber nocompress unlocked nomodtime normdir"
p4.save_client(client)
assert result == 0
stdout_checker.assert_has_calls_with_param(
"Errors during command execution( \"p4 client -d {}\" )".format(perforce_environment.client_name))
stdout_checker.assert_has_calls_with_param("[Errno 2] No such file or directory")
@pytest.mark.parametrize('cl_list', [["132,456"], ["@123,@456"], ["//depot/...@,//depot2/...@"],
["//depot/...,//depot2/..."], ["132", "456"], ["@123", "4@456"],
["//depot/...@", "//depot2/...@"], ["//depot/...", "//depot2/..."]])
def test_p4_print_exception_in_sync(perforce_environment, stdout_checker, cl_list):
settings = perforce_environment.settings
settings.PerforceMainVcs.sync_cls = cl_list
result = __main__.run(settings)
assert result == 1
text = f"Something went wrong when processing sync CL parameter ('{str(cl_list)}')"
stdout_checker.assert_has_calls_with_param(text)
def test_p4_print_exception_wrong_shelve(perforce_environment, stdout_checker):
cl = perforce_environment.make_a_change()
settings = perforce_environment.settings
settings.PerforceMainVcs.shelve_cls = [cl]
result = __main__.run(settings)
# This is not the 'already committed' case of Swarm review, so it actually should fail
assert result == 1
stdout_checker.assert_has_calls_with_param(
"Errors during command execution( \"p4 unshelve -s {} -f\" )".format(cl))
stdout_checker.assert_has_calls_with_param(f"[Error]: 'Change {cl} is already committed.'")
| [
"noreply@github.com"
] | P1119r1m.noreply@github.com |
9d284ac3445eb8e2204d34c0fa3b8caaac0fe026 | e164a9243216317f5fd3c4bf87c6bb38b28e9cdb | /Word Reversed Checker/WordReversedChecker.py | d3de3a37b309d3c68b6601260b1384511023ab79 | [] | no_license | RobSweny/Python-Java-Projects | 46d68b2a2c85ea57e4682861abb324bb2e3a98b7 | 469aa0eb8b54a3c22486688105ea7892c3d17c0a | refs/heads/master | 2022-12-27T00:23:27.536642 | 2020-10-11T23:09:12 | 2020-10-11T23:09:12 | 296,970,160 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 352 | py | print("Check if words are reversed")
user_choice = input("Enter your first word: ")
user_second_choice = input("Enter your first word: ")
#Reversing the words
user_reversed = user_choice[::-1]
if user_reversed == user_second_choice:
print("These words are the reverse of the other")
else:
print("These are not reversed words of each other") | [
"Robsweny@gmail.com"
] | Robsweny@gmail.com |
9eafbc8fd21806248ceec01d8071e6dce389e8c5 | ef007108068d732a374d6a145bc3cd18d91950ef | /lib/hourglassNet.py | 02586c733bb6ec1ee5bbc88f1565f2aaf346348d | [] | no_license | hyunyongjeon/rc-pda | b7183ad5c20c4d95658442af74198f8cef3e1d6a | de993f9ff21357af64308e42c57197e8c7307d89 | refs/heads/master | 2023-06-18T16:00:03.056218 | 2021-07-15T23:51:01 | 2021-07-15T23:51:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,857 | py | """
Multi-Scale Guided Cascade Hourglass Network
Adapted from: https://github.com/anglixjtu/msg_chn_wacv20
"""
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
import matplotlib.pyplot as plt
class DepthEncoder(nn.Module):
def __init__(self, in_layers, layers, filter_size):
super(DepthEncoder, self).__init__()
padding = int((filter_size - 1) / 2)
self.init = nn.Sequential(nn.Conv2d(in_layers, layers, filter_size, stride=1, padding=padding),
nn.ReLU(),
nn.Conv2d(layers, layers, filter_size, stride=1, padding=padding))
self.enc1 = nn.Sequential(nn.ReLU(),
nn.Conv2d(layers, layers, filter_size, stride=2, padding=padding),
nn.ReLU(),
nn.Conv2d(layers, layers, filter_size, stride=1, padding=padding),
)
self.enc2 = nn.Sequential(nn.ReLU(),
nn.Conv2d(layers, layers, filter_size, stride=2, padding=padding),
nn.ReLU(),
nn.Conv2d(layers, layers, filter_size, stride=1, padding=padding),
)
# Init Weights
for m in self.modules():
if isinstance(m, nn.Sequential):
for p in m:
if isinstance(p, nn.Conv2d) or isinstance(p, nn.ConvTranspose2d):
nn.init.xavier_normal_(p.weight)
nn.init.constant_(p.bias, 0.01)
def forward(self, input, scale=2, pre_x2=None, pre_x3=None, pre_x4=None):
### input
x0 = self.init(input)
if pre_x4 is not None:
x0 = x0 + F.interpolate(pre_x4, scale_factor=scale, mode='bilinear', align_corners=True)
x1 = self.enc1(x0) # 1/2 input size
if pre_x3 is not None: # newly added skip connection
x1 = x1 + F.interpolate(pre_x3, scale_factor=scale, mode='bilinear', align_corners=True)
x2 = self.enc2(x1) # 1/4 input size
if pre_x2 is not None: # newly added skip connection
x2 = x2 + F.interpolate(pre_x2, scale_factor=scale, mode='bilinear', align_corners=True)
return x0, x1, x2
class RGBEncoder(nn.Module):
def __init__(self, in_layers, layers, filter_size):
super(RGBEncoder, self).__init__()
padding = int((filter_size - 1) / 2)
self.init = nn.Sequential(nn.Conv2d(in_layers, layers, filter_size, stride=1, padding=padding),
nn.ReLU(),
nn.Conv2d(layers, layers, filter_size, stride=1, padding=padding))
self.enc1 = nn.Sequential(nn.ReLU(),
nn.Conv2d(layers, layers, filter_size, stride=2, padding=padding),
nn.ReLU(),
nn.Conv2d(layers, layers, filter_size, stride=1, padding=padding), )
self.enc2 = nn.Sequential(nn.ReLU(),
nn.Conv2d(layers, layers, filter_size, stride=2, padding=padding),
nn.ReLU(),
nn.Conv2d(layers, layers, filter_size, stride=1, padding=padding), )
self.enc3 = nn.Sequential(nn.ReLU(),
nn.Conv2d(layers, layers, filter_size, stride=2, padding=padding),
nn.ReLU(),
nn.Conv2d(layers, layers, filter_size, stride=1, padding=padding), )
self.enc4 = nn.Sequential(nn.ReLU(),
nn.Conv2d(layers, layers, filter_size, stride=2, padding=padding),
nn.ReLU(),
nn.Conv2d(layers, layers, filter_size, stride=1, padding=padding), )
# Init Weights
for m in self.modules():
if isinstance(m, nn.Sequential):
for p in m:
if isinstance(p, nn.Conv2d) or isinstance(p, nn.ConvTranspose2d):
nn.init.xavier_normal_(p.weight)
nn.init.constant_(p.bias, 0.01)
def forward(self, input, scale=2, pre_x=None):
### input
x0 = self.init(input)
if pre_x is not None:
x0 = x0 + F.interpolate(pre_x, scale_factor=scale, mode='bilinear', align_corners=True)
x1 = self.enc1(x0) # 1/2 input size
x2 = self.enc2(x1) # 1/4 input size
x3 = self.enc3(x2) # 1/8 input size
x4 = self.enc4(x3) # 1/16 input size
return x0, x1, x2, x3, x4
class DepthDecoder(nn.Module):
    """Decoder that fuses depth and RGB feature pyramids and predicts a
    single-channel depth map at full resolution.

    `layers` is the combined channel count of the two encoders; each
    pyramid level carries `layers // 2` channels.
    """

    def __init__(self, layers, filter_size):
        super(DepthDecoder, self).__init__()
        width = layers // 2
        pad = (filter_size - 1) // 2

        # Two upsampling stages, each doubling spatial resolution.
        self.dec2 = self._up_stage(width, filter_size, pad)
        self.dec1 = self._up_stage(width, filter_size, pad)

        # Prediction head collapsing features to one depth channel.
        self.prdct = nn.Sequential(
            nn.ReLU(),
            nn.Conv2d(width, width, filter_size, stride=1, padding=pad),
            nn.ReLU(),
            nn.Conv2d(width, 1, filter_size, stride=1, padding=pad),
        )

        # Xavier weights / small constant bias for every conv layer.
        for module in self.modules():
            if not isinstance(module, nn.Sequential):
                continue
            for layer in module:
                if isinstance(layer, (nn.Conv2d, nn.ConvTranspose2d)):
                    nn.init.xavier_normal_(layer.weight)
                    nn.init.constant_(layer.bias, 0.01)

    @staticmethod
    def _up_stage(channels, filter_size, pad):
        """One decoder stage: ReLU -> stride-2 transposed conv -> ReLU -> conv."""
        return nn.Sequential(
            nn.ReLU(),
            nn.ConvTranspose2d(channels, channels, filter_size, stride=2,
                               padding=pad, output_padding=pad),
            nn.ReLU(),
            nn.Conv2d(channels, channels, filter_size, stride=1, padding=pad),
        )

    def forward(self, pre_dx, pre_cx):
        """Fuse depth features `pre_dx` and RGB features `pre_cx`
        (indexed [0]=1/1, [1]=1/2, [2]=1/4 resolution) and decode.

        Returns the fused 1/4-resolution features, the two decoded feature
        maps, and the predicted depth map.
        """
        # Element-wise fusion of the two modalities at each scale.
        x0 = pre_dx[0] + pre_cx[0]
        x1 = pre_dx[1] + pre_cx[1]
        x2 = pre_dx[2] + pre_cx[2]

        x3 = self.dec2(x2)        # 1/4 -> 1/2 input size
        x4 = self.dec1(x1 + x3)   # 1/2 -> 1/1 input size
        output_d = self.prdct(x4 + x0)
        return x2, x3, x4, output_d
class network(nn.Module):
    """Coarse-to-fine depth completion network.

    Runs three depth encoder/decoder stages at 1/4, 1/2 and full
    resolution, each guided by the matching levels of a shared RGB
    encoder, and refines the previous stage's upsampled prediction at
    every step.
    """

    def __init__(self, rd_layers):
        super(network, self).__init__()
        denc_layers = 32
        cenc_layers = 32
        ddcd_layers = denc_layers + cenc_layers

        # Shared RGB guidance encoder.
        self.rgb_encoder = RGBEncoder(3, cenc_layers, 3)

        # Stage 1 (1/4 res) consumes only sparse depth; stages 2 and 3 take
        # one extra input channel: the previous stage's upsampled prediction.
        self.depth_encoder1 = DepthEncoder(rd_layers, denc_layers, 3)
        self.depth_decoder1 = DepthDecoder(ddcd_layers, 3)
        self.depth_encoder2 = DepthEncoder(rd_layers + 1, denc_layers, 3)
        self.depth_decoder2 = DepthDecoder(ddcd_layers, 3)
        self.depth_encoder3 = DepthEncoder(rd_layers + 1, denc_layers, 3)
        self.depth_decoder3 = DepthDecoder(ddcd_layers, 3)

    def forward(self, input_d, input_rgb):
        # Validity mask of the sparse depth input (nonzero pixels).
        valid = (input_d > 0).float()
        rgb_feats = self.rgb_encoder(input_rgb)

        # --- Stage 1: 1/4 resolution ------------------------------------
        # Average only over valid pixels; epsilon guards empty windows.
        d_quarter = F.avg_pool2d(input_d, 4, 4) / (F.avg_pool2d(valid, 4, 4) + 0.0001)
        enc14 = self.depth_encoder1(d_quarter)
        dcd14 = self.depth_decoder1(enc14, rgb_feats[2:5])

        # --- Stage 2: 1/2 resolution ------------------------------------
        d_half = F.avg_pool2d(input_d, 2, 2) / (F.avg_pool2d(valid, 2, 2) + 0.0001)
        pred_half = F.interpolate(dcd14[3], scale_factor=2, mode='bilinear', align_corners=True)
        enc12 = self.depth_encoder2(torch.cat((d_half, pred_half), 1),
                                    2, dcd14[0], dcd14[1], dcd14[2])
        dcd12 = self.depth_decoder2(enc12, rgb_feats[1:4])

        # --- Stage 3: full resolution -----------------------------------
        pred_full = F.interpolate(dcd12[3] + pred_half, scale_factor=2,
                                  mode='bilinear', align_corners=True)
        enc11 = self.depth_encoder3(torch.cat((input_d, pred_full), 1),
                                    2, dcd12[0], dcd12[1], dcd12[2])
        dcd11 = self.depth_decoder3(enc11, rgb_feats[0:3])

        output_d11 = dcd11[3] + pred_full   # final refined depth
        output_d12 = pred_full              # stage-2 prediction, upsampled to 1/1
        output_d14 = F.interpolate(dcd14[3], scale_factor=4, mode='bilinear', align_corners=True)
        return output_d11, output_d12, output_d14,
| [
"longyunf@msu.edu"
] | longyunf@msu.edu |
d0a0e1d386d0d7d9a30895facf80e2de9e6d95cd | a838d4bed14d5df5314000b41f8318c4ebe0974e | /sdk/digitaltwins/azure-mgmt-digitaltwins/azure/mgmt/digitaltwins/v2020_12_01/aio/operations/_operations.py | 986d51c8aa369afd9f60dbb2be3edfa6bda5d4a5 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | scbedd/azure-sdk-for-python | ee7cbd6a8725ddd4a6edfde5f40a2a589808daea | cc8bdfceb23e5ae9f78323edc2a4e66e348bb17a | refs/heads/master | 2023-09-01T08:38:56.188954 | 2021-06-17T22:52:28 | 2021-06-17T22:52:28 | 159,568,218 | 2 | 0 | MIT | 2019-08-11T21:16:01 | 2018-11-28T21:34:49 | Python | UTF-8 | Python | false | false | 4,816 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
# Generic return type of the optional per-call `cls` response-transform hook.
T = TypeVar('T')
# Signature of the `cls` hook: (pipeline response, deserialized body,
# response headers) -> transformed result.
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class Operations:
    """Operations async operations.
    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.
    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.digitaltwins.v2020_12_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    models = _models
    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    def list(
        self,
        **kwargs
    ) -> AsyncIterable["_models.OperationListResult"]:
        """Lists all of the available DigitalTwins service REST API operations.
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either OperationListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.digitaltwins.v2020_12_01.models.OperationListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        # Optional caller-supplied hook that post-processes each deserialized page.
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.OperationListResult"]
        # HTTP status codes mapped to typed SDK exceptions; callers may extend
        # or override the mapping via the 'error_map' keyword argument.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-12-01"
        accept = "application/json"
        def prepare_request(next_link=None):
            # Build the GET request for one page: the first page targets the
            # operation URL plus the api-version query; subsequent pages use the
            # service-provided next_link verbatim (it already embeds the query).
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        async def extract_data(pipeline_response):
            # Deserialize one page into (continuation link or None, items).
            deserialized = self._deserialize('OperationListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            # Fetch one page; map/raise a typed ARM error for any non-200 status.
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
            return pipeline_response
        # AsyncItemPaged lazily drives get_next/extract_data as the caller iterates.
        return AsyncItemPaged(
            get_next, extract_data
        )
    # URL template consumed by prepare_request for the first page.
    list.metadata = {'url': '/providers/Microsoft.DigitalTwins/operations'}  # type: ignore
| [
"noreply@github.com"
] | scbedd.noreply@github.com |
72b3ddb510aad5cdeb62cbd6a7671f1c93100309 | fd7b34b6f4261b0e81961594f38338e8e2c1a4cc | /src/command_modules/azure-cli-servicebus/azure/cli/command_modules/servicebus/__init__.py | 3af10ffe66ac3b5dbf3538ada2ca92ebecd3f4ad | [
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] | permissive | WangYeJian/azure-cli-test | 3158da9f5dfb4321e3dfef3eed8cd213abb78dc7 | decdaa80a2d99a1a5753ed6f620d3cea5947bada | refs/heads/master | 2023-01-13T05:51:48.247079 | 2019-06-27T01:35:24 | 2019-06-27T01:35:24 | 194,001,154 | 0 | 0 | MIT | 2022-12-27T15:34:45 | 2019-06-27T01:25:56 | Python | UTF-8 | Python | false | false | 1,829 | py | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# pylint: disable=unused-import
# pylint: disable=line-too-long
from azure.cli.core import AzCommandsLoader
from azure.cli.command_modules.servicebus._help import helps
class ServicebusCommandsLoader(AzCommandsLoader):
    """Command loader for the `az servicebus` command group."""

    def __init__(self, cli_ctx=None):
        from azure.cli.core import ModExtensionSuppress
        from azure.cli.core.commands import CliCommandType

        # Custom command implementations live in this module's `custom` file.
        custom_type = CliCommandType(
            operations_tmpl='azure.cli.command_modules.servicebus.custom#{}')
        # The old standalone 'servicebus' extension is superseded by these
        # built-in commands: suppress it and recommend its removal.
        suppress = ModExtensionSuppress(
            __name__, 'servicebus', '0.0.1',
            reason='These commands are now in the CLI.',
            recommend_remove=True)
        super(ServicebusCommandsLoader, self).__init__(
            cli_ctx=cli_ctx,
            custom_command_type=custom_type,
            min_profile='2017-03-10-profile',
            suppress_extension=suppress)

    def load_command_table(self, args):
        """Populate and return the Service Bus command table."""
        from azure.cli.command_modules.servicebus.commands import load_command_table
        load_command_table(self, args)
        return self.command_table

    def load_arguments(self, command):
        """Register argument definitions for Service Bus commands."""
        from azure.cli.command_modules.servicebus._params import load_arguments_sb
        load_arguments_sb(self, command)


# Entry point discovered by the Azure CLI module loader.
COMMAND_LOADER_CLS = ServicebusCommandsLoader
"ericw2@wicresoft.com"
] | ericw2@wicresoft.com |
b510d226ad2c88a37acf52124ecbfeb1dcf9b6b1 | a31b879c0874e209095893b615620b207373139f | /main_app/migrations/0001_initial.py | 4aea2f993bd88fd24290cf8d17e77c8df77d94c8 | [] | no_license | sean-yates/Auto-Jokester | b612a479ae6f4ffe051547f1bb56b4a4cc27608d | 7187ad8009eb214c2843bff69d43f351a3034d56 | refs/heads/master | 2023-04-23T22:47:04.466548 | 2021-05-11T01:47:54 | 2021-05-11T01:47:54 | 361,569,739 | 0 | 2 | null | 2021-05-12T03:25:27 | 2021-04-26T00:20:54 | Python | UTF-8 | Python | false | false | 2,140 | py | # Generated by Django 3.2 on 2021-05-01 19:53
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Initial schema for main_app: creates the Joke and Comment tables.
    initial = True
    dependencies = [
        # Both models link to the user model, so the (swappable) auth user
        # table must exist before this migration runs.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        # Joke is created first because Comment references 'main_app.joke' below.
        migrations.CreateModel(
            name='Joke',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('joke', models.CharField(max_length=10000)),
                # Optional attribution for where the joke came from.
                ('source', models.CharField(blank=True, max_length=50, null=True)),
                # Single-letter codes are stored in the DB; labels shown in forms.
                ('category', models.CharField(choices=[('Y', 'Yo Mama'), ('D', 'Dad'), ('H', 'Chuck Norris'), ('P', 'Pun'), ('C', 'Computer')], default='Y', max_length=50)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('modified_date', models.DateTimeField(auto_now=True)),
                # NOTE(review): 'appproved' is misspelled in the model; fixing it
                # needs a model rename plus a follow-up migration, not an edit here.
                ('appproved', models.BooleanField(default=False)),
                # Author is nullable so jokes survive account deletion (SET_NULL).
                ('createdBy', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
                ('dislikes', models.ManyToManyField(blank=True, related_name='dislikes', to=settings.AUTH_USER_MODEL)),
                ('favorites', models.ManyToManyField(blank=True, related_name='favorites', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Comment',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('text', models.CharField(max_length=4000)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('modified_date', models.DateTimeField(auto_now=True)),
                # Deleting a joke or its author cascades to the comment.
                ('joke', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='main_app.joke')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| [
"seanyates@Seans-MacBook-Pro.local"
] | seanyates@Seans-MacBook-Pro.local |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.