| blob_id (string, len 40) | directory_id (string, len 40) | path (string, len 2–616) | content_id (string, len 40) | detected_licenses (list, len 0–69) | license_type (string, 2 classes) | repo_name (string, len 5–118) | snapshot_id (string, len 40) | revision_id (string, len 40) | branch_name (string, len 4–63) | visit_date (timestamp[us]) | revision_date (timestamp[us]) | committer_date (timestamp[us]) | github_id (int64, 2.91k–686M, nullable) | star_events_count (int64, 0–209k) | fork_events_count (int64, 0–110k) | gha_license_id (string, 23 classes) | gha_event_created_at (timestamp[us]) | gha_created_at (timestamp[us]) | gha_language (string, 213 classes) | src_encoding (string, 30 classes) | language (string, 1 class) | is_vendor (bool) | is_generated (bool) | length_bytes (int64, 2–10.3M) | extension (string, 246 classes) | content (string, len 2–10.3M) | authors (list, len 1) | author_id (string, len 0–212) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0639bb8bfaf5d12027ea12b6ee6bbe9dec7363a0
|
6b7176e32e8e6b105d5ad8b4bda038ad9ae6a281
|
/P25034-zhaojie/week-11/homework.py
|
31a196df26a7d4580903c49e0900fa52b26d02c2
|
[
"Apache-2.0"
] |
permissive
|
xiaohh2016/python-25
|
20c7e0a157c4be5707891d1839644e015b28dbb4
|
8981ba89bfb32754c3f9c881ee8fcaf13332ce51
|
refs/heads/master
| 2021-01-05T18:50:53.838337
| 2020-02-12T08:46:53
| 2020-02-12T08:46:53
| 241,107,053
| 0
| 0
|
Apache-2.0
| 2020-02-17T12:52:31
| 2020-02-17T12:52:31
| null |
UTF-8
|
Python
| false
| false
| 1,910
|
py
|
#!/usr/bin/env python
# encoding:utf-8
# file: homework.py
# Hand-rolled versions of Python's built-in map, zip and filter functions
# yield hasn't been covered in class yet and the syntax is unfamiliar, so keep the implementation simple for now
# Implementation of map
def my_map(*args):
"""文档字符串位置
"""
if len(args) < 2:
# Don't raise an exception for now, just print a message
print('map() requires at least two arguments')
else:
# Checking whether the arguments are iterable is skipped for now
fnc_nme = args[0]
new_tpl = args[1:]
min_len = len(min(new_tpl, key=len))
for idx in range(min_len):
# Execution resumes after the yield on the next iteration; any function containing yield becomes a generator
yield fnc_nme(*[itr[idx] for itr in new_tpl])
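# Quick sanity check (mirrors the tests below): the shortest iterable bounds the output,
# e.g. list(my_map(func1, [3, 2, 3], [6, 5])) == [9, 7]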
# Implementation of zip
def my_zip(*args):
if not len(args):
return tuple()
min_len = len(min(args, key=len))
for idx in range(min_len):
yield tuple(itr[idx] for itr in args)
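# e.g. list(my_zip([1, 2, 3], [4, 5])) == [(1, 4), (2, 5)], matching the built-in zip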
# Implementation of filter
def my_filter(func, itr):
if func is not None:
for it in itr:
if func(it):
yield it
else:
for it in itr:
if it:
yield it
# Test function: addition
def func1(x, y):
return x + y
# Test function: square
def func2(x):
return x ** 2
# Test function: keep numbers greater than 100
def func3(x):
return x > 100
if __name__ == '__main__':
l1 = [3, 2, 3]
l2 = [6, 5]
print(list(my_map(func1, l1, l2)))
print(list(my_zip([1, 2, 3], [4, 5], 'abcdefg')))
print(list(my_filter(func3, [0, 201, 1, 2, 3, 100, 101])))
print(list(my_zip()))
print(list(my_filter(None, [0, 201, 1, 2, 3, 100, 101])))
print('-------- Control group: built-ins --------')
print(list(map(func1, l1, l2)))
print(list(zip([1, 2, 3], [4, 5], 'abcdefg')))
print(list(filter(func3, [0, 201, 1, 2, 3, 100, 101])))
print(list(zip()))
print(list(filter(None, [0, 201, 1, 2, 3, 100, 101])))
|
[
"jasonz666@qq.com"
] |
jasonz666@qq.com
|
e18cb7dd81804a2ba328dc66b22ac4a5eb10f3e6
|
92d79bbe1e94e192e9d4a728f99f6aecea500645
|
/attack/df_attack_2_tab_next.py
|
33f57d3a58533a0b661b278d2c17db343feda980
|
[] |
no_license
|
westzyan/attackWFP
|
e6f61dc7a6636640a298162941c5b7c882c1fc80
|
5e2227308b3ab7be5b607c4d8dddb4871ed56fc4
|
refs/heads/master
| 2023-03-02T19:16:14.846449
| 2021-02-09T18:04:42
| 2021-02-09T18:04:42
| 325,488,478
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,558
|
py
|
from sklearn.metrics import confusion_matrix, precision_recall_fscore_support, classification_report
from Model_DF import DFNet
import random
from keras.utils import np_utils
from keras.optimizers import Adamax
import numpy as np
import os
import tensorflow as tf
import keras
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
keras.backend.tensorflow_backend.set_session(tf.Session(config=config))
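# allow_growth makes TensorFlow allocate GPU memory on demand instead of
# reserving the whole GPU at session creation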
# Load data for non-defended dataset for CW setting
def LoadDataNoDefCW(second):
print("Loading defended dataset for closed-world scenario")
# Point to the directory storing data
# dataset_dir = '../dataset/ClosedWorld/NoDef/'
# dataset_dir = "/media/zyan/软件/张岩备份/PPT/DeepFingerprinting/df-master/dataset/ClosedWorld/NoDef/"
dataset_dir = "/home/thinkst/zyan/real_specified_split/round/second{}/".format(second)
# X represents a sequence of traffic directions
# y represents the sequence of corresponding labels (website labels)
data = np.loadtxt(dataset_dir + "df_tcp_95000_10000_head_math_order.csv", delimiter=",")
print(data)
np.random.shuffle(data)
print(data)
print(len(data))
train_length = int(0.8 * len(data))
valid_length = int(0.1 * len(data))
test_length = len(data) - train_length - valid_length
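# 80/10/10 split into train / validation / test (test takes the remainder)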
train = data[:train_length, :]
valid = data[train_length: train_length + valid_length, :]
test = data[train_length + valid_length:, :]
X_train = train[:, :-1]
y_train = train[:, -1]
X_valid = valid[:, :-1]
y_valid = valid[:, -1]
X_test = test[:, :-1]
y_test = test[:, -1]
print("X: Training data's shape : ", X_train.shape)
print("y: Training data's shape : ", y_train.shape)
print("X: Validation data's shape : ", X_valid.shape)
print("y: Validation data's shape : ", y_valid.shape)
print("X: Testing data's shape : ", X_test.shape)
print("y: Testing data's shape : ", y_test.shape)
#
return X_train, y_train, X_valid, y_valid, X_test, y_test
if __name__ == '__main__':
for second in range(2, 9):
random.seed(0)
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
description = "Training and evaluating DF model for closed-world scenario on non-defended dataset"
print(description)
# Training the DF model
NB_EPOCH = 20 # Number of training epoch
print("Number of Epoch: ", NB_EPOCH)
BATCH_SIZE = 128 # Batch size
VERBOSE = 2 # Output display mode
LENGTH = 10000 # Packet sequence length
OPTIMIZER = Adamax(lr=0.002, beta_1=0.9, beta_2=0.999, epsilon=1e-08, decay=0.0) # Optimizer
NB_CLASSES = 95 # number of outputs = number of classes
INPUT_SHAPE = (LENGTH, 1)
# Data: shuffled and split between train and test sets
print("Loading and preparing data for training, and evaluating the model")
X_train, y_train, X_valid, y_valid, X_test, y_test = LoadDataNoDefCW(second)
# Please refer to the dataset format in readme
# K.set_image_dim_ordering("tf") # tf is tensorflow
# Convert data as float32 type
X_train = X_train.astype('float32')
X_valid = X_valid.astype('float32')
X_test = X_test.astype('float32')
y_train = y_train.astype('float32')
y_valid = y_valid.astype('float32')
y_test = y_test.astype('float32')
# we need a [Length x 1] x n shape as input to the DF CNN (Tensorflow)
X_train = X_train[:, :, np.newaxis]
X_valid = X_valid[:, :, np.newaxis]
X_test = X_test[:, :, np.newaxis]
print(X_train.shape[0], 'train samples')
print(X_valid.shape[0], 'validation samples')
print(X_test.shape[0], 'test samples')
# Convert class vectors to categorical classes matrices
y_train = np_utils.to_categorical(y_train, NB_CLASSES)
y_valid = np_utils.to_categorical(y_valid, NB_CLASSES)
y_test = np_utils.to_categorical(y_test, NB_CLASSES)
# Building and training model
print("Building and training DF model")
model = DFNet.build(input_shape=INPUT_SHAPE, classes=NB_CLASSES)
model.compile(loss="categorical_crossentropy", optimizer=OPTIMIZER, metrics=["accuracy"])
print("Model compiled")
# Start training
history = model.fit(X_train, y_train,
batch_size=BATCH_SIZE, epochs=NB_EPOCH,
verbose=VERBOSE, validation_data=(X_valid, y_valid))
# model.save('my_model_undef_tcp_10000_round2.h5')
# Start evaluating model with testing data
score_test = model.evaluate(X_test, y_test, verbose=VERBOSE)
print("Testing accuracy:", score_test[1])
y_pre = model.predict(X_test)
index_test = np.argmax(y_test, axis=1)
index_pre = np.argmax(y_pre, axis=1)
print(precision_recall_fscore_support(index_test, index_pre, average='macro'))
# Macro-P,Macro-R,Macro-F1
print(precision_recall_fscore_support(index_test, index_pre, average='micro'))
# Micro-P,Micro-R,Micro-F1
print(classification_report(index_test, index_pre))
score = classification_report(index_test, index_pre)
# Confusion matrix and visualization
confmat = confusion_matrix(y_true=index_test, y_pred=index_pre) # compute the confusion matrix
print(confmat)
with open("./overlap_second.txt", 'a') as f:
f.write("second:{} acc:{}\n".format(second, score_test[1]))
f.write(score)
f.close()
|
[
"15639067131@163.com"
] |
15639067131@163.com
|
014cbf61158fb280b11d2f149b026f48d5234c0e
|
2e2a54e30f8c8018fe0d163a5fd4b0d854ef165d
|
/src/gluonts/torch/model/deep_npts/_network.py
|
c29d1935c3d32e884ec124b33fde866e0b55aa92
|
[
"Apache-2.0"
] |
permissive
|
kashif/gluon-ts
|
b742021ca0292ca2885b3b079150f24cdf3e6dec
|
a818f69dc049c1c1d57e09d2ccb8b5f7a0cff656
|
refs/heads/master
| 2023-09-05T00:00:22.861992
| 2023-08-09T15:47:28
| 2023-08-09T15:47:28
| 222,552,468
| 5
| 0
| null | 2019-11-18T21:56:52
| 2019-11-18T21:56:52
| null |
UTF-8
|
Python
| false
| false
| 14,377
|
py
|
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
from functools import partial
from typing import Optional, Callable, List, Union
import torch
from torch import nn
from torch.distributions import (
Categorical,
MixtureSameFamily,
Normal,
)
from gluonts.core.component import validated
from gluonts.torch.distributions import DiscreteDistribution
from .scaling import (
min_max_scaling,
standard_normal_scaling,
)
INPUT_SCALING_MAP = {
"min_max_scaling": partial(min_max_scaling, dim=1, keepdim=True),
"standard_normal_scaling": partial(
standard_normal_scaling, dim=1, keepdim=True
),
}
def init_weights(module: nn.Module, scale: float = 1.0):
if type(module) == nn.Linear:
nn.init.uniform_(module.weight, -scale, scale)
nn.init.zeros_(module.bias)
class FeatureEmbedder(nn.Module):
"""Creates a feature embedding for the static categorical features."""
@validated()
def __init__(
self,
cardinalities: List[int],
embedding_dimensions: List[int],
):
super().__init__()
assert (
len(cardinalities) > 0
), "Length of `cardinalities` list must be greater than zero"
assert len(cardinalities) == len(
embedding_dimensions
), "Length of `embedding_dims` and `embedding_dims` should match"
assert all(
[c > 0 for c in cardinalities]
), "Elements of `cardinalities` should be > 0"
assert all(
[d > 0 for d in embedding_dimensions]
), "Elements of `embedding_dims` should be > 0"
self.embedders = [
torch.nn.Embedding(num_embeddings=card, embedding_dim=dim)
for card, dim in zip(cardinalities, embedding_dimensions)
]
for embedder in self.embedders:
embedder.apply(init_weights)
def forward(self, features: torch.Tensor):
"""
Parameters
----------
features
Input features to the model, shape: (-1, num_features).
Returns
-------
torch.Tensor
Embedding, shape: (-1, sum(self.embedding_dimensions)).
"""
embedded_features = torch.cat(
[
embedder(features[:, i].long())
for i, embedder in enumerate(self.embedders)
],
dim=-1,
)
return embedded_features
class DeepNPTSNetwork(nn.Module):
"""Base class implementing a simple feed-forward neural network that takes
in static and dynamic features and produces `num_hidden_nodes` independent
outputs. These outputs are then used by derived classes to construct the
forecast distribution for a single time step.
Note that the dynamic features are just treated as independent features
without considering their temporal nature.
"""
@validated()
def __init__(
self,
context_length: int,
num_hidden_nodes: List[int],
cardinality: List[int],
embedding_dimension: List[int],
num_time_features: int,
batch_norm: bool = False,
input_scaling: Optional[Union[Callable, str]] = None,
dropout_rate: float = 0.0,
):
super().__init__()
self.context_length = context_length
self.num_hidden_nodes = num_hidden_nodes
self.batch_norm = batch_norm
self.input_scaling = (
INPUT_SCALING_MAP[input_scaling]
if isinstance(input_scaling, str)
else input_scaling
)
self.dropout_rate = dropout_rate
# Embedding for categorical features
self.embedder = FeatureEmbedder(
cardinalities=cardinality, embedding_dimensions=embedding_dimension
)
total_embedding_dim = sum(embedding_dimension)
# We have two target related features: past_target and observed value
# indicator each of length `context_length`.
# Also, +1 for the static real feature.
dimensions = [
context_length * (num_time_features + 2) + total_embedding_dim + 1
] + num_hidden_nodes
modules: List[nn.Module] = []
for in_features, out_features in zip(dimensions[:-1], dimensions[1:]):
modules += [nn.Linear(in_features, out_features), nn.ReLU()]
if self.batch_norm:
modules.append(nn.BatchNorm1d(out_features))
if self.dropout_rate > 0:
modules.append(nn.Dropout(self.dropout_rate))
self.model = nn.Sequential(*modules)
self.model.apply(partial(init_weights, scale=0.07))
# TODO: Handle missing values using the observed value indicator.
def forward(
self,
feat_static_cat: torch.Tensor,
feat_static_real: torch.Tensor,
past_target: torch.Tensor,
past_observed_values: torch.Tensor,
past_time_feat: torch.Tensor,
):
"""
Parameters
----------
feat_static_cat
Shape (-1, num_features).
feat_static_real
Shape (-1, num_features).
past_target
Shape (-1, context_length).
past_observed_values
Shape (-1, context_length).
past_time_feat
Shape (-1, context_length, self.num_time_features).
"""
x = past_target
if self.input_scaling:
loc, scale = self.input_scaling(x)
x_scaled = (x - loc) / scale
else:
x_scaled = x
embedded_cat = self.embedder(feat_static_cat)
static_feat = torch.cat(
(embedded_cat, torch.tensor(feat_static_real)),
dim=1,
)
time_features = torch.cat(
[
x_scaled.unsqueeze(dim=-1),
past_observed_values.unsqueeze(dim=-1),
past_time_feat,
],
dim=-1,
)
features = torch.cat(
[
time_features.reshape(time_features.shape[0], -1),
static_feat,
],
dim=-1,
)
return self.model(features)
class DeepNPTSNetworkDiscrete(DeepNPTSNetwork):
"""
Extends `DeepNPTSNetwork` by implementing the output layer which
converts the outputs from the base network into probabilities of length
`context_length`. These probabilities together with the past values in the
context window constitute the one-step-ahead forecast distribution.
Specifically, the forecast is always one of the values observed in the
context window with the corresponding predicted probability.
Parameters
----------
*args
Arguments to ``DeepNPTSNetwork``.
use_softmax
Flag indicating whether to use softmax or normalization for
converting the outputs of the base network to probabilities.
kwargs
Keyword arguments to ``DeepNPTSNetwork``.
"""
@validated()
def __init__(self, *args, use_softmax: bool = False, **kwargs):
super().__init__(*args, **kwargs)
self.use_softmax = use_softmax
modules: List[nn.Module] = (
[] if self.dropout_rate == 0 else [nn.Dropout(self.dropout_rate)]
)
modules.append(
nn.Linear(self.num_hidden_nodes[-1], self.context_length)
)
self.output_layer = nn.Sequential(*modules)
self.output_layer.apply(init_weights)
def forward(
self,
feat_static_cat: torch.Tensor,
feat_static_real: torch.Tensor,
past_target: torch.Tensor,
past_observed_values: torch.Tensor,
past_time_feat: torch.Tensor,
) -> DiscreteDistribution:
h = super().forward(
feat_static_cat=feat_static_cat,
feat_static_real=feat_static_real,
past_target=past_target,
past_observed_values=past_observed_values,
past_time_feat=past_time_feat,
)
outputs = self.output_layer(h)
probs = (
nn.functional.softmax(outputs, dim=1)
if self.use_softmax
else nn.functional.normalize(
nn.functional.softplus(outputs), p=1, dim=1
)
)
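# In both branches each row of `probs` sums to 1: softmax normalizes directly,
# while the softplus output is L1-normalized (p=1) along dim 1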
return DiscreteDistribution(values=past_target, probs=probs)
class DeepNPTSNetworkSmooth(DeepNPTSNetwork):
"""
Extends `DeepNPTSNetwork` by implementing the output layer which
converts the outputs from the base network into a smoothed mixture
distribution. The components of the mixture are Gaussians centered around
the observations in the context window. The mixing probabilities as well as
the width of the Gaussians are predicted by the network.
This mixture distribution represents the one-step-ahead forecast
distribution. Note that the forecast can contain values not observed in the
context window.
"""
@validated()
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
modules = (
[] if self.dropout_rate == 0 else [nn.Dropout(self.dropout_rate)]
)
modules += [
nn.Linear(self.num_hidden_nodes[-1], self.context_length + 1),
nn.Softplus(),
]
self.output_layer = nn.Sequential(*modules)
self.output_layer.apply(init_weights)
def forward(
self,
feat_static_cat: torch.Tensor,
feat_static_real: torch.Tensor,
past_target: torch.Tensor,
past_observed_values: torch.Tensor,
past_time_feat: torch.Tensor,
) -> MixtureSameFamily:
h = super().forward(
feat_static_cat=feat_static_cat,
feat_static_real=feat_static_real,
past_target=past_target,
past_observed_values=past_observed_values,
past_time_feat=past_time_feat,
)
outputs = self.output_layer(h)
probs = outputs[:, :-1]
kernel_width = outputs[:, -1:]
mix = Categorical(probs)
components = Normal(loc=past_target, scale=kernel_width)
return MixtureSameFamily(
mixture_distribution=mix, component_distribution=components
)
class DeepNPTSMultiStepNetwork(nn.Module):
"""
Implements multi-step prediction given a trained `DeepNPTSNetwork` model
that outputs one-step-ahead forecast distribution.
"""
@validated()
def __init__(
self,
net: DeepNPTSNetwork,
prediction_length: int,
num_parallel_samples: int = 100,
):
super().__init__()
self.net = net
self.prediction_length = prediction_length
self.num_parallel_samples = num_parallel_samples
def forward(
self,
feat_static_cat: torch.Tensor,
feat_static_real: torch.Tensor,
past_target: torch.Tensor,
past_observed_values: torch.Tensor,
past_time_feat: torch.Tensor,
future_time_feat: torch.Tensor,
):
"""Generates samples from the forecast distribution.
Parameters
----------
feat_static_cat
Shape (-1, num_features).
feat_static_real
Shape (-1, num_features).
past_target
Shape (-1, context_length).
past_observed_values
Shape (-1, context_length).
past_time_feat
Shape (-1, context_length, self.num_time_features).
future_time_feat
Shape (-1, prediction_length, self.num_time_features).
Returns
-------
torch.Tensor
Tensor containing samples from the predicted distribution.
Shape is (-1, self.num_parallel_samples, self.prediction_length).
"""
# Blow up the initial `x` by the number of parallel samples required.
# (batch_size * num_parallel_samples, context_length)
past_target = past_target.repeat_interleave(
self.num_parallel_samples, dim=0
)
# Note that gluonts returns empty future_observed_values.
future_observed_values = torch.ones(
(past_observed_values.shape[0], self.prediction_length)
)
observed_values = torch.cat(
[past_observed_values, future_observed_values], dim=1
)
observed_values = observed_values.repeat_interleave(
self.num_parallel_samples, dim=0
)
time_feat = torch.cat([past_time_feat, future_time_feat], dim=1)
time_feat = time_feat.repeat_interleave(
self.num_parallel_samples, dim=0
)
feat_static_cat = feat_static_cat.repeat_interleave(
self.num_parallel_samples, dim=0
)
feat_static_real = feat_static_real.repeat_interleave(
self.num_parallel_samples, dim=0
)
future_samples = []
for t in range(self.prediction_length):
distr = self.net(
feat_static_cat=feat_static_cat,
feat_static_real=feat_static_real,
past_target=past_target,
past_observed_values=observed_values[
:, t : -self.prediction_length + t
],
past_time_feat=time_feat[
:, t : -self.prediction_length + t, :
],
)
samples = distr.sample()
if past_target.dim() != samples.dim():
samples = samples.unsqueeze(dim=-1)
future_samples.append(samples)
past_target = torch.cat([past_target[:, 1:], samples], dim=1)
# (batch_size * num_parallel_samples, prediction_length)
samples_out = torch.stack(future_samples, dim=1)
# (batch_size, num_parallel_samples, prediction_length)
return samples_out.reshape(
-1, self.num_parallel_samples, self.prediction_length
)
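# Minimal usage sketch (hypothetical parameter values, not part of the original file):
# net = DeepNPTSNetworkDiscrete(context_length=96, num_hidden_nodes=[32, 32],
#                               cardinality=[5], embedding_dimension=[3],
#                               num_time_features=1)
# multi_step = DeepNPTSMultiStepNetwork(net=net, prediction_length=24)
# Calling multi_step(...) with the tensors documented above returns samples of
# shape (batch_size, num_parallel_samples, prediction_length).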
|
[
"noreply@github.com"
] |
kashif.noreply@github.com
|
725d9518757190bbff990c8383cf7ec9d56e3cc5
|
c0d537532f11cf742493093c3c325b4625fdc6e4
|
/Q4/HW3_Q4.py
|
ca25aabc6ff5e1c28eeec4da92089f07eb0f066c
|
[] |
no_license
|
plusbzz/cs224w-hw3
|
c9b4296425f467e203d12e4008b871d6dd89333f
|
7d513c991ff6e16433b6a4241950a2a3b2c15a96
|
refs/heads/master
| 2016-09-06T06:56:45.495051
| 2013-11-07T05:35:46
| 2013-11-07T05:35:46
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,469
|
py
|
# Homework 3: Question 4
from snap import *
from random import sample, choice
from numpy import arange
import matplotlib.pyplot as plt
from ggplot import *
N=10670
M=22002
nodes=arange(N)
dia_sample = 20 # number of BFS start nodes used when sampling the diameter
# Creating graphs
# Create a random Gnm network
g_nm = PUNGraph_New()
for i in nodes: g_nm.AddNode(i)
while True:
s,t = sample(nodes,2)
g_nm.AddEdge(s,t)
if g_nm.GetEdges() == M: break
g_nm.GetNodes(),g_nm.GetEdges()
# Save graph
SaveEdgeList_PUNGraph(g_nm,"Gnm.txt")
# Create a graph G_pa with preferential attachment
# Start with a complete graph of 40 nodes
N_init = 40
edges = []
g_pa = PUNGraph_New()
for n in xrange(N_init):
g_pa.AddNode(n)
for m in xrange(n):
g_pa.AddEdge(m,n)
edges.append((m,n))
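# Attach each remaining node with 2 edges; choosing a random endpoint of a
# random existing edge picks targets with probability proportional to degree,
# which is exactly the preferential-attachment bias.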
for n in nodes[N_init:]:
g_pa.AddNode(n)
for i in xrange(2):
m = choice(choice(edges))
g_pa.AddEdge(m,n)
edges.append((m,n))
if g_pa.GetEdges() == M: break
g_pa.GetNodes(),g_pa.GetEdges()
SaveEdgeList_PUNGraph(g_pa,"Gpa.txt")
# Load Autonomous network graph
g_as = LoadEdgeList_PUNGraph("oregon1_010331.txt")
SaveEdgeList_PUNGraph(g_as,"Gas.txt")
# Q4.1) Deletion experiments for failure vs attack
# Failure deletion
def failure1(graph,batchsize,percent):
del_nodes = 0 # number of deleted nodes
N = graph.GetNodes()
stopN = (percent*N)/100 # number of nodes at which to stop
X = [0]
Y = [GetBfsFullDiam_PUNGraph(graph,dia_sample)]
nodeset = set(range(N))
while True: # start deleting
for d in sample(nodeset,batchsize):
graph.DelNode(d)
nodeset.remove(d)
del_nodes += batchsize
dia = GetBfsFullDiam_PUNGraph(graph,dia_sample)
X.append((100.0*del_nodes)/N)
Y.append(dia)
if del_nodes >= stopN: break
return X,Y
# Attack deletion
def attack1(graph,batchsize,percent):
del_nodes = 0 # number of deleted nodes
N = graph.GetNodes()
stopN = (percent*N)/100 # number of nodes at which to stop
X = [0]
Y = [GetBfsFullDiam_PUNGraph(graph,dia_sample)]
nodeset = set(range(N))
while True: # start deleting
for i in xrange(batchsize):
d = GetMxDegNId_PUNGraph(graph)
graph.DelNode(d)
nodeset.remove(d)
del_nodes += batchsize
dia = GetBfsFullDiam_PUNGraph(graph,dia_sample)
X.append((100.0*del_nodes)/N)
Y.append(dia)
if del_nodes >= stopN: break
return X,Y
# Plot for average diameter vs. deleted nodes
def plots(X,Y,xlab,ylab,tpref,failure_func,attack_func):
g_nm = LoadEdgeListStr_PUNGraph("Gnm.txt")
f_g_nm_x,f_g_nm_y = failure_func(g_nm,X,Y)
g_as = LoadEdgeListStr_PUNGraph("Gas.txt")
f_g_as_x,f_g_as_y = failure_func(g_as,X,Y)
g_pa = LoadEdgeListStr_PUNGraph("Gpa.txt")
f_g_pa_x,f_g_pa_y = failure_func(g_pa,X,Y)
g_nm = LoadEdgeListStr_PUNGraph("Gnm.txt")
a_g_nm_x,a_g_nm_y = attack_func(g_nm,X,Y)
g_as = LoadEdgeListStr_PUNGraph("Gas.txt")
a_g_as_x,a_g_as_y = attack_func(g_as,X,Y)
g_pa = LoadEdgeListStr_PUNGraph("Gpa.txt")
a_g_pa_x,a_g_pa_y = attack_func(g_pa,X,Y)
p = plt.plot(f_g_as_x,f_g_as_y,'-o',f_g_nm_x,f_g_nm_y,'-x',f_g_pa_x,f_g_pa_y,'-+',
a_g_as_x,a_g_as_y,'-.',a_g_nm_x,a_g_nm_y,'--',a_g_pa_x,a_g_pa_y,'-4',
lw=1,mew=2)
p = plt.legend(("Failure: AS","Failure: NM","Failure: PA",
"Attack: AS","Attack: NM","Attack: PA"),loc="best")
p = plt.title(tpref + ': ' + ylab + " vs. " + xlab)
p = plt.xlabel(xlab)
p = plt.ylabel(ylab)
# Scenario 1: X = N/100, Y = 50
X = N/100
Y = 50
plots(X,Y,"Percent of deleted nodes","Average sampled diameter","Q4.1)X=N/100,Y=50",
failure1,attack1)
# Scenario 2: X = N/1000, Y = 2
X = N/1000
Y = 2
plots(X,Y,"Percent of deleted nodes","Average sampled diameter","Q4.1)X=N/1000,Y=2",
failure1,attack1)
# Q4.2) Change in size of largest connected component
# Failure deletion
def failure2(graph,batchsize,percent):
del_nodes = 0 # number of deleted nodes
N = graph.GetNodes()
stopN = (percent*N)/100 # number of nodes at which to stop
X = [0]
Y = [float(GetMxWccSz_PUNGraph(graph))]
nodeset = set(range(N))
while True: # start deleting
for d in sample(nodeset,batchsize):
graph.DelNode(d)
nodeset.remove(d)
del_nodes += batchsize
lcc = float(GetMxWccSz_PUNGraph(graph)) # size of LCC
X.append((100.0*del_nodes)/N)
Y.append(lcc)
if del_nodes >= stopN: break
return X,Y
# Attack deletion
def attack2(graph,batchsize,percent):
del_nodes = 0 # number of deleted nodes
N = graph.GetNodes()
stopN = (percent*N)/100 # number of nodes at which to stop
X = [0]
Y = [float(GetMxWccSz_PUNGraph(graph))]
nodeset = set(range(N))
while True: # start deleting
for i in xrange(batchsize):
d = GetMxDegNId_PUNGraph(graph)
graph.DelNode(d)
nodeset.remove(d)
del_nodes += batchsize
lcc = float(GetMxWccSz_PUNGraph(graph))
X.append((100.0*del_nodes)/N)
Y.append(lcc)
if del_nodes >= stopN: break
return X,Y
# Plots of fraction in largest connected component vs. percent deleted nodes
X = N/100
Y = 50
plots(X,Y,"Percent of deleted nodes","Fraction of nodes in LCC","Q4.2)X=N/100,Y=50",
failure2,attack2)
|
[
"plusbzz@gmail.com"
] |
plusbzz@gmail.com
|
1864abd09c45d30c777b5127b78b028f192c006a
|
ce3f2b03f38076b75544ab901662e6aeda35d97a
|
/manage.py
|
0f521b7d0f5bf1caf2215a08b13f9ff161682059
|
[] |
no_license
|
DivingCats/reflask
|
98799b7f693101a211152701cace06ef627233f3
|
1be5c61f3cf48b4e6e6a15fee56930f8166d3cd6
|
refs/heads/master
| 2022-12-09T22:13:58.644735
| 2020-02-08T09:56:14
| 2020-02-08T09:56:14
| 230,551,305
| 0
| 0
| null | 2022-12-08T03:34:28
| 2019-12-28T03:11:44
|
Python
|
UTF-8
|
Python
| false
| false
| 610
|
py
|
# -*- coding: utf-8 -*-
# @Time : 2020/2/5 20:55
# @Author : DivingKitten
# @File : manage.py
# @Software: PyCharm
# @Desc : startup script
from app import create_app
from flask_script import Manager, Shell
app = create_app('default')
manager = Manager(app)
def make_shell_context():
return dict(app=app)
manager.add_command("shell", Shell(make_context=make_shell_context))
@manager.command
def test():
import unittest
tests = unittest.TestLoader().discover('tests')
unittest.TextTestRunner(verbosity=2).run(tests)
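# flask_script exposes each @manager.command by name,
# so the suite runs via: python manage.py test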
if __name__ == '__main__':
# app.run()
manager.run()
|
[
"Unility@163.com"
] |
Unility@163.com
|
235950d4728104e2a077b449c15418d2a6e7154c
|
057554afbdfec2f8689a999a15ba0848c620ab4f
|
/find_available_room.py
|
d3ca3e14301c511f7ba7b3c8037a7f21148694ab
|
[
"Apache-2.0"
] |
permissive
|
cnaert/roomfinder2
|
fe40c2728d19c92688ef4b86699db660f36fefb8
|
75040a2842058334fb5dfa9d12491e321bc88b43
|
refs/heads/master
| 2021-01-19T21:59:25.368371
| 2017-04-19T11:13:33
| 2017-04-19T11:13:33
| 88,688,826
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,600
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
import subprocess
import getpass
from string import Template
import xml.etree.ElementTree as ET
import csv, codecs
import argparse
import datetime
now = datetime.datetime.now().replace(microsecond=0)
starttime_default = now.isoformat()
end_time_default = None
parser = argparse.ArgumentParser()
parser.add_argument("-url","--url", help="url for exhange server, e.g. 'https://mail.domain.com/ews/exchange.asmx'.",required=True)
parser.add_argument("-u","--user", help="user name for exchange/outlook",required=True)
parser.add_argument("-p","--password", help="password for exchange/outlook", required=True)
parser.add_argument("-start","--starttime", help="Starttime e.g. 2014-07-02T11:00:00 (default = now)", default=starttime_default)
parser.add_argument("-end","--endtime", help="Endtime e.g. 2014-07-02T12:00:00 (default = now+1h)", default=end_time_default)
#parser.add_argument("-n","--now", help="Will set starttime to now and endtime to now+1h", action="store_true")
parser.add_argument("-f","--file", help="csv filename with rooms to check (default=favorites.csv). Format: Name,email",default="favorites.csv")
args=parser.parse_args()
url = args.url
reader = csv.reader(codecs.open(args.file, 'r', encoding='utf-8'))
start_time = args.starttime
if not args.endtime:
start = datetime.datetime.strptime( start_time, "%Y-%m-%dT%H:%M:%S" )
end_time = (start + datetime.timedelta(hours=1)).isoformat()
else:
end_time = args.endtime
user = args.user
password = args.password
print "Searching for a room from " + start_time + " to " + end_time + ":"
print "{0:10s} {1:25s} {2:40s} {3:10s} {4:10s} {5:10s} {6:50s}".format("Status", "Room", "Email", "Level", "Zone", "Seats", "Description")
xml_template = open("getavailibility_template.xml", "r").read()
xml = Template(xml_template)
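# For each room, its email address is substituted into the EWS GetUserAvailability
# SOAP template and POSTed with curl, authenticating against Exchange via NTLM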
for room in reader:
data = unicode(xml.substitute(email=room[1],starttime=start_time,endtime=end_time))
header = "\"content-type: text/xml;charset=utf-8\""
command = "curl --silent --header " + header +" --data '" + data + "' --ntlm -u "+ user+":"+password+" "+ url
response = subprocess.Popen(command, stdout=subprocess.PIPE, shell=True).communicate()[0]
tree = ET.fromstring(response)
status = "Free"
# arrgh, namespaces!!
elems=tree.findall(".//{http://schemas.microsoft.com/exchange/services/2006/types}BusyType")
for elem in elems:
status=elem.text
print "{0:10s} {1:25s} {2:40s} {3:10s} {4:10s} {5:10s} {6:50s}".format(status, room[0], room[1], room[2], room[3], room[4], room[5] )
|
[
"cyrille.naert@dimensiondata.com"
] |
cyrille.naert@dimensiondata.com
|
f45604bd7b04946b6c72a23f38771783457a5ae7
|
9998ff1d80a5442970ffdc0b2dd343e3cab30ee8
|
/fiaqm.py
|
8d1e36ec5d2bdf3d45320c614cf86650b2282341
|
[
"MIT"
] |
permissive
|
cgomezsu/FIAQM
|
9baac1a9410a6ad19e67fff024a9ff15f24df70c
|
da44e370f40e573233a148414229359e7782ad0c
|
refs/heads/main
| 2023-05-13T02:17:19.833979
| 2021-05-24T01:25:17
| 2021-05-24T01:25:17
| 301,475,198
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 14,902
|
py
|
"""
v.e.s.
Federated Congestion Predictor + Intelligent AQM
MIT License
Copyright (c) 2020 Cesar A. Gomez
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from mininet.topo import Topo
from mininet.net import Mininet
from mininet.link import TCLink
from mininet.log import info, setLogLevel
from mininet.cli import CLI
import numpy as np
import pandas as pd
import time, random, os
import substring as ss
import tuner as tnr
random.seed(7) # For reproducibility
class CreateTopo(Topo):
def build(self, n):
bra = self.addSwitch('bra1') # Border Router A
brb = self.addSwitch('brb1') # Border Router B
ixp = self.addSwitch('ixp1') # IXP switch
vs = self.addSwitch('vs1') # Virtual switch to emulate a tunnel connection between Learning Orchestrator and Local Learners
self.addLink(bra, ixp, bw=1000, delay='2ms') # Link between bra-eth1 and ixp-eth1
self.addLink(ixp, brb, bw=1000, delay='2ms') # Link between ixp-eth2 and brb-eth1
# Creation of hosts connected to Border Routers in each domain
for j in range(n):
BW = random.randint(250,500)
d = str(random.randint(2,10))+'ms' # Each host has a random propagation delay (between 2 and 10 ms) on its link connected to the corresponding router
ha = self.addHost('a%s' % (j+1), ip='10.10.0.%s' % (j+1))
self.addLink(ha, bra, bw=BW, delay=d)
BW = random.randint(250,500) # Random BW to limit rate on each interface
d = str(random.randint(2,10))+'ms'
hb = self.addHost('b%s' % (j+1), ip='10.11.0.%s' % (j+1))
self.addLink(hb, brb, bw=BW, delay=d)
hlla = self.addHost('lla1', ip='10.10.11.11') # Host acting as the Local Learner A
self.addLink(hlla, vs, bw=100)#, delay='2ms')
hllb = self.addHost('llb1', ip='10.10.11.12') # Host acting as the Local Learner B
self.addLink(hllb, vs, bw=100)#, delay='2ms')
hlo = self.addHost('lo1', ip='10.10.10.10') # Host acting as the Learning Orchestrator
self.addLink(hlo, vs, bw=100)#, delay='2ms')
ma = self.addHost('ma1', ip='10.0.0.10') # There are two monitor hosts for probing
self.addLink(ma, bra, bw=1000) # The BW of the monitor hosts are the same as the inter-domain links
mb = self.addHost('mb1', ip='10.0.0.11')
self.addLink(mb, brb, bw=1000)
setLogLevel('info') # To show info messages
n = 20 # Number of network elements connected per border router
topo = CreateTopo(n)
net = Mininet(topo, link=TCLink, autoSetMacs=True) # We use Traffic Control links
info('\n*** Starting network\n')
net.start()
# Creating network devices from topology
lo1 = net['lo1']
lla1 = net['lla1']
llb1 = net['llb1']
bra1 = net['bra1']
ixp1 = net['ixp1']
brb1 = net['brb1']
ma1 = net['ma1']
mb1 = net['mb1']
# AQM configuration for link between IXP switch and Border Router A
ixp1.cmd('tc qdisc del dev ixp1-eth1 root') # Clear current qdisc
ixp1.cmd('tc qdisc add dev ixp1-eth1 root handle 1:0 htb default 1') # Set the name of the root as 1:, for future references. The default class is 1
ixp1.cmd('tc class add dev ixp1-eth1 classid 1:1 htb rate 1000mbit') # Create class 1:1 as direct descendant of root (the parent is 1:)
ixp1.cmd('tc qdisc add dev ixp1-eth1 parent 1:1 handle 10:1 fq_codel limit 1000 target 50ms interval 1000ms noecn') # Create qdisc with ID (handle) 10 of class 1. Its parent class is 1:1. Queue size limited to 1000 pkts
# AQM configuration for link between Border Router B and IXP switch. This will be the IAQM
aqm_target = 5000
aqm_interval = 100 # Initial parameters
brb1.cmd('tc qdisc del dev brb1-eth1 root')
brb1.cmd('tc qdisc add dev brb1-eth1 root handle 1:0 htb default 1')
brb1.cmd('tc class add dev brb1-eth1 classid 1:1 htb rate 1000mbit')
brb1.cmd('tc qdisc add dev brb1-eth1 parent 1:1 handle 10:1 fq_codel limit 1000 target {}us interval {}ms noecn'.format(aqm_target,aqm_interval)) # Both target and interval of this queue will be set dynamically as the emulation runs
info('\n*** Setting up AQM for intra-domain link buffers at Border Router\n')
a = [0 for j in range(n)] # List initialization for hosts A
b = [0 for j in range(n)] # List initialization for hosts B
for j in range(n):
# Changing the queue discipline on interfaces connected to Border Router A
BW = random.randint(250,500) # Random BW to limit rate on each interface
bra1.cmd('tc qdisc del dev bra1-eth{} root'.format(j+2))
bra1.cmd('tc qdisc add dev bra1-eth{} root handle 1:0 htb default 1'.format(j+2))
bra1.cmd('tc class add dev bra1-eth{} classid 1:1 htb rate {}mbit'.format(j+2,BW))
bra1.cmd('tc qdisc add dev bra1-eth{} parent 1:1 handle 10:1 fq_codel limit 1000 target 2000us interval 40ms noecn'.format(j+2))
time.sleep(3) # Wait a moment while the AQM is configured
a[j] = net['a%s' % (j+1)] # Creating net devices from topology
b[j] = net['b%s' % (j+1)]
time.sleep(5)
info('\n*** Testing connectivity...\n')
for j in range(n):
net.ping(hosts=[a[j],b[j]])
net.ping(hosts=[lla1,lo1])
net.ping(hosts=[llb1,lo1])
net.ping(hosts=[ma1,mb1])
info('\n*** Starting AQM stat captures...\n')
bra1.cmd('bash ~/stat_bra.sh') # Bash script to capture AQM stats at Border Router A
ixp1.cmd('bash ~/stat_ixp.sh &') # Bash script to capture AQM stats at the IXP switch
brb1.cmd('bash ~/stat_brb.sh &') # Bash script to capture AQM stats at Border Router B
info('\n*** Starting RRUL traffic between pairs...\n')
for j in range(n):
a[j].cmd('netserver &') # Start server on domain-A hosts for RRUL tests
l = random.randint(300,900) # Random length for each RRUL test
b[j].cmd('bash rrul.sh 10.10.0.{} {} &'.format(j+1,l)) # Start RRUL tests on domain-B hosts
time.sleep(20) # Waiting time while all RRUL tests start
info('\n*** Capturing AQM stats for initial training...\n')
time.sleep(150) # Waiting time for getting the initial training samples
# If the emulation gets stuck in the first period, it's probably because there are not enough traffic samples to train the model. Allow a longer time
info('\n*** Starting Federated Congestion Predictor process...\n')
lo1.cmd('mkdir ~/LO') # Directory to store parameter files of Learning Orchestrator
lo1.cmd('mkdir ~/LO/Predictions') # Directory to store predictions of Learning Orchestrator
lla1.cmd('mkdir ~/LLA') # Directory to store files of Local Learner A
llb1.cmd('mkdir ~/LLB') # Directory to store files of Local Learner B
# Start SCP server on Learning Orchestrator
lo1.cmd('/usr/sbin/sshd -p 54321')
# Start learning process on Learning Orchestrator
lo1.cmd('source PyTorch/bin/activate') # Activate virtual environment where PyTorch is installed
lo1.cmd('python lo_train.py &')
lo1.cmd('python lo_predict.py &')
# Start learning process on Local Learner A
lla1.cmd('source PyTorch/bin/activate')
lla1.cmd('python ll_train.py &')
# Receive predictions
periods = 300 # Number of periods (of approx. 2 secs) to run the emulation
factor = 1000
S = 100 # Number of states: discrete levels of congestion [0, 100] in a period of 2 s
A = np.arange(1100, 11100, 100) # Set of actions: set value of target parameter in us
epsilon = 0.5
s_curr = random.randint(0, S-1) # Random initialization of the first observed state for tuning (valid states are 0..S-1)
ind_action = len(A)-1 # Initial action for tuning: the largest FQ-CoDel target considered (11 ms)
cong_pred_max = 1e-6
hist_pred = np.zeros(periods)
hist_local = np.zeros(periods)
hist_r = np.zeros(periods)
hist_rtt = np.ones(periods)*8
hist_tput = np.ones(periods)*100
ma1.cmd('iperf -s &') # Iperf server on Monitor A to measure throughput
t0 = time.time() # To take time of all periods
preds_file = '/home/ubuntu/LLB/dr_est.npy'
for i in range(periods):
# Measure RTT and throughput in 1 sec
mb1.cmd('iperf -c 10.0.0.10 -i 0.1 -t 1 | tail -1 > ~/LLB/tput.out &')
mb1.cmd('ping 10.0.0.10 -i 0.1 -w 1 -q | tail -1 > ~/LLB/ping.out')
print("*** Period",i)
print("+++ Configured target and interval parameters:",aqm_target,"us",aqm_interval,"ms")
# Load received file with predictions, sent by the Learning Orchestrator
while not os.path.isfile(preds_file):
time.sleep(0)
while not os.path.getsize(preds_file) >= 928: # Wait until the file is complete, i.e. neither empty nor still being transferred (928 bytes holds 100 predictions)
time.sleep(0)
dr_est = np.load(preds_file)
# Some signal rearrangements of the predictions
dr_est = dr_est*factor
dr_est = dr_est-dr_est.mean()
dr_est = np.abs(dr_est)
dr_est[dr_est > 1] = 1 # Avoid any possible outlier after rearranging
hist_pred[i] = dr_est.mean()
# Discretize values of predictions
if hist_pred[i] > cong_pred_max:
cong_pred_max = hist_pred[i] # Stores the max value of predicted congestion
s_next = int((hist_pred[i]/cong_pred_max)*S-1)
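# Scale the mean prediction by the running max, then discretize it into one of the S congestion states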
print("+++ Predicted level of congestion ahead:",s_next+1)
time.sleep(1) # The duration of each period is this waiting time + ping time
statinfo = os.stat('/home/ubuntu/LLB/ping.out')
if statinfo.st_size < 10:
mRTT = hist_rtt.max() # If no ping response was gotten, take the maximum in the records (worst case)
print ('>>> No mRTT response. Taking maximum known: %.3f' % mRTT, 'ms')
else:
din = open('/home/ubuntu/LLB/ping.out').readlines()
slice = ss.substringByInd(din[0],26,39)
text = (slice.split('/'))
mRTT = float(text[1])
print ('>>> mRTT: %.3f' % mRTT, 'ms')
hist_rtt[i] = mRTT
statinfo = os.stat('/home/ubuntu/LLB/tput.out')
if statinfo.st_size < 10:
tput = hist_tput.min() # If no tput response was gotten, take the minimum in the records (worst case)
print ('>>> No Tput response. Taking minimum known: %.3f' % tput, 'Mbps')
else:
din = open('/home/ubuntu/LLB/tput.out').readlines()
tput = float(ss.substringByInd(din[0],34,37))
unit = ss.substringByInd(din[0],39,39)
if unit == 'K':
tput = tput*0.001
print ('>>> Tput: %.3f' % tput, 'Mbps')
hist_tput[i] = tput
hist_r[i] = tput/mRTT
R = hist_r[i] # Reward is based on power function
print ('>>> Power: %.2f' % R)
if i >= 75: # Start the AQM tuning process after this period
# Update Q-values
Q = tnr.update(s_curr, ind_action, R, s_next)
s_curr = s_next
# Select action for next iteration
ind_action = tnr.action(s_curr, epsilon)
aqm_target = A[ind_action] # Select a FQ-CoDel target
aqm_interval = int(aqm_target/(0.05*1000)) # Select a FQ-CoDel interval. Typically, target is 5% of interval
brb1.cmd('tc qdisc change dev brb1-eth1 parent 1:1 handle 10:1 fq_codel limit 1000 target {}us interval {}ms noecn'.format(aqm_target,aqm_interval)) # Change the AQM parameters at Border Router B
print("*** Total wall time: ", time.time() - t0, " seconds")
np.save('/home/ubuntu/hist_pred.npy',hist_pred)
np.save('/home/ubuntu/hist_power.npy',hist_r)
np.save('/home/ubuntu/q-table.npy',Q)
#CLI(net) # Uncomment this line to explore the temporary files before exiting Mininet
info('*** Deleting temporary files...\n')
lo1.cmd('rm -r ~/LO')
lla1.cmd('rm -r ~/LLA')
llb1.cmd('rm -r ~/LLB')
info('*** Experiment finished!\n')
net.stop()
|
[
"noreply@github.com"
] |
cgomezsu.noreply@github.com
|
6d39bfe89b71cb0c05cdf5b5824bde77c2647498
|
fca3fe7557c00a379e90cda8016a8719ca57fe28
|
/jexp/tests.py
|
32a07ef74a77e0e252e4893441888e74bb850e8b
|
[
"BSD-3-Clause"
] |
permissive
|
mhluongo/jexp
|
d8a4db0a2d4f0f5f70471c2e36ecc22c8835b73e
|
e23b375c00bb62cab9671bc76250023125b4e60f
|
refs/heads/master
| 2021-01-13T01:15:08.980614
| 2011-09-02T00:11:13
| 2011-09-02T00:11:13
| 2,158,299
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 279
|
py
|
from nose.tools import eq_
from jexp import J
a = J('a')
b = J('b')
#logical tests
def test_or():
eq_(str(a | b), '(a||b)')
def test_and():
eq_(str(a & b), '(a&&b)')
def test_inv():
eq_(str(~a), '(!a)')
#math tests
def test_negate():
eq_(str(-a), '(-a)')
|
[
"mhluongo@gmail.com"
] |
mhluongo@gmail.com
|
25bd69a2f916412574ed02402bb69fe8bb639fc1
|
a1bfa15fdb28c2eb4f46c6a694dd310e0a174846
|
/jpg2mojo.py
|
c128a00be0cb268eea657c795fa607d5b2657c2a
|
[] |
no_license
|
Rhoana/MojoToolkit
|
2971f6634adbcf40a5b8658b29de7fb6215498c2
|
c64e6d0c266dbb61105a8cadda16db7a2f76e0eb
|
refs/heads/master
| 2020-12-21T12:06:05.149710
| 2017-09-18T16:42:31
| 2017-09-18T16:42:31
| 73,499,035
| 0
| 1
| null | 2016-11-11T17:49:48
| 2016-11-11T17:49:47
| null |
UTF-8
|
Python
| false
| false
| 2,135
|
py
|
#!/usr/bin/python
import os
import cv2
import glob
import argparse
import numpy as np
from toMojo.np2imgo import Imgo
from toMojo.np2sego import Sego
help = {
'out': 'output mojo parent (default mojo)',
'jpg2mojo': 'Stack all jpgs into a mojo folder!',
'jpgs': 'input folder with all jpgs (default jpgs)',
't': 'datatype for output file (default uint8)',
'c': '-c enables -t uint32 (and default -o bgr)',
'o': 'Little Endian channel order as rgba,bgr (default none)',
}
paths = {}
stack = {}
rgba = {
'r': 0,
'g': 1,
'b': 2,
'a': 3
}
parser = argparse.ArgumentParser(description=help['jpg2mojo'])
parser.add_argument('-t', metavar='string', default='uint8', help=help['t'])
parser.add_argument('-o', metavar='string', default='', help=help['o'])
parser.add_argument('jpgs', default='jpgs', nargs='?', help=help['jpgs'])
parser.add_argument('out', default='mojo', nargs='?', help=help['out'])
parser.add_argument('-c', help=help['c'], action='store_true')
# parse all arguments
args = vars(parser.parse_args())
for key in ['jpgs', 'out']:
paths[key] = os.path.realpath(os.path.expanduser(args[key]))
[order, color, dtype] = [args['o'], args['c'], args['t']]
# Set color datatype
if color:
dtype = 'uint32'
order = order or 'bgr'
dtype = getattr(np,dtype)
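# In color mode each pixel is packed into a single uint32: channel i of the
# -o order contributes value * 256**i (little-endian byte order)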
# read all jpgs in jpgs folder
search = os.path.join(paths['jpgs'],'*.jpg')
stack = sorted(glob.glob(search))
# Size input files
sliceShape = cv2.imread(stack[0], 0).shape
shape = (len(stack),) + sliceShape
# Open an output file
outfile = Imgo(paths['out'])
if order:
outfile = Sego(paths['out'])
# Add each jpg file as a slice
for zi, file in enumerate(stack):
written = np.zeros(sliceShape,dtype=dtype)
if not order:
written = cv2.imread(file, 0).astype(dtype)
else:
# pixel to integer
volume = cv2.imread(file)
for ci, char in enumerate(order):
colorbyte = volume[:, :, rgba[char]] * (256 ** ci)
written = written + colorbyte
# Write as image or segmentation
outfile.run(written,zi)
# Write metadata to output file
outfile.save(shape)
|
[
"thejohnhoffer@coxgpu04.rc.fas.harvard.edu"
] |
thejohnhoffer@coxgpu04.rc.fas.harvard.edu
|
808213727226448e77ae3540979e0a54ba99ac8c
|
29d6101cc76550b3dbb47e885a6c160f46551bc1
|
/test
|
61dbb6fd4297fef87fa9d79ea8c095ff1b07c43c
|
[] |
no_license
|
awesomebjt/lpic-self-tests
|
b7dcc4062550b6ec06ef20ecb3c31c976ce46b32
|
bd772b1e25549e96caf02671b882212a3ab0cc13
|
refs/heads/master
| 2023-07-12T02:42:21.945693
| 2020-08-27T02:13:42
| 2020-08-27T02:13:42
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 558
|
#!/usr/bin/python
import json
from random import randint
import sys
try:
content = json.loads(open(sys.argv[1],'r').read())
except Exception as e:
print("Failed to load self-test. Did you provide the right file name as the first argument?")
raise e
total = 0
correct = 0
while len(content) > 0:
q = content.pop(randint(0,len(content)-1))
total += 1
print(q['Q'])
a=input("# ")
if a == q['A']:
correct += 1
print("Total: {}\tCorrect: {}\tGrade: {}%".format(
total,
correct,
int((correct/total)*100)))
|
[
"bjt@rabidquill.com"
] |
bjt@rabidquill.com
|
|
0a4d9c5a7f1c40f757a2731830ff440ee111ecd1
|
03b7430a0fbba63c0d5712ed6539caa807cb8d05
|
/problem1e.py
|
920b116d89aa792337950f565febeb39b2f85035
|
[
"MIT"
] |
permissive
|
yasserglez/monkeys-typing
|
7e0dcecfea3bc7401fbd8d229180b8277dc0b6e0
|
c7d15802cedd29dc0a194612d3777a656cb43caa
|
refs/heads/master
| 2020-04-06T04:31:39.337137
| 2015-08-08T18:23:37
| 2015-08-08T18:23:37
| 18,028,427
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 313
|
py
|
from monkeys import compute_freq_tab, write_freq_tab
freq_tab_2nd_order = compute_freq_tab(2, 'books/agnes_grey.txt')
write_freq_tab(freq_tab_2nd_order, 'agnes_grey_2nd_order.json')
freq_tab_3rd_order = compute_freq_tab(3, 'books/agnes_grey.txt')
write_freq_tab(freq_tab_3rd_order, 'agnes_grey_3rd_order.json')
|
[
"ygonzalezfernandez@gmail.com"
] |
ygonzalezfernandez@gmail.com
|
9524b3b82893f96b5401f320e1e5ef79be1a59ef
|
523db0f90e8069311705173cfcfdfb2c4417ae44
|
/06_port_scan/w8ay.py
|
a95691477badbd9c9fae4083fb5c2c06b3319282
|
[] |
no_license
|
ClayAndMore/py_scanner
|
b351fbc23fdc2d797fcc527472561333423d44f7
|
b21c1ae9ae8d9a6dc32841ec62bcd7cc40e0531f
|
refs/heads/master
| 2020-06-05T13:20:27.787920
| 2019-06-19T02:45:15
| 2019-06-19T02:45:15
| 192,449,157
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 601
|
py
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
'''
Name:w8ayScan
Author:w8ay
Copyright (c) 2017
'''
import sys
from lib.core.Spider import SpiderMain
from lib.core import webcms, common, PortScan
reload(sys)
sys.setdefaultencoding('utf-8')
def main():
root = "https://shiyanlou.com"
threadNum = 10
ip = common.gethostbyname(root)
print "IP:",ip
print "Start Port Scan:"
pp = PortScan.PortScan(ip)
pp.work()
#webcms
ww = webcms.webcms(root,threadNum)
ww.run()
#spider
w8 = SpiderMain(root,threadNum)
w8.craw()
if __name__ == '__main__':
main()
|
[
"wangyu1@antiy.com"
] |
wangyu1@antiy.com
|
2c2aebeebd8ad4a79fc47d44907d6a0fd9cdc88d
|
f68c7045d39039bcc58b8d096aca7edf433429ca
|
/env/bin/easy_install
|
2a206bc40153a884b403668cb00e8f28646c0b1c
|
[
"MIT"
] |
permissive
|
kelvin-daniel/instagram
|
beca157eb4eb1130ebd86825a9f99d96b903da02
|
2ede5319266f4312a9440d4985d098bc7545c2ae
|
refs/heads/master
| 2022-12-30T17:31:37.451798
| 2020-10-26T09:02:55
| 2020-10-26T09:02:55
| 304,535,375
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 283
|
#!/home/kevon/Documents/moringa_school/Django/instagram/env/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
|
[
"kaymutor@gmail.com"
] |
kaymutor@gmail.com
|
|
35509fe6b955bd2603e79013b82691a6ac50a9c7
|
1fa21cd2c288a9f87295631e10f747fe075a1502
|
/Trip Planner APIs/trip.py
|
fe94241adebdc4d7399310e515b6760df7830685
|
[] |
no_license
|
bmrn/TfNSW_APIs
|
4bc22e800796f848ff5f1ced2c04dd56a0666472
|
b4cbe176ce811698739b5fd33517fb36edbfa68d
|
refs/heads/master
| 2021-01-19T22:55:33.722331
| 2017-04-28T06:33:23
| 2017-04-28T06:33:23
| 88,893,404
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,853
|
py
|
from urllib.parse import urlencode
import requests
import json
import tssetup
from pprint import pprint
api_key = tssetup.getKey()
base_url = "https://api.transport.nsw.gov.au/v1/tp/"
query_type = "trip?"
#initialise query param dictionary
qdict = {}
#add parameters
qdict["outputFormat"] = "rapidJSON"
qdict["coordOutputFormat"] = "EPSG:4326"
qdict["depArrMacro"] = "dep" #dep after or arr before
qdict["itdDate"] = "20170707"
qdict["itdTime"] = "1200"
qdict["type_origin"] = "any"
qdict["name_origin"] = "10101331" #get location/stop id from stop_finder.py
qdict["type_destination"] = "any"
qdict["name_destination"] = "10102027"
qdict["calcNumberOfTrips"] = 5
qdict["wheelchair"] = "" #or "on"
qdict["TfNSWSF"] = "true"
qdict["version"] = "10.2.1.15"
#encode params as querystring
qstring = urlencode(qdict)
#buildurl
urlsend = base_url + query_type + qstring
print(urlsend)
#get authentication
headers = {'Authorization': 'apikey ' + api_key, 'Accept': 'application/json'}
response = requests.get(urlsend, headers=headers)
#decode response and convert to JSON format
respdict = json.loads(response.content.decode('utf-8'))
#simple example to look at data
for x in range(len(respdict["journeys"])):
print("********* TRIP " + str(x+1) + " *********")
for y in range(len(respdict["journeys"][x]["legs"])):
print("LEG " + str(y+1) + "")
print("Duration " + str(respdict["journeys"][x]["legs"][y]["duration"]/60) + " mins", end="\n")
print(respdict["journeys"][x]["legs"][y]["origin"]["departureTimeEstimated"], end="\t")
print(respdict["journeys"][x]["legs"][y]["origin"]["name"], end="\n")
print(respdict["journeys"][x]["legs"][y]["destination"]["arrivalTimeEstimated"], end="\t")
print(respdict["journeys"][x]["legs"][y]["destination"]["name"], end="\n")
print("\t\t")
|
[
"ausben@gmail.com"
] |
ausben@gmail.com
|
fe2dc08589eec0c27d13129f015869399ee3dae0
|
4bb72ba6ee6ed3ad887b799b27434946a92ff9d2
|
/algo/CryptoSystem.py
|
07c2ccc7e3f2ab9244ec99bd40722b77700c684c
|
[] |
no_license
|
Libi92/ECCBDD
|
33de3d9b2a91d671304f3e5bc6b134e7046d55f8
|
baa7b2c9177c6110e1cfa57bea6c936b30a4985a
|
refs/heads/master
| 2020-03-22T10:04:52.317899
| 2018-07-08T07:24:56
| 2018-07-08T07:24:56
| 139,879,602
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,516
|
py
|
import datetime
from functools import reduce
from algo import curve
from algo.ecc import EC
from algo.logger import Logger
from algo.point import Point
PXY_MATRIX_SIZE = 5
class CryptoSystem:
def __init__(self, g, ec):
self.g = g
self.ec = ec
def bit_invert(self, b):
inv = map(lambda x: '0' if x == '1' else '1', b)
return reduce(lambda x, y: x + y, inv)
def constructPxPyMatrix(self, decimal_list):
pxy_list = []
list_5 = []
for i in range(len(decimal_list)):
if i != 0 and i % 5 == 0:
pxy_list.append(list_5)
list_5 = []
py = i
px = decimal_list[i] + i
list_5.append(Point(px, py))
pxy_list.append(list_5)
return pxy_list
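# Points are grouped in blocks of PXY_MATRIX_SIZE; px stores the byte value
# offset by its row index i, which extractPx later undoes via p.x - p.y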
def get_gMatrix(self):
return [self.ec.mul(self.g, i) for i in range(1, 6)]
def add(self, a, b):
return [self.ec.add(m, n) for m, n in zip(a, b)]
def sub(self, a, b):
return [self.ec.sub(m, n) for m, n in zip(a, b)]
def matrixShiftAdd(self, a_list, b):
c_list = []
for a in a_list:
c = self.add(a, b)
b.append(b.pop(0))
c_list.append(c)
return c_list
def matrixShiftSub(self, a_list, b):
c_list = []
for a in a_list:
c = self.sub(a, b)
b.append(b.pop(0))
c_list.append(c)
return c_list
def print_matrix(self, matrix):
for x in matrix:
Logger.log(str(x.x) + ', ' + str(x.y))
def extractPx(self, pxy_list):
extracted = []
for list_5 in pxy_list:
ext = map(lambda p: Point(p.x - p.y, p.y), list_5)
extracted.append(list(ext))
return extracted
def encode(self, message):
start_time = datetime.datetime.now().microsecond
eq_ascii = [ord(x) for x in message]
Logger.log('ascii: ', eq_ascii)
bin_array = [format(x, '08b') for x in eq_ascii]
num_append = len(bin_array) % PXY_MATRIX_SIZE
if num_append != 0:
num_append = PXY_MATRIX_SIZE - num_append
for i in range(num_append):
bin_array.append(format(0, '08b'))
Logger.log('binary: ', bin_array)
inv_array = [self.bit_invert(b) for b in bin_array]
Logger.log('inverse binary: ', inv_array)
decimal_arr = [int(x, 2) for x in inv_array]
Logger.log('decimal: ', decimal_arr)
pxy_matrix = self.constructPxPyMatrix(decimal_arr)
Logger.log('PxPy (5x2)matrix: ', pxy_matrix)
g_matrix = self.get_gMatrix()
Logger.log('(5x2)g matrix: ')
self.print_matrix(g_matrix)
mapped_list = self.matrixShiftAdd(pxy_matrix, g_matrix)
Logger.log('encoded matrix: ')
for x in mapped_list: self.print_matrix(x)
end_time = datetime.datetime.now().microsecond
execution_time = end_time - start_time
Logger.log("Encoding time: {} μs".format(execution_time))
return mapped_list
def decode(self, encoded_list):
start_time = datetime.datetime.now().microsecond
g_matrix = self.get_gMatrix()
subs_matrix = self.matrixShiftSub(encoded_list, g_matrix)
Logger.log('Subtracted Matrix: ')
for x in subs_matrix: self.print_matrix(x)
extracted = self.extractPx(subs_matrix)
Logger.log('Px Extracted: ')
for x in extracted: self.print_matrix(x)
temp = []
for x in extracted: temp.extend(x)
extracted = temp
bin_array = [self.frmt(x) for x in extracted]
Logger.log(bin_array)
inv_bits = [self.bit_invert(b) for b in bin_array]
decimal_arr = [int(x, 2) for x in inv_bits]
Logger.log(decimal_arr)
chars = [chr(d) for d in decimal_arr]
plain_text = reduce(lambda x, y: x + y, chars)
end_time = datetime.datetime.now().microsecond
execution_time = end_time - start_time
Logger.log("Decoding time: {} μs".format(execution_time))
return plain_text
def frmt(self, X):
Logger.log(X, display=True)
return format(int(X.x), '08b')
if __name__ == '__main__':
plain_text = input("Enter your message: ")
curve = curve.P256
g = Point(curve.gx, curve.gy) # generator point of the curve
ec = EC(curve.a, curve.b, curve.p)
crypto = CryptoSystem(g, ec)
encoded = crypto.encode(plain_text)
decoded = crypto.decode(encoded)
print(decoded)
|
[
"libinbabup@hotmail.com"
] |
libinbabup@hotmail.com
|
0cca7a3e106b4584b3b916276cb3e704eb75122f
|
e66efe2baf16c48398087e1e2322ae4e8e77b5f1
|
/deepbrain/train/segmenter/count_frequency.py
|
858fb7942997b6c83726df7342a20dcfe6b2a46e
|
[
"MIT"
] |
permissive
|
codewithkaranjeswani/deepbrain
|
b43e72e95c185dd96ec78f92f42afd7741fac75c
|
ac16db831ba0fb213c08b4449657f5895b136324
|
refs/heads/master
| 2022-11-25T11:12:41.954520
| 2020-08-03T15:52:48
| 2020-08-03T15:52:48
| 284,741,744
| 0
| 0
|
MIT
| 2020-08-03T15:50:43
| 2020-08-03T15:50:43
| null |
UTF-8
|
Python
| false
| false
| 1,581
|
py
|
# Count class frequency to deal with class imbalance
import tensorflow as tf
import os
import nibabel as nib
import numpy as np
import random
import re
from skimage.transform import resize
from pathlib import Path
from const import *
# CSF: 1, 2, 23, 24, 0, 18 -> 1
# WM: 16, 17 -> 2
# GM: Rest -> 3
# Brain Stem: 7 -> 4
# Cerebellum WM: 12, 13 -> 5
# Cerebellum GM: 10, 11, 36, 37, 38 -> 6
def shrink_labels(labels):
labels[np.isin(labels, [1,2,23,24,0,18])] = 1
labels[np.isin(labels, [16,17])] = 2
labels[~np.isin(labels, [1,2,23,24,0,18,16,17,7,12,13,10,11,36,37,38])] = 3
labels[np.isin(labels, [7])] = 4
labels[np.isin(labels, [12,13])] = 5
labels[np.isin(labels, [10,11,36,37,38])] = 6
return labels
def run():
_dir = ADNI_DATASET_DIR
labels = Path(os.path.join(_dir, "masks", "malpem"))
brains = Path(os.path.join(_dir, "masks", "brain_masks"))
ret = {}
index = 0
for each in os.listdir(labels):
aux = each[7:]
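# Dropping the first 7 characters of the MALPEM label file name recovers the
# matching brain-mask file name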
p = labels / each
b = brains / aux
img = nib.load(str(p))
brain = (nib.load(str(b)).get_fdata().squeeze()) == 1
x = img.get_fdata()
x = x.astype(np.uint8).squeeze()
assert x.shape == brain.shape
x = x[brain]
x = shrink_labels(x)
y = np.bincount(x)
ii = np.nonzero(y)[0]
        index += 1
if index % 100 == 0:
print("Processed {}".format(index))
for k, v in zip(ii,y[ii]):
ret[k] = ret.get(k, 0) + v
print(ret)
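# Hypothetical follow-up (not part of the original script): convert the counted
# frequencies into inverse-frequency class weights for a weighted loss.
def class_weights(counts):
    total = sum(counts.values())
    return {label: total / (len(counts) * n) for label, n in counts.items()}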
if __name__ == "__main__":
run()
|
[
"i.itzcovich@gmail.com"
] |
i.itzcovich@gmail.com
|
59accba5a656d5b413c7c3ad528bee9b9a83ad95
|
9025c27655e2f150d01e64ce0826df8166ac6813
|
/core/urls.py
|
a1c84250501f6e331d1daaab5d0a66f5b2db6bbf
|
[] |
no_license
|
kairat3/bella-plain
|
02dd219f6bf087c99772490a32d61cd242a18f28
|
1950fd46dc53b800461f6077af3044bdfcf8300c
|
refs/heads/master
| 2023-07-13T05:06:17.575811
| 2021-08-19T14:05:29
| 2021-08-19T14:05:29
| 393,064,884
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,452
|
py
|
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.urls import path, include
from rest_framework.routers import DefaultRouter
from product.views import ProductApiView
from rest_framework import permissions
from drf_yasg.views import get_schema_view
from drf_yasg import openapi
schema_view = get_schema_view(
openapi.Info(
title="Bella API",
default_version='v1',
description="Test description",
terms_of_service="https://www.google.com/policies/terms/",
contact=openapi.Contact(email="sunerisestudiopro@gmail.com"),
license=openapi.License(name="BSD License"),
),
public=True,
permission_classes=(permissions.AllowAny,),
)
router = DefaultRouter()
router.register('products', ProductApiView)
urlpatterns = [
path('', schema_view.with_ui('swagger', cache_timeout=0), name='schema-swagger-ui'),
path('docs/', schema_view.with_ui('redoc', cache_timeout=0), name='schema-redoc'),
path('', include('account.urls')),
path('', include('product.urls')),
path('admin/', admin.site.urls),
path('', include(router.urls)),
path('', include('info.urls')),
path('', include('news.urls')),
path('', include('cart.urls')),
]
if settings.DEBUG:
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
[
"jvckmiller@gmail.com"
] |
jvckmiller@gmail.com
|
c67c4e7d3e4988859508f7b101f56a874364ee59
|
1add9e012d8b61f17ca992f0c157ee0a5d1b7860
|
/env/bin/pyhtmlizer
|
5689466410d24f4b1c2200f96904416ace10fb6b
|
[] |
no_license
|
dims337/chat-app
|
3b0cd7dd4c2b22ba71219fba181ae35e91e7b3db
|
042594f1f5785feeb3a06f7c204fa726ae2b2352
|
refs/heads/master
| 2020-04-11T17:09:17.406649
| 2018-12-21T04:02:46
| 2018-12-21T04:02:46
| 161,949,636
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 259
|
#!/Users/dimsontenke/Desktop/chat_app/env/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from twisted.scripts.htmlizer import run
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(run())
|
[
"dimsontenke@dimsons-MacBook-Pro.local"
] |
dimsontenke@dimsons-MacBook-Pro.local
|
|
fb997a333c66b348a4b2be51acc74d9b48a24bd6
|
47a211616a0fd746c1738aac8ab00cb758d6b057
|
/Flask/Scripts/rst2xml.py
|
04f24d4bafeabae2bcd193f648d3307ea9da0421
|
[] |
no_license
|
yan-egorov/flask_bro
|
49b8b43ae5d113fd776adda6a65214b334daf63b
|
5e233a5665c9948fc22d7d185c6d43b50b58fe5c
|
refs/heads/master
| 2021-01-18T20:30:05.722883
| 2015-06-10T07:40:45
| 2015-06-10T07:40:45
| 37,182,165
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 663
|
py
|
#!C:\Users\rusegy\Documents\Python Scripts\TrueStories\Flask\Scripts\python.exe
# $Id: rst2xml.py 4564 2006-05-21 20:44:42Z wiemann $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.
"""
A minimal front end to the Docutils Publisher, producing Docutils XML.
"""
try:
import locale
locale.setlocale(locale.LC_ALL, '')
except:
pass
from docutils.core import publish_cmdline, default_description
description = ('Generates Docutils-native XML from standalone '
'reStructuredText sources. ' + default_description)
publish_cmdline(writer_name='xml', description=description)
|
[
"egorov.yan@gmail.com"
] |
egorov.yan@gmail.com
|
3e9e5f2e2ab82da05314d2f3be4cd0c514ea872e
|
b10bae1f94f00404d8d21a563d7626486aeebe74
|
/users/models.py
|
ce385919d08af645716f84c4baa3931bbe72a02d
|
[] |
no_license
|
brotherchris65/newspaper
|
d93b91f618cc33c627f5e584fccd91bdcb6fb75c
|
7dd5f5c3e09fc38183684d9a30e01f5e1debc2ee
|
refs/heads/master
| 2021-09-28T14:20:35.043218
| 2019-12-11T01:42:05
| 2019-12-11T01:42:05
| 227,240,504
| 0
| 0
| null | 2021-09-22T18:10:49
| 2019-12-11T00:18:19
|
Python
|
UTF-8
|
Python
| false
| false
| 202
|
py
|
from django.db import models
from django.contrib.auth.models import AbstractUser
class CustomUser(AbstractUser):
    age = models.PositiveIntegerField(null=True, blank=True)
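# Assumption about the wider project: a Django custom user model also requires
# AUTH_USER_MODEL = "users.CustomUser" in settings.py to take effect.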
|
[
"brotherchris65@gmail.com"
] |
brotherchris65@gmail.com
|
2cdced4eecc20d7bb193be9283df32bad706371b
|
841af9f674d910f1135b65a1e46f4a422d13a6db
|
/test_only_modified.py
|
6626cf6ccb89b554c250e63db1192603bc0e1db2
|
[] |
no_license
|
betterteam/InterSec_simulation
|
07e95215abbab4722226355261b9909c4a8d8308
|
bfccec4e58b0fe79313993cb6cad33ce53a7f672
|
refs/heads/master
| 2021-01-15T22:18:47.528359
| 2018-01-10T07:57:19
| 2018-01-10T07:57:19
| 99,894,685
| 3
| 0
| null | 2017-12-15T15:39:54
| 2017-08-10T07:30:28
|
Python
|
UTF-8
|
Python
| false
| false
| 23,851
|
py
|
# Final version of traffic signal
import sys
from PyQt5.QtWidgets import QWidget, QApplication
from PyQt5.QtGui import QPainter, QPen
from PyQt5.QtCore import Qt, QTimer, QTime
import math
class Position:
def __init__(self, x=0, y=0):
self.x = x
self.y = y
class Speed:
def __init__(self, x=0, y=0):
self.x = x
self.y = y
class Size:
def __init__(self, x=5, y=10):
self.x = x
self.y = y
class Vehicle:
def __init__(self):
self._position = Position()
self._speed = Speed()
self._size = Size()
def setPosition(self, position):
self._position = position
def getPosition(self):
return self._position
def setSpeed(self, speed):
self._speed = speed
def getSpeed(self):
return self._speed
def setSize(self, size):
self._size = size
def getSize(self):
return self._size
def moveNext(self):
self._position.x += self._speed.x
self._position.y += self._speed.y
if self._position.x > 600:
self._position.x = 0
class Example(QWidget):
def __init__(self, vehicles_N, vehicles_W, vehicles_E):
super().__init__()
self.vehicles_N = vehicles_N
self.vehicles_W = vehicles_W
self.vehicles_E = vehicles_E
self.initUI()
self.timer = QTimer(self)
self.timer.timeout.connect(self.update)
        self.timer.start(1000 // 60)  # refresh roughly 60 times per second
self.t = QTime()
self.t.start()
self.show()
def initUI(self):
self.setGeometry(300, 300, 600, 600)
self.setWindowTitle("Koku's Simulation")
self.ti = 0
self.beze_t = []
self.r = []
self.up_left_x = []
self.up_left_y = []
self.down_left_x = []
self.down_left_y = []
self.up_right_x = []
self.up_right_y = []
self.down_right_x = []
self.down_right_y = []
for i in range(10):
self.beze_t.append(0)
self.r.append(0)
self.up_left_x.append(0)
self.up_left_y.append(0)
self.down_left_x.append(0)
self.down_left_y.append(0)
self.up_right_x.append(0)
self.up_right_y.append(0)
self.down_right_x.append(0)
self.down_right_y.append(0)
self.single_0_0 = True
self.single_0_1 = True
self.collision_check = []
self.collision_check_N = []
self.collision_check_S = []
self.collision_check_W = []
self.collision_check_E = []
self.grid = {}
for i in range(270, 330, 10):
for j in range(270, 330, 10):
self.grid[(i, j)] = True
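        # The intersection (x, y in 270..320, 10 px cells) is tiled by this dict:
        # True marks a free cell; cells are flipped to False as vehicles claim
        # them and reset at the end of every drawVehicles pass.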
def paintEvent(self, e):
#print("!")
qp = QPainter(self)
self.drawLines(qp)
self.drawSignals_0(qp)
self.drawVehicles(qp)
def drawLines(self, qp):
# print(self.t.elapsed())
pen = QPen(Qt.black, 2, Qt.SolidLine)
pen_dash = QPen(Qt.black, 2, Qt.DotLine)
# Vertical
qp.setPen(pen)
qp.drawLine(270, 0, 270, 600)
# with grids ##################
# qp.drawLine(280, 0, 280, 600)
# qp.drawLine(290, 0, 290, 600)
# qp.drawLine(300, 0, 300, 600)
# qp.drawLine(310, 0, 310, 600)
# qp.drawLine(320, 0, 320, 600)
# with grids ##################
qp.drawLine(330, 0, 330, 600)
qp.drawLine(300, 0, 300, 270)
qp.drawLine(300, 330, 300, 600)
qp.setPen(pen_dash)
qp.drawLine(280, 330, 280, 600)
qp.drawLine(290, 330, 290, 600)
qp.drawLine(310, 330, 310, 600)
qp.drawLine(320, 330, 320, 600)
qp.drawLine(280, 0, 280, 270)
qp.drawLine(290, 0, 290, 270)
qp.drawLine(310, 0, 310, 270)
qp.drawLine(320, 0, 320, 270)
# Tropical
qp.setPen(pen)
qp.drawLine(0, 270, 600, 270)
# with grids ##################
# qp.drawLine(0, 280, 600, 280)
# qp.drawLine(0, 290, 600, 290)
# qp.drawLine(0, 300, 600, 300)
# qp.drawLine(0, 310, 600, 310)
# qp.drawLine(0, 320, 600, 320)
# with grids ##################
qp.drawLine(0, 330, 600, 330)
qp.drawLine(0, 300, 270, 300)
qp.drawLine(330, 300, 600, 300)
qp.setPen(pen_dash)
qp.drawLine(0, 280, 270, 280)
qp.drawLine(0, 290, 270, 290)
qp.drawLine(0, 310, 270, 310)
qp.drawLine(0, 320, 270, 320)
qp.drawLine(330, 280, 600, 280)
qp.drawLine(330, 290, 600, 290)
qp.drawLine(330, 310, 600, 310)
qp.drawLine(330, 320, 600, 320)
def drawSignals_0(self, qp):
#print(self.t.elapsed())
if 1000 < self.t.elapsed() < 2000:
qp.setPen(Qt.black)
qp.setBrush(Qt.red)
qp.drawEllipse(272, 262, 6, 6)
qp.drawEllipse(282, 262, 6, 6)
qp.drawEllipse(292, 262, 6, 6)
qp.setBrush(Qt.green)
qp.drawEllipse(332, 272, 6, 6)
qp.drawEllipse(332, 282, 6, 6)
qp.drawEllipse(332, 292, 6, 6)
qp.setBrush(Qt.red)
qp.drawEllipse(302, 332, 6, 6)
qp.drawEllipse(312, 332, 6, 6)
qp.drawEllipse(322, 332, 6, 6)
qp.setBrush(Qt.green)
qp.drawEllipse(262, 302, 6, 6)
qp.drawEllipse(262, 312, 6, 6)
qp.drawEllipse(262, 322, 6, 6)
self.single_0_0 = False
self.single_0_1 = True
else:
qp.setPen(Qt.black)
qp.setBrush(Qt.green)
qp.drawEllipse(272, 262, 6, 6)
qp.drawEllipse(282, 262, 6, 6)
qp.drawEllipse(292, 262, 6, 6)
qp.setBrush(Qt.red)
qp.drawEllipse(332, 272, 6, 6)
qp.drawEllipse(332, 282, 6, 6)
qp.drawEllipse(332, 292, 6, 6)
qp.setBrush(Qt.green)
qp.drawEllipse(302, 332, 6, 6)
qp.drawEllipse(312, 332, 6, 6)
qp.drawEllipse(322, 332, 6, 6)
qp.setBrush(Qt.red)
qp.drawEllipse(262, 302, 6, 6)
qp.drawEllipse(262, 312, 6, 6)
qp.drawEllipse(262, 322, 6, 6)
self.single_0_0 = True
self.single_0_1 = False
def coordinate_up_left_x(self, po_x, r):
return po_x - 5 * math.cos(math.radians(r))
def coordinate_up_left_y(self, po_y):
return po_y
def coordinate_up_right_x(self, po_x, r):
return po_x + 10 * math.cos(math.radians(r))
def coordinate_up_right_y(self, po_y):
return po_y
def coordinate_down_left_x(self, po_x, r):
return po_x - 5 * math.cos(math.radians(r))
def coordinate_down_left_y(self, po_y, r):
return po_y + 5 * math.sin(math.radians(r)) + 10 * math.cos(math.radians(r))
def coordinate_down_right_x(self, po_x, r):
return po_x + 10 * math.cos(math.radians(r))
def coordinate_down_right_y(self, po_y, r):
return po_y + 10 * math.sin(math.radians(r)) + 5 * math.cos(math.radians(r))
def drawVehicles(self, qp):
qp.setPen(Qt.black)
qp.setBrush(Qt.green)
# # Vehicles from North
        for i, veh in enumerate(self.vehicles_N):
if (veh.getPosition().x + veh.getSpeed().x, veh.getPosition().y + veh.getSpeed().y) in self.collision_check_N:
qp.drawRect(veh.getPosition().x, veh.getPosition().y, veh.getSize().x, veh.getSize().y)
                for j in range(11):
                    self.collision_check_N.append((veh.getPosition().x, veh.getPosition().y - j))
else:
if veh.getPosition().y + veh.getSpeed().y > 260 and veh.getPosition().y <= 260:
if self.single_0_1:
qp.drawRect(veh.getPosition().x, veh.getPosition().y, veh.getSize().x, veh.getSize().y)
                        for j in range(11):
                            self.collision_check_N.append((veh.getPosition().x, veh.getPosition().y - j))
else:
if veh.getPosition().y <= 270:
if self.grid[((veh.getPosition().x + veh.getSpeed().x) // 10 * 10,
(veh.getPosition().y + veh.getSpeed().y + veh.getSize().y) // 10 * 10)] and \
self.grid[((veh.getPosition().x + veh.getSpeed().x + veh.getSize().x) // 10 * 10,
(veh.getPosition().y + veh.getSpeed().y + veh.getSize().y) // 10 * 10)]:
veh.getPosition().y += veh.getSpeed().y
qp.drawRect(veh.getPosition().x, veh.getPosition().y, 5, 10)
                                for j in range(11):
                                    self.collision_check_N.append((veh.getPosition().x, veh.getPosition().y - j))
self.grid[(veh.getPosition().x // 10 * 10, (veh.getPosition().y + veh.getSize().y) // 10 * 10)] = False
self.grid[((veh.getPosition().x + veh.getSize().x) // 10 * 10, (veh.getPosition().y + veh.getSize().y) // 10 * 10)] = False
else:
try:
if self.grid[((veh.getPosition().x + veh.getSpeed().x) // 10 * 10,
(veh.getPosition().y + veh.getSpeed().y) // 10 * 10)] and \
self.grid[((veh.getPosition().x + veh.getSpeed().x + veh.getSize().x) // 10 * 10,
(veh.getPosition().y + veh.getSpeed().y) // 10 * 10)] and \
self.grid[((veh.getPosition().x + veh.getSpeed().x) // 10 * 10,
(veh.getPosition().y + veh.getSpeed().y + veh.getSize().y) // 10 * 10)] and \
self.grid[((veh.getPosition().x + veh.getSpeed().x + veh.getSize().x) // 10 * 10,
(veh.getPosition().y + veh.getSpeed().y + veh.getSize().y) // 10 * 10)]:
self.vehicles_N[i].getPosition().y += veh.getSpeed().y
self.grid[((veh.getPosition().x + veh.getSpeed().x) // 10 * 10,
(veh.getPosition().y + veh.getSpeed().y) // 10 * 10)] = False
self.grid[((veh.getPosition().x + veh.getSpeed().x + veh.getSize().x) // 10 * 10,
(veh.getPosition().y + veh.getSpeed().y) // 10 * 10)] = False
self.grid[((veh.getPosition().x + veh.getSpeed().x) // 10 * 10,
(veh.getPosition().y + veh.getSpeed().y + veh.getSize().y) // 10 * 10)] = False
self.grid[((veh.getPosition().x + veh.getSpeed().x + veh.getSize().x) // 10 * 10,
(veh.getPosition().y + veh.getSpeed().y + veh.getSize().y) // 10 * 10)] = False
if self.vehicles_N[i].getPosition().y > 600:
self.vehicles_N[i].getPosition().y = 0
qp.drawRect(veh.getPosition().x, veh.getPosition().y, 5, 10)
                                    for j in range(11):
                                        self.collision_check_N.append((veh.getPosition().x, veh.getPosition().y - j))
except KeyError:
self.vehicles_N[i].getPosition().y += veh.getSpeed().y
if self.vehicles_N[i].getPosition().y > 600:
self.vehicles_N[i].getPosition().y = 0
qp.drawRect(self.vehicles_N[i].getPosition().x, self.vehicles_N[i].getPosition().y, 5, 10)
else:
# print(self.single_0_1)
veh.getPosition().y += veh.getSpeed().y
if veh.getPosition().y > 600:
veh.getPosition().y = 0
# print(self.t.elapsed())
qp.drawRect(veh.getPosition().x, veh.getPosition().y, 5, 10)
                    for j in range(11):
                        self.collision_check_N.append((veh.getPosition().x, veh.getPosition().y - j))
#print(self.collision_check)
# Vehicles from West
        for i, veh in enumerate(self.vehicles_W):
# Check if there are vehicles ahead. If true, stop
if (veh.getPosition().x + veh.getSpeed().x, veh.getPosition().y + veh.getSpeed().y) in self.collision_check_W:
qp.drawRect(veh.getPosition().x, veh.getPosition().y, veh.getSize().x, veh.getSize().y)
# Make the room not available for other vehicles
for j in range(11):
self.collision_check_W.append((veh.getPosition().x - j, veh.getPosition().y))
# Move forward
else:
# Just before the intersection
if veh.getPosition().x + 10 + 2 > 270 and veh.getPosition().x <= 270 - 10:
# Check traffic signal. True, then stop before entering.
if self.single_0_0:
qp.drawRect(veh.getPosition().x, veh.getPosition().y, 10, 5)
for j in range(11):
self.collision_check_W.append((veh.getPosition().x - j, veh.getPosition().y))
# Enter intersection
else:
veh.getPosition().x += 2
qp.drawRect(veh.getPosition().x, veh.getPosition().y, 10, 5)
for j in range(11):
self.collision_check_W.append((veh.getPosition().x - j, veh.getPosition().y))
# Light up the grids in the intersection
# Up left
if (veh.getPosition().x // 10 * 10, veh.getPosition().y // 10 * 10) in self.grid:
self.grid[(veh.getPosition().x // 10 * 10, veh.getPosition().y // 10 * 10)] = False
#print('success, x:', veh.getPosition().x)
# Up right
if ((veh.getPosition().x + 10) // 10 * 10, veh.getPosition().y // 10 * 10) in self.grid:
self.grid[((veh.getPosition().x + 10) // 10 * 10, veh.getPosition().y // 10 * 10)] = False
#print('success, x:', veh.getPosition().x)
# Down left
                        if (veh.getPosition().x // 10 * 10, (veh.getPosition().y + 5) // 10 * 10) in self.grid:
                            self.grid[(veh.getPosition().x // 10 * 10, (veh.getPosition().y + 5) // 10 * 10)] = False
#print('success, x:', veh.getPosition().x)
# Down right
                        if ((veh.getPosition().x + 10) // 10 * 10, (veh.getPosition().y + 5) // 10 * 10) in self.grid:
                            self.grid[((veh.getPosition().x + 10) // 10 * 10, (veh.getPosition().y + 5) // 10 * 10)] = False
#print('success, x:', veh.getPosition().x)
# Already in the intersection
else:
if 270 < veh.getPosition().x < 328 and veh.getPosition().y < 330:
qp.save()
qp.translate(veh.getPosition().x, veh.getPosition().y)
# Calculate rotation angle
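                        # The heading ramps linearly from 0° to 90° as x crosses the
                        # box (270 -> 330); below the 15° threshold the car is kept
                        # straight to avoid twitching right at the entry.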
if (((veh.getPosition().x - 270 + 3) / 60) * 90 > 15):
self.r[i] = ((veh.getPosition().x - 270 + 3) / 60) * 90
qp.rotate(self.r[i])
else:
self.r[i] = 0
qp.rotate(self.r[i])
qp.translate(-veh.getPosition().x, -veh.getPosition().y)
# Calculate trajectory by using Bezier Curve
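                        # Quadratic Bezier B(t) = (1-t)^2*P0 + 2t(1-t)*P1 + t^2*P2 with
                        # t = beze_t/60, P0 = (272, 273) entry, P1 = (330, 273) control
                        # point and P2 = (330, 330) exit into the southbound lane.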
x = pow(1 - (self.beze_t[i] / 60), 2) * 272 + 2 * (self.beze_t[i] / 60) * (
1 - self.beze_t[i] / 60) * 330 + pow(
self.beze_t[i] / 60, 2) * 330
y = pow(1 - (self.beze_t[i] / 60), 2) * 273 + 2 * (self.beze_t[i] / 60) * (
1 - self.beze_t[i] / 60) * 273 + pow(
self.beze_t[i] / 60, 2) * 330
veh.setPosition(Position(x, y))
self.beze_t[i] += 2
qp.drawRect(veh.getPosition().x, veh.getPosition().y, 10, 5)
for j in range(11):
self.collision_check_W.append((veh.getPosition().x - j, veh.getPosition().y))
qp.restore()
# Calculate the big Square's coordinate
self.up_left_x[i] = self.coordinate_up_left_x(veh.getPosition().x, self.r[i])
self.up_left_y[i] = self.coordinate_up_left_y(veh.getPosition().y)
self.down_left_x[i] = self.coordinate_down_left_x(veh.getPosition().x, self.r[i])
self.down_left_y[i] = self.coordinate_down_left_y(veh.getPosition().y, self.r[i])
self.up_right_x[i] = self.coordinate_up_right_x(veh.getPosition().x, self.r[i])
self.up_right_y[i] = self.coordinate_up_right_y(veh.getPosition().y)
self.down_right_x[i] = self.coordinate_down_right_x(veh.getPosition().x, self.r[i])
self.down_right_y[i] = self.coordinate_down_right_y(veh.getPosition().y, self.r[i])
# Up left
if (self.up_left_x[i] // 10 * 10, self.up_left_y[i] // 10 * 10) in self.grid:
self.grid[(self.up_left_x[i] // 10 * 10, self.up_left_y[i] // 10 * 10)] = False
# print('success')
# Up right
if ((self.up_right_x[i]) // 10 * 10, self.up_right_y[i] // 10 * 10) in self.grid:
self.grid[((self.up_right_x[i]) // 10 * 10, self.up_right_y[i] // 10 * 10)] = False
# print('success')
# Down left
if (self.down_left_x[i] // 10 * 10, (self.down_left_y[i]) // 10 * 10) in self.grid:
self.grid[(self.down_left_x[i] // 10 * 10, (self.down_left_y[i]) // 10 * 10)] = False
# print('success')
# Down right
if ((self.down_right_x[i]) // 10 * 10, (self.down_right_y[i]) // 10 * 10) in self.grid:
self.grid[((self.down_right_x[i]) // 10 * 10, (self.down_right_y[i]) // 10 * 10)] = False
# print('success')
# Already left intersection
elif 328 <= veh.getPosition().x and veh.getPosition().y < 600:
qp.save()
qp.translate(veh.getPosition().x, veh.getPosition().y)
qp.rotate(90)
qp.translate(-veh.getPosition().x, -veh.getPosition().y)
veh.getPosition().y += 2
qp.drawRect(veh.getPosition().x, veh.getPosition().y, 10, 5)
for j in range(11):
self.collision_check_W.append((veh.getPosition().x, veh.getPosition().y - j))
qp.restore()
# Already left screen
elif veh.getPosition().y >= 600:
veh.getPosition().x = 0
veh.getPosition().y = 273
self.beze_t[i] = 0
qp.drawRect(veh.getPosition().x, veh.getPosition().y, 10, 5)
for j in range(11):
self.collision_check_W.append((veh.getPosition().x, veh.getPosition().y - j))
# Move horizontal direction(across X_axis)
else:
veh.getPosition().x += 2
qp.drawRect(veh.getPosition().x, veh.getPosition().y, 10, 5)
for j in range(11):
self.collision_check_W.append((veh.getPosition().x - j, veh.getPosition().y))
# Vehicle2
# if self.single_0_0:
# qp.drawRect(self.vehicles_E[0].getPosition().x, self.vehicles_E[0].getPosition().y, 10, 5)
# else:
try:
if self.grid[((self.vehicles_E[0].getPosition().x - 5) // 10 * 10, self.vehicles_E[0].getPosition().y // 10 * 10)] and \
self.grid[((self.vehicles_E[0].getPosition().x + 10 - 5) // 10 * 10, self.vehicles_E[0].getPosition().y // 10 * 10)] and \
self.grid[((self.vehicles_E[0].getPosition().x - 5) // 10 * 10, (self.vehicles_E[0].getPosition().y + 5) // 10 * 10)] and \
self.grid[((self.vehicles_E[0].getPosition().x + 10 - 5) // 10 * 10, (self.vehicles_E[0].getPosition().y + 5) // 10 * 10)]:
self.vehicles_E[0].getPosition().x -= 3
if self.vehicles_E[0].getPosition().x < 0:
self.vehicles_E[0].getPosition().x = 600
qp.drawPoint(self.vehicles_E[0].getPosition().x + 1, self.vehicles_E[0].getPosition().y - 1)
qp.drawRect(self.vehicles_E[0].getPosition().x, self.vehicles_E[0].getPosition().y, 10, 5)
else:
qp.drawPoint(self.vehicles_E[0].getPosition().x + 1, self.vehicles_E[0].getPosition().y - 1)
qp.drawRect(self.vehicles_E[0].getPosition().x, self.vehicles_E[0].getPosition().y, 10, 5)
except KeyError:
self.vehicles_E[0].getPosition().x -= 3
if self.vehicles_E[0].getPosition().x < 0:
self.vehicles_E[0].getPosition().x = 600
qp.drawPoint(self.vehicles_E[0].getPosition().x + 1, self.vehicles_E[0].getPosition().y - 1)
qp.drawRect(self.vehicles_E[0].getPosition().x, self.vehicles_E[0].getPosition().y, 10, 5)
self.collision_check = []
self.collision_check_N = []
self.collision_check_S = []
self.collision_check_W = []
self.collision_check_E = []
for i in range(270, 330, 10):
for j in range(270, 330, 10):
self.grid[(i, j)] = True
self.ti += 10
if self.ti > 700:
self.ti = 0
# print(self.t.elapsed())
self.t.restart()
if __name__ == '__main__':
app = QApplication(sys.argv)
# Vehicles from North
vehicles_N = []
for i in range(5):
v = Vehicle()
v.setPosition(Position(313, 0 - i * 10))
v.setSpeed(Speed(0, 2))
v.setSize(Size(5, 10))
vehicles_N.append(v)
# Vehicles from West
vehicles_W = []
for i in range(9):
v = Vehicle()
v.setPosition(Position(0 - i * 10, 273))
v.setSpeed(Speed(2, 0))
v.setSize(Size(10, 5))
vehicles_W.append(v)
# Vehicles from East
vehicles_E = []
v = Vehicle()
v.setPosition(Position(600, 302))
v.setSpeed(Speed(2, 0))
v.setSize(Size(10, 5))
vehicles_E.append(v)
ex = Example(vehicles_N, vehicles_W, vehicles_E)
sys.exit(app.exec_())
|
[
"better@opossum.itolab.nitech.ac.jp"
] |
better@opossum.itolab.nitech.ac.jp
|
e6acc1a14b714638e4d8eb6b3210b8ad4b35a3c2
|
37069009dd428ce59819ffea2fcffc07dda6e712
|
/django_analyze/migrations/0068_auto__add_field_genotype_max_memory_usage.py
|
550ac7b81c79b27de932d2c0ecb1788805c93c03
|
[] |
no_license
|
chrisspen/django-analyze
|
829f560d7c5f2fb1c19fc07bc77cb1a83238e696
|
421ee35235f76ff8657f7befe5212acd7ccf3989
|
refs/heads/master
| 2020-04-28T15:42:51.773823
| 2015-04-18T14:50:02
| 2015-04-18T14:50:02
| 14,995,029
| 2
| 2
| null | 2014-07-07T12:39:22
| 2013-12-06T22:26:29
|
Python
|
UTF-8
|
Python
| false
| false
| 18,209
|
py
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Genotype.max_memory_usage'
db.add_column(u'django_analyze_genotype', 'max_memory_usage',
self.gf('django.db.models.fields.PositiveIntegerField')(null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Genotype.max_memory_usage'
db.delete_column(u'django_analyze_genotype', 'max_memory_usage')
models = {
'django_analyze.epoche': {
'Meta': {'ordering': "('genome', '-index')", 'unique_together': "(('genome', 'index'),)", 'object_name': 'Epoche'},
'genome': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'epoches'", 'to': "orm['django_analyze.Genome']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'index': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1', 'db_index': 'True'}),
'max_fitness': ('django.db.models.fields.FloatField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'mean_fitness': ('django.db.models.fields.FloatField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'min_fitness': ('django.db.models.fields.FloatField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'oldest_epoche_of_creation': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'})
},
'django_analyze.gene': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('genome', 'name'),)", 'object_name': 'Gene'},
'coverage_ratio': ('django.db.models.fields.FloatField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'default': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'exploration_priority': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1', 'db_index': 'True'}),
'genome': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'genes'", 'to': "orm['django_analyze.Genome']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'max_increment': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'max_value': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'max_value_observed': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'min_value': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'min_value_observed': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'mutation_weight': ('django.db.models.fields.FloatField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'values': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'django_analyze.genedependency': {
'Meta': {'unique_together': "(('gene', 'dependee_gene', 'dependee_value'),)", 'object_name': 'GeneDependency'},
'dependee_gene': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependents'", 'to': "orm['django_analyze.Gene']"}),
'dependee_value': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'gene': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependencies'", 'to': "orm['django_analyze.Gene']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'positive': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'django_analyze.genestatistics': {
'Meta': {'ordering': "('genome', 'gene', '-mean_fitness')", 'object_name': 'GeneStatistics', 'managed': 'False'},
'gene': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['django_analyze.Gene']", 'on_delete': 'models.DO_NOTHING', 'db_column': "'gene_id'"}),
'genome': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'gene_statistics'", 'on_delete': 'models.DO_NOTHING', 'db_column': "'genome_id'", 'to': "orm['django_analyze.Genome']"}),
'genotype_count': ('django.db.models.fields.PositiveIntegerField', [], {}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'primary_key': 'True'}),
'max_fitness': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'mean_fitness': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'min_fitness': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '1000'})
},
'django_analyze.genome': {
'Meta': {'object_name': 'Genome'},
'_epoche': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'current_genome'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['django_analyze.Epoche']"}),
'delete_inferiors': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'elite_ratio': ('django.db.models.fields.FloatField', [], {'default': '0.1'}),
'epoche': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'epoche_stall': ('django.db.models.fields.PositiveIntegerField', [], {'default': '10'}),
'epoches_since_improvement': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'error_report': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'evaluating_part': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'evaluation_timeout': ('django.db.models.fields.PositiveIntegerField', [], {'default': '300'}),
'evaluator': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'evolution_start_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'evolving': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'max_fitness': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'max_species': ('django.db.models.fields.PositiveIntegerField', [], {'default': '10'}),
'maximum_evaluated_population': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1000'}),
'maximum_population': ('django.db.models.fields.PositiveIntegerField', [], {'default': '10'}),
'min_fitness': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'mutation_rate': ('django.db.models.fields.FloatField', [], {'default': '0.1'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'production_at_best': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'production_evaluation_timeout': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'production_genotype': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'production_genomes'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['django_analyze.Genotype']"}),
'production_genotype_auto': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'ratio_evaluated': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'})
},
'django_analyze.genotype': {
'Meta': {'ordering': "('-fitness',)", 'unique_together': "(('genome', 'fingerprint'),)", 'object_name': 'Genotype', 'index_together': "(('valid', 'fresh', 'fitness'), ('genome', 'fresh'))"},
'accuracy': ('django.db.models.fields.FloatField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'complete_parts': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'complete_ratio': ('django.db.models.fields.FloatField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'epoche': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'genotypes'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['django_analyze.Epoche']"}),
'epoche_of_creation': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'epoche_of_evaluation': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'error': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'evaluating': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'evaluating_pid': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'fingerprint': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '700', 'null': 'True', 'db_column': "'fingerprint'", 'blank': 'True'}),
'fingerprint_fresh': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'fitness': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'fitness_evaluation_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'fitness_evaluation_datetime_start': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'fresh': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'gene_count': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'generation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'genome': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'genotypes'", 'to': "orm['django_analyze.Genome']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'immortal': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'max_memory_usage': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'mean_absolute_error': ('django.db.models.fields.FloatField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'mean_evaluation_seconds': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'mean_memory_usage': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'memory_usage_samples': ('picklefield.fields.PickledObjectField', [], {'null': 'True', 'blank': 'True'}),
'ontime_parts': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'ontime_ratio': ('django.db.models.fields.FloatField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'production_complete_parts': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'production_complete_ratio': ('django.db.models.fields.FloatField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'production_error': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'production_evaluating': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'production_evaluating_pid': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'production_evaluation_end_datetime': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'production_evaluation_start_datetime': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'production_fresh': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'production_ontime_parts': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'production_ontime_ratio': ('django.db.models.fields.FloatField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'production_success_parts': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'production_success_ratio': ('django.db.models.fields.FloatField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'production_total_parts': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'production_valid': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'species': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'genotypes'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['django_analyze.Species']"}),
'success_parts': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'success_ratio': ('django.db.models.fields.FloatField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'total_evaluation_seconds': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'total_parts': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'valid': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'})
},
'django_analyze.genotypegene': {
'Meta': {'ordering': "('gene__name',)", 'unique_together': "(('genotype', 'gene'),)", 'object_name': 'GenotypeGene'},
'_value': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'db_column': "'value'"}),
'_value_genome': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['django_analyze.Genome']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'gene': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'gene_values'", 'to': "orm['django_analyze.Gene']"}),
'genotype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'genes'", 'to': "orm['django_analyze.Genotype']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'django_analyze.genotypegeneillegal': {
'Meta': {'object_name': 'GenotypeGeneIllegal', 'managed': 'False'},
'gene_value': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['django_analyze.GenotypeGene']", 'on_delete': 'models.DO_NOTHING', 'primary_key': 'True', 'db_column': "'illegal_genotypegene_id'"}),
'genotype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'illegal_gene_values'", 'on_delete': 'models.DO_NOTHING', 'db_column': "'illegal_genotype_id'", 'to': "orm['django_analyze.Genotype']"}),
'illegal_gene_name': ('django.db.models.fields.CharField', [], {'max_length': '1000'})
},
u'django_analyze.genotypegenemissing': {
'Meta': {'object_name': 'GenotypeGeneMissing', 'managed': 'False'},
'default': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'gene': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['django_analyze.Gene']", 'on_delete': 'models.DO_NOTHING', 'primary_key': 'True', 'db_column': "'gene_id'"}),
'gene_name': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'genotype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'missing_gene_values'", 'on_delete': 'models.DO_NOTHING', 'db_column': "'genotype_id'", 'to': "orm['django_analyze.Genotype']"})
},
'django_analyze.species': {
'Meta': {'ordering': "('genome', 'index')", 'unique_together': "(('genome', 'index'),)", 'object_name': 'Species', 'index_together': "(('genome', 'index'),)"},
'centroid': ('picklefield.fields.PickledObjectField', [], {'null': 'True', 'blank': 'True'}),
'genome': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'species'", 'to': "orm['django_analyze.Genome']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'index': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'population': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
}
}
complete_apps = ['django_analyze']
|
[
"chris@coronis"
] |
chris@coronis
|
e9645d989c183c9dbac600d419ff4e6142b1a50e
|
0c6fb8fcc31eb658561ee7e244e8f0f7c8d72d6e
|
/build/ros_arduino_firmware/catkin_generated/pkg.installspace.context.pc.py
|
499a2c796c3c70019a20c66822f8a3cea544edbd
|
[] |
no_license
|
tutlebotmitica/mybot_ws
|
f71fac8e41a061d132694eb4f24f8375fd5eba8d
|
4bb2003069709a29f0868c5fc404da01cddd0af3
|
refs/heads/master
| 2021-08-18T21:41:13.355546
| 2017-11-24T00:43:04
| 2017-11-24T00:43:04
| 111,839,160
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 386
|
py
|
# generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "ros_arduino_firmware"
PROJECT_SPACE_DIR = "/home/turtlebot/mybot_ws/install"
PROJECT_VERSION = "0.2.0"
|
[
"paul@modulab.ro"
] |
paul@modulab.ro
|
a8a1af44b4ff29b22520121f30295c8ebe1d693f
|
554ec84f23825452f7692f91f742bdc81fa50e84
|
/chatbot_27549/urls.py
|
7d1264887b9b6eb6dad7fc662d8571cc66eddd66
|
[] |
no_license
|
crowdbotics-apps/chatbot-27549
|
a7806af210b6e7ccdfb3db3dbaaac9e9dcb5a5af
|
0e615cbb191a8d91e2874e7329b059193a8ad625
|
refs/heads/master
| 2023-05-26T13:30:53.116812
| 2021-05-29T07:24:50
| 2021-05-29T07:24:50
| 371,908,087
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,022
|
py
|
"""chatbot_27549 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include, re_path
from django.views.generic.base import TemplateView
from allauth.account.views import confirm_email
from rest_framework import permissions
from drf_yasg.views import get_schema_view
from drf_yasg import openapi
urlpatterns = [
path("", include("home.urls")),
path("accounts/", include("allauth.urls")),
path("modules/", include("modules.urls")),
path("api/v1/", include("home.api.v1.urls")),
path("admin/", admin.site.urls),
path("users/", include("users.urls", namespace="users")),
path("rest-auth/", include("rest_auth.urls")),
# Override email confirm to use allauth's HTML view instead of rest_auth's API view
path("rest-auth/registration/account-confirm-email/<str:key>/", confirm_email),
path("rest-auth/registration/", include("rest_auth.registration.urls")),
]
admin.site.site_header = "Chatbot"
admin.site.site_title = "Chatbot Admin Portal"
admin.site.index_title = "Chatbot Admin"
# swagger
api_info = openapi.Info(
title="Chatbot API",
default_version="v1",
description="API documentation for Chatbot App",
)
schema_view = get_schema_view(
api_info,
public=True,
permission_classes=(permissions.IsAuthenticated,),
)
urlpatterns += [
path("api-docs/", schema_view.with_ui("swagger", cache_timeout=0), name="api_docs")
]
|
[
"team@crowdbotics.com"
] |
team@crowdbotics.com
|
50363bbf710a2b67812e488531ed086fe0b32138
|
d40fbefbd5db39f1c3fb97f17ed54cb7b6f230e0
|
/datadog_checks_dev/datadog_checks/dev/tooling/config.py
|
7d63ecb7890e8d4df068f1419c36389ea8bb11bc
|
[
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
slightilusion/integrations-core
|
47a170d791e809f3a69c34e2426436a6c944c322
|
8f89e7ba35e6d27c9c1b36b9784b7454d845ba01
|
refs/heads/master
| 2020-05-20T18:34:41.716618
| 2019-05-08T21:51:17
| 2019-05-08T21:51:17
| 185,708,851
| 2
| 0
|
BSD-3-Clause
| 2019-05-09T02:05:19
| 2019-05-09T02:05:18
| null |
UTF-8
|
Python
| false
| false
| 3,143
|
py
|
# (C) Datadog, Inc. 2018
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import os
from collections import OrderedDict, deque
from copy import deepcopy
import toml
from appdirs import user_data_dir
from atomicwrites import atomic_write
from six import string_types
from ..compat import FileNotFoundError
from ..utils import ensure_parent_dir_exists, file_exists, read_file
APP_DIR = user_data_dir('dd-checks-dev', '')
CONFIG_FILE = os.path.join(APP_DIR, 'config.toml')
SECRET_KEYS = {'dd_api_key', 'github.token', 'pypi.pass', 'trello.key', 'trello.token'}
DEFAULT_CONFIG = OrderedDict(
[
('core', os.path.join('~', 'dd', 'integrations-core')),
('extras', os.path.join('~', 'dd', 'integrations-extras')),
('agent', os.path.join('~', 'dd', 'datadog-agent')),
('repo', 'core'),
('agent6', OrderedDict((('docker', 'datadog/agent-dev:master'), ('local', 'latest')))),
('agent5', OrderedDict((('docker', 'datadog/dev-dd-agent:master'), ('local', 'latest')))),
('dd_api_key', os.getenv('DD_API_KEY')),
('github', OrderedDict((('user', ''), ('token', '')))),
('pypi', OrderedDict((('user', ''), ('pass', '')))),
('trello', OrderedDict((('key', ''), ('token', '')))),
]
)
def config_file_exists():
return file_exists(CONFIG_FILE)
def copy_default_config():
return deepcopy(DEFAULT_CONFIG)
def save_config(config):
ensure_parent_dir_exists(CONFIG_FILE)
with atomic_write(CONFIG_FILE, mode='wb', overwrite=True) as f:
f.write(toml.dumps(config).encode('utf-8'))
def load_config():
config = copy_default_config()
try:
config.update(toml.loads(read_config_file(), OrderedDict))
except FileNotFoundError:
pass
return config
def read_config_file():
return read_file(CONFIG_FILE)
def read_config_file_scrubbed():
return toml.dumps(scrub_secrets(load_config()))
def restore_config():
config = copy_default_config()
save_config(config)
return config
def update_config():
config = copy_default_config()
config.update(load_config())
# Support legacy config where agent5 and agent6 were strings
if isinstance(config['agent6'], string_types):
config['agent6'] = OrderedDict((('docker', config['agent6']), ('local', 'latest')))
if isinstance(config['agent5'], string_types):
config['agent5'] = OrderedDict((('docker', config['agent5']), ('local', 'latest')))
save_config(config)
return config
def scrub_secrets(config):
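    # Walks dotted keys such as 'github.token' down the nested dicts and masks
    # the leaf string with '*' of equal length, leaving the structure intact.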
for secret_key in SECRET_KEYS:
branch = config
paths = deque(secret_key.split('.'))
while paths:
path = paths.popleft()
if not hasattr(branch, 'get'):
break
if path in branch:
if not paths:
old_value = branch[path]
if isinstance(old_value, string_types):
branch[path] = '*' * len(old_value)
else:
branch = branch[path]
else:
break
return config
|
[
"noreply@github.com"
] |
slightilusion.noreply@github.com
|
4090524e51e82ff0144e1f510597cdaf59752ac9
|
08d5ac4947860068f3c79edd6eed7493cee29f95
|
/theano_train.py
|
54672bc9bc60d8e2e33a5ff5e6c0dc50ddded584
|
[] |
no_license
|
MrsJLW/nslkdd-deep
|
9babc0821524247bdd4bd2afa1c197392af5ab3c
|
462705bb0f42328673a3fe6d0caf0e75802e9b87
|
refs/heads/master
| 2020-03-29T13:27:54.058568
| 2016-04-11T18:01:31
| 2016-04-11T18:01:31
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,049
|
py
|
from sklearn.metrics import accuracy_score, precision_score, confusion_matrix
import numpy as np
import theano
import theano.tensor as T
import data as d
from theano import config
np.random.seed(0)
class_weight = []
layer_stack, w_stack, b_stack, dw_stack, db_stack = [], [], [], [], []
num_examples = 40
nn_input_dim = 200
nn_output_dim = 5
# three layers in a conical structure
nn_hdims = [500, 100]
batch_per_epoch = 3000
num_passes = 2
n_layers = 2
epsilon = np.float32(0.06)
reg_lambda = np.float32(0.0001)
def activate(name, values):
if name == 'sigmoid':
return T.nnet.sigmoid(values)
elif name == 'tanh':
return T.tanh(values)
elif name == 'relu':
return T.switch(values > 0, values, 0)
elif name == 'softmax':
return T.nnet.softmax(values)
def add_layer(activation, dim):
if not layer_stack:
W = theano.shared(np.random.randn(nn_input_dim, dim).astype('float32'),
name='W' + str(len(layer_stack) + 1))
b = theano.shared(np.zeros(dim).astype('float32'), name='b' + str(len(layer_stack) + 1))
layer_stack.append(activate(activation, X.dot(W) + b))
w_stack.append(W)
b_stack.append(b)
else:
prev = layer_stack[-1]
W = theano.shared(np.random.randn(b_stack[-1].get_value().shape[0], dim).astype('float32'),
name='W' + str(len(layer_stack) + 1))
b = theano.shared(np.zeros(dim).astype('float32'), name='b' + str(len(layer_stack) + 1))
layer_stack.append(activate(activation, prev.dot(W) + b))
w_stack.append(W)
b_stack.append(b)
# print(layer_stack)
X = theano.shared(np.array(np.random.randn(200, 2000), config.floatX))
y = theano.shared(np.array(np.random.randn(200, 5), config.floatX))
c_w = theano.shared(np.array(np.random.randn(200), config.floatX))
# W1 = theano.shared(np.random.randn(nn_input_dim, nn_hdim1).astype('float32'), name='W1')
# b1 = theano.shared(np.zeros(nn_hdim1).astype('float32'), name='b1')
# W2 = theano.shared(np.random.randn(nn_hdim1, nn_hdim2).astype('float32'), name='W2')
# b2 = theano.shared(np.zeros(nn_hdim2).astype('float32'), name='b2')
# W3 = theano.shared(np.random.randn(nn_hdim2, nn_output_dim).astype('float32'), name='W3')
# b3 = theano.shared(np.zeros(nn_output_dim).astype('float32'), name='b3')
# params=[W1,b1,W2,b2,W3,b3]
params = [w_stack, b_stack]
add_layer('sigmoid', nn_hdims[0])
add_layer('sigmoid', nn_hdims[1])
add_layer('softmax', nn_output_dim)
loss_reg = 1. / num_examples * reg_lambda / 2 * (
T.sum(T.sqr(w_stack[-3])) + T.sum(T.sqr(w_stack[-2])) + T.sum(T.sqr(w_stack[-1])))
loss = ((T.nnet.categorical_crossentropy(layer_stack[-1], y)).mean()) + loss_reg
prediction = T.argmax(layer_stack[-1], axis=1)
# dW4 = T.grad(loss, w_stack[-1])
# db4 = T.grad(loss, b_stack[-1])
# dW3 = T.grad(loss, w_stack[-2])
# db3 = T.grad(loss, b_stack[-2])
# dW2 = T.grad(loss, w_stack[-3])
# db2 = T.grad(loss, b_stack[-3])
# dW1 = T.grad(loss, w_stack[-4])
# db1 = T.grad(loss, b_stack[-4])
dw_stack = [T.grad(loss, w_stack[-n_layers+i-1]) for i in range(0, n_layers + 1)]
db_stack = [T.grad(loss, b_stack[-n_layers+i-1]) for i in range(0, n_layers + 1)]
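# With n_layers = 2 these comprehensions index w_stack[-3], w_stack[-2],
# w_stack[-1] (hidden 1, hidden 2, output), one gradient per parameter tensor.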
forward_prop = theano.function([], layer_stack[-1])
calculate_loss = theano.function([], loss)
predict = theano.function([], prediction)
# u = ((w_stack[-1], w_stack[-1] - epsilon * dW4),
# (w_stack[-2], w_stack[-2] - epsilon * dW3),
# (w_stack[-3], w_stack[-3] - epsilon * dW2),
# (w_stack[-4], w_stack[-4] - epsilon * dW1),
# (b_stack[-1], b_stack[-1] - epsilon * db4),
# (b_stack[-2], b_stack[-2] - epsilon * db3),
# (b_stack[-3], b_stack[-3] - epsilon * db2),
# (b_stack[-4], b_stack[-4] - epsilon * db1))
u = tuple([tuple([w_stack[-i], w_stack[-i] - epsilon * dw_stack[n_layers - i + 1]]) for i in range(1, n_layers+2)])
u += tuple([tuple([b_stack[-i], b_stack[-i] - epsilon * db_stack[n_layers - i + 1]]) for i in range(1, n_layers+2)])
gradient_step = theano.function(
[],
updates=u)
def build_model(num_passes=5, print_loss=False):
np.random.seed(0)
w_stack[-3].set_value((np.random.randn(nn_input_dim, nn_hdims[0]) / np.sqrt(nn_input_dim)).astype('float32'))
b_stack[-3].set_value(np.zeros(nn_hdims[0]).astype('float32'))
w_stack[-2].set_value((np.random.randn(nn_hdims[0], nn_hdims[1]) / np.sqrt(nn_hdims[0])).astype('float32'))
b_stack[-2].set_value(np.zeros(nn_hdims[1]).astype('float32'))
# w_stack[-1].set_value((np.random.randn(nn_hdims[1], nn_hdims[2]) / np.sqrt(nn_hdims[1])).astype('float32'))
# b_stack[-1].set_value(np.zeros(nn_hdims[2]).astype('float32'))
w_stack[-1].set_value((np.random.randn(nn_hdims[1], nn_output_dim) / np.sqrt(nn_hdims[1])).astype('float32'))
b_stack[-1].set_value(np.zeros(nn_output_dim).astype('float32'))
for i in range(0, num_passes):
for j in range(batch_per_epoch):
a, b, c = d.train_batch_data(40)
X.set_value(a.astype('float32'))
y.set_value(b.astype('float32'))
c_w.set_value(c.astype('float32'))
if j % 500 == 0:
# print(debug(),end='\n',flush=True)
print(calculate_loss(), end=' ', flush=True)
gradient_step()
print()
if print_loss and i % 1 == 0:
print("Loss after iteration %i: %f" % (i, calculate_loss()))
build_model(num_passes=2, print_loss=True)
# save model
np.savez('out/model.npz', params=[[x.get_value() for x in params[0]], [x.get_value() for x in params[1]]])
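# Hypothetical counterpart for restoring the saved parameters (a sketch; assumes
# the params list round-trips as an object array, hence allow_pickle=True):
# data = np.load('out/model.npz', allow_pickle=True)
# ws, bs = data['params']
# for var, val in zip(w_stack, ws): var.set_value(val)
# for var, val in zip(b_stack, bs): var.set_value(val)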
a, b = d.fulldata('test')
X.set_value(a.astype('float32'))
y.set_value(b.astype('float32'))
predicted = predict()
actual = [np.argmax(x) for x in b]
print(accuracy_score(actual, predicted))
print(precision_score(actual, predicted, average='macro'))
print(confusion_matrix(actual, predicted))
|
[
"rajarsheem@gmail.com"
] |
rajarsheem@gmail.com
|
faccd9f59e65cffd749ab558cda959576930e26c
|
0ea5d0f75e7cb9f8a7fd213e2eb4f29a339ea285
|
/wnfportal_python/wnfportal_dm_konten.py
|
a209429b9d0e3bab64b37d89220a66b699158bda
|
[] |
no_license
|
wnf58/wnfportal
|
1d5d7ba8e5b63b69feb016e57fc2ee7efccc8400
|
2d593fdd9266f44d60297f7f96b6b4a2c4c7ea98
|
refs/heads/master
| 2022-12-17T03:08:06.001400
| 2020-09-26T11:51:59
| 2020-09-26T11:51:59
| 115,601,732
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 33,164
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
import fdb
import os
import configparser
import wnfportal_dm_datenbank
import wnfportal_tools as T
import time
from reportlab.graphics.shapes import Drawing
from reportlab.graphics.charts.barcharts import VerticalBarChart
from reportlab.graphics.charts.piecharts import Pie
from reportlab.graphics import renderPM
class dmKonten(wnfportal_dm_datenbank.dmDatenbank):
def __init__(self):
wnfportal_dm_datenbank.dmDatenbank.__init__(self)
self.setIniDatei('wnfKuB.ini')
# self.setIniDatei('wnfKITAOffice.ini')
def summeAlleKonten(self):
aSQL = """
SELECT SUM(BETRAG) FROM KO_KUBEA
"""
return self.sqlSumme(aSQL)
def summeProjekt(self, aProjekt_ID):
aSQL = "SELECT SUM(BETRAG) FROM KO_KUBEA WHERE PROJEKT_ID=%s" % (aProjekt_ID)
print(aSQL)
return self.sqlSumme(aSQL)
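    # Sketch (depends on dmDatenbank's API): fdb uses qmark parameters, so a
    # parameterized variant would pass the value separately, e.g.
    #   cur.execute("SELECT SUM(BETRAG) FROM KO_KUBEA WHERE PROJEKT_ID=?", (aProjekt_ID,))
    # instead of interpolating aProjekt_ID into the SQL string.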
def summeProjektWintergarten(self):
aProjekt_ID = self.getProjekt_ID_Wintergarten_2017()
return self.summeProjekt(aProjekt_ID)
def listeAlleKonten(self):
aSQL = """
SELECT E.KUB_ID,K.KURZ,SUM(E.BETRAG)
FROM KO_KUBEA E
LEFT JOIN KO_KUB K ON K.ID=E.KUB_ID
GROUP BY K.KURZ,E.KUB_ID
HAVING SUM(E.BETRAG)<>0
ORDER BY K.KURZ
"""
cur = self.sqlOpen(aSQL)
        if cur is None:
return []
konten = []
for row in cur:
k = {'konto_id': row[0], 'konto': row[1], 'saldo': T.sDM(row[2])}
# print k
konten.append(k)
return konten
def jsonAlleKonten(self):
j = {'summe': T.sDM(self.summeAlleKonten()), 'konten': self.listeAlleKonten()}
return j
def listeLetzteEA(self):
aSumme = 0
aSQL = """
SELECT E.ID,E.DATUM,E.KURZ,E.BETRAG
FROM KO_KUBEA E
WHERE E.DATUM >= %s
ORDER BY E.DATUM DESC,E.KURZ
"""
aSQL = aSQL % (T.wnfDateToSQL(T.wnfTagVorVor8Wochen()))
print(aSQL)
cur = self.sqlOpen(aSQL)
        if cur is None:
return aSumme, []
ea = []
for row in cur:
s = "%s | %s" % (row[1].strftime("%d.%m.%y"), row[2])
ttmmjj = "%s" % (row[1].strftime("%d.%m.%y"))
# print (s)
k = {'konto_ea_id': row[0],
'datum': str(row[1]),
'ttmmjj': ttmmjj,
'kurz': row[2],
'betrag': T.sDM(row[3]),
'datumkurz': s}
aSumme = aSumme + row[3]
print(k)
ea.append(k)
return aSumme, ea
def listeProjekt(self, aProjekt_ID):
print(aProjekt_ID)
aSumme = 0
aSQL = """
SELECT E.ID,E.DATUM,E.KURZ,E.BETRAG
FROM KO_KUBEA E
WHERE E.PROJEKT_ID = %d
ORDER BY E.DATUM DESC,E.KURZ
"""
aSQL = aSQL % (aProjekt_ID)
print(aSQL)
cur = self.sqlOpen(aSQL)
        if cur is None:
return aSumme, []
ea = []
for row in cur:
s = "%s | %s" % (row[1].strftime("%d.%m.%y"), row[2])
ttmmjj = "%s" % (row[1].strftime("%d.%m.%y"))
# print (s)
k = {'konto_ea_id': row[0],
'datum': str(row[1]),
'ttmmjj': ttmmjj,
'kurz': row[2],
'betrag': T.sDM(row[3]),
'datumkurz': s}
aSumme = aSumme + row[3]
print(k)
ea.append(k)
return aSumme, ea
def listeProjektK(self, aProjekt_ID):
print(aProjekt_ID)
aSumme = 0
aSQL = """
SELECT K.ID,MAX(E.DATUM),K.KURZ,SUM(E.BETRAG)
FROM KO_KUBEA E
LEFT JOIN KO_KUBKAT K ON K.ID=E.KAT_ID
WHERE E.PROJEKT_ID = %d
AND NOT E.KAT_ID IS NULL
GROUP BY K.ID,K.KURZ
ORDER BY 4,K.KURZ
"""
aSQL = aSQL % (aProjekt_ID)
print(aSQL)
cur = self.sqlOpen(aSQL)
        if cur is None:
return aSumme, []
ea = []
for row in cur:
s = "%s | %s" % (row[1].strftime("%d.%m.%y"), row[2])
ttmmjj = "%s" % (row[1].strftime("%d.%m.%y"))
# print (s)
k = {'konto_ea_id': row[0],
'datum': str(row[1]),
'ttmmjj': ttmmjj,
'kurz': row[2],
'betrag': T.sDM(row[3]),
'datumkurz': s}
aSumme = aSumme + row[3]
print(k)
ea.append(k)
return aSumme, ea
def jsonLetzteEA(self):
aSumme, ea = self.listeLetzteEA()
j = {'summe': T.sDM(aSumme), 'ea': ea}
return j
def jsonListEA(self):
aSQL = """
SELECT E.ID,E.DATUM,E.KURZ, E.BEZ, E.BETRAG
FROM KO_KUBEA E
WHERE E.DATUM >= %s
ORDER BY E.DATUM DESC,E.KURZ
"""
aSQL = aSQL % (T.wnfDateToSQL(T.wnfErsterTagVormonat()))
# print(aSQL)
cur = self.sqlOpen(aSQL)
        if cur is None:
return []
ea = []
for row in cur:
s = "%s | %s" % (row[1].strftime("%d.%m.%y"), row[2])
ttmmjj = "%s" % (row[1].strftime("%d.%m.%y"))
# print s
k = {'id': row[0],
'datum': str(row[1]),
'kurz': row[2],
'bez': row[3],
'betrag': str(row[4])}
# print(k)
ea.append(k)
# print(ea)
return ea
def jsonListEASkip(self, aFirst, aSkip):
aSQL = """
SELECT FIRST %s SKIP %s E.ID,E.DATUM,E.KURZ, E.BEZ, E.BETRAG
FROM KO_KUBEA E
ORDER BY E.DATUM DESC,E.KURZ
"""
aSQL = aSQL % (aFirst, aSkip)
# print(aSQL)
cur = self.sqlOpen(aSQL)
        if cur is None:
return []
ea = []
for row in cur:
s = "%s | %s" % (row[1].strftime("%d.%m.%y"), row[2])
ttmmjj = "%s" % (row[1].strftime("%d.%m.%y"))
# print s
k = {'id': row[0],
'datum': str(row[1]),
'kurz': row[2],
'bez': row[3],
'betrag': str(row[4])}
# print(k)
ea.append(k)
print(ea)
return ea
def jsonDetailEA(self, id):
aSQL = """
SELECT E.ID,E.DATUM,E.KURZ, E.BEZ, E.BETRAG
FROM KO_KUBEA E
WHERE E.ID = %s
ORDER BY E.DATUM DESC,E.KURZ
"""
aSQL = aSQL % (id)
# print(aSQL)
cur = self.sqlOpen(aSQL)
if (cur == None):
return []
for row in cur:
s = "%s | %s" % (row[1].strftime("%d.%m.%y"), row[2])
ttmmjj = "%s" % (row[1].strftime("%d.%m.%y"))
# print s
k = {'id': row[0],
'datum': str(row[1]),
'kurz': row[2],
'bez': row[3],
'betrag': str(row[4])}
print(k)
return k
def jsonListKonten(self):
aSQL = """
SELECT
K.ID,
MAX(E.DATUM),
K.KURZ,
SUM(E.BETRAG)
FROM KO_KUBEA E
LEFT JOIN KO_KUB K ON K.ID=E.KUB_ID
GROUP BY K.KURZ,K.ID
HAVING SUM(E.BETRAG)<>0
ORDER BY K.KURZ
"""
# print(aSQL)
cur = self.sqlOpen(aSQL)
if (cur == None):
return []
ea = []
for row in cur:
k = {'id': row[0],
'datum': str(row[1]),
'kurz': row[2],
'betrag': str(row[3])}
# print(k)
ea.append(k)
print(ea)
return ea
def jsonKontostandSumme(self):
aSQL = """
SELECT
SUM(E.BETRAG)
FROM KO_KUBEA E
"""
# print(aSQL)
cur = self.sqlOpen(aSQL)
if (cur == None):
return {}
for row in cur:
k = [{'summe': str(row[0])}]
print(k)
return k
def htmlLetzteEA(self):
aSumme, ea = self.listeLetzteEA()
s = ''
for l in ea:
datum = l['ttmmjj'] # .encode('utf-8')
kurz = l['kurz'] # .encode('utf-8')
betrag = l['betrag']
s = '%s <tr><td class=table-3c-spalte1>%s</td><td class=table-3c-spalte2>%s</td><td class=table-3c-spalte3>%s</td></tr>' % (
s, datum, kurz, betrag)
return ("<table>"
"<tr><th class=table-3c-spalte1>Datum</th><th class=table-3c-spalte2>Bezeichnung</th><th class=table-3c-spalte3>Betrag</th></tr>"
"%s"
"<tr><th class=table-3c-spalte1></th><th class=table-3c-spalte2>Summe</th><th class=table-3c-spalte3>%s</th></tr>"
"</table>") % (s, T.sDM(aSumme))
def htmlEAMonatlich(self):
aSumme, ea = self.analyseEAMonatlich()
s = ''
for l in ea:
kurz = l['kurz'] # .encode('utf-8')
betrag = l['betrag']
durchschnitt = l['durchschnitt']
s = '%s <tr><td class=table-3c-spalte1>%s</td><td class=table-3c-spalte3>%.2f</td><td class=table-3c-spalte3>%.2f</td></tr>' % (
s, kurz, betrag, durchschnitt)
return ("<table>"
"<tr><th class=table-3c-spalte1>Bezeichnung</th><th class=table-3c-spalte3>Betrag</th><th class=table-3c-spalte3>Durchschnitt</th></tr>"
"%s"
"<tr><th class=table-3c-spalte1></th><th class=table-3c-spalte2>Summe</th><th class=table-3c-spalte3>%s</th></tr>"
"</table>") % (s, T.sDM(aSumme))
def htmldiagrammVonBis(self, aVon, aBis, dn):
aSumme, aData, aLabels, aRecord = self.analyseAusgabenVonBis(aVon, aBis)
p = '/home/wnf/Entwicklung/PycharmProjects/wnfportal/wnfportal_python/www/img/'
self.diagrammKostenartVonBis(p, dn, aData, aLabels)
s = ''
for l in aRecord:
aLabel = l['ID']
kurz = l['kurz'] # .encode('utf-8')
betrag = l['sDM']
s = '%s <tr><td class=table-3c-spalte1>%s</td><td class=table-3c-spalte2>%s</td><td class=table-3c-spalte3>%s</td></tr>' % (
s, aLabel, kurz, betrag)
tabelle = ("<table>"
"<tr><th class=table-3c-spalte1>Kurz</th><th class=table-3c-spalte2>Bezeichnung</th><th class=table-3c-spalte3>Betrag</th></tr>"
"%s"
"<tr><th class=table-3c-spalte1></th><th class=table-3c-spalte2>Summe</th><th class=table-3c-spalte3>%s</th></tr>"
"</table>") % (s, T.sDM(aSumme))
return ('<img src="img/%s.png" alt="Diagramm"> %s' % (dn, tabelle))
def htmldiagrammLetzterMonat(self):
aVon = T.wnfDateToSQL(T.wnfErsterTagVormonat())
aBis = T.wnfDateToSQL(T.wnfLetzterTagVormonat())
dn = 'kreis_vormonat'
return self.htmldiagrammVonBis(aVon, aBis, dn)
def htmldiagrammLetzte12Monate(self):
aVon = T.wnfDateToSQL(T.wnfErsterVor12Monaten())
aBis = T.wnfDateToSQL(T.wnfHeute())
dn = 'kreis_12Monate'
return self.htmldiagrammVonBis(aVon, aBis, dn)
def htmldiagrammDieserMonat(self):
aVon = T.wnfDateToSQL(T.wnfErsterDieserMonat())
aBis = T.wnfDateToSQL(T.wnfLetzterDieserMonat())
dn = 'kreis_diesermonat'
return self.htmldiagrammVonBis(aVon, aBis, dn)
def csvKontoVerlauf(self, dn):
        # The file is rewritten at most once per minute
if os.path.exists(dn):
if (time.time() - os.path.getmtime(dn) < 60):
return
os.remove(dn)
print(dn)
with open(dn, 'x') as out:
s = 'Datum,Kontostand'
out.write(s + '\n')
        # all months
aSQL = 'SELECT MIN(E.DATUM),MAX(E.DATUM) FROM KO_KUBEA E'
cur = self.sqlOpen(aSQL)
if (cur == None):
return
for row in cur:
aVon = row[0]
aBis = row[1]
while (aVon < aBis):
aVon = T.ersterNaechsterMonat(aVon)
# print(aVon)
aSQL = """
SELECT
SUM(E.BETRAG)
FROM KO_KUBEA E
WHERE E.DATUM < '%s'
""" % (aVon)
# print(aSQL)
cur = self.sqlOpen(aSQL)
if (cur == None):
return
for row in cur:
betrag = row[0]
s = aVon.strftime("%Y/%m/%d")
s = "%s,%s" % (s, betrag)
print(s)
with open(dn, 'a') as out:
out.write(s + '\n')
return
def listeAlleJahreEA(self):
aSumme = 0
aAnzJahre = 0
aSQL = """
SELECT
EXTRACT(YEAR FROM E.DATUM) AS JAHR,
SUM(E.BETRAG),
SUM(CASE WHEN E.BETRAG > 0 THEN E.BETRAG END) AS Einnahme,
SUM(CASE WHEN E.BETRAG < 0 THEN E.BETRAG END) AS Ausgabe
FROM KO_KUBEA E
WHERE E.IGNORIEREN = 0
GROUP BY EXTRACT(YEAR FROM E.DATUM)
ORDER BY 1 DESC
"""
# print aSQL
cur = self.sqlOpen(aSQL)
if (cur == None):
return aSumme, []
ea = []
for row in cur:
s = "%s | %20s" % (row[0], T.sDM(row[1]))
# print s
k = {'jahr': row[0],
'betrag': row[1],
'sDM': T.sDM(row[1]),
'sDME': T.sDM(row[2]),
'sDMA': T.sDM(row[3])
}
aSumme = aSumme + row[1]
aAnzJahre = aAnzJahre + 1
# print k
ea.append(k)
if aAnzJahre > 0:
aSumme = aSumme / aAnzJahre
return aAnzJahre, aSumme, ea
def listeKostenartVonBis(self, aVon, aBis):
aSQL = """
SELECT ABS(SUM(E.BETRAG)),K.KURZ,K.ID
FROM KO_KUBEA E
LEFT JOIN KO_KUBKST K ON K.ID=E.KST_ID
WHERE E.IGNORIEREN = 0
AND NOT E.KST_ID IS NULL
AND E.BETRAG < 0
AND E.DATUM BETWEEN %s AND %s
GROUP BY K.KURZ,K.ID
ORDER BY 2
"""
aSQL = aSQL % (aVon, aBis)
print(aSQL)
cur = self.sqlOpen(aSQL)
if (cur == None):
            return 0, [], [], []  # match the four values returned on success
ea = []
kst = []
aRecord = []
aSumme = 0
for row in cur:
# s = "%s | %20s" % (row[0], T.sDM(row[1]))
# print s
ea.append(round(row[0]))
aSumme = aSumme + row[0]
kst.append(row[1])
k = {'betrag': row[0],
'sDM': T.sDM(row[0]),
'kurz': row[1],
'ID': row[2]
}
# print(aSumme)
aRecord.append(k)
return aSumme, ea, kst, aRecord
def analyseAusgabenVonBis10Prozent(self, aKst_ID, aKst_Kurz, aVon, aBis, a10Prozent):
aSQL = """
SELECT SUM(ABS(E.BETRAG)),K.KURZ,K.ID
FROM KO_KUBEA E
LEFT JOIN KO_KUBKAT K ON K.ID=E.KAT_ID
WHERE E.IGNORIEREN = 0
AND E.KST_ID = %d
AND E.BETRAG < 0
AND E.DATUM BETWEEN %s AND %s
GROUP BY K.KURZ,K.ID
ORDER BY 2
"""
aSQL = aSQL % (aKst_ID, aVon, aBis)
print(aSQL)
cur = self.sqlOpen(aSQL)
if (cur == None):
return []
aRec = []
aRest = 0
for row in cur:
if (row[0] > a10Prozent):
x = {'betrag': row[0],
'sDM': T.sDM(row[0]),
'kurz': '%s - %s' % (aKst_Kurz, row[1]),
'ID': aKst_ID
}
aRec.append(x)
else:
aRest = aRest + row[0]
if aRest > 0:
x = {'betrag': aRest,
'sDM': T.sDM(aRest),
'kurz': '%s' % (aKst_Kurz),
'ID': aKst_ID
}
aRec.append(x)
return aRec
def analyseAusgabenVonBis(self, aVon, aBis):
"""
Alle EA bis 10 % zusammenfassen
"""
aSQL = """
SELECT ABS(SUM(E.BETRAG)),K.KURZ,K.ID
FROM KO_KUBEA E
LEFT JOIN KO_KUBKST K ON K.ID=E.KST_ID
WHERE E.IGNORIEREN = 0
AND NOT E.KST_ID IS NULL
AND E.BETRAG < 0
AND E.DATUM BETWEEN %s AND %s
GROUP BY K.KURZ,K.ID
ORDER BY 2
"""
aSQL = aSQL % (aVon, aBis)
print(aSQL)
cur = self.sqlOpen(aSQL)
if (cur == None):
return 0, [], [], []
ea = []
kst = []
aRecord = []
aRecKst = []
aSumme = 0
for row in cur:
# s = "%s | %20s" % (row[0], T.sDM(row[1]))
# print s
k = {'betrag': row[0],
'kurz': row[1],
'KST_ID': row[2]
}
aSumme = aSumme + row[0]
# print(aSumme)
aRecKst.append(k)
a10Prozent = aSumme / 20
print(aSumme, a10Prozent)
for k in aRecKst:
if (k['betrag'] < a10Prozent):
x = {'betrag': k['betrag'],
'sDM': T.sDM(k['betrag']),
'kurz': k['kurz'],
'ID': k['KST_ID']
}
aRecord.append(x)
else:
rx = self.analyseAusgabenVonBis10Prozent(k['KST_ID'], k['kurz'], aVon, aBis, a10Prozent)
for x in rx:
aRecord.append(x)
print(aRecord)
for x in aRecord:
ea.append(round(x['betrag']))
kst.append(x['kurz'])
print(kst)
print(ea)
self.closeConnection()
return aSumme, ea, kst, aRecord
def analyseEAMonatlich(self):
"""
Alle EA mit Monatlich <> 0 zusammenfassen
"""
aSQL = """
SELECT SUM(E.BETRAG),K.KURZ,K.ID,E.MONATLICH,COUNT(*)
FROM KO_KUBEA E
LEFT JOIN KO_KUBKST K ON K.ID=E.KST_ID
WHERE E.IGNORIEREN = 0
AND E.MONATLICH <> 0
AND NOT E.KST_ID IS NULL
GROUP BY K.KURZ,K.ID,E.MONATLICH
ORDER BY 2
"""
aSQL = """
SELECT ABS(SUM(E.BETRAG)),E.KURZ,E.MONATLICH,COUNT(*)
FROM KO_KUBEA E
WHERE E.IGNORIEREN = 0
AND E.MONATLICH <> 0
AND E.BETRAG<0
GROUP BY E.KURZ,E.MONATLICH
ORDER BY 1,2
"""
print(aSQL)
cur = self.sqlOpen(aSQL)
if (cur == None):
            return 0, []  # this function returns (aSumme, aRecord)
aRecord = []
aSumme = 0
for row in cur:
# s = "%s | %20s" % (row[0], T.sDM(row[1]))
# print s
aDurchschnitt = row[0] / row[2] / row[3]
k = {'betrag': row[0],
'kurz': row[1],
'monatlich': row[2],
'anzahl': row[3],
'durchschnitt': aDurchschnitt
}
aSumme = aSumme + aDurchschnitt
# print(aSumme, k)
aRecord.append(k)
return aSumme, aRecord
def listeAlleMonateEA(self):
aSumme = 0
aAnzMonate = 0
aSQL = """
SELECT
EXTRACT(YEAR FROM E.DATUM) AS JAHR,
EXTRACT(MONTH FROM E.DATUM) AS MONAT,
SUM(E.BETRAG),
SUM(CASE WHEN E.BETRAG > 0 THEN E.BETRAG END) AS Einnahme,
SUM(CASE WHEN E.BETRAG < 0 THEN E.BETRAG END) AS Ausgabe
FROM KO_KUBEA E
WHERE E.IGNORIEREN = 0
GROUP BY EXTRACT(YEAR FROM E.DATUM),EXTRACT(MONTH FROM E.DATUM)
ORDER BY 1 DESC,2 DESC
"""
# print(aSQL)
cur = self.sqlOpen(aSQL)
if (cur == None):
return aAnzMonate, aSumme, []
ea = []
for row in cur:
s = "%s/%s | %20s" % (row[0],row[1], T.sDM(row[2]))
# print(s)
k = {'jahr': row[0],
'monat': row[1],
'betrag': row[2],
'sDM': T.sDM(row[2]),
'sDME': T.sDM(row[3]),
'sDMA': T.sDM(row[4])
}
            aSumme = aSumme + row[2]  # fix: BETRAG is column index 2, not the month number
aAnzMonate = aAnzMonate + 1
# print(k)
ea.append(k)
if aAnzMonate > 0:
aSumme = aSumme / aAnzMonate
return aAnzMonate, aSumme, ea
def openKontoverlauf(self):
aSQL = """
SELECT
EXTRACT(YEAR FROM E.DATUM) AS JAHR,
EXTRACT(MONTH FROM E.DATUM) AS MONAT,
SUM(E.BETRAG)
FROM KO_KUBEA E
WHERE E.IGNORIEREN = 0
GROUP BY EXTRACT(YEAR FROM E.DATUM),EXTRACT(MONTH FROM E.DATUM)
ORDER BY 1,2
"""
# print(aSQL)
return self.sqlOpen(aSQL)
def openAlleMonateEinkommen(self):
aSQL = """
SELECT
FIRST 240
EXTRACT(YEAR FROM E.DATUM) AS JAHR,
EXTRACT(MONTH FROM E.DATUM) AS MONAT,
SUM(E.BETRAG),
SUM(CASE WHEN E.KAT_ID = 22 THEN E.BETRAG END) AS Uwe,
SUM(CASE WHEN E.KAT_ID = 24 THEN E.BETRAG END) AS Sabine
FROM KO_KUBEA E
WHERE E.IGNORIEREN = 0
AND E.BETRAG > 1000
AND E.KST_ID = 11
AND E.KAT_ID IN (22,24)
GROUP BY EXTRACT(YEAR FROM E.DATUM),EXTRACT(MONTH FROM E.DATUM)
ORDER BY 1 DESC,2 DESC
"""
# print(aSQL)
return self.sqlOpen(aSQL)
def openAlleJahreEinkommen(self):
aSQL = """
SELECT
EXTRACT(YEAR FROM E.DATUM) AS JAHR,
SUM(E.BETRAG),
SUM(CASE WHEN E.KAT_ID = 22 THEN E.BETRAG END) AS Uwe,
SUM(CASE WHEN E.KAT_ID = 24 THEN E.BETRAG END) AS Sabine
FROM KO_KUBEA E
WHERE E.IGNORIEREN = 0
AND E.KST_ID = 11
AND E.KAT_ID IN (22,24)
GROUP BY EXTRACT(YEAR FROM E.DATUM)
ORDER BY 1 DESC,2 DESC
"""
# print(aSQL)
return self.sqlOpen(aSQL)
def chartjsKontoverlauf(self):
aLabels = ''
aDaten = ''
        # all months
aSQL = 'SELECT MIN(E.DATUM),MAX(E.DATUM) FROM KO_KUBEA E'
cur = self.sqlOpen(aSQL)
if (cur == None):
return aLabels, aDaten
for row in cur:
aVon = row[0]
aBis = row[1]
while (aVon < aBis):
aVon = T.ersterNaechsterMonat(aVon)
# print(aVon)
aSQL = """
SELECT
SUM(E.BETRAG)
FROM KO_KUBEA E
WHERE E.DATUM < '%s'
""" % (aVon)
# print(aSQL)
cur = self.sqlOpen(aSQL)
if (cur == None):
return aLabels, aDaten
for row in cur:
betrag = row[0]
s = aVon.strftime("%m/%Y")
print(s)
if aLabels != '':
aLabels = aLabels + ', '
aLabels = ("%s'%s'") % (aLabels, s)
if aDaten != '':
aDaten = aDaten + ', '
aDaten = ("%s %s") % (aDaten, betrag)
return aLabels, aDaten
def chartjsAlleMonateEinkommen(self):
"""
aLabels = "'Jan', 'Feb', 'Mar'"
aEKU = " 1000 , 1500 , 2000"
aEKS = " 4000 , 5000 , 6000"
"""
aLabels = ''
aEKU = ''
aEKS = ''
cur = self.openAlleMonateEinkommen()
if (cur == None):
return aLabels, aEKU, aEKS
for row in cur:
if aLabels != '':
aLabels = ', ' + aLabels
aLabels = ("'%s/%s'%s") % (row[1], row[0], aLabels)
if aEKU != '':
aEKU = ', ' + aEKU
aEKU = ("%s %s") % (row[3], aEKU)
if aEKS != '':
aEKS = ', ' + aEKS
aEKS = ("%s %s") % (row[4], aEKS)
return aLabels, aEKU, aEKS
def chartjsAlleJahreEinkommen(self):
"""
aLabels = "'Jan', 'Feb', 'Mar'"
aEKU = " 1000 , 1500 , 2000"
aEKS = " 4000 , 5000 , 6000"
"""
aLabels = ''
aEKU = ''
aEKS = ''
cur = self.openAlleJahreEinkommen()
if (cur == None):
return aLabels, aEKU, aEKS
for row in cur:
if aLabels != '':
aLabels = ', ' + aLabels
aLabels = ("'%s'%s") % (row[0], aLabels)
if aEKU != '':
aEKU = ', ' + aEKU
aEKU = ("%s %s") % (row[2], aEKU)
if aEKS != '':
aEKS = ', ' + aEKS
aEKS = ("%s %s") % (row[3], aEKS)
return aLabels, aEKU, aEKS
def listeAlleMonateEinkommen(self):
aSumme = 0
aAnzMonate = 0
cur = self.openAlleMonateEinkommen()
if (cur == None):
return aAnzMonate, aSumme, []
ea = []
for row in cur:
s = "%s | %20s" % (row[0], T.sDM(row[1]))
# print s
k = {'jahr': row[0],
'monat': row[1],
'betrag': row[2],
'sDMG': T.sDM(row[2]),
'sDMU': T.sDM(row[3]),
'sDMS': T.sDM(row[4])
}
aSumme = aSumme + row[2]
aAnzMonate = aAnzMonate + 1
# print k
ea.append(k)
if aAnzMonate > 0:
aSumme = aSumme / aAnzMonate
return aAnzMonate, aSumme, ea
def diagrammKostenartVonBis(self, aPfad, aDateiname, aData, aLabels):
d = Drawing(800, 800)
pie = Pie()
pie.x = 360
pie.y = 360
pie.xradius = 300
pie.yradius = 300
pie.data = aData
pie.labels = aLabels
pie.slices.strokeWidth = 0.5
# pie.slices[3].popout = 20
d.add(pie)
d.save(formats=['png'], outDir=aPfad, fnRoot=aDateiname)
    def diagrammAlleJahreEA(self, aPngDateiname):
        # set the overall size in pixels
        d = Drawing(800, 600)
        # create the chart
        diagramm = VerticalBarChart()
        # position and size of the chart
        diagramm.x = 50
        diagramm.y = 50
        diagramm.width = 700
        diagramm.height = 500
        # fetch the data
        daten = []
        jahre = []
        aAnzJahre, aSumme, ea = self.listeAlleJahreEA()
        print(ea)
        for x in ea:
            print(x['betrag'], x['jahr'])
            daten.append(float(x['betrag']))
            jahre.append(str(x['jahr']))
        ymin = min(daten)
        ymax = max(daten)
        # chart data must be supplied as a list of tuples
        daten = [tuple(daten)]
        print(daten)
        print(jahre)
        # return False
        # add the data
        diagramm.data = daten
        # format the Y axis ("valueAxis" in ReportLab)
        diagramm.valueAxis.valueMin = ymin
        diagramm.valueAxis.valueMax = ymax
        diagramm.valueAxis.valueStep = 2000
        # format the X axis ("categoryAxis" in ReportLab)
        diagramm.categoryAxis.categoryNames = jahre
        # draw the chart
        d.add(diagramm)
        # ... and save it; see http://www.reportlab.com/software/opensource/rl-toolkit/guide/
        print(aPngDateiname)
        renderPM.drawToFile(d, aPngDateiname, 'PNG')
    def diagrammAlleMonateEinkommen(self, aPngDateiname):
        # set the overall size in pixels
        d = Drawing(800, 600)
        # create the chart
        diagramm = VerticalBarChart()
        # position and size of the chart
        diagramm.x = 50
        diagramm.y = 50
        diagramm.width = 700
        diagramm.height = 500
        # fetch the data
        daten = []
        jahre = []
        aAnzJahre, aSumme, ea = self.listeAlleMonateEinkommen()
        print(ea)
        for x in ea:
            # print (x)
            daten.append(float(x['betrag']))
            jahre.append(str(x['jahr']))
        ymin = min(daten)
        ymax = max(daten)
        # chart data must be supplied as a list of tuples
        daten = [tuple(daten)]
        print(daten)
        print(jahre)
        # return False
        # add the data
        diagramm.data = daten
        # format the Y axis ("valueAxis" in ReportLab)
        diagramm.valueAxis.valueMin = ymin
        diagramm.valueAxis.valueMax = ymax
        diagramm.valueAxis.valueStep = 2000
        # format the X axis ("categoryAxis" in ReportLab)
        diagramm.categoryAxis.categoryNames = jahre
        # draw the chart
        d.add(diagramm)
        # ... and save it; see http://www.reportlab.com/software/opensource/rl-toolkit/guide/
        print(aPngDateiname)
        renderPM.drawToFile(d, aPngDateiname, 'PNG')
def jsonAlleJahreEA(self):
        aAnzJahre, aSumme, ea = self.listeAlleJahreEA()  # listeAlleJahreEA returns three values
j = {'summe': T.sDM(aSumme), 'ea': ea}
return j
def htmlAlleJahreEA(self):
aAnzahl, aSumme, ea = self.listeAlleJahreEA()
s = ''
for l in ea:
jahr = l['jahr']
# print type(konto),konto
betrag = l['betrag']
sSaldo = l['sDM']
sDME = l['sDME']
sDMA = l['sDMA']
if (betrag < 0):
aKlasse = 'class=table-right-currency-red'
else:
aKlasse = 'class=table-right-currency'
# print type(konto),konto
s = '%s <tr><td class=table-left>%s</td><td class=table-right-currency>%s</td><td class=table-right-currency>%s</td><td %s>%s</td></tr>' % (
s, jahr, sDME, sDMA, aKlasse, sSaldo)
return ("<table>"
"<tr><th class=table-left>Jahr</th><th class=table-right-currency>Einnahmen</th><th class=table-right-currency>Ausgaben</th><th class=table-right-currency>Saldo</th></tr>"
"%s"
"<tr><th class=table-left>Durchschnitt für %d Jahre</th><th class=table-right-currency></th><th class=table-right-currency></th><th class=table-right-currency>%s</th></tr>"
"</table>") % (s, aAnzahl, T.sDM(aSumme))
return ("<table>"
"<tr><th class=table-left>Jahr</th><th class=table-right-currency>Saldo</th></tr>"
"%s"
"<tr><th class=table-left>Durchschnitt</th><th class=table-right-currency>%s</th></tr>"
"</table>") % (s, T.sDM(aSumme))
def htmlAlleMonateEA(self):
aAnzahl, aSumme, ea = self.listeAlleMonateEA()
# print(type(ea))
s = ''
for l in ea:
# print(l)
monat = "%2d/%d" % (l['monat'], l['jahr'])
betrag = l['betrag']
sSaldo = l['sDM']
sDME = l['sDME']
sDMA = l['sDMA']
if (betrag < 0):
aKlasse = 'class=table-right-currency-red'
else:
aKlasse = 'class=table-right-currency'
# print type(konto),konto
sDME = '<a href="monatea/%d/%d/">%s</a>' % (l['jahr'], l['monat'], sDME)
s = '%s <tr>' \
'<td class=table-left>%s</td>' \
'<td %s>%s</td>' \
'<td class=table-right-currency>%s</td>' \
'<td class=table-right-currency>%s</td>' \
'</tr>' \
% (s, monat, aKlasse, sSaldo, sDME, sDMA)
# print(s)
return ("<table>"
"<tr><th class=table-left>Monat</th><th class=table-right-currency>Saldo</th><th class=table-right-currency>Einnahmen</th><th class=table-right-currency>Ausgaben</th></tr>"
"%s"
"<tr><th class=table-left>Durchschnitt für %d Monate</th><th class=table-right-currency></th><th class=table-right-currency></th><th class=table-right-currency>%s</th></tr>"
"</table>") % (s, aAnzahl, T.sDM(aSumme))
def htmlAlleMonateEinkommen(self):
aAnzahl, aSumme, ea = self.listeAlleMonateEinkommen()
s = ''
for l in ea:
monat = "%2d/%d" % (l['monat'], l['jahr'])
betrag = l['betrag']
sDMG = l['sDMG']
sDMU = l['sDMU']
sDMS = l['sDMS']
if (betrag < 0):
aKlasse = 'class=table-right-currency-red'
else:
aKlasse = 'class=table-right-currency'
# print type(konto),konto
s = '%s <tr><td class=table-left>%s</td><td class=table-right-currency>%s</td><td class=table-right-currency>%s</td><td %s>%s</td></tr>' % (
s, monat, sDMU, sDMS, aKlasse, sDMG)
return ("<table>"
"<tr><th class=table-left>Monat</th><th class=table-right-currency>Uwe</th><th class=table-right-currency>Sabine</th><th class=table-right-currency>Gesamt</th></tr>"
"%s"
"<tr><th class=table-left>Durchschnitt für %d Monate</th><th class=table-right-currency></th><th class=table-right-currency></th><th class=table-right-currency>%s</th></tr>"
"</table>") % (s, aAnzahl, T.sDM(aSumme))
def getProjekt_ID(self, aKurz):
aSQL = "SELECT MAX(ID) FROM KO_KUBPROJEKT P WHERE P.KURZ='%s'"
aSQL = aSQL % (aKurz)
print(aSQL)
cur = self.sqlOpen(aSQL)
if (cur == None):
return 0
else:
for row in cur:
if (row[0]):
return row[0]
else:
return 0
def getProjekt_ID_Wintergarten_2017(self):
return self.getProjekt_ID('Wintergarten 2017')
def htmlProjekt(self, aProjekt_ID):
aSumme, ea = self.listeProjekt(aProjekt_ID)
s = ''
for l in ea:
datum = l['ttmmjj'] # .encode('utf-8')
kurz = l['kurz'] # .encode('utf-8')
betrag = l['betrag']
s = '%s <tr><td class=table-3c-spalte1>%s</td><td class=table-3c-spalte2>%s</td><td class=table-3c-spalte3>%s</td></tr>' % (
s, datum, kurz, betrag)
return ("<table>"
"<tr><th class=table-3c-spalte1>Datum</th><th class=table-3c-spalte2>Bezeichnung</th><th class=table-3c-spalte3>Betrag</th></tr>"
"%s"
"<tr><th class=table-3c-spalte1></th><th class=table-3c-spalte2>Summe</th><th class=table-3c-spalte3>%s</th></tr>"
"</table>") % (s, T.sDM(aSumme))
def htmlProjektK(self, aProjekt_ID):
aSumme, ea = self.listeProjektK(aProjekt_ID)
s = ''
for l in ea:
datum = l['ttmmjj'] # .encode('utf-8')
kurz = l['kurz'] # .encode('utf-8')
betrag = l['betrag']
s = '%s <tr><td class=table-3c-spalte1>%s</td><td class=table-3c-spalte2>%s</td><td class=table-3c-spalte3>%s</td></tr>' % (
s, datum, kurz, betrag)
return ("<table>"
"<tr><th class=table-3c-spalte1>Datum</th><th class=table-3c-spalte2>Bezeichnung</th><th class=table-3c-spalte3>Betrag</th></tr>"
"%s"
"<tr><th class=table-3c-spalte1></th><th class=table-3c-spalte2>Summe</th><th class=table-3c-spalte3>%s</th></tr>"
"</table>") % (s, T.sDM(aSumme))
def htmlProjektWintergarten2017(self):
aProjekt_ID = self.getProjekt_ID_Wintergarten_2017()
return self.htmlProjekt(aProjekt_ID)
def htmlProjektWintergarten2017K(self):
aProjekt_ID = self.getProjekt_ID_Wintergarten_2017()
return self.htmlProjektK(aProjekt_ID)
def main():
k = dmKonten()
# k.analyseEAMonatlich()
print(k.htmlAlleMonateEA())
# print(k.chartjsAlleMonateEinkommen())
# print(k.chartjsKontoverlauf())
# print k.summeAlleKonten()
# print k.listeAlleKonten()
# print (k.listeAlleMonateEinkommen())
# print k.listeAlleJahreEA()
# print k.listeProjektK(1)
# print k.jsonAlleKonten()
# print k.jsonLetzteEA()
# print k.jsonAlleJahreEA()
# print(k.htmlProjektWintergarten2017())
# print(k.htmldiagrammLetzterMonat())
# print(k.htmldiagrammDieserMonat())
# k.csvKontoVerlauf('/home/wnf/Entwicklung/PycharmProjects/wnfportal/wnfportal_python/www/daten/kontoverlauf.csv')
# k.analyseAusgabenVonBis(
# T.wnfDateToSQL(T.wnfErsterTagVormonat()),
# T.wnfDateToSQL(T.wnfLetzterTagVormonat()))
# k.diagrammKostenartVonBis('/wnfdaten/wnfpython/wnfportal/trunk/src/wnfportal/m/diagramme/', 'kreis_2018_09',
# '01.09.2018', '30.09.2018')
# k.diagrammAlleJahreEA('/wnfdaten/wnfpython/wnfportal/trunk/src/wnfportal/m/diagramme/diagramm_alle_jahre.png')
# k.diagrammAlleJahreEinkommen(
# '/wnfdaten/wnfpython/wnfportal/trunk/src/wnfportal/m/diagramme/diagramm_alle_jahre_ek.png')
return 0
if __name__ == '__main__':
main()
# ---- end of file | author: dev@wlsoft.de ----
# ---- next file: /0-notes/job-search/Cracking the Coding Interview/C10SortingSearching/questions/10.5-question.py | repo: Web-Dev-Collaborative/Lambda-Final-Backup | license: MIT / public-domain ----
# Sparse Search
# Given a sorted array of strings that is interspersed with empty strings,
# write a method to find the location of a given string.
# EXAMPLE: INPUT: ball, {"at", "", "", "", "ball", "", "", "car", "", "", "dad", "", ""}
# OUTPUT: 4
# time complexity: O()
# space complexity: O()
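# One possible solution (a sketch added for illustration, not part of the
# original notes): binary search, but when the midpoint is an empty string,
# probe outward for the nearest non-empty entry first. Worst case this
# degrades to O(n) time (an almost-all-empty array); extra space is O(1).
def sparse_search(target, strings):
    lo, hi = 0, len(strings) - 1
    while lo <= hi:
        mid = (lo + hi) // 2
        if strings[mid] == "":
            # find the closest non-empty neighbour within [lo, hi]
            left, right = mid - 1, mid + 1
            while True:
                if left < lo and right > hi:
                    return -1  # only empty strings remain in range
                if right <= hi and strings[right] != "":
                    mid = right
                    break
                if left >= lo and strings[left] != "":
                    mid = left
                    break
                left, right = left - 1, right + 1
        if strings[mid] == target:
            return mid
        elif strings[mid] < target:
            lo = mid + 1
        else:
            hi = mid - 1
    return -1
# print(sparse_search("ball", ["at", "", "", "", "ball", "", "", "car", "", "", "dad", "", ""]))  # -> 4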
# ---- end of file | author: bryan.guner@gmail.com ----
# ---- next file: /sergeant/connector/_connector.py | repo: gabriel-yahav/sergeant | license: MIT ----
import typing
class Lock:
def acquire(
self,
timeout: typing.Optional[float] = None,
check_interval: float = 1.0,
ttl: int = 60,
) -> bool:
raise NotImplementedError()
def release(
self,
) -> bool:
raise NotImplementedError()
def is_locked(
self,
) -> bool:
raise NotImplementedError()
def set_ttl(
self,
ttl: int,
) -> bool:
raise NotImplementedError()
def get_ttl(
self,
) -> typing.Optional[int]:
raise NotImplementedError()
class Connector:
name: str
def key_set(
self,
key: str,
value: bytes,
) -> bool:
raise NotImplementedError()
def key_get(
self,
key: str,
) -> typing.Optional[bytes]:
raise NotImplementedError()
def key_delete(
self,
key: str,
) -> bool:
raise NotImplementedError()
def queue_pop(
self,
queue_name: str,
) -> typing.Optional[bytes]:
raise NotImplementedError()
def queue_pop_bulk(
self,
queue_name: str,
number_of_items: int,
) -> typing.List[bytes]:
raise NotImplementedError()
def queue_push(
self,
queue_name: str,
item: bytes,
priority: str = 'NORMAL',
) -> bool:
raise NotImplementedError()
def queue_push_bulk(
self,
queue_name: str,
items: typing.Iterable[bytes],
priority: str = 'NORMAL',
) -> bool:
raise NotImplementedError()
def queue_length(
self,
queue_name: str,
) -> int:
raise NotImplementedError()
def queue_delete(
self,
queue_name: str,
) -> bool:
raise NotImplementedError()
def lock(
self,
name: str,
) -> Lock:
raise NotImplementedError()
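# A hypothetical in-memory reference implementation (added for illustration;
# not part of sergeant itself) covering a subset of the Connector contract
# above. A real connector would back these calls with e.g. Redis or MongoDB,
# and the 'HIGH' priority handling here is an assumption; methods not shown
# fall through to the NotImplementedError defaults.
import collections
import threading


class InMemoryConnector(Connector):
    name: str = 'in_memory'

    def __init__(self) -> None:
        self.keys: typing.Dict[str, bytes] = {}
        self.queues: typing.DefaultDict[str, typing.Deque[bytes]] = collections.defaultdict(collections.deque)
        self.mutex = threading.Lock()

    def key_set(self, key: str, value: bytes) -> bool:
        with self.mutex:
            existed = key in self.keys
            self.keys[key] = value
            return not existed  # True only when the key was newly created

    def key_get(self, key: str) -> typing.Optional[bytes]:
        return self.keys.get(key)

    def key_delete(self, key: str) -> bool:
        with self.mutex:
            return self.keys.pop(key, None) is not None

    def queue_push(self, queue_name: str, item: bytes, priority: str = 'NORMAL') -> bool:
        with self.mutex:
            if priority == 'HIGH':
                self.queues[queue_name].appendleft(item)
            else:
                self.queues[queue_name].append(item)
            return True

    def queue_pop(self, queue_name: str) -> typing.Optional[bytes]:
        with self.mutex:
            try:
                return self.queues[queue_name].popleft()
            except IndexError:
                return None

    def queue_length(self, queue_name: str) -> int:
        return len(self.queues[queue_name])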
# ---- end of file | author: gal@intsights.com ----
# ---- next file: /{{cookiecutter.repo_name}}/manage.py | repo: tony/cookiecutter-flask-pythonic | license: BSD-3-Clause ----
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from flask_script import Manager
from {{ cookiecutter.repo_name }} import {{ cookiecutter.repo_name | capitalize }}
"""If not using Flask-Script::
app = {{ cookiecutter.repo_name | capitalize }}.from_cli(sys.argv[1:])
Does the trick for retrieving an application object using
pure argparse. But let's hook into Flask-Script's CLI argparse
instance.
"""
def app_wrapper(*args, **kwargs):
"""App factory returns the :class:`flask.Flask` via ``__call__``,
but because of the way :class:`flask_script.Manager` handles
accepting app objects, this wrapper returns the flask object directly.
:returns: Flask object build from CLI
:rtype: :class:`flask.Flask`
"""
return {{ cookiecutter.repo_name | capitalize }}.from_file(*args, **kwargs).app
manager = Manager(app_wrapper)
manager.add_option('-c', '--config', dest='config', required=False)
@manager.command
def run_server(*args, **kwargs):
{{ cookiecutter.repo_name | capitalize }}.from_file().run()
@manager.command
def testing(*args, **kwargs):
print('Run "./run-tests.py" or "python setup.py test".')
if __name__ == "__main__":
    manager.run()  # dispatch through Flask-Script's CLI, as the docstring above intends
# ---- end of file | author: tony@git-pull.com ----
# ---- next file: /orders/urls.py | repo: dandani-cs/swishtest | license: none ----
from django.urls import path
from .views import OrderListView, OrderCreateView
urlpatterns = [
path("", OrderListView.as_view(), name="order_view"),
path("new", OrderCreateView.as_view(), name="order_new")
]
# ---- end of file | author: dani.cstech@gmail.com ----
# ---- next file: /yonep.py | repo: easthgs/git-test | license: none ----
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from flask import Flask, render_template, url_for, redirect, request
app = Flask(__name__)
@app.route('/')
@app.route('/<name>')
def yo(name=None):
return render_template('yo.html', name=name)
@app.route('/add', methods=['POST'])
def add():
return redirect(url_for('yo', name=request.form['name']))
if __name__ == '__main__':
app.run(debug=True)
# ---- end of file | author: eastnep@yahoo.co.jp ----
# ---- next file: /week13_H.W/IDS_20200529-8.py | repo: hjkyuit1226/Introduction-to-DS | license: none ----
def get_fahrenheit(x):
"""
    Transform a Celsius degree into the Fahrenheit scale
"""
f = x*9/5 + 32
return f
print(get_fahrenheit(25))
# ---- end of file | author: hjkyuit1226.noreply@github.com ----
# ---- next file: /homework_hse/homework03/homework03.py | repo: annnyway/homework | license: none ----
import random
def adjective():
with open('adjectives.tsv', 'r') as f:
adjectives = f.read().split()
return random.choice(adjectives)
def noun():
with open('nouns.tsv', 'r') as f:
nouns = f.read().split()
return random.choice(nouns)
def transitive_verb():
with open('trans_verbs.tsv', 'r') as f:
verbs = f.read().split()
return random.choice(verbs)
def intransitive_verb():
with open('intrans_verbs.tsv', 'r') as f:
verbs = f.read().split()
return random.choice(verbs)
def transitive_verb_with_s():
with open('trans_verbs.tsv', 'r') as f:
verbs = f.read().split()
verb = random.choice(verbs)
ends = ['ss', 'x', 'z', 'ch', 'sh']
vowels = 'aeiou'
if verb.endswith('y') and verb[-2] not in vowels:
verb = verb[0:-1] + 'ies'
elif verb.endswith(tuple(ends)):
verb += 'es'
else:
verb += 's'
return verb
def intransitive_verb_with_s():
with open('intrans_verbs.tsv', 'r') as f:
verbs = f.read().split()
verb = random.choice(verbs)
ends = ['ss', 'x', 'z', 'ch', 'sh']
vowels = 'aeiou'
if verb.endswith('y') and verb[-2] not in vowels:
verb = verb[0:-1] + 'ies'
elif verb.endswith(tuple(ends)):
verb = verb + 'es'
else:
verb = verb + 's'
return verb
def adverb():
with open('adverbs.tsv', 'r') as f:
adverbs = f.read().split()
return random.choice(adverbs)
def comparative_adjective():
with open('comparative_adjectives.tsv', 'r') as f:
adjectives = f.read().split()
return random.choice(adjectives)
def affirmative_sentence():
sentence = adjective().capitalize() + ' ' + noun() + ' ' + transitive_verb_with_s() +\
' ' + noun() + ' that ' + intransitive_verb_with_s() + ' ' + adverb() + '.'
return sentence
def interrogative_sentence():
sentence = 'Did ' + noun() + ' ' + transitive_verb() + ' ' + adjective() + ' ' +\
noun() + ' ' + 'yesterday?'
return sentence
def negative_sentence():
sentence = adjective().capitalize() + ' ' + noun() + ' will not ' + transitive_verb() +\
' ' + noun() + '.'
return sentence
def conditional_sentence():
sentence = 'If ' + noun() + ' ' + transitive_verb_with_s() + ' ' + noun() + ', ' +\
noun() + ' will ' + intransitive_verb() + ' ' + comparative_adjective() + '.'
return sentence
def imperative_sentence():
sentence = transitive_verb().capitalize() + ' the ' + adjective() + ' ' + noun() + ', please.'
return sentence
def random_sentences():
sentences = [affirmative_sentence(), interrogative_sentence(), negative_sentence(), conditional_sentence(),
imperative_sentence()]
random.shuffle(sentences)
return sentences
def main():
with open('random_sentences.txt', 'w') as f:
for sent in random_sentences():
f.write(sent + '\n')
return 0
if __name__ == '__main__':
main()
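# Sentence shapes produced by the generators above (template view):
#   affirmative:   Adj Noun Verb-s Noun that Verb-s Adverb.
#   interrogative: Did Noun Verb Adj Noun yesterday?
#   negative:      Adj Noun will not Verb Noun.
#   conditional:   If Noun Verb-s Noun, Noun will Verb Comparative.
#   imperative:    Verb the Adj Noun, please.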
# ---- end of file | author: nuticbooms@gmail.com ----
# ---- next file: /partenaire/admin.py | repo: guilavogui24/applisigre | license: none ----
from django.contrib import admin
from .models import Partenaire
from .models import TypePartenaire
from .models import FormeJuridique
from .models import Categories
from .models import SousCategorie
class SousCategorieAdmin(admin.ModelAdmin):
list_display = ('nom', 'Categories')
search_fields = ['nom']
# Register your models here.
admin.site.register(Partenaire)
admin.site.register(TypePartenaire)
admin.site.register(FormeJuridique)
admin.site.register(Categories)
admin.site.register(SousCategorie, SousCategorieAdmin)
# ---- end of file | author: guilavoguijoseph@gmail.com ----
# ---- next file: /problem4.py | repo: astan54321/PA3 | license: none ----
from mrjob.job import MRJob
from mrjob.step import MRStep
import re
WORD_RE = re.compile(r"[\w']+")
class MRMostUsedWord(MRJob):
def steps(self):
return [
MRStep(mapper=self.mapper_get_words,
combiner=self.combiner_count_words,
reducer=self.reducer_count_words),
MRStep(reducer=self.reducer_find_max_word)
]
def mapper_get_words(self, _, line):
# yield each word in the line
for word in WORD_RE.findall(line):
word = word.lower()
yield (word, 1)
def combiner_count_words(self, word, counts):
# optimization: sum the words we've seen so far
yield (word, sum(counts))
def reducer_count_words(self, word, counts):
# send all (num_occurrences, word) pairs to the same reducer.
# num_occurrences is so we can easily use Python's max() function.
yield None, (sum(counts), word)
# discard the key; it is just None
def reducer_find_max_word(self, _, word_count_pairs):
# each item of word_count_pairs is (count, word),
# so yielding one results in key=counts, value=word
words = []
for word in word_count_pairs:
words.append(word)
for i in range(10):
temp_max = max(words)
yield temp_max
words.remove(temp_max)
if __name__ == '__main__':
MRMostUsedWord.run()
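# To run this job locally (mrjob's default inline runner):
#   python problem4.py input.txt
# The two MRSteps first count words across all mappers/reducers; the final
# reducer then emits the ten most frequent (count, word) pairs by repeatedly
# extracting the maximum from the collected list.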
# ---- end of file | author: astan54321@gmail.com ----
# ---- next file: /backend/cool_dust_27675/wsgi.py | repo: crowdbotics-apps/cool-dust-27675 | license: none ----
"""
WSGI config for cool_dust_27675 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'cool_dust_27675.settings')
application = get_wsgi_application()
# ---- end of file | author: team@crowdbotics.com ----
# ---- next file: /test/python/circuit/library/test_phase_estimation.py | repo: levbishop/qiskit-terra | license: Apache-2.0 ----
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017, 2020.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Test library of phase estimation circuits."""
import unittest
import numpy as np
from qiskit.test.base import QiskitTestCase
from qiskit import BasicAer, execute
from qiskit.circuit import QuantumCircuit
from qiskit.circuit.library import PhaseEstimation, QFT
from qiskit.quantum_info import Statevector
class TestPhaseEstimation(QiskitTestCase):
"""Test the phase estimation circuit."""
def assertPhaseEstimationIsCorrect(self, pec: QuantumCircuit, eigenstate: QuantumCircuit,
phase_as_binary: str):
r"""Assert that the phase estimation circuit implements the correct transformation.
Applying the phase estimation circuit on a target register which holds the eigenstate
:math:`|u\rangle` (say the last register), the final state should be
.. math::
|\phi_1\rangle \cdots |\phi_t\rangle |u\rangle
where the eigenvalue is written as :math:`e^{2\pi i \phi}` and the angle is represented
in binary fraction, i.e. :math:`\phi = 0.\phi_1 \ldots \phi_t`.
Args:
pec: The circuit implementing the phase estimation circuit.
eigenstate: The eigenstate as circuit.
phase_as_binary: The phase of the eigenvalue in a binary fraction. E.g. if the
phase is 0.25, the binary fraction is '01' as 0.01 = 0 * 0.5 + 1 * 0.25 = 0.25.
"""
# the target state
eigenstate_as_vector = Statevector.from_instruction(eigenstate).data
reference = eigenstate_as_vector
zero, one = [1, 0], [0, 1]
for qubit in phase_as_binary[::-1]:
reference = np.kron(reference, zero if qubit == '0' else one)
# the simulated state
circuit = QuantumCircuit(pec.num_qubits)
circuit.compose(eigenstate,
list(range(pec.num_qubits - eigenstate.num_qubits, pec.num_qubits)),
inplace=True)
circuit.compose(pec, inplace=True)
# TODO use Statevector for simulation once Qiskit/qiskit-terra#4681 is resolved
# actual = Statevector.from_instruction(circuit).data
backend = BasicAer.get_backend('statevector_simulator')
actual = execute(circuit, backend).result().get_statevector()
np.testing.assert_almost_equal(reference, actual)
def test_phase_estimation(self):
"""Test the standard phase estimation circuit."""
with self.subTest('U=S, psi=|1>'):
unitary = QuantumCircuit(1)
unitary.s(0)
eigenstate = QuantumCircuit(1)
eigenstate.x(0)
            # eigenvalue is 1j = exp(2j pi 0.25), thus phi = 0.25 = 0.0100 = '0100'
            # (four digits, since 4 evaluation qubits are used below)
phase_as_binary = '0100'
pec = PhaseEstimation(4, unitary)
self.assertPhaseEstimationIsCorrect(pec, eigenstate, phase_as_binary)
with self.subTest('U=SZ, psi=|11>'):
unitary = QuantumCircuit(2)
unitary.z(0)
unitary.s(1)
eigenstate = QuantumCircuit(2)
eigenstate.x([0, 1])
# eigenvalue is -1j = exp(2j pi 0.75) thus phi = 0.75 = 0.110 = '110'
# using three digits as 3 evaluation qubits are used
phase_as_binary = '110'
pec = PhaseEstimation(3, unitary)
self.assertPhaseEstimationIsCorrect(pec, eigenstate, phase_as_binary)
with self.subTest('a 3-q unitary'):
unitary = QuantumCircuit(3)
unitary.x([0, 1, 2])
unitary.cz(0, 1)
unitary.h(2)
unitary.ccx(0, 1, 2)
unitary.h(2)
eigenstate = QuantumCircuit(3)
eigenstate.h(0)
eigenstate.cx(0, 1)
eigenstate.cx(0, 2)
# the unitary acts as identity on the eigenstate, thus the phase is 0
phase_as_binary = '00'
pec = PhaseEstimation(2, unitary)
self.assertPhaseEstimationIsCorrect(pec, eigenstate, phase_as_binary)
def test_phase_estimation_iqft_setting(self):
"""Test default and custom setting of the QFT circuit."""
unitary = QuantumCircuit(1)
unitary.s(0)
with self.subTest('default QFT'):
pec = PhaseEstimation(3, unitary)
expected_qft = QFT(3, inverse=True, do_swaps=False).reverse_bits()
self.assertEqual(pec.data[-1][0].definition, expected_qft)
with self.subTest('custom QFT'):
iqft = QFT(3, approximation_degree=2).inverse()
pec = PhaseEstimation(3, unitary, iqft=iqft)
self.assertEqual(pec.data[-1][0].definition, iqft)
if __name__ == '__main__':
unittest.main()
# ---- end of file | author: levbishop.noreply@github.com ----
# ---- next file: /project/server.py | repo: elihan27/CS-131 | license: none ----
import asyncio
import string
import aiohttp
import sys
import time
import logging
#As of 9:30 AM, 06/08/2018:
#has implemented "WHATSAT", "IAMAT", and "AT", dictionary, servers talking to each other
#need to implement: logger, buffer
server_info={}
#buffer=[]
portnumbers= {
'Goloman': 12717,
'Hands': 12718,
'Holiday': 12719,
'Welsh': 12720,
'Wilkes': 12721
}
logs={
'Goloman': 'Goloman.log',
'Hands': 'Hands.log',
'Holiday': 'Holiday.log',
'Welsh': 'Welsh.log',
'Wilkes': 'Wilkes.log'
}
comm_network = {'Goloman': ['Hands','Holiday', 'Wilkes'],
'Hands': ['Goloman', 'Wilkes'],
'Holiday': ['Goloman','Welsh', 'Wilkes'],
'Welsh':['Holiday'],
'Wilkes':['Goloman', 'Hands', 'Holiday']
}
def error(arg):
print("? ", arg)
sys.exit(0)
def messenger(client_id):
message = 'AT ' + server_info[client_id]['last_serv'] + " " + server_info[client_id]['time_diff'] + " " + client_id + " " + server_info[client_id]['location'] + " " + server_info[client_id]['time_stamp'] + '\r\n'
logging.basicConfig(filename=logs[server_info[client_id]['last_serv']],level=logging.INFO)
logging.info(message)
return message
def update_server(client_id, server, location, time_stamp):
server_info[client_id]={}
server_info[client_id]['last_serv']= server
server_info[client_id]['location']= location
server_info[client_id]['time_stamp']=time_stamp
value = time.time()-float(time_stamp)
sign=""
if (value>0):
sign="+"
server_info[client_id]['time_diff']=sign+str(value)
async def send_data(loop, serv_id, message):
for server in comm_network[serv_id]:
try:
coro = await (loop).create_connection(lambda:ClientProtocol(message,loop), '0.0.0.0', portnumbers[server])
logging.basicConfig(filename=logs[serv_id],level=logging.INFO)
logging.info("Opened connection to " +server)
logging.info("Sent: "+ message)
logging.info("Closed connection to " +server)
except ConnectionRefusedError:
logging.basicConfig(filename=logs[serv_id],level=logging.INFO)
logging.info("Could not connect to " +server)
async def fetch(session, url):
async with session.get(url) as response:
return await response.text()
async def called(location, radius, bound, proto):
async with aiohttp.ClientSession() as session:
size =len(location)
i=0
while (i!=size):
if ((location[i]=='+' or location[i]=='-') and i!=0):
break
i=i+1
true_location = location[:i]+ "," + location[i+1:]
info = await fetch(session, "https://maps.googleapis.com/maps/api/place/nearbysearch/json?location="+ true_location + "&radius=" + radius + "&key=AIzaSyD8h2eN8gP1G5wWc8FKc55sd93aEA8DrJI")
i=0
j=0
lines = info.split("\n")
length=len(lines)
while((i+1)!=length):
if ((lines[i]== " },") and (lines[i+1]==" {")):
j=j+1
# print(j)
if (j==bound):
                    break
i=i+1
final = ""
if (j==bound):
final="\n".join(lines[:i])+"\n }\n"
else:
final = info
print(final)
proto.transport.write(final.encode())
class ClientProtocol(asyncio.Protocol):
def __init__(self, message, loop):
self.message = message
self.loop = loop
def connection_made(self, transport):
transport.write(self.message.encode())
print('Connection made: {!r}'.format(self.message))
class ServerProtocol(asyncio.Protocol):
def __init__(self, serv_id, loop):
self.serv_id=serv_id
self.loop = loop
self.portnumber= portnumbers[serv_id]
self.network = comm_network[serv_id]
def connection_made(self, transport):
self.transport = transport
self.peername = transport.get_extra_info('peername')
print('Connection from {}'.format(self.peername))
logging.basicConfig(filename=logs[self.serv_id],level=logging.INFO)
logging.info('Connection from {}'.format(self.peername))
def data_received(self, data):
message = data.decode()
print('Data received: {!r}'.format(message))
logging.basicConfig(filename=logs[self.serv_id],level=logging.INFO)
logging.info('Data received: {!r}'.format(message))
size = len(message)
words=[]
word=""
i=0
while (i!=size):
if (message[i] in string.whitespace):
if (word!= ""):
words.append(word)
word=""
i=i+1
else:
word+=message[i]
i=i+1
if (words[0]=="IAMAT"):
if (len(words)!=4): #arg lengh error
error(message)
if (len(words[2])==1):
error(message) #invalid location error
i =(words[2])[1:].find("-")
j=(words[2])[1:].find("+")
if ((i==-1) and (j==-1)):
error(message)
if (i==-1):
                i = j  # fix: assignment, not a no-op comparison
try:
x=float(words[2][:i+1])
y=float(words[2][i+1:])
z=float(words[3])
except ValueError:
error(message)
return_message=""
client_id=words[1]
if (server_info.get(client_id, -1)!=-1): #if it does exist
if (float(words[3])<= float(server_info[client_id]['time_stamp'])):
return_message=messenger(client_id)
else:
update_server(client_id, self.serv_id, words[2], words[3])
return_message=messenger(client_id)
asyncio.ensure_future(send_data(self.loop, self.serv_id, return_message))
else:
update_server(client_id, self.serv_id, words[2], words[3])
return_message=messenger(client_id)
asyncio.ensure_future(send_data(self.loop, self.serv_id, return_message))
self.transport.write(return_message.encode())
elif(words[0]=="WHATSAT"):
if (len(words)!=4): #arg lengh error
error(message)
try:
temp1=float(words[2])
temp2=int(words[3])
if (not((temp1<=50) and (temp1>=0) and (temp2>=0) and (temp2<=20))):
error(message)
except ValueError:
error(message)
client_id=words[1]
return_message=messenger(client_id)
radius=words[2]
bound =int(words[3])
print('Send: {!r}'.format(return_message))
self.transport.write(return_message.encode())
asyncio.ensure_future(called(server_info[client_id]['location'],radius,bound, self))
elif(words[0]=="AT"):
if (len(words)!=6): #arg lengh error
error(message)
client_id=words[3]
if (server_info.get(client_id, -1)!=-1):
try:
if(float(words[5])<= float(server_info[client_id]['time_stamp'])):
return_message=messenger(client_id)
print('Send: {!r}'.format(return_message))
self.transport.write(return_message.encode())
except ValueError:
error(message)
else:
update_server(client_id, words[1], words[4], words[5])
server_info[client_id]['time_diff']=words[2]
return_message=messenger(client_id)
print('Send: {!r}'.format(return_message))
asyncio.ensure_future(send_data(self.loop, self.serv_id, return_message))
else:
error(message)
def connection_lost(self, exc):
print('Lost connection of {}'.format(self.peername))
self.transport.close()
def main():
if (len(sys.argv) <2):
error(sys.argv)
serv_id = sys.argv[1]
loop = asyncio.get_event_loop()
if (portnumbers.get(serv_id, -1)!=-1):
coro = loop.create_server(lambda:ServerProtocol(serv_id, loop), '0.0.0.0', portnumbers[serv_id])
else:
error(sys.argv)
server = loop.run_until_complete(coro)
loop = asyncio.get_event_loop()
# Each client connection will create a new protocol instance
# The IP address or hostname can be used.
# 127.0.0.1 is intended for the 'localhost' loopback devices.
# If you have multiple NIC(Network Interface Card)s, you may specify the specific IP address to be used (listen).
# 0.0.0.0 is to use any available NIC device.
# Serve requests until Ctrl+C is pressed
print('Serving on {}'.format(server.sockets[0].getsockname()))
try:
loop.run_forever()
except KeyboardInterrupt:
pass
# Close the server
server.close()
loop.run_until_complete(server.wait_closed())
loop.close()
if __name__=="__main__":
main()
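# Example exchange, following the message grammar implemented above
# (hostname, coordinates and timestamps are illustrative values only):
#   client -> Goloman:  IAMAT kiwi.cs.ucla.edu +34.068930-118.445127 1520023934.918963997
#   Goloman -> client:  AT Goloman +0.263873386 kiwi.cs.ucla.edu +34.068930-118.445127 1520023934.918963997
#   client -> Goloman:  WHATSAT kiwi.cs.ucla.edu 10 5
# The WHATSAT reply echoes the stored AT line, then called() writes the
# Google Places JSON truncated to at most 5 results.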
# ---- end of file | author: ehan2016@gmail.com ----
# ---- next file: /Appliction_Generate_ETL/Fact/fact_commande_vente.py | repo: HamzaLebcir/Generic-DW-ERP-realization | license: none ----
from configparser import ConfigParser
config = ConfigParser()
class fact_commande_vente:
def __init__(self, file_config):
config.read(file_config, encoding='utf-8-sig')
self.ID = config.get('Fact_Commande_Vente','ID')
self.Key_Produit = config.get('Fact_Commande_Vente','Key_Produit')
self.Key_Client = config.get('Fact_Commande_Vente','Key_Client')
self.Key_DC = config.get('Fact_Commande_Vente','Key_DC')
self.ID_Magasin =config.get('Fact_Commande_Vente','ID_Magasin')
self.Description = config.get('Fact_Commande_Vente','Description')
self.Quantite = config.get('Fact_Commande_Vente','Quantite')
self.Prix_unitaire = config.get('Fact_Commande_Vente','Prix_unitaire')
self.Prix_HT = config.get('Fact_Commande_Vente','Prix_HT')
self.Remise = config.get('Fact_Commande_Vente','Remise')
self.Prix_Total = config.get('Fact_Commande_Vente','Prix_Total')
self.Date_commande =config.get('Fact_Commande_Vente','Date_commande')
self.Table = config.get('Fact_Commande_Vente','Table')
self.server = config.get('Linked_server','server')
def ETL(self):
if(self.ID==""):
return("\n")
else:
return(
"INSERT INTO Fact_Commande_Vente(ID,Key_Produit,Key_Client,Key_DC,ID_Magasin,Description,Quantite,Prix_unitaire,Prix_HT,Remise,Prix_total,Date_Commande) \n"
+"SELECT ID,Key_Produit,Key_Client,Key_DC,ID_Magasin,Description,Quantite,Prix_unitaire,Prix_HT,Remise,Prix_total,Date_Commande \n"
+"FROM OPENQUERY ("+self.server+",\n'select "
+self.ID +" as ID, "
+self.Key_Produit+ " as Key_Produit, "
+self.Key_Client+ " as Key_Client, "
+self.Key_DC+ " as Key_DC, "
+self.ID_Magasin+ " as ID_Magasin, "
+self.Description+ " as Description, "
+self.Quantite+ " as Quantite, "
+self.Prix_unitaire+ " as Prix_unitaire, "
+self.Prix_HT+ " as Prix_HT, "
+self.Remise+ " as Remise, "
+self.Prix_Total+ " as Prix_Total,"
+self.Date_commande+ " as Date_Commande \n"
+"FROM " +self.Table+ "'); \n"
)
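# Illustrative layout of the INI file this class reads (section and key names
# as used above; the values are hypothetical column expressions for the
# linked ERP server):
#
#   [Linked_server]
#   server = ERP_LINKED
#
#   [Fact_Commande_Vente]
#   ID = cmd.id
#   Key_Produit = cmd.produit_id
#   ...
#   Table = dbo.commandes
#
# Example use:
#   fact = fact_commande_vente('config.ini')
#   print(fact.ETL())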
# ---- end of file | author: lebcirhamza7@gmail.com ----
# ---- next file: /test/functional/wallet_resendwallettransactions.py | repo: lycion/genex-project | license: MIT ----
#!/usr/bin/env python3
# Copyright (c) 2017 The Genex Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test resendwallettransactions RPC."""
from test_framework.test_framework import GenexTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error
class ResendWalletTransactionsTest(GenexTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.extra_args = [['--walletbroadcast=false']]
def run_test(self):
# Should raise RPC_WALLET_ERROR (-4) if walletbroadcast is disabled.
assert_raises_rpc_error(-4, "Error: Wallet transaction broadcasting is disabled with -walletbroadcast", self.nodes[0].resendwallettransactions)
# Should return an empty array if there aren't unconfirmed wallet transactions.
self.stop_node(0)
self.start_node(0, extra_args=[])
assert_equal(self.nodes[0].resendwallettransactions(), [])
# Should return an array with the unconfirmed wallet transaction.
txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)
assert_equal(self.nodes[0].resendwallettransactions(), [txid])
if __name__ == '__main__':
ResendWalletTransactionsTest().main()
# ---- end of file | author: 40029035+genexcore@users.noreply.github.com ----
# ---- next file: /diapers/migrations/0004_auto_20150822_1627.py | repo: asorokoumov/compare | license: none ----
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('diapers', '0003_auto_20150822_1624'),
]
operations = [
]
# ---- end of file | author: sorokoumov.anton@gmail.com ----
# ---- next file: /civil_war/settings.py | repo: muilee/politician_analytics | license: none ----
"""
Django settings for civil_war project.
Generated by 'django-admin startproject' using Django 2.0.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '&=a0pel91(_ih8hjypmo^@t^(htn3&^i1c%ka3)@7&$2nm@ep$'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ["*"]
# Application definition
INSTALLED_APPS = [
'politician',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'civil_war.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'civil_war.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Asia/Taipei'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
|
[
"matt@localhost.localdomain"
] |
matt@localhost.localdomain
|
b13d0766ba150b96be1c6b817e88795d4963c5de
|
bbab26a8d60b9b1c3e0037596165d3639a5e18ae
|
/assign2.py
|
797216f15613313e889f7b7f082da1c2bdbe46a0
|
[] |
no_license
|
MidhaTahir/-Python-
|
200d6c83c8066392143e7297659ea9ecc5b57b79
|
b054e4bc82289051e4f96b9b03f8402993b42a38
|
refs/heads/master
| 2021-07-19T20:13:37.920052
| 2020-06-24T18:31:05
| 2020-06-24T18:31:05
| 186,156,544
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 558
|
py
|
import random
arr = []
for i in range(10):
arr.append(random.randint(1,100))
print(arr)
minimum = min(arr)
print("The minimum number in random list is " + str(minimum))
minimum_position = arr.index(min(arr))
print("The minimum number position is " + str(minimum_position+1))
maximum = max(arr)
print("The maximum number in random list is " + str(maximum))
maximum_position = arr.index(max(arr))
print("The maximum number position is " + str(maximum_position+1))
total = 0  # renamed to avoid shadowing the built-in sum()
for i in range(len(arr)):
    total = total + arr[i]
mean = total/len(arr)
print(mean)
|
[
"midhatahirkhan2011@gmail.com"
] |
midhatahirkhan2011@gmail.com
|
aebbafdcebef5f7edbb5985af8c57816dee40ee3
|
a8e132c3cb716021064ad30b3d5a61a093d8ae6d
|
/Tugas 1/Find GCD of a Number/Find GCD of a Number.py
|
c0eecb79b70470313db3e3ae0e98674f3fa75f5c
|
[] |
no_license
|
Ngurah30/Kriptoanalisis
|
2ad971eaec14c9967c27a4ec8270bc0cc4bd0dcd
|
e3a0e26f4005b060b9f8833525151ad4616ccaa4
|
refs/heads/main
| 2023-03-30T12:41:59.854953
| 2021-03-22T11:31:38
| 2021-03-22T11:31:38
| 343,434,756
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 378
|
py
|
# Using a recursive function
def gcd(x, y):
if (y == 0):
return x
else:
return gcd(y, x % y)
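# worked example: gcd(48, 36) -> gcd(36, 12) -> gcd(12, 0) -> 12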
x = int(input("Masukkan bilangan pertama : "))  # read the first number
y = int(input("Masukkan bilangan kedua : "))  # read the second number
bil = gcd(x, y)  # call gcd to compute the result
print("Faktor persekutuan terbesarnya adalah ", bil)
|
[
"noreply@github.com"
] |
Ngurah30.noreply@github.com
|
f625664d16d2ea3be1e1bf2f040c0452b92aaf29
|
47541875c7be36ce612c382b7f98c92173c7144c
|
/WarDrivePiCar/Tests/test_main.py
|
47e6e1b2adc29e0bdcc292ffbd93dc17fff20045
|
[] |
no_license
|
MorenoB/WarDrivePi
|
72b6373796e9b6a5ff5c8841154da556b9471906
|
b4a29774de033df9f50043c6275a13d7a9d186cc
|
refs/heads/master
| 2021-01-17T15:26:16.733107
| 2017-01-27T11:47:12
| 2017-01-27T11:47:12
| 69,866,547
| 1
| 0
| null | 2017-01-27T11:47:13
| 2016-10-03T11:59:23
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 1,357
|
py
|
# Test files need to re-register themselves when run from the shell
import sys
import os
sys.path.insert(1, os.path.abspath(os.path.dirname(__file__)))
from unittest import TestCase
from program import Program
from Util.testing import TestThread
import time
class TestMain(TestCase):
def test_movement(self):
program = Program()
# Force in some mock-up location data for the Phone module
current_dir = os.path.abspath(os.path.dirname(__file__))
file_path = os.path.join(current_dir, 'simulated_location_input.txt')
location_mockup_data = open(file_path, 'r').read()
file_path = os.path.join(current_dir, 'simulated_sensor_input.txt')
sensor_mockup_data = open(file_path, 'r').read()
program.force_phone_handler_input(location_data=location_mockup_data, sensor_data=sensor_mockup_data)
# Start the main program
new_thread = TestThread(program)
new_thread.start()
print "Simulating Keyboard Input..."
file_path = os.path.join(current_dir, 'simulatedInput.txt')
sys.stdin = open(file_path, 'r')
time.sleep(1)
print "Simulating Keyboard Interrupt..."
program.stop()
time.sleep(1)
print "Checking if program is done..."
        self.assertEqual(program.is_running(), False)
|
[
"moreno_bralts@hotmail.com"
] |
moreno_bralts@hotmail.com
|
b94eb3cd9714f1550d11a2faa1808f08db720be0
|
bc9f66258575dd5c8f36f5ad3d9dfdcb3670897d
|
/lib/surface/storage/delete.py
|
b0dd92d45fc1d77f4de21763de0131975f546827
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
google-cloud-sdk-unofficial/google-cloud-sdk
|
05fbb473d629195f25887fc5bfaa712f2cbc0a24
|
392abf004b16203030e6efd2f0af24db7c8d669e
|
refs/heads/master
| 2023-08-31T05:40:41.317697
| 2023-08-23T18:23:16
| 2023-08-23T18:23:16
| 335,182,594
| 9
| 2
|
NOASSERTION
| 2022-10-29T20:49:13
| 2021-02-02T05:47:30
|
Python
|
UTF-8
|
Python
| false
| false
| 5,897
|
py
|
# -*- coding: utf-8 -*- #
# Copyright 2013 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to list Cloud Storage objects."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import storage_api
from googlecloudsdk.api_lib.storage import storage_util
from googlecloudsdk.calliope import base
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.command_lib.storage import expansion
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import storage_parallel
from googlecloudsdk.core import log
from googlecloudsdk.core.console import console_io
@base.Hidden
@base.Deprecate(is_removed=False, warning='This command is deprecated. '
'Use `gcloud alpha storage rm` instead.')
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class Delete(base.Command):
"""Delete Cloud Storage objects and buckets."""
detailed_help = {
'DESCRIPTION': """\
*{command}* lets you delete Cloud Storage objects and buckets. You can
specify one or more paths (including wildcards) and all matching objects
and buckets will be deleted.
""",
'EXAMPLES': """\
To delete an object, run:
$ *{command}* gs://mybucket/a.txt
To delete all objects in a directory, run:
$ *{command}* gs://mybucket/remote-dir/*
The above command will delete all objects under remote-dir/ but not its sub-directories.
To delete a directory and all its objects and subdirectories, run:
$ *{command}* --recursive gs://mybucket/remote-dir
$ *{command}* gs://mybucket/remote-dir/**
To delete all objects and subdirectories of a directory, without deleting the directory
itself, run:
$ *{command}* --recursive gs://mybucket/remote-dir/*
or
$ *{command}* gs://mybucket/remote-dir/**
To delete all objects and directories in a bucket without deleting the bucket itself, run:
$ *{command}* gs://mybucket/**
To delete all text files in a bucket or a directory, run:
$ *{command}* gs://mybucket/*.txt
$ *{command}* gs://mybucket/remote-dir/*.txt
To go beyond directory boundary and delete all text files in a bucket or a directory, run:
$ *{command}* gs://mybucket/**/*.txt
$ *{command}* gs://mybucket/remote-dir/**/*.txt
To delete a bucket, run:
$ *{command}* gs://mybucket
You can use wildcards in bucket names. To delete all buckets with prefix of `my`, run:
$ *{command}* --recursive gs://my*
""",
}
@staticmethod
def Args(parser):
parser.add_argument(
'path',
nargs='+',
help='The path of objects and directories to delete. The path must '
'begin with gs:// and may or may not contain wildcard characters.')
parser.add_argument(
'--recursive',
action='store_true',
help='Recursively delete the contents of any directories that match '
'the path expression.')
parser.add_argument(
'--num-threads',
type=int,
hidden=True,
default=16,
help='The number of threads to use for the delete.')
flags.add_additional_headers_flag(parser)
def Run(self, args):
paths = args.path or ['gs://']
expander = expansion.GCSPathExpander()
objects, dirs = expander.ExpandPaths(paths)
if dirs and not args.recursive:
raise exceptions.RequiredArgumentException(
'--recursive',
'Source path matches directories but --recursive was not specified.')
buckets = []
dir_paths = []
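    # A dir whose object name is empty is a whole bucket (also queued for
    # bucket deletion); every dir gets '**' appended so the objects under it
    # are expanded into the delete list.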
for d in dirs:
obj_ref = storage_util.ObjectReference.FromUrl(d, allow_empty_object=True)
if not obj_ref.name:
buckets.append(obj_ref.bucket_ref)
dir_paths.append(d + '**')
sub_objects, _ = expander.ExpandPaths(dir_paths)
objects.update(sub_objects)
tasks = []
for o in sorted(objects):
tasks.append(storage_parallel.ObjectDeleteTask(
storage_util.ObjectReference.FromUrl(o)))
if buckets:
# Extra warnings and confirmation if any buckets will be deleted.
log.warning('Deleting a bucket is irreversible and makes that bucket '
'name available for others to claim.')
message = 'This command will delete the following buckets:\n '
message += '\n '.join([b.bucket for b in buckets])
console_io.PromptContinue(
message=message, throw_if_unattended=True, cancel_on_no=True)
# TODO(b/120033753): Handle long lists of items.
message = 'You are about to delete the following:'
message += ''.join(['\n ' + b.ToUrl() for b in buckets])
message += ''.join(['\n ' + t.obj_ref.ToUrl() for t in tasks])
console_io.PromptContinue(
message=message, throw_if_unattended=True, cancel_on_no=True)
storage_parallel.ExecuteTasks(tasks, num_threads=args.num_threads,
progress_bar_label='Deleting Files')
log.status.write(
'Deleted [{}] file{}.\n'.format(
len(tasks), 's' if len(tasks) > 1 else ''))
storage_client = storage_api.StorageClient()
for b in buckets:
storage_client.DeleteBucket(b)
log.DeletedResource(b.ToUrl(), kind='bucket')
|
[
"cloudsdk.mirror@gmail.com"
] |
cloudsdk.mirror@gmail.com
|
6fb69cae212e1193fbae6999d71ad04fb456e8f7
|
be77e3ff1de69b11a427309ad5e953dfdbdb55a2
|
/main.py
|
7b29ea9b11bb750109abb536c2465b092280ee36
|
[] |
no_license
|
Richard98PL/tibiaAntyLogout
|
58bfef476453ae021d69ebc7785eac6a1b47d947
|
b790f9ffb756624c1e6d71506f15e8f9dda390cb
|
refs/heads/main
| 2023-08-26T22:12:29.090560
| 2021-11-01T16:52:21
| 2021-11-01T16:52:21
| 423,093,921
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,158
|
py
|
from pynput.keyboard import Key, Controller
import win32gui
import win32con
import re
import time
import datetime
from random import randint
keyboard = Controller()
class WindowMgr:
"""Encapsulates some calls to the winapi for window management"""
def __init__ (self):
"""Constructor"""
self._handle = None
def find_window(self, class_name, window_name=None):
"""find a window by its class_name"""
self._handle = win32gui.FindWindow(class_name, window_name)
def _window_enum_callback(self, hwnd, wildcard):
"""Pass to win32gui.EnumWindows() to check all the opened windows"""
if re.match(wildcard, str(win32gui.GetWindowText(hwnd))) is not None:
self._handle = hwnd
def find_window_wildcard(self, wildcard):
"""find a window whose title matches the wildcard regex"""
self._handle = None
win32gui.EnumWindows(self._window_enum_callback, wildcard)
def set_foreground(self):
"""put the window in the foreground"""
win32gui.SetForegroundWindow(self._handle)
    def get_rectangle(self):
        return win32gui.GetWindowRect(self._handle)
def antyLogout():
currentWindowManager = WindowMgr()
currentWindow = win32gui.GetForegroundWindow()
currentWindowManager._handle = currentWindow
tibiaWindowManager = WindowMgr()
tibiaWindowManager.find_window_wildcard("Tibia - *")
if currentWindowManager._handle != tibiaWindowManager._handle:
win32gui.ShowWindow(tibiaWindowManager._handle, win32con.SW_MAXIMIZE)
tibiaWindowManager.set_foreground()
keyboard.press(Key.ctrl)
movementKeys = [Key.up, Key.down]
for key in movementKeys:
keyboard.tap(key)
time.sleep( randint(15,31) / 1000)
keyboard.release(Key.ctrl)
now = datetime.datetime.now()
print(now.hour, now.minute, now.second)
if currentWindowManager._handle != tibiaWindowManager._handle:
win32gui.ShowWindow(tibiaWindowManager._handle, win32con.SW_MINIMIZE)
currentWindowManager.set_foreground()
antyLogout()
while True:
time.sleep(6*60 + randint(0,13))
antyLogout()
|
[
"noreply@github.com"
] |
Richard98PL.noreply@github.com
|
fedd1e48e973b16043436c9c51aa37b3063a283e
|
1f2df4dfed4af1485fefab0118dd6abd437de4de
|
/listings/migrations/0001_initial.py
|
7dc50d07d346be74a195cb7f75a96f3f72bf4ebd
|
[] |
no_license
|
Rhillx/IH_project
|
90bc0ecaa200a2fb51e520dd75f6485cb21e6f17
|
4a51311e24456e0aefec16872f340685ec7dca74
|
refs/heads/master
| 2020-04-25T18:55:04.270785
| 2019-03-02T05:56:23
| 2019-03-02T05:56:23
| 173,000,886
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,156
|
py
|
# Generated by Django 2.1.7 on 2019-02-21 22:48
import datetime
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('realtors', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Listing',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=200)),
('address', models.CharField(max_length=200)),
('city', models.CharField(max_length=100)),
('state', models.CharField(max_length=100)),
('zipcode', models.CharField(max_length=20)),
('description', models.TextField(blank=True)),
('price', models.IntegerField()),
('bedrooms', models.IntegerField()),
('bathrooms', models.DecimalField(decimal_places=1, max_digits=2)),
('garage', models.IntegerField(default=0)),
('sqft', models.IntegerField()),
('lot_size', models.DecimalField(decimal_places=1, max_digits=5)),
('photo_main', models.ImageField(upload_to='photos/%Y/%m/%d/')),
('photo_1', models.ImageField(upload_to='photos/%Y/%m/%d/')),
('photo_2', models.ImageField(blank=True, upload_to='photos/%Y/%m/%d/')),
('photo_3', models.ImageField(blank=True, upload_to='photos/%Y/%m/%d/')),
('photo_4', models.ImageField(blank=True, upload_to='photos/%Y/%m/%d/')),
('photo_5', models.ImageField(blank=True, upload_to='photos/%Y/%m/%d/')),
('photo_6', models.ImageField(blank=True, upload_to='photos/%Y/%m/%d/')),
('is_published', models.BooleanField(default=True)),
('list_date', models.DateTimeField(blank=True, default=datetime.datetime.now)),
('realtor', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='realtors.Realtor')),
],
),
]
|
[
"rhillz718@gmail.com"
] |
rhillz718@gmail.com
|
c19e0105d28469432092df67fa6a94f2cb27922c
|
4a6f3bc478dcd0c32f376bd0369940526a0242c9
|
/main.py
|
71941dc9451847800a7fe93dae92f57aa8973e66
|
[
"Apache-2.0"
] |
permissive
|
1999foxes/python-gobang-bilibililive
|
179495d2f7c8094a490dcb90a386a42cffafe61b
|
efc1f631a8c9686177b92a3e54c183ee227a45d2
|
refs/heads/master
| 2023-02-24T14:46:29.337799
| 2021-01-31T18:00:25
| 2021-01-31T18:00:25
| 334,715,046
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,352
|
py
|
# -*- coding: utf-8 -*-
import os
def installlibs():
libs = {"numpy","requests","pygame"}
try:
for lib in libs:
os.system("pip3 install "+lib)
print("Successful")
except Exception as err:
print("Failed:", Exception)
try:
import pygame
from pygame.locals import *
import sys
import math
import storn
from socket import *
import select
import chessboard
import time
import danmuji
except:
installlibs()
roomID = '646'
pygame.init()
bg_size = 615, 615
WHITE = (255, 255, 255)
font1 = pygame.font.Font('font/12345.TTF', 35)
win_text = font1.render(u"黑棋胜利", True, WHITE)
win_text_rect = win_text.get_rect()
win_text_rect.left, win_text_rect.top = (bg_size[0] - win_text_rect.width) // 2, \
(bg_size[1] - win_text_rect.height) // 2
lose_text = font1.render(u"白棋胜利", True, WHITE)
lose_text_rect = lose_text.get_rect()
lose_text_rect.left, lose_text_rect.top = (bg_size[0] - lose_text_rect.width) // 2, \
(bg_size[1] - lose_text_rect.height) // 2
class StateMachine():
def __init__(self):
# state constant
self.BLACKTURN = 'BLACKTURN'
self.WHITETURN = 'WHITETURN'
self.BLACKWIN = 'BLACKWIN'
self.WHITEWIN = 'WHITEWIN'
self.GAMEOVER = 'GAMEOVER'
# current state
self.state = self.GAMEOVER
# Players ('all' or 'any' or some nickname, or 'ai' for white)
self.black = 'all'
self.white = 'mouse'
# deadlines
self.deadline = 0
self.allDeadline = 0
self.promptCountdown = 0
# new chess data, [[pos1, num1], [pos2, num2], ...]
self.data = []
# chessboard
self.board = chessboard.Chessboard()
# render screen
self.screen = pygame.display.set_mode(bg_size)
# danmuji
self.dmj = danmuji.Gift(roomID)
self.dmj.run()
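        # danmuji.Gift presumably listens to the Bilibili live room; getData()
        # below reads (nickname, position) tuples out of dmj.danmuList under dmj.lock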
print('hello')
def newGame(self):
self.state = self.BLACKTURN
self.setDeadline()
self.board.clear()
def setDeadline(self):
self.deadline = time.time() + 120
self.allDeadline = time.time() + 60
def player(self):
if self.state == self.WHITETURN:
return self.white
elif self.state == self.BLACKTURN:
return self.black
def nextTurn(self):
# add chess
print('data =', self.data)
print('add Chess to', self.data[0][0])
self.board.addChess(self.data[0][0])
# check who wins
if self.board.isWin():
if self.state == self.WHITETURN:
self.state = self.WHITEWIN
else:
self.state = self.BLACKWIN
# init for next turn
if self.state == self.WHITETURN:
self.state = self.BLACKTURN
elif self.state == self.BLACKTURN:
self.state = self.WHITETURN
self.setDeadline()
self.data = []
def addData(self, pos):
for i in self.data:
if i[0] == pos:
i[1] += 1
return
self.data.append([pos, 1])
def getData(self):
# get data from danmuji
if self.player() == 'ai':
self.data.append([self.board.ai(), 1])
elif self.player() == 'mouse':
return
else:
self.dmj.lock.acquire()
for danmu in self.dmj.danmuList:
if (self.player() == 'all' or self.player() == 'any' or self.player() == danmu[0]) and self.board.is_valid(danmu[1]):
self.addData(danmu[1])
self.dmj.danmuList = []
self.dmj.lock.release()
self.data.sort(key=lambda a:a[1], reverse = True)
def update(self):
if self.state == self.GAMEOVER or self.state == self.WHITEWIN or self.state == self.BLACKWIN:
if self.promptCountdown == 0:
self.promptCountdown = time.time() + 10
elif time.time() > self.promptCountdown:
self.promptCountdown = 0
self.newGame()
else:
self.getData()
if len(self.data) == 0:
if time.time() > self.deadline or (self.player() == 'all' and time.time() > self.allDeadline):
if self.state == self.BLACKTURN:
self.state = self.WHITEWIN
else:
self.state = self.BLACKWIN
elif self.player() != 'all' or time.time() > self.allDeadline:
self.nextTurn()
def renderScreen(self):
        # draw the chessboard
screen.blit(bg_image, (0, 0))
for i in self.board.black_chesses:
screen.blit(i.image, i.image_rect())
for i in self.board.white_chesses:
screen.blit(i.image, i.image_rect())
# draw gameover prompt
if self.state == self.BLACKWIN:
screen.blit(win_text, win_text_rect)
elif self.state == self.WHITEWIN:
screen.blit(lose_text, lose_text_rect)
# draw countdown
if self.player() == 'all':
text_countdown = font1.render('倒计时:'+str(int(self.allDeadline - time.time())), True, WHITE)
else:
text_countdown = font1.render('倒计时:'+str(int(self.deadline - time.time())), True, WHITE)
text_countdown_rect = text_countdown.get_rect()
text_countdown_rect.left, text_countdown_rect.top = (20, 0)
screen.blit(text_countdown, text_countdown_rect)
# draw player 'all' statistic
if self.player() == 'all':
for danmu in self.data:
tmp = font1.render(str(danmu[1]), True, WHITE)
tmp_rect = tmp.get_rect()
tmp_rect.left, tmp_rect.top = self.board.getPixel(danmu[0])
tmp_rect.left -= tmp_rect.width/2
tmp_rect.top -= 20
screen.blit(tmp, tmp_rect)
pygame.display.flip()
clock = pygame.time.Clock()
screen = pygame.display.set_mode(bg_size)
pygame.display.set_caption('五子棋')
bg_image = pygame.image.load('image/bg.png').convert_alpha()  # background image
def main():
state_machine = StateMachine()
state_machine.newGame()
running = True
while running:
state_machine.renderScreen()
state_machine.update()
for event in pygame.event.get():
if event.type == QUIT:
pygame.quit()
sys.exit()
if event.type == MOUSEBUTTONDOWN:
if event.button == 1:
print('click', event.pos)
if state_machine.player() == 'mouse' and state_machine.board.is_valid(state_machine.board.getPos(event.pos)):
state_machine.data = []
state_machine.data.append([state_machine.board.getPos(event.pos), 1])
state_machine.nextTurn()
clock.tick(60)
if __name__ == '__main__':
main()
|
[
"hl1999@yeah.net"
] |
hl1999@yeah.net
|
2cd11da1a7669bc9c9ef03e9c656328abf9a4495
|
f32f45a84f296392fa5433402bf126885fb2df23
|
/learningTemplates/basic_app/urls.py
|
ae5ad4e2a7806bef5b64fb1740555b78e54b3277
|
[] |
no_license
|
samjonescode/Python-Anywhere-First-Deployment
|
8878f2b4d3c25a4b6f05795cd712c1def58f03ed
|
8608956168363442988acc3468886131787db4d6
|
refs/heads/master
| 2021-10-22T23:32:56.691641
| 2019-03-13T17:08:50
| 2019-03-13T17:08:50
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 254
|
py
|
from django.urls import path
from basic_app import views
# template tagging
app_name = 'basic_app'
urlpatterns = [
path('index/',views.index,name='index'),
path('relative/',views.relative,name='relative'),
path('other/',views.other, name='other'),
]
|
[
"sjonesmusician@gmail.com"
] |
sjonesmusician@gmail.com
|
939c05361fb679aea62692cdc376799186c3289d
|
2bbb75129fa4e1d28b7fcfe58f585bcbfdc33dde
|
/lib/models/__init__.py
|
98f6d9847c0667734e0d47537fc46076fd0f5152
|
[] |
no_license
|
Gyyz/Targeted-Sentiment-analysis-with-memory-network-attention
|
1165ba850fd1a61a1ddbfd0d8a1ec4fa408ecf92
|
2208188eb6bd150e739acfd6b16ec810eac15e43
|
refs/heads/master
| 2021-06-22T10:09:52.490474
| 2017-08-10T08:27:58
| 2017-08-10T08:27:58
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 53
|
py
|
from nn import NN
import rnn
from sentiment import *
|
[
"yuz@fff"
] |
yuz@fff
|
8072e2425675f6565ceecb16805e4ef341e0456e
|
4cb37eaa0bdcf002c26aba4656df1287ce3cfe56
|
/main/settings.py
|
6abbf86caad648c174cc9b0af91a9fcf6aa0f5f6
|
[] |
no_license
|
jiuniuone/app-mall-backend
|
86ef3ec8cd78e019d90c8885173b68a6ecfdf6ca
|
07dcb5840bc0d5f2d98522c5eb3d33de74bdcb6f
|
refs/heads/master
| 2020-03-26T07:27:43.297704
| 2018-08-21T05:10:30
| 2018-08-21T05:10:30
| 144,655,502
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,210
|
py
|
import os
import socket
import sys
import time
from unipath import Path
APP_NAME = 'mall'
FUNCTION_NAME = "shopping mall"
DEBUG = socket.gethostname() not in ['public', 'stage']
# DEBUG = False
SHOW_SQL = 'runserver' in sys.argv
if DEBUG: SHOW_SQL = False
BASE_DIR = Path(__file__).ancestor(2)
SECRET_KEY = 'i%25adry^l0r87l+228213a^%67q015z7j9^uc96jm=n%%0e^l'
ALLOWED_HOSTS = ["*"]
ROOT_URLCONF = 'main.urls'
WSGI_APPLICATION = 'main.wsgi.application'
LANGUAGE_CODE = 'zh-hans'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = False
TIME_FORMAT = '%Y-%m-%d %H:%M:%S'
DATE_TIME_FORMAT = '%Y-%m-%d'
PAGINATE_BY = 10
START_TIME = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
AUTH_USER_MODEL = f'{APP_NAME}.User'
LOGIN_URL = f'/{APP_NAME}/user/login/'
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.middleware.gzip.GZipMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
f'{APP_NAME}.middlewares.HttpsCheckMiddleware',
f'{APP_NAME}.middlewares.LogMiddleware',
]
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
# 'DIRS': [BASE_DIR.child('templates')],
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.template.context_processors.static',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'acmin.processors.extra_context'
],
},
},
]
def rotating_handler(name):
return {
'level': 'DEBUG',
'filters': ['f1'],
'formatter': 'simple',
'class': 'logging.handlers.TimedRotatingFileHandler',
'when': 'midnight',
'interval': 1,
'backupCount': 100,
'filename': f'/var/log/{APP_NAME}/{name}.log',
}
def file_handler(name):
return {
'level': 'DEBUG',
'filters': ['f1'],
'formatter': 'simple',
'class': 'logging.FileHandler',
'filename': f'/var/log/{APP_NAME}/{name}.log',
}
def console_handler():
return {'level': 'DEBUG', 'filters': ['f1'], 'formatter': 'simple', 'class': 'logging.StreamHandler', }
def get_log_setting(debug):
log_modules = [APP_NAME]
return {
'version': 1,
'disable_existing_loggers': True,
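        # resolves to django.utils.log.RequireDebugTrue / RequireDebugFalse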
'filters': {'f1': {'()': 'django.utils.log.RequireDebug' + str(debug)}},
'formatters': {'simple': {'format': '%(levelname)s %(asctime)s %(message)s'}, },
'handlers': dict({key: file_handler(key) for key in log_modules}, **{'console': console_handler()}),
'loggers': {key: {'level': 'INFO', 'handlers': ['console', key]} for key in log_modules}
}
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'acmin',
APP_NAME
]
AUTH_PASSWORD_VALIDATORS = [
{'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', },
{'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', },
{'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', },
{'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', },
]
CACHALOT_UNCACHABLE_TABLES = (
'django_migrations',
)
from acmin.utils import get_ip, is_windows
MEDIA_ROOT = "e:/var/www/media/" if is_windows() else "/var/www/media/"
MEDIA_URL = f'http://{get_ip()}/media/'
STATIC_URL = '/static/'
STATICFILES_DIRS = [
BASE_DIR.child(APP_NAME, "static")
]
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
}
}
USE_SQLITE3 = True
if DEBUG:
name = 'test' if 'test' in sys.argv else 'app'
if USE_SQLITE3:
DATABASES = {'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR.child(f'{APP_NAME}.db'),
'TEST_NAME': BASE_DIR.child(f'{APP_NAME}-test.db'),
}}
else:
DATABASES = {'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': APP_NAME,
'USER': APP_NAME,
'PASSWORD': '123456',
'HOST': '127.0.0.1',
'PORT': '5432',
}}
else:
DATABASES = {'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': APP_NAME,
'USER': APP_NAME,
'PASSWORD': '123456',
'HOST': '127.0.0.1',
'PORT': '5432',
}}
LOGGING = get_log_setting(DEBUG)
|
[
"hikelee@gmail.com"
] |
hikelee@gmail.com
|
816a6e3ddb957c0b742e5e7ca3543a7b6de38cf7
|
fff4db9bd3408d881168a4838bd3d342b6415583
|
/codesort/tests/test_data/sorted_1.py
|
27b8982e7e8ff7740a54d846e70e116697b5bb1d
|
[
"MIT"
] |
permissive
|
dougthor42/CodeSort
|
853da348ddaa7461b6374fce7d4c62d66e437a12
|
4e4b5b862b903a258433cfd399db124a5abfa67e
|
refs/heads/master
| 2021-01-01T20:01:05.040710
| 2014-10-09T00:32:13
| 2014-10-09T00:32:13
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,472
|
py
|
# -*- coding: utf-8 -*-
"""
Docstring!
"""
from __future__ import print_function, division
class ClassA(object):
""" ClassA, for sorting! """
def __init__(self):
""" Nothing needed here! """
pass
def _private_a(self):
""" And things! """
pass
def _private_b(self):
""" Stuff! """
pass
def public_a(self):
""" I'm tired... """
pass
def public_b(self):
""" That's all for now """
pass
def public_c(self):
""" More stuff! """
pass
class ClassB(object):
""" ClassA, for sorting! """
def __init__(self):
""" Nothing needed here! """
pass
def _private_a(self):
""" And things! """
pass
def _private_b(self):
""" Stuff! """
pass
def public_a(self):
""" I'm tired... """
pass
def public_b(self):
""" That's all for now """
pass
def public_c(self):
""" More stuff! """
pass
def module_func_a(a):
""" I hope this works """
pass
def module_func_b(a, b):
""" Please don't hate me """
pass
def module_func_c(c):
""" But in the original file, it's not! """
pass
if __name__ == "__main__":
""" This should be last """
pass
# A comment that starts the orphaned module code
x = 5
y = 27
print(x+y)
|
[
"dougthor42@users.noreply.github.com"
] |
dougthor42@users.noreply.github.com
|
7397068550e96dd401ecb0de352f442531574858
|
78a4379f22f1a1f8b801a26c4bb5357bdce24cb6
|
/apps/pay/migrations/0001_initial.py
|
c0f63180f3030475c4991ea0eef92a94c718e380
|
[] |
no_license
|
18801166104/TravelWebSite
|
706890ecc901b7fea277dd5794d81c0e54424e98
|
131399ecc3df3344105677b9e6745fb2f23750e7
|
refs/heads/master
| 2020-04-30T15:12:34.745958
| 2019-03-21T09:38:16
| 2019-03-21T09:38:16
| 176,461,032
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,163
|
py
|
# Generated by Django 2.1.7 on 2019-02-22 13:27
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='OrderItems',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('good_name', models.CharField(default='', max_length=30, verbose_name='购买商品名称')),
('good_num', models.IntegerField(default=1, verbose_name='购买数量')),
('good_price', models.FloatField(default=0, verbose_name='单价')),
('good_image', models.ImageField(default='', upload_to='', verbose_name='商品主图')),
('good_id', models.IntegerField(default=0, verbose_name='商品id')),
('order_num', models.CharField(max_length=25, verbose_name='订单号')),
],
options={
'verbose_name': '用户购买商品信息',
'verbose_name_plural': '用户购买商品信息',
},
),
]
|
[
"75197440@qq.com"
] |
75197440@qq.com
|
6faafc33a8fa0bcb600ee25a8837c929a209e065
|
9054a65b931267d15d74ad992f947d348e0a039f
|
/hw0/code/python_tutorial.py
|
2f781d96bfa2f67f823357da87ca917398da18dd
|
[] |
no_license
|
saunair/Autonomy
|
e0ba30b3e6301477efabc125880fcd7ee47bc0de
|
01621595250daeb858ddd096dfa67b8e1d4fe0c5
|
refs/heads/master
| 2021-06-13T07:09:04.409729
| 2017-02-24T02:09:47
| 2017-02-24T02:09:47
| 81,028,855
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,670
|
py
|
#!/usr/bin/env python
import numpy as np
import operator
def print_list(l):
print l
def sort_manual(shops):
shops_sorted = []
values = [ v for v in shops.values() ]
#TODO: Here implement manual sorting using loops
for i in range(0, len(values)):
for j in range(0, len(values)-i-1):
if values[j] < values[j+1]:
values[j], values[j+1] = values[j+1], values[j]
#print values
shops_sorted = len(values)*[0]
for kk in shops.keys():
shops_sorted[values.index(shops[kk])] = [kk,shops[kk]]
print 'Manual sorting result:'
print_list(shops_sorted)
def sort_python(shops):
shops_sorted = sorted(shops.items(), key=operator.itemgetter(1), reverse=True)
#print shops_sorted
#TODO: Here implement sorting using pythons build in sorting functions
#shops_sorted = [ [k,v] for k, v in shops.items() ]
print 'Python sorting result: '
print_list(shops_sorted)
def sort_numpy(shops):
shops_sorted = []
# TODO: Here implement sorting using numpys build-in sorting function
y = np.array(shops.items())
print 'Numpy sorting result: '
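    # argsort the value column (index 1), then reverse for descending order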
y = y[y[:,1].argsort()[::-1]]
shops_sorted = y.tolist()
print_list(shops_sorted)
def main():
shops = {}
shops['21st Street'] = 0.9
shops['Voluto'] = 0.6
shops['Coffee Tree'] = 0.45
shops['Tazza D\' Oro'] = 0.75
shops['Espresso a Mano'] = 0.95
shops['Crazy Mocha'] = 0.35
shops['Commonplace'] = 0.5
sort_manual(shops)
sort_python(shops)
sort_numpy(shops)
if __name__ == "__main__":
main()
|
[
"snnair@andrew.cmu.edu"
] |
snnair@andrew.cmu.edu
|
dff8c7bdbed01f6050d2849d52ffd0a9b798193a
|
bcf09471588530a543b1eca8fd2938ee10c1a9c5
|
/api-service/api/service.py
|
320dbfe7cf29e4250393d9fe4930558d7373abd8
|
[] |
no_license
|
dlops-io/video-in-out
|
34bf5ded0a3d48a678cd4f47f4edade11ef6131a
|
c2cd180e5f00f67d9fcce532b728a92950ae25aa
|
refs/heads/main
| 2023-08-20T13:21:37.479774
| 2021-10-25T12:48:55
| 2021-10-25T12:48:55
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 752
|
py
|
from fastapi import FastAPI
from starlette.middleware.cors import CORSMiddleware
from starlette.requests import Request
from starlette.responses import JSONResponse
from starlette.staticfiles import StaticFiles
from api.routers import video
# Setup FastAPI app
app = FastAPI(
title="API Service",
description="API Service",
version="0.1.0"
)
# Enable CORSMiddleware
app.add_middleware(
CORSMiddleware,
allow_credentials=False,
allow_origins=["*"],
allow_methods=["*"],
allow_headers=["*"],
)
# Routes
@app.get(
"/",
summary="Index",
description="Root api"
)
async def get_index():
return {
"message": "API Server Running!"
}
# Additional routers here
app.include_router(video.router)
|
[
"shivasj@gmail.com"
] |
shivasj@gmail.com
|
7ce22c0d3c1840a8b15bf306b73f4eaade25b3f0
|
c80ae9c1decd51252f8d623ded02b1003eb369c2
|
/web/movies/migrations/0003_auto_20170827_1727.py
|
27edc80f52dabe8be01eff1bc0d86b5e77aa52e9
|
[] |
no_license
|
wudizhangzhi/demo
|
5fa605709fb6f3d90b530d65149298d98b45c7fd
|
679933b7fdacbf7942c1cceb4f69da8b9e7d4bdd
|
refs/heads/master
| 2021-01-20T12:42:40.050376
| 2017-09-30T02:15:11
| 2017-09-30T02:15:11
| 90,399,626
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 806
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-27 09:27
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('movies', '0002_auto_20170826_1532'),
]
operations = [
migrations.AddField(
model_name='movies',
name='category',
field=models.IntegerField(choices=[(0, b'\xe7\x94\xb5\xe5\xbd\xb1'), (1, b'\xe7\x94\xb5\xe8\xa7\x86\xe5\x89\xa7'), (2, b'\xe7\xbb\xbc\xe8\x89\xba')], default=0),
),
migrations.AlterField(
model_name='movies',
name='film_type',
field=models.IntegerField(choices=[(0, b'\xe6\xad\xa3\xe7\x89\x87'), (1, b'\xe9\xa2\x84\xe5\x91\x8a')], default=0),
),
]
|
[
"zhangzhichao@promote.cache-dns.local"
] |
zhangzhichao@promote.cache-dns.local
|
74d689c8c85d5d2561a6abc2a06ba077a7496e0e
|
0fa82ccc0b93944c4cbb8255834b019cf16d128d
|
/Az/temp.py
|
caf3bc211fbf8fccda75e10e1fee9d32caddc4ec
|
[] |
no_license
|
Akashdeepsingh1/project
|
6ad477088a3cae2d7eea818a7bd50a2495ce3ba8
|
bdebc6271b39d7260f6ab5bca37ab4036400258f
|
refs/heads/master
| 2022-12-13T23:09:35.782820
| 2020-08-27T14:22:37
| 2020-08-27T14:22:37
| 279,722,741
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 563
|
py
|
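# Classic "minimum cost for tickets" DP: dp[i] = cheapest way to cover travel
# through day i. Note that dp[i-7] / dp[i-30] rely on Python's negative
# indexing wrapping to the still-zero tail of the 366-slot list for small i.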
def mincostTickets(days, costs):
    dp = [0] * 366
    for i in range(1, max(days) + 1):
        if i in days:
            dp[i] = min(dp[i - 1] + costs[0], dp[i - 7] + costs[1], dp[i - 30] + costs[2])
        else:
            dp[i] = dp[i - 1]
    return dp[:max(days) + 1][-1]
def mincostTickets2(days, costs):
    dp = [0]*366
    for i in range(1, max(days)+1):
        dp[i] = min(dp[i-1] + costs[0], dp[i-7] + costs[1], dp[i-30] + costs[2])
    return dp[:max(days)+1][-1]
days = [1, 4, 6, 7, 8, 20]
costs = [2, 7, 15]
print(mincostTickets2(days, costs))
|
[
"Akashdeep_S@Dell.com"
] |
Akashdeep_S@Dell.com
|
24a9afca2f817f33c7ce171ef49ab354c5dd6efc
|
d925eb9cf278a67c7714ffa26f25060ae176cb1a
|
/09_ingredient_splitter_v3.py
|
0cd41a40b08cf10dc8fe487ef18f3d9e721adf97
|
[] |
no_license
|
wenqitoh/Recipe-Moderniser
|
bf188697706d69e836396fbf99e7708962ec9f96
|
e257b35f82b70d15bda812dca774f089ce40e68e
|
refs/heads/main
| 2023-06-13T23:24:50.556741
| 2021-07-11T05:04:02
| 2021-07-11T05:04:02
| 372,673,168
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,474
|
py
|
"""Further version of ingredient splitter which splits the ingredients from one lin of input
into quantity, unit, and ingredient
version 3 - testing on full recipe
created by Wen-Qi Toh
7/7/21"""
import re # this is the Regular Expression module
# ingredient has mixed fraction followed by unit and ingredient
full_recipe = [
"1 1/2 ml flour",
"3/4 cup milk",
"1 cup flour",
"2 tablespoons white sugar",
"1 3/4 cups flour",
"1.5 tsp baking powder",
"pinch of cinnamon"
]
# the regex below expects: number <space> number/number (a mixed fraction)
mixed_regex = r"\d{1,3}\s\d{1,3}\/\d{1,3}"
# \d for a digit, \d{1,3} allows 1-3 digits, \s for a space, \/ for the fraction slash
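# e.g. re.match(mixed_regex, "1 1/2 ml flour") matches "1 1/2", while
# "3/4 cup milk" does not match (no whole number before the fraction)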
for recipe_line in full_recipe:
recipe_line = recipe_line.strip()
# get amount
if re.match(mixed_regex, recipe_line): # checking for mixed fraction
# get mixed number by matching the regex
pre_mixed_num = re.match(mixed_regex, recipe_line)
mixed_num = pre_mixed_num.group()
# .group returns the part of the string where there was a match
# replace the space in the mixed number with '+' sign
amount = mixed_num.replace(" ","+")
# changes the string into a float using python's evaluation method
amount = eval(amount)
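        # e.g. "1 1/2" -> "1+1/2", and eval("1+1/2") evaluates to 1.5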
# get unit and ingredient
compile_regex = re.compile(mixed_regex)
# compiles the regex into a string object - so we can search for patterns
unit_ingredient = re.split(compile_regex, recipe_line)
# produces the recipe line unit and amount as a list
unit_ingredient = (unit_ingredient[1]).strip()
# removes the extra white space before and after the unit
# 2nd element in list, converting into a string
else:
# splits the line at the first space
get_amount = recipe_line.split(" ", 1)
try:
amount = eval(get_amount[0]) # convert amount to float if possible
except NameError:
amount = get_amount[0]
unit_ingredient = get_amount[1]
# get unit and ingredient
# splits the string into a list containing just the unit and ingredient
get_unit = unit_ingredient.split(" ", 1)
unit = get_unit[0] # making the 1st item in the list 'unit'
ingredient = get_unit[1] # making the 2nd item in the list 'ingredient'
    # all 3 elements of the original recipe line are now broken into the 3 variables
print("{} {} {}".format(amount, unit, ingredient))
|
[
"tohw@middleton.school.nz"
] |
tohw@middleton.school.nz
|
9ff2745dddde91c4bb375c36b46f51ff6af9493f
|
fe19282d91746bd21d1daed624d6d5102a871d8d
|
/assign5/models.py
|
05082fd5e43de3bfec644355f7fc8ca81186f2b1
|
[] |
no_license
|
vivekpradhan/autograder
|
344410aa8f7fa8c93137ec56762b949be77c24ae
|
0cd8c24d0ba1c8f2af129c1c2a7bd7a7bbe0853d
|
refs/heads/master
| 2021-01-24T10:34:04.424425
| 2017-10-09T12:07:32
| 2017-10-09T12:07:32
| 69,992,405
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,500
|
py
|
from __future__ import unicode_literals
from django.db import models
# Create your models here.
class Submission(models.Model):
eid = models.CharField(max_length=10, blank=False)
document = models.FileField(upload_to='media/')
uploaded_at = models.DateTimeField(auto_now_add=True)
assignment_num = models.IntegerField(default = 0)
password = models.CharField(max_length=10, default='')
class SubResults(models.Model):
password = models.CharField(max_length=10, default='')
processed_at = models.DateTimeField(auto_now_add=True)
assignment_num = models.IntegerField(default = 0)
pomlocation = models.CharField(max_length=500, blank=False)
eid = models.CharField(max_length=10, blank=False)
mvnstarted = models.BooleanField(default=False)
mvnended = models.BooleanField(default=False)
hadoopstarted = models.BooleanField(default=False)
hadoopended = models.BooleanField(default=False)
foundoutput = models.BooleanField(default=False)
numberoflines = models.IntegerField(default=-999)
numberofkeymatches = models.IntegerField(default=-999)
events_sorted = models.BooleanField(default=False)
features_sorted = models.BooleanField(default=False)
completed = models.BooleanField(default=False)
errors = models.CharField(max_length=500, default='')
def convert_to_string(self):
output = "<div id='output'>"
output+= '<li> Autograder started at '+ str(self.processed_at) + ' </li>\n'
#output+= '<li> EID: '+str(self.eid) + ' </li>\n'
output+= '<li> mvn clean package started?: ' +str(self.mvnstarted)+'</li>\n'
output+= '<li> mvn clean package ended?: ' +str(self.mvnended)+'</li>\n'
output+= '<li> hadoop job started?: ' +str(self.hadoopstarted)+'</li>\n'
output+= '<li> hadoop job ended?: ' +str(self.hadoopended)+'</li>\n'
output+= '<li> found output file?: '+str(self.foundoutput)+ '</li>\n'
output+= '<li> Number of lines in output is '+str(self.numberoflines)+' of xx</li>\n'
output+= '<li> Number of matching keys in output is '+str(self.numberofkeymatches)+' of xx</li>\n'
output+= '<li> Events sorted? : '+str(self.events_sorted)+'</li>\n'
output+= '<li> Features sorted? : '+str(self.features_sorted)+'</li>\n'
output+= '<li> Autograder is finished? '+str(self.completed)+'</li>\n'
output+= '<li> ERRORS: '+str(self.errors)+'</li>\n'
output+= '</div>'
return output
|
[
"vivkripra@gmail.com"
] |
vivkripra@gmail.com
|
a7ba513d9dbf489347919fd23a3cec055f45149c
|
e4bf5f3ad3beecf288507470b20ae30849effdb1
|
/node_modules/mongoose/node_modules/mongodb/node_modules/mongodb-core/node_modules/kerberos/build/config.gypi
|
fbceeb251c1b9d148e8e13e1a6ab16086dcc9128
|
[
"MIT",
"Apache-2.0"
] |
permissive
|
JohnnyLamb/scotch_jwt_token
|
6356e30026db88fdac64484e4ca0770b40d0701f
|
505db6a587deaefb75df7ad24b718bbac72472db
|
refs/heads/master
| 2021-01-10T16:36:39.379628
| 2015-10-17T23:32:49
| 2015-10-17T23:32:49
| 44,454,151
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,717
|
gypi
|
# Do not edit. File was generated by node-gyp's "configure" step
{
"target_defaults": {
"cflags": [],
"default_configuration": "Release",
"defines": [],
"include_dirs": [],
"libraries": []
},
"variables": {
"clang": 1,
"host_arch": "x64",
"icu_data_file": "icudt54l.dat",
"icu_data_in": "../../deps/icu/source/data/in/icudt54l.dat",
"icu_endianness": "l",
"icu_gyp_path": "tools/icu/icu-generic.gyp",
"icu_locales": "en,root",
"icu_path": "./deps/icu",
"icu_small": "true",
"icu_ver_major": "54",
"node_install_npm": "true",
"node_prefix": "",
"node_shared_cares": "false",
"node_shared_http_parser": "false",
"node_shared_libuv": "false",
"node_shared_openssl": "false",
"node_shared_v8": "false",
"node_shared_zlib": "false",
"node_tag": "",
"node_use_dtrace": "true",
"node_use_etw": "false",
"node_use_mdb": "false",
"node_use_openssl": "true",
"node_use_perfctr": "false",
"openssl_no_asm": 0,
"python": "/usr/bin/python",
"target_arch": "x64",
"uv_library": "static_library",
"uv_parent_path": "/deps/uv/",
"uv_use_dtrace": "true",
"v8_enable_gdbjit": 0,
"v8_enable_i18n_support": 1,
"v8_no_strict_aliasing": 1,
"v8_optimized_debug": 0,
"v8_random_seed": 0,
"v8_use_snapshot": "false",
"want_separate_host_toolset": 0,
"nodedir": "/Users/johnny/.node-gyp/0.12.7",
"copy_dev_lib": "true",
"standalone_static_library": 1,
"save_dev": "",
"browser": "",
"viewer": "man",
"rollback": "true",
"usage": "",
"globalignorefile": "/usr/local/etc/npmignore",
"init_author_url": "",
"shell": "/bin/zsh",
"parseable": "",
"shrinkwrap": "true",
"init_license": "ISC",
"if_present": "",
"cache_max": "Infinity",
"init_author_email": "",
"sign_git_tag": "",
"cert": "",
"git_tag_version": "true",
"local_address": "",
"long": "",
"fetch_retries": "2",
"npat": "",
"registry": "https://registry.npmjs.org/",
"key": "",
"message": "%s",
"versions": "",
"globalconfig": "/usr/local/etc/npmrc",
"always_auth": "",
"spin": "true",
"cache_lock_retries": "10",
"cafile": "",
"heading": "npm",
"fetch_retry_mintimeout": "10000",
"proprietary_attribs": "true",
"access": "",
"json": "",
"description": "true",
"engine_strict": "",
"https_proxy": "",
"init_module": "/Users/johnny/.npm-init.js",
"userconfig": "/Users/johnny/.npmrc",
"node_version": "0.12.7",
"user": "",
"save": "true",
"editor": "vi",
"tag": "latest",
"global": "",
"optional": "true",
"bin_links": "true",
"force": "",
"searchopts": "",
"depth": "Infinity",
"rebuild_bundle": "true",
"searchsort": "name",
"unicode": "true",
"fetch_retry_maxtimeout": "60000",
"ca": "",
"save_prefix": "^",
"strict_ssl": "true",
"tag_version_prefix": "v",
"dev": "",
"fetch_retry_factor": "10",
"group": "20",
"save_exact": "",
"cache_lock_stale": "60000",
"version": "",
"cache_min": "10",
"cache": "/Users/johnny/.npm",
"searchexclude": "",
"color": "true",
"save_optional": "",
"user_agent": "npm/2.11.3 node/v0.12.7 darwin x64",
"ignore_scripts": "",
"cache_lock_wait": "10000",
"production": "",
"save_bundle": "",
"init_version": "1.0.0",
"umask": "0022",
"git": "git",
"init_author_name": "",
"scope": "",
"onload_script": "",
"tmp": "/var/folders/3q/_3b6n01s6yd817hd60lsckk80000gn/T",
"unsafe_perm": "true",
"prefix": "/usr/local",
"link": ""
}
}
|
[
"agnusjack@gmail.com"
] |
agnusjack@gmail.com
|
8602f77cf80a23e828ccb71e0edd430dbdbb77c2
|
b97fd4dd496456bbbeda8d774bb179f1b03b0dba
|
/day11/part2.py
|
c85016a17ab75f04ef04b920eb6f2beffec81e34
|
[] |
no_license
|
woranov/aoc2020
|
7666cabc96267936fda04a58244193db19e9489e
|
671ef866e0c81a5f70898a7e6ec725b1f78378fb
|
refs/heads/master
| 2023-02-11T19:03:01.437839
| 2020-12-25T15:29:46
| 2020-12-25T15:29:46
| 317,584,125
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,312
|
py
|
import functools
import itertools
_TESTCASE = """\
L.LL.LL.LL
LLLLLLL.LL
L.L.L..L..
LLLL.LL.LL
L.LL.LL.LL
L.LLLLL.LL
..L.L.....
LLLLLLLLLL
L.LLLLLL.L
L.LLLLL.LL
""".strip().splitlines()
def compute(data):
"""
>>> compute(_TESTCASE)
26
"""
grid = [*map(list, data)]
rows = len(grid)
cols = len(grid[0])
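    # Part 2 rules: each seat considers the first seat visible in each of the
    # eight directions; an occupied seat empties once it sees 5 or more occupied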
    # doing some memoization, but it's pretty pointless here
@functools.lru_cache(maxsize=None)
def get_directions(r, c):
return {
"↓": tuple(zip(itertools.repeat(r), range(c + 1, cols))),
"↑": tuple(zip(itertools.repeat(r), range(c - 1, -1, -1))),
"→": tuple(zip(range(r + 1, rows), itertools.repeat(c))),
"←": tuple(zip(range(r - 1, -1, -1), itertools.repeat(c))),
"↗": tuple(zip(range(r + 1, rows), range(c + 1, cols))),
"↖": tuple(zip(range(r + 1, rows), range(c - 1, -1, -1))),
"↙": tuple(zip(range(r - 1, -1, -1), range(c + 1, cols))),
"↘": tuple(zip(range(r - 1, -1, -1), range(c - 1, -1, -1))),
}
non_floor_seats = [(r, c) for r in range(rows) for c in range(cols) if grid[r][c] != "."]
while True:
new_grid = [row[:] for row in grid]
for r_idx, c_idx in non_floor_seats:
sym = new_grid[r_idx][c_idx]
if sym == ".":
continue
else:
count = 0
for direction in get_directions(r_idx, c_idx).values():
                for nb_r, nb_c in direction:
                    cell = grid[nb_r][nb_c]
                    if cell == ".":
                        continue
                    if cell == "#":
                        count += 1
                    # the first visible seat blocks the line of sight either way
                    break
if sym == "L" and count == 0:
new_grid[r_idx][c_idx] = "#"
elif sym == "#" and count >= 5:
new_grid[r_idx][c_idx] = "L"
if new_grid == grid:
break
else:
grid = new_grid
return sum(seat == "#" for row in grid for seat in row)
def main():
import pathlib
input_path = pathlib.Path(__file__).with_name("input.txt")
with input_path.open() as f:
print(compute(f.read().strip().splitlines()))
if __name__ == "__main__":
main()
|
[
"wor4nov@gmail.com"
] |
wor4nov@gmail.com
|
66bd585e840a22d0e4615b07176d861991dd6612
|
da617dbc147d7720490866b279068cdef87e00c9
|
/q7.py
|
510b9e8113cec4c9b681288211d223574fb7efc6
|
[] |
no_license
|
Shailaj97/Python-Assignment-1
|
444750a816bd593fdafb9eebdd9b6b5dd646915f
|
55e11b541fbc517da360afc41ea915a1a9af43ea
|
refs/heads/main
| 2023-07-17T06:10:59.775212
| 2021-09-01T04:32:43
| 2021-09-01T04:32:43
| 401,912,524
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 534
|
py
|
""" Write a Python program to combine two lists into a dictionary, where the elements of the first one serve as the keys and the elements of the second one serve as the values. The values of the first list need to be unique and hashable.
Sample Output:
Original lists:
['a', 'b', 'c', 'd', 'e', 'f']
[1, 2, 3, 4, 5]
Combine the values of the said two lists into a dictionary:
{'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5}
"""
list1= ['a', 'b', 'c', 'd', 'e', 'f']
list2 = [1, 2, 3, 4, 5]
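# zip() pairs items until the shorter list runs out, so the extra key 'f' is dropped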
a = dict(zip(list1, list2))
print(a)
|
[
"noreply@github.com"
] |
Shailaj97.noreply@github.com
|
425b4486fa1fd6169f158a9b5cac855c66475095
|
7d5d8492c2d88b88bdc57e3c32db038a7e7e7924
|
/IPCC-CMIP5/bias_correction/bc_extract_gcm.py
|
ef32727acb01fb3b19decbfd3c17f5d35dbfc4ff
|
[] |
no_license
|
CIAT-DAPA/dapa-climate-change
|
80ab6318d660a010efcd4ad942664c57431c8cce
|
2480332e9d61a862fe5aeacf6f82ef0a1febe8d4
|
refs/heads/master
| 2023-08-17T04:14:49.626909
| 2023-08-15T00:39:58
| 2023-08-15T00:39:58
| 39,960,256
| 15
| 17
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,770
|
py
|
# ---------------------------------------------------------------------------------
# Author: Jaime Tarapues
# Date: September 23rd, 2014
# Updated: July 28th, 2014
# Purpose: extract daily data values from CMIP5 GCMs
# ----------------------------------------------------------------------------------
import os, sys, string,glob, shutil
# python D:\jetarapues\_scripts\bc_extract_gcm.py T:\gcm\cmip5\raw\daily\rcp45\gfdl_esm2m\r1i1p1\pr_day_GFDL-ESM2M_rcp45_r1i1p1_20060101-21001231.nc D:\jetarapues\Request\Request_cnavarro\bc\tes.tab 2006 2100 -72.301412 5.339301 YES cdo
#Syntax
if len(sys.argv) < 9:
	os.system('cls')
	print "\n Too few args"
	print " Syntax : <bc_extract_gcm.py> <ifile> <odat> <year_start> <year_end> <lon> <lat> <vartype> <dircdo>"
	print " - ie: "
	sys.exit(1)
#Set variables
ifile = sys.argv[1]
odat = sys.argv[2]
yi = sys.argv[3]
yf = sys.argv[4]
lon=sys.argv[5]
lat=sys.argv[6]
vartype=sys.argv[7]
dircdo = sys.argv[8]
# Clean screen
os.system('cls')
name=os.path.basename(ifile)
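# The cdo operator chain below runs right to left: select the variable and
# the year range, nearest-neighbour remap (-remapnn) onto the requested
# lon/lat point, then -outputtab writes a date/value table to odat.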
if not os.path.exists(odat):
# print '\n...Extracting',name,'lon:'+str(lon)+' lat:'+lat,'Date:'+str(yi)+'-'+str(yf),'\n'
if vartype == 'NO':
os.system(dircdo+" -s -outputtab,date,value -remapnn,lon="+str(lon)+"_lat="+lat+' -selyear,'+str(yi)+'/'+str(yf)+' '+ifile+" > "+odat)
else:
var=name.split("_")[0]
if var == 'hur':
os.system(dircdo+" -s -outputtab,date,value -remapnn,lon="+str(lon)+"_lat="+lat+' -selyear,'+str(yi)+'/'+str(yf)+' -selname,'+var+' -sellevel,85000 '+ifile+" > "+odat)
else:
os.system(dircdo+" -s -outputtab,date,value -remapnn,lon="+str(lon)+"_lat="+lat+' -selyear,'+str(yi)+'/'+str(yf)+' -selname,'+var+' '+ifile+" > "+odat)
# else:
# print '\t...Extracted by coordinate',name
|
[
"jaime.tm8@gmail.com"
] |
jaime.tm8@gmail.com
|
e14ac3d06dfe4effe84493b1c1438edb268348ab
|
ab692ff0773367a0190309d3e7c3785a46a205d3
|
/main_finetune_imagenet.py
|
ffeb8bccfc2c0e2dac3ba392891c83eea971c116
|
[
"MIT"
] |
permissive
|
sAviOr287/imagenet_ICLR
|
f6da8149bf4280c923c584c4e6aade81fc469cf4
|
1ac83d799f5335355161156aa9bba63e0d82a063
|
refs/heads/main
| 2023-01-13T02:45:53.450571
| 2020-11-15T22:12:36
| 2020-11-15T22:12:36
| 313,132,189
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 20,010
|
py
|
import argparse
import os
import random
import shutil
import time
import warnings
import sys
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.distributed as dist
import torch.optim
import torch.multiprocessing as mp
import torch.utils.data
import torch.utils.data.distributed
import torchvision.transforms as transforms
import torchvision.datasets as datasets
import torchvision.models as models
from tensorboardX import SummaryWriter
from pprint import pprint
model_names = sorted(name for name in models.__dict__
if name.islower() and not name.startswith("__")
and callable(models.__dict__[name]))
parser = argparse.ArgumentParser(description='PyTorch ImageNet Training')
parser.add_argument('--data', metavar='DIR',
help='path to dataset')
parser.add_argument('-a', '--arch', metavar='ARCH', default='resnet18',
choices=model_names,
help='model architecture: ' +
' | '.join(model_names) +
' (default: resnet18)')
parser.add_argument('-j', '--workers', default=4, type=int, metavar='N',
help='number of data loading workers (default: 4)')
parser.add_argument('--epochs', default=90, type=int, metavar='N',
help='number of total epochs to run')
parser.add_argument('--start-epoch', default=0, type=int, metavar='N',
help='manual epoch number (useful on restarts)')
parser.add_argument('-b', '--batch-size', default=256, type=int,
metavar='N',
help='mini-batch size (default: 256), this is the total '
'batch size of all GPUs on the current node when '
'using Data Parallel or Distributed Data Parallel')
parser.add_argument('--lr', '--learning-rate', default=0.1, type=float,
metavar='LR', help='initial learning rate', dest='lr')
parser.add_argument('--momentum', default=0.9, type=float, metavar='M',
help='momentum')
parser.add_argument('--wd', '--weight-decay', default=1e-4, type=float,
metavar='W', help='weight decay (default: 1e-4)',
dest='weight_decay')
parser.add_argument('-p', '--print-freq', default=10, type=int,
metavar='N', help='print frequency (default: 10)')
parser.add_argument('--resume', default='', type=str, metavar='PATH',
help='path to latest checkpoint (default: none)')
parser.add_argument('--resume_pruned', default='', type=str, metavar='PATH',
help='path to latest pruned network (default: none)')
# resume_pruned
parser.add_argument('-e', '--evaluate', dest='evaluate', action='store_true',
help='evaluate model on validation set')
parser.add_argument('--pretrained', dest='pretrained', action='store_true',
help='use pre-trained model')
parser.add_argument('--world-size', default=-1, type=int,
help='number of nodes for distributed training')
parser.add_argument('--rank', default=-1, type=int,
help='node rank for distributed training')
parser.add_argument('--dist-url', default='tcp://224.66.41.62:23456', type=str,
help='url used to set up distributed training')
parser.add_argument('--dist-backend', default='nccl', type=str,
help='distributed backend')
parser.add_argument('--seed', default=None, type=int,
help='seed for initializing training. ')
parser.add_argument('--gpu', default=None, type=int,
help='GPU id to use.')
parser.add_argument('--multiprocessing-distributed', action='store_true',
help='Use multi-processing distributed training to launch '
'N processes per node, which has N GPUs. This is the '
'fastest way to use PyTorch for either single node or '
'multi node data parallel training')
parser.add_argument('--grad_loop', default=1, type=int,
                    help='gradient accumulation loop count (the learning rate is divided by it).')
best_acc1 = 0
writer = None
def main():
args = parser.parse_args()
args.lr /= args.grad_loop
if args.seed is not None:
random.seed(args.seed)
torch.manual_seed(args.seed)
cudnn.deterministic = True
warnings.warn('You have chosen to seed training. '
'This will turn on the CUDNN deterministic setting, '
'which can slow down your training considerably! '
'You may see unexpected behavior when restarting '
'from checkpoints.')
if args.gpu is not None:
warnings.warn('You have chosen a specific GPU. This will completely '
'disable data parallelism.')
if args.dist_url == "env://" and args.world_size == -1:
args.world_size = int(os.environ["WORLD_SIZE"])
args.distributed = args.world_size > 1 or args.multiprocessing_distributed
ngpus_per_node = torch.cuda.device_count()
if args.multiprocessing_distributed:
# Since we have ngpus_per_node processes per node, the total world_size
# needs to be adjusted accordingly
args.world_size = ngpus_per_node * args.world_size
# Use torch.multiprocessing.spawn to launch distributed processes: the
# main_worker process function
mp.spawn(main_worker, nprocs=ngpus_per_node, args=(ngpus_per_node, args))
else:
# Simply call main_worker function
main_worker(args.gpu, ngpus_per_node, args)
def main_worker(gpu, ngpus_per_node, args):
global best_acc1, writer
args.gpu = gpu
args.save_dir = None
if args.gpu is not None:
print("Use GPU: {} for training".format(args.gpu))
if args.distributed:
if args.dist_url == "env://" and args.rank == -1:
args.rank = int(os.environ["RANK"])
if args.multiprocessing_distributed:
# For multiprocessing distributed training, rank needs to be the
# global rank among all the processes
args.rank = args.rank * ngpus_per_node + gpu
dist.init_process_group(backend=args.dist_backend, init_method=args.dist_url,
world_size=args.world_size, rank=args.rank)
# create model
def forward_pre_hook(m, x):
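        # Pre-forward hook used for pruned fine-tuning: multiply the weights
        # by the frozen 0/1 mask before every forward pass so that weights
        # pruned away cannot be revived by optimizer updates.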
m.mask.requires_grad_(False)
mask = m.mask
# mask.requires_grad_(False)
# mask.cuda(m.weight.get_device())
m.weight.data.mul_(mask.to(m.weight.get_device()))
if args.resume_pruned:
if os.path.isfile(args.resume_pruned):
print("=> loading checkpoint '{}'".format(args.resume_pruned))
checkpoint = torch.load(args.resume_pruned)
model = checkpoint['net'].cpu()
masks = checkpoint['mask']
ratio = checkpoint['ratio']
print("=> Ratios:")
pprint(ratio)
# optimizer.load_state_dict(checkpoint['optimizer'])
print("Loaded check point from %s." % args.resume_pruned)
print('=> Registering masks for each layer')
for m in model.modules():
if isinstance(m, nn.Linear) or isinstance(m, nn.Conv2d):
m.mask = nn.Parameter(masks[m]).requires_grad_(False).cpu()
m.register_forward_pre_hook(forward_pre_hook)
args.save_dir = os.path.join(*args.resume_pruned.split('/')[:-1])
writer = SummaryWriter(args.save_dir)
print('=> Will save to %s.' % args.save_dir)
else:
print("=> no checkpoint found at '{}'".format(args.resume))
elif args.pretrained:
print("=> using pre-trained model '{}'".format(args.arch))
model = models.__dict__[args.arch](pretrained=True)
else:
print("=> creating model '{}'".format(args.arch))
model = models.__dict__[args.arch]()
if args.distributed:
# For multiprocessing distributed, DistributedDataParallel constructor
# should always set the single device scope, otherwise,
# DistributedDataParallel will use all available devices.
if args.gpu is not None:
torch.cuda.set_device(args.gpu)
model.cuda(args.gpu)
# When using a single GPU per process and per
# DistributedDataParallel, we need to divide the batch size
# ourselves based on the total number of GPUs we have
args.batch_size = int(args.batch_size / ngpus_per_node)
args.workers = int(args.workers / ngpus_per_node)
model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu])
else:
model.cuda()
# DistributedDataParallel will divide and allocate batch_size to all
# available GPUs if device_ids are not set
model = torch.nn.parallel.DistributedDataParallel(model)
elif args.gpu is not None:
torch.cuda.set_device(args.gpu)
model = model.cuda(args.gpu)
else:
# DataParallel will divide and allocate batch_size to all available GPUs
if args.arch.startswith('alexnet') or args.arch.startswith('vgg'):
model.features = torch.nn.DataParallel(model.features)
model.cuda()
else:
model = torch.nn.DataParallel(model).cuda()
# define loss function (criterion) and optimizer
criterion = nn.CrossEntropyLoss().cuda(args.gpu)
optimizer = torch.optim.SGD(model.parameters(), args.lr,
momentum=args.momentum,
weight_decay=args.weight_decay)
# optionally resume from a checkpoint
if args.resume:
def to_cpu(m):
if isinstance(m, dict):
for k in m.keys():
m[k] = to_cpu(m[k])
return m
elif isinstance(m, list):
return [to_cpu(_) for _ in m]
elif isinstance(m, torch.Tensor):
return m.cpu()
else:
return m
if os.path.isfile(args.resume):
print("=> loading checkpoint '{}'".format(args.resume))
checkpoint = torch.load(args.resume)
args.start_epoch = checkpoint['epoch']
best_acc1 = checkpoint['best_acc1']
if args.gpu is not None:
# best_acc1 may be from a checkpoint from a different GPU
best_acc1 = best_acc1.to(args.gpu)
model.load_state_dict(checkpoint['state_dict'])
optimizer.load_state_dict(checkpoint['optimizer'])
checkpoint['state_dict'] = to_cpu(checkpoint['state_dict'])
checkpoint['optimizer'] = to_cpu(checkpoint['optimizer'])
# for k in checkpoint['state_dict'].keys():
# checkpoint['state_dict'][k] = checkpoint['state_dict'][k].cpu()
#
# for k in checkpoint['optimizer'].keys():
# checkpoint['optimizer'][k] = checkpoint['optimizer'][k].cpu()
print("=> loaded checkpoint '{}' (epoch {})"
.format(args.resume, checkpoint['epoch']))
del checkpoint
else:
print("=> no checkpoint found at '{}'".format(args.resume))
cudnn.benchmark = True
# Data loading code
traindir = os.path.join(args.data, 'train')
valdir = os.path.join(args.data, 'val')
normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225])
train_dataset = datasets.ImageFolder(
traindir,
transforms.Compose([
transforms.RandomResizedCrop(224),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
normalize,
]))
if args.distributed:
train_sampler = torch.utils.data.distributed.DistributedSampler(train_dataset)
else:
train_sampler = None
train_loader = torch.utils.data.DataLoader(
train_dataset, batch_size=args.batch_size, shuffle=(train_sampler is None),
num_workers=args.workers, pin_memory=True, sampler=train_sampler)
val_loader = torch.utils.data.DataLoader(
datasets.ImageFolder(valdir, transforms.Compose([
transforms.Resize(256),
transforms.CenterCrop(224),
transforms.ToTensor(),
normalize,
])),
batch_size=args.batch_size, shuffle=False,
num_workers=args.workers, pin_memory=True)
if args.evaluate:
validate(val_loader, model, criterion, args)
return
for epoch in range(args.start_epoch, args.epochs):
if args.distributed:
train_sampler.set_epoch(epoch)
adjust_learning_rate(optimizer, epoch, args)
# train for one epoch
train_loss, train_top1, train_top5 = train(train_loader, model, criterion, optimizer, epoch, args)
# evaluate on validation set
acc1, val_loss, val_top1, val_top5 = validate(val_loader, model, criterion, args)
# remember best acc@1 and save checkpoint
is_best = acc1 > best_acc1
best_acc1 = max(acc1, best_acc1)
if not args.multiprocessing_distributed or (args.multiprocessing_distributed
and args.rank % ngpus_per_node == 0):
save_checkpoint({
'epoch': epoch + 1,
'arch': args.arch,
'state_dict': model.state_dict(),
'best_acc1': best_acc1,
'optimizer' : optimizer.state_dict(),
}, is_best, args=args)
# global writer
if writer is not None:
writer.add_scalar('train/loss', train_loss.avg, epoch)
writer.add_scalar('train/top1', train_top1.avg, epoch)
writer.add_scalar('train/top5', train_top5.avg, epoch)
writer.add_scalar('val/loss', val_loss.avg, epoch)
writer.add_scalar('val/top1', val_top1.avg, epoch)
writer.add_scalar('val/top5', val_top5.avg, epoch)
def train(train_loader, model, criterion, optimizer, epoch, args):
total = args.epochs
intv = total // 3
lr = args.lr * (0.1 ** (epoch // intv))
batch_time = AverageMeter('Time', ':6.3f')
data_time = AverageMeter('Data', ':6.3f')
losses = AverageMeter('Loss', ':.4e')
top1 = AverageMeter('Acc@1', ':6.2f')
top5 = AverageMeter('Acc@5', ':6.2f')
progress = ProgressMeter(len(train_loader), batch_time, data_time, losses, top1,
top5, prefix="[lr={}, grad_loop={}] Epoch: [{}]".format(lr, args.grad_loop, epoch))
# switch to train mode
model.train()
end = time.time()
optimizer.zero_grad()
all_loss = 0
counts = 0
acc_top1 = 0
acc_top5 = 0
all_samples = 0
for i, (input, target) in enumerate(train_loader):
# measure data loading time
data_time.update(time.time() - end)
if args.gpu is not None:
input = input.cuda(args.gpu, non_blocking=True)
target = target.cuda(args.gpu, non_blocking=True)
# compute output
all_samples += input.shape[0]
output = model(input)
loss = criterion(output, target)
# measure accuracy and record loss
acc1, acc5 = accuracy(output, target, topk=(1, 5))
losses.update(loss.item(), input.size(0))
top1.update(acc1[0], input.size(0))
top5.update(acc5[0], input.size(0))
# compute gradient and do SGD step
loss.backward()
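        # gradient accumulation: step the optimizer only every grad_loop mini-batches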
if (i+1) % args.grad_loop == 0:
optimizer.step()
optimizer.zero_grad()
# measure elapsed time
batch_time.update(time.time() - end)
end = time.time()
all_loss += loss.item()
counts += 1
if i % args.print_freq == 0:
progress.print(i)
return losses, top1, top5
def validate(val_loader, model, criterion, args):
batch_time = AverageMeter('Time', ':6.3f')
losses = AverageMeter('Loss', ':.4e')
top1 = AverageMeter('Acc@1', ':6.2f')
top5 = AverageMeter('Acc@5', ':6.2f')
progress = ProgressMeter(len(val_loader), batch_time, losses, top1, top5,
prefix='Test: ')
# switch to evaluate mode
model.eval()
with torch.no_grad():
end = time.time()
for i, (input, target) in enumerate(val_loader):
if args.gpu is not None:
input = input.cuda(args.gpu, non_blocking=True)
target = target.cuda(args.gpu, non_blocking=True)
# compute output
output = model(input)
loss = criterion(output, target)
# measure accuracy and record loss
acc1, acc5 = accuracy(output, target, topk=(1, 5))
losses.update(loss.item(), input.size(0))
top1.update(acc1[0], input.size(0))
top5.update(acc5[0], input.size(0))
# measure elapsed time
batch_time.update(time.time() - end)
end = time.time()
if i % args.print_freq == 0:
progress.print(i)
print(' * Acc@1 {top1.avg:.3f} Acc@5 {top5.avg:.3f}'
.format(top1=top1, top5=top5))
return top1.avg, losses, top1, top5
def save_checkpoint(state, is_best, filename='checkpoint.pth.tar', args=None):
if args.save_dir is not None:
filename = os.path.join(args.save_dir, filename)
torch.save(state, filename)
if is_best:
best_location = 'model_best.pth.tar'
if args.save_dir is not None:
best_location = os.path.join(args.save_dir, 'model_best.pth.tar')
shutil.copyfile(filename, best_location)
class AverageMeter(object):
"""Computes and stores the average and current value"""
def __init__(self, name, fmt=':f'):
self.name = name
self.fmt = fmt
self.reset()
def reset(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def update(self, val, n=1):
self.val = val
self.sum += val * n
self.count += n
self.avg = self.sum / self.count
def __str__(self):
fmtstr = '{name} {val' + self.fmt + '} ({avg' + self.fmt + '})'
return fmtstr.format(**self.__dict__)
class ProgressMeter(object):
def __init__(self, num_batches, *meters, prefix=""):
self.batch_fmtstr = self._get_batch_fmtstr(num_batches)
self.meters = meters
self.prefix = prefix
def print(self, batch):
entries = [self.prefix + self.batch_fmtstr.format(batch)]
entries += [str(meter) for meter in self.meters]
print('\t'.join(entries))
def _get_batch_fmtstr(self, num_batches):
num_digits = len(str(num_batches // 1))
fmt = '{:' + str(num_digits) + 'd}'
return '[' + fmt + '/' + fmt.format(num_batches) + ']'
def adjust_learning_rate(optimizer, epoch, args):
total = args.epochs
intv = total // 3
"""Sets the learning rate to the initial LR decayed by 10 every 30 epochs"""
lr = args.lr * (0.1 ** (epoch // intv))
for param_group in optimizer.param_groups:
param_group['lr'] = lr
def accuracy(output, target, topk=(1,)):
"""Computes the accuracy over the k top predictions for the specified values of k"""
with torch.no_grad():
maxk = max(topk)
batch_size = target.size(0)
_, pred = output.topk(maxk, 1, True, True)
pred = pred.t()
correct = pred.eq(target.view(1, -1).expand_as(pred))
res = []
for k in topk:
correct_k = correct[:k].contiguous().view(-1).float().sum(0, keepdim=True)
res.append(correct_k.mul_(100.0 / batch_size))
return res
if __name__ == '__main__':
main()
|
[
"jeanfrancois287@hotmail.fr"
] |
jeanfrancois287@hotmail.fr
|
e7336bb7129d0dba26d5d33533f5b0997f133562
|
b53869d9c0b38ecc5f2bef40f03e0146d05c67c5
|
/send_mail/core/kafka_connect.py
|
420f3ba573dc426a1325a1038a87f916c876b163
|
[] |
no_license
|
manhcuong2801/send_sms
|
564435736e163b41b06c6d53c79d41ac9ca1886d
|
8734ffa74ddf723852d01a4b4ecc9dc3dd4accef
|
refs/heads/master
| 2023-04-03T13:35:43.433808
| 2021-04-23T12:42:59
| 2021-04-24T03:52:22
| 360,880,700
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,481
|
py
|
import json as j
from django.conf import settings as s
from kafka import KafkaConsumer as KC, KafkaProducer as KP
class KafkaConnector:
_bootstrap_server = s.KAFKA_SERVER
_topic = "topic_example"
_group_id = "group_example"
def get_consumer(
self,
topic: str = _topic,
group_id: str = _group_id,
is_handle_json: bool = True,
):
if not is_handle_json:
return KC(
topic, group_id=group_id, bootstrap_servers=[self._bootstrap_server]
)
return KC(
topic,
group_id=group_id,
bootstrap_servers=[self._bootstrap_server],
value_deserializer=lambda v: j.loads(v),
)
def get_producer(self, is_handle_json: bool = True):
if not is_handle_json:
return KP(bootstrap_servers=[self._bootstrap_server])
return KP(
bootstrap_servers=[self._bootstrap_server],
value_serializer=lambda v: j.dumps(v).encode("utf-8"),
)
def send_message_to_topic(
self,
topic: str,
bytes_msg: bytes = b"",
json_msg: dict = {},
is_handle_json: bool = True,
):
if not is_handle_json:
producer = self.get_producer(is_handle_json=False)
future = producer.send(topic, bytes_msg)
else:
producer = self.get_producer()
future = producer.send(topic, json_msg)
return future
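# Minimal usage sketch (illustrative only; assumes a reachable broker configured via
# settings.KAFKA_SERVER and an existing topic):
#
#   connector = KafkaConnector()
#   future = connector.send_message_to_topic("topic_example", json_msg={"msg": "hi"})
#   future.get(timeout=10)  # blocks until the broker acknowledges the record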
|
[
"cuongle@tamdongtam.vn"
] |
cuongle@tamdongtam.vn
|
9bf4e35570827087d92050fcab5d0ddbc721c47b
|
e8469a38083f28633b74cfd2cf4399aac11a6514
|
/ver.1/selectors.py
|
4197f85bcdd90c359aaf3cf34c0d506aec0db4a4
|
[] |
no_license
|
JadenHeo/FO4-data-crawling
|
e67ff2b611e9f3b5c83b3e237e5d06065094d091
|
e1d4be6a144985d953c9e57b2aef409d7eddea8a
|
refs/heads/main
| 2023-08-07T17:59:28.884544
| 2021-10-04T07:57:22
| 2021-10-04T07:57:22
| 395,150,481
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,343
|
py
|
selector = {"name" : "#middle > div > div > div:nth-child(2) > div.content.data_detail > div > div.content_header > div.info_wrap > div.info_line.info_name > div.name",
"pay" : "#middle > div > div > div:nth-child(2) > div.content.data_detail > div > div.content_header > div.info_wrap > div.info_line.info_name > div.side_utils > div",
"position" : "#middle > div > div > div:nth-child(2) > div.content.data_detail > div > div.content_header > div.info_wrap > div.info_line.info_ab > span > span.txt",
"live up" : "#middle > div > div > div:nth-child(2) > div.content.data_detail > div > div.content_header > div.info_wrap > div.info_line.info_ab > span > span.live.up",
"birth" : "#middle > div > div > div:nth-child(2) > div.content.data_detail > div > div.content_header > div.info_wrap > div.info_line.info_etc > span.etc.birth",
"height" : "#middle > div > div > div:nth-child(2) > div.content.data_detail > div > div.content_header > div.info_wrap > div.info_line.info_etc > span.etc.height",
"weight" : "#middle > div > div > div:nth-child(2) > div.content.data_detail > div > div.content_header > div.info_wrap > div.info_line.info_etc > span.etc.weight",
"physical" : "#middle > div > div > div:nth-child(2) > div.content.data_detail > div > div.content_header > div.info_wrap > div.info_line.info_etc > span.etc.physical",
"skill" : "#middle > div > div > div:nth-child(2) > div.content.data_detail > div > div.content_header > div.info_wrap > div.info_line.info_etc > span.etc.skill > span",
"foot" : "#middle > div > div > div:nth-child(2) > div.content.data_detail > div > div.content_header > div.info_wrap > div.info_line.info_etc > span.etc.foot",
"class" : "#middle > div > div > div:nth-child(2) > div.content.data_detail > div > div.content_header > div.info_wrap > div.info_line.info_etc > span.etc.season",
"team" : "#middle > div > div > div:nth-child(2) > div.content.data_detail > div > div.content_header > div.info_wrap > div.info_line.info_team > div.etc.team",
"nation" : "#middle > div > div > div:nth-child(2) > div.content.data_detail > div > div.content_header > div.info_wrap > div.info_line.info_team > div.etc.nation",
"speciality" : "#middle > div > div > div:nth-child(2) > div.content.data_detail > div > div.content_header > div.info_wrap > div.skill_wrap > span",
"position_overalls" : "#middle > div > div > div:nth-child(2) > div.content.data_detail > div > div.content_header > div.ovr_set > div",
"club_history" : "#middle > div > div > div:nth-child(4) > div:nth-child(1) > div.content.data_detail_club > div.data_table > ul > li > div",
"stat" : "#middle > div > div > div:nth-child(2) > div.content.data_detail > div > div.content_bottom"
}
position_overalls = ["ST", "LW", "CF", "RW", "CAM", "LM", "CM", "RM", "CDM", "LWB", "CB", "RWB", "LB", "SW", "RB", "GK"]
position_index = {"GK" : 0, "SW" : 1, "RWB" : 2, "RB" : 3, "RCB" : 4, "CB" : 5, "LCB" : 6, "LB" : 7, "LWB" : 8,
"RDM" : 9, "CDM" : 10, "LDM" : 11, "RM" : 12, "RCM" : 13, "CM" : 14, "LCM" : 15, "LM" : 16, "RAM" : 17,
"CAM" : 18, "LAM" : 19, "RF" : 20, "CF" : 21, "LF" : 22, "RW" : 23, "RT" : 24, "ST" : 25, "LT" : 26, "LW" : 27}
|
[
"hhj801@gmail.com"
] |
hhj801@gmail.com
|
de555bff1a32f8e3a9d93047cae238e8a3aa29ce
|
b0a9e9df50749086a7db7e135b2582d87cb8dbc2
|
/energy_estimation/nd/fhc/svm/hadron/sample_weighted_estimator_2d.py
|
aac40196ce6aae3c0ebd3a02e163dc5b2568c47b
|
[] |
no_license
|
kaikai581/sklearn-nova
|
6ff204bfb4351395c8029b80fedfbb6b8702f06d
|
e16644c0651123fe3ebdf42a9fb3dc50fbd2c2de
|
refs/heads/master
| 2020-03-08T17:26:46.222451
| 2020-01-07T01:12:36
| 2020-01-07T01:12:36
| 128,268,443
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,781
|
py
|
#!/usr/bin/env python
"""
This script weights each point with the inverse of its true hadronic energy when
fitting (an inverse-square-root variant is left commented out in the code). This
is equivalent to modifying the SVM loss function.
Two predictors are used, namely the calibrated hadronic energy and one swappable
variable.
"""
from __future__ import print_function
print(__doc__)
from array import array
from matplotlib.colors import LogNorm
from ROOT import *
from root_numpy import root2array
from scipy.spatial.distance import pdist
from scipy.stats import tstd, skew, kurtosis
from sklearn import preprocessing
from sklearn.externals import joblib
from sklearn.svm import SVR
import argparse
import matplotlib.pyplot as plt
import numpy as np
import os
def fit_with_subdata(var, cpar, gpar, scaledown, offset):
# retrieve training data and official reco hadronic energy for comparison
cut = 'mustopz<1275&&isnumucc==1'
X = root2array('../training_data.root',
branches=['calehad', var],
selection=cut,
step=scaledown, start=offset)
X = X.view(np.float32).reshape(X.shape + (-1,))
recoemu_official = root2array('../training_data.root', branches='recoemu',
selection=cut,
step=scaledown, start=offset)
trueenu = root2array('../training_data.root', branches='trueenu',
selection=cut,
step=scaledown, start=offset)
y = trueenu - recoemu_official
yoff = root2array('../training_data.root', branches='recoehad',
selection=cut,
step=scaledown, start=offset)
# rescale the regressors
scaler = preprocessing.StandardScaler().fit(X)
# calculate the mean pairwise squared distance between regressors
Xstd = scaler.transform(X)
if gpar == 'auto':
mean_squared_dist = np.mean(pdist(Xstd, 'sqeuclidean'))
gpar = '{0:.3g}'.format(1./mean_squared_dist)
# save the scaler
os.system('mkdir -p models/2d')
joblib.dump(scaler, 'models/2d/sample_weighted_hadronic_scaler_{}_c{}g{}step{}offset{}.pkl'.format(var, cpar, gpar, scaledown, offset))
# make an array for sample weights
swei = np.copy(y)
#~ swei[y != 0] = 1./np.sqrt(np.abs(swei[y != 0]))
swei[y != 0] = 1./np.abs(swei[y != 0])
swei[y == 0.] = 1.
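    # active weighting is 1/|E_had|; the 1/sqrt variant above is kept commented out,
    # and zero-energy samples get unit weight to avoid division by zero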
# train svm with standardized regressors
svr = SVR(kernel='rbf', C=float(cpar), gamma=float(gpar), verbose=True)
y_pred = svr.fit(Xstd, y, swei).predict(Xstd)
# save the model
joblib.dump(svr, 'models/2d/sample_weighted_hadronic_energy_estimator_{}_c{}g{}step{}offset{}.pkl'.format(var, cpar, gpar, scaledown, offset))
# make plots
#~ plt.figure(1)
#~ xbin = np.linspace(-.05,2,80)
#~ ybin = np.linspace(-.2,5,80)
#~ plt.hist2d(X,y,[xbin,ybin], norm=LogNorm())
#~ plt.colorbar()
#~ plt.scatter(X, y_pred, s=2, c='red', alpha=0.5)
# save plots
#~ os.system('mkdir -p plots/2d')
#~ plt.savefig('plots/2d/sample_weighted_estimator_overlaid_on_data_{}_c{}g{}step{}offset{}.pdf'.format(var, cpar, gpar, scaledown, offset))
# estimate various reco values
yest = y_pred
rest = (yest-y)/y
roff = (yoff-y)/y
# save root file
os.system('mkdir -p output_root_files/2d')
toutf = TFile('output_root_files/2d/sample_weighted_resolution_{}_c{}g{}step{}offset{}.root'.format(var, cpar, gpar, scaledown, offset), 'recreate')
tr = TTree( 'tr', 'resolution tree' )
r1 = array( 'f', [ 0. ] )
r2 = array( 'f', [ 0. ] )
svmehad = array( 'f', [ 0. ] )
offehad = array( 'f', [ 0. ] )
trueehad = array( 'f', [ 0. ] )
tr.Branch( 'rest', r1, 'rest/F' )
tr.Branch( 'roff', r2, 'roff/F' )
tr.Branch('svmehad', svmehad, 'svmehad/F')
tr.Branch('offehad', offehad, 'offehad/F')
tr.Branch('trueehad', trueehad, 'trueehad/F')
for i in range(len(rest)):
r1[0] = rest[i]
r2[0] = roff[i]
svmehad[0] = yest[i]
offehad[0] = yoff[i]
trueehad[0] = y[i]
tr.Fill()
tr.Write()
toutf.Close()
if __name__ == '__main__':
# list of second variable
varlist = ['cvnpi0', 'cvnchargedpion', 'cvnneutron', 'cvnproton', 'npng']
# parse command line arguments
parser = argparse.ArgumentParser(description='Hadronic energy SVM with sample weights.')
parser.add_argument('-c','--cpar',type=str,default='100')
parser.add_argument('-g','--gpar',type=str,default='auto')
parser.add_argument('-s','--step',type=int,default='500')
parser.add_argument('-o','--offset',type=int,default='0')
parser.add_argument('-v','--variable',type=int,default='0')
args = parser.parse_args()
# specified parameters
cpar = args.cpar
gpar = args.gpar
scaledown = args.step
offset = args.offset
var = varlist[args.variable]
# fit model with arguments
fit_with_subdata(var, cpar, gpar, scaledown, offset)
|
[
"shihkailin78@gmail.com"
] |
shihkailin78@gmail.com
|
cc22681d605c52facf8d17b0ff1cd2612d797397
|
eb5ab5ce3763f5e5b80a38f77ee98b7b954d726e
|
/cciaa/portlet/calendar/tests/base.py
|
34f3751741116d33cc8d925131230f0af4066c0c
|
[] |
no_license
|
PloneGov-IT/cciaa.portlet.calendar
|
088e400c567066ce486f6dfb6eaa2f482abd0471
|
81aa9deb5e082520604946387a6a60dd229db21a
|
refs/heads/master
| 2021-01-21T19:28:56.055750
| 2013-11-20T16:48:08
| 2013-11-20T16:48:08
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,309
|
py
|
from Products.Five import zcml
from Products.Five import fiveconfigure
from Testing import ZopeTestCase as ztc
from Products.PloneTestCase import PloneTestCase as ptc
from Products.PloneTestCase.layer import onsetup
@onsetup
def setup_product():
"""Set up additional products and ZCML required to test this product.
The @onsetup decorator causes the execution of this body to be deferred
until the setup of the Plone site testing layer.
"""
# Load the ZCML configuration for this package and its dependencies
fiveconfigure.debug_mode = True
import cciaa.portlet.calendar
zcml.load_config('configure.zcml', cciaa.portlet.calendar)
fiveconfigure.debug_mode = False
# We need to tell the testing framework that these products
# should be available. This can't happen until after we have loaded
# the ZCML.
ztc.installPackage('cciaa.portlet.calendar')
# The order here is important: We first call the deferred function and then
# let PloneTestCase install it during Plone site setup
setup_product()
ptc.setupPloneSite(products=['cciaa.portlet.calendar'])
class TestCase(ptc.PloneTestCase):
"""Base class used for test cases
"""
class FunctionalTestCase(ptc.FunctionalTestCase):
"""Test case class used for functional (doc-)tests
"""
|
[
"keul@db7f04ef-aaf3-0310-a811-c281ed44c4ad"
] |
keul@db7f04ef-aaf3-0310-a811-c281ed44c4ad
|
28b5d104d8e8eb1a445ac2a3359068286e7227cf
|
dde3f20bc5d50bb87f735f88645a07d9789ada2a
|
/contrib/seeds/generate-seeds.py
|
885030bca90708d6cc21d0386331627e2401a544
|
[
"MIT"
] |
permissive
|
wai1496/Quaz
|
6c96dd0009df9254127d4e37d5398e0e09b355c9
|
8cea70d86e0b38cd580a8de457d1890b4710c0d2
|
refs/heads/master
| 2021-04-15T03:37:17.426013
| 2018-03-16T08:43:39
| 2018-03-16T08:43:39
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,366
|
py
|
#!/usr/bin/python
# Copyright (c) 2014 Wladimir J. van der Laan
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Script to generate list of seed nodes for chainparams.cpp.
This script expects two text files in the directory that is passed as an
argument:
nodes_main.txt
nodes_test.txt
These files must consist of lines in the format
<ip>
<ip>:<port>
[<ipv6>]
[<ipv6>]:<port>
<onion>.onion
0xDDBBCCAA (IPv4 little-endian old pnSeeds format)
The output will be two data structures with the peers in binary format:
static SeedSpec6 pnSeed6_main[]={
...
}
static SeedSpec6 pnSeed6_test[]={
...
}
These should be pasted into `src/chainparamsseeds.h`.
'''
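# Example (illustrative): the nodes_main.txt line "1.2.3.4:9992" becomes the entry
# {{0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,0xff,0x01,0x02,0x03,0x04}, 9992}
# in pnSeed6_main, i.e. an IPv4 address embedded in the IPv6-mapped ::ffff:0:0/96 range.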
from __future__ import print_function, division
from base64 import b32decode
from binascii import a2b_hex
import sys, os
import re
# ipv4 in ipv6 prefix
pchIPv4 = bytearray([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff])
# tor-specific ipv6 prefix
pchOnionCat = bytearray([0xFD,0x87,0xD8,0x7E,0xEB,0x43])
def name_to_ipv6(addr):
if len(addr)>6 and addr.endswith('.onion'):
vchAddr = b32decode(addr[0:-6], True)
if len(vchAddr) != 16-len(pchOnionCat):
            raise ValueError('Invalid onion %s' % addr)
return pchOnionCat + vchAddr
elif '.' in addr: # IPv4
return pchIPv4 + bytearray((int(x) for x in addr.split('.')))
elif ':' in addr: # IPv6
sub = [[], []] # prefix, suffix
x = 0
addr = addr.split(':')
for i,comp in enumerate(addr):
if comp == '':
if i == 0 or i == (len(addr)-1): # skip empty component at beginning or end
continue
x += 1 # :: skips to suffix
assert(x < 2)
else: # two bytes per component
val = int(comp, 16)
sub[x].append(val >> 8)
sub[x].append(val & 0xff)
nullbytes = 16 - len(sub[0]) - len(sub[1])
assert((x == 0 and nullbytes == 0) or (x == 1 and nullbytes > 0))
return bytearray(sub[0] + ([0] * nullbytes) + sub[1])
elif addr.startswith('0x'): # IPv4-in-little-endian
return pchIPv4 + bytearray(reversed(a2b_hex(addr[2:])))
else:
raise ValueError('Could not parse address %s' % addr)
def parse_spec(s, defaultport):
match = re.match('\[([0-9a-fA-F:]+)\](?::([0-9]+))?$', s)
if match: # ipv6
host = match.group(1)
port = match.group(2)
elif s.count(':') > 1: # ipv6, no port
host = s
port = ''
else:
(host,_,port) = s.partition(':')
if not port:
port = defaultport
else:
port = int(port)
host = name_to_ipv6(host)
return (host,port)
def process_nodes(g, f, structname, defaultport):
g.write('static SeedSpec6 %s[] = {\n' % structname)
first = True
for line in f:
comment = line.find('#')
if comment != -1:
line = line[0:comment]
line = line.strip()
if not line:
continue
if not first:
g.write(',\n')
first = False
(host,port) = parse_spec(line, defaultport)
hoststr = ','.join(('0x%02x' % b) for b in host)
g.write(' {{%s}, %i}' % (hoststr, port))
g.write('\n};\n')
def main():
if len(sys.argv)<2:
print(('Usage: %s <path_to_nodes_txt>' % sys.argv[0]), file=sys.stderr)
exit(1)
g = sys.stdout
indir = sys.argv[1]
g.write('#ifndef QUAZ_CHAINPARAMSSEEDS_H\n')
g.write('#define QUAZ_CHAINPARAMSSEEDS_H\n')
g.write('/**\n')
g.write(' * List of fixed seed nodes for the quaz network\n')
g.write(' * AUTOGENERATED by contrib/seeds/generate-seeds.py\n')
g.write(' *\n')
g.write(' * Each line contains a 16-byte IPv6 address and a port.\n')
g.write(' * IPv4 as well as onion addresses are wrapped inside a IPv6 address accordingly.\n')
g.write(' */\n')
with open(os.path.join(indir,'nodes_main.txt'),'r') as f:
process_nodes(g, f, 'pnSeed6_main', 9992)
g.write('\n')
with open(os.path.join(indir,'nodes_test.txt'),'r') as f:
process_nodes(g, f, 'pnSeed6_test', 19992)
g.write('#endif // QUAZ_CHAINPARAMSSEEDS_H\n')
if __name__ == '__main__':
main()
|
[
"devquaz@gmail.com"
] |
devquaz@gmail.com
|
8b97fb6b8a7718a7b273586c5c11230785335bf5
|
51d348426c6e5fa79f2e77baf59bdbf8357d9f12
|
/week10/Инфоматрикс/d.массивы/1.py
|
39e914161d08fae03f9bd90984ada04bfe926359
|
[] |
no_license
|
Zhansayaas/webdev
|
c01325b13abf92cef13138d7ffc123cf9bc4f81a
|
dd054d0bcafc498eccc5f4626ab45fd8b46b3a3f
|
refs/heads/main
| 2023-04-10T23:33:30.469465
| 2021-04-17T10:21:53
| 2021-04-17T10:21:53
| 322,049,225
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 82
|
py
|
n=int(input())
a=input().split()
for i in range(0,n,2):
print(a[i],end=' ')
|
[
"noreply@github.com"
] |
Zhansayaas.noreply@github.com
|
b60bee7fa7a89fdb6ab3d2d5194fd86e56b4801b
|
0f0a7e594c53acbce5e93fad653abed2a3d02466
|
/zhidao/middlewares.py
|
190647c84d6e3a0b2e42f946e2fa9359d599844a
|
[] |
no_license
|
ElliottYan/crawler
|
de1e5a7ae1dbf0bd3bd3faa9224180ebb7051964
|
cab3e078ec85b13219bee836fa136c2f43eb4f2f
|
refs/heads/master
| 2020-03-24T06:12:58.864182
| 2018-07-27T03:05:52
| 2018-07-27T03:05:52
| 142,520,505
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,880
|
py
|
# -*- coding: utf-8 -*-
# Define here the models for your spider middleware
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/spider-middleware.html
from scrapy import signals
class ZhidaoSpiderMiddleware(object):
# Not all methods need to be defined. If a method is not defined,
# scrapy acts as if the spider middleware does not modify the
# passed objects.
@classmethod
def from_crawler(cls, crawler):
# This method is used by Scrapy to create your spiders.
s = cls()
crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
return s
    def process_spider_input(self, response, spider):
# Called for each response that goes through the spider
# middleware and into the spider.
# Should return None or raise an exception.
return None
    def process_spider_output(self, response, result, spider):
# Called with the results returned from the Spider, after
# it has processed the response.
# Must return an iterable of Request, dict or Item objects.
for i in result:
yield i
    def process_spider_exception(self, response, exception, spider):
# Called when a spider or process_spider_input() method
# (from other spider middleware) raises an exception.
# Should return either None or an iterable of Response, dict
# or Item objects.
pass
    def process_start_requests(self, start_requests, spider):
# Called with the start requests of the spider, and works
# similarly to the process_spider_output() method, except
# that it doesn’t have a response associated.
# Must return only requests (not items).
for r in start_requests:
yield r
def spider_opened(self, spider):
spider.logger.info('Spider opened: %s' % spider.name)
|
[
"elliottyan37@gmail.com"
] |
elliottyan37@gmail.com
|
0d9c11e159814d4603e92a7da12ac0993d494912
|
db012fc9716bb27e9ff22078bc17edb7745b3f13
|
/examples/plot_classifier_dl85_plot_tree.py
|
c1d3f1657cd0965bb5b814f5562272bc9936fd33
|
[
"MIT"
] |
permissive
|
VivanVatsa/pydl8.5
|
df15b99c313da29b32a0571c59e90225aaa71566
|
8686839d000e47375b5ed70ad42828e6c3eef6e6
|
refs/heads/master
| 2023-08-14T11:25:06.511972
| 2021-09-21T18:50:13
| 2021-09-21T18:50:13
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,406
|
py
|
"""
==============================================
DL85Classifier example to export tree as image
==============================================
"""
import numpy as np
from sklearn.metrics import confusion_matrix
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
from dl85 import DL85Classifier
import graphviz
print("######################################################################\n"
"# DL8.5 default classifier #\n"
"######################################################################")
# read the dataset and split into features and targets
dataset = np.genfromtxt("../datasets/anneal.txt", delimiter=' ')
X, y = dataset[:, 1:], dataset[:, 0]
# split the dataset into training and test sets
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=0)
clf = DL85Classifier(max_depth=2)
clf.fit(X_train, y_train)
y_pred = clf.predict(X_test)
# show results
print("Model built in", round(clf.runtime_, 4), "seconds")
print("Found tree:", clf.tree_)
print("Confusion Matrix below\n", confusion_matrix(y_test, y_pred))
print("Accuracy on training set =", round(clf.accuracy_, 4))
print("Accuracy on test set =", round(accuracy_score(y_test, y_pred), 4))
# print the tree
dot = clf.export_graphviz()
graph = graphviz.Source(dot, format="png")
graph.render("plots/anneal_odt")
|
[
"aglingael@gmail.com"
] |
aglingael@gmail.com
|
114910137765ee9246494ef8b775990951da0d1f
|
b321ca6310cd84bd8603fa9685365bb2a4acc945
|
/公司真题/拼多多/phone_number.py
|
144534cc23631ee5da9b7f732598e83ae9e6c492
|
[] |
no_license
|
baixiaoyanvision/python-algorithm
|
71b2fdf7d6b57be8a2960c44160f2a7459e153ae
|
6cbb61213af8264e083af1994522929fb7711616
|
refs/heads/master
| 2020-08-27T03:41:08.332322
| 2019-10-02T13:28:49
| 2019-10-02T13:28:49
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,096
|
py
|
# line1 = input()
# line2 = input()
line1 = '6 5'
line2 = '787585'
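# Problem (inferred from the code below): digits may be changed at a cost of
# |new - old| each; make at least K digits equal at minimum total cost, then print
# that cost and the lexicographically smallest resulting number.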
N, K = [int(i) for i in line1.split()]
line2 = [int(i) for i in line2]
result = []
line2_set = set(line2)
min_money = 99999999
for val in line2_set:
sub_vals = [abs(val - number) for number in line2]
sort_sub_vals = sorted( list(range(len(sub_vals))), key=lambda x: sub_vals[x] )
pay_money = sum([sub_vals[i] for i in sort_sub_vals[:K]])
equal_val = sub_vals[sort_sub_vals[K-1]]
copy_line2 = line2[:]
for i in sort_sub_vals[:K-1]:
copy_line2[i] = val
last_change = None
for i in range(len(copy_line2)):
if abs(copy_line2[i]-val) == equal_val:
last_change = i
copy_line2[last_change] = val
copy_line2 = [str(i) for i in copy_line2]
copy_line2 = ''.join(copy_line2)
if pay_money > min_money:
continue
elif pay_money < min_money:
result = []
result.append(copy_line2)
min_money = pay_money
else:
result.append(copy_line2)
result = sorted(result)
print(min_money)
print(result[0])
|
[
"18310523922@163.com"
] |
18310523922@163.com
|
ef3863b249697253ae98c02f073111c5d9fb56a5
|
2406724e872efc10c25fff5fb4b551c65cf4f298
|
/Codes/Pong_game.py
|
48b58da41228fb79140ac11d9c18925f1ca9ffb7
|
[] |
no_license
|
Pradhyuman12/Beginner-Hacktoberfest
|
c6d57b692f34f19b3334a7b364ca084f2b85bf95
|
5e0184d0825e5bafbb4625c31396bca9e6792c7a
|
refs/heads/main
| 2023-08-30T11:31:58.281561
| 2021-10-29T18:19:26
| 2021-10-29T18:19:26
| 412,496,299
| 0
| 0
| null | 2021-10-01T14:20:41
| 2021-10-01T14:20:40
| null |
UTF-8
|
Python
| false
| false
| 2,339
|
py
|
import turtle
wn = turtle.Screen()
wn.title("Pong Game")
wn.bgcolor("black")
wn.setup(width=800, height=600)
wn.tracer(0)  # disable auto-refresh; the main loop calls wn.update()
#Score
score_a = 0
score_b = 0
#Paddle A
paddle_a = turtle.Turtle()
paddle_a.speed(0)
paddle_a.shape("square")
paddle_a.color("white")
paddle_a.shapesize(stretch_wid=5, stretch_len=1)
paddle_a.penup()
paddle_a.goto(-350, 0)
#Paddle B
paddle_b = turtle.Turtle()
paddle_b.speed(0)
paddle_b.shape("square")
paddle_b.color("white")
paddle_b.shapesize(stretch_wid=5, stretch_len=1)
paddle_b.penup()
paddle_b.goto(350, 0)
#Ball
ball = turtle.Turtle()
ball.speed(0)
ball.shape("square")
ball.color("white")
ball.penup()
ball.goto(0, 0)
ball.dx = 2
ball.dy = -2
# Pen
pen = turtle.Turtle()
pen.speed(0)
pen.color("white")
pen.penup()
pen.hideturtle()
pen.goto(0, 260)
pen.write("Player A : 0 Player B : 0", align="center", font=("Courier", 24, "normal"))
#Function
def paddle_a_up():
y = paddle_a.ycor()
y += 20
paddle_a.sety(y)
def paddle_a_down():
y = paddle_a.ycor()
y -= 20
paddle_a.sety(y)
def paddle_b_up():
y = paddle_b.ycor()
y += 20
paddle_b.sety(y)
def paddle_b_down():
y = paddle_b.ycor()
y -= 20
paddle_b.sety(y)
#Keyboard binding
wn.listen()
wn.onkeypress(paddle_a_up, "w")
wn.onkeypress(paddle_a_down, "s")
wn.onkeypress(paddle_b_up, "Up")
wn.onkeypress(paddle_b_down, "Down")
#Main game loop
while True:
wn.update()
#MOVE THE BALL
ball.setx(ball.xcor() + ball.dx)
ball.sety(ball.ycor() + ball.dy)
# Border checking
if ball.ycor() > 290:
ball.sety(290)
ball.dy *= -1
if ball.ycor() < -290:
ball.sety(-290)
ball.dy *= -1
if ball.xcor() > 390:
ball.goto(0,0)
ball.dx *= -1
score_a += 1
pen.clear()
pen.write("Player A : {} Player B : {}".format(score_a, score_b), align="center", font=("Courier", 24, "normal"))
if ball.xcor() < -390:
ball.goto(0,0)
ball.dx *= -1
score_b += 1
pen.clear()
pen.write("Player A : {} Player B : {}".format(score_a, score_b), align="center", font=("Courier", 24, "normal"))
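    # paddle collisions: reverse the ball's horizontal direction at a paddle's face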
if (ball.xcor() > 340 and ball.xcor() < 350) and (ball.ycor() < paddle_b.ycor() + 40 and ball.ycor() > paddle_b.ycor() -40):
ball.setx(340)
ball.dx *= -1
if (ball.xcor() < -340 and ball.xcor() > -350) and (ball.ycor() < paddle_a.ycor() + 40 and ball.ycor() > paddle_a.ycor() -40):
ball.setx(-340)
ball.dx *= -1
|
[
"noreply@github.com"
] |
Pradhyuman12.noreply@github.com
|
c246469f12df1abde9b82eeadc65ac655fed42e2
|
add649416e475ef5febb207ec3c90ef504a4d5b7
|
/Marie Laure/django/mycalendar2/mycalendar2/wsgi.py
|
11e4e77fa13f33700e778d5217741c38e3dbf3d6
|
[] |
no_license
|
juliencampus/python
|
e3bc77453d449533db02bc69376ea6d1a4f2e9ba
|
323013fa32be74571ccd665cd3faa74ff8a905f3
|
refs/heads/main
| 2022-12-29T09:09:08.917610
| 2020-10-23T14:55:14
| 2020-10-23T14:55:14
| 303,637,772
| 0
| 4
| null | 2020-10-21T11:12:06
| 2020-10-13T08:29:19
|
Python
|
UTF-8
|
Python
| false
| false
| 399
|
py
|
"""
WSGI config for mycalendar2 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mycalendar2.settings')
application = get_wsgi_application()
|
[
"marie-laure.riedinger@le-campus-numerique.fr"
] |
marie-laure.riedinger@le-campus-numerique.fr
|
25055392f3e0fab759ac48b34d2c85f1944c0249
|
dbbb304265437b2d591d6d42953efc96bd8b4a24
|
/blog/views.py
|
c19f66271a7b0a97d3aca9fd694b2641d6ce7c38
|
[
"Apache-2.0"
] |
permissive
|
echessa/django-ex
|
647edbea8a9dd7159420af96dd3414bdaaf8aff3
|
33758d254886d3298acba20c0d231bcd6bb6c09a
|
refs/heads/master
| 2020-12-02T21:12:24.753773
| 2017-07-05T09:09:18
| 2017-07-05T09:09:18
| 96,271,914
| 1
| 1
| null | 2017-07-05T03:11:15
| 2017-07-05T03:11:15
| null |
UTF-8
|
Python
| false
| false
| 1,463
|
py
|
from django.shortcuts import render, redirect, get_object_or_404
from django.utils import timezone
from .forms import PostForm
from .models import Post
def post_list(request):
posts = Post.objects.filter(published_date__lte=timezone.now()).order_by('-published_date')
return render(request, 'blog/post_list.html', {'posts': posts})
def post_detail(request, pk):
post = get_object_or_404(Post, pk=pk)
return render(request, 'blog/post_detail.html', {'post': post})
def post_new(request):
if request.method == "POST":
form = PostForm(request.POST)
if form.is_valid():
post = form.save(commit=False)
post.author = request.user
post.published_date = timezone.now()
post.save()
return redirect('post_detail', pk=post.pk)
else:
form = PostForm()
return render(request, 'blog/post_edit.html', {'form': form})
def post_edit(request, pk):
post = get_object_or_404(Post, pk=pk)
if request.method == "POST":
form = PostForm(request.POST, instance=post)
if form.is_valid():
post = form.save(commit=False)
post.author = request.user
post.published_date = timezone.now()
post.save()
return redirect('post_detail', pk=post.pk)
else:
form = PostForm(instance=post)
return render(request, 'blog/post_edit.html', {'form': form})
|
[
"jokhessa@yahoo.com"
] |
jokhessa@yahoo.com
|
7df32dbb48957659309409618ea967eed738d6a8
|
ff1dbdb9baed0be909aaf60b7b78bef9441bfcd9
|
/review_homework/ifstatement_review.py
|
330b7c81edabb8d705433d5648705605630043d7
|
[] |
no_license
|
singh-sonali/AP-Compsci
|
ab2c923c79e67eebc021168c720de10618c58583
|
1f26e40b88532cfb9aee39d18fb2ea254d8a1083
|
refs/heads/master
| 2020-03-28T06:43:33.963516
| 2019-03-20T12:04:25
| 2019-03-20T12:04:25
| 147,855,508
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,381
|
py
|
# Partner 1:
# Partner 2:
''' Instructions:
Work with a partner to complete these tasks. Assume that all variables are declared; you need only write the if-statement using the variables indicated in the description. Write your solution below the commented description.
'''
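''' Example (added for illustration only, not one of the graded tasks):
Variable x is an int. If x is negative, print negative.
'''
# One possible solution (commented out since x is not declared in this file):
# if x < 0:
#     print("negative")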
''' 1.
Variable grade is a character. If it is an A, print good work.
'''
''' 2.
Variable yards is an int. If it is less than 17, multiply yards by 2.
'''
''' 3.
Variable success is a boolean. If something is a success, print congratulations.
'''
''' 4.
Variable word is a String. If the string's second letter is 'f', print fun.
'''
''' 5.
Variable temp is a float. Variable celsius is a boolean. If celsius is true, convert to fahrenheit, storing the result in temp. F = 1.8C + 32.
'''
''' 6.
Variable numItems is an int. Variable averageCost and totalCost are floats. If there are items, calculate the average cost. If there are no items, print no items.
'''
''' 7.
Variable pollution is a float. Variable cutoff is a float. If pollution is less than the cutoff, print safe condition. If pollution is greater than or equal to cutoff, print unsafe condition.
'''
''' 8.
Variable score is a float, and grade is a char. Store the appropriate letter grade in the grade variable according to this chart.
F: <60; B: 80-89; D: 60-69; A: 90-100; C: 70-79.
'''
''' 9.
Variable letter is a char. If it is a lowercase letter, print lowercase. If it is an uppercase, print uppercase. If it is 0-9, print digit. If it is none of these, print symbol.
'''
''' 10.
Variable neighbors is an int. Determine where you live based on your neighbors.
50+: city; 25+: suburbia; 1+: rural; 0: middle of nowhere.
'''
''' 11.
Variables doesSignificantWork, makesBreakthrough, and nobelPrizeCandidate are booleans. A nobel prize winner does significant work and makes a break through. Store true in nobelPrizeCandidate if they merit the award and false if they don't.
'''
''' 12.
Variable tax is a boolean, price and taxRate are floats. If there is tax, update price to reflect the tax you must pay.
'''
''' 13.
Variable word and type are Strings. Determine (not super accurately) what kind of word it is by looking at how it ends.
-ly: adverb; -ing; gerund; -s: plural; something else: error
'''
''' 14.
If integer variable currentNumber is odd, change its value so that it is now 3 times currentNumber plus 1, otherwise change its value so that it is now half of currentNumber (rounded down when currentNumber is odd).
'''
''' 15.
Assign true to the boolean variable leapYear if the integer variable year is a leap year. (A leap year is a multiple of 4, and if it is a multiple of 100, it must also be a multiple of 400.)
'''
''' 16.
Determine the smallest of three ints, a, b and c. Store the smallest one of the three in int result.
'''
''' 17.
If an int, number, is even, a muliple of 5, and in the range of -100 to 100, then it is a special number. Store whether a number is special or not in the boolean variable special.
'''
''' 18.
Variable letter is a char. Determine if the character is a vowel or not by storing a letter code in the int variable code.
a/e/o/u/i: 1; y: -1; everything else: 0
'''
''' 19.
Given a string dayOfWeek, determine if it is the weekend. Store the result in boolean isWeekend.
'''
''' 20.
Given a String variable month, store the number of days in the given month in integer variable numDays.
'''
''' 21.
Three integers, angle1, angle2, and angle3, supposedly made a triangle. Store whether the three given angles make a valid triangle in boolean variable validTriangle.
'''
''' 22.
Given an integer, electricity, determine someone's monthly electric bill, float payment, following the rubric below.
First 50 units: 50 cents/unit
Next 100 units: 75 cents/unit
Next 100 units: 1.20/unit
For units above 250: 1.50/unit, plus an additional 20% surcharge.
'''
''' 23.
String, greeting, stores a greeting. String language stores the language. If the language is English, greeting is Hello. If the language is French, the greeting is Bonjour. If the language is Spanish, the greeting is Hola. If the language is something else, the greeting is something of your choice.
'''
''' 24.
Generate a phrase and store it in String phrase, given an int number and a String noun. Here are some sample phrases:
number: 5; noun: dog; phrase: 5 dogs
number: 1; noun: cat; phrase: 1 cat
number: 0; noun: elephant; phrase: 0 elephants
number: 3; noun: human; phrase: 3 humans
number: 3; noun: home; phrase: 3 homes
'''
''' 25.
If a string, userInput, is bacon, print out, "Why did you type bacon?". If it is not bacon, print out, "I like bacon."
'''
''' 26.
Come up with your own creative tasks someone could complete to practice if-statements. Also provide solutions.
'''
''' Task 1:
'''
# solution
''' Task 2:
'''
# solution
''' Task 3:
'''
# solution
''' Sources
http://www.bowdoin.edu/~ltoma/teaching/cs107/spring05/Lectures/allif.pdf
http://www.codeforwin.in/2015/05/if-else-programming-practice.html
Ben Dreier for pointing out some creative boolean solutions.
'''
|
[
"ssingh20@choate.edu"
] |
ssingh20@choate.edu
|
9ee04dc56eec32ca912fa8b81136a49356550e03
|
dfbe04629c68e49c0671b0ed1890d82d96180164
|
/graphsage/datafetcher.py
|
28421aa404bbc8ea1a9ba08303cc535fc7c67f45
|
[] |
no_license
|
bluelancer/MLAdv2020Proj
|
417d9c3853297541d48036f3af5999aea68465ad
|
7057492f7b4f171aa1bdf5a6f792c5fbdc679d35
|
refs/heads/main
| 2023-04-30T02:47:49.581387
| 2021-01-12T11:47:17
| 2021-01-12T11:47:17
| 325,494,559
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,263
|
py
|
from collections import defaultdict
from scipy.io import mmread
import numpy as np
class DataFetcher:
def __init__(self):
return
def load(self, dataset):
if dataset == 'cora':
return self.load_cora()
elif dataset == 'pubmed':
return self.load_pubmed()
def load_cora(self):
num_nodes = 2708
num_feats = 1433
feat_data = np.zeros((num_nodes, num_feats))
labels = np.empty((num_nodes, 1), dtype=np.int64)
node_map = {}
label_map = {}
with open("../dataset_UNRL/citation/cora/cora.content") as fp:
for i, line in enumerate(fp):
info = line.strip().split()
for j in range(len(info) - 2):
feat_data[i, j] = float(info[j + 1])
# feat_data[i,:] = map(float(), info[1:-1])
node_map[info[0]] = i
if not info[-1] in label_map:
label_map[info[-1]] = len(label_map)
labels[i] = label_map[info[-1]]
adj_lists = defaultdict(set)
link_list = []
with open("../dataset_UNRL/citation/cora/cora.cites") as fp:
for i, line in enumerate(fp):
info = line.strip().split()
paper1 = node_map[info[0]]
paper2 = node_map[info[1]]
adj_lists[paper1].add(paper2)
adj_lists[paper2].add(paper1)
link_list.append((paper1, paper2, 1))
return feat_data, labels, adj_lists, link_list
def load_pubmed(self):
# hardcoded for simplicity...
num_nodes = 19717
num_feats = 500
feat_data = np.zeros((num_nodes, num_feats))
labels = np.empty((num_nodes, 1), dtype=np.int64)
node_map = {}
with open("../dataset_UNRL/citation/pubmed-data/Pubmed-Diabetes.NODE.paper.tab") as fp:
fp.readline()
feat_map = {entry.split(":")[1]: i - 1 for i, entry in enumerate(fp.readline().split("\t"))}
for i, line in enumerate(fp):
info = line.split("\t")
node_map[info[0]] = i
labels[i] = int(info[1].split("=")[1]) - 1
for word_info in info[2:-1]:
word_info = word_info.split("=")
feat_data[i][feat_map[word_info[0]]] = float(word_info[1])
adj_lists = defaultdict(set)
with open("../dataset_UNRL/citation/pubmed-data/Pubmed-Diabetes.DIRECTED.cites.tab") as fp:
fp.readline()
fp.readline()
link_list = []
for line in fp:
info = line.strip().split("\t")
paper1 = node_map[info[1].split(":")[1]]
paper2 = node_map[info[-1].split(":")[1]]
adj_lists[paper1].add(paper2)
adj_lists[paper2].add(paper1)
link_list.append((paper1, paper2, 1))
return feat_data, labels, adj_lists, link_list
def load_blogcata(self):
filename = '../dataset_UNRL/soc/soc-BlogCatalog/soc-BlogCatalog.mtx'
link_matrix = mmread(filename)
return link_matrix
if __name__ == '__main__':
datafetcher = DataFetcher()
datafetcher.load_blogcata()
|
[
"zhaoyu@zhaoyudeMacBook-Pro.local"
] |
zhaoyu@zhaoyudeMacBook-Pro.local
|
34fc9717d6ba5477e1aa8e8cc9c71b46b8ee7fd2
|
2f2feae3dee5847edbf95c1eeb14e656490dae35
|
/2022/day_13_distress_signal_1.py
|
e89f9fb5f20ecbd78b7b38f8d58eca40028031af
|
[] |
no_license
|
olga3n/adventofcode
|
32597e9044e11384452410b7a7dda339faf75f32
|
490a385fb8f1c45d22deb27bf21891e193fe58a2
|
refs/heads/master
| 2023-01-07T09:19:04.090030
| 2022-12-25T13:31:22
| 2022-12-25T13:31:22
| 163,669,598
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,209
|
py
|
#!/usr/bin/env python3
import sys
import json
from typing import Iterable, List, Any, Tuple, Optional
def is_right_order(left: List[Any], right: List[Any]) -> Optional[bool]:
iter_left = iter(left)
iter_right = iter(right)
while True:
item_left = next(iter_left, None)
item_right = next(iter_right, None)
if item_left is None and item_right is None:
return None
if item_left is None:
return True
if item_right is None:
return False
if isinstance(item_left, int) and isinstance(item_right, int):
if item_left < item_right:
return True
if item_left > item_right:
return False
if item_right == item_left:
continue
if isinstance(item_left, int):
item_left = [item_left]
if isinstance(item_right, int):
item_right = [item_right]
value = is_right_order(item_left, item_right)
if value is not None:
return value
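# e.g. is_right_order([1, 1, 3, 1, 1], [1, 1, 5, 1, 1]) -> True (the first differing
# integers 3 < 5 decide the order)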
def build_pairs(data: Iterable[str]) -> Iterable[Tuple[List[Any], List[Any]]]:
buf = []
for line in data:
if not line.strip():
continue
buf.append(line)
if len(buf) == 2:
yield json.loads(buf[0]), json.loads(buf[1])
buf = []
def right_order_pairs(data: Iterable[str]) -> int:
return sum(
index + 1 for index, pair in enumerate(build_pairs(data))
if is_right_order(pair[0], pair[1])
)
def test_right_order_pairs():
data = [
'[1,1,3,1,1]',
'[1,1,5,1,1]',
'',
'[[1],[2,3,4]]',
'[[1],4]',
'',
'[9]',
'[[8,7,6]]',
'',
'[[4,4],4,4]',
'[[4,4],4,4,4]',
'',
'[7,7,7,7]',
'[7,7,7]',
'',
'[]',
'[3]',
'',
'[[[]]]',
'[[]]',
'',
'[1,[2,[3,[4,[5,6,7]]]],8,9]',
'[1,[2,[3,[4,[5,6,0]]]],8,9]'
]
assert right_order_pairs(data) == 13
def main():
data = sys.stdin
result = right_order_pairs(data)
print(result)
if __name__ == '__main__':
main()
|
[
"olga3n@gmail.com"
] |
olga3n@gmail.com
|
3351932d3d3a75e35b6b1fcbd967fa8b054bd65b
|
13a32b92b1ba8ffb07e810dcc8ccdf1b8b1671ab
|
/home--tommy--mypy/mypy/lib/python2.7/site-packages/theano/sandbox/cuda/tests/test_tensor_op.py
|
cb9162354ac7fa9120cf4dd3b05d616e784e0f36
|
[
"Unlicense"
] |
permissive
|
tommybutler/mlearnpy2
|
8ec52bcd03208c9771d8d02ede8eaa91a95bda30
|
9e5d377d0242ac5eb1e82a357e6701095a8ca1ff
|
refs/heads/master
| 2022-10-24T23:30:18.705329
| 2022-10-17T15:41:37
| 2022-10-17T15:41:37
| 118,529,175
| 0
| 2
|
Unlicense
| 2022-10-15T23:32:18
| 2018-01-22T23:27:10
|
Python
|
UTF-8
|
Python
| false
| false
| 5,283
|
py
|
"""
This file test tensor op that should also operate on CudaNdaray.
"""
from __future__ import absolute_import, print_function, division
from nose.plugins.skip import SkipTest
from nose_parameterized import parameterized
import numpy
import theano
from theano import tensor
import theano.tensor as T
import theano.tests.unittest_tools as utt
# Skip test if cuda_ndarray is not available.
import theano.sandbox.cuda as cuda
from theano.tensor.nnet.tests import test_conv3d2d
if cuda.cuda_available is False:
raise SkipTest('Optional package cuda disabled')
if theano.config.mode == 'FAST_COMPILE':
mode_with_gpu = theano.compile.mode.get_mode('FAST_RUN').including('gpu')
mode_without_gpu = theano.compile.mode.get_mode('FAST_RUN').excluding('gpu')
else:
mode_with_gpu = theano.compile.mode.get_default_mode().including('gpu')
mode_without_gpu = theano.compile.mode.get_default_mode().excluding('gpu')
def test_shape_i():
x = cuda.ftensor3()
v = cuda.CudaNdarray(numpy.zeros((3, 4, 5), dtype='float32'))
f = theano.function([x], x.shape[1])
topo = f.maker.fgraph.toposort()
assert f(v) == 4
if theano.config.mode != 'FAST_COMPILE':
assert len(topo) == 1
assert isinstance(topo[0].op, T.opt.Shape_i)
def test_shape():
x = cuda.ftensor3()
v = cuda.CudaNdarray(numpy.zeros((3, 4, 5), dtype='float32'))
f = theano.function([x], x.shape)
topo = f.maker.fgraph.toposort()
assert numpy.all(f(v) == (3, 4, 5))
if theano.config.mode != 'FAST_COMPILE':
assert len(topo) == 4
assert isinstance(topo[0].op, T.opt.Shape_i)
assert isinstance(topo[1].op, T.opt.Shape_i)
assert isinstance(topo[2].op, T.opt.Shape_i)
assert isinstance(topo[3].op, T.opt.MakeVector)
def test_softmax_optimizations():
from theano.tensor.nnet.nnet import softmax, crossentropy_categorical_1hot
x = tensor.fmatrix('x')
one_of_n = tensor.lvector('one_of_n')
op = crossentropy_categorical_1hot
op(x, one_of_n)
fgraph = theano.gof.FunctionGraph(
[x, one_of_n],
[op(softmax(x), one_of_n)])
assert fgraph.outputs[0].owner.op == op
mode_with_gpu.optimizer.optimize(fgraph)
assert str(fgraph.outputs[0].owner.op) == 'OutputGuard'
assert fgraph.outputs[0].owner.inputs[0].owner.op == cuda.host_from_gpu
assert fgraph.outputs[0].owner.inputs[0].owner.inputs[0].owner.op == cuda.nnet.gpu_crossentropy_softmax_argmax_1hot_with_bias
def test_may_share_memory_cuda():
from theano.misc.may_share_memory import may_share_memory
a = cuda.CudaNdarray(numpy.zeros((3, 4), dtype='float32'))
b = cuda.CudaNdarray(numpy.zeros((3, 4), dtype='float32'))
na = numpy.zeros((3, 4))
nb = numpy.zeros((3, 4))
va = a.view()
vb = b.view()
ra = a.reshape((4, 3))
rb = b.reshape((4, 3))
# can't test the transpose as ta._strides = is not implemented
# manual transpose of a
# ta = a.reshape((4,3))
# ta._strides = (ta._strides[1],ta._strides[0])#not implemented
# elem_size=elem_size = numpy.zeros(0,dtype=a.dtype).dtype.itemsize
# ta.gpudata += ta.size*elem_size
for a_, b_, rep in [(a, a, True), (b, b, True), (a, b, False),
(a, na, False), (b, nb, False),
(na, b, False), (nb, a, False),
(a, va, True), (b, vb, True),
(va, b, False), (a, vb, False),
(a, ra, True), (b, rb, True),
(ra, b, False), (a, rb, False), ]:
assert may_share_memory(a_, b_) == rep
assert may_share_memory(b_, a_) == rep
# test that it raise error when needed.
for a_, b_, rep in [(a, (0,), False), (a, 1, False), (a, None, False)]:
assert may_share_memory(a_, b_, False) == rep
assert may_share_memory(b_, a_, False) == rep
try:
may_share_memory(a_, b_)
raise Exception("An error was expected")
except TypeError:
pass
try:
may_share_memory(b_, a_)
raise Exception("An error was expected")
except TypeError:
pass
def test_deepcopy():
a = cuda.fmatrix()
a_v = cuda.CudaNdarray(numpy.zeros((3, 4), dtype='float32'))
# We force the c code to check that we generate c code
mode = theano.Mode("c", mode_with_gpu.optimizer)
f = theano.function([a], a, mode=mode)
theano.printing.debugprint(f)
out = f(a_v)
assert out is not a_v
assert numpy.allclose(numpy.asarray(a_v), numpy.asarray(out))
# We force the python linker as the default code should work for this op
mode = theano.Mode("py", mode_with_gpu.optimizer)
f = theano.function([a], a, mode=mode)
theano.printing.debugprint(f)
out = f(a_v)
assert out is not a_v
assert numpy.allclose(numpy.asarray(a_v), numpy.asarray(out))
def test_get_diagonal_subtensor_view():
test_conv3d2d.test_get_diagonal_subtensor_view(wrap=cuda.CudaNdarray)
@parameterized.expand(('valid', 'full'), utt.custom_name_func)
def test_conv3d(border_mode):
test_conv3d2d.check_conv3d(border_mode=border_mode,
mode=mode_with_gpu,
shared=cuda.shared_constructor)
|
[
"tbutler.github@internetalias.net"
] |
tbutler.github@internetalias.net
|
98cbe8d9f4b12239cf1d517d7cd9c165e3ac2876
|
83df3d7773a4c7c3da8824cd3e6b3f0d6e6cfa07
|
/world.py
|
92f49a7703868c98e84f84d8b7d82c02437552a6
|
[] |
no_license
|
azmzing/c
|
d4f8690ae0ad43c6441b6559995042ebe730108e
|
69b2511683d709961034ec46814400bccae0ae5b
|
refs/heads/master
| 2020-03-15T23:42:45.118141
| 2018-05-07T06:13:12
| 2018-05-07T06:13:12
| 132,399,355
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 16
|
py
|
print"b hellow
|
[
"amazing.zanjun@outlook.com"
] |
amazing.zanjun@outlook.com
|
27687dfa574c3d83cfd55d79f9b60849782917ff
|
09a32f98165c20fbc0c519f826cbd445b1906d54
|
/merge_opt.py
|
03c8ad504fd12016c93a0f874d11647442ea508f
|
[] |
no_license
|
huuthai37/LSTM-Consensus
|
d2ec2d63073a1a996c827c1f71a8edc4e763a8dc
|
97233cf0a4176b0a0fc69c4c7051a5289628e48f
|
refs/heads/master
| 2020-03-17T05:53:33.663364
| 2018-07-13T04:12:50
| 2018-07-13T04:12:50
| 133,332,710
| 2
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,447
|
py
|
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('-d', '--dataset', help='Dataset', default='ucf101')
parser.add_argument('-s', '--segment', help='Number of segments', default=3, type=int)
parser.add_argument('-debug', '--debug', help='Number of classes', default=1, type=int)
args = parser.parse_args()
print args
import cv2
import os
import sys
import random
import numpy as np
import config
import pickle
# Parse command-line options
dataset = args.dataset
num_seq = args.segment
if args.debug == 1:
debug = True
else:
debug = False
# Configure data folders
server = config.server()
data_input_folder = config.data_input_path()
output_path = config.data_output_path()
out_file_folder = r'{}database/'.format(output_path)
data_file = r'{}data-{}-{}.pickle'.format(out_file_folder,dataset,num_seq)
count = 0
with open(data_file,'rb') as f1:
data = pickle.load(f1)
length_data = len(data)
data_folder_opt = r'{}{}-opt/'.format(output_path,dataset)
data_folder_seq_opt = r'{}{}-seq-opt/'.format(output_path,dataset)
if not os.path.isdir(data_folder_seq_opt + 'u'):
    os.makedirs(data_folder_seq_opt + 'u')  # create data_folder_seq_opt + 'u'/
print 'Create directory ' + data_folder_seq_opt + 'u'
if not os.path.isdir(data_folder_seq_opt + 'v'):
    os.makedirs(data_folder_seq_opt + 'v')  # create data_folder_seq_opt + 'v'/
print 'Create directory ' + data_folder_seq_opt + 'v'
for l in range(length_data):
path_video = data[l][0]
render_opt = data[l][1]
name_video = path_video.split('/')[1]
u = data_folder_opt + 'u/' + name_video + '/frame'
v = data_folder_opt + 'v/' + name_video + '/frame'
if not os.path.isdir(data_folder_seq_opt + 'u/' + name_video):
        os.makedirs(data_folder_seq_opt + 'u/' + name_video)  # create data_folder_seq_opt + 'u/' + name_video/
print 'Create directory ' + data_folder_seq_opt + 'u/' + name_video
if not os.path.isdir(data_folder_seq_opt + 'v/' + name_video):
        os.makedirs(data_folder_seq_opt + 'v/' + name_video)  # create data_folder_seq_opt + 'v/' + name_video/
print 'Create directory ' + data_folder_seq_opt + 'v/' + name_video
return_data = []
if (render_opt[0] >= 0):
render = render_opt
else:
render = [render_opt[1]]
len_render_opt = len(render)
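    # for each start frame, vertically stack 10 consecutive 256x340 optical-flow
    # frames into one 2560x340 image per direction (u and v)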
for k in range(len_render_opt):
nstack_u = np.zeros((2560,340))
nstack_v = np.zeros((2560,340))
for i in range(10):
img_u = cv2.imread(u + str(render[k] + 5 + i).zfill(6) + '.jpg', 0)
img_v = cv2.imread(v + str(render[k] + 5 + i).zfill(6) + '.jpg', 0)
# img_u = np.ones((240,320))
# img_v = np.ones((240,320))
if (img_u is None) | (img_v is None):
print 'Error render optical flow'
print(u + str(render[k] + 5 + i).zfill(6) + '.jpg')
sys.exit()
hh, ww = img_u.shape
if (hh != 256) | (ww != 340):
img_u = cv2.resize(img_u, (340, 256))
img_v = cv2.resize(img_v, (340, 256))
nstack_u[(256*i):(256*(i+1)),:] = img_u
nstack_v[(256*i):(256*(i+1)),:] = img_v
os.chdir(data_folder_seq_opt + 'u/' + name_video)
cv2.imwrite('{}.jpg'.format(k),nstack_u)
os.chdir(data_folder_seq_opt + 'v/' + name_video)
cv2.imwrite('{}.jpg'.format(k),nstack_v)
if l%1000 == 0:
print l
|
[
"huuthai37@gmail.com"
] |
huuthai37@gmail.com
|
c0d29ea3e56d0a9a1129476105c243a8a2566772
|
8d2a124753905fb0455f624b7c76792c32fac070
|
/pytnon-month01/周六练习-practice on saturday/独立完成/OOP-fanb-1_student_manager_system.py
|
370a4186757ac84e2f949eca27cb01e393c5348c
|
[] |
no_license
|
Jeremy277/exercise
|
f38e4f19aae074c804d265f6a1c49709fd2cae15
|
a72dd82eb2424e4ae18e2f3e9cc66fc4762ec8fa
|
refs/heads/master
| 2020-07-27T09:14:00.286145
| 2019-09-17T11:31:44
| 2019-09-17T11:31:44
| 209,041,629
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,533
|
py
|
# Student information management system:
# Data model class: StudentModel
#     Data: id, name, age, score
class StudentModel:
    def __init__(self, name, age, score, id=0):
        self.name = name
        self.age = age
        self.score = score
        self.id = id


# Logic controller class: StudentManagerController
#     Data: student list __stu_list
#     (private attribute, exposed as a read-only property)
#     Behavior: get list stu_list, add student add_student, remove student remove_student,
#     update student update_student, sort by score order_by_score.
class StudentManagerController:
    __stu_id = 1000
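    # class-level counter: each new student is assigned the next id (1001, 1002, ...)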
    def __init__(self):  # no parameters are needed here besides self
        self.__stu_list = []  # start with an empty list

    @property
    def stu_list(self):
        return self.__stu_list

    def add_student(self, stu):
        StudentManagerController.__stu_id += 1
        stu.id = StudentManagerController.__stu_id
        self.__stu_list.append(stu)

    def remove_student(self, id):
        for item in self.__stu_list:
            if item.id == id:
                self.__stu_list.remove(item)
                return True

    def update_student(self, stu):
        for item in self.__stu_list:
            if item.id == stu.id:
                item.name = stu.name
                item.age = stu.age
                item.score = stu.score
                return True

    def order_by_score(self):
        for i in range(len(self.__stu_list) - 1):
            for j in range(i + 1, len(self.__stu_list)):
                if self.__stu_list[i].score > self.__stu_list[j].score:
                    self.__stu_list[i], self.__stu_list[j] = self.__stu_list[j], self.__stu_list[i]


# View class: StudentManagerView
#     Data: logic controller object __manager
#     Behavior: show menu __display_menu, select menu item __select_menu_item, entry point main,
#     input students __input_students, output students __output_students,
#     delete student __delete_student, modify student info __modify_student
class StudentManagerView():
    def __init__(self):
        self.__manager = StudentManagerController()

    def __display_menu(self):
        print('''
        Student Information Management System 1.0
        +--------------------------+
        | 0) Exit the system       |
        | 1) Add student info      |
        | 2) Show student info     |
        | 3) Delete student info   |
        | 4) Modify student info   |
        | 5) Sort by score         |
        +--------------------------+
        ''')

    def main(self):
        choice = None
        while choice != 0:
            self.__display_menu()
            choice = input('Please enter an option: ')
            if choice == '0':
                print('Thanks for using the system. Bye!')
                break
            elif choice == '1':
                self.__input_students()
            elif choice == '2':
                self.__output_students()
            elif choice == '3':
                self.__delete_student()
            elif choice == '4':
                self.__modify_student()
            elif choice == '5':
                self.__sort_by_score()
            else:
                print('Please enter a valid option!')

    def __input_students(self):
        name = input('Enter the student name: ')
        age = int(input('Enter the student age: '))
        score = int(input('Enter the student score: '))
        stu = StudentModel(name, age, score)
        self.__manager.add_student(stu)
        print('Student added successfully!')

    def __output_students(self):
        print('Student info:')
        for item in self.__manager.stu_list:
            print(item.id, item.name, item.age, item.score)

    def __delete_student(self):
        stu_id = int(input('Enter the student id: '))
        if self.__manager.remove_student(stu_id):
            print('Student deleted successfully!')
        else:
            print('Failed to delete the student!')

    def __modify_student(self):
        id = int(input('Enter the id of the student to modify: '))
        name = input('Enter the new student name: ')
        age = int(input('Enter the new student age: '))
        score = int(input('Enter the new student score: '))
        stu = StudentModel(name, age, score, id)
        if self.__manager.update_student(stu):
            print('Student updated successfully!')
        else:
            print('Failed to update the student!')

    def __sort_by_score(self):
        self.__manager.order_by_score()
        print('Sorted successfully!')


view = StudentManagerView()
view.main()

# 1. Test the logic controller code
# Test adding students
# manger = StudentManagerController()
# s01 = StudentModel('许瑶', 18, 98)
# s02 = StudentModel('许仙', 16, 99)
# s03 = StudentModel('小青', 15, 79)
# s04 = StudentModel('姐夫', 15, 79)
# manger.add_student(s01)
# manger.add_student(s02)
# manger.add_student(s03)
# manger.add_student(s04)
# for item in manger.stu_list:
#     print(item.id, item.name, item.age, item.score)
# manger.stu_list holds the student objects
# print(manger.stu_list[1].name)
# Test deleting a student
# manger.remove_student(1004)
# for item in manger.stu_list:
#     print('After delete:', item.id, item.name)
# Test updating a student
# manger.update_student(StudentModel('娘子', 19, 80, 1001))
# for item in manger.stu_list:
#     print('After update:', item.id, item.name, item.age, item.score)
# Test sorting by score
# manger.order_by_score()
# for item in manger.stu_list:
#     print('Sorted by score ascending:', item.id, item.name, item.age, item.score)
|
[
"13572093824@163.com"
] |
13572093824@163.com
|
edbc5843172b296c275bf4d38092d8dabd6213fe
|
bd3b1eaedfd0aab45880c100b86bc4714149f5cd
|
/student/dyp1/11.py
|
c6e63aa6b223b8b5cdbb13353fe5872beeeea0a7
|
[] |
no_license
|
ophwsjtu18/ohw19f
|
a008cd7b171cd89fa116718e2a5a5eabc9f7a93e
|
96dedf53a056fbb4d07c2e2d37d502171a6554a6
|
refs/heads/master
| 2020-08-08T12:59:38.875197
| 2020-04-01T10:38:14
| 2020-04-01T10:38:14
| 213,835,959
| 3
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,178
|
py
|
import numpy as np
import cv2
capture = cv2.VideoCapture(0)
face_cascade = cv2.CascadeClassifier('C:\\Users\\DING-DING\\AppData\\Local\\Programs\\Python\\Python36\\Lib\\site-packages\\cv2\\data\\haarcascade_frontalface_default.xml')
eye_cascade = cv2.CascadeClassifier('C:\\Users\\DING-DING\\AppData\\Local\\Programs\\Python\\Python36\\Lib\\site-packages\\cv2\\data\\haarcascade_eye.xml')
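# The Haar cascade XML files ship with the opencv-python package; the absolute
# Windows paths above are specific to this machine.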
while(True):
    ret, frame = capture.read()
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    faces = face_cascade.detectMultiScale(gray, 1.3, 5)
    for (x, y, w, h) in faces:
        img = cv2.rectangle(gray, (x, y), (x + w, y + h), (255, 0, 0), 2)
        roi_gray = gray[y:y + h, x:x + w]
        roi_color = img[y:y + h, x:x + w]

    def hhh(lists):
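        # draw a 3x3 grid of sub-rectangles inside each detected face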
        for (x, y, w, h) in lists:
            a = x
            for num in range(1, 4):
                for num in range(1, 4):
                    cv2.rectangle(img, (x, y), (x + int(w / 3), y + int(h / 3)), (255, 0, 0), 2)
                    x += int(w / 3)
                x = a
                y += int(h / 3)

    hhh(faces)
    cv2.imshow('frame', gray)
    cv2.waitKey(0)

cv2.destroyAllWindows()
|
[
"noreply@github.com"
] |
ophwsjtu18.noreply@github.com
|
00333130e10a7ca7f1207ec2896cc61f017a3839
|
24e2b10c6d13deac27bd4b8af07907de4c9e8bcd
|
/traingle.py
|
413d3bf8ba2ac1ff1e4a678063e1efb7d2d49f1a
|
[] |
no_license
|
adilreza/docker-env-var-python
|
a5b2e0269511d588bc2de30ab35f09a86138419a
|
156d3fa9b3fc25e56a40edc091818d6c86ba017b
|
refs/heads/main
| 2023-03-29T12:02:06.158341
| 2021-04-08T12:25:56
| 2021-04-08T12:25:56
| 355,896,730
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 360
|
py
|
import os
def triangle(n):
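    # print an n-row star pyramid: leading spaces shrink while stars grow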
    k = n - 1
    for i in range(0, n):
        for j in range(0, k):
            print(end=" ")
        k = k - 1
        for j in range(0, i + 1):
            print("* ", end="")
        print("\r")
n = os.environ.get('TR_NUMBER', '5')  # fall back to 5 rows when the variable is unset, so int(n) cannot crash
# TR_NUMBER=8 python3 traingle.py
# docker run -e TR_NUMBER=7 bff2ba77dbd1
nn = int(n)
triangle(nn)
|
[
"adil.reza@selise.ch"
] |
adil.reza@selise.ch
|
9f704f4065654f070e858bc08858abfdfaeb1745
|
796198b4613ae30ff7735d7a8473064b8ecb0247
|
/abc140/D.py
|
cc3f77d7f92a52f460d82787bdd62c4304943b30
|
[] |
no_license
|
Tomoki-Kikuta/atcoder
|
993cb13ae30435d02ea2e743cf3cead1a7882830
|
97b886de867575084bd1a70310a2a9c1c514befe
|
refs/heads/master
| 2021-07-16T15:14:00.706609
| 2020-06-29T06:15:13
| 2020-06-29T06:15:13
| 184,001,549
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 131
|
py
|
n, k = map(int, input().split())
s = input()
h = 0
for i in range(n - 1):
    if s[i] == s[i + 1]:
        h += 1
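# h counts adjacent equal pairs (current happiness); one operation changes at
# most two pair boundaries, so k operations add at most 2*k, capped at n-1.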
print(min([n-1,h+2*k]))
|
[
"tomoki0819@akane.waseda.jp"
] |
tomoki0819@akane.waseda.jp
|
713b479653ed7764eabad8e061233c7fc1086f24
|
0c2ca3b3c7f307c29f45957e87ed940c23571fae
|
/fhirclient/models/bodysite_tests.py
|
a3aaa3593967b5390640ec04095fcc47317b4e4a
|
[
"LicenseRef-scancode-warranty-disclaimer",
"Apache-2.0"
] |
permissive
|
myungchoi/client-py-1.0.3
|
49c3d15b8dfb845e7cbc933084ed5fcc37e7c4ed
|
08e4e5828fb461c105907fd454b19dfc8463aad8
|
refs/heads/master
| 2021-06-25T04:36:26.952685
| 2021-02-11T16:27:26
| 2021-02-11T16:27:26
| 209,669,881
| 0
| 0
|
NOASSERTION
| 2021-03-20T01:45:42
| 2019-09-20T00:11:10
|
Python
|
UTF-8
|
Python
| false
| false
| 2,663
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#  Generated from FHIR 1.0.2.7202 on 2016-03-23.
#  2016, SMART Health IT.

import os
import io
import unittest
import json
from . import bodysite
from .fhirdate import FHIRDate


class BodySiteTests(unittest.TestCase):
    def instantiate_from(self, filename):
        datadir = os.environ.get('FHIR_UNITTEST_DATADIR') or ''
        with io.open(os.path.join(datadir, filename), 'r', encoding='utf-8') as handle:
            js = json.load(handle)
            self.assertEqual("BodySite", js["resourceType"])
        return bodysite.BodySite(js)

    def testBodySite1(self):
        inst = self.instantiate_from("bodysite-example.json")
        self.assertIsNotNone(inst, "Must have instantiated a BodySite instance")
        self.implBodySite1(inst)

        js = inst.as_json()
        self.assertEqual("BodySite", js["resourceType"])
        inst2 = bodysite.BodySite(js)
        self.implBodySite1(inst2)

    def implBodySite1(self, inst):
        self.assertEqual(inst.code.coding[0].code, "53120007")
        self.assertEqual(inst.code.coding[0].display, "Arm")
        self.assertEqual(inst.code.coding[0].system, "http://snomed.info/sct")
        self.assertEqual(inst.code.text, "Arm")
        self.assertEqual(inst.description, "front of upper left arm directly below the tattoo")
        self.assertEqual(inst.id, "example")
        self.assertEqual(inst.identifier[0].system, "http://www.acmehosp.com/bodysites")
        self.assertEqual(inst.identifier[0].use, "official")
        self.assertEqual(inst.identifier[0].value, "12345")
        self.assertEqual(inst.image[0].contentType, "image/png;base64")
        self.assertEqual(inst.image[0].title, "ARM")
        self.assertEqual(inst.modifier[0].coding[0].code, "419161000")
        self.assertEqual(inst.modifier[0].coding[0].display, "Unilateral left")
        self.assertEqual(inst.modifier[0].coding[0].system, "http://snomed.info/sct")
        self.assertEqual(inst.modifier[0].text, "Left")
        self.assertEqual(inst.modifier[1].coding[0].code, "261183002")
        self.assertEqual(inst.modifier[1].coding[0].display, "Upper")
        self.assertEqual(inst.modifier[1].coding[0].system, "http://snomed.info/sct")
        self.assertEqual(inst.modifier[1].text, "Upper")
        self.assertEqual(inst.modifier[2].coding[0].code, "255549009")
        self.assertEqual(inst.modifier[2].coding[0].display, "Anterior")
        self.assertEqual(inst.modifier[2].coding[0].system, "http://snomed.info/sct")
        self.assertEqual(inst.modifier[2].text, "Anterior")
        self.assertEqual(inst.text.status, "generated")
|
[
"myungchoi@gmail.com"
] |
myungchoi@gmail.com
|
0312b66b3e7867e0ab472288a6c5e79ae7d68dd8
|
dacdc8eca8258abc9c82b3a87d97f6a461503edf
|
/data/process_data.py
|
5318bb50a55db8751ba09ebc42d4a40d09cfc52d
|
[] |
no_license
|
eherdter/MessageClassifier
|
ce8f6503c9980068f038e1aa777c9c6a99921bf6
|
449431fab917b9294acce82977eb1d7c44e42bb3
|
refs/heads/master
| 2020-06-12T12:10:07.883068
| 2019-07-10T19:15:54
| 2019-07-10T19:15:54
| 194,294,707
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,016
|
py
|
import sys
import pandas as pd
from sqlalchemy import create_engine
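
# ETL pipeline: load the raw message/category CSVs, clean and binarize the
# category labels, and store the result in a SQLite database.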
def load_data(messages_filepath, categories_filepath):
    '''Loads the messages and categories datasets and joins them
    together using the common id.

    Returns: pandas.DataFrame
    '''
    # Load data.
    messages = pd.read_csv(messages_filepath, dtype=str, encoding='latin1')
    categories = pd.read_csv(categories_filepath, dtype=str, encoding='latin1')
    # Merge datasets using the common id.
    df = categories.set_index('id').join(messages.set_index('id'))
    return df


def clean_data(df):
    '''Tidy and clean df. Tidies the categories columns, drops duplicate
    rows, removes rows with non-binary (0/1) category entries, and removes
    rows where no category is selected (1).

    Returns: pandas.DataFrame
    '''
    # split categories into separate category columns and assign column names
    categories = df.categories.str.split(';', expand=True)
    categories.columns = categories.iloc[0].apply(lambda x: x[:len(x) - 2])
    # convert category values to just binary (0/1)
    for column in categories:
        # set each value to be the last character of the string
        categories[column] = categories[column].astype(str).str[-1]
        categories[column] = categories[column].astype(int)
    # replace the categories column in df with the new category columns
    df = df.drop(columns=['categories'])
    df = pd.concat([categories, df], axis=1)
    # remove duplicate rows
    df = df.drop_duplicates()
    # remove the child_alone category because there are no notes for it
    df = df.drop(['child_alone'], axis=1)
    # change level 2 in the related column to level 0 b/c they are not related posts
    # (.loc avoids pandas' chained-assignment pitfall)
    df.loc[df['related'] == 2, 'related'] = 0
    return df


def save_data(df, database_filepath):
    '''Saves the cleaned df to a SQL database.'''
    engine = create_engine('sqlite:///' + database_filepath)
    df.to_sql('messages', con=engine, if_exists='replace', index=False)
    return None


def main():
    if len(sys.argv) == 4:
        messages_filepath, categories_filepath, database_filepath = sys.argv[1:]
        print('Loading data...\n    MESSAGES: {}\n    CATEGORIES: {}'
              .format(messages_filepath, categories_filepath))
        df = load_data(messages_filepath, categories_filepath)
        print('Cleaning data...')
        df = clean_data(df)
        print('Saving data...\n    DATABASE: {}'.format(database_filepath))
        save_data(df, database_filepath)
        print('Cleaned data saved to database!')
    else:
        print('Please provide the filepaths of the messages and categories '
              'datasets as the first and second argument respectively, as '
              'well as the filepath of the database to save the cleaned data '
              'to as the third argument. \n\nExample: python process_data.py '
              'disaster_messages.csv disaster_categories.csv '
              'DisasterResponse.db')


if __name__ == '__main__':
    main()
|
[
"eherdter@mail.usf.edu"
] |
eherdter@mail.usf.edu
|
a6d438db0bc275cba915649a275ab52409197ac7
|
059b6f2963515af4ee4a5342f45ab05a4f431b60
|
/string reverse.py
|
2e5ff44a2f7f05c080f8e1c3b9e5f2ab8162ad8f
|
[] |
no_license
|
Surya-Narayanan0503/Python-Programs
|
70c20c0b1e07420b3058bbdd6ac88bcb9b1c273a
|
2fcdc31ed3c4cc068f2a23ef02a465673c0700e7
|
refs/heads/master
| 2020-06-20T02:40:16.488314
| 2019-07-19T08:49:07
| 2019-07-19T08:49:07
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 29
|
py
|
n=input()
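# a [::-1] slice walks the string backwards, producing its reverse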
i=n[::-1]
print(i)
|
[
"noreply@github.com"
] |
Surya-Narayanan0503.noreply@github.com
|
9c7357576d312b577fde01d5955822e944b46c7b
|
d0f11aa36b8c594a09aa06ff15080d508e2f294c
|
/leecode/1-500/401-500/472-连接词.py
|
4edb1540db15225aeb711ca0bd0954fa23641a7b
|
[] |
no_license
|
saycmily/vtk-and-python
|
153c1fe9953fce685903f938e174d3719eada0f5
|
5045d7c44a5af5c16df5a3b72c157e9a2928a563
|
refs/heads/master
| 2023-01-28T14:02:59.970115
| 2021-04-28T09:03:32
| 2021-04-28T09:03:32
| 161,468,316
| 1
| 1
| null | 2023-01-12T05:59:39
| 2018-12-12T10:00:08
|
Python
|
UTF-8
|
Python
| false
| false
| 1,016
|
py
|
class Solution:
    def findAllConcatenatedWordsInADict(self, words):
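        # Process words from shortest to longest, keeping a trie of the words
        # seen so far; a word that can be segmented into trie words is a
        # concatenated word ('#' marks the end of a stored word).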
        def search(word, pre_dict):
            if len(word) == 0:
                return True
            cur_dict = pre_dict
            for i, c in enumerate(word):
                cur_dict = cur_dict.get(c, None)
                if not cur_dict:
                    return False
                if '#' in cur_dict:
                    if search(word[i + 1:], pre_dict):
                        return True
            return False

        def insert(word, cur_dict):
            for c in word:
                if c not in cur_dict:
                    cur_dict[c] = {}
                cur_dict = cur_dict[c]
            cur_dict['#'] = {}

        words.sort(key=lambda x: len(x))
        ret = []
        pre_dict = {}
        for word in words:
            if len(word) == 0:
                continue
            if search(word, pre_dict):
                ret.append(word)
            else:
                insert(word, pre_dict)
        return ret
|
[
"1786386686@qq.com"
] |
1786386686@qq.com
|
c4d7e36734ac5ab04af3745ef7239a1f37d315fd
|
1005b44f86523c377b607c69d96a0f6cbfbd62c7
|
/Weather ToPy_bot/bot.py
|
9cb6e7c3d32246ca654b13e6d664c8f6a65b966c
|
[] |
no_license
|
Tofan93/Python
|
7e3308b99ea8f0db374a74d778d0fe6229bc0f59
|
20c0085bf8592bc2bee6ca35360701054993fd2f
|
refs/heads/master
| 2022-11-11T17:53:13.606553
| 2020-07-09T06:31:24
| 2020-07-09T06:31:24
| 258,720,575
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 912
|
py
|
import pyowm
import telebot
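
# Echo-style bot: any incoming text message is treated as a city name and the
# bot replies with the current weather there.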
owm = pyowm.OWM('b819a91869f45bb714b2cb0a0c647732', language = 'ru')
bot = telebot.TeleBot('1223510132:AAEg4XkxdnSa92GP4dfcamjb3uznTB9rkzM')
@bot.message_handler(content_types=['text'])
def send_echo(message):
    obs = owm.weather_at_place(message.text)
    city = obs.get_weather()
    temp = city.get_temperature('celsius')['temp']
    answer = 'In ' + message.text + ' it is now ' + city.get_detailed_status() + '\n'
    answer += 'The temperature is around ' + str(round(temp)) + ' degrees' + '\n\n'
    if temp < 10:
        answer += 'Very cold, dress warmly!'
    elif temp < 17:
        answer += 'Chilly, better put a jacket on :)'
    else:
        answer += 'Not cold at all, shorts weather :)'
    bot.send_message(message.chat.id, answer)


bot.polling(none_stop=True)
|
[
"noreply@github.com"
] |
Tofan93.noreply@github.com
|
05fa6c5fd8aacf359f1d9088e7f7c6cdb1a8d9ab
|
08fe9b6afba5708f8d51d79c1d503b8e87ff96a8
|
/Sets/symmetric_diff.py
|
844629f4671501b4b5d6caaf92c07ccc6faf3d96
|
[] |
no_license
|
Harsh-2909/HackerRank-Python-Solutions
|
667e83290c1421150b8ce782a2ffad82b84c4f57
|
2a6c5f013870791eb45c34e470c2a797a49f4cc1
|
refs/heads/master
| 2020-05-30T21:54:37.185556
| 2019-08-10T06:17:10
| 2019-08-10T06:17:10
| 189,983,460
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 232
|
py
|
m = int(input())
m_set = set(map(int, input().split()))
n = int(input())
n_set = set(map(int, input().split()))
m_set, n_set = list(m_set - n_set), list(n_set - m_set)
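# each set difference keeps the elements unique to one side; together they form the symmetric difference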
l = [*m_set, *n_set]
l.sort()
for i in l:
    print(i)
|
[
"harsh29092000@gmail.com"
] |
harsh29092000@gmail.com
|
92c7ef3d0dd3f76c0176fecc684bb6e824e0fa9f
|
1521645e97dfa364ce4ecc34ef97aa00510a5d7c
|
/cart/test_forms.py
|
aa48460ebb4a3c7ccf62ff39049c915e09f89007
|
[] |
no_license
|
kajamiko/u_m_website
|
0327903646ae21a024e0d95df937c49605c9e615
|
158953e5e375856c80ab34859c581b628681657e
|
refs/heads/master
| 2022-12-14T03:38:42.702543
| 2018-10-23T16:08:30
| 2018-10-23T16:08:30
| 139,005,832
| 0
| 0
| null | 2022-12-08T02:48:16
| 2018-06-28T10:53:31
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 838
|
py
|
from django.test import TestCase
from .forms import CartAddTicketForm
from django.contrib.auth.models import User
class TestTicketForm(TestCase):

    def test_for_updating(self):
        """
        Normal, as from a real view
        """
        form = CartAddTicketForm({'donation': 20, 'update': True})
        self.assertTrue(form.is_valid())

    def test_for_update_with_no_donation_value(self):
        """
        Donation value is required
        """
        form = CartAddTicketForm({'update': True})
        self.assertFalse(form.is_valid())

    def test_for_add_with_no_update(self):
        """
        Update is not required, as it is also an add form
        """
        form = CartAddTicketForm({'donation': 20})
        self.assertTrue(form.is_valid())
|
[
"kajaths@gmail.com"
] |
kajaths@gmail.com
|
6696d49ec45e3e7ddf84f6655775d70902a6d38f
|
9fbec0955358e0dc3c24e2b7e5e7aeb4fa12f963
|
/info.py
|
5eacb8d09d67a42912218c1f4e3c65871f39fe3c
|
[] |
no_license
|
AyselHavutcu/FlaskBlog
|
9b0142d7a400a6b73f9367d42a09e421e1ab1a0b
|
820e1e4740eb1f31ed302b208f61af0ba32f8f14
|
refs/heads/master
| 2020-07-26T01:20:28.171975
| 2019-09-14T18:30:30
| 2019-09-14T18:30:30
| 208,485,109
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,188
|
py
|
from flask import Flask, render_template, flash, redirect, url_for, session, logging, request  # brings up our web server
from flask_mysqldb import MySQL
# imports needed for MySQL
from wtforms import Form, StringField, PasswordField, validators
# these are imported for use in the forms
from passlib.hash import sha256_crypt  # for password hashing
app = Flask(__name__)
# Every Python file is really a module.
# We can use these Python files in two ways:
# 1) we write various functions inside the file and
#    later use them through direct function calls;
# 2) we import the file as a module from another Python file,
#    taking only the functions while keeping the top-level calls from running.
# If the file is run from the terminal, __name__ equals "__main__";
# otherwise it was imported as a module, so the top-level calls must not execute.
@app.route("/") #her url adresi istedigimizde kullanılan bir decorator
def index():
#yukarda request yaptık ve bu fonksiyon direk bu requestten sonra calısacaktır
#response donmemiz lazım
numbers = [1,2,3,4,5]
return render_template("index.html",numbers = numbers)
#baska requestler de yapabilirz
@app.route("/about")
def about():
return render_template("about.html")
#dinamik url tanımlayacaz
@app.route("/article/<string:id>")
def detail(id):
return "Article ID:" +id
if __name__ == "__main__":
app.run(debug=True)
#debug true dememizin sebebi debug ı aktiflestiriyoruz
#yani herhangi bir yerde hatamız olursa bize bir uyarı mesajı fırlatılacak
#jinja templater ı icinde html css kodlarını ve boostrap modullerimiz
#kullanıyoruz aynı zamanda python kodlarımızıda bu template e uygun bir sekilde kullanabiliyoruz
#boylelikle biz fonksiyonlarımızda herhangi bir deger urettigimiz zaman bu template a bu degeri verip template i response olarak donebilioruz
#bunun icin bizim bu template render etmemiz gerekiyor boylece biz bu template bir pythom degeri gonderebiliyoruz
|
[
"42868243+AyselHavutcu@users.noreply.github.com"
] |
42868243+AyselHavutcu@users.noreply.github.com
|
2602f0d5d20194361a9b3aaf0ea2586b950fa49b
|
5af72fd35f3f967be5b6c195eaedd8739df3ee47
|
/SnakeBlock.py
|
fb9c5afcab27983b4ec802adfbba6fe9c6d19186
|
[] |
no_license
|
tomluko/PythonSnake
|
0d43e0d7ba37fd4ca29b37856294d5f63502412b
|
e336cf1853fa060f3b33445d485a2b353bb0dccc
|
refs/heads/master
| 2021-01-10T02:43:08.590884
| 2015-06-02T19:52:54
| 2015-06-02T19:52:54
| 36,740,484
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 270
|
py
|
import Block
# snake's body segment
class SnakeBlock(Block.Block):
    def __init__(self, size, position):
        solid = True
        removable = False
        color = (0, 153, 0)
        super(SnakeBlock, self).__init__(solid, removable, position, size, color)
|
[
"tomazasax@yahoo.com"
] |
tomazasax@yahoo.com
|
4d5620e88c6380c33efa91a55293aac691fa505e
|
64cacb589af13865d8c1aef9b27e65bfed742967
|
/md_image_backup_py3.py
|
576aa084a8a8b6120e98074c4de692836ce5a214
|
[] |
no_license
|
eggfly/markdown-img-backup
|
6f984a380ca837b69446f3e805cfeba19a8dd40f
|
6d0e5bb2e679b081ca0b960869d87a17b5e1cdda
|
refs/heads/master
| 2022-01-11T17:53:49.165213
| 2018-12-26T00:30:34
| 2018-12-26T00:30:34
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,552
|
py
|
# coding=utf-8
import sys
import os
import re
import requests
import urllib.request
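
# Scan markdown files for ![alt](url) image links and download each linked
# image into a local img/ folder as a backup.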
def backup():
    try:
        # back up the images of the specified file
        download(str('your markdown file path' + sys.argv[1]))
    except IndexError:
        # back up the images of every .md file under the folder
        search('your markdown file path', '.md')


def search(path, word):
    for filename in os.listdir(path):
        fp = os.path.join(path, filename)
        if os.path.isfile(fp) and word in filename:
            print(fp)
            download(str(fp))
        elif os.path.isdir(fp):
            search(fp, word)


def download(file_path):
    # filename = "test"
    name = file_path.split(u"\\")
    filename = name[-1]
    f_md = open(file_path, 'rb')
    # all text of the md file
    text = f_md.read().decode('utf-8')
    # regex
    # img_reg = r'\!{1}\[(.*?)\]\((.*?)\)'
    result = re.findall('!\[(.*?)\]\((.*?)\)', text)
    print(result)
    for i in range(len(result)):
        img_quote = result[i][0]
        img_url = result[i][1]
        # download the image
        request = urllib.request.Request(img_url)
        response = urllib.request.urlopen(request)
        img_contents = response.read()
        # build the image file name
        urlname = img_url.split(u"/")
        img_name = filename + '_' + \
            str(i) + '_' + img_quote + str(urlname[len(urlname) - 1])
        print(img_name + '~~~' + img_url)
        # write to file
        f_img = open('img/' + img_name, 'wb')
        f_img.write(img_contents)
        f_img.close()
    f_md.close()


backup()
|
[
"hj531@live.com"
] |
hj531@live.com
|
1f54af48b0de5de3deb1326d6dfc2e3b9b08012e
|
7246faf9a222269ce2612613f58dc5ff19091f10
|
/baekjoon/3000~5999/4949_균형잡힌세상.py
|
69e300ec26003ff839d8917a542427b2e7f68cc4
|
[] |
no_license
|
gusdn3477/Algorithm_Study
|
87a2eb72a8488d9263a86db70dadc7944434d41d
|
3fefe1dcb40122157845ffc542f41cb097711cc8
|
refs/heads/main
| 2023-08-30T12:18:21.412945
| 2021-09-28T13:00:11
| 2021-09-28T13:00:11
| 308,364,230
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 598
|
py
|
a = input()
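# read lines until a line consisting of a single "." ends the input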
while a != '.':
    poc = []
    for i in range(len(a)):
        if a[i] == '(' or a[i] == '[':
            poc.append(a[i])
        if a[i] == ')':
            if not poc or poc[-1] != '(':
                poc.append(a[i])
                break
            if poc[-1] == '(':
                poc.pop()
        if a[i] == ']':
            if not poc or poc[-1] != '[':
                poc.append(a[i])
                break
            if poc[-1] == '[':
                poc.pop()
    if not poc:
        print("yes")
    else:
        print("no")
    poc.clear()
    a = input()
|
[
"gusdn3477@naver.com"
] |
gusdn3477@naver.com
|
5686a856ed2ea24c4fa6fc47556d57a680b78c88
|
8a9dfd08d8bfbe07f948851ada95085bb279daa2
|
/Python/26-Remove-Duplicates-from-Sorted-Array.py
|
ff24549e704ad2e566add902835aae98f2e34dfb
|
[] |
no_license
|
lukk47/LeetCode
|
e437dd39f27ce40f421b03dad50935ec55239dc6
|
038cadf2b50c3aa28a7c3834f5e0dc1e8ef40e38
|
refs/heads/master
| 2023-09-01T18:57:09.888955
| 2021-02-08T09:46:12
| 2021-02-08T09:46:12
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 428
|
py
|
class Solution(object):
    def removeDuplicates(self, nums):
        """
        :type nums: List[int]
        :rtype: int
        """
        index = 0
        last_number = None
        while index < len(nums):
            if nums[index] != last_number:
                last_number = nums[index]
                index = index + 1
            else:
                del nums[index]
        return len(nums)
|
[
"noreply@github.com"
] |
lukk47.noreply@github.com
|