blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 2
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
69
| license_type
stringclasses 2
values | repo_name
stringlengths 5
118
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
63
| visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 2.91k
686M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 23
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 213
values | src_encoding
stringclasses 30
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 2
10.3M
| extension
stringclasses 246
values | content
stringlengths 2
10.3M
| authors
listlengths 1
1
| author_id
stringlengths 0
212
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
30c68a7f2b2becc4f6f4df45a976b76f30e1591a
|
2e6b15509a4487241f5734346e8ac9173c958c99
|
/django-heroku/wsgi.py
|
47ce2f18f91679239b037018f891010cbd62af47
|
[] |
no_license
|
Bibliocratie/Bibliocratie
|
9dd47ab105eb7e0dfb2566b307ad8bfd66b1aad5
|
b66347ced05dc7821e721fd3d05d619791e4d543
|
refs/heads/master
| 2020-06-05T08:07:19.298421
| 2015-07-27T16:03:34
| 2015-07-27T16:03:34
| 39,377,060
| 9
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 578
|
py
|
import os

# Point Django at the production settings unless the environment already
# names a settings module.  os.environ.setdefault() only writes the key
# when it is absent, so the old `os.environ.has_key(...)` pre-check was
# redundant — and has_key() was removed in Python 3, so it crashed there.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'django-heroku.settings.prod')

from django.conf import settings
from django.core.wsgi import get_wsgi_application
from ws4redis.uwsgi_runserver import uWSGIWebsocketServer

_django_app = get_wsgi_application()
_websocket_app = uWSGIWebsocketServer()


def application(environ, start_response):
    """WSGI entry point: route websocket paths to ws4redis, all other
    requests to the regular Django application."""
    if environ.get('PATH_INFO').startswith(settings.WEBSOCKET_URL):
        return _websocket_app(environ, start_response)
    return _django_app(environ, start_response)
|
[
"B@MacBook-Air-de-B.local"
] |
B@MacBook-Air-de-B.local
|
8360faa0d31d50eef471dccad16bfd4386691623
|
7da1ec9bef7d196f606868bdc3626d97cc9fac9e
|
/src/app.py
|
fb7e0bfbcba827be3cb51f556e00f8942b49be8d
|
[
"MIT"
] |
permissive
|
budavariam/flask-starter
|
1b36485d6a1e6e5529b44363f5820cef36dc6a10
|
8fb65bd584860e7538501abad69432d66ec39e1d
|
refs/heads/main
| 2023-03-17T08:22:14.608597
| 2021-02-27T11:34:28
| 2021-02-27T11:34:28
| 342,830,658
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 735
|
py
|
from flask import Flask, request
import os
import logging
import json

# NOTE(review): `os` is imported but never used in this module.
log = logging.getLogger()

app = Flask(__name__)


@app.route('/data', methods = ['POST'])
def post_endpoint():
    """POST /data: read JSON fields "a" and "b" (defaults 23 and 42) and
    return {"result": a + b} as a JSON string, or {"error": <repr of the
    exception>} if the addition fails (e.g. non-numeric operands)."""
    try:
        # force=True parses regardless of Content-Type; silent=True makes
        # a parse failure yield None instead of raising.
        data = request.get_json(force=True, silent=True)
        if data is None:
            data = dict()
        a = data.get("a", 23)
        b = data.get("b", 42)
        return json.dumps({"result": a + b })
    except Exception as e:
        log.error(f"Failed to calculate data. error: {repr(e)}")
        return json.dumps({"error": repr(e)})


@app.route('/version')
def environment_info():
    # Static version-string endpoint.
    return json.dumps({"version": "0.0.1"})


@app.route('/ping')
def ping():
    # Liveness probe.
    return "PONG"


if __name__ == '__main__':
    app.run(port=8084)
|
[
"budavariam@gmail.com"
] |
budavariam@gmail.com
|
c8b87c41f32998b463217b16fc1f4ff0a8d275a6
|
d3efc82dfa61fb82e47c82d52c838b38b076084c
|
/Autocase_Result/TSZLQX/YW_TSZL_QXJY_023.py
|
0361e34af00e10526411e4766416568a5bb4e161
|
[] |
no_license
|
nantongzyg/xtp_test
|
58ce9f328f62a3ea5904e6ed907a169ef2df9258
|
ca9ab5cee03d7a2f457a95fb0f4762013caa5f9f
|
refs/heads/master
| 2022-11-30T08:57:45.345460
| 2020-07-30T01:43:30
| 2020-07-30T01:43:30
| 280,388,441
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,295
|
py
|
#!/usr/bin/python
# -*- encoding: utf-8 -*-
import sys
sys.path.append("/home/yhl2/workspace/xtp_test/xtp/api")
from xtp_test_case import *
sys.path.append("/home/yhl2/workspace/xtp_test/service")
from ServiceConfig import *
from mainService import *
from QueryStkPriceQty import *
from log import *
sys.path.append("/home/yhl2/workspace/xtp_test/mysql")
from CaseParmInsertMysql import *
from UpdateTszlqx import *
sys.path.append("/home/yhl2/workspace/xtp_test/utils")
from QueryOrderErrorMsg import queryOrderErrorMsg
class YW_TSZL_QXJY_023(xtp_test_case):
    # Test case YW_TSZL_QXJY_023
    def test_YW_TSZL_QXJY_023(self):
        # Title kept verbatim (it is logged at runtime): OMS init /
        # optional permissions — SZ A-share account HAS buy permission for
        # a delisting-consolidation-period stock (market-price order).
        title = 'OMS初始化—可选权限—Secuinfo表,深A证券账号有买入退市整理期股票权限(申报市价单)'
        # Define the expected outcome of this test case.
        # Possible order states: initial, unfilled, partially filled, fully
        # filled, partial-cancel reported, partially cancelled, reported
        # pending cancel, cancelled, rejected, cancel-rejected, internally
        # cancelled.
        # xtp_ID and cancel_xtpID default to 0 and need not be changed.
        # NOTE(review): the two updateSecuRight* helpers presumably toggle
        # the SZ/SH permission rows before restart — confirm in UpdateTszlqx.
        updateSecuRightHasMSz()
        updateSecuRightHasNoMSh()
        clear_data_and_restart_all()
        Api.trade.Logout()
        Api.trade.Login()
        case_goal = {
            '期望状态': '废单',
            'errorID': 11000315,
            'errorMSG': queryOrderErrorMsg(11000315),
            '是否生成报单': '是',
            '是否是撤废': '否',
            'xtp_ID': 0,
            'cancel_xtpID': 0,
        }
        logger.warning(title)
        # Define the order parameters ------------------------------------
        # Args: ticker, market, security type, security status, trading
        # status, side ('B' buy / 'S' sell), expected state, Api handle.
        stkparm = QueryStkPriceQty('060001', '2', '0', '10', '0', 'B', case_goal['期望状态'], Api)
        # If fetching the order parameters failed, fail the test case.
        if stkparm['返回结果'] is False:
            rs = {
                '用例测试结果': stkparm['返回结果'],
                '测试错误原因': '获取下单参数失败,' + stkparm['错误原因'],
            }
            self.assertEqual(rs['用例测试结果'], True)
        else:
            wt_reqs = {
                'business_type': Api.const.XTP_BUSINESS_TYPE['XTP_BUSINESS_TYPE_CASH'],
                'order_client_id':2,
                'market': Api.const.XTP_MARKET_TYPE['XTP_MKT_SZ_A'],
                'ticker': stkparm['证券代码'],
                'side': Api.const.XTP_SIDE_TYPE['XTP_SIDE_BUY'],
                'price_type': Api.const.XTP_PRICE_TYPE['XTP_PRICE_BEST_OR_CANCEL'],
                'price': stkparm['涨停价'],
                'quantity': 200,
                'position_effect': Api.const.XTP_POSITION_EFFECT_TYPE['XTP_POSITION_EFFECT_INIT']
            }
            ParmIni(Api, case_goal['期望状态'], wt_reqs['price_type'])
            CaseParmInsertMysql(case_goal, wt_reqs)
            rs = serviceTest(Api, case_goal, wt_reqs)
            logger.warning('执行结果为' + str(rs['用例测试结果']) + ','
                           + str(rs['用例错误源']) + ',' + str(rs['用例错误原因']))
            self.assertEqual(rs['用例测试结果'], True)  # 414


if __name__ == '__main__':
    unittest.main()
|
[
"418033945@qq.com"
] |
418033945@qq.com
|
a246a75a3361ac96db9e074d4c7f4593f016116d
|
0bb2bfd8e378bf1893b169355ac1e031c21a469c
|
/gen_crawler/configs/auto360_config.py
|
223b0ac570ea49634b629a6e4d0554d5a8941451
|
[] |
no_license
|
DMGbupt/general-crawler
|
f0ac919f80bf2745456355d912ed252508abda63
|
6a97374d0faf55dcf33b645a21d3d172b6400e1e
|
refs/heads/master
| 2021-01-10T17:58:17.151451
| 2016-01-09T15:05:32
| 2016-01-09T15:05:32
| 54,119,672
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,587
|
py
|
# -*- coding: utf-8 -*-
# Crawl-seed configuration: each entry pairs a category keyword (kept in
# Chinese — it is runtime data) with the listing URL to start from.
CONF = [
    # Policies and regulations
    {'keyword':"政策法规", 'url': "http://www.autochina360.com/news/zhengcebiaozhun/list_24_1.html"},
    # Auto industry news
    {'keyword': "乘用车",
     'url': "http://www.autochina360.com/news/zc/cyc/list_3_1.html"},
    {'keyword': "商用车", 'url': "http://www.autochina360.com/news/zc/syc/list_4_1.html"},
    {'keyword': "数据",
     'url': "http://www.autochina360.com/news/zc/shujujiance/list_69_1.html"},
    # Parts and components
    {'keyword': "动力总成", 'url': "http://www.autochina360.com/news/lbj/dlzcpf/list_6_1.html"},
    {'keyword': "底盘系统", 'url': "http://www.autochina360.com/news/lbj/dpxt/list_7_1.html"},
    {'keyword': "轮胎及材料", 'url': "http://www.autochina360.com/news/lbj/ltjcl/list_8_1.html"},
    {'keyword': "电子器件", 'url': "http://www.autochina360.com/news/lbj/dzdq/list_9_1.html"},
    {'keyword': "技术创新",
     'url': "http://www.autochina360.com/news/lbj/jishuchuangxin/list_106_1.html"},
    # After-sales
    {'keyword': "汽车维修", 'url': "http://www.autochina360.com/news/hsc/qcwx/list_11_1.html"},
    {'keyword': "汽车保养", 'url': "http://www.autochina360.com/news/hsc/qcby/list_12_1.html"},
    {'keyword': "售后服务", 'url': "http://www.autochina360.com/news/hsc/shfw/list_13_1.html"},
    {'keyword': "汽车用品", 'url': "http://www.autochina360.com/news/hsc/qcyp/list_14_1.html"},
    {'keyword': "企业召回",
     'url': "http://www.autochina360.com/news/hsc/qiyezhaohui/list_107_1.html"},
    # Industry services
    {'keyword': "装备制造", 'url': "http://www.autochina360.com/news/hyfw/zbzz/list_16_1.html"},
    {'keyword': "汽车物流", 'url': "http://www.autochina360.com/news/hyfw/qcwl/list_17_1.html"},
    {'keyword': "企业管理", 'url': "http://www.autochina360.com/news/hyfw/qygl/list_18_1.html"},
    {'keyword': "汽车改装", 'url': "http://www.autochina360.com/news/hyfw/qcgz/list_19_1.html"},
    {'keyword': "金融服务", 'url': "http://www.autochina360.com/news/hyfw/jrfw/list_20_1.html"},
    {'keyword': "燃油燃气", 'url': "http://www.autochina360.com/news/hyfw/ryrq/list_21_1.html"},
    # Other
    {'keyword': "互联网营销",
     'url': "http://www.autochina360.com/news/hulianwangyingxiao/list_29_1.html"},
    {'keyword': "车联网", 'url': "http://www.autochina360.com/news/chelianwang/list_28_1.html"},
    {'keyword': "新能源车", 'url': "http://www.autochina360.com/news/xinnenyuan/list_27_1.html"},
    {'keyword': "产业投资",
     'url': "http://www.autochina360.com/news/chanyetouzi/list_23_1.html"},
    {'keyword': "经销商",
     'url': "http://www.autochina360.com/news/jingxiaoshang/list_22_1.html"},
    # Data / rankings
    {'keyword': "数据新闻",
     'url': "http://www.autochina360.com/news/zc/shujujiance/list_69_1.html"},
    {'keyword': "乘用排行",
     'url': "http://www.autochina360.com/information/chengyongchepaixing/list_149_1.html"},
    {'keyword': "商用车排行",
     'url': "http://www.autochina360.com/information/shangyongchepaixing/list_150_1.html"},
    {'keyword': "零部件排行",
     'url': "http://www.autochina360.com/information/lingbujianpaixing/list_151_1.html"},
    {'keyword': "其它排行",
     'url': "http://www.autochina360.com/information/qitapaixing/list_152_1.html"},
    # Construction machinery / finance
    {'keyword': "行业研究",
     'url': "http://www.autochina360.com/finance/hangyeyanjiu/list_143_1.html"},
    {'keyword': "宏观资讯",
     'url': "http://www.autochina360.com/finance/hongguanzixun/list_141_1.html"},
    {'keyword': "行业动态",
     'url': "http://www.autochina360.com/cmchina360/xingye/xingyedongtai/list_182_1.html"},
    {'keyword': "分析评论",
     'url': "http://www.autochina360.com/cmchina360/xingye/fenxipinglun/list_183_1.html"},
    {'keyword': "人物访谈",
     'url': "http://www.autochina360.com/cmchina360/xingye/renwufangtan/list_185_1.html"},
    {'keyword': "二手租赁",
     'url': "http://www.autochina360.com/cmchina360/xingye/ershouzulin/list_190_1.html"},
    # Company news
    {'keyword': "整车", 'url':
     "http://www.autochina360.com/cmchina360/zhengchechangqiyedongtai/list_179_1.html"},
    {'keyword': "零部件",
     'url': "http://www.autochina360.com/cmchina360/lingbujianqiyedongtai/list_180_1.html"},
    {'keyword': "展会", 'url':
     "http://www.autochina360.com/cmchina360/zhanhuiluntan/zhanhuixinwen/list_186_1.html"},
]
|
[
"li_enterprise@163.com"
] |
li_enterprise@163.com
|
640e41c092c6f04dcaa03f6fd45f7d5d7b4e8500
|
f899c1c5ea6283a06d86d2acf576ad9211657904
|
/ejui/__main__.py
|
69092aafb576d47c132f76fa210be32747aa554f
|
[] |
no_license
|
sleirsgoevy/ejui
|
59867d0784f7d05cdb83846f7f984dff212c024d
|
f3c964f40c94a23e9050edca9e7c460b4013d0d2
|
refs/heads/master
| 2021-07-05T21:50:06.477276
| 2020-08-24T22:16:18
| 2020-08-24T22:16:18
| 169,611,662
| 1
| 2
| null | 2020-01-22T13:47:47
| 2019-02-07T17:23:59
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 543
|
py
|
import ejui, sys, os.path

# Command-line launcher: `python -m ejui <bind_addr> <tgt_addr>`.
if len(sys.argv) != 3:
    sys.stderr.write("""\
usage: ejui.py <bind_addr> <tgt_addr>
Environment variables:
* EJUI_PRELOAD
`pathsep`-separated list of modules to be imported before ejui starts.
""")
    exit(1)

# Import any user-requested modules before ejui spins up.
if 'EJUI_PRELOAD' in os.environ:
    for module_name in os.environ['EJUI_PRELOAD'].split(os.path.pathsep):
        __import__(module_name)

# Split "host:port" from the right so colons inside an IPv6 literal
# survive; surrounding brackets are stripped from the host part.
bind_host, bind_port = sys.argv[1].rsplit(':', 1)
if bind_host.startswith('[') and bind_host.endswith(']'):
    bind_host = bind_host[1:-1]

ejui.main((bind_host, int(bind_port)), sys.argv[2])
|
[
"sleirsgoevy@gmail.com"
] |
sleirsgoevy@gmail.com
|
e4701e25f8387d1dcd3ef351894d00ed975c5a08
|
de24f83a5e3768a2638ebcf13cbe717e75740168
|
/moodledata/vpl_data/150/usersdata/269/65305/submittedfiles/formula.py
|
cb676043cecd68bf46ac20543bd3863f9f65501c
|
[] |
no_license
|
rafaelperazzo/programacao-web
|
95643423a35c44613b0f64bed05bd34780fe2436
|
170dd5440afb9ee68a973f3de13a99aa4c735d79
|
refs/heads/master
| 2021-01-12T14:06:25.773146
| 2017-12-22T16:05:45
| 2017-12-22T16:05:45
| 69,566,344
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 187
|
py
|
# -*- coding: utf-8 -*-
# Read p (principal), i (interest rate) and n (periods), then print the
# future value of the annuity to two decimal places.

def _pedir_numero(rotulo):
    # Prompt (in Portuguese, unchanged) and parse one float.
    return float(input('digite o valor de ' + rotulo + ': '))

p = _pedir_numero('p')
i = _pedir_numero('i')
n = _pedir_numero('n')

# v = p * ((1 + i)^n - 1) / i
v = p * (((1 + i) ** n) - 1) / i
print('%.2f' % v)
|
[
"rafael.mota@ufca.edu.br"
] |
rafael.mota@ufca.edu.br
|
49a792fdecbea9605461319909fcf1366104dd64
|
f0eac781f81f2f9646b2200053996e120ba8c07a
|
/src/k_medoids.py
|
88af82ee821ae4a00e6b13ccb07d6bd003b67f81
|
[] |
no_license
|
jsnajder/derivsem
|
9cbb620030bcba5ea1593dca13d9dccf50fe773d
|
dbae8f7980d1a67524256d702454d69fec7eda10
|
refs/heads/master
| 2021-01-17T17:36:05.883896
| 2016-07-15T06:17:15
| 2016-07-15T06:17:15
| 61,334,626
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,551
|
py
|
# -*- coding: utf-8 -*-
"""K-medoids clustering"""
# Authors: Timo Erkkilä <timo.erkkila@gmail.com>
# Antti Lehmussola <antti.lehmussola@gmail.com>
# License: BSD 3 clause
import numpy as np
import warnings
from sklearn.base import BaseEstimator, ClusterMixin, TransformerMixin
from sklearn.metrics.pairwise import PAIRWISE_DISTANCE_FUNCTIONS
from sklearn.utils import check_array, check_random_state
from sklearn.utils.validation import check_is_fitted
class KMedoids(BaseEstimator, ClusterMixin, TransformerMixin):
    """
    k-medoids class.

    Parameters
    ----------
    n_clusters : int, optional, default: 8
        How many medoids. Must be a positive integer.

    distance_metric : string or callable, optional, default: 'euclidean'
        What distance metric to use.

    clustering_method : {'pam'}, optional, default: 'pam'
        What clustering mode to use.

    init : {'random', 'heuristic'}, optional, default: 'heuristic'
        Specify medoid initialization.

    max_iter : int, optional, default : 300
        Specify the maximum number of iterations when fitting.

    random_state : int, optional, default: None
        Specify random state for the random number generator.
    """

    # Supported clustering methods
    CLUSTERING_METHODS = ['pam']

    # Supported initialization methods
    INIT_METHODS = ['random', 'heuristic']

    def __init__(self, n_clusters=8, distance_metric='euclidean',
                 clustering_method='pam', init='heuristic',
                 max_iter=300, random_state=None):
        self.n_clusters = n_clusters
        self.distance_metric = distance_metric
        self.init = init
        self.max_iter = max_iter
        self.clustering_method = clustering_method
        self.random_state = random_state

    def _check_init_args(self):
        """Validate constructor arguments and derive helper attributes."""

        # Check n_clusters.  isinstance() is tested first so that a
        # non-integer value (None, a string, ...) raises this ValueError
        # instead of a TypeError from the "<=" comparison.  The message
        # now says "positive", which is what the check actually enforces
        # (the original said "nonnegative").
        if not isinstance(self.n_clusters, int) or self.n_clusters <= 0:
            raise ValueError("n_clusters has to be a positive integer")

        # Check distance_metric: accept a callable as-is, otherwise look
        # the name up in scikit-learn's pairwise-distance registry.
        if callable(self.distance_metric):
            self.distance_func = self.distance_metric
        elif self.distance_metric in PAIRWISE_DISTANCE_FUNCTIONS:
            self.distance_func = \
                PAIRWISE_DISTANCE_FUNCTIONS[self.distance_metric]
        else:
            raise ValueError("distance_metric needs to be " +
                             "callable or one of the " +
                             "following strings: " +
                             "{}".format(PAIRWISE_DISTANCE_FUNCTIONS.keys()) +
                             ". Instead, '{}' ".format(self.distance_metric) +
                             "was given.")

        # Check clustering_method
        if self.clustering_method not in self.CLUSTERING_METHODS:
            raise ValueError("clustering must be one of the following: " +
                             "{}".format(self.CLUSTERING_METHODS))

        # Check init
        if self.init not in self.INIT_METHODS:
            raise ValueError("init needs to be one of " +
                             "the following: " +
                             "{}".format(self.INIT_METHODS))

        # Check random state
        self.random_state_ = check_random_state(self.random_state)

    def fit(self, X, y=None):
        """Fit K-Medoids to the provided data.

        Parameters
        ----------
        X : array-like or sparse matrix, shape=(n_samples, n_features)

        y : ignored, present for scikit-learn API compatibility.

        Returns
        -------
        self
        """

        self._check_init_args()

        # Check that the array is good and attempt to convert it to
        # Numpy array if possible
        X = self._check_array(X)

        # Apply distance metric to get the distance matrix
        D = self.distance_func(X)

        medoid_ics = self._get_initial_medoid_indices(D, self.n_clusters)

        # Old medoids will be stored here for reference
        old_medoid_ics = np.zeros((self.n_clusters,))

        # Continue the algorithm as long as
        # the medoids keep changing and the maximum number
        # of iterations is not exceeded
        self.n_iter_ = 0
        while not np.all(old_medoid_ics == medoid_ics) and \
                self.n_iter_ < self.max_iter:
            self.n_iter_ += 1

            # Keep a copy of the old medoid assignments
            old_medoid_ics = np.copy(medoid_ics)

            # Get cluster indices
            cluster_ics = self._get_cluster_ics(D, medoid_ics)

            # Update medoids with the new cluster indices
            self._update_medoid_ics_in_place(D, cluster_ics, medoid_ics)

        # Expose labels_ which are the assignments of
        # the training data to clusters
        self.labels_ = cluster_ics

        # Expose cluster centers, i.e. medoids
        self.cluster_centers_ = X.take(medoid_ics, axis=0)

        # Return self to enable method chaining
        return self

    def _check_array(self, X):
        """Convert X to an ndarray and check it is large enough."""

        X = check_array(X)

        # There must be at least as many samples as requested medoids.
        # (Message fixed: the original said the number of medoids "must
        # be larger than" the number of samples — the inverse of the
        # condition actually tested.)
        if self.n_clusters > X.shape[0]:
            raise ValueError("The number of medoids " +
                             "({}) ".format(self.n_clusters) +
                             "must not exceed the number " +
                             "of samples ({})".format(X.shape[0]))

        return X

    def _get_cluster_ics(self, D, medoid_ics):
        """Returns cluster indices for D and current medoid indices"""

        # Assign data points to clusters based on
        # which cluster assignment yields
        # the smallest distance
        cluster_ics = np.argmin(D[medoid_ics, :], axis=0)

        return cluster_ics

    def _update_medoid_ics_in_place(self, D, cluster_ics, medoid_ics):
        """In-place update of the medoid indices"""

        # Update the medoids for each cluster
        for cluster_idx in range(self.n_clusters):

            if sum(cluster_ics == cluster_idx) == 0:
                warnings.warn("Cluster {} is empty!".format(cluster_idx))
                continue

            # Find current cost that is associated with cluster_idx.
            # Cost is the sum of the distance from the cluster
            # members to the medoid.
            curr_cost = np.sum(D[medoid_ics[cluster_idx],
                                 cluster_ics == cluster_idx])

            # Extract the distance matrix between the data points
            # inside the cluster_idx
            D_in = D[cluster_ics == cluster_idx, :]
            D_in = D_in[:, cluster_ics == cluster_idx]

            # Calculate all costs there exists between all
            # the data points in the cluster_idx
            all_costs = np.sum(D_in, axis=1)

            # Find the index for the smallest cost in cluster_idx
            min_cost_idx = np.argmin(all_costs)

            # find the value of the minimum cost in cluster_idx
            min_cost = all_costs[min_cost_idx]

            # If the minimum cost is smaller than that
            # exhibited by the currently used medoid,
            # we switch to using the new medoid in cluster_idx
            if min_cost < curr_cost:

                # Find data points that belong to cluster_idx,
                # and assign the newly found medoid as the medoid
                # for cluster c
                medoid_ics[cluster_idx] = \
                    np.where(cluster_ics == cluster_idx)[0][min_cost_idx]

    def transform(self, X):
        """Transforms X to cluster-distance space.

        Parameters
        ----------
        X : array-like or sparse matrix, shape=(n_samples, n_features)
            Data to transform.

        Returns
        -------
        X_new : array, shape=(n_samples, n_clusters)
            X transformed in the new space.
        """

        check_is_fitted(self, "cluster_centers_")

        # Apply distance metric wrt. cluster centers (medoids),
        # and return these distances
        return self.distance_func(X, Y=self.cluster_centers_)

    def predict(self, X):
        """Predict the closest medoid for each sample in X."""

        check_is_fitted(self, "cluster_centers_")

        # Check that the array is good and attempt to convert it to
        # Numpy array if possible
        X = check_array(X)

        # Apply distance metric wrt. cluster centers (medoids)
        D = self.distance_func(X, Y=self.cluster_centers_)

        # Assign data points to clusters based on
        # which cluster assignment yields
        # the smallest distance
        labels = np.argmin(D, axis=1)

        return labels

    def inertia(self, X):
        """Sum of distances from each sample to its closest medoid."""

        # Map the original X to the distance-space
        Xt = self.transform(X)

        # Define inertia as the sum of the sample-distances
        # to closest cluster centers
        inertia = np.sum(np.min(Xt, axis=1))

        return inertia

    def _get_initial_medoid_indices(self, D, n_clusters):
        """Pick the starting medoid indices per the `init` strategy."""

        if self.init == 'random':  # Random initialization

            # Pick random k medoids as the initial ones.
            medoids = self.random_state_.permutation(D.shape[0])[:n_clusters]

        elif self.init == 'heuristic':  # Initialization by heuristic

            # Pick K first data points that have the smallest sum distance
            # to every other point. These are the initial medoids.
            medoids = list(np.argsort(np.sum(D, axis=1))[:n_clusters])

        else:

            raise ValueError("Initialization not implemented for method: " +
                             "'{}'".format(self.init))

        return medoids
|
[
"jan.snajder@fer.hr"
] |
jan.snajder@fer.hr
|
87dbff658ae770b9bca26bd7a9f89f92a20076a0
|
06c2bc496f9e285f06e4c3c71f14d5716f411d89
|
/source/webapp/migrations/0021_auto_20210610_1547.py
|
fa679f2b5e70fab614b61a495f398519a08f1cbb
|
[] |
no_license
|
Beknasar/Coin_collection
|
37a9e77cc00270dfcb9d0cb5916f985cec4c591d
|
091860f98e7dc81d460ab0cbcb6ca1d7fdeffda8
|
refs/heads/master
| 2023-06-09T16:25:30.473134
| 2021-06-25T09:31:13
| 2021-06-25T09:31:13
| 365,229,399
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 526
|
py
|
# Generated by Django 2.2 on 2021-06-10 15:47

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):
    # Auto-generated migration: re-declares Nominal.currency with
    # related_name='nominals' and the Russian verbose_name 'валюта'
    # (kept verbatim — it is a runtime string).

    dependencies = [
        ('webapp', '0020_auto_20210610_1547'),
    ]

    operations = [
        migrations.AlterField(
            model_name='nominal',
            name='currency',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='nominals', to='webapp.Currency', verbose_name='валюта'),
        ),
    ]
|
[
"680633@gmail.com"
] |
680633@gmail.com
|
363f94f384d15bce02adc5109174fd90c3c397e8
|
f93e8ea5ae86019a7e836f8a33422d3c4522e341
|
/thief/solution.py
|
1e150692539f8fa6d069f7b3d47d2ea7969b6e3e
|
[] |
no_license
|
Jack2ee-dev/NKLCB_homework
|
3c65c29c047486f1f8121c0c303e9e13b7e9da99
|
f4d479bac4a6ae36790fc1908f0a6e3b6acd30b4
|
refs/heads/master
| 2023-03-29T13:46:36.631531
| 2021-04-09T06:34:05
| 2021-04-09T06:34:05
| 350,281,274
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 878
|
py
|
def solution(money):
    """Circular House Robber: maximum loot when the first and last houses
    are adjacent and no two adjacent houses may both be robbed.

    Bug fix: the second DP pass must EXCLUDE the first house, but the
    original seeded it with money[0], so it could rob both endpoints
    (e.g. [10, 1, 1, 1, 10] returned 21 instead of 11).  Also guards the
    single-house input, which previously raised IndexError.
    """
    n = len(money)
    if n == 1:
        return money[0]
    # Case 1: the first house may be robbed, so the last is off-limits.
    dp1 = [0] * n
    dp1[0] = money[0]
    dp1[1] = max(money[0], money[1])
    for i in range(2, n - 1):
        dp1[i] = max(dp1[i - 1], money[i] + dp1[i - 2])
    # Case 2: the first house is off-limits, so the last may be robbed.
    dp2 = [0] * n
    dp2[1] = money[1]
    for i in range(2, n):
        dp2[i] = max(dp2[i - 1], money[i] + dp2[i - 2])
    return max(max(dp1), max(dp2))
def solution2(money):
    """Circular House Robber via two linear passes.

    Pass 1 considers houses 0..n-2 (last house off-limits); pass 2
    considers houses 1..n-1 (first house off-limits).  The answer is the
    better of the two pass totals.
    """
    n = len(money)
    # Pass 1: last house excluded.
    best = [0] * (n - 1)
    best[0], best[1] = money[0], max(money[:2])
    for idx in range(2, n - 1):
        best[idx] = max(best[idx - 2] + money[idx], best[idx - 1])
    with_first = best[-1]
    # Pass 2: first house excluded.
    best = [0] * n
    best[1] = money[1]
    for idx in range(2, n):
        best[idx] = max(best[idx - 2] + money[idx], best[idx - 1])
    return max(with_first, best[-1])
if __name__ == "__main__":
    # Manual smoke test: expected output is 4.
    print(solution([1, 2, 3, 1]))
|
[
"Jack2ee.dev@gmail.com"
] |
Jack2ee.dev@gmail.com
|
5c48cd3176e4d1e95a401bc2936dcae87e3de95c
|
4be610e4c817203001d9847d46d0425b3aa5a0af
|
/10-organizing-files/backupToZip.py
|
8af9507e1c3dd13a8c5c231cee3f04a6bb170e44
|
[
"CC-BY-NC-SA-3.0",
"LicenseRef-scancode-cc-by-nc-sa-3.0-us"
] |
permissive
|
UIHackyHour/AutomateTheBoringSweigart
|
600430ae3552b1c668d0d3582880df2deb73e474
|
d482d0e0401bf29f9a570ca01bb5beaf21beb7a4
|
refs/heads/main
| 2023-04-10T10:08:27.545266
| 2021-04-28T21:29:19
| 2021-04-28T21:29:19
| 327,053,819
| 0
| 0
|
CC0-1.0
| 2021-01-05T22:39:23
| 2021-01-05T16:21:17
| null |
UTF-8
|
Python
| false
| false
| 1,300
|
py
|
#! python3
# Backing Up a Folder into a ZIP File

import zipfile, os

def backupToZip(folder):
    """Back up the entire contents of *folder* into a numbered ZIP file
    (``<basename>_N.zip``) created in the current working directory.

    N starts at 1 and increments until an unused filename is found, so
    repeated runs produce _1, _2, ... archives.  Previously created
    backup ZIPs inside the folder are skipped.
    """
    folder = os.path.abspath(folder)   # make sure folder is absolute

    # Figure out the filename this code should use based on
    # what files already exist.
    number = 1
    while True:
        zipFilename = os.path.basename(folder) + '_' + str(number) + '.zip'
        if not os.path.exists(zipFilename):
            break
        number = number + 1

    # Create the ZIP file.  `with` guarantees the archive is closed (and
    # its central directory flushed) even if the walk raises — the
    # original leaked the open handle on error.
    print(f'Creating {zipFilename}...')
    with zipfile.ZipFile(zipFilename, 'w') as backupZip:
        # Walk the entire folder tree and compress the files in each folder.
        for foldername, subfolders, filenames in os.walk(folder):
            print(f'Adding files in {foldername}...')
            # Add the current folder to the ZIP file.
            backupZip.write(foldername)
            # Add all the files in this folder to the ZIP file.
            for filename in filenames:
                newBase = os.path.basename(folder) + '_'
                if filename.startswith(newBase) and filename.endswith('.zip'):
                    continue   # don't back up the backup ZIP files
                backupZip.write(os.path.join(foldername, filename))
    print('Done.')
|
[
"trevorcline@Trevors-MacBook-Pro.local"
] |
trevorcline@Trevors-MacBook-Pro.local
|
8b3a17b5b6b215cc6f286f8e146cb719e391fb49
|
0a4105df9a214c6e80daf2f812d5a8a81109d42e
|
/dishes/apps.py
|
a0a5084647b3d9d329b21af0b8d240c936e8393c
|
[] |
no_license
|
IrinaGnatiuk/Pizza_Views
|
f4d60ac1a167d49cafbd99276ae580f6c2a1e1ef
|
70084dc99dd307cfd07406bfc26ad7cfd07cdc26
|
refs/heads/master
| 2022-12-11T12:39:06.432326
| 2020-01-20T01:05:02
| 2020-01-20T01:05:02
| 221,529,972
| 0
| 0
| null | 2022-12-08T06:52:39
| 2019-11-13T18:54:19
|
Python
|
UTF-8
|
Python
| false
| false
| 119
|
py
|
from django.apps import AppConfig
class DishesConfig(AppConfig):
    # App configuration for the "dishes" app.
    # verbose_name is Russian for "Dishes" (runtime string, kept verbatim).
    name = 'dishes'
    verbose_name = 'Блюда'
|
[
"sestr2008@gmail.com"
] |
sestr2008@gmail.com
|
e4c482d19e461b1f257c7f1aa5ba7118597292f3
|
b4dea654e49c28230236113770d0a1c17f0a8968
|
/DFS/Permutation/LT10. String Permutation II.py
|
7a4ab4f26bdb0c217e807c30b5ff08ef5867cd3c
|
[] |
no_license
|
ruifengli-cs/leetcode
|
cd751905526c7d8effa0fb95b296deb6e7124b7a
|
70abc023ede5f5039fbf336c7401d2596069fdc5
|
refs/heads/master
| 2023-05-07T02:52:34.303865
| 2021-05-24T12:57:35
| 2021-05-24T12:57:35
| 255,133,282
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,590
|
py
|
class Solution:
    """
    @param str: A string
    @return: all permutations (distinct, in lexicographic order)
    """
    def stringPermutation2(self, str):
        # A missing input yields no permutations.
        if str is None:
            return []
        chars = sorted(str)
        out = []
        self.dfs(chars, [False] * len(chars), [], out)
        return out

    def dfs(self, s, visited, perm, res):
        # A full-length partial permutation is a finished result.
        if len(perm) == len(s):
            res.append("".join(perm))
            return
        for idx, ch in enumerate(s):
            if visited[idx]:
                continue
            # Skip a duplicate character unless its equal neighbour to
            # the left is currently in use — this de-duplicates output.
            if idx and ch == s[idx - 1] and not visited[idx - 1]:
                continue
            visited[idx] = True
            perm.append(ch)
            self.dfs(s, visited, perm, res)
            perm.pop()
            visited[idx] = False
# Treat as string
# def stringPermutation2(self, str):
# # write your code here
# res = []
# if str is None:
# return res
# visited = [False] * len(str)
# str = "".join(sorted(str))
# self.dfs(str, visited, "", res)
# return res
# def dfs(self, A, visited, perm, A_perms):
# if len(perm) == len(A):
# A_perms.append(perm)
# return
# for i in range(len(A)):
# if visited[i]:
# continue
# if i > 0 and A[i] == A[i - 1] and visited[i - 1] == False:
# continue
# perm += A[i]
# visited[i] = True
# self.dfs(A, visited, perm, A_perms)
# visited[i] = False
# perm = perm[: -1]
|
[
"ruifengl@asu.edu"
] |
ruifengl@asu.edu
|
a74b5a47749eecdcdd70e0505f3335a12e7c9777
|
789819586d2c194f8e17d1863485980eec4afa50
|
/Feature_extraction/example.py
|
6e7e076304d8e4b0806898cca3923686cf396158
|
[] |
no_license
|
naveentrtumkur/AUAV-Research-Project
|
f758fe631379ce46e7d4763d69bfe96ca68fdece
|
5bc11394b761860b9876041a40b62e00fa935cf9
|
refs/heads/master
| 2018-10-04T21:25:15.840744
| 2018-07-09T03:24:43
| 2018-07-09T03:24:43
| 118,203,022
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,650
|
py
|
# Parse a drone-path JSON-ish log and derive the drone's current grid cube.
res=[]
with open('JSONfile58.json','rt') as myfile:
    for line in myfile:
        #print(line)
        # Strip the surrounding braces from each line.
        res.append(line.strip("{}"))
print(res[0])

# NOTE(review): `list` shadows the builtin of the same name.
list = res[0].split(',')
print(list)
print(list[2])

# The third comma-separated field is expected to look like `"path":"..."`;
# take the value part and drop its quotes — TODO confirm against the
# JSON producer.
path_taken = list[2].split(':')[1].strip('\"')
print(path_taken)

# Explode the path string into a list of single characters.
pos = []
for char in path_taken:
    pos += char
print(pos)

y_dist = 0
# check whether our list contains 'UP' or 'DW'
# NOTE(review): this tests for the letters 'U' and 'P' anywhere in the
# path, not for the adjacent substring "UP".
if 'U'in pos and 'P' in pos:
    print("UP is present")
    y_dist = 1
    #pos.strip("UP")
    # NOTE(review): these two comprehensions discard their results, so
    # they have no effect on `pos`.
    [s.strip('U') for s in pos]
    [s.strip('P') for s in pos]
# NOTE(review): `pos` holds single characters, so the two-character
# string "DW" can never be an element — this branch is unreachable and
# y_dist is never set to -1.
elif "DW" in pos:
    print("DW is present")
    y_dist = -1
    #pos.strip("DW")
else:
    print("Drone in central slice")

# Remove direction markers and separators, keeping slice letters only.
mylist = pos
mylist = ' '.join(mylist).replace('U','').replace('P','').replace('_','').split()
#mylist = ' '.join(mylist).replace('P','').split()
print(mylist)
pos =mylist

'''for char in pos:
for case in switch(char):
if case('C'):
print('C')
break
if case('L'):
print('L')
if case(): # This is like a default case
print("This is something else")
'''
print(len(pos))
n = len(pos)
print(pos[n-1])

# Map the final slice letter to a unit-cube coordinate triple.
# NOTE(review): 'C' and 'F' produce identical cubes — confirm intended.
cur_pos = pos[n-1]
cube= []
if cur_pos == 'C':
    cube.append(1)
    cube.append(1+y_dist)
    cube.append(1)
elif cur_pos == 'L':
    cube.append(0)
    cube.append(1+y_dist)
    cube.append(1)
elif cur_pos == 'B':
    cube.append(0)
    cube.append(1+y_dist)
    cube.append(0)
elif cur_pos == 'R':
    cube.append(1)
    cube.append(1+y_dist)
    cube.append(0)
elif cur_pos == 'F':
    cube.append(1)
    cube.append(1+y_dist)
    cube.append(1)
print(cube)
#for elem in res:
## print(elem)
|
[
"naveentrrko@gmail.com"
] |
naveentrrko@gmail.com
|
a9541f2263a36a67d536e3b5f976870bd7d1b6d2
|
ae36cf5a7106651c48ef2bc1a717c91560d0fb1d
|
/source.py
|
e47d63698ff6cc1ba2237613478490925bb4ac0b
|
[] |
no_license
|
shilyaev/dollar
|
596fdf2bdeca58eb1a790a3b02cd6f209fae4c4d
|
7f5ee9fe5145c2b52cec773cc6713f08fb46d49e
|
refs/heads/master
| 2020-04-05T18:42:41.561255
| 2018-11-11T18:48:13
| 2018-11-11T18:48:13
| 157,109,933
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 265
|
py
|
# Scan kursUSD.txt (lines of "<date> <rate>") and print the date with the
# highest USD rate.
print('veweeerev')  # NOTE(review): leftover debug output — kept so stdout is unchanged
# NOTE(review): the initial 1 assumes all rates exceed 1; a file whose
# rates are all <= 1 prints the placeholders — confirm intended.
max_dolr=1
max_data= " "
# `with` closes the file even on error (the original leaked the handle);
# the pointless map(str, ...) over already-str tokens is dropped.
with open('kursUSD.txt', 'r') as input_file:
    for line in input_file:
        date, d = line.split()
        dolr = float(d)
        if dolr > max_dolr:
            max_dolr = dolr
            max_data = date
print(max_data,max_dolr)
|
[
"noreply@github.com"
] |
shilyaev.noreply@github.com
|
ce29d63af7d7b777817311449a9f21203a59a0d0
|
de8e3ddb8de718ff8eda90c5a5e397bc95da1fec
|
/news/models.py
|
f9854b87596f90d394c0ba99dabb793df4fc3903
|
[] |
no_license
|
OoGod/test3
|
03138a3629f1f00d82c4b0d8edc1b8a333870c03
|
2aefeeaa92eab40589e9f69d5b3317c6f3492fe9
|
refs/heads/master
| 2022-09-24T14:49:10.562480
| 2020-06-03T10:11:04
| 2020-06-03T10:11:04
| 268,289,046
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 644
|
py
|
from django.db import models
# Create your models here.
class Reporter(models.Model):
    # Byline author, identified by full name only.
    full_name = models.CharField(max_length=70)

    def __str__(self):
        return self.full_name
class Article(models.Model):
    # A story filed by a single Reporter; deleting the reporter cascades
    # to its articles.
    pub_date = models.DateField()
    headline = models.CharField(max_length=200)
    content = models.TextField()
    reporter = models.ForeignKey(Reporter,on_delete=models.CASCADE)

    def __str__(self):
        return self.headline
class Comment(models.Model):
    # Reader comment attached to an Article; timestamp is set once at
    # creation (auto_now_add).
    article = models.ForeignKey(Article,on_delete=models.CASCADE)
    text = models.TextField('comment')
    timestamp = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        return self.text
|
[
"1617605689@qq.com"
] |
1617605689@qq.com
|
445b3eac094beeefe440e61283fe081b1907dc3f
|
cd33a8a9d028aaf4acb4f39e040e512efc1ac30a
|
/funcs.py
|
a9da1ac4db5f9f7a3c74df1acb4499deac7e0e32
|
[] |
no_license
|
TazzerLabs/LegV8Interpreter
|
41b27e09132e4855de43cd9602c23a1727a49eff
|
292eea585e30b0e0708f461c4f1c67d93ea75a97
|
refs/heads/master
| 2020-05-07T16:20:07.937545
| 2019-03-27T15:08:30
| 2019-03-27T15:08:30
| 180,678,062
| 1
| 0
| null | 2019-04-10T23:18:58
| 2019-04-10T23:18:58
| null |
UTF-8
|
Python
| false
| false
| 1,995
|
py
|
from state import registers, flags
# NOTE(review): these module-level names appear unused — every handler
# below rebinds rM/rN/rD/iM as locals, so these globals look like dead
# code; confirm before removing.
rM = 0
rN = 0
rD = 0
iM = 0
# Interpret *value* as a 64-bit two's-complement integer.  Bits above the
# low 64 are discarded, so any Python int maps into [-2**63, 2**63 - 1].
def s64(value):
    return ((value + 0x8000000000000000) & 0xffffffffffffffff) - 0x8000000000000000
def add(args):
    """ADD Xd, Xn, Xm: rd <- rn + rm, no flags updated.

    args holds register tokens like 'X3'; [1::] strips the letter prefix.
    NOTE(review): unlike adds(), the sum is not passed through s64(), so
    it only wraps to 64 bits if the register type truncates on assignment
    — confirm against state.registers.
    """
    rD = int(args[0][1::])
    rN = int(args[1][1::])
    rM = int(args[2][1::])
    registers[rD].int = registers[rN].int + registers[rM].int
def addi(args):
    """ADDI Xd, Xn, #imm: rd <- rn + immediate, no flags updated.

    NOTE(review): like add(), the result is not wrapped through s64() —
    confirm the register type truncates on assignment.
    """
    rD = int(args[0][1::])
    rN = int(args[1][1::])
    iM = int(args[2])
    registers[rD].int = registers[rN].int + iM
def adds(args):
    """ADDS Xd, Xn, Xm: rd <- s64(rn + rm), updating condition flags.

    flags layout: [0] = Z, [1] = N, [2] = C, [3] = V.
    NOTE(review): flags are only ever set to 1, never cleared, so stale
    flags from a previous instruction survive — confirm intended.
    NOTE(review): C is inferred by comparing the wrapped result with the
    unwrapped Python sum, and V is set together with C; carry and signed
    overflow are distinct conditions — verify against the LEGv8 spec.
    Also note that if rD == rN or rD == rM the comparison below reads the
    already-updated register.
    """
    rD = int(args[0][1::])
    rN = int(args[1][1::])
    rM = int(args[2][1::])
    registers[rD].int = s64(registers[rN].int + registers[rM].int)
    #show stored result as int and hex
    print(registers[rD].int, ' ', registers[rD])
    if not registers[rD].int:
        flags[0] = 1
    if registers[rD].int < 0:
        flags[1] = 1
    if (registers[rD].int < registers[rN].int + registers[rM].int):
        flags[2] = 1
        flags[3] = 1
    #show flags: [0] = Z, [1] = N, [2] = C, [3] = V
    print(flags)
# --- Unimplemented LEGv8 instruction handlers -------------------------------
# Each stub shares the add()/adds() signature (a list of operand tokens)
# and currently performs no state change, returning None as a placeholder.
def addis(args):
    return
def aand(args):
    return
def andi(args):
    return
def ands(args):
    return
def andis(args):
    return
def b(args):
    return
def bcond(args):
    return
def bl(args):
    return
def br(args):
    return
def cbz(args):
    return
def cbnz(args):
    return
def eor(args):
    return
def eori(args):
    return
def ldur(args):
    return
def ldurb(args):
    return
def ldurh(args):
    return
def ldursw(args):
    return
def ldxr(args):
    return
def lsl(args):
    return
def lsr(args):
    return
def orr(args):
    return
def orri(args):
    return
def sub(args):
    return
def subi(args):
    return
def subs(args):
    return
def subis(args):
    return
def stur(args):
    return
def sturb(args):
    return
def sturh(args):
    return
def sturw(args):
    return
def stxr(args):
    return
|
[
"lizzieaxworthy@gmail.com"
] |
lizzieaxworthy@gmail.com
|
74d2f25d719bb7c0f4488281596ce6c285b03291
|
19e53b7c76a96fb2f9ec4af616d8ef58d544e606
|
/PythonWebProject/app/__init__.py
|
0eb132a4e2d6d70d117ec359f25e0bbf432d2045
|
[] |
no_license
|
nhatminh7599/QuanLyBanVeMayBay
|
ace7130e24dccddde6ce63bb64e62f0cf7f5e4f5
|
7655f1e948481cb8943259f5e310b75a8a87764b
|
refs/heads/master
| 2022-12-20T20:45:36.777569
| 2020-09-15T03:07:43
| 2020-09-15T03:07:43
| 287,444,711
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 477
|
py
|
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_admin import Admin
from flask_login import LoginManager
# Application bootstrap: creates the Flask app plus the ORM, admin site and
# login manager that the rest of the package imports.
app = Flask(__name__)
# NOTE(review): secret key and DB credentials are hard-coded; consider
# moving them to environment variables before deployment.
app.secret_key = "ui21eui2g"
app.config["SQLALCHEMY_DATABASE_URI"] = "mysql+pymysql://root:mysql@localhost/ql_maybay?charset=utf8mb4"
# True emits a signal on every model change (has some runtime overhead).
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = True
db = SQLAlchemy(app=app)
admin = Admin(app=app, name="Quan Ly Ve May Bay", template_mode="bootstrap3")
login = LoginManager(app=app)
|
[
"55686100+nhatminh7599@users.noreply.github.com"
] |
55686100+nhatminh7599@users.noreply.github.com
|
1112c20c4c94f80596425d484a9e11c10ad8aa7c
|
cbb54ff7d0f99f236f52d610c660e96ff6172994
|
/portfolio/migrations/0003_extractmodel.py
|
10a1142cce62663ff0062973978d987722211587
|
[] |
no_license
|
anvesh001/myproject1
|
8cb006fa28c1a9f5e44a43833e0d6c3e0500ec2d
|
a11c9914069e557b41a7e52800d143094113a2c6
|
refs/heads/master
| 2020-06-19T03:28:35.006550
| 2019-07-16T05:16:07
| 2019-07-16T05:16:07
| 196,547,927
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,200
|
py
|
# Generated by Django 2.0.7 on 2019-07-10 11:53
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: creates the ExtractModel table.

    Do not hand-edit field definitions here; schema changes belong in a
    new migration generated by ``makemigrations``.
    """

    dependencies = [
        ('portfolio', '0002_auto_20190710_1603'),
    ]

    operations = [
        migrations.CreateModel(
            name='ExtractModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=200)),
                ('email', models.EmailField(max_length=254)),
                ('phone', models.CharField(max_length=200)),
                ('requirements', models.CharField(choices=[('Register Your Business', 'Register Your Business'), ('simply your procces', 'simply your procces'), ('Tax Filling and Audit', 'Tax Filling and Audit'), ('Financial Services', 'Financial Services'), ('Get Trademark and File Patents', 'Get Trademark and File Patents'), ('Reports and Agreements', 'Reports and Agreements'), ('Closure & Changein Business', 'Closure & Changein Business'), ('Secretarial Compliances', 'Secretarial Compliances')], default='Register Your Business', max_length=30)),
            ],
        ),
    ]
|
[
"anveshkumarnaidu402@gmail.com"
] |
anveshkumarnaidu402@gmail.com
|
12300c616f6416b65f3f2160cf6344be27afc91e
|
1dcd99bb96d9c51b2b561e7c2e54615cf2bc5ced
|
/Question/BOJ/boj-7576.py
|
6dacfcda86b8ca16034623d1077da164c19ec709
|
[] |
no_license
|
dongsik93/HomeStudy
|
62bbcad93be49ed396fe9d50e840f921bb751d4e
|
3a28ff8c0b522a546ea2ed07c939f49bac3699c7
|
refs/heads/master
| 2020-04-15T02:12:36.615830
| 2019-04-05T14:31:10
| 2019-04-05T14:31:10
| 164,306,391
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,101
|
py
|
import sys
import collections
# Faster bulk line reads than the builtin input() for large inputs.
input = sys.stdin.readline
# Row/column offsets of the four orthogonal neighbours (paired di[k], dj[k]).
di = [0, 1, 0, -1]
dj = [1, 0, -1, 0]
def bfs(n, m):
    """Multi-source BFS over the n x m grid of tomatoes.

    Coordinates are stored interleaved in the global deque ``q`` (row then
    column); ``visited`` records 1 + the day a ripe tomato reaches a cell.
    Returns the ``visited`` matrix.
    """
    while len(q) != 0:
        row = q.popleft()
        col = q.popleft()
        for step in range(4):
            nr = row + di[step]
            nc = col + dj[step]
            # Stay on the board and only spread into unripe, unvisited cells.
            if 0 <= nr < n and 0 <= nc < m:
                if pot[nr][nc] == 0 and visited[nr][nc] == 0:
                    visited[nr][nc] = visited[row][col] + 1
                    q.append(nr)
                    q.append(nc)
    return visited
# Board is given as m columns by n rows.
m, n = map(int, input().split())
pot = []
for i in range(n):
    pot.append(list(map(int, input().split())))
q = collections.deque([])
visited = [[0]*m for i in range(n)]
# q =[]
# Seed the BFS queue with every ripe tomato (value 1); walls (-1) are
# mirrored into visited so they are never counted as unreachable.
for i in range(n):
    for j in range(m):
        if(pot[i][j] == 1):
            q.append(i)
            q.append(j)
            visited[i][j] = 1
        elif(pot[i][j] == -1):
            visited[i][j] = -1
a = bfs(n,m)
res = 0
s = 0
# Answer = largest day counter minus the seed value of 1.
for i in a:
    if(s < max(i)):
        s = max(i)
if(s == 1):
    res = 0
else:
    res = s -1
# Any cell still 0 was never reached -> impossible, print -1.
for i in a:
    if(0 in i):
        res = -1
print(res)
|
[
"ehdtlr9376@naver.com"
] |
ehdtlr9376@naver.com
|
aff6901869ffbfb81b0fc9252912176484570d69
|
a9267d6bb30cf626fc5311f3ea3b51604e4f2f40
|
/bixi_import/bixi_import.py
|
9f26d4be16f73cabf2f9ea0e75fb9141eda2b15b
|
[] |
no_license
|
AndresUrregoAngel/Python
|
df1cefd18382f4458586fc17bd8ca2ef334f3db9
|
47c9fafb7bbce5db882e771c80ab90245c73cefd
|
refs/heads/master
| 2021-07-21T00:21:12.700393
| 2019-08-07T23:22:54
| 2019-08-07T23:22:54
| 102,917,152
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 786
|
py
|
import requests
import datetime
import json
'''
def lambda_handler(event, context):
    #print("Received event: " + json.dumps(event, indent=2))
    print("value1 = " + event['key1'])
    print("value2 = " + event['key2'])
    print("value3 = " + event['key3'])
    return event['key1'] # Echo back the first key value
    #raise Exception('Something went wrong')
'''
def lambda_handler(event, context):
    """Fetch the Bixi JSON feed and dump it to a timestamped local file.

    ``event`` and ``context`` follow the AWS Lambda calling convention but
    are not used by the body.
    """
    # NOTE(review): 'website-here' is a placeholder; the real feed URL must
    # be substituted before this handler can work.
    file = 'website-here'
    result = requests.get(file)
    data = result.json()
    now = datetime.datetime.now()
    hour = now.hour
    day = now.day
    month = now.month
    mins = now.minute
    with open('Bixi_%s_%s_%s-%s.json' % (month,day,hour,mins) , 'w') as outfile:
        json.dump(data, outfile)
    # print() returns None, so this effectively returns None after logging.
    return print("download succeed")
# Ad-hoc local invocation (the string arguments are ignored by the body).
lambda_handler('n','n')
|
[
"ingenieroandresangel@gmail.com"
] |
ingenieroandresangel@gmail.com
|
c2d0a6384e9d3292575cb6d5553dc3245695a38c
|
6a2bbf187700b40e94a3296a083ebcf72b4b6400
|
/runCFile.py
|
fcd9ff44fd217908db5f547eddf70ee1fdba6d9a
|
[] |
no_license
|
JusperLee/Accelerator
|
0a2f5cbb05b9bce74a1558ee71f4fc8ea3d4820d
|
036bfb28b2f290be5581616aab89ccb25f48224f
|
refs/heads/master
| 2020-04-13T19:37:50.522003
| 2018-12-28T12:33:21
| 2018-12-28T12:33:21
| 163,408,442
| 7
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 477
|
py
|
# coding:GBK
import os
def function1(n):
    """Run ``bing.exe`` ten times and record the third whitespace token of
    each run's output into ``time<n>.txt`` (newline-separated, no trailing
    newline — same layout as before).

    :param n: integer suffix for the output file name.
    :return: None

    Fixes over the previous version: the unused ``start = str1.find("Time")``
    assignments are removed, the ``os.popen`` handle is closed, and the
    output file is managed with a context manager instead of manual close.
    """
    samples = []
    for _ in range(10):
        proc = os.popen(r".\bing.exe")
        try:
            output = str(proc.read())
        finally:
            # popen handles must be closed; close() also reports exit status.
            proc.close()
        tokens = output.split(" ")
        print(tokens)
        samples.append(tokens[2])
    with open("time" + str(n) + ".txt", 'w') as out:
        out.write("\n".join(samples))
|
[
"likai614020758@126.com"
] |
likai614020758@126.com
|
d96d68a7a4db92296593d1bedb049cd728c7abf7
|
69dee5cfda2e9ae20745f6f3f588a007e7321a52
|
/blog/urls.py
|
a1af804e6596799bb8ea6c72115c07192660989b
|
[] |
no_license
|
PhilBug/django-blog
|
0bb134529b33eb0bd79c3d4925bd047e1f7d01dc
|
1c92fc2efa3058a8221120e810b38889e705e96e
|
refs/heads/master
| 2023-02-19T22:44:06.764192
| 2021-01-26T01:18:31
| 2021-01-26T01:18:31
| 332,064,010
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 375
|
py
|
from django.conf.urls import include
from django.contrib import admin
from django.urls import path
from django.conf.urls.static import static
from django.conf import settings
# URL table: pages app at the site root, posts under /posts/, plus the
# admin; static() appends MEDIA_URL serving (development convenience).
# NOTE(review): ``django.conf.urls.include`` is the legacy import path;
# modern Django exposes ``include`` from ``django.urls`` — confirm the
# project's Django version before changing.
urlpatterns = [
    path('', include('pages.urls')),
    path('posts/', include('posts.urls')),
    path('admin/', admin.site.urls),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
[
"fbugaj@match-trade.com"
] |
fbugaj@match-trade.com
|
76820a662c6161e2368acd522923d6b0fdef9ade
|
a16cd1cfc9ec3b0f0008cadd633b8cb0e82dcd44
|
/Quad2d.py
|
6e0edb782b0e00b7241b392fe31648be419172f8
|
[] |
no_license
|
rprospero/PAPA-Control
|
59cbbcbfb9c9d8d72d13d431f21550add197f949
|
1e8baa472d778a2b85ad75415b9de84e35bece2f
|
refs/heads/master
| 2021-01-01T04:25:50.310931
| 2012-07-27T14:57:43
| 2012-07-27T14:57:43
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,109
|
py
|
from reader import PelFile
from monfile import MonFile
import matplotlib.pyplot as plt
import Combiner
import numpy as np
from optparse import OptionParser
from QuadReduction import getf
# Root folder of the per-run neutron data directories (Windows box).
basedir = "C:/Documents and Settings/sesaadmin/My Documents/Neutron Data/"
def raw(run,name,start=50,end=100,size=512):
    # Load one detector run plus its monitor file and integrate the
    # wavelength slice [start:end); returns (counts, error) each normalised
    # by the total monitor spectrum. (Python 2 source — note the print
    # statement below.)
    p = PelFile(basedir+"%04i/" % run + name+".pel")
    mon = MonFile(basedir+"%04i/" % run + name+
                  ".pel.txt",False)
    val = np.sum(p.make3d(size)[:,:,start:end],axis=2)
    print val.shape
    spectrum_total = np.sum(mon.spec)
    return val/spectrum_total,np.sqrt(val)/spectrum_total
def clean_data(out):
    """Sanitise *out* in place: NaN -> 0, +inf -> 1000, -inf -> -1000."""
    nan_mask = np.isnan(out)
    out[nan_mask] = 0
    pos_mask = np.isposinf(out)
    out[pos_mask] = 1000
    neg_mask = np.isneginf(out)
    out[neg_mask] = -1000
if __name__=='__main__':
    # Command-line driver: compute flipper/instrument efficiencies from the
    # last run listed on the command line, then plot and/or dump to file.
    parser = OptionParser()
    # NOTE(review): this mapping appears unused in the body below — confirm
    # before removing (it may be kept for parity with sibling scripts).
    choices = {None:None,"flipper":0,"guides":1,
               "phase":2,"sample":3,"1":4,"2":5,
               "3":6,"4":7,"5":8,"6":9,"7":10,"8":11}
    parser.add_option("--mon",action="store",
                      type="float",
                      help="Minimum monitor value",
                      default=8)
    parser.add_option("--vmin",action="store",
                      type="float",
                      help="Minimum efficiency value",
                      default=0)
    parser.add_option("--cutoff",action="store",
                      type="float",
                      help="Minimum count rate",
                      default=1e-6)
    parser.add_option("--plot",action="store_true")
    parser.add_option("--save",action="store",
                      type="string",
                      help="File to save data")
    parser.add_option("--start",action="store",
                      type="int",help="Beginning wavelength in 1/10 Angstrom units (e.g. 57 = 5.7 Angstoms). The default value is 50",default=50)
    parser.add_option("--stop",action="store",
                      type="int",help="Ending wavelength in 1/10 Angstrom units (e.g. 57 = 5.7 Angstoms). The default value is 100",default=100)
    parser.add_option("--display",action="store",
                      type="choice",
                      choices=["cryo","solenoid",
                               "instrument"],
                      default="cryo",
                      help="Whether to plot the " +
                      "efficiency of the cryo flipper,"+
                      "the solenoid"+
                      " flipper, or the instrument")
    parser.add_option("--size",action="store",type="int",default=512,
                      help="Pixel resolution for rebinning. Defaults to native 512x512 image.")
    (options,runs) = parser.parse_args()
    runs = [int(x) for x in runs]
    start=options.start
    stop=options.stop
    # Only the last run number given is actually processed.
    (w,dw) = raw(runs[-1],"w_new",start,stop,options.size)
    data= getf((w,dw),
               raw(runs[-1],"x_new",start,stop,options.size),
               raw(runs[-1],"y_new",start,stop,options.size),
               raw(runs[-1],"z_new",start,stop,options.size))
    # f: cryo flipper, f1: solenoid flipper, papb: instrument, n: intensity.
    f,df,f1,df1,papb,dpapb,n,dn = data
    if options.plot:
        if options.display=="cryo":
            plotdata=f
        if options.display=="solenoid":
            plotdata=f1
        if options.display=="instrument":
            plotdata=papb
        # Suppress pixels whose count rate is below the cutoff.
        plotdata[w<options.cutoff] = 0
        plt.spectral()
        #plt.gray()
        #plt.spring()
        plt.imshow(plotdata,vmin=options.vmin,vmax=1)
        plt.show()
    if options.save:
        with open(options.save,"w") as outfile:
            outfile.write(
                "x\ty\tcryo\tcryoerr\tsolenoid\t"+
                "solenoiderr\tinstrument\t"+
                "instrumenterr\tintensity\n")
            [clean_data(x) for x in (f,df,f1,df1,papb,dpapb,w)]
            # NOTE(review): the 512 here is hard-coded even though --size is
            # configurable — confirm whether saving honours --size.
            for x in range(512):
                for y in range(512):
                    outfile.write(
                        ("%i\t%i\t%e\t%e\t%e\t%e\t"+
                         "%e\t%e\t%e\n")%
                        (x,y,f[x,y],df[x,y],
                         f1[x,y],df1[x,y],
                         papb[x,y],dpapb[x,y],
                         w[x,y]))
|
[
"adlwashi@indiana.edu"
] |
adlwashi@indiana.edu
|
4955a3ad84b413d2bc9da658fc39c7bb9d546863
|
32d39e0729d4a16b65e2550c84aa6b37b228ca08
|
/remote_tx2/remote_catkin_ws_src/src/start_ros_serial/scripts/start_serial.py
|
f713b7f5c4e7d1e77371e26af1fdde26b0693c9a
|
[] |
no_license
|
BishrutSubedi/DNC_LAB_ACDA
|
b6a9fa82c87010929e478186df8c2f72de9c3f88
|
ad86cad62586b69b11c9432b1879a88241a12bc8
|
refs/heads/master
| 2022-04-18T12:28:18.386526
| 2020-03-09T21:42:36
| 2020-03-09T21:42:36
| 241,815,884
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 508
|
py
|
#!/usr/bin/env python
import rospy
import os
import sys
from std_msgs.msg import String
# One-shot guard used by callback(): the roslaunch command must fire only
# for the first received message. NOTE(review): ``global`` at module level
# is a no-op statement; only the declaration inside callback() matters.
global i
i=1
def callback(data):
    """Handle one String message from the remote_ros_serial topic.

    On the first message only, launch the remote serial node via roslaunch
    and log the received payload; every message increments the guard
    counter ``i`` so the launch never repeats.
    """
    global i
    if i==1:
        # NOTE(review): os.system blocks until roslaunch exits — confirm
        # that is acceptable inside a subscriber callback.
        os.system("roslaunch /home/nvidia/catkin_ws/devel/sshnodes/remote_serial.launch")
        # Fixed the malformed ", &d" placeholder: only one argument
        # (data.data) is supplied, matching the single %s.
        rospy.loginfo(rospy.get_caller_id() + "I heard %s", data.data)
    i=i+1
def listener():
    # Initialise the node, subscribe to remote_ros_serial, and block until
    # the node is shut down.
    rospy.init_node('remote_serial', anonymous=False)
    rospy.Subscriber("remote_ros_serial", String, callback)
    rospy.spin()
if __name__ == '__main__':
    # Script entry point: start the subscriber loop.
    listener()
|
[
"bishrut.subedi@mavs.uta.edu"
] |
bishrut.subedi@mavs.uta.edu
|
a4cd0950566d7dfff895e27838a57325bbe29f5a
|
bd8fe48d13ac36cffe19865b7e2c4252d814e47e
|
/hw2/code/HW2_test.py
|
b66b485f40529b39c0dbb8804c66a2bb57e2efce
|
[] |
no_license
|
alvinsunyixiao/ece551-fa20
|
904e5ded28fca223cc5d21c6bdc60e9b037bd7ba
|
8b6e59b0f5f7cac7e3079a3446e241d6bd0ce6a9
|
refs/heads/master
| 2023-01-27T11:31:52.633656
| 2020-12-12T02:53:37
| 2020-12-12T02:53:37
| 294,875,748
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,755
|
py
|
import numpy as np
from HW2 import CGS, MGS
import unittest
# Switches controlling which suites are collected by the runner below.
# (The previous comment about "window functions and convolution" was stale,
# copied from another assignment.)
# Test Classical Gram-Schmidt
TEST_CGS = True
# Test Modified Gram-Schmidt
TEST_MGS= True
class OrthonormalTest_CGS(unittest.TestCase):
    """Randomised spot checks that CGS returns an orthonormal row basis."""
    def test_orthogonal(self):
        # Off-diagonal entries of Q @ Q.T must vanish for orthogonal rows.
        mat = 10*np.random.randn(10,10)
        basis = CGS(mat)
        gram = np.matmul(basis, np.transpose(basis))
        np.fill_diagonal(gram, 0)
        np.testing.assert_allclose(gram, np.zeros((10,10)), atol=1e-6)
    def test_normal(self):
        # Every row of the returned basis must have unit Euclidean length.
        mat = 10*np.random.randn(10,10)
        basis = CGS(mat)
        norms = np.linalg.norm(basis, axis=1)
        np.testing.assert_allclose(norms, np.ones(10), atol=1e-6)
class OrthonormalTest_MGS(unittest.TestCase):
    """Randomised spot checks that MGS returns an orthonormal row basis."""
    def test_orthogonal(self):
        # Off-diagonal entries of Q @ Q.T must vanish for orthogonal rows.
        mat = 10*np.random.randn(10,10)
        basis = MGS(mat)
        gram = np.matmul(basis, np.transpose(basis))
        np.fill_diagonal(gram, 0)
        np.testing.assert_allclose(gram, np.zeros((10,10)), atol=1e-6)
    def test_normal(self):
        # Every row of the returned basis must have unit Euclidean length.
        mat = 10*np.random.randn(10,10)
        basis = MGS(mat)
        norms = np.linalg.norm(basis, axis=1)
        np.testing.assert_allclose(norms, np.ones(10), atol=1e-6)
# Assemble the suites selected by the TEST_* switches above and run them.
# NOTE(review): this executes at import time; consider guarding with
# ``if __name__ == '__main__':`` so importing the module stays side-effect
# free. (The old "# Convolution Tests" label here was stale.)
tests = unittest.TestSuite()
if TEST_CGS:
    tests.addTest(unittest.makeSuite(OrthonormalTest_CGS))
if TEST_MGS:
    tests.addTest(unittest.makeSuite(OrthonormalTest_MGS))
runner=unittest.TextTestRunner(verbosity=2)
runner.run(tests)
|
[
"alvinsunyixiao@gmail.com"
] |
alvinsunyixiao@gmail.com
|
2751322dd0b9dc7698eed0d0be2c08eef3eefb56
|
52632757620e5d3f4577e9c8c92a99bcdfa1c058
|
/MiV400Automation/lib/WebUIComponent.py
|
db37439c398e44b5ef943a899901a8a46c8be85f
|
[] |
no_license
|
vijayp530/MiV400Automation
|
5dbf96ae566135fe30ee732bb12b283b6df581e9
|
00b4f2eb7d888f23f99ae04ed27308e1bef62da0
|
refs/heads/main
| 2023-05-01T09:44:48.997067
| 2021-05-10T13:07:32
| 2021-05-10T13:07:32
| 366,044,075
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 65,607
|
py
|
import sys
import os
import time
import yaml
import csv
import logging
import pysftp
# framework_path = os.path.join(os.path.dirname((os.path.dirname(os.path.dirname(__file__)))), "Framework")
# sys.path.append(os.path.join(framework_path, "phone_wrappers"))
# from PhoneInterface import PhoneInterface
from robot.api.logger import console
from robot.api import logger
from selenium import webdriver
# from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
# from Var import *
from selenium.common.exceptions import *
# from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support.select import *
# from selenium.webdriver.common.action_chains import ActionChains
# from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.remote.remote_connection import LOGGER as seleniumLogger
from urllib3.connectionpool import log as urllibLogger
# Silence the very chatty DEBUG output of the selenium and urllib3 transports.
seleniumLogger.setLevel(logging.WARNING)
urllibLogger.setLevel(logging.WARNING)
class WebUIComponent(object):
    # Robot Framework: one shared instance of this library for the whole run.
    ROBOT_LIBRARY_SCOPE = 'GLOBAL'
    # Placeholder; replaced by a real WebDriver instance in browser().
    driver = 'GLOBAL'
    # Loaded once at class-definition (import) time.
    # NOTE(review): the open() handle is never closed — harmless here but
    # worth tidying; also note yaml.load with FullLoader trusts this file.
    yaml_path = os.path.join(os.path.dirname((os.path.dirname(__file__))), "Variables/WebUIDetails.yaml")
    yaml_file = yaml.load(open(yaml_path), Loader=yaml.FullLoader)
    # Flatten one level of nesting: nested dict values are hoisted so every
    # leaf is addressable by its own key.
    webUIDetails = {}
    for k, v in yaml_file.items():
        if isinstance(v, dict):
            for k1, v1 in v.items():
                webUIDetails[k1] = v1
        else:
            webUIDetails[k] = v
    # console(webUIDetails)
    def browser(self):
        """
        Browser Handler for execution: starts a maximising-ready Chrome
        session whose downloads land in the project's Downloads folder and
        stores it on ``self.driver``.

        :return: None
        :created by: Ramkumar. G
        :creation date:
        :last update by: Ramkumar. G
        :last update date: 04/09/2020
        """
        # check chrome driver version with chrome if you are facing some issue in opening chrome
        chromepath = os.path.join(os.path.dirname((os.path.dirname(__file__))), "Browser_driver/chromedriver.exe")
        # edgepath = os.path.join(os.path.dirname((os.path.dirname(__file__))), "Browser_driver/MicrosoftWebDriver.exe")
        downloadPath = os.path.join(os.path.dirname((os.path.dirname(__file__))), "Downloads")
        options = webdriver.ChromeOptions()
        # options = webdriver.Edge()
        # This "prefs" parameter is used for downloading files in desired folder
        prefs = {
            "download.default_directory": downloadPath,
            "download.prompt_for_download": False,
            "download.directory_upgrade": True,
            "download.extensions_to_open": 'cfg',
            "safebrowsing.enabled": 'false'
        }
        # options.desired_capabilities('prefs', prefs)
        options.add_experimental_option('prefs', prefs)
        options.add_argument('--disable-download-notification')
        options.add_argument('--ignore-certificate-errors')
        options.add_experimental_option('excludeSwitches', ['enable-logging'])
        # NOTE(review): positional executable path and ``chrome_options=``
        # are deprecated in Selenium 4 — fine for the pinned Selenium 3.x,
        # confirm before upgrading.
        self.driver = webdriver.Chrome(chromepath, chrome_options=options)
        # self.driver = webdriver.Edge(edgepath, prefs)
    def loginWebUI(self, **kwargs):
        """
        This method is used to login to the WebUI of the phone by embedding
        the credentials in the URL (http://user:pass@ip/).

        Keyword Args:
            :phone: PhoneComponent Object of the phone to login to.
            :UserName: Username to be used for login (Optional)
            :password: Password to be used for login (Optional)
            :secured: Login securely (using https) (Optional)
        :return: None
        :created by: Ramkumar G.
        :creation date:
        :last update by: Sharma
        :last update date: 07/09/2020
        """
        try:
            phone = kwargs['phone']
            userName = kwargs.get('UserName', self.yaml_file['userid'])
            accessCode = kwargs.get('password', self.yaml_file['passcode'])
            # NOTE(review): bool() of a non-empty string (e.g. "False") is
            # True — callers should pass a real boolean here.
            secured = bool(kwargs.get("secured", False))
            ip = phone.phone_obj.phone_obj.phone.ipAddress
            # console("IPADDRESS: " + ip)
            # ip = phone
            if secured:
                # NOTE(review): in this branch the browser launch below is
                # commented out, so https login builds the URL but never
                # navigates — confirm whether that is intentional.
                if userName == 'user':
                    url = str("https://" + str(userName) + "@" + ip + "/")
                    console(url)
                else:
                    url = str("https://" + str(userName) + ":" + str(accessCode) + "@" + ip + "/")
                # self.browser()
                # self.driver.maximize_window()
                # time.sleep(1)
                # self.driver.get(url)
                # self.driver.find_element_by_xpath('//*[@id="details-button"]').click()
                # self.driver.find_element_by_xpath('//*[@id="proceed-link"]').click()
                # try:
                #     WebDriverWait(self.driver, 5).until(EC.alert_is_present(),
                #                                         'Timed out waiting for PA creation ' +
                #                                         'confirmation popup to appear.')
                #     loginPopup = self.driver.switch_to.active_element
                #     console("Text of Login Popup")
                #     console(loginPopup.text)
                # except TimeoutException:
                #     print("no alert")
            else:
                # print str("http://" + str(userName) + ":" + str(accessCode) + "@" + ip + "/")
                if userName == 'user':
                    url = str("http://" + str(userName) + ":" + "@" + ip + "/")
                else:
                    url = str("http://" + str(userName) + ":" + str(accessCode) + "@" + ip + "/")
                self.browser()
                self.driver.maximize_window()
                time.sleep(1)
                self.driver.get(url)
                time.sleep(5)
        except SessionNotCreatedException as e:
            # chromedriver/Chrome version mismatch: surface the supported
            # version parsed from the driver's error message.
            version = str(e)[str(e).rfind(' ')+1:]
            raise Exception("The supported Chrome Version is " + version + ". If you are not using this version, "
                            + "read README.txt from the ProjectFolder/lib/ directory.")
        except Exception as e:
            console("LOGIN EXCEPT")
            raise e
def goToSection(self, **kwargs):
"""
:param:
:option: Option to navigate to (xpath)
:return: None
:created by: Ramkumar G.
:creation date:
:last update by:
:last update date:
"""
try:
# console(kwargs)
options = str(kwargs['option'])
# value = kwargs.get('value', '')
self.click(options)
time.sleep(2)
# if value:
# if type(value) is dict:
# console("in value")
# for k, v in value.items():
# value = v
# self.click(value)
# time.sleep(5)
except:
self.LogOff()
raise Exception
    def LogOff(self, deleteConfig=True):
        """
        This method is used to log off the Phone's Web UI portal, quit the
        browser and (optionally) delete the local Downloads folder.

        :param deleteConfig: when True, remove the Downloads directory and
            everything in it after quitting the browser.
        :return: None
        :created by: Ramkumar G.
        :creation date:
        :last update by: Vikhyat Sharma
        :last update date: 20/11/2020
        """
        try:
            logger.info("Logging off the Web UI of phone")
            console("Logging off phone")
            import shutil
            # The first anchor in the header is the Log Off link.
            self.driver.find_element(By.XPATH, '//*[@id="header"]//ul//a').click()
            time.sleep(3)
            self.driver.quit()
            if deleteConfig:
                downloadPath = os.path.join(os.path.dirname((os.path.dirname(__file__))), "Downloads")
                isDir = os.path.isdir(downloadPath)
                if isDir:
                    shutil.rmtree(downloadPath)
        except NoSuchElementException as e:
            logger.error("----ELEMENT NOT FOUND----")
            raise Exception(e)
        except (TimeoutException, ElementNotVisibleException) as e:
            # Best-effort: log-off problems on a dying session are tolerated.
            console(e)
        except WebDriverException:
            logger.warn("Web Browser window is already closed!!")
        except AttributeError:
            # self.driver still holds the 'GLOBAL' placeholder string.
            logger.warn("Web Browser window is already closed!!")
        # except ElementNotVisibleException as e:
        #     logger.error(e)
        # finally:
        #     self.driver.quit()
    def isChecked(self, **kwargs):
        """
        Below method is used to check if the passed element is checked/selected or not.

        :param:
            :kwargs:
                :option: Option on the webpage to check (XPath)
                :value: Condition to check i.e., Checked/Selected/Unchecked/Unselected
        :return: None
        :raises Exception: when the checkbox state does not match ``value``;
            the session is logged off first.
        :created by: Vikhyat Sharma
        :creation date:
        :last update by:
        :last update date:
        """
        try:
            option = str(kwargs['option'])
            value = kwargs['value']
            checked = self.driver.find_element(By.XPATH, option).is_selected()
            console(checked)
            if value == "selected" or value == "Checked":
                if not checked:
                    self.LogOff()
                    # NOTE(review): ``option`` here is an XPath, so
                    # yaml_file.get(option) most likely yields None in the
                    # message — confirm the intended lookup key.
                    raise Exception("CheckBox for " + self.yaml_file.get(option) + " is not selected.")
                else:
                    logger.info("The checkbox is selected.", also_console=True)
            elif value == "unselected" or value == "Unchecked":
                if checked:
                    self.LogOff()
                    raise Exception("CheckBox is selected.")
                else:
                    logger.info("The checkbox is not selected.", also_console=True)
        except NoSuchElementException as e:
            self.LogOff()
            raise NoSuchElementException(e)
def click(self, args):
"""
This method is used to click on the specified option.
:param:
:args: Option to click (xpath)
:return: None
:created by: Ramkumar G.
:creation date:
:last update by:
:last update date:
"""
# try:
console("Clicking option")
self.driver.find_element(By.XPATH, args).click()
time.sleep(3)
# except:
# console("CLICK EXCEPT")
# self.LogOff()
    def verifyFileDownloaded(self,**kwargs):
        """
        Verify that a file with the given name exists (non-empty) in the
        project Downloads folder, after allowing time for the download.

        :param kwargs:
            :fileName: name of the expected downloaded file.
        :return: None
        :raises Exception: when the Downloads folder is missing or the file
            check fails.
        :created by: Ramkumar G.
        :creation date:
        :last update by:
        :last update date:
        """
        # try:
        filename = str(kwargs['fileName'])
        console('FileName:' + filename)
        downloadPath = os.path.join(os.path.dirname((os.path.dirname(__file__))), "Downloads")
        # Fixed grace period for the browser to finish the download.
        time.sleep(15)
        isDir = os.path.isdir(downloadPath)
        if isDir:
            files = (os.listdir(downloadPath))
        else:
            console("Download folder is not available")
            raise Exception
        # console("Files stored: ")
        # console(files)
        # console(downloadPath + '/' + filename)
        # NOTE(review): a present-but-empty file logs nothing and does not
        # raise here — confirm whether zero-byte downloads should fail.
        if filename in files:
            # console("File Size: " + str(os.stat(downloadPath + '/' + filename).st_size))
            if os.stat(downloadPath + '/' + filename).st_size > 0:
                logger.info(filename + " File is present in the Downloads Folder", also_console=True)
        else:
            self.LogOff()
            raise Exception("File not Found")
def selectOption(self, **kwargs):
"""
:param kwargs:
:return:
:created by: Ramkumar G.
:creation date:
:last update by:
:last update date:
"""
try:
from selenium.webdriver.support.ui import Select
givenOption = kwargs['option']
value = kwargs['value']
option = Select(self.driver.find_element(By.XPATH, givenOption))
option.select_by_visible_text(value)
except Exception:
self.LogOff()
# def createFile(self, **kwargs):
# """
#
# :param kwargs:
# :return:
# :created by: Ramkumar G.
# :creation date:
# :last update by:
# :last update date:
# """
# try:
# downloadPath = os.path.join(os.path.dirname((os.path.dirname(__file__))), "Downloads")
# phone = str(kwargs['phone'].phone_obj.phone_obj.phone.extensionNumber)
# filename = str(kwargs['fileName'])
# time.sleep(10)
# if not os.path.exists(downloadPath):
# os.mkdir(os.path.join(os.path.dirname((os.path.dirname(__file__))), "Downloads/"))
# console("Directory Created ")
# else:
# console("Directory already exists")
#
# csvFilePath = os.path.join(os.path.dirname((os.path.dirname(__file__))), "Downloads/" + filename)
# with open(csvFilePath, 'w+') as csvfile:
# csvwriter = csv.writer(csvfile, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
# rows = ['A', '', '', '','', '','', '','', '','', '','', '','', '','','1','1','1',phone,'-1','V2']
# csvwriter.writerow(rows)
# except:
# self.LogOff()
# raise Exception
    def uploadFile(self,**kwargs):
        """
        Type the path of a file from the Downloads folder into the currently
        focused upload control and submit with Enter.

        :param kwargs:
            :fileName: name of the file (relative to the Downloads folder).
        :return: None
        :created by: Ramkumar G.
        :creation date:
        :last update by:
        :last update date:
        """
        try:
            filename = str(kwargs['fileName'])
            uploadPath = os.path.join(os.path.dirname((os.path.dirname(__file__))), "Downloads/" + filename)
            # NOTE(review): send_keys is called on the WebDriver itself, not
            # on a located <input type="file"> element — verify this works
            # with the pinned Selenium; normally the element receives keys.
            self.driver.send_keys(uploadPath)
            self.driver.send_keys(Keys.RETURN)
        except:
            self.LogOff()
            raise Exception
def clearLineEntries(self, **kwargs):
"""
:param kwargs:
:return:
:created by: Vikhyat Sharma
:creation date:
:last update by:
:last update date:
"""
line = kwargs['line']
deleteLimit = kwargs['deleteLimit']
if line == 'all':
# while row < 20:
screen_name = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[2]/td[2]/input')
screen_name.click()
screen_name.clear()
time.sleep(1)
screen_name2 = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[3]/td[2]/input')
screen_name2.click()
screen_name2.clear()
time.sleep(1)
phoneNumber = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[4]/td[2]/input')
phoneNumber.click()
phoneNumber.clear()
time.sleep(1)
callerID = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[5]/td[2]/input')
callerID.click()
callerID.clear()
time.sleep(1)
authName = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[6]/td[2]/input')
authName.click()
authName.clear()
time.sleep(1)
password = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[7]/td[2]/input')
password.click()
password.clear()
time.sleep(1)
if deleteLimit == 'Fully':
proxyServer = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[13]/td[2]/input')
proxyServer.click()
proxyServer.clear()
time.sleep(1)
proxyPort = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[14]/td[2]/input')
proxyPort.click()
proxyPort.clear()
time.sleep(1)
outProxy = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[17]/td[2]/input')
outProxy.click()
outProxy.clear()
time.sleep(1)
outProxyPort = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[18]/td[2]/input')
outProxyPort.click()
outProxyPort.clear()
time.sleep(1)
registrarServer = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[21]/td[2]/input')
registrarServer.click()
registrarServer.clear()
time.sleep(1)
registrarPort = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[22]/td[2]/input')
registrarPort.click()
registrarPort.clear()
time.sleep(1)
else:
proxyServer = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[13]/td[2]/input')
proxyServer.click()
proxyServer.clear()
proxyServer.send_keys('0.0.0.0')
time.sleep(1)
proxyPort = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[14]/td[2]/input')
proxyPort.click()
proxyPort.clear()
proxyPort.send_keys('0')
time.sleep(1)
backupProxyServer = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[15]/td[2]/input')
backupProxyServer.click()
backupProxyServer.clear()
backupProxyServer.send_keys('0.0.0.0')
backupProxyPort = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[16]/td[2]/input')
backupProxyPort.click()
backupProxyPort.clear()
backupProxyPort.send_keys('0')
time.sleep(1)
outProxy = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[17]/td[2]/input')
outProxy.click()
outProxy.clear()
outProxy.send_keys('0.0.0.0')
time.sleep(1)
outProxyPort = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[18]/td[2]/input')
outProxyPort.click()
outProxyPort.clear()
outProxyPort.send_keys('0')
time.sleep(1)
backupOutboundProxyServer = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[19]/td[2]/input')
backupOutboundProxyServer.click()
backupOutboundProxyServer.clear()
backupOutboundProxyServer.send_keys('0.0.0.0')
time.sleep(1)
backupOutboundProxyPort = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[20]/td[2]/input')
backupOutboundProxyPort.click()
backupOutboundProxyPort.clear()
backupOutboundProxyPort.send_keys('0')
time.sleep(1)
registrarServer = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[21]/td[2]/input')
registrarServer.click()
registrarServer.clear()
registrarServer.send_keys('0.0.0.0')
time.sleep(1)
registrarPort = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[22]/td[2]/input')
registrarPort.click()
registrarPort.clear()
registrarPort.send_keys('0')
time.sleep(1)
backupRegistrarServer = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[23]/td[2]/input')
backupRegistrarServer.click()
backupRegistrarServer.clear()
backupRegistrarServer.send_keys('0.0.0.0')
time.sleep(1)
backupRegistrarPort = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[24]/td[2]/input')
backupRegistrarPort.click()
backupRegistrarPort.clear()
backupRegistrarPort.send_keys('0')
time.sleep(1)
# saving the settings
self.driver.find_element_by_xpath('//*[@id="globalSIPform"]/p/input').click()
else:
screen_name = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[2]/td[2]/input')
screen_name.click()
screen_name.clear()
time.sleep(1)
screen_name2 = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[3]/td[2]/input')
screen_name2.click()
screen_name2.clear()
time.sleep(1)
phoneNumber = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[4]/td[2]/input')
phoneNumber.click()
phoneNumber.clear()
time.sleep(1)
callerID = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[5]/td[2]/input')
callerID.click()
callerID.clear()
time.sleep(1)
authName = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[6]/td[2]/input')
authName.click()
authName.clear()
time.sleep(1)
password = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[7]/td[2]/input')
password.click()
password.clear()
time.sleep(1)
if deleteLimit == 'Fully':
proxyServer = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[13]/td[2]/input')
proxyServer.click()
proxyServer.clear()
time.sleep(1)
proxyPort = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[14]/td[2]/input')
proxyPort.click()
proxyPort.clear()
time.sleep(1)
outProxy = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[17]/td[2]/input')
outProxy.click()
outProxy.clear()
time.sleep(1)
outProxyPort = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[18]/td[2]/input')
outProxyPort.click()
outProxyPort.clear()
time.sleep(1)
registrarServer = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[21]/td[2]/input')
registrarServer.click()
registrarServer.clear()
time.sleep(1)
registrarPort = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[22]/td[2]/input')
registrarPort.click()
registrarPort.clear()
time.sleep(1)
else:
proxyServer = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[13]/td[2]/input')
proxyServer.click()
proxyServer.clear()
proxyServer.send_keys('0.0.0.0')
time.sleep(1)
proxyPort = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[14]/td[2]/input')
proxyPort.click()
proxyPort.clear()
proxyPort.send_keys('0')
time.sleep(1)
backupProxyServer = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[15]/td[2]/input')
backupProxyServer.click()
backupProxyServer.clear()
backupProxyServer.send_keys('0.0.0.0')
backupProxyPort = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[16]/td[2]/input')
backupProxyPort.click()
backupProxyPort.clear()
backupProxyPort.send_keys('0')
time.sleep(1)
outProxy = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[17]/td[2]/input')
outProxy.click()
outProxy.clear()
outProxy.send_keys('0.0.0.0')
time.sleep(1)
outProxyPort = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[18]/td[2]/input')
outProxyPort.click()
outProxyPort.clear()
outProxyPort.send_keys('0')
time.sleep(1)
backupOutboundProxyServer = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[19]/td[2]/input')
backupOutboundProxyServer.click()
backupOutboundProxyServer.clear()
backupOutboundProxyServer.send_keys('0.0.0.0')
time.sleep(1)
backupOutboundProxyPort = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[20]/td[2]/input')
backupOutboundProxyPort.click()
backupOutboundProxyPort.clear()
backupOutboundProxyPort.send_keys('0')
time.sleep(1)
registrarServer = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[21]/td[2]/input')
registrarServer.click()
registrarServer.clear()
registrarServer.send_keys('0.0.0.0')
time.sleep(1)
registrarPort = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[22]/td[2]/input')
registrarPort.click()
registrarPort.clear()
registrarPort.send_keys('0')
time.sleep(1)
backupRegistrarServer = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[23]/td[2]/input')
backupRegistrarServer.click()
backupRegistrarServer.clear()
backupRegistrarServer.send_keys('0.0.0.0')
time.sleep(1)
backupRegistrarPort = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[24]/td[2]/input')
backupRegistrarPort.click()
backupRegistrarPort.clear()
backupRegistrarPort.send_keys('0')
time.sleep(1)
# saving the settings
self.driver.find_element_by_xpath('//*[@id="sipLineSettingsForm"]/p/input').click()
def unRegisterPhone(self, **kwargs):
"""
This method is used to unregister the phone using its Web UI.
This method needs the Web UI to be login in a new browser window beforehand.
:param:
:linesToUnregister: The line to unregister i.e., 1 - 24
:deleteLimit: Fully or Partially
Fully will delete the SIP network settings along with the authentication settings
but Partially will change network settings to default ones.
:phone: Phone to unregister
:return: None
:created by: Sharma
:creation date:
:last update by:
:last update date:
"""
line = str(kwargs['lineToUnregister'])
deleteLimit = kwargs.get('deleteLimit', 'Fully')
phone = kwargs['phone'].phone_obj.phone_obj.phone.extensionNumber
logger.info("Unregistered Line: <b>" + line + "</b> using the Web UI of extension: " + phone, html=True)
console("Unregistered Line: " + line + " using the Web UI of extension: " + phone)
try:
if line == 'all' or line == 'All':
globalSIP = self.driver.find_element_by_xpath('//*[@id="sidebar"]/ul[4]/li[2]/a')
globalSIP.click()
self.clearLineEntries(line=line, deleteLimit=deleteLimit)
logger.info("Unregistering global SIP entries on " + phone)
elif line == '1':
self.driver.find_element_by_xpath('//*[@id="sidebar"]/ul[4]/li[3]/a').click()
self.clearLineEntries(line=line, deleteLimit=deleteLimit)
elif line == '2':
self.driver.find_element_by_xpath('//*[@id="sidebar"]/ul[4]/li[4]/a').click()
self.clearLineEntries(line=line, deleteLimit=deleteLimit)
elif line == '3':
self.driver.find_element_by_xpath('//*[@id="sidebar"]/ul[4]/li[5]/a').click()
self.clearLineEntries(line=line, deleteLimit=deleteLimit)
elif line == '4':
self.driver.find_element_by_xpath('//*[@id="sidebar"]/ul[4]/li[6]/a').click()
self.clearLineEntries(line=line, deleteLimit=deleteLimit)
elif line == '5':
self.driver.find_element_by_xpath('//*[@id="sidebar"]/ul[4]/li[7]/a').click()
self.clearLineEntries(line=line, deleteLimit=deleteLimit)
elif line == '6':
self.driver.find_element_by_xpath('//*[@id="sidebar"]/ul[4]/li[8]/a').click()
self.clearLineEntries(line=line, deleteLimit=deleteLimit)
elif line == '7':
self.driver.find_element_by_xpath('//*[@id="sidebar"]/ul[4]/li[9]/a').click()
self.clearLineEntries(line=line, deleteLimit=deleteLimit)
elif line == '8':
self.driver.find_element_by_xpath('//*[@id="sidebar"]/ul[4]/li[10]/a').click()
self.clearLineEntries(line=line, deleteLimit=deleteLimit)
elif line == '9':
self.driver.find_element_by_xpath('//*[@id="sidebar"]/ul[4]/li[11]/a').click()
self.clearLineEntries(line=line, deleteLimit=deleteLimit)
logger.info("Unregistered Line: <b>" + line + "</b> using the Web UI of extension: " + phone)
console("Unregistered Line: " + line + " line using the Web UI of extension: " + phone)
except Exception as e:
self.LogOff()
raise Exception('Could not unregister ' + line + 'line on extension: ' + phone + '. \nDETAILS: ' + e)
def fillLineEntries(self, **kwargs):
"""
Below method is used to fill the entries in the registration page of the phone.
Keyword Args:
linesToRegister: Line to Register
phoneToEnter: Phone whose details should be entered
:return:
:created by: Vikhyat Sharma
:creation date:
:last update by:
:last update date:
"""
line = kwargs['linesToRegister']
phone = kwargs['phoneToEnter']
number = phone.phone_obj.phone_obj.phone.extensionNumber
# number = '4165142501'
pbxUsed = self.yaml_file.get((kwargs['pbx']) + 'IP')
# pbx = '10.112.123.89'
if line == 'all' or line == 'All':
screen_name = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[2]/td[2]/input')
screen_name.click()
screen_name.clear()
time.sleep(1)
screen_name.send_keys(number)
time.sleep(1)
screen_name2 = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[3]/td[2]/input')
screen_name2.click()
screen_name2.clear()
time.sleep(1)
screen_name2.send_keys(number)
time.sleep(1)
phoneNumber = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[4]/td[2]/input')
phoneNumber.click()
phoneNumber.clear()
time.sleep(1)
phoneNumber.send_keys(number)
time.sleep(1)
callerID = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[5]/td[2]/input')
callerID.click()
callerID.clear()
time.sleep(1)
callerID.send_keys(number)
time.sleep(1)
authName = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[6]/td[2]/input')
authName.click()
authName.clear()
time.sleep(1)
authName.send_keys(number)
time.sleep(1)
password = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[7]/td[2]/input')
password.click()
password.clear()
time.sleep(1)
password.send_keys(number)
time.sleep(1)
proxyServer = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[13]/td[2]/input')
proxyServer.click()
proxyServer.clear()
time.sleep(1)
proxyServer.send_keys(pbxUsed)
time.sleep(1)
proxyPort = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[14]/td[2]/input')
proxyPort.click()
proxyPort.clear()
time.sleep(1)
proxyPort.send_keys('5060')
time.sleep(1)
outProxy = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[17]/td[2]/input')
outProxy.click()
outProxy.clear()
time.sleep(1)
outProxy.send_keys(pbxUsed)
time.sleep(1)
outProxyPort = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[18]/td[2]/input')
outProxyPort.click()
outProxyPort.clear()
time.sleep(1)
outProxyPort.send_keys('5060')
time.sleep(1)
registrarServer = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[21]/td[2]/input')
registrarServer.click()
registrarServer.clear()
time.sleep(1)
registrarServer.send_keys(pbxUsed)
time.sleep(1)
registrarPort = self.driver.find_element_by_xpath('//*[@id="globalSIPform"]//tr[22]/td[2]/input')
registrarPort.click()
registrarPort.clear()
time.sleep(1)
registrarPort.send_keys('5060')
time.sleep(1)
# saving the settings
self.driver.find_element_by_xpath('//*[@id="globalSIPform"]/p/input').click()
else:
screen_name = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[2]/td[2]/input')
screen_name.click()
screen_name.clear()
time.sleep(1)
screen_name.send_keys(number)
time.sleep(1)
screen_name2 = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[3]/td[2]/input')
screen_name2.click()
screen_name2.clear()
time.sleep(1)
screen_name2.send_keys(number)
time.sleep(1)
phoneNumber = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[4]/td[2]/input')
phoneNumber.click()
phoneNumber.clear()
time.sleep(1)
phoneNumber.send_keys(number)
time.sleep(1)
callerID = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[5]/td[2]/input')
callerID.click()
callerID.clear()
time.sleep(1)
callerID.send_keys(number)
time.sleep(1)
authName = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[6]/td[2]/input')
authName.click()
authName.clear()
time.sleep(1)
authName.send_keys(number)
time.sleep(1)
password = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[7]/td[2]/input')
password.click()
password.clear()
time.sleep(1)
password.send_keys(number)
time.sleep(1)
proxyServer = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[13]/td[2]/input')
proxyServer.click()
proxyServer.clear()
time.sleep(1)
proxyServer.send_keys(pbxUsed)
time.sleep(1)
proxyPort = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[14]/td[2]/input')
proxyPort.click()
proxyPort.clear()
time.sleep(1)
proxyPort.send_keys('5060')
time.sleep(1)
outProxy = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[17]/td[2]/input')
outProxy.click()
outProxy.clear()
time.sleep(1)
outProxy.send_keys(pbxUsed)
time.sleep(1)
outProxyPort = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[18]/td[2]/input')
outProxyPort.click()
outProxyPort.clear()
time.sleep(1)
outProxyPort.send_keys('5060')
time.sleep(1)
registrarServer = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[21]/td[2]/input')
registrarServer.click()
registrarServer.clear()
time.sleep(1)
registrarServer.send_keys(pbxUsed)
time.sleep(1)
registrarPort = self.driver.find_element_by_xpath('//*[@id="mainTable"]//tr[22]/td[2]/input')
registrarPort.click()
registrarPort.clear()
time.sleep(1)
registrarPort.send_keys('5060')
time.sleep(1)
# saving the settings
self.driver.find_element_by_xpath('//*[@id="sipLineSettingsForm"]/p/input').click()
def registerPhone(self, **kwargs):
"""
This method is used to register phones' lines with an extension of same number or other number.
Keyword Args:
phoneToOpen: Phone To Edit
phoneToEnter: Phone with which the line will be registered
lineToRegister: Line to Register i.e., 1, 2, or 3 or 4-9.
:return: None
:created by: Vikhyat Sharma
:creation date:
:last update by: Ramkumar G.
:last update date: 10/06/2020
"""
lineToRegister = str(kwargs['linesToRegister'])
phone = kwargs['phoneToOpen']
phoneToEnter = kwargs['phoneToEnter']
try:
if lineToRegister == 'all':
logger.info("Registering all the lines on the extension "
+ phone.phone_obj.phone_obj.phone.extensionNumber + " with IP: "
+ phone.phone_obj.phone_obj.phone.ipAddress + " with extension: "
+ phoneToEnter.phone_obj.phone_obj.phone.extensionNumber)
console("Registering all the lines on the extension "
+ phone.phone_obj.phone_obj.phone.extensionNumber + " with IP: "
+ phone.phone_obj.phone_obj.phone.ipAddress + " with extension: "
+ phoneToEnter.phone_obj.phone_obj.phone.extensionNumber)
globalSIP = self.driver.find_element_by_xpath('//*[@id="sidebar"]/ul[4]/li[2]/a')
globalSIP.click()
self.fillLineEntries(**kwargs)
elif "-" in lineToRegister:
line = int(lineToRegister[-1])
for i in range(1, int(line) + 1):
lines = i + 2
xpath = '//*[@id="sidebar"]/ul[4]/li[' + str(lines) + ']/a'
self.driver.find_element_by_xpath(xpath).click()
self.fillLineEntries(**kwargs)
else:
logger.info("Registering Line: " + lineToRegister + " on the extension "
+ phone.phone_obj.phone_obj.phone.extensionNumber + " with IP: "
+ phone.phone_obj.phone_obj.phone.ipAddress + " with extension: "
+ phoneToEnter.phone_obj.phone_obj.phone.extensionNumber)
console("Registering Line: " + lineToRegister + " on the extension "
+ phone.phone_obj.phone_obj.phone.extensionNumber + " with IP: "
+ phone.phone_obj.phone_obj.phone.ipAddress + " with extension: "
+ phoneToEnter.phone_obj.phone_obj.phone.extensionNumber)
line = int(lineToRegister[-1]) + 2
xpath = '//*[@id="sidebar"]/ul[4]/li[' + str(line) + ']/a'
self.driver.find_element_by_xpath(xpath).click()
self.fillLineEntries(**kwargs)
logger.info("Registered Line: <b>" + lineToRegister + "</b> on extension: <b>"
+ phone.phone_obj.phone_obj.phone.extensionNumber + "</b>", html=True)
console("Registered Line: " + lineToRegister + " on extension: "
+ phone.phone_obj.phone_obj.phone.extensionNumber)
except Exception:
self.LogOff()
# raise Exception("Could not register Phone.")
def verifyRegisteredLine(self, **kwargs):
"""
Below method is used to verify the passed line is registered on the phone.
:param:
:kwargs:
:lineToVerify: Line to Verify for registration
:phone: Phone To Open
:return: None
:created by: Vikhyat Sharma
:creation date:
:last update by: Sharma
:last update date: 07/09/2020
"""
lineToVerify = kwargs['lineToVerify']
phone = kwargs['phone']
phoneNumber = str(kwargs['phone'].phone_obj.phone_obj.phone.extensionNumber)
# phoneNumber = phone
sysInfo = self.yaml_file.get('SystemInfoMenu')
logger.info("Verifying Line: <b>" + lineToVerify + "</b> is registered on Web UI of extension: <b>"
+ phoneNumber + "</b>", html=True)
console("Verifying Line: " + lineToVerify + " is registered on Web UI of extension: " + phoneNumber)
try:
statusRow = 21
if phone.phone_obj.phone_type == 'Mitel6930':
statusRow = 22
self.goToSection(option=sysInfo)
if lineToVerify.isdigit():
lineStatus = self.driver.find_element_by_xpath('//*[@id="content"]//tr['
+ str(statusRow + int(lineToVerify)) + ']/td[3]')
else:
raise Exception("INVALID ARGUMENT PASSED FOR 'lineToVerify'")
# if lineToVerify == '1':
# lineStatus = self.driver.find_element_by_xpath('//*[@id="content"]//tr[' + str(lineStatus+1) + ']/td[3]')
# elif lineToVerify == '2':
# lineStatus = self.driver.find_element_by_xpath('//*[@id="content"]//tr[24]/td[3]')
# elif lineToVerify == '3':
# lineStatus = self.driver.find_element_by_xpath('//*[@id="content"]//tr[25]/td[3]')
# else:
# raise Exception("INVALID ARGUMENT PASSED FOR 'lineToVerify'")
# console("----------------------LINE--------------------------")
# console(lineStatus)
# console(lineStatus.is_displayed())
# console(lineStatus.text)
if lineStatus.is_displayed() and lineStatus.text == 'Registered':
logger.info("Verified Line <b>" + lineToVerify + "</b> is registered on phone: <b>" + phoneNumber
+ "</b>", html=True)
console("Verified Line " + lineToVerify + " is registered on phone: " + phoneNumber)
else:
raise Exception("Could not verify line " + lineToVerify + " is registered on phone: " + phoneNumber
+ ". Got line status as '" + lineStatus.text + "'")
except ElementNotVisibleException:
logger.error("ELEMENT NOT INTRACTABLE")
self.LogOff()
raise Exception("ELEMENT NOT INTRACTABLE")
except NoSuchElementException:
self.LogOff()
raise Exception("ELEMENT NOT PRESENT")
except Exception as e:
self.LogOff()
raise Exception(e)
def findPageElement(self, xpath):
"""
:param xpath:
:return:
:created by: Vikhyat Sharma
:creation date:
:last update by:
:last update date:
"""
try:
if xpath == 'Forbidden Page Title':
if self.driver.title == '403 Forbidden':
logger.info("Phone Web UI is locked.")
else:
raise Exception("Phone Web UI is not locked.")
else:
present = self.driver.find_element_by_xpath(xpath).is_displayed()
if present:
logger.info("Element is present on the page.", also_console=True)
# console("Element is present on the page.")
else:
raise Exception("Element is not present on the page.")
except:
self.LogOff()
raise Exception("Exception occurred while finding element on the page.")
def verifyElementIsFound(self, element):
"""
This method is used to verify the presence of element on the Web UI of the phone.
:param:
:element: Text to verify or Element's xpath
:return: None
:created by: Sharma
:creation date:
:last update by:
:last update date:
"""
try:
if element.startswith("//*"):
if self.driver.find_element_by_xpath(element).is_displayed():
console("Element is present on the Web UI.")
else:
raise NoSuchElementException
else:
if element in self.driver.page_source:
console(element + " is present on the Page.")
else:
raise NoSuchElementException
except NoSuchElementException as e:
print(e.__class__)
self.LogOff()
raise Exception("Element is not present on the page !!!")
except Exception:
self.LogOff()
raise Exception("Could not verify the presence of the element !!")
def verifyElementNotFound(self, element):
"""
This method is used to verify the negative presence of the element on the Web UI of the phone.
:param:
:element: Text or Element to verify
:return: None
:created by: Vikhyat Sharma
:creation date:
:last update by:
:last update date:
"""
try:
if element == 'Forbidden Page Title':
console("Verifying Forbidden Page Title")
if self.driver.title == '403 Forbidden':
logger.error("Phone Web UI is locked.")
else:
logger.info("Phone Web UI is not locked.", also_console=True)
else:
console("Verifying Element is not present on the screen")
present = self.driver.find_element_by_xpath(element).is_displayed()
if not present:
logger.info("Element is not present on the page.", also_console=True)
else:
raise Exception
except NoSuchElementException:
logger.info("Element is not present on the screen.", also_console=True)
except WebDriverException:
self.LogOff()
raise Exception("Try to do something")
except Exception:
raise Exception("Could not verify the presence of the element on the Web UI.")
def clearKeys(self,**kwargs):
"""
:param kwargs:
:return:
:created by: Ramkumar G.
:creation date:
:last update by:
:last update date:
"""
try:
option = kwargs['option']
self.driver.find_element(By.XPATH, option).clear()
time.sleep(3)
except:
console("clearKeys EXCEPT")
self.LogOff()
def sendKeys(self,**kwargs):
"""
:param kwargs:
:return:
:created by: Ramkumar G.
:creation date:
:last update by:
:last update date:
"""
try:
option = kwargs['option']
value = str(kwargs['value'])
self.clearKeys(**kwargs)
self.driver.find_element(By.XPATH, option).send_keys(value)
time.sleep(3)
except:
console("clearSendKeys EXCEPT")
self.LogOff()
    def verifyText(self, **kwargs):
        """
        Verify the value passed against the option available on the Web UI of the phone.
        Keyword Args:
            option: xpath of the option on the Web UI of the phone.
            value: The value to verify against the option.
            OnPage: When True, only check that 'value' occurs anywhere in the
                    page source ('option' is then ignored). Defaults to False.
            verifiedPhone: Optional phone object; when 'value' matches a key in
                           the YAML 'PhoneDetails' map, the expected value is
                           read from this phone's attributes instead.
        :return: None
        :created by: Vikhyat Sharma
        :created on:
        :updated on: 03/02/2021
        :updated by: Milind Patil
        """
        try:
            option = str(kwargs.get('option'))
            value = str(kwargs['value'])
            onPage = bool(kwargs.get('OnPage', False))
            console("Verifying " + value + " on the phone WEB UI.")
            time.sleep(1)
            if onPage:
                # Plain substring check against the whole page source.
                if value in self.driver.page_source:
                    logger.info("Value: " + value + " is present on the WEB UI.")
                else:
                    raise Exception("The specified value: '" + value + "' could not be found on the phone WUI !!!")
            else:
                element = self.driver.find_element_by_xpath(option)
                if element.tag_name == 'select':
                    # Dropdowns: compare against the currently selected option's text.
                    console('Element is dropdown')
                    element = Select(self.driver.find_element_by_xpath(option))
                    text = element.first_selected_option.text
                else:
                    text = self.driver.find_element(By.XPATH, option).text
                    if len(text) == 0:
                        # Input fields expose their content via the 'value' attribute, not text.
                        text = str(self.driver.find_element(By.XPATH, option).get_attribute('value'))
                if 'verifiedPhone' in kwargs:
                    # Translate a symbolic value (a 'PhoneDetails' YAML key) into
                    # the matching attribute of the phone under verification.
                    phoneDetails = self.yaml_file.get('PhoneDetails')
                    for key, val in phoneDetails.items():
                        if value == key:
                            verifiedPhone = kwargs['verifiedPhone']
                            value = getattr(verifiedPhone.phone_obj.phone_obj.phone, val)
                            if key == "PhoneModel":
                                # Only the trailing 4 characters (e.g. '6920') appear on the UI.
                                value = value[-4:]
                            break
                if text == value:
                    logger.info("'<b>" + value + "</b>' is verified on the phone WEB UI.", html=True)
                    console("'" + value + "' is verified on the phone WEB UI.")
                else:
                    raise Exception("The specified value: '" + value + "' could not be found on the phone WUI !!!. Got "
                                    + text + " instead.")
        except NoSuchElementException as e:
            self.LogOff()
            raise Exception(e)
        except ElementNotVisibleException as e:
            self.LogOff()
            raise Exception(e)
def sendFileToUpload(self, **kwargs):
"""
:param kwargs:
:return:
:created by: Ramkumar G.
:creation date:
:last update by: Sharma
:last update date:
"""
console(kwargs)
try:
option = kwargs['option']
filename = kwargs['fileName']
filePath = 'configuration/' + filename
# actions = ActionChains(self.driver)
# elementLocator = self.driver.find_element(By.XPATH, option)
# actions.double_click(elementLocator).click().perform()
# time.sleep(3)
# directoryFilePath = r'..\configuration\directoryFile.csv'
directoryFilePath = os.path.join(os.path.dirname(os.path.dirname(__file__)), filePath)
# os.system(directoryFilePath)
time.sleep(5)
# if filename =="Directory"
self.driver.find_element_by_xpath(option).send_keys(directoryFilePath)
self.driver.find_element(By.XPATH, '//*[@id="content"]//tr[6]/td[2]/input').click()
time.sleep(10)
except Exception as e:
console("CLICK EXCEPT")
self.LogOff()
raise e
    def closeWindow(self):
        """
        Close the opened browser windows and end the WebDriver session.
        :return: None
        :created by: Vikhyat Sharma
        :creation date: 16/03/2020
        :last update by:
        :last update date:
        """
        # quit() (unlike close()) terminates the whole driver session, not just one window.
        self.driver.quit()
def serverConnection(self,**kwargs):
"""
:param kwargs:
:return:
:created by: Ramkumar G.
:creation date:
:last update by:
:last update date:
"""
console(kwargs)
hostname = kwargs["parameter"]['Hostname']
Username = kwargs["parameter"]['Username']
Password = kwargs["parameter"]['Password']
fileName = kwargs['fileName']
console(fileName)
remoteFilePath = self.yaml_file['remoteFilePath']+"/"+fileName
console(remoteFilePath)
localFilePath = os.path.join(os.path.dirname((os.path.dirname(__file__))), "Downloads/" + fileName)
console(localFilePath)
sftp = pysftp.Connection(host=hostname, username=Username, password=Password)
directory_structure = sftp.listdir_attr()
sftp.cwd('/var/www/html/configuration/')
directory_structure = sftp.listdir_attr()
sftp.put(localFilePath, remoteFilePath)
for attr in directory_structure:
print ("file has moved")
def addParamtersToCfgfile(self,**kwargs):
"""
:param kwargs:
:return:
:created by: Ramkumar G.
:creation date:
:last update by:
:last update date:
"""
fileName = kwargs['fileName']
parameter = kwargs['parameter']
if not os.path.exists("C:/Robo_SVN_5.1/Desktop_Automation/Downloads/"):
os.makedirs("C:/Robo_SVN_5.1/Desktop_Automation/Downloads/")
write_file = open("C:/Robo_SVN_5.1/Desktop_Automation/Downloads/" +fileName, "wb")
write_file.write("$telnet enabled: Telnet for support1410!$\n")
write_file.write("sip register blocking: 0\n")
for k,v in parameter.items():
write_file.write(k + '\n')
write_file.write(v + '\n')
def delete_file(self,**kwargs):
"""
:param kwargs:
:return:
:created by: Ramkumar G.
:creation date:
:last update by:
:last update date:
"""
fileName = kwargs['fileName']
if os.path.exists("C:/Robo_SVN_5.1/Desktop_Automation/Downloads"+"/"+fileName):
os.remove("C:/Robo_SVN_5.1/Desktop_Automation/Downloads"+"/"+fileName)
def verifyOpenedURL(self, **kwargs):
"""
This method verifies the currently opened URL with the passed URL entry.
:param
:kwargs:
:url: URL to verify on the browser window
:return: None
:created by: Vikhyat Sharma
:creation date: 07/05/2020
:last update by:
:last update on:
"""
url = kwargs['url']
currentURL = self.driver.current_url
if url in currentURL:
logger.info("Correct URL is opened.")
else:
raise Exception("Incorrect URL opened.")
    def capturePackets(self, **kwargs):
        """
        Start/stop packet capturing, or download the captured file, using the
        phone's Web UI.
        Keyword Args:
            action: Action to perform i.e., 'Start'/'Stop' the capture, or 'Download'.
            phoneObj: PhoneComponent object of the phone.
            portNumber: Port number of the protocol for filtering, 'default'
                        (5060), or 'all' for every protocol.
            timeout: Automatic timeout period for the capture (optional).
        :return: None
        :created by: Vikhyat Sharma
        :creation date: 11/05/2020
        :last update by:
        :last update date:
        """
        # console(kwargs)
        action = str(kwargs.get('action'))
        phoneObj = kwargs.get('phoneObj')
        captureOptions = self.yaml_file.get("CaptureLink")
        self.goToSection(option=self.yaml_file.get("CaptureMenu"))
        port = self.driver.find_element_by_xpath(captureOptions['Port'])
        saveSettings = self.driver.find_element_by_xpath(captureOptions['SaveSetting'])
        if action == 'Download':
            # Remove any stale capture/converted files so the fresh download is unambiguous.
            pcapFilePath = os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)),
                                                        'Downloads\\captureFile.pcap'))
            convertedFilePath = os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)),
                                                             'Downloads\\convertedFile.txt'))
            downloadPath = os.path.join(os.path.dirname((os.path.dirname(__file__))), "Downloads")
            if os.path.isdir(downloadPath):
                if os.path.exists(pcapFilePath):
                    os.remove(pcapFilePath)
                if os.path.exists(convertedFilePath):
                    os.remove(convertedFilePath)
            self.driver.find_element_by_xpath(captureOptions['GetCapture']).click()
            time.sleep(10)
            logger.info("Successfully downloaded the capture file.", also_console=True)
        else:
            timeout = kwargs.get('timeout')
            if timeout is not None:
                self.driver.find_element_by_xpath(captureOptions['TimeOut']).clear()
                self.driver.find_element_by_xpath(captureOptions['TimeOut']).send_keys(timeout)
            portNumber = str(kwargs.get('portNumber', ''))
            if portNumber == 'default':
                # NOTE(review): becomes an int here while portNumber is otherwise a str — confirm intended.
                portNumber = 5060
            if portNumber == 'all':
                # An empty port field captures every protocol; save and reopen the page.
                port.click()
                port.clear()
                saveSettings.click()
                time.sleep(3)
                self.goToSection(option=self.yaml_file.get('CaptureMenu'))
            else:
                port.click()
                port.clear()
                time.sleep(2)
                port.send_keys(portNumber)
            startStopButton = self.driver.find_element_by_xpath(captureOptions['Start'])
            if action == 'Start':
                if startStopButton.text == 'Stop':
                    # A capture is already running: stop it, then recurse once to start cleanly.
                    logger.warn("Capture is already going on IP: {}!! Stopping the current capture and "
                                "starting again.".format(phoneObj.phone_obj.phone_obj.phone.ipAddress))
                    startStopButton.click()
                    self.capturePackets(action='Start', phoneObj=phoneObj, portNumber=portNumber)
                logger.info("Started the capture on extension: " + phoneObj.phone_obj.phone_obj.phone.extensionNumber)
            elif action == 'Stop':
                if startStopButton.text == 'Start':
                    raise Exception("Capture was not running on extension: {} with IP: {}".format(
                        phoneObj.phone_obj.phone_obj.phone.extensionNumber,
                        phoneObj.phone_obj.phone_obj.phone.ipAddress))
                else:
                    logger.info('Stopped capture on extension: ' + phoneObj.phone_obj.phone_obj.phone.extensionNumber)
                    startStopButton.click()
def verifyElementIsClickable(self, **kwargs):
"""
This method is used to verify the passed element is clickable/enabled or not on the web page.
:param:
:kwargs:
:element: Element to verify on the web page.
:clickable: Verify enabled/not enabled i.e., True to verify element is enabled
and False to verify element is not enabled.
:return: None
:created by: Vikhyat Sharma
:creation date: 18/05/2020
:last update by:
:last update date:
"""
element = kwargs['element']
clickable = str(kwargs['clickable'])
elementToVerify = self.driver.find_element_by_xpath(element)
if clickable == 'True':
if elementToVerify.is_enabled():
logger.info("The " + self.webUIDetails.keys()[self.webUIDetails.values().index(element)] +
" element is enabled on the Web UI.", also_console=True)
else:
raise Exception("The " + self.webUIDetails.keys()[self.webUIDetails.values().index(element)] +
" element should be enabled but is not enabled.")
else:
if not elementToVerify.is_enabled():
logger.info("The " + self.webUIDetails.keys()[self.webUIDetails.values().index(element)] +
" element is not enabled on the Web UI.", also_console=True)
else:
raise Exception("The " + self.webUIDetails.keys()[self.webUIDetails.values().index(element)] +
" element should not be enabled, but is enabled.")
def verifyTextNotPresent(self, **kwargs):
"""
This method is used to verify the passed option is not present on the Web UI of the phone.
This method should be used if the option is not present anytime on the Web UI. If the option can be
made available or unavailable on the Web UI, then see method:
self.verifyElementIsFound or self.verifyElementNotFound
:param:
:kwargs:
:text: Text/Option to Verify on the display
:return: None
:created by: Sharma
:creation date: 14/08/2020
:last update by:
:last update date:
"""
textToVerify = kwargs['text']
if textToVerify not in self.driver.page_source:
logger.info("The option: " + textToVerify + " is not present on the Web UI.")
else:
raise Exception("The option: " + textToVerify + " is present on the Web UI.")
def getMACAddress(self, phone):
"""
This method is used to get the MAC Address of the phone using its WUI.
:param:
:phone: Phone of which MAC Address is needed (PhoneComponent Object)
:return: MAC Address of the phone (String)
:created by: Sharma
:creation date: 08/09/2020
:last updated by:
:last update date:
"""
try:
self.loginWebUI(phone=phone)
self.goToSection(option='//*[@id="sidebar"]/ul[1]/li[1]/a')
macAddress = self.driver.find_element_by_xpath('//*[@id="content"]//tr[10]/td[2]').text
self.LogOff(deleteConfig=False)
macAddress = macAddress.replace(':', '')
console(macAddress)
return macAddress
except NoSuchElementException as e:
self.LogOff()
raise Exception("Not able to find the MAC Address option on the Web UI.")
def getTextValue(self, element):
"""
This method is used to get Text value for the webelement
:param kwargs:
:return:
:created by: Milind Patil.
:creation date: 3/02/2021
:last update by:
:last update date:
"""
try:
logger.info(element)
textValue = self.driver.find_element(By.XPATH, element).text
if len(textValue) == 0:
textValue = str(self.driver.find_element(By.XPATH, element).get_attribute('value'))
return textValue
except:
console("clearSendKeys EXCEPT")
self.LogOff()
# //*[@id="details-button"] advanced
# //*[@id="proceed-link"] proceed link
# # position = Select(driver.find_element(By.XPATH, position_path))
# # position.select_by_visible_text(functionkey)
if __name__ == "__main__":
    # Ad-hoc manual test driver: create the component and log in to a phone's
    # Web UI directly by IP address. (The previous block of commented-out
    # experimental calls was removed as dead code.)
    obj = WebUIComponent()
    obj.loginWebUI(phone='10.112.123.45')
    quit()
|
[
"Vijay.Pawar@mitel.com"
] |
Vijay.Pawar@mitel.com
|
ecd426d08def8b51ef4bf7172607e4690d2fe7ae
|
efa152f1838156eef73f2e92b40df8598113cfef
|
/solid-principles/open-closed-principle.py
|
1ddf6a1d179a17d043e103c70f8d2a7d9b853130
|
[] |
no_license
|
lexuancuong/SoftwareDesign
|
0b18586839e72cdc18440b7541015ae8664ffe5f
|
49126bc3bddf802cf50884dd47a67ce0bd5d421e
|
refs/heads/master
| 2023-07-06T02:41:49.569421
| 2021-08-07T11:11:29
| 2021-08-07T11:11:29
| 255,578,946
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 425
|
py
|
#[[OBJECT OR ENTITIES SHOULD BE OPEN FOR EXTENSION BUT CLOSED FOR MODIFICATION]]
# This principle mean that you need to abstractize the base class. If you want to change something, you need to customize it in the base classes. Dont change it in ther top classes
# I have implement an example code that using abstract class in single responsibility principle.
# I use Shape as base class, that Square and Circle inherit it
|
[
"cuongle.software@gmail.com"
] |
cuongle.software@gmail.com
|
98d18f60db6f9d172118700744b25c66f8263cdd
|
9db05efc1aeafa3a60823c053b9b6e5e6f900e4c
|
/setup.py
|
c66d6199914a7e3421f0337a37c1986388a849db
|
[] |
no_license
|
Henri-Laiho/optiline-side
|
ded1a12a6c07af9f0ca8982dbbf03c42b90332b7
|
910adaf513ac3049afef137b72a2913bbee3ba3a
|
refs/heads/master
| 2022-12-30T15:21:06.950461
| 2020-10-19T09:08:35
| 2020-10-19T09:08:35
| 302,873,955
| 1
| 1
| null | 2020-10-13T13:02:37
| 2020-10-10T10:16:43
|
Python
|
UTF-8
|
Python
| false
| false
| 186
|
py
|
from setuptools import setup
setup(
name='optiline-side',
version='1.1',
packages=[''],
url='',
license='',
author='',
author_email='',
description=''
)
|
[
"gghalfer@gmail.com"
] |
gghalfer@gmail.com
|
41eb1f95e04baf4c1693f5cd76d8c952fb22ac2b
|
0e1e643e864bcb96cf06f14f4cb559b034e114d0
|
/Exps_7_v3/I_to_M_Gk3_no_pad/pyramid_size256/pyr_4s/bce_s001_tv_s0p1_L7/step11_L2345678.py
|
d32ad63e9e2ef902bab64c6f49cccaa8d83a9971
|
[] |
no_license
|
KongBOy/kong_model2
|
33a94a9d2be5b0f28f9d479b3744e1d0e0ebd307
|
1af20b168ffccf0d5293a393a40a9fa9519410b2
|
refs/heads/master
| 2022-10-14T03:09:22.543998
| 2022-10-06T11:33:42
| 2022-10-06T11:33:42
| 242,080,692
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 46,385
|
py
|
#############################################################################################################################################################################################################
#############################################################################################################################################################################################################
### 把 kong_model2 加入 sys.path
import os
code_exe_path = os.path.realpath(__file__)                    ### absolute path of the currently executing script
code_exe_path_element = code_exe_path.split("\\")             ### split into components (Windows-style "\\" separators — assumes Windows paths)
kong_layer = code_exe_path_element.index("kong_model2")       ### index of the repo-root component "kong_model2"
kong_model2_dir = "\\".join(code_exe_path_element[:kong_layer + 1])  ### path up to and including the kong_model2 directory
import sys                                                    ### make the repo root importable
sys.path.append(kong_model2_dir)
# print(__file__.split("\\")[-1])
# print("        code_exe_path:", code_exe_path)
# print("        code_exe_path_element:", code_exe_path_element)
# print("        kong_layer:", kong_layer)
# print("        kong_model2_dir:", kong_model2_dir)
###############################################################################################################################################################################################################
# When launched (e.g. via F5) from a different working directory, switch to this
# script's own directory so the sibling step10_a.py can be imported.
code_exe_dir = os.path.dirname(code_exe_path)  ### directory containing this script
if(os.getcwd() != code_exe_dir):               ### only chdir when not already there
    os.chdir(code_exe_dir)
# print("current_path:", os.getcwd())
###############################################################################################################################################################################################################
import Exps_7_v3.I_to_M_Gk3_no_pad.pyramid_size256.pyr_0s.bce_s001_tv_s0p1_L7.step10_a as L7_0side
import Exps_7_v3.I_to_M_Gk3_no_pad.pyramid_size256.pyr_1s.bce_s001_tv_s0p1_L7.step10_a as L7_1side
import Exps_7_v3.I_to_M_Gk3_no_pad.pyramid_size256.pyr_2s.bce_s001_tv_s0p1_L7.step10_a as L7_2side
import Exps_7_v3.I_to_M_Gk3_no_pad.pyramid_size256.pyr_3s.bce_s001_tv_s0p1_L7.step10_a as L7_3side
import step10_a as L7_4side
#################################################################################################################################################################################################################################################################################################################################################################################################
########
# 1side_1
########
### 2side_1
# Experiment-config grid for 1side_1 / 2side_1. Rows appear to index the
# 3side level and columns the 4side level; L7_4side.empty pads cells with no
# corresponding experiment — TODO confirm against the L7_* modules.
ch032_1side_1_2side_1_34side_all = [
    [L7_2side.ch032_1side_1__2side_1         , L7_4side.empty                                   , ],
    [L7_3side.ch032_1side_1__2side_1__3side_1, L7_4side.ch032_1side_1__2side_1__3side_1_4side_1 , ],
]
########
# 1side_2
########
### 2side_1
ch032_1side_2_2side_1_34side_all = [
[L7_2side.ch032_1side_2__2side_1 , L7_4side.empty , ],
[L7_3side.ch032_1side_2__2side_1__3side_1, L7_4side.ch032_1side_2__2side_1__3side_1_4side_1 , ],
]
### 2side_2
ch032_1side_2_2side_2_34side_all = [
[L7_2side.ch032_1side_2__2side_2 , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_2__2side_2__3side_1, L7_4side.ch032_1side_2__2side_2__3side_1_4side_1 , L7_4side.empty , ],
[L7_3side.ch032_1side_2__2side_2__3side_2, L7_4side.ch032_1side_2__2side_2__3side_2_4side_1 , L7_4side.ch032_1side_2__2side_2__3side_2_4side_2 , ],
]
########
# 1side_3
########
### 2side_1
ch032_1side_3_2side_1_34side_all = [
[L7_2side.ch032_1side_3__2side_1 , L7_4side.empty , ],
[L7_3side.ch032_1side_3__2side_1__3side_1, L7_4side.ch032_1side_3__2side_1__3side_1_4side_1 , ],
]
### 2side_2
ch032_1side_3_2side_2_34side_all = [
[L7_2side.ch032_1side_3__2side_2 , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_3__2side_2__3side_1, L7_4side.ch032_1side_3__2side_2__3side_1_4side_1 , L7_4side.empty , ],
[L7_3side.ch032_1side_3__2side_2__3side_2, L7_4side.ch032_1side_3__2side_2__3side_2_4side_1 , L7_4side.ch032_1side_3__2side_2__3side_2_4side_2 , ],
]
### 2side_3
ch032_1side_3_2side_3_34side_all = [
[L7_2side.ch032_1side_3__2side_3 , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_3__2side_3__3side_1, L7_4side.ch032_1side_3__2side_3__3side_1_4side_1 , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_3__2side_3__3side_2, L7_4side.ch032_1side_3__2side_3__3side_2_4side_1 , L7_4side.ch032_1side_3__2side_3__3side_2_4side_2 , L7_4side.empty , ],
[L7_3side.ch032_1side_3__2side_3__3side_3, L7_4side.ch032_1side_3__2side_3__3side_3_4side_1 , L7_4side.ch032_1side_3__2side_3__3side_3_4side_2 , L7_4side.ch032_1side_3__2side_3__3side_3_4side_3 , ],
]
########
# 1side_4
########
### 2side_1
ch032_1side_4_2side_1_34side_all = [
[L7_2side.ch032_1side_4__2side_1 , L7_4side.empty , ],
[L7_3side.ch032_1side_4__2side_1__3side_1, L7_4side.ch032_1side_4__2side_1__3side_1_4side_1 , ],
]
### 2side_2
ch032_1side_4_2side_2_34side_all = [
[L7_2side.ch032_1side_4__2side_2 , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_4__2side_2__3side_1, L7_4side.ch032_1side_4__2side_2__3side_1_4side_1 , L7_4side.empty , ],
[L7_3side.ch032_1side_4__2side_2__3side_2, L7_4side.ch032_1side_4__2side_2__3side_2_4side_1 , L7_4side.ch032_1side_4__2side_2__3side_2_4side_2 , ],
]
### 2side_3
ch032_1side_4_2side_3_34side_all = [
[L7_2side.ch032_1side_4__2side_3 , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_4__2side_3__3side_1, L7_4side.ch032_1side_4__2side_3__3side_1_4side_1 , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_4__2side_3__3side_2, L7_4side.ch032_1side_4__2side_3__3side_2_4side_1 , L7_4side.ch032_1side_4__2side_3__3side_2_4side_2 , L7_4side.empty , ],
[L7_3side.ch032_1side_4__2side_3__3side_3, L7_4side.ch032_1side_4__2side_3__3side_3_4side_1 , L7_4side.ch032_1side_4__2side_3__3side_3_4side_2 , L7_4side.ch032_1side_4__2side_3__3side_3_4side_3 , ],
]
### 2side_4
ch032_1side_4_2side_4_34side_all = [
[L7_2side.ch032_1side_4__2side_4 , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_4__2side_4__3side_1, L7_4side.ch032_1side_4__2side_4__3side_1_4side_1 , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_4__2side_4__3side_2, L7_4side.ch032_1side_4__2side_4__3side_2_4side_1 , L7_4side.ch032_1side_4__2side_4__3side_2_4side_2 , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_4__2side_4__3side_3, L7_4side.ch032_1side_4__2side_4__3side_3_4side_1 , L7_4side.ch032_1side_4__2side_4__3side_3_4side_2 , L7_4side.ch032_1side_4__2side_4__3side_3_4side_3 , L7_4side.empty , ],
[L7_3side.ch032_1side_4__2side_4__3side_4, L7_4side.ch032_1side_4__2side_4__3side_4_4side_1 , L7_4side.ch032_1side_4__2side_4__3side_4_4side_2 , L7_4side.ch032_1side_4__2side_4__3side_4_4side_3 , L7_4side.ch032_1side_4__2side_4__3side_4_4side_4 , ],
]
########
# 1side_5
########
### 2side_1
ch032_1side_5_2side_1_34side_all = [
[L7_2side.ch032_1side_5__2side_1 , L7_4side.empty , ],
[L7_3side.ch032_1side_5__2side_1__3side_1, L7_4side.ch032_1side_5__2side_1__3side_1_4side_1 , ],
]
### 2side_2
ch032_1side_5_2side_2_34side_all = [
[L7_2side.ch032_1side_5__2side_2 , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_5__2side_2__3side_1, L7_4side.ch032_1side_5__2side_2__3side_1_4side_1 , L7_4side.empty , ],
[L7_3side.ch032_1side_5__2side_2__3side_2, L7_4side.ch032_1side_5__2side_2__3side_2_4side_1 , L7_4side.ch032_1side_5__2side_2__3side_2_4side_2 , ],
]
### 2side_3
ch032_1side_5_2side_3_34side_all = [
[L7_2side.ch032_1side_5__2side_3 , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_5__2side_3__3side_1, L7_4side.ch032_1side_5__2side_3__3side_1_4side_1 , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_5__2side_3__3side_2, L7_4side.ch032_1side_5__2side_3__3side_2_4side_1 , L7_4side.ch032_1side_5__2side_3__3side_2_4side_2 , L7_4side.empty , ],
[L7_3side.ch032_1side_5__2side_3__3side_3, L7_4side.ch032_1side_5__2side_3__3side_3_4side_1 , L7_4side.ch032_1side_5__2side_3__3side_3_4side_2 , L7_4side.ch032_1side_5__2side_3__3side_3_4side_3 , ],
]
### 2side_4
ch032_1side_5_2side_4_34side_all = [
[L7_2side.ch032_1side_5__2side_4 , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_5__2side_4__3side_1, L7_4side.ch032_1side_5__2side_4__3side_1_4side_1 , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_5__2side_4__3side_2, L7_4side.ch032_1side_5__2side_4__3side_2_4side_1 , L7_4side.ch032_1side_5__2side_4__3side_2_4side_2 , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_5__2side_4__3side_3, L7_4side.ch032_1side_5__2side_4__3side_3_4side_1 , L7_4side.ch032_1side_5__2side_4__3side_3_4side_2 , L7_4side.ch032_1side_5__2side_4__3side_3_4side_3 , L7_4side.empty , ],
[L7_3side.ch032_1side_5__2side_4__3side_4, L7_4side.ch032_1side_5__2side_4__3side_4_4side_1 , L7_4side.ch032_1side_5__2side_4__3side_4_4side_2 , L7_4side.ch032_1side_5__2side_4__3side_4_4side_3 , L7_4side.ch032_1side_5__2side_4__3side_4_4side_4 , ],
]
### 2side_5
ch032_1side_5_2side_5_34side_all = [
[L7_2side.ch032_1side_5__2side_5 , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_5__2side_5__3side_1, L7_4side.ch032_1side_5__2side_5__3side_1_4side_1 , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_5__2side_5__3side_2, L7_4side.ch032_1side_5__2side_5__3side_2_4side_1 , L7_4side.ch032_1side_5__2side_5__3side_2_4side_2 , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_5__2side_5__3side_3, L7_4side.ch032_1side_5__2side_5__3side_3_4side_1 , L7_4side.ch032_1side_5__2side_5__3side_3_4side_2 , L7_4side.ch032_1side_5__2side_5__3side_3_4side_3 , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_5__2side_5__3side_4, L7_4side.ch032_1side_5__2side_5__3side_4_4side_1 , L7_4side.ch032_1side_5__2side_5__3side_4_4side_2 , L7_4side.ch032_1side_5__2side_5__3side_4_4side_3 , L7_4side.ch032_1side_5__2side_5__3side_4_4side_4 , L7_4side.empty , ],
[L7_3side.ch032_1side_5__2side_5__3side_5, L7_4side.ch032_1side_5__2side_5__3side_5_4side_1 , L7_4side.ch032_1side_5__2side_5__3side_5_4side_2 , L7_4side.ch032_1side_5__2side_5__3side_5_4side_3 , L7_4side.ch032_1side_5__2side_5__3side_5_4side_4 , L7_4side.ch032_1side_5__2side_5__3side_5_4side_5 , ],
]
########
# 1side_6
########
### 2side_1
ch032_1side_6_2side_1_34side_all = [
[L7_2side.ch032_1side_6__2side_1 , L7_4side.empty , ],
[L7_3side.ch032_1side_6__2side_1__3side_1, L7_4side.ch032_1side_6__2side_1__3side_1_4side_1 , ],
]
### 2side_2
ch032_1side_6_2side_2_34side_all = [
[L7_2side.ch032_1side_6__2side_2 , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_6__2side_2__3side_1, L7_4side.ch032_1side_6__2side_2__3side_1_4side_1 , L7_4side.empty , ],
[L7_3side.ch032_1side_6__2side_2__3side_2, L7_4side.ch032_1side_6__2side_2__3side_2_4side_1 , L7_4side.ch032_1side_6__2side_2__3side_2_4side_2 , ],
]
### 2side_3
ch032_1side_6_2side_3_34side_all = [
[L7_2side.ch032_1side_6__2side_3 , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_6__2side_3__3side_1, L7_4side.ch032_1side_6__2side_3__3side_1_4side_1 , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_6__2side_3__3side_2, L7_4side.ch032_1side_6__2side_3__3side_2_4side_1 , L7_4side.ch032_1side_6__2side_3__3side_2_4side_2 , L7_4side.empty , ],
[L7_3side.ch032_1side_6__2side_3__3side_3, L7_4side.ch032_1side_6__2side_3__3side_3_4side_1 , L7_4side.ch032_1side_6__2side_3__3side_3_4side_2 , L7_4side.ch032_1side_6__2side_3__3side_3_4side_3 , ],
]
### 2side_4
ch032_1side_6_2side_4_34side_all = [
[L7_2side.ch032_1side_6__2side_4 , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_6__2side_4__3side_1, L7_4side.ch032_1side_6__2side_4__3side_1_4side_1 , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_6__2side_4__3side_2, L7_4side.ch032_1side_6__2side_4__3side_2_4side_1 , L7_4side.ch032_1side_6__2side_4__3side_2_4side_2 , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_6__2side_4__3side_3, L7_4side.ch032_1side_6__2side_4__3side_3_4side_1 , L7_4side.ch032_1side_6__2side_4__3side_3_4side_2 , L7_4side.ch032_1side_6__2side_4__3side_3_4side_3 , L7_4side.empty , ],
[L7_3side.ch032_1side_6__2side_4__3side_4, L7_4side.ch032_1side_6__2side_4__3side_4_4side_1 , L7_4side.ch032_1side_6__2side_4__3side_4_4side_2 , L7_4side.ch032_1side_6__2side_4__3side_4_4side_3 , L7_4side.ch032_1side_6__2side_4__3side_4_4side_4 , ],
]
### 2side_5
ch032_1side_6_2side_5_34side_all = [
[L7_2side.ch032_1side_6__2side_5 , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_6__2side_5__3side_1, L7_4side.ch032_1side_6__2side_5__3side_1_4side_1 , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_6__2side_5__3side_2, L7_4side.ch032_1side_6__2side_5__3side_2_4side_1 , L7_4side.ch032_1side_6__2side_5__3side_2_4side_2 , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_6__2side_5__3side_3, L7_4side.ch032_1side_6__2side_5__3side_3_4side_1 , L7_4side.ch032_1side_6__2side_5__3side_3_4side_2 , L7_4side.ch032_1side_6__2side_5__3side_3_4side_3 , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_6__2side_5__3side_4, L7_4side.ch032_1side_6__2side_5__3side_4_4side_1 , L7_4side.ch032_1side_6__2side_5__3side_4_4side_2 , L7_4side.ch032_1side_6__2side_5__3side_4_4side_3 , L7_4side.ch032_1side_6__2side_5__3side_4_4side_4 , L7_4side.empty , ],
[L7_3side.ch032_1side_6__2side_5__3side_5, L7_4side.ch032_1side_6__2side_5__3side_5_4side_1 , L7_4side.ch032_1side_6__2side_5__3side_5_4side_2 , L7_4side.ch032_1side_6__2side_5__3side_5_4side_3 , L7_4side.ch032_1side_6__2side_5__3side_5_4side_4 , L7_4side.ch032_1side_6__2side_5__3side_5_4side_5 , ],
]
### 2side_6
ch032_1side_6_2side_6_34side_all = [
[L7_2side.ch032_1side_6__2side_6 , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_6__2side_6__3side_1, L7_4side.ch032_1side_6__2side_6__3side_1_4side_1 , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_6__2side_6__3side_2, L7_4side.ch032_1side_6__2side_6__3side_2_4side_1 , L7_4side.ch032_1side_6__2side_6__3side_2_4side_2 , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_6__2side_6__3side_3, L7_4side.ch032_1side_6__2side_6__3side_3_4side_1 , L7_4side.ch032_1side_6__2side_6__3side_3_4side_2 , L7_4side.ch032_1side_6__2side_6__3side_3_4side_3 , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_6__2side_6__3side_4, L7_4side.ch032_1side_6__2side_6__3side_4_4side_1 , L7_4side.ch032_1side_6__2side_6__3side_4_4side_2 , L7_4side.ch032_1side_6__2side_6__3side_4_4side_3 , L7_4side.ch032_1side_6__2side_6__3side_4_4side_4 , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_6__2side_6__3side_5, L7_4side.ch032_1side_6__2side_6__3side_5_4side_1 , L7_4side.ch032_1side_6__2side_6__3side_5_4side_2 , L7_4side.ch032_1side_6__2side_6__3side_5_4side_3 , L7_4side.ch032_1side_6__2side_6__3side_5_4side_4 , L7_4side.ch032_1side_6__2side_6__3side_5_4side_5 , L7_4side.empty , ],
[L7_3side.ch032_1side_6__2side_6__3side_6, L7_4side.ch032_1side_6__2side_6__3side_6_4side_1 , L7_4side.ch032_1side_6__2side_6__3side_6_4side_2 , L7_4side.ch032_1side_6__2side_6__3side_6_4side_3 , L7_4side.ch032_1side_6__2side_6__3side_6_4side_4 , L7_4side.ch032_1side_6__2side_6__3side_6_4side_5 , L7_4side.ch032_1side_6__2side_6__3side_6_4side_6 , ],
]
########
# 1side_7
########
### 2side_1
ch032_1side_7_2side_1_34side_all = [
[L7_2side.ch032_1side_7__2side_1 , L7_4side.empty , ],
[L7_3side.ch032_1side_7__2side_1__3side_1, L7_4side.ch032_1side_7__2side_1__3side_1_4side_1 , ],
]
### 2side_2
ch032_1side_7_2side_2_34side_all = [
[L7_2side.ch032_1side_7__2side_2 , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_7__2side_2__3side_1, L7_4side.ch032_1side_7__2side_2__3side_1_4side_1 , L7_4side.empty , ],
[L7_3side.ch032_1side_7__2side_2__3side_2, L7_4side.ch032_1side_7__2side_2__3side_2_4side_1 , L7_4side.ch032_1side_7__2side_2__3side_2_4side_2 , ],
]
### 2side_3
ch032_1side_7_2side_3_34side_all = [
[L7_2side.ch032_1side_7__2side_3 , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_7__2side_3__3side_1, L7_4side.ch032_1side_7__2side_3__3side_1_4side_1 , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_7__2side_3__3side_2, L7_4side.ch032_1side_7__2side_3__3side_2_4side_1 , L7_4side.ch032_1side_7__2side_3__3side_2_4side_2 , L7_4side.empty , ],
[L7_3side.ch032_1side_7__2side_3__3side_3, L7_4side.ch032_1side_7__2side_3__3side_3_4side_1 , L7_4side.ch032_1side_7__2side_3__3side_3_4side_2 , L7_4side.ch032_1side_7__2side_3__3side_3_4side_3 , ],
]
### 2side_4
ch032_1side_7_2side_4_34side_all = [
[L7_2side.ch032_1side_7__2side_4 , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_7__2side_4__3side_1, L7_4side.ch032_1side_7__2side_4__3side_1_4side_1 , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_7__2side_4__3side_2, L7_4side.ch032_1side_7__2side_4__3side_2_4side_1 , L7_4side.ch032_1side_7__2side_4__3side_2_4side_2 , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_7__2side_4__3side_3, L7_4side.ch032_1side_7__2side_4__3side_3_4side_1 , L7_4side.ch032_1side_7__2side_4__3side_3_4side_2 , L7_4side.ch032_1side_7__2side_4__3side_3_4side_3 , L7_4side.empty , ],
[L7_3side.ch032_1side_7__2side_4__3side_4, L7_4side.ch032_1side_7__2side_4__3side_4_4side_1 , L7_4side.ch032_1side_7__2side_4__3side_4_4side_2 , L7_4side.ch032_1side_7__2side_4__3side_4_4side_3 , L7_4side.ch032_1side_7__2side_4__3side_4_4side_4 , ],
]
### 2side_5
ch032_1side_7_2side_5_34side_all = [
[L7_2side.ch032_1side_7__2side_5 , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_7__2side_5__3side_1, L7_4side.ch032_1side_7__2side_5__3side_1_4side_1 , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_7__2side_5__3side_2, L7_4side.ch032_1side_7__2side_5__3side_2_4side_1 , L7_4side.ch032_1side_7__2side_5__3side_2_4side_2 , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_7__2side_5__3side_3, L7_4side.ch032_1side_7__2side_5__3side_3_4side_1 , L7_4side.ch032_1side_7__2side_5__3side_3_4side_2 , L7_4side.ch032_1side_7__2side_5__3side_3_4side_3 , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_7__2side_5__3side_4, L7_4side.ch032_1side_7__2side_5__3side_4_4side_1 , L7_4side.ch032_1side_7__2side_5__3side_4_4side_2 , L7_4side.ch032_1side_7__2side_5__3side_4_4side_3 , L7_4side.ch032_1side_7__2side_5__3side_4_4side_4 , L7_4side.empty , ],
[L7_3side.ch032_1side_7__2side_5__3side_5, L7_4side.ch032_1side_7__2side_5__3side_5_4side_1 , L7_4side.ch032_1side_7__2side_5__3side_5_4side_2 , L7_4side.ch032_1side_7__2side_5__3side_5_4side_3 , L7_4side.ch032_1side_7__2side_5__3side_5_4side_4 , L7_4side.ch032_1side_7__2side_5__3side_5_4side_5 , ],
]
### 2side_6
ch032_1side_7_2side_6_34side_all = [
[L7_2side.ch032_1side_7__2side_6 , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_7__2side_6__3side_1, L7_4side.ch032_1side_7__2side_6__3side_1_4side_1 , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_7__2side_6__3side_2, L7_4side.ch032_1side_7__2side_6__3side_2_4side_1 , L7_4side.ch032_1side_7__2side_6__3side_2_4side_2 , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_7__2side_6__3side_3, L7_4side.ch032_1side_7__2side_6__3side_3_4side_1 , L7_4side.ch032_1side_7__2side_6__3side_3_4side_2 , L7_4side.ch032_1side_7__2side_6__3side_3_4side_3 , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_7__2side_6__3side_4, L7_4side.ch032_1side_7__2side_6__3side_4_4side_1 , L7_4side.ch032_1side_7__2side_6__3side_4_4side_2 , L7_4side.ch032_1side_7__2side_6__3side_4_4side_3 , L7_4side.ch032_1side_7__2side_6__3side_4_4side_4 , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_7__2side_6__3side_5, L7_4side.ch032_1side_7__2side_6__3side_5_4side_1 , L7_4side.ch032_1side_7__2side_6__3side_5_4side_2 , L7_4side.ch032_1side_7__2side_6__3side_5_4side_3 , L7_4side.ch032_1side_7__2side_6__3side_5_4side_4 , L7_4side.ch032_1side_7__2side_6__3side_5_4side_5 , L7_4side.empty , ],
[L7_3side.ch032_1side_7__2side_6__3side_6, L7_4side.ch032_1side_7__2side_6__3side_6_4side_1 , L7_4side.ch032_1side_7__2side_6__3side_6_4side_2 , L7_4side.ch032_1side_7__2side_6__3side_6_4side_3 , L7_4side.ch032_1side_7__2side_6__3side_6_4side_4 , L7_4side.ch032_1side_7__2side_6__3side_6_4side_5 , L7_4side.ch032_1side_7__2side_6__3side_6_4side_6 , ],
]
### 2side_7
ch032_1side_7_2side_7_34side_all = [
[L7_2side.ch032_1side_7__2side_7 , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_7__2side_7__3side_1, L7_4side.ch032_1side_7__2side_7__3side_1_4side_1 , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_7__2side_7__3side_2, L7_4side.ch032_1side_7__2side_7__3side_2_4side_1 , L7_4side.ch032_1side_7__2side_7__3side_2_4side_2 , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_7__2side_7__3side_3, L7_4side.ch032_1side_7__2side_7__3side_3_4side_1 , L7_4side.ch032_1side_7__2side_7__3side_3_4side_2 , L7_4side.ch032_1side_7__2side_7__3side_3_4side_3 , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_7__2side_7__3side_4, L7_4side.ch032_1side_7__2side_7__3side_4_4side_1 , L7_4side.ch032_1side_7__2side_7__3side_4_4side_2 , L7_4side.ch032_1side_7__2side_7__3side_4_4side_3 , L7_4side.ch032_1side_7__2side_7__3side_4_4side_4 , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_7__2side_7__3side_5, L7_4side.ch032_1side_7__2side_7__3side_5_4side_1 , L7_4side.ch032_1side_7__2side_7__3side_5_4side_2 , L7_4side.ch032_1side_7__2side_7__3side_5_4side_3 , L7_4side.ch032_1side_7__2side_7__3side_5_4side_4 , L7_4side.ch032_1side_7__2side_7__3side_5_4side_5 , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_7__2side_7__3side_6, L7_4side.ch032_1side_7__2side_7__3side_6_4side_1 , L7_4side.ch032_1side_7__2side_7__3side_6_4side_2 , L7_4side.ch032_1side_7__2side_7__3side_6_4side_3 , L7_4side.ch032_1side_7__2side_7__3side_6_4side_4 , L7_4side.ch032_1side_7__2side_7__3side_6_4side_5 , L7_4side.ch032_1side_7__2side_7__3side_6_4side_6 , L7_4side.empty , ],
[L7_3side.ch032_1side_7__2side_7__3side_7, L7_4side.ch032_1side_7__2side_7__3side_7_4side_1 , L7_4side.ch032_1side_7__2side_7__3side_7_4side_2 , L7_4side.ch032_1side_7__2side_7__3side_7_4side_3 , L7_4side.ch032_1side_7__2side_7__3side_7_4side_4 , L7_4side.ch032_1side_7__2side_7__3side_7_4side_5 , L7_4side.ch032_1side_7__2side_7__3side_7_4side_6 , L7_4side.ch032_1side_7__2side_7__3side_7_4side_7 , ],
]
########
# 1side_8
########
### 2side_1
ch032_1side_8_2side_1_34side_all = [
[L7_2side.ch032_1side_8__2side_1 , L7_4side.empty , ],
[L7_3side.ch032_1side_8__2side_1__3side_1, L7_4side.ch032_1side_8__2side_1__3side_1_4side_1 , ],
]
### 2side_2
ch032_1side_8_2side_2_34side_all = [
[L7_2side.ch032_1side_8__2side_2 , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_8__2side_2__3side_1, L7_4side.ch032_1side_8__2side_2__3side_1_4side_1 , L7_4side.empty , ],
[L7_3side.ch032_1side_8__2side_2__3side_2, L7_4side.ch032_1side_8__2side_2__3side_2_4side_1 , L7_4side.ch032_1side_8__2side_2__3side_2_4side_2 , ],
]
### 2side_3
ch032_1side_8_2side_3_34side_all = [
[L7_2side.ch032_1side_8__2side_3 , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_8__2side_3__3side_1, L7_4side.ch032_1side_8__2side_3__3side_1_4side_1 , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_8__2side_3__3side_2, L7_4side.ch032_1side_8__2side_3__3side_2_4side_1 , L7_4side.ch032_1side_8__2side_3__3side_2_4side_2 , L7_4side.empty , ],
[L7_3side.ch032_1side_8__2side_3__3side_3, L7_4side.ch032_1side_8__2side_3__3side_3_4side_1 , L7_4side.ch032_1side_8__2side_3__3side_3_4side_2 , L7_4side.ch032_1side_8__2side_3__3side_3_4side_3 , ],
]
### 2side_4
ch032_1side_8_2side_4_34side_all = [
[L7_2side.ch032_1side_8__2side_4 , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_8__2side_4__3side_1, L7_4side.ch032_1side_8__2side_4__3side_1_4side_1 , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_8__2side_4__3side_2, L7_4side.ch032_1side_8__2side_4__3side_2_4side_1 , L7_4side.ch032_1side_8__2side_4__3side_2_4side_2 , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_8__2side_4__3side_3, L7_4side.ch032_1side_8__2side_4__3side_3_4side_1 , L7_4side.ch032_1side_8__2side_4__3side_3_4side_2 , L7_4side.ch032_1side_8__2side_4__3side_3_4side_3 , L7_4side.empty , ],
[L7_3side.ch032_1side_8__2side_4__3side_4, L7_4side.ch032_1side_8__2side_4__3side_4_4side_1 , L7_4side.ch032_1side_8__2side_4__3side_4_4side_2 , L7_4side.ch032_1side_8__2side_4__3side_4_4side_3 , L7_4side.ch032_1side_8__2side_4__3side_4_4side_4 , ],
]
### 2side_5
ch032_1side_8_2side_5_34side_all = [
[L7_2side.ch032_1side_8__2side_5 , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_8__2side_5__3side_1, L7_4side.ch032_1side_8__2side_5__3side_1_4side_1 , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_8__2side_5__3side_2, L7_4side.ch032_1side_8__2side_5__3side_2_4side_1 , L7_4side.ch032_1side_8__2side_5__3side_2_4side_2 , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_8__2side_5__3side_3, L7_4side.ch032_1side_8__2side_5__3side_3_4side_1 , L7_4side.ch032_1side_8__2side_5__3side_3_4side_2 , L7_4side.ch032_1side_8__2side_5__3side_3_4side_3 , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_8__2side_5__3side_4, L7_4side.ch032_1side_8__2side_5__3side_4_4side_1 , L7_4side.ch032_1side_8__2side_5__3side_4_4side_2 , L7_4side.ch032_1side_8__2side_5__3side_4_4side_3 , L7_4side.ch032_1side_8__2side_5__3side_4_4side_4 , L7_4side.empty , ],
[L7_3side.ch032_1side_8__2side_5__3side_5, L7_4side.ch032_1side_8__2side_5__3side_5_4side_1 , L7_4side.ch032_1side_8__2side_5__3side_5_4side_2 , L7_4side.ch032_1side_8__2side_5__3side_5_4side_3 , L7_4side.ch032_1side_8__2side_5__3side_5_4side_4 , L7_4side.ch032_1side_8__2side_5__3side_5_4side_5 , ],
]
### 2side_6
ch032_1side_8_2side_6_34side_all = [
[L7_2side.ch032_1side_8__2side_6 , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_8__2side_6__3side_1, L7_4side.ch032_1side_8__2side_6__3side_1_4side_1 , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_8__2side_6__3side_2, L7_4side.ch032_1side_8__2side_6__3side_2_4side_1 , L7_4side.ch032_1side_8__2side_6__3side_2_4side_2 , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_8__2side_6__3side_3, L7_4side.ch032_1side_8__2side_6__3side_3_4side_1 , L7_4side.ch032_1side_8__2side_6__3side_3_4side_2 , L7_4side.ch032_1side_8__2side_6__3side_3_4side_3 , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_8__2side_6__3side_4, L7_4side.ch032_1side_8__2side_6__3side_4_4side_1 , L7_4side.ch032_1side_8__2side_6__3side_4_4side_2 , L7_4side.ch032_1side_8__2side_6__3side_4_4side_3 , L7_4side.ch032_1side_8__2side_6__3side_4_4side_4 , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_8__2side_6__3side_5, L7_4side.ch032_1side_8__2side_6__3side_5_4side_1 , L7_4side.ch032_1side_8__2side_6__3side_5_4side_2 , L7_4side.ch032_1side_8__2side_6__3side_5_4side_3 , L7_4side.ch032_1side_8__2side_6__3side_5_4side_4 , L7_4side.ch032_1side_8__2side_6__3side_5_4side_5 , L7_4side.empty , ],
[L7_3side.ch032_1side_8__2side_6__3side_6, L7_4side.ch032_1side_8__2side_6__3side_6_4side_1 , L7_4side.ch032_1side_8__2side_6__3side_6_4side_2 , L7_4side.ch032_1side_8__2side_6__3side_6_4side_3 , L7_4side.ch032_1side_8__2side_6__3side_6_4side_4 , L7_4side.ch032_1side_8__2side_6__3side_6_4side_5 , L7_4side.ch032_1side_8__2side_6__3side_6_4side_6 , ],
]
### 2side_7
ch032_1side_8_2side_7_34side_all = [
[L7_2side.ch032_1side_8__2side_7 , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_8__2side_7__3side_1, L7_4side.ch032_1side_8__2side_7__3side_1_4side_1 , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_8__2side_7__3side_2, L7_4side.ch032_1side_8__2side_7__3side_2_4side_1 , L7_4side.ch032_1side_8__2side_7__3side_2_4side_2 , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_8__2side_7__3side_3, L7_4side.ch032_1side_8__2side_7__3side_3_4side_1 , L7_4side.ch032_1side_8__2side_7__3side_3_4side_2 , L7_4side.ch032_1side_8__2side_7__3side_3_4side_3 , L7_4side.empty , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_8__2side_7__3side_4, L7_4side.ch032_1side_8__2side_7__3side_4_4side_1 , L7_4side.ch032_1side_8__2side_7__3side_4_4side_2 , L7_4side.ch032_1side_8__2side_7__3side_4_4side_3 , L7_4side.ch032_1side_8__2side_7__3side_4_4side_4 , L7_4side.empty , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_8__2side_7__3side_5, L7_4side.ch032_1side_8__2side_7__3side_5_4side_1 , L7_4side.ch032_1side_8__2side_7__3side_5_4side_2 , L7_4side.ch032_1side_8__2side_7__3side_5_4side_3 , L7_4side.ch032_1side_8__2side_7__3side_5_4side_4 , L7_4side.ch032_1side_8__2side_7__3side_5_4side_5 , L7_4side.empty , L7_4side.empty , ],
[L7_3side.ch032_1side_8__2side_7__3side_6, L7_4side.ch032_1side_8__2side_7__3side_6_4side_1 , L7_4side.ch032_1side_8__2side_7__3side_6_4side_2 , L7_4side.ch032_1side_8__2side_7__3side_6_4side_3 , L7_4side.ch032_1side_8__2side_7__3side_6_4side_4 , L7_4side.ch032_1side_8__2side_7__3side_6_4side_5 , L7_4side.ch032_1side_8__2side_7__3side_6_4side_6 , L7_4side.empty , ],
[L7_3side.ch032_1side_8__2side_7__3side_7, L7_4side.ch032_1side_8__2side_7__3side_7_4side_1 , L7_4side.ch032_1side_8__2side_7__3side_7_4side_2 , L7_4side.ch032_1side_8__2side_7__3side_7_4side_3 , L7_4side.ch032_1side_8__2side_7__3side_7_4side_4 , L7_4side.ch032_1side_8__2side_7__3side_7_4side_5 , L7_4side.ch032_1side_8__2side_7__3side_7_4side_6 , L7_4side.ch032_1side_8__2side_7__3side_7_4side_7 , ],
]
### 2side_8
# 9x9 experiment-config grid for 1side_8 / 2side_8: row r holds 3side_r,
# column c holds 4side_c, and cells with c > r are padded with L7_4side.empty
# (same lower-triangle layout as every other table in this file).
# FIX: the last cell of the 3side_8 row previously repeated
# ch032_1side_8__2side_8__3side_8_4side_5 instead of ..._4side_8 (copy-paste
# slip), silently dropping the 4side_8 experiment from the grid.
ch032_1side_8_2side_8_34side_all = [
    [L7_2side.ch032_1side_8__2side_8         , L7_4side.empty                                  , L7_4side.empty                                  , L7_4side.empty                                  , L7_4side.empty                                  , L7_4side.empty                                  , L7_4side.empty                                  , L7_4side.empty                                  , L7_4side.empty                                  , ],
    [L7_3side.ch032_1side_8__2side_8__3side_1, L7_4side.ch032_1side_8__2side_8__3side_1_4side_1, L7_4side.empty                                  , L7_4side.empty                                  , L7_4side.empty                                  , L7_4side.empty                                  , L7_4side.empty                                  , L7_4side.empty                                  , L7_4side.empty                                  , ],
    [L7_3side.ch032_1side_8__2side_8__3side_2, L7_4side.ch032_1side_8__2side_8__3side_2_4side_1, L7_4side.ch032_1side_8__2side_8__3side_2_4side_2, L7_4side.empty                                  , L7_4side.empty                                  , L7_4side.empty                                  , L7_4side.empty                                  , L7_4side.empty                                  , L7_4side.empty                                  , ],
    [L7_3side.ch032_1side_8__2side_8__3side_3, L7_4side.ch032_1side_8__2side_8__3side_3_4side_1, L7_4side.ch032_1side_8__2side_8__3side_3_4side_2, L7_4side.ch032_1side_8__2side_8__3side_3_4side_3, L7_4side.empty                                  , L7_4side.empty                                  , L7_4side.empty                                  , L7_4side.empty                                  , L7_4side.empty                                  , ],
    [L7_3side.ch032_1side_8__2side_8__3side_4, L7_4side.ch032_1side_8__2side_8__3side_4_4side_1, L7_4side.ch032_1side_8__2side_8__3side_4_4side_2, L7_4side.ch032_1side_8__2side_8__3side_4_4side_3, L7_4side.ch032_1side_8__2side_8__3side_4_4side_4, L7_4side.empty                                  , L7_4side.empty                                  , L7_4side.empty                                  , L7_4side.empty                                  , ],
    [L7_3side.ch032_1side_8__2side_8__3side_5, L7_4side.ch032_1side_8__2side_8__3side_5_4side_1, L7_4side.ch032_1side_8__2side_8__3side_5_4side_2, L7_4side.ch032_1side_8__2side_8__3side_5_4side_3, L7_4side.ch032_1side_8__2side_8__3side_5_4side_4, L7_4side.ch032_1side_8__2side_8__3side_5_4side_5, L7_4side.empty                                  , L7_4side.empty                                  , L7_4side.empty                                  , ],
    [L7_3side.ch032_1side_8__2side_8__3side_6, L7_4side.ch032_1side_8__2side_8__3side_6_4side_1, L7_4side.ch032_1side_8__2side_8__3side_6_4side_2, L7_4side.ch032_1side_8__2side_8__3side_6_4side_3, L7_4side.ch032_1side_8__2side_8__3side_6_4side_4, L7_4side.ch032_1side_8__2side_8__3side_6_4side_5, L7_4side.ch032_1side_8__2side_8__3side_6_4side_6, L7_4side.empty                                  , L7_4side.empty                                  , ],
    [L7_3side.ch032_1side_8__2side_8__3side_7, L7_4side.ch032_1side_8__2side_8__3side_7_4side_1, L7_4side.ch032_1side_8__2side_8__3side_7_4side_2, L7_4side.ch032_1side_8__2side_8__3side_7_4side_3, L7_4side.ch032_1side_8__2side_8__3side_7_4side_4, L7_4side.ch032_1side_8__2side_8__3side_7_4side_5, L7_4side.ch032_1side_8__2side_8__3side_7_4side_6, L7_4side.ch032_1side_8__2side_8__3side_7_4side_7, L7_4side.empty                                  , ],
    [L7_3side.ch032_1side_8__2side_8__3side_8, L7_4side.ch032_1side_8__2side_8__3side_8_4side_1, L7_4side.ch032_1side_8__2side_8__3side_8_4side_2, L7_4side.ch032_1side_8__2side_8__3side_8_4side_3, L7_4side.ch032_1side_8__2side_8__3side_8_4side_4, L7_4side.ch032_1side_8__2side_8__3side_8_4side_5, L7_4side.ch032_1side_8__2side_8__3side_8_4side_6, L7_4side.ch032_1side_8__2side_8__3side_8_4side_7, L7_4side.ch032_1side_8__2side_8__3side_8_4side_8, ],
]
|
[
"s89334roy@yahoo.com.tw"
] |
s89334roy@yahoo.com.tw
|
be390ae9f202c71ab02345f1e03035e96da92b35
|
2c18bbedd58fbe6985ed483c14e194d0757807e2
|
/src/deploy/osp_deployer/checkpoints.py
|
5d267007f76c8650c6ce6c564d4e149dc6217d09
|
[
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer"
] |
permissive
|
kholohan/JetPack
|
ec5d406b327cc20367ffcb79af39934ee9704220
|
d3a6a80881b833105578461882fd49313e5e9d8f
|
refs/heads/master
| 2021-05-05T22:26:54.511952
| 2017-12-18T15:25:31
| 2017-12-18T15:25:31
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 24,604
|
py
|
# Copyright (c) 2015-2017 Dell Inc. or its subsidiaries.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from auto_common import Ssh
from osp_deployer.settings.config import Settings
from osp_deployer.settings_sanity import DeployerSanity
import time
import logging
import subprocess
logger = logging.getLogger("osp_deployer")
class Checkpoints():
    """Pre/post deployment sanity and health checks for an OSP deploy.

    Every ``verify_*`` / ``*_health_check`` method raises AssertionError
    on failure and returns None on success, so callers can simply invoke
    them in sequence.
    """

    def __init__(self):
        # Settings is a process-wide singleton populated by the deployer
        # before any checkpoint runs.
        self.settings = Settings.settings
        # Substring that `ping -c 1` emits on a successful single ping.
        self.ping_success = "packets transmitted, 1 received"
        self.director_ip = self.settings.director_node.public_api_ip
        self.sah_ip = self.settings.sah_node.public_api_ip
        self.rhscon_ip = self.settings.rhscon_node.public_api_ip
        self.verify_rhsm_status = self.settings.verify_rhsm_status

    @staticmethod
    def verify_deployer_settings():
        """Run the pre-deployment environment sanity checks."""
        logger.info("==== Running environment sanity tests")
        checks = DeployerSanity()
        checks.check_os_volume_size()
        checks.check_network_settings()
        checks.check_files()
        checks.check_network_overlaps()
        checks.check_duplicate_ips()
        checks.verify_overcloud_name()
        checks.verify_iha_dependency_on_fencing()

    @staticmethod
    def verify_subscription_status(public_api_ip, user, password, retries):
        """Poll ``subscription-manager status`` over SSH.

        Polls once a minute until the output contains "Current", reports
        "Unknown" (unrecoverable - returned immediately), or *retries*
        polls have elapsed.  Returns the last status output; the caller
        decides success by looking for "Current" in it.
        """
        i = 0
        subscription_status = Ssh.execute_command(
            public_api_ip,
            user,
            password,
            "subscription-manager status")[0]
        while "Current" not in subscription_status and i < retries:
            if "Unknown" in subscription_status:
                # Unrecoverable state - no point retrying.
                return subscription_status
            time.sleep(60)
            subscription_status = \
                Ssh.execute_command(public_api_ip,
                                    user,
                                    password,
                                    "subscription-manager status")[0]
            i += 1
        return subscription_status

    @staticmethod
    def verify_pools_attached(ip_addr, user, password, logfile):
        """Return False when *logfile* on the remote host contains any
        known pool/repo attachment error, True otherwise."""
        log_out = \
            Ssh.execute_command(ip_addr, user, password, "cat " + logfile)[0]
        error1 = 'No subscriptions are available from the pool with'
        error2 = 'Removed temporarly as this error will show when ' \
                 'not pulling from the cdn but internal repos'
        error3 = 'Could not find an OpenStack pool to attach to'
        if error1 in log_out or error2 in log_out or error3 in log_out:
            logger.info("*** post install log ***")
            logger.info(log_out)
            return False
        return True

    def ping_host(self, external_ip, user, passwd, target_host):
        """SSH to *external_ip* and ping *target_host* once, retrying up
        to 29 times; return the output of the last ping attempt."""
        for i in range(1, 30):
            ping_status = Ssh.execute_command(external_ip,
                                              user,
                                              passwd,
                                              "ping " + target_host +
                                              " -c 1 -w 30 ")[0]
            if self.ping_success in ping_status:
                logger.debug(
                    "Ping {} successful on attempt #{}".format(target_host, i))
                break
        # range(1, 30) always runs at least once, so ping_status is bound.
        return ping_status

    def _assert_ping(self, source_ip, user, password, target, fail_prefix):
        # Shared helper for the health checks below: ping *target* from
        # *source_ip* and raise AssertionError(fail_prefix + ping output)
        # when the ping never succeeds.
        result = self.ping_host(source_ip, user, password, target)
        if self.ping_success not in result:
            raise AssertionError(fail_prefix + result)

    def sah_health_check(self):
        """Verify the SAH node: RHSM registration, network reachability
        and KVM availability."""
        logger.info("SAH node health check")
        pwd = self.settings.sah_node.root_password
        if self.verify_rhsm_status:
            logger.debug("*** Verify the SAH node registered properly ***")
            for _ in range(60):
                subscription_status = self.verify_subscription_status(
                    self.sah_ip,
                    "root",
                    pwd,
                    self.settings.subscription_check_retries)
                if "Current" in subscription_status:
                    break
                time.sleep(2)
            else:
                raise AssertionError(
                    "SAH did not register properly : " + subscription_status)

        logger.debug("*** Verify the SAH can ping its public gateway")
        self._assert_ping(self.sah_ip, "root", pwd,
                          self.settings.public_api_gateway,
                          "SAH cannot ping its public gateway : ")
        logger.debug("*** Verify the SAH can ping the outside world (ip)")
        self._assert_ping(self.sah_ip, "root", pwd, "8.8.8.8",
                          "SAH cannot ping the outside world (ip) : ")
        logger.debug("*** Verify the SAH can ping the outside world (dns)")
        self._assert_ping(self.sah_ip, "root", pwd, "google.com",
                          "SAH cannot ping the outside world (dns) : ")
        logger.debug("*** Verify the SAH can ping the idrac network")
        # (message typo "networkn" fixed)
        self._assert_ping(self.sah_ip, "root", pwd,
                          self.settings.sah_node.idrac_ip,
                          "SAH cannot ping idrac network (ip) : ")

        logger.debug("*** Verify the SAH has KVM enabled *** ")
        cmd = 'ls -al /dev/kvm'
        # execute_command returns (stdout, stderr, ...); the error text
        # for a missing /dev/kvm shows up on stderr ([1]).
        if "No such file" in \
                Ssh.execute_command(self.sah_ip,
                                    "root",
                                    pwd,
                                    cmd)[1]:
            raise AssertionError(
                "KVM Not running on the SAH node - make sure "
                "the node has been DTK'ed/Virtualization enabled "
                "in the Bios")

    def director_vm_health_check(self):
        """Verify the Director VM: registration, repos and reachability."""
        setts = self.settings
        pwd = setts.director_node.root_password
        logger.info("Director VM health checks")
        if self.verify_rhsm_status:
            logger.debug("*** Verify the Director VM registered properly ***")
            subscription_status = self.verify_subscription_status(
                self.director_ip,
                "root",
                pwd,
                setts.subscription_check_retries)
            if "Current" not in subscription_status:
                raise AssertionError(
                    "Director VM did not register properly : " +
                    subscription_status)

        logger.debug(
            "*** Verify all pools registered & repositories subscribed ***")
        if self.verify_pools_attached(self.director_ip,
                                      "root",
                                      pwd,
                                      "/root/" + setts.director_node.hostname +
                                      "-posts.log") is False:
            raise AssertionError(
                "Director vm did not subscribe/attach "
                "repos properly, see log.")

        logger.debug("*** Verify the Director VM can ping its public gateway")
        self._assert_ping(self.director_ip, "root", pwd,
                          setts.public_api_gateway,
                          "Director VM cannot ping its public gateway : ")
        logger.debug(
            "*** Verify the Director VM can ping the outside world (ip)")
        self._assert_ping(self.director_ip, "root", pwd, "8.8.8.8",
                          "Director VM cannot ping the outside world (ip) : ")
        logger.debug(
            "*** Verify the Director VM can ping the outside world (dns)")
        self._assert_ping(self.director_ip, "root", pwd, "google.com",
                          "Director VM cannot ping the outside world (dns) : ")
        logger.debug(
            "*** Verify the Director VM can ping the SAH node "
            "through the provisioning network")
        self._assert_ping(self.director_ip, "root", pwd,
                          setts.sah_node.provisioning_ip,
                          "Director VM cannot ping the SAH node through "
                          "the provisioning network : ")
        logger.debug(
            "*** Verify the Director VM can ping the SAH node "
            "through the public network")
        # fixed: the failure message used to say "provisioning network".
        self._assert_ping(self.director_ip, "root", pwd, self.sah_ip,
                          "Director VM cannot ping the SAH node through "
                          "the public network : ")
        logger.debug("*** Verify the Director VM can ping the idrac network")
        # fixed: this check used to ping the SAH public ip (copy/paste),
        # which never exercised the idrac network at all.
        self._assert_ping(self.director_ip, "root", pwd,
                          setts.sah_node.idrac_ip,
                          "Director VM cannot ping idrac network (ip) : ")

    def rhscon_vm_health_check(self):
        """Verify the Storage Console VM: registration and reachability."""
        logger.info("Storage Console VM health checks")
        setts = self.settings
        pwd = setts.rhscon_node.root_password
        if self.verify_rhsm_status:
            logger.debug(
                "*** Verify the Storage Console VM registered properly ***")
            subscription_status = self.verify_subscription_status(
                self.rhscon_ip,
                "root",
                pwd,
                setts.subscription_check_retries)
            if "Current" not in subscription_status:
                raise AssertionError(
                    "Storage Console VM did not register properly : " +
                    subscription_status)

        logger.debug(
            "*** Verify the Storage Console VM can ping its public gateway")
        self._assert_ping(self.rhscon_ip, "root", pwd,
                          setts.public_api_gateway,
                          "RHSCON VM cannot ping its public gateway : ")
        logger.debug(
            "*** Verify the Storage Console VM " +
            "can ping the outside world (IP)")
        self._assert_ping(self.rhscon_ip, "root", pwd, "8.8.8.8",
                          "Storage Console VM cannot ping the outside "
                          "world (IP) : ")
        logger.debug("*** Verify the Storage Console VM can ping "
                     "the outside world (DNS)")
        self._assert_ping(self.rhscon_ip, "root", pwd, "google.com",
                          "Storage Console VM cannot ping the outside "
                          "world (DNS) : ")
        logger.debug(
            "*** Verify the Storage Console VM can ping the SAH node "
            "through the storage network")
        self._assert_ping(self.rhscon_ip, "root", pwd,
                          setts.sah_node.storage_ip,
                          "Storage Console VM cannot ping the SAH node "
                          "through the storage network : ")
        logger.debug(
            "*** Verify the Storage Console VM can ping the SAH "
            "node through the public network")
        self._assert_ping(self.rhscon_ip, "root", pwd, self.sah_ip,
                          "Storage Console VM cannot ping the SAH node "
                          "through the public network : ")
        logger.debug(
            "*** Verify the Storage Console VM can ping the Director VM "
            "through the public network")
        # fixed: the failure message used to say "provisioning network".
        self._assert_ping(self.rhscon_ip, "root", pwd, self.director_ip,
                          "Storage Console VM cannot ping the Director VM "
                          "through the public network : ")

    def verify_nodes_registered_in_ironic(self):
        """Assert ironic knows about every controller/compute/ceph node."""
        logger.debug("Verify the expected amount of nodes imported in ironic")
        cmd = "source ~/stackrc;ironic node-list | grep None"
        setts = self.settings
        out = Ssh.execute_command_tty(self.director_ip,
                                      setts.director_install_account_user,
                                      setts.director_install_account_pwd,
                                      cmd)
        ls_nodes = out[0].split("\n")
        ls_nodes.pop()  # drop the empty element after the final newline
        expected_nodes = len(setts.controller_nodes) + \
            len(setts.compute_nodes) + \
            len(setts.ceph_nodes)
        if len(ls_nodes) != expected_nodes:
            raise AssertionError(
                "Expected amount of nodes registered in Ironic "
                "does not add up " +
                str(len(ls_nodes)) + "/" + str(expected_nodes))

    def verify_introspection_sucessfull(self):
        """Assert every ironic node is 'available' after introspection.

        (The typo in the method name is kept for caller compatibility.)
        """
        logger.debug("Verify the introspection did not encounter any errors")
        cmd = "source ~/stackrc;ironic node-list | grep None"
        setts = self.settings
        out = Ssh.execute_command_tty(self.director_ip,
                                      setts.director_install_account_user,
                                      setts.director_install_account_pwd,
                                      cmd)
        # TODO :: if a node failed introspection - set it to PXE - reboot
        ls_nodes = out[0].split("\n")
        ls_nodes.pop()
        for node in ls_nodes:
            # Column 5 of the ironic table row is the provision state.
            state = node.split("|")[5]
            if "available" not in state:
                raise AssertionError(
                    "Node state not available post bulk introspection" +
                    "\n " + out[0])

    def verify_undercloud_installed(self):
        """Assert the undercloud install completed and the overcloud
        image was uploaded to glance."""
        logger.debug("Verify the undercloud installed properly")
        setts = self.settings
        user = setts.director_install_account_user
        pwd = setts.director_install_account_pwd
        out = Ssh.execute_command_tty(self.director_ip, user, pwd,
                                      "stat ~/stackrc")
        if "No such file or directory" in out[0]:
            raise AssertionError(
                "Director & Undercloud did not install properly, "
                "check /pilot/install-director.log for details")
        cmd = " grep \"Undercloud install complete\" " \
              "~/pilot/install-director.log"
        out = Ssh.execute_command_tty(self.director_ip, user, pwd, cmd)
        if "Undercloud install complete." not in out[0]:
            raise AssertionError(
                "Director & Undercloud did not install properly,"
                " check /pilot/install-director.log for details")
        out = Ssh.execute_command_tty(self.director_ip, user, pwd,
                                      "cat ~/pilot/install-director.log")
        if "There are no enabled repos" in out[0]:
            # fixed: the message used to embed a run of indentation spaces
            # via an accidental backslash line continuation in the literal.
            raise AssertionError(
                "Unable to attach to pool ID while updating the "
                "overcloud image")
        out = Ssh.execute_command_tty(self.director_ip, user, pwd,
                                      "source ~/stackrc;glance image-list")
        if "overcloud-full" not in out[0]:
            # fixed: the message used to read "packagedownload errors".
            raise AssertionError(
                "Unable to find the overcloud image in glance - "
                "check the install-director.log for possible package "
                "download errors")

    def verify_computes_virtualization_enabled(self):
        """Assert /dev/kvm exists on every deployed compute node."""
        logger.debug("*** Verify the Compute nodes have KVM enabled *** ")
        setts = self.settings
        cmd = "source ~/stackrc;nova list | grep compute"
        ssh_opts = (
            "-o StrictHostKeyChecking=no "
            "-o UserKnownHostsFile=/dev/null "
            "-o KbdInteractiveDevices=no")
        out = Ssh.execute_command_tty(self.director_ip,
                                      setts.director_install_account_user,
                                      setts.director_install_account_pwd,
                                      cmd)
        computes = out[0].split("\n")
        computes.pop()
        for each in computes:
            # Column 6 is the Networks column, e.g. "ctlplane=192.168.x.y".
            provisioning_ip = each.split("|")[6].split("=")[1]
            cmd = "ssh %s heat-admin@%s 'ls -al /dev/kvm'" % (
                ssh_opts, provisioning_ip)
            out = Ssh.execute_command_tty(
                self.director_ip,
                setts.director_install_account_user,
                setts.director_install_account_pwd, cmd)
            if "No such file" in out[0]:
                raise AssertionError(
                    "KVM Not running on Compute node '{}' -"
                    " make sure the node has been DTK'ed/Virtualization "
                    "enabled in the Bios".format(
                        provisioning_ip))

    def retreive_switches_config(self):
        """Copy each switch's running config to the bastion host via scp."""
        # fixed: original compared the switch list to 0; treat any falsy
        # value (0, None, empty list) as "nothing to do".
        if not self.settings.switches:
            return
        logger.info("Retreiving switch(es) configuration")
        for each in self.settings.switches:
            logger.info(
                "Retreiving configuration for switch " + each.switch_name)
            logger.info(
                self.execute_as_shell(each.ip, each.user, each.password,
                                      'show version'))
            logger.info(
                self.execute_as_shell(each.ip,
                                      each.user,
                                      each.password,
                                      'copy running-config scp://' +
                                      self.settings.bastion_host_user + ':' +
                                      self.settings.bastion_host_password +
                                      '@' + self.settings.bastion_host_ip +
                                      '//auto_results/switch-config-' +
                                      each.switch_name))

    @staticmethod
    def execute_as_shell(address, usr, pwd, command):
        """Run *command* in an interactive vt100 shell on *address* and
        return the captured output (reads until the '#' prompt returns)."""
        import paramiko
        conn = paramiko.SSHClient()
        conn.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        conn.connect(address, username=usr, password=pwd)
        channel = conn.invoke_shell(term='vt100', width=800, height=1000,
                                    width_pixels=0,
                                    height_pixels=0)
        time.sleep(1)
        channel.recv(9999)  # discard the login banner / initial prompt
        channel.send(command + "\n")
        buff = ''
        while not buff.endswith('#'):
            resp = channel.recv(9999)
            buff += resp
        conn.close()  # fixed: the SSH connection used to be leaked
        return buff   # fixed: the output used to be dropped (returned None)

    def verify_backends_connectivity(self):
        """When the Dell SC backend is enabled, verify controller/compute
        nodes can reach the SAN and ISCSI endpoints."""
        if not self.settings.enable_dellsc_backend:
            return
        setts = self.settings
        cmd = "source ~/stackrc;nova list | grep compute"
        out = Ssh.execute_command_tty(self.director_ip,
                                      setts.director_install_account_user,
                                      setts.director_install_account_pwd,
                                      cmd)
        ls = out[0].split("\n")
        ls.pop()
        compute_node_ip = ls[0].split("|")[6].split("=")[1]
        cmd = "source ~/stackrc;nova list | grep controller"
        out = Ssh.execute_command_tty(self.director_ip,
                                      setts.director_install_account_user,
                                      setts.director_install_account_pwd,
                                      cmd)
        ls = out[0].split("\n")
        ls.pop()
        controller_node_ip = ls[0].split("|")[6].split("=")[1]
        logger.debug("Verifying dellsc backend connectivity")
        logger.debug("Verify Controller nodes can ping the san ip")
        cmd = "ssh heat-admin@" + controller_node_ip +\
              " sudo ping " + setts.dellsc_san_ip +\
              " -c 1 -w 30 "
        out = Ssh.execute_command_tty(self.director_ip,
                                      setts.director_install_account_user,
                                      setts.director_install_account_pwd,
                                      cmd)
        if self.ping_success not in out[0]:
            raise AssertionError(controller_node_ip +
                                 " cannot ping the dellsc san ip " +
                                 setts.dellsc_san_ip)
        logger.debug("Verify Make sure ISCSI access work from Compute "
                     "& Controller nodes")
        for each in compute_node_ip, controller_node_ip:
            cmd = "ssh heat-admin@" + each +\
                  " sudo iscsiadm -m discovery -t sendtargets -p " +\
                  setts.dellsc_iscsi_ip_address +\
                  ":" + setts.dellsc_iscsi_port
            out = Ssh.execute_command_tty(
                self.director_ip,
                setts.director_install_account_user,
                setts.director_install_account_pwd,
                cmd)
            if "com.compellent" not in out[0]:
                raise AssertionError(
                    each +
                    " not able to validate ISCSI access to " +
                    setts.dellsc_iscsi_ip_address +
                    ":" + setts.dellsc_iscsi_port)
|
[
"gael01@gmail.com"
] |
gael01@gmail.com
|
f9dbfac65b4793686f531cc4ec8b1d6f0a2a6c52
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p02881/s975712873.py
|
e950898d06b66329af5a0e6c71bd6e8d7d05e1f4
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 120
|
py
|
# Find min((x-1) + (y-1)) over all factor pairs x*y == N.
# Worst case (N prime) is the pair (1, N), costing N - 1.
n_val = int(input())
best = n_val - 1
d = 1
while d * d <= n_val:
    if n_val % d == 0:
        best = min(best, d + n_val // d - 2)
    d += 1
print(best)
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
560d128f80170488cbfc0912c7031cbc66274cbc
|
bedef4c203bb7921aab99324c4f7be9d4ad86993
|
/ros2/src/airsim_ros_pkgs/launch/airsim_node.launch.py
|
8343f596eef5a14fc794d8af31662a7d019c654a
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
DavidLSmyth/AirSim
|
801438f61f37588c0a0ff2aa79f2128e2e3f4407
|
4266b898ac5b20f35ebca3f2e3fa821b336dec69
|
refs/heads/master
| 2021-12-15T14:55:55.554906
| 2021-12-08T07:36:03
| 2021-12-08T07:36:03
| 112,334,210
| 5
| 2
|
NOASSERTION
| 2021-12-09T11:50:26
| 2017-11-28T12:44:31
|
C++
|
UTF-8
|
Python
| false
| false
| 1,825
|
py
|
import os
from launch import LaunchDescription
from launch.actions import DeclareLaunchArgument, IncludeLaunchDescription
from launch.substitutions import LaunchConfiguration
from launch_ros.actions import Node
from launch.launch_description_sources import PythonLaunchDescriptionSource
from ament_index_python.packages import get_package_share_directory
def generate_launch_description():
    """Build the launch description for the AirSim ROS2 wrapper.

    Declares the launch arguments, starts ``airsim_node`` and pulls in
    the static-transform publishers from ``static_transforms.launch.py``.
    """
    # Launch arguments, overridable from the command line.
    output = DeclareLaunchArgument(
        "output",
        default_value='log')
    publish_clock = DeclareLaunchArgument(
        "publish_clock",
        default_value='False')
    is_vulkan = DeclareLaunchArgument(
        "is_vulkan",
        default_value='True')
    host = DeclareLaunchArgument(
        "host",
        default_value='localhost')
    airsim_node = Node(
        package='airsim_ros_pkgs',
        executable='airsim_node',
        name='airsim_node',
        output='screen',
        parameters=[{
            # NOTE(review): hard-coded to False although an 'is_vulkan'
            # launch argument (default 'True') is declared above, so the
            # argument is currently ignored -- confirm which is intended.
            'is_vulkan': False,
            'update_airsim_img_response_every_n_sec': 0.05,
            'update_airsim_control_every_n_sec': 0.01,
            'update_lidar_every_n_sec': 0.01,
            'publish_clock': LaunchConfiguration('publish_clock'),
            'host_ip': LaunchConfiguration('host')
        }])
    # Static TF tree shipped with the package.
    static_transforms = IncludeLaunchDescription(
        PythonLaunchDescriptionSource(
            os.path.join(get_package_share_directory('airsim_ros_pkgs'), 'launch/static_transforms.launch.py')
        )
    )
    # Create the launch description and populate it.
    ld = LaunchDescription()
    # Declare the launch options, then the included/launched actions.
    ld.add_action(output)
    ld.add_action(publish_clock)
    ld.add_action(is_vulkan)
    ld.add_action(host)
    ld.add_action(static_transforms)
    ld.add_action(airsim_node)
    return ld
|
[
"alonfaraj@gmail.com"
] |
alonfaraj@gmail.com
|
2d66c5a02e0c6f47e834a2bb9f8225ef2d8c18fc
|
5e95083d63ce1e76385dd34c96c13c7ac382aa28
|
/Минимальный делитель числа.py
|
37f2eb646e8d945ff4aacad95d43082c25716f5f
|
[] |
no_license
|
oOoSanyokoOo/Course-Python-Programming-Basics
|
1ee3bff98259951d87d8656af1519884fb089f41
|
88a8ada069da45a882942ef83dd3d3bcb9cb3b0d
|
refs/heads/main
| 2023-06-14T20:57:25.347205
| 2021-07-08T07:09:20
| 2021-07-08T07:09:20
| 384,029,659
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 268
|
py
|
def MinDivisor(n):
    """Print the smallest divisor (> 1) of n.

    Prints n itself when n is prime; prints nothing for n < 2.
    """
    d = 2
    while d * d <= n:
        if n % d == 0:
            print(d)
            return
        d += 1
    # No divisor up to sqrt(n): n is prime (when it is at least 2).
    if n >= 2:
        print(n)
# Read N from stdin and print its minimal divisor (N itself when prime).
n = int(input())
MinDivisor(n)
|
[
"noreply@github.com"
] |
oOoSanyokoOo.noreply@github.com
|
718e8aec118b7aaafe59d9c70370ed5893a1d19f
|
404e6736f71e80e07d85152120ca497a43bd248a
|
/platforms.py
|
e312c733ceebca3904b1492e65e3eeebddcea696
|
[] |
no_license
|
ravenusmc/aleppo_main
|
1f37cedc5700ad9ec60ae032edd634ec7fd7557e
|
ed4909d87824bc669101c1204d38405830531d28
|
refs/heads/master
| 2021-01-13T13:56:09.602232
| 2016-12-01T15:13:04
| 2016-12-01T15:13:04
| 72,948,576
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,085
|
py
|
#The code in this file will be dealing with the platforms in the game.
#Importing other modules that will be used in this file.
import pygame
from spritesheet_functions import SpriteSheet
#These are the platforms that will be used in the game. Each tuple is the
#(x, y, width, height) of a tile inside tiles_spritesheet.png: x,y locate
#the tile on the sheet; width and height are its size in pixels.
STONE_PLATFORM_LEFT = (432, 720, 70, 40)
STONE_PLATFORM_MIDDLE = (648, 648, 70, 40)
STONE_PLATFORM_RIGHT = (792, 648, 70, 40)
# Creating a platform class.
class Platform(pygame.sprite.Sprite):
    """One static platform tile, cut out of the shared sprite sheet."""

    def __init__(self, sprite_sheet_data):
        """sprite_sheet_data is (x, y, width, height) of the tile on
        tiles_spritesheet.png."""
        pygame.sprite.Sprite.__init__(self)
        # Load the shared sheet and grab just this platform's tile.
        sheet = SpriteSheet("tiles_spritesheet.png")
        x, y, width, height = sprite_sheet_data[:4]
        self.image = sheet.get_image(x, y, width, height)
        self.rect = self.image.get_rect()
#Setting up a class for the moving platform.
class MovingPlatform(Platform):
    # NOTE(review): `self.player` and `self.level` are expected to be
    # assigned by the level-construction code before update() is called;
    # they are not set anywhere in this class.

    #Per-frame movement in pixels. Class-level defaults; the level code
    #overrides them per instance.
    change_x = 0
    change_y = 0
    #Patrol boundaries for the platform's movement (world coordinates).
    boundary_top = 0
    boundary_bottom = 0
    boundary_left = 0
    boundary_right = 0
    #Move the platform one frame, pushing/carrying the player on contact.
    def update(self):
        #Moving the platform left/right first, then resolve x-collision.
        self.rect.x += self.change_x
        #Check whether the platform now overlaps the player.
        hit = pygame.sprite.collide_rect(self, self.player)
        if hit:
            # Moving left: push the player so their right edge sits
            # against our left edge.
            if self.change_x < 0:
                self.player.rect.right = self.rect.left
            else:
                # Moving right (or stationary): push the player rightwards.
                self.player.rect.left = self.rect.right
        # Move up/down, then resolve y-collision the same way.
        self.rect.y += self.change_y
        # Check whether the platform now overlaps the player vertically.
        hit = pygame.sprite.collide_rect(self, self.player)
        if hit:
            # Reset the player's position based on our top/bottom edge.
            if self.change_y < 0:
                self.player.rect.bottom = self.rect.top
            else:
                self.player.rect.top = self.rect.bottom
        # Reverse vertical direction at the top/bottom boundaries.
        if self.rect.bottom > self.boundary_bottom or self.rect.top < self.boundary_top:
            self.change_y *= -1
        # Horizontal boundaries are in world space, so undo the camera
        # scroll (level.world_shift) before comparing.
        cur_pos = self.rect.x - self.level.world_shift
        if cur_pos < self.boundary_left or cur_pos > self.boundary_right:
            self.change_x *= -1
|
[
"mcuddy77@gmail.com"
] |
mcuddy77@gmail.com
|
243b9e15dc0826f09dbd54f87d5d0bbd32327525
|
cb90b245dd9e5e111e421020da36c494422fdfb9
|
/Project Euler/Problem 34/euler_34_BG.py
|
e81527585c1a94b10b16cf2aff86ef60dcb322eb
|
[] |
no_license
|
bcgbr5/Interview_Questions
|
68b864ffecdab2af58835d4d1e8b3e3bbfac92b6
|
2953bc9dfc9fa1a33b409672cf68a87d013c4c72
|
refs/heads/master
| 2020-09-10T20:59:00.238815
| 2020-03-17T20:37:30
| 2020-03-17T20:37:30
| 221,832,831
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 490
|
py
|
#Project Euler Problem 34
#Solution : Brandon Greer
import math
max_num = math.factorial(9)*4#This is extremly arbitrary, but works
def evaluate_digit_factorial(number):
    """Return whether *number* equals the sum of the factorials of its digits.

    Fixed: the original returned True or (implicitly) None; now it returns
    an explicit bool, which is truthiness-compatible for existing callers.
    """
    return sum(math.factorial(int(digit)) for digit in str(number)) == number
# Collect every candidate (>= 3) equal to the sum of its digit factorials,
# then print their total (Project Euler 34 answer).
digit_factorials = [num for num in range(3, max_num)
                    if evaluate_digit_factorial(num)]
print(sum(digit_factorials))
|
[
"brandonchrisgreer@gmail.com"
] |
brandonchrisgreer@gmail.com
|
594b6f09ed52ac9a7dfb70ddee396015b9475aa5
|
db9c14d24b854679dd0e89c490061fc31d12a579
|
/Notebooks/Plotter.py
|
7413dc64bcba568c0f221e70fd0f91f6612a6cfd
|
[
"MIT"
] |
permissive
|
a-parida12/NumericalMethods
|
74c2dd7e16fe418f733d85d2bb12549796315900
|
a8b9438b9c70d1ab950f34784a2edbcd0baf092f
|
refs/heads/master
| 2021-01-25T13:47:55.913628
| 2018-10-29T00:16:35
| 2018-10-29T00:16:35
| 123,615,570
| 0
| 4
|
MIT
| 2018-10-29T00:16:36
| 2018-03-02T18:28:43
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 3,436
|
py
|
# Written by Abhijeet Parida(abhijeet.parida@tum.de)
from matplotlib.pyplot import figure, subplot, plot, title, xlabel, ylabel,legend
from numpy import arange,exp
def plotter_exp(p_exp, dt_all, tend, labels):
    """Plot numerical solutions against the analytic curve p(t) = 10/(1+9e^-t).

    Draws a 2x2 grid of subplots, one per step size in ``dt_all[:4]``,
    each comparing the approximation ``p_exp[dt]`` with the analytic
    solution sampled on the finest grid (``dt_all[-1]``).

    p_exp  -- dict mapping step size dt -> sequence of approximated values
    dt_all -- step sizes; the first four get a subplot each
    tend   -- end of the time interval [0, tend)
    labels -- legend label for the numerical curves
    """
    import matplotlib.pyplot as plt
    figure()
    # Analytic reference, evaluated once on the finest time grid.
    tip = arange(0, tend, dt_all[-1])
    p_ref = 10 / (1 + (9 * exp(-tip)))
    # One subplot per step size (deduplicates the original 4x copy/paste).
    for k, dt in enumerate(dt_all[:4]):
        subplot(2, 2, k + 1)
        t = arange(0, tend, dt)
        title("function p(t) v/s t for dt=" + str(dt))
        xlabel("t")
        ylabel("p(t)")
        handle1, = plot(tip, p_ref, 'b', label='Analytic Soln')
        handle2, = plot(t, p_exp[dt], 'r', label=labels)
        legend(handles=[handle1, handle2])
    plt.show()  # fixed: original referenced `plt.show` without calling it
    return
def plotter_imp(p_exp, dt_all, tend, labels):
    """Plot numerical solutions against the analytic curve
    p(t) = 200/(20 - 10e^-7t).

    Draws a 2x2 grid of subplots, one per step size in ``dt_all[:4]``,
    each comparing the approximation ``p_exp[dt]`` with the analytic
    solution sampled on the finest grid (``dt_all[-1]``).

    p_exp  -- dict mapping step size dt -> sequence of approximated values
    dt_all -- step sizes; the first four get a subplot each
    tend   -- end of the time interval [0, tend)
    labels -- legend label for the numerical curves
    """
    import matplotlib.pyplot as plt
    figure()
    # Analytic reference, evaluated once on the finest time grid.
    tip = arange(0, tend, dt_all[-1])
    p_ref = 200 / (20 - (10 * exp(-7 * tip)))
    # One subplot per step size (deduplicates the original 4x copy/paste).
    for k, dt in enumerate(dt_all[:4]):
        subplot(2, 2, k + 1)
        t = arange(0, tend, dt)
        title("function p(t) v/s t for dt=" + str(dt))
        xlabel("t")
        ylabel("p(t)")
        handle1, = plot(tip, p_ref, 'b', label='Analytic Soln')
        handle2, = plot(t, p_exp[dt], 'r', label=labels)
        legend(handles=[handle1, handle2])
    plt.show()
    return
|
[
"abhijeet.parida@tum.de"
] |
abhijeet.parida@tum.de
|
c4208d3020bec73223ae95b201c61962e6608d9c
|
94e74c49ce7db9f24dc343d09036036312547ca4
|
/tests/test_mtcaptcha.py
|
7e3bc9c357e85def0fd8258f4b55dafad585d77d
|
[
"Apache-2.0"
] |
permissive
|
rwigo/mtcaptcha-python
|
cf538bb4c2dbfeeeb0d015c767924193864fd099
|
c14841d2f4066d5241283f110514ea8ff400521c
|
refs/heads/master
| 2021-04-16T19:09:02.994482
| 2020-03-30T06:47:20
| 2020-03-30T06:47:20
| 249,378,376
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,215
|
py
|
from typing import List, Dict
from mtcaptcha import MTCaptchaTokenDecoderAndChecker
def run_one(privatekey: str, token: str, tokenJson: str,
            expectedDomains: List[str], expectedAction: str,
            isProductionEnv: bool):
    '''
    Decode one token, check it twice, and return the two check outcomes
    as a (first, second) tuple of booleans.
    '''
    checker = MTCaptchaTokenDecoderAndChecker(tokenMaxAgeSeconds=300000000000)
    # -----------DECODE THE TOKEN-------------- #
    info = checker.decodeMTToken(privatekey, token)
    print("DecodeError: \t" + str(info.decodeErrorMsg))
    print("MatchesExpectedJson: \t" + str(tokenJson == info.tokenInfoJson))
    print("TokenInfoJson: \t" + str(info.tokenInfoJson))
    print("TokenInfoPojo: \t" + str(info.tokenInfoPojo.__dict__))
    print()
    # ------------CHECK THE TOKEN------------- #
    # Checked twice on purpose: the second check exercises the
    # single-use/replay behaviour of the checker.
    outcomes = []
    for _ in range(2):
        checker.checkMTTokenSuccess(info, expectedDomains, expectedAction,
                                    isProductionEnv)
        print("CheckFailMsg:\t" + str(info.checkFailMsg))
        print("CheckSuccess:\t" + str(info.checkSuccess))
        outcomes.append(info.checkSuccess)
    print()
    print()
    return (outcomes[0], outcomes[1])
def test_all():
    """Exercise decode + double-check against two known tokens.

    token1 was solved on a production host with no action; token2 on a
    dev host with action "login".  The first check of each run_one call
    succeeds only when (action, isProductionEnv) match the token; the
    second check must always fail (tokens are single-use).
    Removed: unused locals (sitekey, expectedAction, isProductionEnv)
    and a no-op trailing `assert True`.
    """
    token1 = "v1(4a73c0ca,8793eb1b,MTPublic-hal9000uJ,adc8dad64a0dbc89c8adbfb315135a9e,eR9SmMaGRafgcFQsIKXvxW8r4nymbmBnlynA4jwsgOt_XO_IaxFa55c1O-qsQJQiNwPilInS4UBN_skpTQa_JyR1-aPWO_PxjlBUJr3djAk5vxQ9cITkL1rf-gRPr-ho8cEfK5AiAc_GJAyeI65UblJ4AZFg7en5dOsSpTHVEA6ISj-q1Ye5fqUf9e0nHQXu01XyIn4xY6QHhqNVSfVKCG3l8MLDuf8EOCyPsmPx8zmxe-5Dd6UJ8F43sWe_PZeDFrxuab5QzUeVDlbXbiWAcQetWAbtaqbrd-3PyydnnlqftfWPfs9ihC6qI6evMmVz5ZCiAnNvO0QX_NuCJYpYDQ**)"
    token1Json = "{\"v\":\"1.0\",\"code\":201,\"codeDesc\":\"valid:captcha-solved\",\"tokID\":\"adc8dad64a0dbc89c8adbfb315135a9e\",\"timestampSec\":981173106,\"timestampISO\":\"2001-02-03T04:05:06Z\",\"hostname\":\"some.example.com\",\"isDevHost\":false,\"action\":\"\",\"ip\":\"10.10.10.10\"}"
    token2 = "v1(0e798202,5d5f720c,MTPublic-hal9000uJ,ed0a316d94101c86886f5408cb0efa91,6i9SkZMiBmDRUfSi2YgZKsFn8_oVAFwqDG9eGW8gfed9-zz_2STbkWIynDodBfMzURDYCaORsbB2X0rU7CqNv8SBKbKv1jnatsJvhtbkwfj75lJxEFf1W_YtZTV1AL_MMl8lyPc5UcTEIWiApANWlnN83KkeC6MONXH_TzGwbjTuKbyW2Sf4HgVH3qiP60snBuKhI9DgXdvYB23mBUduzs1COlpQk4jZa8Tb-WfKEpHzA0VDM7XvQw4HQmtlt7V49JAk7F0qHO-VHFRVH3dLOqLqPPkGCHNAZJbGf79wEUrzL095-OhFfVMa5lVv1gt9vTQmsLUsQZSQfvyW4pnesw**)"
    token2Json = "{\"v\":\"1.0\",\"code\":211,\"codeDesc\":\"valid:ip-whitelisted\",\"tokID\":\"ed0a316d94101c86886f5408cb0efa91\",\"timestampSec\":981173106,\"timestampISO\":\"2001-02-03T04:05:06Z\",\"hostname\":\"more.example.com\",\"isDevHost\":true,\"action\":\"login\",\"ip\":\"10.10.10.10\"}"
    privatekey = "MTPrivat-hal9000uJ-WsPXwe3BatWpGZaEbja2mcO5r7h1h1PkFW2fRoyGRrp4ZH6yfq"
    # Domains the tokens are allowed to have been solved on.
    expectedDomains = {
        "another.example.com", "some.example.com", "more.example.com"
    }
    assert (True, False) == run_one(privatekey, token1, token1Json,
                                    expectedDomains, '', True)
    assert (False, False) == run_one(privatekey, token1, token1Json,
                                     expectedDomains, '', False)
    assert (False, False) == run_one(privatekey, token1, token1Json,
                                     expectedDomains, 'login', True)
    assert (False, False) == run_one(privatekey, token1, token1Json,
                                     expectedDomains, 'login', False)
    assert (False, False) == run_one(privatekey, token2, token2Json,
                                     expectedDomains, '', True)
    assert (False, False) == run_one(privatekey, token2, token2Json,
                                     expectedDomains, '', False)
    assert (False, False) == run_one(privatekey, token2, token2Json,
                                     expectedDomains, 'login', True)
    assert (True, False) == run_one(privatekey, token2, token2Json,
                                    expectedDomains, 'login', False)
|
[
"nicolas@rwigo.com"
] |
nicolas@rwigo.com
|
b273b99806127ec1498dff9872db2ac470d542b9
|
b74320ad439e37dfa48cd8db38dab3b7a20a36ff
|
/tests/pipelines/stable_diffusion_2/test_stable_diffusion_inpaint.py
|
68a4b5132375ade162505b9c3ad94abe7b80bfe2
|
[
"Apache-2.0"
] |
permissive
|
huggingface/diffusers
|
c82beba1ec5f0aba01b6744040a5accc41ec2493
|
5eeedd9e3336882d598091e191559f67433b6427
|
refs/heads/main
| 2023-08-29T01:22:52.237910
| 2023-08-28T18:16:27
| 2023-08-28T18:16:27
| 498,011,141
| 17,308
| 3,158
|
Apache-2.0
| 2023-09-14T20:57:44
| 2022-05-30T16:04:02
|
Python
|
UTF-8
|
Python
| false
| false
| 10,042
|
py
|
# coding=utf-8
# Copyright 2023 HuggingFace Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import gc
import random
import unittest
import numpy as np
import torch
from PIL import Image
from transformers import CLIPTextConfig, CLIPTextModel, CLIPTokenizer
from diffusers import AutoencoderKL, PNDMScheduler, StableDiffusionInpaintPipeline, UNet2DConditionModel
from diffusers.utils import floats_tensor, load_image, load_numpy, torch_device
from diffusers.utils.testing_utils import enable_full_determinism, require_torch_gpu, slow
from ..pipeline_params import TEXT_GUIDED_IMAGE_INPAINTING_BATCH_PARAMS, TEXT_GUIDED_IMAGE_INPAINTING_PARAMS
from ..test_pipelines_common import PipelineKarrasSchedulerTesterMixin, PipelineLatentTesterMixin, PipelineTesterMixin
enable_full_determinism()
class StableDiffusion2InpaintPipelineFastTests(
    PipelineLatentTesterMixin, PipelineKarrasSchedulerTesterMixin, PipelineTesterMixin, unittest.TestCase
):
    """Fast CPU tests for the SD2 inpainting pipeline using tiny dummy components."""

    pipeline_class = StableDiffusionInpaintPipeline
    params = TEXT_GUIDED_IMAGE_INPAINTING_PARAMS
    batch_params = TEXT_GUIDED_IMAGE_INPAINTING_BATCH_PARAMS
    # Image-based tester params deliberately disabled for now (see TO-DO below).
    image_params = frozenset(
        []
    )  # TO-DO: update image_params once pipeline is refactored with VaeImageProcessor.preprocess
    image_latents_params = frozenset([])

    def get_dummy_components(self):
        """Build a minimal, deterministic set of pipeline components (tiny UNet/VAE/CLIP)."""
        torch.manual_seed(0)
        unet = UNet2DConditionModel(
            block_out_channels=(32, 64),
            layers_per_block=2,
            sample_size=32,
            # 9 input channels — inpainting-style UNet; presumably latents +
            # masked-image latents + mask channel. TODO confirm against pipeline docs.
            in_channels=9,
            out_channels=4,
            down_block_types=("DownBlock2D", "CrossAttnDownBlock2D"),
            up_block_types=("CrossAttnUpBlock2D", "UpBlock2D"),
            cross_attention_dim=32,
            # SD2-specific config below
            attention_head_dim=(2, 4),
            use_linear_projection=True,
        )
        scheduler = PNDMScheduler(skip_prk_steps=True)
        torch.manual_seed(0)
        vae = AutoencoderKL(
            block_out_channels=[32, 64],
            in_channels=3,
            out_channels=3,
            down_block_types=["DownEncoderBlock2D", "DownEncoderBlock2D"],
            up_block_types=["UpDecoderBlock2D", "UpDecoderBlock2D"],
            latent_channels=4,
            sample_size=128,
        )
        torch.manual_seed(0)
        text_encoder_config = CLIPTextConfig(
            bos_token_id=0,
            eos_token_id=2,
            hidden_size=32,
            intermediate_size=37,
            layer_norm_eps=1e-05,
            num_attention_heads=4,
            num_hidden_layers=5,
            pad_token_id=1,
            vocab_size=1000,
            # SD2-specific config below
            hidden_act="gelu",
            projection_dim=512,
        )
        text_encoder = CLIPTextModel(text_encoder_config)
        tokenizer = CLIPTokenizer.from_pretrained("hf-internal-testing/tiny-random-clip")
        components = {
            "unet": unet,
            "scheduler": scheduler,
            "vae": vae,
            "text_encoder": text_encoder,
            "tokenizer": tokenizer,
            "safety_checker": None,
            "feature_extractor": None,
        }
        return components

    def get_dummy_inputs(self, device, seed=0):
        """Return deterministic prompt/image/mask/generator kwargs for the dummy pipeline."""
        # TODO: use tensor inputs instead of PIL, this is here just to leave the old expected_slices untouched
        image = floats_tensor((1, 3, 32, 32), rng=random.Random(seed)).to(device)
        image = image.cpu().permute(0, 2, 3, 1)[0]
        init_image = Image.fromarray(np.uint8(image)).convert("RGB").resize((64, 64))
        mask_image = Image.fromarray(np.uint8(image + 4)).convert("RGB").resize((64, 64))
        if str(device).startswith("mps"):
            # MPS lacks device-local generators; fall back to the global seed.
            generator = torch.manual_seed(seed)
        else:
            generator = torch.Generator(device=device).manual_seed(seed)
        inputs = {
            "prompt": "A painting of a squirrel eating a burger",
            "image": init_image,
            "mask_image": mask_image,
            "generator": generator,
            "num_inference_steps": 2,
            "guidance_scale": 6.0,
            "output_type": "numpy",
        }
        return inputs

    def test_stable_diffusion_inpaint(self):
        """Two-step CPU run must reproduce the stored 3x3 corner slice within 1e-2."""
        device = "cpu"  # ensure determinism for the device-dependent torch.Generator
        components = self.get_dummy_components()
        sd_pipe = StableDiffusionInpaintPipeline(**components)
        sd_pipe = sd_pipe.to(device)
        sd_pipe.set_progress_bar_config(disable=None)
        inputs = self.get_dummy_inputs(device)
        image = sd_pipe(**inputs).images
        image_slice = image[0, -3:, -3:, -1]
        assert image.shape == (1, 64, 64, 3)
        expected_slice = np.array([0.4727, 0.5735, 0.3941, 0.5446, 0.5926, 0.4394, 0.5062, 0.4654, 0.4476])
        assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2

    def test_inference_batch_single_identical(self):
        # Loosen the mixin's default tolerance for this pipeline.
        super().test_inference_batch_single_identical(expected_max_diff=3e-3)
@slow
@require_torch_gpu
class StableDiffusionInpaintPipelineIntegrationTests(unittest.TestCase):
    """Slow GPU integration tests for stabilityai/stable-diffusion-2-inpainting,
    comparing full runs against reference outputs hosted on the Hub."""

    def tearDown(self):
        # clean up the VRAM after each test
        super().tearDown()
        gc.collect()
        torch.cuda.empty_cache()

    def test_stable_diffusion_inpaint_pipeline(self):
        """Full-precision inpainting must match the stored reference image within 9e-3."""
        init_image = load_image(
            "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main"
            "/sd2-inpaint/init_image.png"
        )
        mask_image = load_image(
            "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main/sd2-inpaint/mask.png"
        )
        expected_image = load_numpy(
            "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main/sd2-inpaint"
            "/yellow_cat_sitting_on_a_park_bench.npy"
        )
        model_id = "stabilityai/stable-diffusion-2-inpainting"
        pipe = StableDiffusionInpaintPipeline.from_pretrained(model_id, safety_checker=None)
        pipe.to(torch_device)
        pipe.set_progress_bar_config(disable=None)
        pipe.enable_attention_slicing()
        prompt = "Face of a yellow cat, high resolution, sitting on a park bench"
        generator = torch.manual_seed(0)
        output = pipe(
            prompt=prompt,
            image=init_image,
            mask_image=mask_image,
            generator=generator,
            output_type="np",
        )
        image = output.images[0]
        assert image.shape == (512, 512, 3)
        assert np.abs(expected_image - image).max() < 9e-3

    def test_stable_diffusion_inpaint_pipeline_fp16(self):
        """fp16 run uses a dedicated fp16 reference and a much looser tolerance (5e-1)."""
        init_image = load_image(
            "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main"
            "/sd2-inpaint/init_image.png"
        )
        mask_image = load_image(
            "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main/sd2-inpaint/mask.png"
        )
        expected_image = load_numpy(
            "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main/sd2-inpaint"
            "/yellow_cat_sitting_on_a_park_bench_fp16.npy"
        )
        model_id = "stabilityai/stable-diffusion-2-inpainting"
        pipe = StableDiffusionInpaintPipeline.from_pretrained(
            model_id,
            torch_dtype=torch.float16,
            safety_checker=None,
        )
        pipe.to(torch_device)
        pipe.set_progress_bar_config(disable=None)
        pipe.enable_attention_slicing()
        prompt = "Face of a yellow cat, high resolution, sitting on a park bench"
        generator = torch.manual_seed(0)
        output = pipe(
            prompt=prompt,
            image=init_image,
            mask_image=mask_image,
            generator=generator,
            output_type="np",
        )
        image = output.images[0]
        assert image.shape == (512, 512, 3)
        assert np.abs(expected_image - image).max() < 5e-1

    def test_stable_diffusion_pipeline_with_sequential_cpu_offloading(self):
        """With attention slicing + sequential CPU offload, peak VRAM must stay under 2.65 GB."""
        torch.cuda.empty_cache()
        torch.cuda.reset_max_memory_allocated()
        torch.cuda.reset_peak_memory_stats()
        init_image = load_image(
            "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main"
            "/sd2-inpaint/init_image.png"
        )
        mask_image = load_image(
            "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main/sd2-inpaint/mask.png"
        )
        model_id = "stabilityai/stable-diffusion-2-inpainting"
        pndm = PNDMScheduler.from_pretrained(model_id, subfolder="scheduler")
        pipe = StableDiffusionInpaintPipeline.from_pretrained(
            model_id,
            safety_checker=None,
            scheduler=pndm,
            torch_dtype=torch.float16,
        )
        pipe.to(torch_device)
        pipe.set_progress_bar_config(disable=None)
        pipe.enable_attention_slicing(1)
        pipe.enable_sequential_cpu_offload()
        prompt = "Face of a yellow cat, high resolution, sitting on a park bench"
        generator = torch.manual_seed(0)
        _ = pipe(
            prompt=prompt,
            image=init_image,
            mask_image=mask_image,
            generator=generator,
            num_inference_steps=2,
            output_type="np",
        )
        mem_bytes = torch.cuda.max_memory_allocated()
        # make sure that less than 2.65 GB is allocated
        assert mem_bytes < 2.65 * 10**9
|
[
"noreply@github.com"
] |
huggingface.noreply@github.com
|
cb8573de44df8d3f3b0ce42fca5ed11939bcd3a5
|
029aea05dad17d1e6485aa5d3439ff16584a40e3
|
/Ejer3/areas/models.py
|
3abf1aab6bfc5f82a8356d2fcd53df979aca3b54
|
[] |
no_license
|
glenda123/ejercicio3_django
|
b82bbd64300c9243edc646bdd4034069c4b64ec3
|
c1e84bbbdb81f5e9ca000f88c6d27fd4364371a1
|
refs/heads/main
| 2023-01-12T22:40:37.938247
| 2020-11-20T04:21:59
| 2020-11-20T04:21:59
| 314,444,669
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 419
|
py
|
from django.db import models
# from datetime import date
# Create your models here.
class Area(models.Model):
    """An organizational area with basic contact information."""

    nombre = models.CharField(max_length=150)
    direccion = models.CharField(max_length=300)
    telefono = models.CharField(max_length=20)
    # BUG FIX: the original used auto_created=True, which is an internal flag
    # for migration autodetection and does NOT store a creation timestamp.
    # auto_now_add=True stamps the row exactly once, when it is first created.
    created = models.DateTimeField(auto_now_add=True, null=True)
    # auto_now=True refreshes the timestamp on every save().
    updated = models.DateTimeField(auto_now=True, null=True)

    def __str__(self):
        return self.nombre
|
[
"glendaguerrerof@gmail.com"
] |
glendaguerrerof@gmail.com
|
b7cc102132b5ff1057994e7947abc77f3e863f4a
|
1b9d3d991a7b720e0686682f37b9d695cc9f7b35
|
/utilities/milvus_utilities.py
|
91ffab86849d3985dad39f89f54559f2d1f0d88c
|
[] |
no_license
|
pedrojrv/milvus_image_retrieval
|
505870a1bd59337e9172c0fe9124c9fe905ac04b
|
86437471aa70df7450b79bb6ce7be71f1afbfe1f
|
refs/heads/main
| 2023-03-20T10:09:23.699448
| 2021-03-13T05:24:14
| 2021-03-13T05:24:14
| 346,561,666
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,805
|
py
|
import numpy as np
import os
from shutil import copyfile
from milvus import Milvus, IndexType, MetricType, Status
def create_collection(client, collection_name, embedding_dim, reset=False):
    """Create a Milvus collection, optionally dropping and re-creating it.

    Args:
        client (object): milvus client.
        collection_name (str): given name for the collection to create.
        embedding_dim (int): dimensionality of the vectors to be hosted in the collection.
        reset (bool, optional): If True, the collection will be removed and re-created if it already exists. Defaults to False.

    Returns:
        None
    """
    creation_params = {
        'collection_name': collection_name,
        'dimension': embedding_dim,
        'metric_type': MetricType.L2  # optional
    }
    status, exists = client.has_collection(collection_name)
    if not exists:
        client.create_collection(creation_params)
        print("Succesfully created collection!")
        return None
    print("Collection already exists!")
    if reset:
        print("Resetting collection...")
        status = client.drop_collection(collection_name)
        client.create_collection(creation_params)
        print("Succesfully created collection!")
    return None
def insert_embeddings(client, collection_name, embedding_vectors, buffer_size=256):
    """Insert embedding vectors into a Milvus collection.

    Payloads larger (in MB) than ``buffer_size`` are split into chunks and
    inserted one chunk at a time, since the server-side buffer (configured in
    server_config.yaml) cannot accept a single oversized insert.

    Args:
        client (object): milvus client.
        collection_name (str): name of the collection.
        embedding_vectors (np.ndarray): numpy array of vectors to insert into the collection.
        buffer_size (int, optional): buffer size (MB) specified in the server_config.yaml file. Defaults to 256.

    Returns:
        list: milvus ids of all inserted vectors.

    Raises:
        RuntimeError: if any insert operation fails.
    """
    embedding_size_mb = embedding_vectors.nbytes * 1e-6
    if embedding_size_mb > buffer_size:
        chunks = np.ceil(embedding_size_mb / buffer_size)
        print("Warning: Embeddings size are above the buffer size. Will insert recursively.")
        all_ids = []
        for chunk in np.array_split(embedding_vectors, chunks):
            status, ids = client.insert(collection_name=collection_name, records=chunk)
            if not status.OK():
                # BUG FIX: the original used a bare ``raise`` outside any except
                # block, which itself fails with "No active exception to re-raise";
                # raise an informative error instead.
                raise RuntimeError("Insert failed: {}".format(status))
            print("Insertion succesfull.")
            all_ids.extend(ids)
    else:
        status, all_ids = client.insert(collection_name=collection_name, records=embedding_vectors)
        if not status.OK():
            raise RuntimeError("Insert failed: {}".format(status))
        print("Insertion succesfull.")
    return all_ids
def download_nearest_files(results, inventory, path):
    """Copy the nearest-neighbour image files of a query result into ``path``.

    The inventory argument must be a pandas DataFrame and must at least contain
    two features: ``image_path`` (paths to the images in the filesystem) and
    ``milvus_ids`` (the milvus ids assigned at insertion time).

    Args:
        results (milvus.client.abstract.TopKQueryResult): resulting object from milvus query.
        inventory (pd.DataFrame): Dataframe containing the image inventory. Read above for more information.
        path (str): Path-like string indicating directory where the files will be saved to; created if missing.

    Returns:
        None
    """
    if not os.path.isdir(path):
        os.makedirs(path)
    for milvus_id in results.id_array[0]:
        match = inventory[inventory.milvus_ids == milvus_id]
        source = match.image_path.values[0]
        destination = os.path.join(path, os.path.basename(source))
        copyfile(source, destination)
    return None
|
[
"pedro.vicentevz@berkeley.edu"
] |
pedro.vicentevz@berkeley.edu
|
2bd2f6b56eb8ae623b368dd4be8aa018ec7464ff
|
a6b916b1e28e0760f7acab7aefc59d861cb77652
|
/app/core/migrations/0006_recipe_image.py
|
1ec0fbe7fed273bc3d65345fac4a10f6c07ddfab
|
[
"MIT"
] |
permissive
|
muteshi/Recipe-app-api
|
e9212e0681125302fd6d3cc7c5b2312d7e72275e
|
68a660cbde211c04dc0cd1e4181a45dbd60ea854
|
refs/heads/main
| 2023-01-22T13:42:26.502420
| 2020-12-07T17:01:50
| 2020-12-07T17:01:50
| 317,596,686
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 430
|
py
|
# Generated by Django 3.1.4 on 2020-12-07 12:22
import core.models
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adds an optional ``image`` field to the ``recipe`` model; uploads are
    routed through core.models.recipe_image_file_path."""

    dependencies = [
        ('core', '0005_recipe'),
    ]

    operations = [
        migrations.AddField(
            model_name='recipe',
            name='image',
            # null=True keeps existing recipe rows valid without an image.
            field=models.ImageField(null=True, upload_to=core.models.recipe_image_file_path),
        ),
    ]
|
[
"lumteshi@gmail.com"
] |
lumteshi@gmail.com
|
c11c7c26ed8d9043c872a44a803ac999ca7ef233
|
2ac060a05264e88c89f82a256b89bbdd7f28d196
|
/news/migrations/0007_article_article_image.py
|
642764893b294dd17cb0d26308bfed304f62780f
|
[] |
no_license
|
mazimpakar/News
|
b0fea60b6c323d75528c283e7c3b1ebb9d25ca51
|
ffc5c9c96a1295f4937b42463bc36bdd8d103034
|
refs/heads/master
| 2020-05-02T20:09:15.843686
| 2019-03-28T11:06:27
| 2019-03-28T11:06:27
| 178,182,001
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 474
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2019-03-13 09:38
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adds an optional ``article_image`` field to the ``article`` model,
    storing uploads under MEDIA_ROOT/articles/."""

    dependencies = [
        ('news', '0006_editor_phone_number'),
    ]

    operations = [
        migrations.AddField(
            model_name='article',
            name='article_image',
            # blank=True makes the image optional in forms/admin.
            field=models.ImageField(blank=True, upload_to='articles/'),
        ),
    ]
|
[
"rosemazimpaka2@gmail.com"
] |
rosemazimpaka2@gmail.com
|
bb4229113ac14af148ced82c737a946e08c29415
|
f1db92d41eb71007f30e7dded93867ef6b5c0b46
|
/homework/Day027HW.py
|
d525b48c70da5e474d01c71ea6ece6703dad7fe0
|
[] |
no_license
|
hsiyu1121/1st-PyCrawlerMarathon
|
dae1ff155f95e85e0fd9752e9f6d900770700df9
|
918b6b376e6e94011b6cba70f9840063af3692a1
|
refs/heads/master
| 2020-09-13T02:29:33.711034
| 2020-02-18T07:33:29
| 2020-02-18T07:33:29
| 222,633,592
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,890
|
py
|
# -*- coding: utf-8 -*-
import scrapy
from multiprocessing import Process
from bs4 import BeautifulSoup
import re
class Day027hwSpider(scrapy.Spider):
    """Scrape one PTT Gossiping article: metadata, cleaned body text, origin IP
    and (partially implemented) push/boo comment statistics."""

    name = 'Day027HW'
    allowed_domains = ['www.ptt.cc']
    start_urls = ['https://www.ptt.cc/bbs/Gossiping/M.1557928779.A.0C1.html']
    # PTT gates the Gossiping board behind an age check; this cookie bypasses it.
    cookies = {'over18': '1'}

    def start_requests(self):
        # Attach the over18 cookie to every seed request.
        for url in self.start_urls:
            yield scrapy.Request(url=url, callback=self.parse, cookies=self.cookies)

    def parse(self, response):
        # Local stdlib imports keep this fix self-contained: Path and urlparse
        # were referenced but never imported in the original module.
        from pathlib import Path
        from urllib.parse import urlparse

        if response.status != 200:
            print('Error - {} is not available to access'.format(response.url))
            return
        soup = BeautifulSoup(response.text)
        main_content = soup.find(id='main-content')
        metas = main_content.select('div.article-metaline')
        author = ''
        title = ''
        date = ''
        if metas:
            # BUG FIX: the original assignments used 'span-article-meta-value'
            # (a tag-name selector that matches nothing); a CSS class selector
            # needs a dot, matching the guard conditions above each assignment.
            if metas[0].select('span.article-meta-value')[0]:
                author = metas[0].select('span.article-meta-value')[0].string
            if metas[1].select('span.article-meta-value')[0]:
                title = metas[1].select('span.article-meta-value')[0].string
            if metas[2].select('span.article-meta-value')[0]:
                date = metas[2].select('span.article-meta-value')[0].string
        # Detach the push (comment) blocks so they don't pollute the body text.
        pushes = main_content.find_all('div', class_='push')
        for p in pushes:
            p.extract()
        # If the article carries the "※ 發信站: ... 來自: x.x.x.x" footer,
        # extract the origin IP with a regular expression; unicode literals are
        # used because the pattern mixes special symbols and Chinese text.
        try:
            ip = main_content.find(text=re.compile(u'※ 發信站:'))
            ip = re.search('[0-9]*\.[0-9]*\.[0-9]*\.[0-9]*', ip).group()
        except Exception as e:
            ip = ''
        # Drop '※ 發信站:' / '◆ From:' lines, blank lines and extra whitespace
        # (※ = u'\u203b', ◆ = u'\u25c6'); keep alphanumerics, Chinese text and
        # punctuation, URLs and a few special symbols.
        # .stripped_strings extracts text with surrounding whitespace removed:
        # https://www.crummy.com/software/BeautifulSoup/bs4/doc/#strings-and-stripped-strings
        filtered = []
        for v in main_content.stripped_strings:
            # Keep the text unless it starts with a marker symbol or '--'.
            if v[0] not in [u'※', u'◆'] and v[:2] not in [u'--']:
                filtered.append(v)
        # Strip any remaining special/full-width symbols.
        expr = re.compile(u'[^一-龥。;,:“”()、?《》\s\w:/-_.?~%()]')
        for i in range(len(filtered)):
            filtered[i] = re.sub(expr, '', filtered[i])
        # Drop empty strings; the joined remainder is the article body.
        filtered = [i for i in filtered if i]
        content = ' '.join(filtered)
        # Comment statistics: p counts pushes, b counts boos, n counts arrows.
        # NOTE(review): the counters and `messages` are never populated below —
        # the tallying logic appears unfinished in the original.
        p, b, n = 0, 0, 0
        messages = []
        for push in pushes:
            # Skip comment blocks without a push-tag.
            if not push.find('span', 'push-tag'):
                continue
        # Assemble the scraped item.
        # TODO(review): Day027Item is not defined/imported in this module —
        # presumably it lives in the project's items.py; confirm.
        data = Day027Item()
        article_id = str(Path(urlparse(response.url).path).stem)
        data['url'] = response.url
        data['article_id'] = article_id
        data['article_author'] = author
        data['article_title'] = title
        data['article_date'] = date
        data['article_content'] = content
        data['ip'] = ip
        data['messages'] = messages
        yield data
|
[
"noreply@github.com"
] |
hsiyu1121.noreply@github.com
|
4baa80a2e7a6a8a78beafe4cf781490f0ba2ab24
|
7cfa822ac8985c2b6289294070a0effba24ccc9b
|
/Random Numbers From Certain Integers.py
|
4c566535555f45ae552783624dedc1c24dc0e432
|
[] |
no_license
|
fkusz/pythonfiles
|
e329b7b37e91b46bcb7b3dc41964f3e760f0cc63
|
1c35dc776eb031bd3199c7c7fc33c6d50b16422e
|
refs/heads/main
| 2023-05-01T19:05:57.860980
| 2021-05-23T07:04:27
| 2021-05-23T07:04:27
| 367,215,708
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 337
|
py
|
import random
def randPM(n, d):
    """Return ``n`` random ``d``-digit integers whose digits are drawn
    uniformly from {2, 3, 4, 6, 7, 8, 9}.

    BUG FIX: the original built the ``Integers`` list but never returned it,
    so every call produced ``None``.
    """
    choices = [2, 3, 4, 6, 7, 8, 9]
    integers = []
    for _ in range(n):
        digits = [str(random.choice(choices)) for _ in range(d)]
        integers.append(int(''.join(digits)))
    return integers
|
[
"noreply@github.com"
] |
fkusz.noreply@github.com
|
a6c0f9635b4b787f16e2206ce98312a1409d1c27
|
ea83a2f4857fe820ee9e16b1ee07e09458c440ac
|
/backend/apps/apartments_analyzer/signals.py
|
e82c70eddc5d3e8ed175a442727e52e99a3f3f9b
|
[] |
no_license
|
MrLokans/portfoliosite
|
f2d817f657f60eb33b6eaa7af4684be74f349e69
|
18b9610e8afe3400a99eefb2dc4d7f200dc93f60
|
refs/heads/develop
| 2022-11-29T00:23:17.432448
| 2020-05-03T06:32:12
| 2020-05-03T06:32:12
| 43,129,461
| 2
| 2
| null | 2022-11-22T04:58:53
| 2015-09-25T09:50:47
|
Python
|
UTF-8
|
Python
| false
| false
| 303
|
py
|
from django.db.models.signals import pre_save
from django.dispatch import receiver
from apps.apartments_analyzer.models import UserSearch
@receiver(pre_save, sender=UserSearch)
def update_search_version(sender, instance: UserSearch, **kwargs):
    """pre_save hook: bump a search's version whenever an existing row is saved.

    ``instance.pk`` is falsy for rows not yet in the database, so brand-new
    searches keep their initial version.
    """
    if instance.pk:
        instance.increase_version()
|
[
"mrlokans@gmail.com"
] |
mrlokans@gmail.com
|
e22889a5438effe365c9037574d58d78ab2fceef
|
9dd600803d53620fe5ade41206b2dc1e27c9af40
|
/tree_trimmer/condor/doCondor_mu.py
|
a43175e913a2bb972abc93670052509d40430d10
|
[] |
no_license
|
sschier/higgsino-fortnight
|
3150a536f914ce1ab01cd6032e31b5380962fc6d
|
4f20e44aa03a86206194c5bdf3f86378d8a5231c
|
refs/heads/master
| 2020-12-30T15:22:40.467513
| 2018-12-21T23:17:40
| 2018-12-21T23:17:40
| 91,135,365
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,960
|
py
|
#!/usr/bin/env python
# Python 2 driver script: submits tree_trimmer skimming jobs to Condor for
# several samples (W+jets, Z+jets, single top, ttbar, data15/16), muon channel.
import condor
import commands, glob  # NOTE: `commands` is Python 2 only (removed in Python 3)
import time, datetime
exe = '../tree_trimmer.py'
# Timestamp string (YYYYMMDDhhmm); currently unused — presumably kept for job naming.
ts = time.time()
st = datetime.datetime.fromtimestamp(ts).strftime('%Y%m%d%H%M')
base_dir = '/export/share/data/sschier/FakeLepton/June22_2017_Stop1L_Dijet_Ntuple_AB_2.4.32_WithMoreIsolationVars/'
# Each sample block selects a different input tree (-t) and file glob, then
# submits one Condor job per input file (nfiles=1).
arg_template = '-s skim %s /export/home/sschier/workarea/tree_trimmer/skims/skim-ff-mu.py -t wjet_Nom -b /export/home/sschier/workarea/tree_trimmer/boff/bon-mu.txt -o skim.root'
input_files = glob.glob(base_dir+'wjets/*')
condor.run(exe, arg_template, input_files, dirname='wjets_mu_skims', nfiles=1)
arg_template = '-s skim %s /export/home/sschier/workarea/tree_trimmer/skims/skim-ff-mu.py -t zjet_Nom -b /export/home/sschier/workarea/tree_trimmer/boff/bon-mu.txt -o skim.root'
input_files = glob.glob(base_dir+'zjets/*')
condor.run(exe, arg_template, input_files, dirname='zjets_mu_skims', nfiles=1)
arg_template = '-s skim %s /export/home/sschier/workarea/tree_trimmer/skims/skim-ff-mu.py -t top_Nom -b /export/home/sschier/workarea/tree_trimmer/boff/bon-mu.txt -o skim.root'
input_files = glob.glob(base_dir+'singletop/*')
condor.run(exe, arg_template, input_files, dirname='singletop_mu_skims', nfiles=1)
# NOTE(review): the ttbar template below is commented out, so the ttbar jobs
# reuse the singletop template above — presumably intentional since both read
# the top_Nom tree, but confirm.
#arg_template = '-s skim %s /export/home/sschier/workarea/tree_trimmer/skims/skim-ff-mu.py -t top_Nom -b /export/home/sschier/workarea/tree_trimmer/boff/bon-mu.txt -o skim.root'
input_files = glob.glob(base_dir+'ttbar/*')
condor.run(exe, arg_template, input_files, dirname='ttbar_mu_skims', nfiles=1)
arg_template = '-s skim %s /export/home/sschier/workarea/tree_trimmer/skims/skim-ff-mu.py -t data -b /export/home/sschier/workarea/tree_trimmer/boff/bon-mu.txt -o skim.root'
input_files = glob.glob(base_dir+'data16_13TeV/*')
condor.run(exe, arg_template, input_files, dirname='data16_mu_skims', nfiles=1)
input_files = glob.glob(base_dir+'data15_13TeV/*')
condor.run(exe, arg_template, input_files, dirname='data15_mu_skims', nfiles=1)
|
[
"sheena.schier@cern.ch"
] |
sheena.schier@cern.ch
|
26f06f77d5dc8d99f089d0da835b100b193339bc
|
64ddf2241687580c640711883550e564aadaec7e
|
/src/conjecture.py
|
79b4dfe035abe0609a8836350b22ac651301f91d
|
[] |
no_license
|
zedinc/multiplicative-persistence
|
7a69d1fb5ef4f614b76b210c361975a23a4e202f
|
73f1768f36852fe9d39f7b12aba3827997eef6d6
|
refs/heads/main
| 2023-02-02T20:24:54.328824
| 2020-12-20T03:39:58
| 2020-12-20T03:39:58
| 321,103,745
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 12,117
|
py
|
from functools import reduce
from itertools import product
from math import factorial as f
from math import log
# from numba import jit, vectorize, cuda
from time import time
import numpy as np
from colors import green
from sympy.utilities.iterables import multiset_permutations
def pm(n):
    """Multiplicative digit product of ``n`` (e.g. pm(39) == 27)."""
    return reduce(int.__mul__, (int(d) for d in str(n)))


def sift(seed):
    """All digit permutations of ``seed`` whose largest prime factor is <= 7."""
    return [i for i in candidates(seed) if lp7(i)]


def N237(TWO, THREE, SEVEN):
    """Return 2**TWO * 3**THREE * 7**SEVEN."""
    return 2 ** TWO * 3 ** THREE * 7 ** SEVEN


def N2357(TWO, THREE, FIVE, SEVEN):
    """Return 2**TWO * 3**THREE * 5**FIVE * 7**SEVEN."""
    return 2 ** TWO * 3 ** THREE * 5 ** FIVE * 7 ** SEVEN


def streak(n):
    """Returns the list of numbers corresponding to the multiplicative persistence of the input number `n`.
    Doesn't add numbers below 10 to the list."""
    if n > 9:
        return [n] + streak(pm(n))
    return []


def fast_streak(n):
    """Faster version of `streak`: terminates the moment a zero digit appears
    (the digit product is then 0, ending the chain)."""
    if '0' not in str(n) and n > 9:
        # BUG FIX: the original recursed into streak(), so the zero short-circuit
        # only applied to the first value and the fast path was never taken.
        return [n] + fast_streak(pm(n))
    return []
def search( TWO, THREE, SEVEN, action=None ):
    """Brute-force scan over n = 2**two * 3**three * 7**seven for all exponent
    triples below the given bounds, reporting any zero-free n whose
    multiplicative-persistence chain is longer than 7.

    NOTE(review): ``action`` defaults to None but is called unguarded on a hit,
    and it receives the search *bounds* (TWO, THREE, SEVEN), not the hit's own
    exponents — confirm both are intended.
    """
    # Total number of exponent triples; used only for the progress percentage.
    size = TWO * THREE * SEVEN
    count = 0
    for seven, three, two in product( range(SEVEN), range(THREE), range(TWO) ):
        count += 1
        # '\r' keeps the progress line in place on the terminal.
        print( '\r({}, {}, {}) - {:.2%}'.format( two, three, seven, count/size ), end = '' )
        n = 2**two * 3**three * 7**seven
        # if '0' not in str(n) :
        if '0' not in str(n) and len( fast_streak(n) ) > 7:
            print( '\r({}, {}, {}) = {}'.format( two, three, seven , n ) )
            action(TWO,THREE,SEVEN)
def explorer(seed):
    """Breadth-first generator over digit strings grown from ``seed`` by
    appending 7, 8 or 9, only when the appended digit is >= the last digit
    (keeps the digit sequence non-decreasing at the tail)."""
    pending = [str(seed)]
    while True:
        current = pending.pop(0)
        for digit in ('7', '8', '9'):
            if int(current[-1]) <= int(digit):
                grown = current + digit
                pending.append(grown)
                yield grown
def work(seed):
    """Walk explorer(seed)'s 7/8/9-extended numbers forever, printing any whose
    multiplicative-persistence chain is longer than 7."""
    for n in explorer(seed):
        length = len( streak( int(n) ) )
        if length > 7:
            print( '[{}] {} : {}'.format( seed, n, length ) )
def lp7ff(n):
    """True/false if the largest prime is 7: repeatedly divide by 2, 3, 5, 7
    and report whether ``n`` collapses to exactly 1."""
    successor = {2: 3, 3: 5, 5: 7}
    divisor = 2
    while True:
        if n % divisor:
            # Current prime exhausted; move to the next one (or give up at 7).
            if divisor == 7:
                return False
            divisor = successor[divisor]
        else:
            n //= divisor
            if n == 1:
                return True
def lp237(n):
    """True/false if the largest prime is at most 7. Requires fewer loops than lp7, bit-shifts to divide by 2 as much as possible"""
    # Get rid of all divide by 2's in one bit-shift line:
    # n ^ (n-1) sets exactly the trailing-zero bits of n plus its lowest set
    # bit; shifting right once and popcounting yields the number of trailing
    # zero bits, i.e. the multiplicity of 2 in n.
    n >>= bin(( n ^ (n-1) ) >> 1).count('1')
    if n < 11: return True
    # NOTE(review): 5 is never tried as a divisor, so e.g. lp237(25) returns
    # False even though 5 <= 7 — presumably inputs are known to factor over
    # {2, 3, 7} only (zero-free persistence candidates); confirm.
    i = 3 # 'Skip' testing 3 if it isn't divisible
    while n >= 11:
        if n % i:
            if i == 3: i = 7
            elif i == 7: return False
        else:
            n //= i
    return True
def lp27(n):
    """True/false if the largest prime is 7. Requires fewer loops than lp7, bit-shifts to divide by 2 as much as possible. Asserts that number input is not divisible by 3"""
    # assert n % 3, 'Number should not be divisible by 3'
    # Get rid of all divide by 2's in one bit-shift line (trailing-zero
    # popcount trick strips every factor of 2 at once).
    n >>= bin(( n ^ (n-1) ) >> 1).count('1')
    if n < 11: return True
    # Caller guarantees no factor 3, so after removing the 2's only powers of
    # 7 may remain for the number to qualify.
    while n >= 11:
        if n % 7:
            return False
        else:
            n //= 7
    return True
def lp7f(n):
    """True/false if the largest prime is 7: divide out 2, 3, 5 and 7 in turn
    and succeed only if ``n`` is reduced to exactly 1."""
    for prime in (2, 3, 5, 7):
        while n % prime == 0:
            n //= prime
            if n == 1:
                return True
    return False
def lp7(n):
    """True/false if the largest prime is 7 (n collapses to 1 by trial
    division with every integer from 2 through 7)."""
    divisor = 2
    while divisor <= 7 and n != 1:
        if n % divisor == 0:
            n //= divisor
        else:
            divisor += 1
    return n == 1
def candidates( seed, ones=0 ):
    """Returns all permutations of the digits of `seed`, in addition to any `1` digits that can be specified via optional `ones` argument"""
    # multiset_permutations avoids duplicate orderings when digits repeat.
    for n in multiset_permutations( '1' * ones + str(seed) ):
        yield int( ''.join(n) )
def onesies( TWO, THREE, SEVEN, start=0, limit=10 ):
    """For each digit set of 2**TWO * 3**THREE * 7**SEVEN, pad it with an
    increasing number of '1' digits (from ``start`` to ``limit``) and print
    every permutation whose value passes the chosen prime-factor test."""
    ones = start
    while ones <= limit:
        for seed in building_sets( TWO, THREE, SEVEN ):
            # what is in the loop represents one computational unit
            ext = '1' * ones + seed
            if len(ext) <= 80:
                print( 'Exploring {}'.format( ext ) )
            else:
                print( 'Exploring {}'.format( ext[:77] + '...' ) )
            count = 0
            cmplx = complexity(ext)
            # The digit sum fixes divisibility by 3 for *every* permutation,
            # so the cheaper lp27 test can be used when 3 cannot divide.
            divisible_by_three = not ( sum([int(i) for i in ext]) % 3 )
            prospective = lp237 if divisible_by_three else lp27
            for seq in candidates(seed,ones):
                count += 1
                print( '\r {} - {:.2%}'.format( str(seq), count/cmplx ), end = '' )
                # length = len(str(seq))
                # if length <= 80:
                #     print( '\r {} - {:.2%}'.format( str(seq), count/cmplx ), end = '' )
                # else:
                #     print( '\r {} - {:.2%}'.format( str(seq)[:77] + '...', count/cmplx ), end = '' )
                if prospective( int(seq) ):
                    # Hits are echoed permanently in green.
                    print( green( '\r {} '.format(seq) ) )
            print()
        ones += 1
def unit(seed):
    """Print every digit permutation of ``seed``, echoing permanently the
    permutations whose value factors entirely over primes <= 7."""
    print(seed)
    for i in candidates(seed):
        # BUG FIX: candidates() yields ints, so the original '\r' + i raised
        # TypeError on the first iteration; convert to str explicitly.
        print( '\r' + str(i), end = '' )
        if lp7(i): print( '\r' + str(i) )
def building_sets(TWO,THREE,SEVEN):
    """Constructs all digit sets for 2**`TWO` * 3**`THREE` * 7**`SEVEN` (without including `1`s)"""
    n = 2 ** TWO * 3 ** THREE * 7 ** SEVEN
    assert lp7f(n), 'Largest prime factor must be 7'
    # All ways to realise 2**TWO with the digits 2, 4 (=2^2) and 8 (=2^3).
    options2 = []
    for two, four, eight in product( range( 1+int(TWO/1) ), range( 1+int(TWO/2) ), range( 1+int(TWO/3) ) ):
        # print( 'Two : ' + str(two), str(four), str(eight), sep = ', ' )
        if two * 1 + four * 2 + eight * 3 == TWO:
            options2.append( '2' * two + '4' * four + '8' * eight )
    # All ways to realise 3**THREE with the digits 3 and 9 (=3^2).
    options3 = []
    for three, nine in product( range( 1+int(THREE/1) ), range( 1+int(THREE/2) ) ):
        if three * 1 + nine * 2 == THREE:
            options3.append( '3' * three + '9' * nine )
    # 7 is prime, so only the digit 7 itself can contribute factors of 7.
    options7 = [ '7' * SEVEN ]
    # print( '# {}, {}, {} #'.format( options2, options3, options7 ) )
    # print(options3)
    # print(options7)
    # return ( two + three + seven for two, three, seven in product( options2, options3, options7 ) )
    for two, three, seven in product( options2, options3, options7 ):
        seq = two + three + seven
        yield seq
        # A digit 6 (=2*3) can replace one loose '2' and one loose '3';
        # yield each such substitution as an alternative digit set.
        two_count = two.count('2')
        three_count = three.count('3')
        if two_count and three_count:
            for SIX in range( 1, 1 + min( two_count, three_count ) ):
                two_mod = '2' * ( two_count - SIX ) + two.strip('2')
                three_mod = '3' * ( three_count - SIX ) + three.strip('3')
                alt_seq = two_mod + three_mod + '6' * SIX + '7' * SEVEN
                # print( 'alt_seq = {}'.format(alt_seq) )
                if alt_seq != seq:
                    yield alt_seq
def complexity(seq):
    """Number of distinct orderings of the characters of ``seq`` — the
    multinomial coefficient len! / prod(count_i!)."""
    text = str(seq)
    total = f(len(text))
    for ch in set(text):
        total //= f(text.count(ch))
    return total
def sequence_summary(TWO,THREE,SEVEN):
    """Returns a dictionary summarizing the different digit sets that satisfy the prime factorization of 2**`TWO` * 3**`THREE` * 7**`SEVEN`.
    :param two: number of times 2 repeats as a prime factor
    :param three: number of times 3 repeats as a prime factor
    :param seven: number of times 7 repeats as a prime factor
    """
    # Maps each digit-set string to its number of distinct permutations.
    score = {}
    for seq in building_sets(TWO,THREE,SEVEN):
        # uniq = [ int(i) for i in set(seq) ]
        # count = { i : seq.count(str(i)) for i in uniq }
        # combinations = f(len(seq))
        # for i in uniq:
        #     combinations //= f( count[i] )
        # score[seq] = combinations
        score[seq] = complexity(seq)
    return score
def prime_factors(n):
    """Return the prime factorisation of ``n`` as an ascending list with
    multiplicity (e.g. prime_factors(12) == [2, 2, 3])."""
    factors = []
    divisor = 2
    while divisor * divisor <= n:
        if n % divisor == 0:
            n //= divisor
            factors.append(divisor)
        else:
            divisor += 1
    # Whatever remains above 1 is itself prime.
    if n > 1:
        factors.append(n)
    return factors
def prime_powers237(n):
    """Returns `TWO`, `THREE`, `SEVEN` representation of `n` (2**`TWO` * 3**`THREE` * 7**`SEVEN`)"""
    assert not set(prime_factors(n)).difference({2,3,7}), "`prime_powers237()` only works if prime factors are limited to 2,3 and 7"
    # NOTE: prime_factors() is computed twice (assert + here); harmless but
    # the first result could be reused.
    factors = prime_factors(n)
    return factors.count(2), factors.count(3), factors.count(7)
def prime_powers2357(n):
    """Returns `TWO`, `THREE`, `FIVE`, `SEVEN` representation of `n` (2**`TWO` * 3**`THREE` * 5**`FIVE` * 7**`SEVEN`)"""
    # NOTE(review): the assert message below was copy-pasted from
    # prime_powers237() — it should mention this function and the factor 5.
    assert not set(prime_factors(n)).difference({2,3,5,7}), "`prime_powers237()` only works if prime factors are limited to 2,3 and 7"
    factors = prime_factors(n)
    return factors.count(2), factors.count(3), factors.count(5), factors.count(7)
def find_prospectives( n, pad='', level=0 ):
    """Print other numbers sharing digit sets derived from ``n``'s 2/3/7
    factorisation whose prime factors also stay within {2, 3, 7}; recurses
    into each hit when ``level`` > 0.

    :param pad: indentation prefix for the tree-like console output.
    """
    assert lp7(n), "Can't use find_prospectives() if largest prime > 7"
    print( pad + str(n) )
    TWO, THREE, SEVEN = prime_powers237(n)
    possibilities = sequence_summary( TWO, THREE, SEVEN )
    # Explore the cheapest digit sets (fewest permutations) first.
    for _, score in sorted( possibilities.items(), key = lambda x: x[1] ):
        print('\r' + pad + ' {}'.format(_))
        for seq in candidates(_):
            print( '\r' + pad + '* {}'.format(seq), end='')
            i = int(seq)
            if lp7(i) and i != n:
                two, three, seven = prime_powers237(i)
                print( '\r' + pad + ' {} = 2**{} * 3**{} * 7**{}'.format( seq, two, three, seven ) )
                if level > 0:
                    # NOTE(review): the recursion does not pass ``level - 1``
                    # (defaults to 0), so nested hits never recurse further —
                    # confirm whether that is the intended depth limit.
                    find_prospectives( i, pad+' ' )
if __name__ == '__main__':
    # Script entry point. Everything below except the final onesies() call is
    # retained, commented-out experiments (sequential scans, vectorised
    # attempts, candidate sifting, timing harnesses).
    # for n in explorer():
    #     length = len( streak( int(n) ) )
    #     if length > 7:
    #         print( '[{}] {} : {}'.format( n, length ) )
    # tic = time()
    # sequential
    # for seven, three, two in product( range(500), range(500), range(500) ):
    #     n = 2**two * 3**three * 7**seven
    #     if not str(n).count('0'):
    #         length = len( streak(n) )
    #         print( '\r({}, {}, {}) = '.format(two, three, seven), end='')
    #         if length > 7: print( n, length, sep=': ')
    # toc = time()
    # print('Time taken = {} s'.format(toc-tic))
    # tic = time()
    # # parallelized?
    # for seven, three, two in product( range(100), range(100), range(100) ):
    #     n = 2**two * 3**three * 7**seven
    #     if not str(n).count('0'):
    #         nv = streakv(n)
    #         print( '\r({}, {}, {}) = '.format(two, three, seven), end='')
    #         if nv > 7: print( n, nv, sep=': ')
    # toc = time()
    # print('Time taken = {} s'.format(toc-tic))
    # for i in candidates('54'):
    #     print('\r' + str(i), lp7(i), sep = ': ' , end = '')
    #     if lp7(i): print( '\r'+ str(i), lp7(i), sep = ': ' )
    # print( complexity('1234') )
    # ones = 0
    # while ones <= 3:
    #     for seed in building_sets( 4, 20, 5 ):
    #         count = 0
    #         for seq in candidates(seed,ones):
    #             count += 1
    #             print( '\r{} digits - {:.2%}'.format( len(str(seq)), count/complexity(str(seq)) ), end = '' )
    #             if lp7( int(seq) ): print( '\r{}'.format(seq) )
    #     print()
    #     ones += 1
    # search( 40, 40, 40 )
    # exit(0)
    # onesies( 4, 20, 5, 5 )
    # Current run: digit sets of 2**8 * 3**3 * 7**2, padded with 5..10 ones.
    onesies( 8, 3, 2, 5 )
|
[
"mz.khan@yahoo.com"
] |
mz.khan@yahoo.com
|
9f78b6a20934481fed7410cab04f94aeffc6bed1
|
0c9034a60d2a4566b198d2522d4463d68c013876
|
/skypeshit.py
|
3bfe39668d6999111050a9a6f197ae1f87b6ba6d
|
[] |
no_license
|
evgkirov/skypeshit
|
90d803125e3f4617b162aa50db65aa960f52a96b
|
e5410757e134573a6cf5a05e03d1802b64d5149c
|
refs/heads/master
| 2022-05-21T05:28:56.312249
| 2013-04-10T23:12:07
| 2013-04-10T23:12:07
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,163
|
py
|
#!/usr/bin/env python
from datetime import datetime
import random
import settings
import Skype4Py
import sys
import time
class Daemon(object):
def __init__(self):
self.log('Hello!')
self.setup_skype()
self.my_last_message = 0
self.target_chats = [self.skype.Chat(name) for name in settings.TARGET_CHATS]
def random_line(self, filename):
lines = [line.strip() for line in open(filename).readlines()]
return random.choice(lines)
def run(self):
while True:
time.sleep(1)
if self.target_chats:
message_chance = 60 * 60 * 24 / len(self.target_chats)
if random.randint(1, message_chance) == 1:
self.send_skype(self.random_line(settings.MESSAGES_FILE), random.choice(self.target_chats))
def stop(self):
pass
def log(self, message, level=0):
log_levels = {
0: 'INFO',
1: 'WARNING',
2: 'ERROR'
}
print '%s [%s] %s' % (
datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
log_levels[level],
message
)
sys.stdout.flush()
def setup_skype(self):
self.skype = Skype4Py.Skype()
self.skype.Attach()
self.skype.OnMessageStatus = lambda *a, **kw: self.on_skype_message(*a, **kw)
self.log('Attached to Skype')
def on_skype_message(self, msg, status):
if status != 'RECEIVED':
return
reply_chance = 70
if time.time() - self.my_last_message < 60 * 5:
reply_chance = 5
if any([(i in msg.Body.lower()) for i in settings.IRRITATORS]):
reply_chance = 2
if random.randint(1, reply_chance) != 1:
return
time.sleep(random.randint(0, 20))
self.send_skype(self.random_line(settings.REPLIES_FILE), msg.Chat)
def send_skype(self, msg, chat):
chat.SendMessage(msg)
self.my_last_message = time.time()
self.log('Sent: %s' % msg)
# Script entry point: run the bot forever; Ctrl-C triggers a clean stop.
if __name__ == '__main__':
    d = Daemon()
    try:
        d.run()
    except KeyboardInterrupt, e:  # Python 2 except syntax
        d.stop()
|
[
"evg.kirov@gmail.com"
] |
evg.kirov@gmail.com
|
a9b6ddf7d41e489a0d45bee00ecdbb0c0460c6e2
|
e0c916e38cfd429ecba70d7e4e1ac393d6bcc418
|
/Neural_network_example.py
|
394c2f40db5acf0dab90050637f1d75fa6f44c95
|
[] |
no_license
|
shaodouya/Neural-networks-Sea-Ice-classification
|
bc21970395ef7afdeaf3adbf31d091f4e852dce3
|
14db6a94ecffe549f41a6233fd8440da17995fb6
|
refs/heads/main
| 2023-06-01T00:29:43.055803
| 2021-06-29T12:06:34
| 2021-06-29T12:06:34
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,038
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Apr 16 10:52:45 2021
@author: S and J
"""
#Example of a neural network to recognize handwritten numbers from 1 to 3, to simplfy the architecture and reduce calculation power, we will only
#look at numbers from 1 to 3, but the principle of course remains the same in any higher version
import numpy as np
import matplotlib.pyplot as plt
#defining the sigmoid function
def sigmoid(x):
    """Logistic activation: maps any real input (or array) into (0, 1)."""
    exp_term = np.exp(-x)
    return 1 / (1 + exp_term)
#differential of the sigmoid function for later training purposes
def d_sigmoid(x):
    """Derivative of the sigmoid, s(x) * (1 - s(x)), used by backprop.

    The sigmoid is inlined (and evaluated once) instead of calling the
    helper twice; the value is identical.
    """
    s = 1 / (1 + np.exp(-x))
    return s * (1 - s)
#defining the activation function as sigmoid funciton of the sum of all weights multiplied by the outputs of the previous layer
def layer_output(L, W):
    """Forward one layer: sigmoid of the weighted sum of the previous layer.

    Equivalent to sigmoid(np.dot(L, W)) with the sigmoid inlined.
    """
    weighted_sum = np.dot(L, W)
    return 1 / (1 + np.exp(-weighted_sum))
#defining model inputs of handwritten numbers from 1 to 3, in this case organised in a 5 x 4 matrix to simplify calculation
#defining model inputs of handwritten numbers from 1 to 3, in this case organised in a 5 x 4 matrix to simplify calculation
L0 = np.array([[0,0,1,0,0,1,1,0,1,0,1,0,0,0,1,0,0,0,1,0],
               [0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1, 1, 1],
               [0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 1, 1, 0] ])
#defining model outputs for training purposes (one-hot: rows = digits 1..3)
L2_aim = np.array([[1,0,0],[0,1,0],[0,0,1]])
#defining randomized weights for input and hidden layer
# (20 inputs -> 30 hidden units -> 3 outputs; not seeded, so results vary per run)
W0 = np.random.random((20,30))
W1 = np.random.random((30,3))
#optimizing weights until the cost function falls below a threshold of 0.001
for ix in range(60000):
    L1 = layer_output(L0, W0)
    L2 = layer_output(L1, W1)
    L2_error = L2_aim - L2
    if np.max(np.abs(L2_error)) < 0.001:
        break
    #adjusting the weights at each step to minimize the cost function utilizing its differential
    # NOTE(review): W1 is updated first and the *updated* W1 is then used to
    # backpropagate the error into W0 -- confirm this ordering is intended.
    W1 = W1 + np.dot(L1.T, L2_error*d_sigmoid(L2))
    W0 += np.dot(L0.T,np.dot(L2_error * d_sigmoid(L2),W1.T)* d_sigmoid(L1))
#testing the networks abilities with the patter corresponding to the number 2
L0 = np.array([0, 1, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1, 1, 1])
L1 = layer_output(L0,W0)
L2 = layer_output(L1,W1)
print(L2)
|
[
"noreply@github.com"
] |
shaodouya.noreply@github.com
|
632b13905b07b97c70903174ab40d7ad79edae22
|
84bdd23754e00a46635e896b4343c22fa74087fe
|
/floorplan_lp.py
|
003794618ce9974c90d4b24bf568e8d659342999
|
[] |
no_license
|
djh496812749new/snippets
|
d2deff6ef6ae332e4c17cb6b75d97ae6a0be5820
|
32d985739247ea6f059cdcc37ca9f585147e19b5
|
refs/heads/master
| 2020-03-27T03:09:04.768934
| 2013-12-16T18:02:57
| 2013-12-16T18:02:57
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,144
|
py
|
# Figure 8.20, page 444.
# Floor planning example.
from cvxopt import solvers, matrix, spmatrix, mul, div
try: import pylab
except ImportError: pylab_installed = False
else: pylab_installed = True
def floorplan(Amin):
    """Solve the 5-block floor planning problem (Boyd & Vandenberghe,
    figure 8.20) for the given 5-vector of minimum block areas *Amin*.

    Returns (W, H, x, y, w, h): the bounding-box width and height, the
    blocks' bottom-left corner coordinates, and their widths and heights.

    Bug fix: with the variable layout W=0, H=1, x=2..6, y=7..11, w=12..16,
    h=17..21, the aspect-ratio rows for "-w3 + h3/gamma <= 0" and
    "-w4 + h4/gamma <= 0" previously indexed h2 (col 18) and h3 (col 19)
    instead of h3 (col 19) and h4 (col 20).
    """
    # minimize W+H
    # subject to Amin1 / h1 <= w1
    #            Amin2 / h2 <= w2
    #            Amin3 / h3 <= w3
    #            Amin4 / h4 <= w4
    #            Amin5 / h5 <= w5
    #            x1 >= 0
    #            x2 >= 0
    #            x4 >= 0
    #            x1 + w1 + rho <= x3
    #            x2 + w2 + rho <= x3
    #            x3 + w3 + rho <= x5
    #            x4 + w4 + rho <= x5
    #            x5 + w5 <= W
    #            y2 >= 0
    #            y3 >= 0
    #            y5 >= 0
    #            y2 + h2 + rho <= y1
    #            y1 + h1 + rho <= y4
    #            y3 + h3 + rho <= y4
    #            y4 + h4 <= H
    #            y5 + h5 <= H
    #            h1/gamma <= w1 <= gamma*h1
    #            h2/gamma <= w2 <= gamma*h2
    #            h3/gamma <= w3 <= gamma*h3
    #            h4/gamma <= w4 <= gamma*h4
    #            h5/gamma <= w5 <= gamma*h5
    #
    # 22 Variables W, H, x (5), y (5), w (5), h (5).
    #
    # W, H:  scalars; bounding box width and height
    # x, y:  5-vectors; coordinates of bottom left corners of blocks
    # w, h:  5-vectors; widths and heigths of the 5 blocks
    rho, gamma = 1.0, 5.0  # min spacing, min aspect ratio

    # The objective is to minimize W + H.  There are five nonlinear
    # constraints
    #
    #     -w1 + Amin1 / h1 <= 0
    #     -w2 + Amin2 / h2 <= 0
    #     -w3 + Amin3 / h3 <= 0
    #     -w4 + Amin4 / h4 <= 0
    #     -w5 + Amin5 / h5 <= 0.
    c = matrix(2*[1.0] + 20*[0.0])

    def F(x=None, z=None):
        # cvxopt nonlinear-constraint oracle: returns (count, x0) when
        # called without a point, (f, Df) for a point, and (f, Df, H)
        # when second derivatives (weighted by z) are requested.
        if x is None:
            return 5, matrix(17*[0.0] + 5*[1.0])
        # Constraints are undefined for non-positive heights.
        if min(x[17:]) <= 0.0:
            return None
        f = -x[12:17] + div(Amin, x[17:])
        Df = matrix(0.0, (5,22))
        Df[:,12:17] = spmatrix(-1.0, range(5), range(5))
        Df[:,17:] = spmatrix(-div(Amin, x[17:]**2), range(5), range(5))
        if z is None:
            return f, Df
        H = spmatrix( 2.0* mul(z, div(Amin, x[17::]**3)), range(17,22),
                      range(17,22) )
        return f, Df, H

    # linear inequalities
    G = matrix(0.0, (26,22))
    h = matrix(0.0, (26,1))

    # -x1 <= 0
    G[0,2] = -1.0

    # -x2 <= 0
    G[1,3] = -1.0

    # -x4 <= 0
    G[2,5] = -1.0

    # x1 - x3 + w1 <= -rho
    G[3, [2, 4, 12]], h[3] = [1.0, -1.0, 1.0], -rho

    # x2 - x3 + w2 <= -rho
    G[4, [3, 4, 13]], h[4] = [1.0, -1.0, 1.0], -rho

    # x3 - x5 + w3 <= -rho
    G[5, [4, 6, 14]], h[5] = [1.0, -1.0, 1.0], -rho

    # x4 - x5 + w4 <= -rho
    G[6, [5, 6, 15]], h[6] = [1.0, -1.0, 1.0], -rho

    # -W + x5 + w5 <= 0
    G[7, [0, 6, 16]] = -1.0, 1.0, 1.0

    # -y2 <= 0
    G[8,8] = -1.0

    # -y3 <= 0
    G[9,9] = -1.0

    # -y5 <= 0
    G[10,11] = -1.0

    # -y1 + y2 + h2 <= -rho
    G[11, [7, 8, 18]], h[11] = [-1.0, 1.0, 1.0], -rho

    # y1 - y4 + h1 <= -rho
    G[12, [7, 10, 17]], h[12] = [1.0, -1.0, 1.0], -rho

    # y3 - y4 + h3 <= -rho
    G[13, [9, 10, 19]], h[13] = [1.0, -1.0, 1.0], -rho

    # -H + y4 + h4 <= 0
    G[14, [1, 10, 20]] = -1.0, 1.0, 1.0

    # -H + y5 + h5 <= 0
    G[15, [1, 11, 21]] = -1.0, 1.0, 1.0

    # -w1 + h1/gamma <= 0
    G[16, [12, 17]] = -1.0, 1.0/gamma

    # w1 - gamma * h1 <= 0
    G[17, [12, 17]] = 1.0, -gamma

    # -w2 + h2/gamma <= 0
    G[18, [13, 18]] = -1.0, 1.0/gamma

    # w2 - gamma * h2 <= 0
    G[19, [13, 18]] = 1.0, -gamma

    # -w3 + h3/gamma <= 0
    # BUG FIX: was [14, 18] (h2); h3 is column 19.
    G[20, [14, 19]] = -1.0, 1.0/gamma

    # w3 - gamma * h3 <= 0
    G[21, [14, 19]] = 1.0, -gamma

    # -w4 + h4/gamma <= 0
    # BUG FIX: was [15, 19] (h3); h4 is column 20.
    G[22, [15, 20]] = -1.0, 1.0/gamma

    # w4 - gamma * h4 <= 0
    G[23, [15, 20]] = 1.0, -gamma

    # -w5 + h5/gamma <= 0
    G[24, [16, 21]] = -1.0, 1.0/gamma

    # w5 - gamma * h5 <= 0.0
    G[25, [16, 21]] = 1.0, -gamma

    # solve and return W, H, x, y, w, h
    sol = solvers.cpl(c, F, G, h)
    return sol['x'][0], sol['x'][1], sol['x'][2:7], sol['x'][7:12], \
        sol['x'][12:17], sol['x'][17:]
solvers.options['show_progress'] = False

# Solve four area-requirement scenarios and draw each layout in one
# quadrant of a 2x2 figure (skipped entirely when pylab is unavailable).
if pylab_installed: pylab.figure(facecolor='w')

Amin = matrix([100., 100., 100., 100., 100.])
W, H, x, y, w, h = floorplan(Amin)
if pylab_installed:
    # NOTE(review): the inner guard is redundant -- we are already inside
    # an `if pylab_installed:` block.
    if pylab_installed: pylab.subplot(221)
    for k in range(5):
        # Draw block k as a grey rectangle with its index at the centre.
        pylab.fill([x[k], x[k], x[k]+w[k], x[k]+w[k]],
                   [y[k], y[k]+h[k], y[k]+h[k], y[k]],
                   facecolor = '#D0D0D0')
        pylab.text(x[k]+.5*w[k], y[k]+.5*h[k], "%d" %(k+1))
    pylab.axis([-1.0, 26, -1.0, 26])
    pylab.xticks([])
    pylab.yticks([])

Amin = matrix([20., 50., 80., 150., 200.])
W, H, x, y, w, h = floorplan(Amin)
if pylab_installed:
    pylab.subplot(222)
    for k in range(5):
        pylab.fill([x[k], x[k], x[k]+w[k], x[k]+w[k]],
                   [y[k], y[k]+h[k], y[k]+h[k], y[k]],
                   facecolor = '#D0D0D0')
        pylab.text(x[k]+.5*w[k], y[k]+.5*h[k], "%d" %(k+1))
    pylab.axis([-1.0, 26, -1.0, 26])
    pylab.xticks([])
    pylab.yticks([])

Amin = matrix([180., 80., 80., 80., 80.])
W, H, x, y, w, h = floorplan(Amin)
if pylab_installed:
    pylab.subplot(223)
    for k in range(5):
        pylab.fill([x[k], x[k], x[k]+w[k], x[k]+w[k]],
                   [y[k], y[k]+h[k], y[k]+h[k], y[k]],
                   facecolor = '#D0D0D0')
        pylab.text(x[k]+.5*w[k], y[k]+.5*h[k], "%d" %(k+1))
    pylab.axis([-1.0, 26, -1.0, 26])
    pylab.xticks([])
    pylab.yticks([])

Amin = matrix([20., 150., 20., 200., 110.])
W, H, x, y, w, h = floorplan(Amin)
if pylab_installed:
    pylab.subplot(224)
    for k in range(5):
        pylab.fill([x[k], x[k], x[k]+w[k], x[k]+w[k]],
                   [y[k], y[k]+h[k], y[k]+h[k], y[k]],
                   facecolor = '#D0D0D0')
        pylab.text(x[k]+.5*w[k], y[k]+.5*h[k], "%d" %(k+1))
    pylab.axis([-1.0, 26, -1.0, 26])
    pylab.xticks([])
    pylab.yticks([])
    pylab.show()
|
[
"nicholas.pilkington@gmail.com"
] |
nicholas.pilkington@gmail.com
|
b468aeb1efa45724e677b60feb5dbb2a52b0f3a5
|
3cf8aaf8faba4d06eacbb3e06c7663688aef314c
|
/by_categories/BinarySearch/278.py
|
6bde83a904762f51a8dada677eaa4accfb9c3546
|
[] |
no_license
|
Yuqi9579/leetcode
|
5e946e5c7ab221ff85bf8331730a692760c30650
|
f11c55a3e8582c22e2bff3c86318da0445ab4b5b
|
refs/heads/master
| 2023-01-01T04:36:33.692905
| 2020-10-24T19:51:15
| 2020-10-24T19:51:15
| 274,647,319
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 724
|
py
|
'''
278. First bad version
Easy
[g,g,g,g,g,b,b,b] g:good b:bad
[1,2,3,4,5,6,7,8] 找到第一个bad产品
类比开二次根,都是找到一个临界值(在一个广义sorted的list中),具体取上还是取下要具体分析
'''
class Solution:
    def firstBadVersion(self, n):
        """Binary search for the first bad version in 1..n.

        :type n: int
        :rtype: int
        """
        lo, hi = 1, n + 1
        while lo < hi:
            mid = lo + (hi - lo) // 2
            if isBadVersion(mid):
                # mid is bad: the first bad version is mid or earlier.
                hi = mid
            else:
                # mid is good: the first bad version must lie after mid.
                lo = mid + 1
        return lo
def isBadVersion(x):
    '''
    Stub for the API provided by the judge (returns True when version x
    is bad); defined here only so the file is self-contained.
    '''
    pass
|
[
"liyuqi9@live.com"
] |
liyuqi9@live.com
|
985f844caf10146bcd6d7847503edd19790a641c
|
ae7627dc5e6ef7e9f8db9d825a6bc097da5b34de
|
/question_no_21.py
|
4c116949953f5276f16fa7f00ee4a8ee064d267c
|
[] |
no_license
|
atulzh7/IW-Academy-Python-Assignment
|
cc5c8a377031097aff5ef62b209cb31f63241505
|
674d312b1438301865c840257686edf60fdb3a69
|
refs/heads/master
| 2022-11-14T13:26:16.747544
| 2020-07-12T16:06:08
| 2020-07-12T16:06:08
| 283,823,502
| 0
| 1
| null | 2020-07-30T16:24:30
| 2020-07-30T16:24:29
| null |
UTF-8
|
Python
| false
| false
| 160
|
py
|
def last(n):
    """Sort-key helper: return the final element of sequence *n*."""
    return n[len(n) - 1]
def sort_list_last(tuples):
    """Return *tuples* sorted (ascending, stable) by each tuple's last element."""
    return sorted(tuples, key=lambda item: item[-1])
# Demo: sorts the sample pairs by their second element.
print(sort_list_last([(2, 5), (1, 2), (4, 4), (2, 3), (2, 1)]))
|
[
"="
] |
=
|
84e80308c11bdfc8fa1bca64f3f3f0587635c3fa
|
70f564990215f47b139a777826f211477e9b44f6
|
/plan2vec_experiments/streetlearn/pretrain_baselines/load-and-plan.py
|
208582df52258ab5539dc14b86a574c949bc0894
|
[] |
no_license
|
geyang/plan2vec
|
de87f2d77732c4aacdefd00067ebebacb7cd763f
|
aeeb50aed3d7da4c266b4ca163e96d4c0747e3c1
|
refs/heads/master
| 2022-11-16T03:40:42.638239
| 2022-10-28T04:01:29
| 2022-10-28T04:01:29
| 261,273,420
| 65
| 3
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,022
|
py
|
from plan2vec_experiments import instr, config_charts
from plan2vec.plan2vec.plan2vec_streetlearn_2 import DEBUG, Args, main
import jaynes
def common_config():
    """Set shared hyper-parameters on the module-global Args/DEBUG
    namespaces used by every experiment in this launcher.

    Mutates Args/DEBUG in place; call once before any eval entry point.
    """
    Args.seed = 5 * 100
    Args.num_epochs = 500
    Args.lr = 3e-5
    Args.gamma = 0.97
    Args.target_update = 0.9
    Args.top_k = None
    Args.plan_steps = 1
    Args.H = 50
    Args.r_scale = 0.2
    Args.optim_epochs = 32
    Args.latent_dim = 2
    # make this one to see early stage to make sure
    Args.visualization_interval = 1
    # turn off checkpointing b/c models are large
    Args.checkpoint_interval = None
    Args.binary_reward = None
    # Path to a pre-trained global-metric checkpoint from an earlier
    # value-function pretraining sweep.
    Args.load_global_metric = "/geyang/plan2vec/2019/07-15/streetlearn/pretrain_baselines/" \
                              "sample-distribution-comparison/value-05-step-pretrain/dim-(2)/" \
                              "manhattan-medium/ResNet18L2/lr-(7.5e-08)/22.16/31.668959/" \
                              "value_fn_pretrain/global_metric_fn.pkl"
    # DEBUG.pretrain_num_epochs = 1
    # DEBUG.pretrain_global = True
    # # also try to pretrain with random sample,
    # # try sampling techniques
    # # add evaluation loops
    # DEBUG.value_fn_pretrain_global = True
    # Args.data_path = None
    # Pre-trained local-metric checkpoint from an earlier experiment run.
    local_metric_exp_path = "episodeyang/plan2vec/2019/06-20/streetlearn/local_metric/23.19/07.247751"
    Args.load_local_metric = f"/{local_metric_exp_path}/models/local_metric_400.pkl"
def plan2vec_eval_planning(dataset, prefix, r):
    """Queue one planning-evaluation run of the pre-trained model via jaynes.

    dataset: streetlearn dataset folder name (e.g. "manhattan-medium").
    prefix:  experiment path suffix used for logging.
    r:       ground-truth neighbour radius (map units).
    """
    Args.data_path = f"~/fair/streetlearn/processed-data/{dataset}"
    # Guard against leftover training/debug flags from other launchers --
    # this entry point only evaluates a pre-trained model.
    assert DEBUG.pretrain_global is False
    assert DEBUG.value_fn_pretrain_global is False
    assert DEBUG.supervised_value_fn is False
    assert Args.binary_reward is None
    assert DEBUG.oracle_planning is False
    # 1.2e-4 is the per-step map scale; success radius is 20 steps.
    Args.term_r, DEBUG.ground_truth_success = 1.2e-4 * 20, True
    DEBUG.ground_truth_neighbor_r = r
    DEBUG.real_r_distance = False
    _ = instr(main, __postfix=f"eval-pretrained/{prefix}", **vars(Args), _DEBUG=vars(DEBUG))
    config_charts(path="plan2vec_eval_planning.charts.yml")
    jaynes.run(_)
def plan2vec_eval_control(dataset, prefix, neighbor_r, success_r):
    """Queue one control-evaluation run (frozen model) via jaynes.

    dataset:    streetlearn dataset folder name.
    prefix:     experiment path suffix used for logging.
    neighbor_r: ground-truth neighbour radius (map units).
    success_r:  termination/success radius (map units).
    """
    Args.data_path = f"~/fair/streetlearn/processed-data/{dataset}"
    Args.num_epochs = 20
    # freeze the learning todo: remove HER-relabeling and optimization.
    Args.lr = 0
    # Guard against leftover training/debug flags from other launchers.
    assert DEBUG.pretrain_global is False
    assert DEBUG.value_fn_pretrain_global is False
    assert DEBUG.supervised_value_fn is False
    assert Args.binary_reward is None
    assert DEBUG.oracle_planning is False
    DEBUG.ground_truth_neighbor_r = neighbor_r
    Args.term_r, DEBUG.ground_truth_success = success_r, True
    DEBUG.real_r_distance = False
    _ = instr(main, __postfix=f"eval-control/{prefix}", **vars(Args), _DEBUG=vars(DEBUG))
    config_charts(path="plan2vec_eval_control.charts.yml")
    jaynes.run(_)
if __name__ == "__main__":
common_config()
param_dict = {
'ResNet18L2': {"lr": [1e-6, 3e-6, 1e-7, 3e-7], },
# 'GlobalMetricConvL2_s1': {"lr": [1e-6, 3e-6, 6e-6]},
# 'GlobalMetricConvDeepL2': {"lr": [1e-6, 3e-6, 6e-6]},
# 'GlobalMetricConvDeepL2_wide': {"lr": [1e-6, 3e-6, 6e-6]}
}
# ResNet requires much less memory than the other.
Args.global_metric = 'ResNet18L2'
_ = param_dict['ResNet18L2']
jaynes.config("vector-gpu")
key = 'medium'
# for n in [5, 10, 20, 40]:
# plan2vec_eval_planning(f"manhattan-{key}", r=1.2e-4 * float(n),
# prefix=f"dim-({Args.latent_dim})/manhattan-{key}/r-({n})")
# plan2vec_eval_control(f"manhattan-{key}", r=1.2e-4 * float(n),
# prefix=f"dim-({Args.latent_dim})/manhattan-{key}/r-({n})")
for n in [5, 10, 20, 40]:
for r in [10, 5, 1]:
plan2vec_eval_control(f"manhattan-{key}", neighbor_r=1.2e-4 * float(n), success_r=1.2e-4 * float(r),
prefix=f"dim-({Args.latent_dim})/manhattan-{key}/n-({n})/r-({r})")
jaynes.listen()
|
[
"yangge1987@gmail.com"
] |
yangge1987@gmail.com
|
b8774a8cc4581c31e7dc4b868cbabbf8cac649e8
|
e8b30b0745b1c4adb7451ef820f5ed86754a5c63
|
/test/dirtys/t.py
|
5f386e032b232dcba7d2638c75046f4003d6ea04
|
[] |
no_license
|
75509151/jqmtt-rpc
|
7294938d9f614e0c0d0c8743192b21ad011d1bcb
|
52edd74108c5b64dcea3afb29b6c3e5034566173
|
refs/heads/master
| 2020-06-24T13:14:51.464296
| 2019-09-26T07:47:10
| 2019-09-26T07:47:10
| 198,971,641
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 904
|
py
|
import eventlet
eventlet.monkey_patch()
import json
import time
import paho.mqtt.client as mqtt
def on_connect(client, userdata, flags, rc):
    """paho-mqtt connect callback: just announce that we are connected."""
    status = "connected"
    print(status)
def on_pubish(client, userdata, mid):
    """paho-mqtt publish callback: log the userdata and message id.

    (Name keeps the original "pubish" typo -- it is assigned to
    client.on_publish elsewhere in this file.)
    """
    line = "on publish d: %s, mid: %s" % (userdata, mid)
    print(line)
def on_subscribe(client, userdata, mid):
    """paho-mqtt subscribe callback: log the userdata and message id."""
    line = "on subscribe: %s, mid: %s" % (userdata, mid)
    print(line)
def _get_mid():
    # Stub message-id generator (intended for client._mid_generate, see the
    # commented-out assignment in this file); always returns 1.
    return 1
# Wire up the client callbacks and connect to a local broker; the network
# loop runs on a background thread started by loop_start().
client = mqtt.Client()
# client._mid_generate = _get_mid
client.on_connect = on_connect
client.on_publish = on_pubish
# print(client.topic_matches_sub('/rpc/v1/+/+/'))
client.connect("127.0.0.1", 1883, 60)
client.loop_start()

# Fire 20 QoS-1 publishes and time how long queueing them takes.
count = 0
# while True:
start = time.time()
for i in range(20):
    count += 1
    ret = client.publish("/cereson/yc_a045/deliver", json.dumps({"slot_id": count}), qos=1)
    print("publish ret: %s" % ret)
t = time.time()-start
print(t)
# NOTE(review): `t` is computed once above; this loop prints the same
# stale value forever (it only keeps the process alive for the MQTT thread).
while True:
    print(t)
    time.sleep(1)
|
[
"75509151@qq.com"
] |
75509151@qq.com
|
a4ff63535b126c40b2234bfa5830c787a16555dc
|
27d41e8185a3433c4a98dc4bf671274b6e59e1b8
|
/list-1/rotate_left3 .py
|
5dd62a44b5e758d96c569ea7fc8d1748de31c9f4
|
[] |
no_license
|
AmirZregat/PythonAssignment2
|
79d259bbe1fee45ba5795db1c2257857ce44ff8b
|
5e4d803b64a5fc87c19515ae8e44b28850429386
|
refs/heads/master
| 2020-04-01T10:44:20.866865
| 2018-10-15T14:40:06
| 2018-10-15T14:40:06
| 153,129,295
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 311
|
py
|
#!/usr/bin/env python
# coding: utf-8
# In[1]:
def rotate_left3(nums):
    """Return a new 3-element list with *nums* rotated one position left."""
    rotated = [nums[index] for index in (1, 2, 0)]
    return rotated
def main():
    """Demo driver: print the left-rotation of each sample triple."""
    for sample in ([1, 2, 3], [5, 11, 9], [7, 0, 0]):
        print(rotate_left3(sample))
# Script entry point.
if __name__ == '__main__':
    main()
# In[ ]:
|
[
"ameerzregat97@gmail.com"
] |
ameerzregat97@gmail.com
|
5eecc20a05d6a926006396f8aef9e2e334488918
|
d11e6c63321f025d78a5e06903062f758bfb8e7c
|
/slicer.py
|
1ed3f40aaa41bff48b6cf2d04cd53c28faffc9e9
|
[
"BSD-3-Clause",
"BSD-2-Clause"
] |
permissive
|
syenn2896/batik-recommendation
|
ea24cfc9d47ad9866670ef1d2ed420786fdeb69a
|
c1bd88e9e4448d5baa48880524ab9ffa356f5777
|
refs/heads/master
| 2020-05-31T10:31:04.739043
| 2019-06-07T17:25:13
| 2019-06-07T17:25:13
| 190,242,250
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,124
|
py
|
import numpy as np
import cv2
import progressbar
import argparse
import os
def slice_image(img, num_slices_per_axis):
    """Yield num_slices_per_axis**2 equal-sized tiles of *img*, row-major.

    img: 2-D (or HxWxC) array-like supporting .shape and 2-D slicing.
    Trailing rows/columns that don't divide evenly are dropped.

    Bug fix: use floor division for the tile shape -- under Python 3 the
    original true division produced float slice indices, which raises
    TypeError when indexing.
    """
    slice_shape = (img.shape[0] // num_slices_per_axis, img.shape[1] // num_slices_per_axis)
    for i in range(num_slices_per_axis):
        for j in range(num_slices_per_axis):
            top_left = (i * slice_shape[0], j * slice_shape[1])
            yield img[top_left[0]:(top_left[0]+slice_shape[0]), top_left[1]:(top_left[1]+slice_shape[1])]
if __name__ == '__main__':
    # Bug fix: `sys` is used below (sys.exit) but was never imported, so any
    # image-processing error turned into a NameError instead of a clean exit.
    import sys

    parser = argparse.ArgumentParser(description='Dataset image slicer', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('dataset_dir_path', help="Path to directory that contains subdirectories of images")
    parser.add_argument('--output_dir_path', '-o', default="output", help="Path to output directory")
    parser.add_argument('--num_slices_per_axis', '-n', type=int, default=3, help="Number of slices per sides. Total slices = num_slices_per_axis ^ 2")
    args = parser.parse_args()

    mypath = args.dataset_dir_path
    outpath = args.output_dir_path
    num_slices_per_axis = args.num_slices_per_axis

    # prepare output dir
    if not os.path.exists(outpath):
        os.makedirs(outpath)

    count = 0
    num_dir = len([name for name in os.listdir(mypath)])
    bar = progressbar.ProgressBar(maxval=num_dir).start()
    # Expected layout: <mypath>/<class>/<image>; tiles are written to
    # <outpath>/<class>/<basename>_<k><ext>.
    for f in os.listdir(mypath):
        path = os.path.join(mypath, f)
        for f_sub in os.listdir(path):
            path_sub = os.path.join(path, f_sub)
            if os.path.isfile(path_sub):
                try:
                    img = cv2.imread(path_sub)
                    slice_count = 1
                    for sliced in slice_image(img, num_slices_per_axis):
                        outpath_sub = os.path.join(outpath, f)
                        if not os.path.exists(outpath_sub):
                            os.makedirs(outpath_sub)
                        basename, ext = os.path.splitext(f_sub)
                        sliced_name = "{}_{}{}".format(basename, slice_count, ext)
                        cv2.imwrite(os.path.join(outpath_sub, sliced_name), sliced)
                        slice_count = slice_count + 1
                except Exception as err:
                    # Report the failing file and stop (note: exit code 0 is
                    # kept from the original behaviour despite the failure).
                    print(err)
                    print(path_sub)
                    sys.exit(0)
        count += 1
        bar.update(count)
    bar.finish()
|
[
"noreply@github.com"
] |
syenn2896.noreply@github.com
|
8a82fe4fda4dd2a787060c67f07e16f3c5ccd7d2
|
5bbf20ec787aaab426d475f4c0aa2e8b44383b3f
|
/DBN_code/dbn_tf-master/rbm_MNIST_test.py
|
039eec1f8516876bac989c162d93525c5081057f
|
[] |
no_license
|
gansterhuang/-
|
b124310f2b963e9d830f94b8e1fa60721f54c2e5
|
0b048899303655b37c8d37911e7f1cf5bd690eb6
|
refs/heads/master
| 2021-05-22T14:19:00.457161
| 2020-07-03T10:39:02
| 2020-07-03T10:39:02
| 252,960,052
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,659
|
py
|
import tensorflow as tf
import numpy as np
import input_data
import Image
from util import tile_raster_images
def sample_prob(probs):
    """Bernoulli-sample a binary tensor from per-unit probabilities.

    Draws uniform noise of the same shape; relu(sign(p - u)) is 1 where
    p > u and 0 otherwise.
    """
    noise = tf.random_uniform(tf.shape(probs))
    return tf.nn.relu(tf.sign(probs - noise))
# Learning rate and mini-batch size for the CD-1 updates below.
alpha = 1.0
batchsize = 100

mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
trX, trY, teX, teY = mnist.train.images, mnist.train.labels, mnist.test.images,\
    mnist.test.labels

# Placeholders: visible data X, labels Y (unused here), and the RBM
# parameters (784x500 weights plus visible/hidden biases) fed from numpy.
X = tf.placeholder("float", [None, 784])
Y = tf.placeholder("float", [None, 10])
rbm_w = tf.placeholder("float", [784, 500])
rbm_vb = tf.placeholder("float", [784])
rbm_hb = tf.placeholder("float", [500])

# One Gibbs step (CD-1): visible -> hidden sample -> visible sample -> hidden.
h0 = sample_prob(tf.nn.sigmoid(tf.matmul(X, rbm_w) + rbm_hb))
v1 = sample_prob(tf.nn.sigmoid(
    tf.matmul(h0, tf.transpose(rbm_w)) + rbm_vb))
h1 = tf.nn.sigmoid(tf.matmul(v1, rbm_w) + rbm_hb)

# Contrastive-divergence gradient estimate and the parameter-update ops.
w_positive_grad = tf.matmul(tf.transpose(X), h0)
w_negative_grad = tf.matmul(tf.transpose(v1), h1)
update_w = rbm_w + alpha * \
    (w_positive_grad - w_negative_grad) / tf.to_float(tf.shape(X)[0])
update_vb = rbm_vb + alpha * tf.reduce_mean(X - v1, 0)
update_hb = rbm_hb + alpha * tf.reduce_mean(h0 - h1, 0)

# Mean-squared reconstruction error, used to monitor training progress.
h_sample = sample_prob(tf.nn.sigmoid(tf.matmul(X, rbm_w) + rbm_hb))
v_sample = sample_prob(tf.nn.sigmoid(
    tf.matmul(h_sample, tf.transpose(rbm_w)) + rbm_vb))
err = X - v_sample
err_sum = tf.reduce_mean(err * err)

sess = tf.Session()
init = tf.initialize_all_variables()
sess.run(init)

# Parameters live in numpy arrays ("o_*" = current, "n_*" = next) and are
# round-tripped through the placeholders on every update step.
n_w = np.zeros([784, 500], np.float32)
n_vb = np.zeros([784], np.float32)
n_hb = np.zeros([500], np.float32)
o_w = np.zeros([784, 500], np.float32)
o_vb = np.zeros([784], np.float32)
o_hb = np.zeros([500], np.float32)
print sess.run(
    err_sum, feed_dict={X: trX, rbm_w: o_w, rbm_vb: o_vb, rbm_hb: o_hb})

for start, end in zip(
        range(0, len(trX), batchsize), range(batchsize, len(trX), batchsize)):
    batch = trX[start:end]
    n_w = sess.run(update_w, feed_dict={
        X: batch, rbm_w: o_w, rbm_vb: o_vb, rbm_hb: o_hb})
    n_vb = sess.run(update_vb, feed_dict={
        X: batch, rbm_w: o_w, rbm_vb: o_vb, rbm_hb: o_hb})
    n_hb = sess.run(update_hb, feed_dict={
        X: batch, rbm_w: o_w, rbm_vb: o_vb, rbm_hb: o_hb})
    o_w = n_w
    o_vb = n_vb
    o_hb = n_hb
    # Every 10000 samples: report the error and dump the learned filters
    # (weight columns reshaped to 28x28) as a tiled PNG.
    if start % 10000 == 0:
        print sess.run(
            err_sum, feed_dict={X: trX, rbm_w: n_w, rbm_vb: n_vb, rbm_hb: n_hb})
        image = Image.fromarray(
            tile_raster_images(
                X=n_w.T,
                img_shape=(28, 28),
                tile_shape=(25, 20),
                tile_spacing=(1, 1)
            )
        )
        image.save("rbm_%d.png" % (start / 10000))
|
[
"noreply@github.com"
] |
gansterhuang.noreply@github.com
|
6026a7879db9ce1551cc0380380a8e6cc455bd5d
|
3495808055f12c143c963c556102db720656234f
|
/Week10/Author.py
|
a63585fe77aeff410bafcab2797ee16d6997e5c1
|
[] |
no_license
|
morris-necc/assignments
|
9a2545ea0ba5a433dd0488c5a3d2307b3ac50036
|
1049a8b5a6dbbfa4b409ff3593a75e0df1597b63
|
refs/heads/master
| 2023-02-05T06:03:30.586300
| 2020-12-16T08:37:04
| 2020-12-16T08:37:04
| 297,251,908
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 549
|
py
|
class Author:
    """Author record: name and gender are fixed at construction; the
    e-mail address may be replaced later via setEmail."""

    def __init__(self, name: str, email: str, gender: str):
        self.__name = name
        self.__email = email
        self.__gender = gender

    def getName(self) -> str:
        """Return the author's name."""
        return self.__name

    def getEmail(self) -> str:
        """Return the author's current e-mail address."""
        return self.__email

    def setEmail(self, email: str) -> None:
        """Replace the author's e-mail address."""
        self.__email = email

    def getGender(self) -> str:
        """Return the author's gender string."""
        return self.__gender

    def toString(self) -> str:
        """Return the canonical 'Author[...]' debug representation."""
        return "Author[name={},email={},gender={}]".format(
            self.__name, self.__email, self.__gender)
|
[
"morris.kim@binus.ac.id"
] |
morris.kim@binus.ac.id
|
fe5533726740ec4c707efdaa5bf8c7b6e73dffd6
|
a5c2b693e6bfcce584b6131b327364abde6daa25
|
/Entregaveis/dossier/codigo fonte/Code/ExecutionEnvironments/Python/src/app.py
|
6b499b45291b75896d6870a9dc6bb00c301c3fd3
|
[
"MIT"
] |
permissive
|
joaoesantos/ise_learning
|
47a6087eb7a45759175a36f328df7c75973bfcea
|
ad3a27cd6e24693f8f9a90de7ce298a4f300d97e
|
refs/heads/master
| 2023-03-22T10:12:03.367174
| 2020-10-09T18:50:23
| 2020-10-09T18:50:23
| 246,681,200
| 0
| 0
|
MIT
| 2021-03-20T05:03:14
| 2020-03-11T21:13:53
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 2,494
|
py
|
from flask import Flask, request
from werkzeug.exceptions import HTTPException
import json
from modules.codeExecution.execution_handler import ExecutionHandler
app = Flask(__name__)

@app.route('/', methods = ['POST'])
def execute_code():
    """Run the JSON-posted code through the ExecutionHandler and return the
    raw result, error flag, and execution time as JSON.

    NOTE(review): `config` is a module global loaded in the __main__ block;
    this handler assumes the app was started via this file.
    """
    executionHandler = ExecutionHandler(config["execution_parameters"])
    execution_result = executionHandler.run(request.get_json())
    return app.response_class(
        response = json.dumps({
            "rawResult": str(execution_result.rawResult),
            "wasError": execution_result.wasError,
            "executionTime": execution_result.executionTime,
        }),
        status = 200,
        mimetype = "application/json"
    )
@app.errorhandler(Exception)
def handle_exception(e):
    """Translate uncaught exceptions into problem+json responses.

    Dispatches on the exception's class *name* (matching the original
    behaviour): TypeError -> 400, TimeoutExpired -> 408, anything else
    -> 500.
    """
    exc_name = e.__class__.__name__
    if exc_name == "TypeError":
        status = 400
        problem = {
            "type": "TypeError",
            "title": "TypeError",
            "detail": str(e),
            "instance": "/iselearning/executionEnvironments/python/typeError"
        }
    elif exc_name == "TimeoutExpired":
        # Raised when the sandboxed process exceeds its time budget.
        status = 408
        problem = {
            "type": "TimeoutExpire",
            "title": "TimeoutExpired",
            "detail": "Code execution was cancelled due to exceeding process time.",
            "instance": "/iselearning/executionEnvironments/python/timeout"
        }
    else:
        # Catch-all for anything unexpected.
        status = 500
        problem = {
            "type": "Internal Server Error",
            "title": "Internal Server Error",
            "detail": "The server encountered an internal error and was unable to complete your request. Either the server is overloaded or there is an error in the application.",
            "instance": "/iselearning/executionEnvironments/python/unexpected"
        }
    return app.response_class(
        response = json.dumps(problem),
        status = status,
        mimetype = "application/problem+json"
    )
if __name__ == '__main__':
    # Load server/executor settings; `config` is also read by execute_code.
    with open("config.json") as json_config_file:
        config = json.load(json_config_file)
    app.run(host = config["server"]["host"], port = config["server"]["port"])
|
[
"aquintelaoliveira@gmail.com"
] |
aquintelaoliveira@gmail.com
|
f549188681936a82c81c61f02cbfa041f0ff61eb
|
89fe2c5b2ba96d935fe1acf8dcedb518d684fa19
|
/simulation_ws/src/test_nodes/setup.py
|
715e2ddda05a7edd2ffbe250ed5583c865c98f63
|
[
"LicenseRef-scancode-unknown-license-reference",
"MIT-0",
"MIT"
] |
permissive
|
aws-robotics/aws-robomaker-sample-application-cloudwatch
|
03a7228f7b7a2c883c3de295d00b6a78d2522d92
|
b1b347652436d1cb960ef9e848a9b270fda4fe25
|
refs/heads/ros1
| 2022-02-14T07:30:12.865345
| 2021-11-02T05:10:42
| 2021-11-02T05:10:42
| 157,936,786
| 22
| 58
|
NOASSERTION
| 2021-11-02T05:10:43
| 2018-11-17T00:40:28
|
Python
|
UTF-8
|
Python
| false
| false
| 380
|
py
|
## ! DO NOT MANUALLY INVOKE THIS setup.py, USE CATKIN INSTEAD
## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html
from distutils.core import setup
from catkin_pkg.python_setup import generate_distutils_setup
# fetch values from package.xml
# (generate_distutils_setup reads name/version/etc. from the ROS package
# manifest so they are not duplicated here)
setup_args = generate_distutils_setup(
    packages=['test_nodes'],
    package_dir={'': 'src'}
)

setup(**setup_args)
|
[
"noreply@github.com"
] |
aws-robotics.noreply@github.com
|
33199a1ba23b6b5141059998e600962af789fe93
|
d299465acbeeddfff6e3fcdc71f6523c8cba1a30
|
/utils/generate_anchor_boxes.py
|
b2b14337dd6362fbbbcdd030f67f199867723c21
|
[] |
no_license
|
rajashekarv95/3d_cnn
|
173ae37a2015b84d19663762f0689bae9db81a54
|
f59c3e91c53c1bc3e0e9512626cd2842d125e2eb
|
refs/heads/master
| 2023-02-17T04:18:04.904186
| 2021-01-10T00:15:03
| 2021-01-10T00:15:03
| 324,251,089
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 775
|
py
|
import numpy as np
def generate_anchor_boxes(resolutions, aspect_ratios, steps=16, image_width=640, image_height=480):
    """Generate anchor boxes centred on a regular grid over the image.

    Bug fix: the ``steps`` parameter was declared but ignored (the grid
    stride was hard-coded to 16); it now controls the spacing. The image
    extent is also parameterized; the defaults (640x480, stride 16)
    preserve the previous behaviour exactly.

    Args:
        resolutions: iterable of box sizes r, where width + height == r.
        aspect_ratios: iterable of (a1, a2) pairs; width:height = a1:a2.
        steps: grid stride in pixels between anchor centres.
        image_width: image width in pixels (centres span [steps, width)).
        image_height: image height in pixels.

    Returns:
        (anchors, mask): anchors is a list of [x_min, x_max, y_min, y_max]
        boxes; mask[k] is 0 when anchor k touches or crosses the image
        border, else 1.
    """
    anchors = []
    mask = []
    # Anchor centres every `steps` pixels, skipping the 0 border row/column.
    x = np.arange(start=steps, stop=image_width, step=steps)
    y = np.arange(start=steps, stop=image_height, step=steps)
    for j in y:
        for i in x:
            for r in resolutions:
                for a1, a2 in aspect_ratios:
                    # Split resolution r into width/height by the ratio,
                    # then halve to get half-extents around centre (i, j).
                    dx = r * a1 / (a1 + a2)
                    dy = r * a2 / (a1 + a2)
                    dx = dx/2
                    dy = dy/2
                    anchors.append([i - dx, i + dx, j - dy, j + dy])
                    # Mask out anchors that stick out of the image.
                    if i - dx <= 0 or i + dx >= image_width or j - dy <= 0 or j + dy >= image_height:
                        mask.append(0)
                    else:
                        mask.append(1)
    return anchors, mask
|
[
"rajashekar.v95@gmail.com"
] |
rajashekar.v95@gmail.com
|
1b9a9b6508676978960d2a6befa80bb19d553089
|
4906dafd1722eff0538cec8b11cd800afd9fc215
|
/PlayerAI.py
|
aeedacdc9a3dce92cbb21e82b85b56d994162235
|
[] |
no_license
|
shrestha-pranav/2048-AI
|
4ea548cc90b1b9180c91735b8faa8b16c1a6a5ab
|
5371bb92bb2203f53d1e8b30347e81f4a96d529f
|
refs/heads/master
| 2021-04-06T13:01:27.483538
| 2018-09-27T01:14:27
| 2018-09-27T01:14:27
| 125,275,474
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,369
|
py
|
from BaseAI import BaseAI
from random import randint
import op
import time
import heuristic as hr
# s -> current state
# a, b -> alpha, beta
# d -> recursion depth
# p -> probability
class PlayerAI(BaseAI):
    """Expectimax (max / chance / min) 2048 player with alpha-beta-style
    pruning, iterative deepening, and a ~0.18 s per-move time budget.

    NOTE(review): uses time.clock(), which was removed in Python 3.8 --
    confirm the target interpreter or migrate to time.perf_counter().
    """

    def getMove(self, grid):
        """Pick a move for *grid* within the time budget.

        The grid is log2-encoded (cells capped at 15) and packed into one
        64-bit integer by the `op` helper module; search depth grows
        iteratively until time runs out.
        """
        self.time = time.clock()
        # Convert grid to a single 64-bit integer
        for i in range(4):
            for j in range(4):
                grid.map[i][j] = min(15, op.log_mod[grid.map[i][j]])
        x = op.bitify_grid(grid.map)
        #Implementing a IDS to hopefully save time
        prev_move = None
        self.max_depth = 2
        while True:
            self.max_depth += 1
            (move, _) = self.maximize(x, -float('inf'), float('inf'), 0, 0)
            if time.clock()-self.time < 0.18:
                # Search at this depth finished in budget; keep its answer.
                prev_move = move
            elif prev_move is not None:
                # Out of time: return the last fully searched answer.
                return prev_move
            else:
                # Out of time with nothing completed: random legal move.
                moves = op.getAvailableMoves(x)
                return moves[randint(0, len(moves)-1)]

    def chance(self, s, a, b, d, p):
        """Chance node: a new tile is 4 with probability 0.1, else 2.
        p counts consecutive 4s; after three in a row only 2s are modelled."""
        if time.clock()-self.time>=0.18: return 0
        # Unlikely getting more than three 4's in a row
        if p==3: return self.minimize(1,s,a,b,d,p)
        return 0.1*self.minimize(2,s,a,b,d,p+1)+ 0.9*self.minimize(1,s,a,b,d,p)

    def minimize(self, new_val, s, a, b, d, p):
        """Min node: place a tile of (log-coded) value new_val into the
        empty cell that minimises the player's utility; alpha-beta pruned,
        heuristic evaluation at the depth limit."""
        if d>=self.max_depth: return hr.eval(s)
        minUtility = float('inf')
        r = op.get_rows(s)
        for x in range(4):
            for y in op.empty_tiles[r[x]]:
                child = op.set_cell(s, x, y, new_val)
                (_, utility) = self.maximize(child, a, b, d, p)
                if utility < minUtility: minUtility = utility
                if minUtility <= a: break
                if minUtility < b: b = minUtility
        return minUtility

    def maximize(self, s, a, b, d, p):
        """Max node: choose among the available moves; returns
        (best_move, utility). The large negative constant is a dead-end
        penalty when no moves remain (presumably below any heuristic
        value -- TODO confirm against heuristic range)."""
        if time.clock()-self.time>=0.18: return (None, 0)
        moves = op.getAvailableMoves(s)
        if not moves: return (None, -16283176000)
        (maxMove, maxUtility) = (None, -float('inf'))
        for move in moves:
            child = op.move_grid(s, move)
            utility = self.chance(child, a, b, d+1, p)
            if utility > maxUtility: (maxMove, maxUtility) = (move, utility)
            if maxUtility >= b: break
            if maxUtility > a: a = maxUtility
        return (maxMove, maxUtility)
|
[
"shresthapranav278@gmail.com"
] |
shresthapranav278@gmail.com
|
dd657ec62d8b5a7c496e9cee0ffdafad0f6666de
|
d4ab4644168e60f4a3030d07ac0c6dc289a5a817
|
/Nao tests/tests/currentFolder.py
|
48590e9be0fbcbf3dac9c97da3f1b8e00bfca67f
|
[] |
no_license
|
thaije/MarcoPoloPepper
|
4f38dbee12aaf08467c477a457f8a5afb1fab85c
|
631d9f0af3cb064b3d534ffa9b0c82273d6ae4b8
|
refs/heads/master
| 2021-05-12T19:54:19.756890
| 2019-09-05T07:18:13
| 2019-09-05T07:18:13
| 117,107,865
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 85
|
py
|
import os

# NOTE(review): the path ends in "record.wav", which looks like a file, not
# a directory -- os.listdir would raise an error on it; confirm the intended
# directory path. (Python 2 print syntax.)
print "Files in current folder:"
print os.listdir("/home/nao/record.wav")
|
[
"tjalling_haije@outlook.com"
] |
tjalling_haije@outlook.com
|
63ddba6c901c72ebadee22bdd1b1a644b43e20e1
|
5f3c22a2cad301e1ce6753e944602d98ba39d407
|
/app.py
|
271d6e66e5ad0b25755e6be3321a475a195939c7
|
[] |
no_license
|
vasupradharamac/Budget_Tracker
|
4d0d70c9ef2675f83a6077c84c2ad1d2c3d9d243
|
c6c55802069a9d605127cd73ce03a7b859d02c26
|
refs/heads/main
| 2023-05-13T07:21:06.997748
| 2021-06-02T11:22:30
| 2021-06-02T11:22:30
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,971
|
py
|
from flask import Flask, render_template, request, redirect
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import *
from sqlalchemy.dialects import *
import os

# Build an absolute SQLite URL next to this file so the app works no
# matter what the current working directory is.
project_dir = os.path.dirname(os.path.abspath(__file__))
database_file = "sqlite:///{}".format(
    os.path.join(project_dir,"mydatabase.db")
)

app = Flask(__name__)
app.config["SQLALCHEMY_DATABASE_URI"] = database_file
db = SQLAlchemy(app)
class Expense(db.Model):
    """One expense record entered through the add/edit forms."""
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    # Date is stored as free text, not a Date column.
    date = db.Column(db.String(50), nullable=False)
    expensename = db.Column(db.String(50), nullable=False)
    amount = db.Column(db.Integer, nullable=False)
    # Category values used by expenses(): 'business', 'food',
    # 'entertainment', 'other'.
    category = db.Column(db.String(50), nullable=False)
@app.route('/')
def add():
    """Render the form for entering a new expense."""
    return render_template('add.html')
@app.route('/delete/<int:id>')
def delete(id):
    """Delete the expense with the given id, then redirect to the list.

    Guards against a stale/unknown id: ``first()`` returns ``None`` when
    no row matches, and ``db.session.delete(None)`` would raise and
    produce a 500 response.
    """
    expense = Expense.query.filter_by(id=id).first()
    if expense is not None:
        db.session.delete(expense)
        db.session.commit()
    return redirect('/expenses')
@app.route('/updateexpense/<int:id>')
def updateexpense(id):
    """Show the edit form pre-filled with the selected expense."""
    record = Expense.query.filter_by(id=id).first()
    return render_template('updateexpense.html', expense=record)
@app.route('/edit', methods=['POST'])
def edit():
    """Apply the submitted form values to an existing expense row."""
    form = request.form
    expense = Expense.query.filter_by(id=form['id']).first()
    # Copy every editable field from the form onto the row.
    expense.date = form['date']
    expense.expensename = form['expensename']
    expense.amount = form['amount']
    expense.category = form['category']
    db.session.commit()
    return redirect('/expenses')
@app.route('/expenses')
def expenses():
    """List all expenses with an overall total and per-category subtotals."""
    rows = Expense.query.all()
    subtotals = {'business': 0, 'food': 0, 'entertainment': 0, 'other': 0}
    grand_total = 0
    for row in rows:
        grand_total += row.amount
        if row.category in subtotals:
            subtotals[row.category] += row.amount
    return render_template(
        'expenses.html',
        expenses=rows,
        total=grand_total,
        t_business=subtotals['business'],
        t_food=subtotals['food'],
        t_entertainment=subtotals['entertainment'],
        t_other=subtotals['other'],
    )
@app.route('/addexpense', methods=['POST'])
def addexpense():
    """Create a new expense row from the submitted form, then show the list.

    Removed a leftover debug ``print`` of the raw form values.
    """
    date = request.form["date"]
    expensename = request.form["expensename"]
    amount = request.form["amount"]
    category = request.form["category"]
    expense = Expense(
        date=date, expensename=expensename, amount=amount, category=category
    )
    db.session.add(expense)
    db.session.commit()
    return redirect('/expenses')
if __name__ == '__main__':
    # NOTE(review): debug=True enables the interactive debugger and
    # reloader -- do not run with this setting in production.
    app.run(debug=True)
|
[
"noreply@github.com"
] |
vasupradharamac.noreply@github.com
|
481540764131d7074bfe42bf33d8e55e6de01a65
|
e76552ab870d13baf216b9a96283757e659d696d
|
/blog/migrations/0003_post_tags.py
|
0159f14a964180a2a040a4c13e2b52bed97695e0
|
[] |
no_license
|
jeer2234/blog
|
50ad09cdca818cd1a337be3dab8d84f91745bde2
|
17f158035853ae84bcbfd468b139f0e070699d98
|
refs/heads/main
| 2023-04-03T17:06:17.844705
| 2021-04-07T17:28:09
| 2021-04-07T17:28:09
| 351,650,202
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 549
|
py
|
# Generated by Django 3.1.7 on 2021-04-02 19:40
# Auto-generated migration: adds the django-taggit ``tags`` manager field
# to the Post model. Do not hand-edit generated migrations.

from django.db import migrations
import taggit.managers


class Migration(migrations.Migration):

    dependencies = [
        ('taggit', '0003_taggeditem_add_unique_index'),
        ('blog', '0002_comment'),
    ]

    operations = [
        migrations.AddField(
            model_name='post',
            name='tags',
            field=taggit.managers.TaggableManager(help_text='A comma-separated list of tags.', through='taggit.TaggedItem', to='taggit.Tag', verbose_name='Tags'),
        ),
    ]
|
[
"jeer2234@gmail.com"
] |
jeer2234@gmail.com
|
830e8e203059feac39b2616b5061d3d605633ed5
|
d9e4d3a30da0229899a0f10bdcb04c946ad5a2ff
|
/scripts/SR_FillPlots.py
|
d1dac57b8120d6e5d2be6359ab6b4133bb7874d8
|
[] |
no_license
|
TC01/zprime13
|
809c367d7d2a84e2cb4b4c14626915c0aed2a22c
|
0b874a81f9444df69fe2d73839a2ab837d499d7a
|
refs/heads/master
| 2016-08-04T19:31:59.223746
| 2015-09-23T18:51:55
| 2015-09-23T18:51:55
| 42,950,811
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 11,908
|
py
|
import ROOT
from ROOT import *
from CutOnTree import writeplot

####
## Code that assembles all (I do mean all) relevant histograms for output to theta. DOES NOT ADD SIGNAL. SIGNAL POINTS ARE PROCESSED SEPARATELY AND -M-U-S-T- BE PROCESSED SECOND.
####

# Defs:
# Integrated luminosity (pb^-1) used to normalize every MC sample.
lumi = 19748.

# Define files:
# single top: file-name suffixes, cross sections, generated-event counts
sFileName = ['t','s','tW','_t','_s','_tW']
sxs = [56.4,3.79,11.117,30.7,1.768,11.117]
sn = [3758227, 259961, 497658, 1935072, 139974, 493460]
sFilePrefix = '/home/osherson/Work/Trees/Gstar/T'
# data
dFileNameE = "/home/osherson/Work/Trees/Gstar/SingleElectron.root"
dFileNameM = "/home/osherson/Work/Trees/Gstar/SingleMu.root"
# ttbar
tFileName = ["tt", "ttl_uncut"]
txs = [107.7,25.17]
tn = [25424818,12043695]
tFilePrefix = "/home/osherson/Work/Trees/Gstar/"
# TT-rw vars (and errors): top-pt reweighting normalization N and slope a
N = 0.96
a = 0.0012
Nu = N + 0.1
au = a - 0.00023 # no this isn't a typo; recall that alpha is a negative factor but the value listed here is positive, so a smaller value here is closer to positive
Nd = N - 0.1
ad = a + 0.00023 # no this isn't a typo; recall that alpha is a negative factor but the value listed here is positive, so a smaller value here is closer to positive
# TTree weight strings: N * exp(-a * mean(top pt)), central plus up/down variations
TW = "("+str(N)+"*2.71828^(-"+str(a)+"*0.5*(MCantitoppt+MCtoppt)))"
TW_aup = "("+str(N)+"*2.71828^(-"+str(au)+"*0.5*(MCantitoppt+MCtoppt)))"
TW_adn = "("+str(N)+"*2.71828^(-"+str(ad)+"*0.5*(MCantitoppt+MCtoppt)))"
TW_Nup = "("+str(Nu)+"*2.71828^(-"+str(a)+"*0.5*(MCantitoppt+MCtoppt)))"
TW_Ndn = "("+str(Nd)+"*2.71828^(-"+str(a)+"*0.5*(MCantitoppt+MCtoppt)))"
# NT-est vars (and errors): linear fit in top-candidate mass -- presumably
# the non-top mistag rate; confirm against the fit that produced it.
ntW = "(0.072885 + 0.000660127*(topcandmass-170.))"
ntWu = "(((0.072885 + 0.000660127*(topcandmass-170.)) + ((topcandmass-170.)*(topcandmass-170.)*(0.000633024*0.000633024)+((topcandmass-170.)*2*0.0000193051+(0.0348167*0.0348167)))^0.5))"
ntWd = "(((0.072885 + 0.000660127*(topcandmass-170.)) - ((topcandmass-170.)*(topcandmass-170.)*(0.000633024*0.000633024)+((topcandmass-170.)*2*0.0000193051+(0.0348167*0.0348167)))^0.5))"
# These are all the data-driven uncertainties; we'll have to load separate Ntuples for most of the MC systematics.
### Set Up the Histograms:
# Not Saved: These don't play a part in limit setting, and are thus going to be discarded.
# (Created BEFORE the output TFile is opened, so ROOT does not attach them to it.)
msZPs = TH1F("msZPs", "", 30, 500, 3500)
esZPs = TH1F("esZPs", "", 30, 500, 3500)
msZPsU = TH1F("msZPsU", "", 30, 500, 3500) # sub up
esZPsU = TH1F("esZPsU", "", 30, 500, 3500)
msZPsD = TH1F("msZPsD", "", 30, 500, 3500) # sub down
esZPsD = TH1F("esZPsD", "", 30, 500, 3500)
mtZPs = TH1F("mtZPs", "", 30, 500, 3500) # Central Value
etZPs = TH1F("etZPs", "", 30, 500, 3500)
mtZPsU = TH1F("mtZPsU", "", 30, 500, 3500) # sub up
etZPsU = TH1F("etZPsU", "", 30, 500, 3500)
mtZPsD = TH1F("mtZPsD", "", 30, 500, 3500) # sub down
etZPsD = TH1F("etZPsD", "", 30, 500, 3500)
mtZPs_Nup = TH1F("mtZPs_Nup", "", 30, 500, 3500) # N up
etZPs_Nup = TH1F("etZPs_Nup", "", 30, 500, 3500)
mtZPs_Ndn = TH1F("mtZPs_Ndn", "", 30, 500, 3500) # N down
etZPs_Ndn = TH1F("etZPs_Ndn", "", 30, 500, 3500)
mtZPs_aup = TH1F("mtZPs_aup", "", 30, 500, 3500) # a up
etZPs_aup = TH1F("etZPs_aup", "", 30, 500, 3500)
mtZPs_adn = TH1F("mtZPs_adn", "", 30, 500, 3500) # a down
etZPs_adn = TH1F("etZPs_adn", "", 30, 500, 3500)

# Create save file:
fout = TFile("Zprime_Theta_Feed.root", "RECREATE") # Careful, unlike older versions of the code, this will overwrite old files.
fout.cd()

# Saved: Histograms created here will be saved to file
# (ROOT attaches new histograms to the current directory, i.e. fout).
# data:
mZPd = TH1F("MU__DATA", "", 30, 500, 3500)
eZPd = TH1F("EL__DATA", "", 30, 500, 3500)
# Central Value BKG:
mZPs = TH1F("MU__ST", "", 30, 500, 3500)
eZPs = TH1F("EL__ST", "", 30, 500, 3500)
mZPt = TH1F("MU__TT", "", 30, 500, 3500)
eZPt = TH1F("EL__TT", "", 30, 500, 3500)
mZPn = TH1F("MU__NT", "", 30, 500, 3500)
eZPn = TH1F("EL__NT", "", 30, 500, 3500)
# Errors from non-top:
mZPnU = TH1F("MU__NT__linfiterr__up", "", 30, 500, 3500)
eZPnU = TH1F("EL__NT__linfiterr__up", "", 30, 500, 3500)
mZPnD = TH1F("MU__NT__linfiterr__down", "", 30, 500, 3500)
eZPnD = TH1F("EL__NT__linfiterr__down", "", 30, 500, 3500)
# Errors from ttbar:
mZPt_aup = TH1F("MU__TT__a__up", "", 30, 500, 3500)
eZPt_aup = TH1F("EL__TT__a__up", "", 30, 500, 3500)
mZPn_aup = TH1F("MU__NT__a__up", "", 30, 500, 3500)
eZPn_aup = TH1F("EL__NT__a__up", "", 30, 500, 3500)
mZPt_adn = TH1F("MU__TT__a__down", "", 30, 500, 3500)
eZPt_adn = TH1F("EL__TT__a__down", "", 30, 500, 3500)
mZPn_adn = TH1F("MU__NT__a__down", "", 30, 500, 3500)
eZPn_adn = TH1F("EL__NT__a__down", "", 30, 500, 3500)
mZPt_Nup = TH1F("MU__TT__N__up", "", 30, 500, 3500)
eZPt_Nup = TH1F("EL__TT__N__up", "", 30, 500, 3500)
mZPn_Nup = TH1F("MU__NT__N__up", "", 30, 500, 3500)
eZPn_Nup = TH1F("EL__NT__N__up", "", 30, 500, 3500)
mZPt_Ndn = TH1F("MU__TT__N__down", "", 30, 500, 3500)
eZPt_Ndn = TH1F("EL__TT__N__down", "", 30, 500, 3500)
mZPn_Ndn = TH1F("MU__NT__N__down", "", 30, 500, 3500)
eZPn_Ndn = TH1F("EL__NT__N__down", "", 30, 500, 3500)
# Errors from MC:
# Now we fill them:
# cuts: full selection (signal-like top tag) and anti-tag (sideband)
Fulltag = "(topcandtau2/topcandtau1>0.1&(lepcut2Drel>25.||lepcut2Ddr>0.5)&heavytopcandmass>250.)&(topcandtau3/topcandtau2<0.55&topcandmass<250&topcandmass>140)&isLoose>0."
Antitag = "(topcandtau2/topcandtau1>0.1&(lepcut2Drel>25.||lepcut2Ddr>0.5)&heavytopcandmass>250.)&(topcandtau3/topcandtau2>0.55&topcandmass<250&topcandmass>140)&isLoose>0."
# Subtractions: fill the anti-tag MC templates that get subtracted from
# the data-driven non-top estimate further below.
# ttbar (mistag rate times top-pt reweighting, with all variations):
for i in range(len(tFileName)):
    fname = tFilePrefix + tFileName[i] + '.root'
    scale = lumi * txs[i] / tn[i]
    for lep, hists in (
        ("isMuon", (mtZPs, mtZPs_aup, mtZPs_adn, mtZPs_Nup, mtZPs_Ndn, mtZPsU, mtZPsD)),
        ("isElec", (etZPs, etZPs_aup, etZPs_adn, etZPs_Nup, etZPs_Ndn, etZPsU, etZPsD)),
    ):
        cut = "(" + Antitag + "&" + lep + ">0.)"
        weights = (ntW + "*" + TW, ntW + "*" + TW_aup, ntW + "*" + TW_adn,
                   ntW + "*" + TW_Nup, ntW + "*" + TW_Ndn,
                   ntWu + "*" + TW, ntWd + "*" + TW)
        for hist, w in zip(hists, weights):
            writeplot(fname, scale, hist, "EventMass", cut, "(" + w + ")")

# single top (mistag rate only -- no top-pt reweighting):
for i in range(len(sFileName)):
    fname = sFilePrefix + sFileName[i] + '.root'
    scale = lumi * sxs[i] / sn[i]
    for lep, hists in (("isMuon", (msZPs, msZPsU, msZPsD)),
                       ("isElec", (esZPs, esZPsU, esZPsD))):
        cut = "(" + Antitag + "&" + lep + ">0.)"
        for hist, rate in zip(hists, (ntW, ntWu, ntWd)):
            writeplot(fname, scale, hist, "EventMass", cut, "(" + rate + ")")
# DATA: observed EventMass spectra in the full selection.
writeplot(dFileNameM, 1.0, mZPd, "EventMass", "(" + Fulltag + "&isMuon>0.)", "(1.0)")
writeplot(dFileNameE, 1.0, eZPd, "EventMass", "(" + Fulltag + "&isElec>0.)", "(1.0)")

# NON-TOP EST: anti-tag data weighted by the mistag rate; the central rate
# is reused for the a/N variations, the linear-fit error uses ntWu/ntWd.
for mu_hist, el_hist, rate in (
    (mZPn, eZPn, ntW),
    (mZPn_aup, eZPn_aup, ntW),
    (mZPn_adn, eZPn_adn, ntW),
    (mZPn_Nup, eZPn_Nup, ntW),
    (mZPn_Ndn, eZPn_Ndn, ntW),
    (mZPnU, eZPnU, ntWu),
    (mZPnD, eZPnD, ntWd),
):
    writeplot(dFileNameM, 1.0, mu_hist, "EventMass", "(" + Antitag + "&isMuon>0.)", "(" + rate + ")")
    writeplot(dFileNameE, 1.0, el_hist, "EventMass", "(" + Antitag + "&isElec>0.)", "(" + rate + ")")

# TTBAR: full selection with top-pt reweighting and its variations.
for i in range(len(tFileName)):
    fname = tFilePrefix + tFileName[i] + '.root'
    scale = lumi * txs[i] / tn[i]
    for lep, hists in (("isMuon", (mZPt, mZPt_aup, mZPt_adn, mZPt_Nup, mZPt_Ndn)),
                       ("isElec", (eZPt, eZPt_aup, eZPt_adn, eZPt_Nup, eZPt_Ndn))):
        cut = "(" + Fulltag + "&" + lep + ">0.)"
        for hist, w in zip(hists, (TW, TW_aup, TW_adn, TW_Nup, TW_Ndn)):
            writeplot(fname, scale, hist, "EventMass", cut, "(" + w + ")")

# SINGLE TOP: full selection, unit weight.
for i in range(len(sFileName)):
    fname = sFilePrefix + sFileName[i] + '.root'
    scale = lumi * sxs[i] / sn[i]
    writeplot(fname, scale, mZPs, "EventMass", "(" + Fulltag + "&isMuon>0.)", "(1.0)")
    writeplot(fname, scale, eZPs, "EventMass", "(" + Fulltag + "&isElec>0.)", "(1.0)")
# error files:
# Do the subtractions as needed: the non-top estimate is anti-tag data
# minus the anti-tag single-top and ttbar expectations (hence weight -1).
mZPn.Add(msZPs,-1)
eZPn.Add(esZPs,-1)
mZPn.Add(mtZPs,-1)
eZPn.Add(etZPs,-1)
# Nup:
mZPn_Nup.Add(msZPs,-1)
eZPn_Nup.Add(esZPs,-1)
mZPn_Nup.Add(mtZPs_Nup,-1)
eZPn_Nup.Add(etZPs_Nup,-1)
# Ndn
mZPn_Ndn.Add(msZPs,-1)
eZPn_Ndn.Add(esZPs,-1)
mZPn_Ndn.Add(mtZPs_Ndn,-1)
eZPn_Ndn.Add(etZPs_Ndn,-1)
# aup:
mZPn_aup.Add(msZPs,-1)
eZPn_aup.Add(esZPs,-1)
mZPn_aup.Add(mtZPs_aup,-1)
eZPn_aup.Add(etZPs_aup,-1)
# adn:
mZPn_adn.Add(msZPs,-1)
eZPn_adn.Add(esZPs,-1)
mZPn_adn.Add(mtZPs_adn,-1)
eZPn_adn.Add(etZPs_adn,-1)
# fit U
mZPnU.Add(msZPsU,-1)
eZPnU.Add(esZPsU,-1)
mZPnU.Add(mtZPsU,-1)
eZPnU.Add(etZPsU,-1)
# fit D
mZPnD.Add(msZPsD,-1)
eZPnD.Add(esZPsD,-1)
mZPnD.Add(mtZPsD,-1)
eZPnD.Add(etZPsD,-1)
# save file! now you're done!
fout.Write()
fout.Save()
fout.Close()
|
[
"rosser.bjr@gmail.com"
] |
rosser.bjr@gmail.com
|
278bf386e32ebad40ef06e6967c5e4b15b9ab92e
|
835e2936e3dcba7c908aca1c380c521b3c7eac6f
|
/core/migrations/0013_auto_20210716_1703.py
|
5efa559b72b5496a12f912a958daa393ad9b48c9
|
[] |
no_license
|
shahzadShahid/Ecommerce
|
2b594533d34efbcf03614e2e2c50259a8597c630
|
7dc61af23b445a8c5f573b3f2ce361a2a2e8b386
|
refs/heads/main
| 2023-06-21T04:44:46.239684
| 2021-07-16T15:59:05
| 2021-07-16T15:59:05
| 386,167,045
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 508
|
py
|
# Generated by Django 2.2 on 2021-07-16 11:33
# Auto-generated migration: re-declares OrderItem.user as a foreign key to
# the configured user model with CASCADE delete. Do not hand-edit
# generated migrations.

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0012_auto_20210716_1701'),
    ]

    operations = [
        migrations.AlterField(
            model_name='orderitem',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
    ]
|
[
"shaikshahzad8765@gmal.com"
] |
shaikshahzad8765@gmal.com
|
b5713d23357735ce2126e994aaa92ea6b2bf5cf7
|
5bcd23d57dae969b78a44f2706a41877d1dd8ba5
|
/Mutate.py
|
24c4b61d63700fed703996105240d12f054ecd3c
|
[] |
no_license
|
halloweentown/Coen352
|
ecbeda69419f641db0e06c2edeada89113cf7035
|
0f9f1fe976671c15e3cbcdbf03e5c3a21df1a015
|
refs/heads/master
| 2020-04-10T11:37:44.556559
| 2018-12-13T02:15:51
| 2018-12-13T02:15:51
| 160,998,914
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,906
|
py
|
import numpy as np
import random as rng
import globalVariables as gV
import SortIndividual
##TODO probabilities as global variable
## probabilities of each mutation occuring
#m1p = 0.25
#m2p = 0.25
#m3p = 0.25
#m4p = 0.25
class Mutate:
    """Mutation operators for a sort "individual".

    NOTE(review): ``add`` treats ``self`` as a list of ``[i, j]`` swap
    pairs that also carries an ``mHistory`` attribute -- presumably this
    class is mixed into a list subclass (see SortIndividual); confirm.
    """

    def add(self):
        """Insert one random swap at a random index, if below the cap.

        Appends 'A' to ``self.mHistory`` on success, or '~A' when the sort
        is already at ``gV.maxSortLength``.
        """
        if len(self) < gV.maxSortLength:
            # Build the swap as a pair of random indices in [0, 100].
            swap = [rng.randint(0, 100), rng.randint(0, 100)]
            # Insert at a random position within the current sort.
            self.insert(rng.randint(0, len(self)), swap)
            # Record the mutation abbreviation.
            self.mHistory.append('A')
        else:
            self.mHistory.append('~A')

    # TODO(review): delete/modify/mutate operators were previously sketched
    # here as commented-out drafts (mutation selected via
    # np.random.choice(4, 1, True, p=[m1p, m2p, m3p, m4p]), plus a possible
    # swap-reordering operator). Removed as dead code -- recover the drafts
    # from version control when implementing them.
|
[
"James.Mariotti.Lapointe@gmail.com"
] |
James.Mariotti.Lapointe@gmail.com
|
0935af4f907eff25bfef6de76be625086b7181e3
|
30fd7ecfbcdab2fe37ded162e91676cc8334ddc5
|
/models/user.py
|
979aee5b6350ff382261136c028aad1a487b864c
|
[] |
no_license
|
rifkegribenes/flask-rest-api-jwt-extended
|
31bdef2cf9c51d4b15b7c74c88aa4bad7e209271
|
1f0e71cc8ccbd13df4cda3b651d2e0d67632fdf6
|
refs/heads/master
| 2020-04-22T01:54:42.203592
| 2019-02-12T06:14:23
| 2019-02-12T06:14:23
| 170,029,369
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 710
|
py
|
from db import db
class UserModel(db.Model):
    """SQLAlchemy model for one row of the ``users`` table."""

    __tablename__ = 'users'

    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(80))
    # NOTE(review): the password is stored in plain text -- hash it
    # (e.g. werkzeug.security / passlib) before persisting in production.
    password = db.Column(db.String(80))

    def __init__(self, username, password):
        self.username = username
        self.password = password

    def json(self):
        """Return a JSON-serializable view of the user (password omitted)."""
        return {
            'id': self.id,
            'username': self.username
        }

    def save_to_db(self):
        """Insert or update this row in the database."""
        db.session.add(self)
        db.session.commit()

    def delete_from_db(self):
        """Delete this row from the database."""
        db.session.delete(self)
        db.session.commit()

    @classmethod
    def find_by_username(cls, username):
        """Return the first user with this username, or None."""
        return cls.query.filter_by(username=username).first()

    @classmethod
    def find_by_id(cls, _id):
        """Return the user with this primary key, or None."""
        return cls.query.filter_by(id=_id).first()
|
[
"rifkegribenes@gmail.com"
] |
rifkegribenes@gmail.com
|
c8be00b71fa6f727217ae6a414d4e855d5e87968
|
40b41ca5654c79ba2207c904b6c4d183491d75d9
|
/venv/Scripts/easy_install-3.7-script.py
|
2ac1600323fa99b50a30cbf776e21965f1b6d55d
|
[] |
no_license
|
manueljeffin/Hotel
|
87f6dea9fdb53671e38b508859c707aedf5d36a1
|
fc274f8cf17fcbc64cbb9d85fa801ed5fd9eaac6
|
refs/heads/master
| 2020-04-11T19:02:59.907484
| 2018-12-16T17:09:44
| 2018-12-16T17:09:44
| 162,020,251
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 458
|
py
|
#!C:\Users\Sherine\PycharmProjects\Hotel\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==39.1.0','console_scripts','easy_install-3.7'
# Auto-generated setuptools console-script wrapper; do not edit by hand.
__requires__ = 'setuptools==39.1.0'
import re
import sys

from pkg_resources import load_entry_point

if __name__ == '__main__':
    # Strip the '-script.py'/'.exe' suffix so argv[0] matches the command name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('setuptools==39.1.0', 'console_scripts', 'easy_install-3.7')()
    )
|
[
"manueljeffin@gmail.com"
] |
manueljeffin@gmail.com
|
70e432bd6bd96830b4bc7d0fdb17080b5fbbc983
|
056a1d46538e24c4b6556a41f587db71aeb65476
|
/scripts/convert_to_threejs.py
|
4a6cbe9de153db4ce62d83c52793a56cbfeaaf46
|
[] |
no_license
|
RVelichko/Aerobike
|
aad93f7e2e012f15904b59c5cbb296fdd3d2e26f
|
b1b271848d40af4a808b73ede7d2864ca9709064
|
refs/heads/master
| 2020-04-07T09:43:04.786538
| 2018-11-19T17:13:49
| 2018-11-19T17:13:49
| 158,262,544
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 77,120
|
py
|
#!/usr/bin/python
# @author zfedoran / http://github.com/zfedoran
import os
import sys
import math
import operator
import re
import json
import types
import shutil
# #####################################################
# Globals
# #####################################################
# Conversion switches -- defaults here; presumably overridden by a
# command-line parser later in the script (TODO: confirm against main()).
option_triangulate = True
option_textures = True
option_copy_textures = True
option_prefix = True
option_geometry = False
option_forced_y_up = False
option_default_camera = False
option_default_light = False
option_pretty_print = False

converter = None      # FBX geometry converter, set during scene load -- confirm
inputFolder = ""
outputFolder = ""
# #####################################################
# Pretty Printing Hacks
# #####################################################
# Force an array to be printed fully on a single line
class NoIndent(object):
    """Wrap a list so the custom JSON encoder prints it on a single line."""

    def __init__(self, value, separator = ','):
        self.separator = separator
        self.value = value

    def encode(self):
        """Return '[ a,b,... ]' for the wrapped list, or None when empty."""
        if not self.value:
            return None
        parts = [str(item) for item in self.value]
        return '[ %s ]' % self.separator.join(parts)
# Force an array into chunks rather than printing each element on a new line
class ChunkedIndent(object):
    """Wrap a flat list so the encoder emits it in fixed-size chunks.

    Each chunk is rendered as a '{CHUNK}a, b, ...' string; the marker is
    later removed by a regex pass over the pretty-printed output.
    """

    def __init__(self, value, chunk_size = 15, force_rounding = False):
        self.value = value
        self.size = chunk_size
        self.force_rounding = force_rounding

    def encode(self):
        """Return the list of chunk strings, or None when the list is empty."""
        if not self.value:
            return None
        if self.force_rounding:
            render = lambda f: str(round(f, 6))
        else:
            render = str
        chunks = []
        for start in range(0, len(self.value), self.size):
            piece = self.value[start:start + self.size]
            chunks.append('{CHUNK}' + ', '.join(render(f) for f in piece))
        return chunks
# This custom encoder looks for instances of NoIndent or ChunkedIndent.
# When it finds
class CustomEncoder(json.JSONEncoder):
    """JSON encoder that delegates NoIndent/ChunkedIndent to their encode()."""

    def default(self, obj):
        if isinstance(obj, (NoIndent, ChunkedIndent)):
            return obj.encode()
        return json.JSONEncoder.default(self, obj)
def executeRegexHacks(output_string):
    """Apply textual clean-ups to the pretty-printed JSON output.

    The custom encoder emits NoIndent/ChunkedIndent values as quoted
    strings; these passes strip the quotes and '{CHUNK}' markers, undo the
    '0metadata'/'zchildren' key-ordering hacks, and add blank lines for
    readability. Patterns are raw strings -- the originals relied on
    Python passing through invalid escapes like '\\s', which is a
    SyntaxWarning in modern Python.
    """
    # turn strings of arrays into arrays (remove the double quotes)
    output_string = re.sub(r':\s*\"(\[.*\])\"', r': \1', output_string)
    output_string = re.sub(r'(\n\s*)\"(\[.*\])\"', r'\1\2', output_string)
    output_string = re.sub(r'(\n\s*)\"{CHUNK}(.*)\"', r'\1\2', output_string)
    # replace '0metadata' with metadata
    output_string = re.sub(r'0metadata', r'metadata', output_string)
    # replace 'zchildren' with children
    output_string = re.sub(r'zchildren', r'children', output_string)
    # add an extra newline after '"children": {'
    output_string = re.sub(r'(children.*{\s*\n)', r'\1\n', output_string)
    # add an extra newline after '},'
    output_string = re.sub(r'},\s*\n', r'},\n\n', output_string)
    # add an extra newline after '\n\s*],'
    output_string = re.sub(r'(\n\s*)],\s*\n', r'\1],\n\n', output_string)
    return output_string
# #####################################################
# Object Serializers
# #####################################################
# FbxVector2 is not JSON serializable
def serializeVector2(v, round_vector = False):
    """Convert a 2-component vector into a JSON-serializable form.

    NaN/Inf components are zeroed (JSON supports neither), values are
    optionally rounded to 5 decimals, and the result is wrapped in
    NoIndent when pretty-printing so the pair stays on one line.
    NOTE: sanitizing mutates ``v`` in place (assumes ``v`` is mutable).
    """
    # JSON does not support NaN or Inf
    if math.isnan(v[0]) or math.isinf(v[0]):
        v[0] = 0
    if math.isnan(v[1]) or math.isinf(v[1]):
        v[1] = 0
    if round_vector or option_pretty_print:
        v = (round(v[0], 5), round(v[1], 5))
    if option_pretty_print:
        return NoIndent([v[0], v[1]], ', ')
    else:
        return [v[0], v[1]]
# FbxVector3 is not JSON serializable
def serializeVector3(v, round_vector = False):
    """Convert a 3-component vector into a JSON-serializable form.

    Same contract as serializeVector2, for three components.
    NOTE: sanitizing mutates ``v`` in place (assumes ``v`` is mutable).
    """
    # JSON does not support NaN or Inf
    if math.isnan(v[0]) or math.isinf(v[0]):
        v[0] = 0
    if math.isnan(v[1]) or math.isinf(v[1]):
        v[1] = 0
    if math.isnan(v[2]) or math.isinf(v[2]):
        v[2] = 0
    if round_vector or option_pretty_print:
        v = (round(v[0], 5), round(v[1], 5), round(v[2], 5))
    if option_pretty_print:
        return NoIndent([v[0], v[1], v[2]], ', ')
    else:
        return [v[0], v[1], v[2]]
# FbxVector4 is not JSON serializable
def serializeVector4(v, round_vector = False):
    """Convert a 4-component vector into a JSON-serializable form.

    Same contract as serializeVector2, for four components.
    NOTE: sanitizing mutates ``v`` in place (assumes ``v`` is mutable).
    """
    # JSON does not support NaN or Inf
    if math.isnan(v[0]) or math.isinf(v[0]):
        v[0] = 0
    if math.isnan(v[1]) or math.isinf(v[1]):
        v[1] = 0
    if math.isnan(v[2]) or math.isinf(v[2]):
        v[2] = 0
    if math.isnan(v[3]) or math.isinf(v[3]):
        v[3] = 0
    if round_vector or option_pretty_print:
        v = (round(v[0], 5), round(v[1], 5), round(v[2], 5), round(v[3], 5))
    if option_pretty_print:
        return NoIndent([v[0], v[1], v[2], v[3]], ', ')
    else:
        return [v[0], v[1], v[2], v[3]]
# #####################################################
# Helpers
# #####################################################
def getRadians(v):
    """Convert the first three components of ``v`` from degrees to radians.

    Uses math.radians instead of the hand-rolled (x*pi)/180 expression.
    """
    return (math.radians(v[0]), math.radians(v[1]), math.radians(v[2]))
def getHex(c):
    """Pack an RGB triple of floats in [0, 1] into a single 0xRRGGBB int."""
    red = int(c[0] * 255)
    green = int(c[1] * 255)
    blue = int(c[2] * 255)
    return int((red << 16) + (green << 8) + blue)
def setBit(value, position, on):
    """Return ``value`` with the bit at ``position`` set (on) or cleared."""
    mask = 1 << position
    if on:
        return value | mask
    return value & ~mask
def generate_uvs(uv_layers):
    """Flatten each UV layer into a [u0, v0, u1, v1, ...] list.

    When pretty-printing, each flat layer is wrapped in ChunkedIndent so
    it is emitted in fixed-size chunks.
    """
    layers = []
    for uvs in uv_layers:
        flat = []
        for uv in uvs:
            flat.extend((uv[0], uv[1]))
        layers.append(ChunkedIndent(flat) if option_pretty_print else flat)
    return layers
# #####################################################
# Object Name Helpers
# #####################################################
def hasUniqueName(o, class_id):
    """Return True when no other object of class_id in o's scene shares o's name."""
    scene = o.GetScene()
    name = o.GetName()
    uid = o.GetUniqueID()
    for index in range(scene.GetSrcObjectCount(class_id)):
        other = scene.GetSrcObject(class_id, index)
        if other.GetUniqueID() == uid:
            continue  # skip o itself
        if other.GetName() == name:
            return False
    return True
def getObjectName(o, force_prefix = False):
    """Return the node's export name, optionally prefixed 'Object_<uid>_'.

    The prefix is forced when the name collides with another FbxNode, or
    when the global option_prefix is set.
    """
    if not o:
        return ""
    name = o.GetName()
    uid = o.GetUniqueID()
    if not force_prefix:
        force_prefix = not hasUniqueName(o, FbxNode.ClassId)
    if option_prefix or force_prefix:
        return "Object_%s_" % uid + name
    return name
def getMaterialName(o, force_prefix = False):
    """Return the material's export name, optionally prefixed 'Material_<uid>_'."""
    name = o.GetName()
    uid = o.GetUniqueID()
    if not force_prefix:
        force_prefix = not hasUniqueName(o, FbxSurfaceMaterial.ClassId)
    if option_prefix or force_prefix:
        return "Material_%s_" % uid + name
    return name
def getTextureName(t, force_prefix = False):
    """Return the texture's export name: '[Texture_<uid>_]<basename-or-name>'."""
    if type(t) is FbxFileTexture:
        # File textures are identified by their file basename, sans extension.
        texture_id = os.path.splitext(os.path.basename(t.GetFileName()))[0]
    else:
        texture_id = t.GetName()
    if texture_id == "_empty_":
        texture_id = ""
    if not (option_prefix or force_prefix):
        return texture_id
    prefix = "Texture_%s_" % t.GetUniqueID()
    if not texture_id:
        # Drop the trailing underscore when there is no name to append.
        prefix = prefix[:-1]
    return prefix + texture_id
def getMtlTextureName(texture_name, texture_id, force_prefix = False):
    """Return the .mtl texture name (extension stripped), optionally prefixed."""
    base = os.path.splitext(texture_name)[0]
    if option_prefix or force_prefix:
        return "Texture_%s_" % texture_id + base
    return base
def getPrefixedName(o, prefix):
    """Return '<prefix>_<uid>_<name>' for the given FBX object."""
    return "%s_%s_%s" % (prefix, o.GetUniqueID(), o.GetName())
# #####################################################
# Triangulation
# #####################################################
def triangulate_node_hierarchy(node):
    """Recursively triangulate every triangulatable attribute under node."""
    attribute = node.GetNodeAttribute()
    if attribute:
        triangulatable = (
            FbxNodeAttribute.eMesh,
            FbxNodeAttribute.eNurbs,
            FbxNodeAttribute.eNurbsSurface,
            FbxNodeAttribute.ePatch,
        )
        # `converter` is the module-global geometry converter set at load time.
        if attribute.GetAttributeType() in triangulatable:
            converter.TriangulateInPlace(node)
    for child_index in range(node.GetChildCount()):
        triangulate_node_hierarchy(node.GetChild(child_index))
def triangulate_scene(scene):
    """Triangulate every node hierarchy hanging off the scene's root node."""
    root = scene.GetRootNode()
    if not root:
        return
    for index in range(root.GetChildCount()):
        triangulate_node_hierarchy(root.GetChild(index))
# #####################################################
# Generate Material Object
# #####################################################
def generate_texture_bindings(material_property, material_params):
    """Record the texture bound to one FBX material property.

    Maps the FBX property name (e.g. 'DiffuseColor') to the corresponding
    Three.js parameter name and stores the texture's export id in
    ``material_params`` (mutated in place). Only the last texture found on
    the property ends up in the dict.
    """
    # FBX to Three.js texture types
    binding_types = {
        "DiffuseColor": "map",
        "DiffuseFactor": "diffuseFactor",
        "EmissiveColor": "emissiveMap",
        "EmissiveFactor": "emissiveFactor",
        "AmbientColor": "lightMap", # "ambientMap",
        "AmbientFactor": "ambientFactor",
        "SpecularColor": "specularMap",
        "SpecularFactor": "specularFactor",
        "ShininessExponent": "shininessExponent",
        "NormalMap": "normalMap",
        "Bump": "bumpMap",
        "TransparentColor": "transparentMap",
        "TransparencyFactor": "transparentFactor",
        "ReflectionColor": "reflectionMap",
        "ReflectionFactor": "reflectionFactor",
        "DisplacementColor": "displacementMap",
        "VectorDisplacementColor": "vectorDisplacementMap"
    }
    if material_property.IsValid():
        #Here we have to check if it's layeredtextures, or just textures:
        layered_texture_count = material_property.GetSrcObjectCount(FbxLayeredTexture.ClassId)
        if layered_texture_count > 0:
            # Walk each layered texture and every texture inside it.
            for j in range(layered_texture_count):
                layered_texture = material_property.GetSrcObject(FbxLayeredTexture.ClassId, j)
                texture_count = layered_texture.GetSrcObjectCount(FbxTexture.ClassId)
                for k in range(texture_count):
                    texture = layered_texture.GetSrcObject(FbxTexture.ClassId,k)
                    if texture:
                        texture_id = getTextureName(texture, True)
                        material_params[binding_types[str(material_property.GetName())]] = texture_id
        else:
            # no layered texture simply get on the property
            texture_count = material_property.GetSrcObjectCount(FbxTexture.ClassId)
            for j in range(texture_count):
                texture = material_property.GetSrcObject(FbxTexture.ClassId,j)
                if texture:
                    texture_id = getTextureName(texture, True)
                    material_params[binding_types[str(material_property.GetName())]] = texture_id
def generate_material_object(material):
    """Convert an FBX surface material into a Three.js material descriptor.

    Returns a dict with 'type' and 'parameters' keys. Lambert materials are
    exported as MeshBasicMaterial (the MeshLambertMaterial alternative is
    left commented out), Phong materials as MeshPhongMaterial. Hardware
    shader (HLSL/CGFX) and unknown material classes fall back to a grey
    MeshLambertMaterial so the mesh still renders. Texture bindings are
    added when option_textures is enabled.
    """
    # Get the implementation to see if it's a hardware shader.
    implementation = GetImplementation(material, "ImplementationHLSL")
    implementation_type = "HLSL"
    if not implementation:
        implementation = GetImplementation(material, "ImplementationCGFX")
        implementation_type = "CGFX"

    output = None
    material_params = None
    material_type = None

    if implementation:
        # Hardware shader materials cannot be expressed in the Three.js
        # JSON format; fall through to the default material below.
        print("Shader materials are not supported")

    elif material.GetClassId().Is(FbxSurfaceLambert.ClassId):
        ambient = getHex(material.Ambient.Get())
        diffuse = getHex(material.Diffuse.Get())
        emissive = getHex(material.Emissive.Get())

        # FBX stores a transparency factor; Three.js expects opacity.
        # An opacity of exactly 0 is reset to 1.0 (fully opaque) —
        # presumably to keep accidentally-invisible exports visible.
        opacity = 1.0 - material.TransparencyFactor.Get()
        opacity = 1.0 if opacity == 0 else opacity

        transparent = False
        reflectivity = 1

        material_type = 'MeshBasicMaterial'
        # material_type = 'MeshLambertMaterial'

        material_params = {
            'color' : diffuse,
            'ambient' : ambient,
            'emissive' : emissive,
            'reflectivity' : reflectivity,
            'transparent' : transparent,
            'opacity' : opacity
        }

    elif material.GetClassId().Is(FbxSurfacePhong.ClassId):
        ambient = getHex(material.Ambient.Get())
        diffuse = getHex(material.Diffuse.Get())
        emissive = getHex(material.Emissive.Get())
        specular = getHex(material.Specular.Get())

        opacity = 1.0 - material.TransparencyFactor.Get()
        opacity = 1.0 if opacity == 0 else opacity

        shininess = material.Shininess.Get()
        transparent = False
        reflectivity = 1
        bumpScale = 1

        material_type = 'MeshPhongMaterial'

        material_params = {
            'color' : diffuse,
            'ambient' : ambient,
            'emissive' : emissive,
            'specular' : specular,
            'shininess' : shininess,
            'bumpScale' : bumpScale,
            'reflectivity' : reflectivity,
            'transparent' : transparent,
            'opacity' : opacity
        }

    else:
        # Use the function form of print for consistency with the rest of
        # the file (this branch used the Python 2 statement form).
        print("Unknown type of Material %s" % getMaterialName(material))

    # default to Lambert Material if the current Material type cannot be handled
    if not material_type:
        ambient = getHex((0,0,0))
        diffuse = getHex((0.5,0.5,0.5))
        emissive = getHex((0,0,0))

        opacity = 1
        transparent = False
        reflectivity = 1

        material_type = 'MeshLambertMaterial'

        material_params = {
            'color' : diffuse,
            'ambient' : ambient,
            'emissive' : emissive,
            'reflectivity' : reflectivity,
            'transparent' : transparent,
            'opacity' : opacity
        }

    if option_textures:
        texture_count = FbxLayerElement.sTypeTextureCount()
        for texture_index in range(texture_count):
            material_property = material.FindProperty(FbxLayerElement.sTextureChannelNames(texture_index))
            generate_texture_bindings(material_property, material_params)

    material_params['wireframe'] = False
    material_params['wireframeLinewidth'] = 1

    output = {
        'type' : material_type,
        'parameters' : material_params
    }

    return output
def generate_proxy_material_object(node, material_names):
    """Wrap several material names in a Three.js MeshFaceMaterial descriptor.

    `node` is unused here but kept for interface compatibility with callers.
    """
    return {
        'type' : 'MeshFaceMaterial',
        'parameters' : { 'materials' : material_names }
    }
# #####################################################
# Find Scene Materials
# #####################################################
def extract_materials_from_node(node, material_dict):
    """Register a multi-material proxy for this node's mesh, if needed.

    Individual materials are collected scene-wide elsewhere (see
    generate_material_dict); this only adds a MeshFaceMaterial proxy entry
    when the mesh uses more than one material. Callers only invoke this for
    nodes whose attribute is a mesh (see generate_materials_from_hierarchy).
    """
    name = node.GetName()
    mesh = node.GetNodeAttribute()

    # Rebind `node` to the mesh's owning node (normally the same node).
    node = None
    if mesh:
        node = mesh.GetNode()
        if node:
            material_count = node.GetMaterialCount()

    material_names = []
    for l in range(mesh.GetLayerCount()):
        materials = mesh.GetLayer(l).GetMaterials()
        if materials:
            if materials.GetReferenceMode() == FbxLayerElement.eIndex:
                #Materials are in an undefined external table
                continue
            # NOTE(review): names are appended once per material layer, so a
            # mesh with several material layers repeats its names here.
            for i in range(material_count):
                material = node.GetMaterial(i)
                material_names.append(getMaterialName(material))

    # NOTE(review): material_count is unbound if mesh/node were None above;
    # callers guarantee a mesh attribute, so this holds in practice.
    if material_count > 1:
        proxy_material = generate_proxy_material_object(node, material_names)
        # getMaterialName is passed the node here (not a material) — the
        # proxy is keyed by a node-derived name. TODO confirm intended.
        proxy_name = getMaterialName(node, True)
        material_dict[proxy_name] = proxy_material
def generate_materials_from_hierarchy(node, material_dict):
    """Depth-first walk of the node tree, harvesting materials from mesh nodes."""
    attribute = node.GetNodeAttribute()
    if attribute != None and attribute.GetAttributeType() == FbxNodeAttribute.eMesh:
        extract_materials_from_node(node, material_dict)
    for child_index in range(node.GetChildCount()):
        generate_materials_from_hierarchy(node.GetChild(child_index), material_dict)
def generate_material_dict(scene):
    """Build {material_name: material_object} for every material in the scene.

    Also adds proxy (MeshFaceMaterial) entries for meshes with more than one
    material: Three.js does not support meshes with multiple materials, but
    it does support materials with multiple sub-materials.
    """
    material_dict = {}

    # Convert every surface material found in the scene.
    for index in range(scene.GetSrcObjectCount(FbxSurfaceMaterial.ClassId)):
        material = scene.GetSrcObject(FbxSurfaceMaterial.ClassId, index)
        material_object = generate_material_object(material)
        material_dict[getMaterialName(material)] = material_object

    # Generate material proxies by walking the node hierarchy.
    root = scene.GetRootNode()
    if root:
        for child_index in range(root.GetChildCount()):
            generate_materials_from_hierarchy(root.GetChild(child_index), material_dict)

    return material_dict
# #####################################################
# Generate Texture Object
# #####################################################
def generate_texture_object(texture):
    """Convert an FBX texture into a Three.js texture descriptor.

    'url' holds just the file name; the original (possibly absolute) path
    is preserved under 'fullpath'.
    """
    #TODO: extract more texture properties (wrap modes and UV repeat are
    # currently not exported; repeat is hard-coded to (1, 1)).
    if type(texture) is FbxFileTexture:
        url = texture.GetFileName()
    else:
        url = getTextureName( texture )

    # Strip the directory part, handling both '/' and '\\' separators.
    # If neither is found, rfind returns -1 and the whole url is kept.
    index = url.rfind('/')
    if index == -1:
        index = url.rfind('\\')
    filename = url[index + 1:]

    output = {
        'url': filename,
        'fullpath': url,
        'repeat': serializeVector2( (1,1) ),
        'offset': serializeVector2( texture.GetUVTranslation() ),
        'magFilter': 'LinearFilter',
        'minFilter': 'LinearMipMapLinearFilter',
        'anisotropy': True
    }

    return output
# #####################################################
# Replace Texture input path to output
# #####################################################
def replace_inFolder2OutFolder(url):
    """If url lies under the global inputFolder, re-root it under outputFolder."""
    position = url.find(inputFolder)
    if position == -1:
        return url
    return outputFolder + url[position + len(inputFolder):]
# #####################################################
# Replace Texture output path to input
# #####################################################
def replace_OutFolder2inFolder(url):
    """If url lies under the global outputFolder, re-root it under inputFolder."""
    position = url.find(outputFolder)
    if position == -1:
        return url
    return inputFolder + url[position + len(outputFolder):]
# #####################################################
# Find Scene Textures
# #####################################################
def extract_material_textures(material_property, texture_dict):
    """Collect every texture attached to one FBX material property.

    Textures are stored in texture_dict keyed by their unique texture name.
    Handles both plain textures and layered textures (texture stacks).
    """
    def collect_textures(container):
        # Register each texture attached to `container` (either the
        # property itself or one of its layered textures).
        texture_count = container.GetSrcObjectCount(FbxTexture.ClassId)
        for j in range(texture_count):
            texture = container.GetSrcObject(FbxTexture.ClassId, j)
            if texture:
                texture_object = generate_texture_object(texture)
                texture_name = getTextureName( texture, True )
                texture_dict[texture_name] = texture_object

    if material_property.IsValid():
        # Here we have to check if it's layered textures, or just textures:
        layered_texture_count = material_property.GetSrcObjectCount(FbxLayeredTexture.ClassId)
        if layered_texture_count > 0:
            for i in range(layered_texture_count):
                collect_textures(material_property.GetSrcObject(FbxLayeredTexture.ClassId, i))
        else:
            # no layered texture, simply get textures on the property
            collect_textures(material_property)
def extract_textures_from_node(node, texture_dict):
    """Collect textures from every material attached to this node's mesh."""
    mesh = node.GetNodeAttribute()
    owner = mesh.GetNode()

    # For all materials attached to this mesh...
    material_count = owner.GetSrcObjectCount(FbxSurfaceMaterial.ClassId)
    for material_index in range(material_count):
        material = owner.GetSrcObject(FbxSurfaceMaterial.ClassId, material_index)
        if not material:
            continue
        # ...walk every possible texture channel.
        channel_count = FbxLayerElement.sTypeTextureCount()
        for channel_index in range(channel_count):
            channel_property = material.FindProperty(FbxLayerElement.sTextureChannelNames(channel_index))
            extract_material_textures(channel_property, texture_dict)
def generate_textures_from_hierarchy(node, texture_dict):
    """Depth-first walk of the node tree, harvesting textures from mesh nodes."""
    attribute = node.GetNodeAttribute()
    if attribute != None and attribute.GetAttributeType() == FbxNodeAttribute.eMesh:
        extract_textures_from_node(node, texture_dict)
    for child_index in range(node.GetChildCount()):
        generate_textures_from_hierarchy(node.GetChild(child_index), texture_dict)
def generate_texture_dict(scene):
    """Collect every texture referenced by mesh materials in the scene.

    Returns an empty dict when texture export is disabled (option_textures).
    """
    if not option_textures:
        return {}

    texture_dict = {}
    root = scene.GetRootNode()
    if root:
        for child_index in range(root.GetChildCount()):
            generate_textures_from_hierarchy(root.GetChild(child_index), texture_dict)
    return texture_dict
# #####################################################
# Extract Fbx SDK Mesh Data
# #####################################################
def extract_fbx_vertex_positions(mesh):
    """Return the mesh's control-point positions as a list of [x, y, z].

    When scene nodes are not exported (option_geometry) the positions are
    baked into global space; otherwise only the node's geometric
    (GeometricTranslation/Rotation/Scaling) transform is baked in, and only
    when it is non-identity.
    """
    control_points_count = mesh.GetControlPointsCount()
    control_points = mesh.GetControlPoints()

    # Copy the FBX control points into plain Python [x, y, z] lists.
    positions = []
    for i in range(control_points_count):
        tmp = control_points[i]
        tmp = [tmp[0], tmp[1], tmp[2]]
        positions.append(tmp)

    node = mesh.GetNode()
    if node:
        t = node.GeometricTranslation.Get()
        t = FbxVector4(t[0], t[1], t[2], 1)
        r = node.GeometricRotation.Get()
        r = FbxVector4(r[0], r[1], r[2], 1)
        s = node.GeometricScaling.Get()
        s = FbxVector4(s[0], s[1], s[2], 1)

        # Geometric transform is non-identity when any translation/rotation
        # component is non-zero or any scaling component differs from 1.
        hasGeometricTransform = False
        if t[0] != 0 or t[1] != 0 or t[2] != 0 or \
           r[0] != 0 or r[1] != 0 or r[2] != 0 or \
           s[0] != 1 or s[1] != 1 or s[2] != 1:
            hasGeometricTransform = True

        if hasGeometricTransform:
            geo_transform = FbxMatrix(t,r,s)
        else:
            geo_transform = FbxMatrix()

        transform = None

        if option_geometry:
            # FbxMeshes are local to their node, we need the vertices in global space
            # when scene nodes are not exported
            transform = node.EvaluateGlobalTransform()
            transform = FbxMatrix(transform) * geo_transform
        elif hasGeometricTransform:
            transform = geo_transform

        if transform:
            for i in range(len(positions)):
                v = positions[i]
                position = FbxVector4(v[0], v[1], v[2])
                position = transform.MultNormalize(position)
                positions[i] = [position[0], position[1], position[2]]

    return positions
def extract_fbx_vertex_normals(mesh):
    """Return (normal_values, normal_indices) for the mesh's first normal layer.

    normal_values is a list of [x, y, z] normals; normal_indices is a list
    of per-polygon index lists into normal_values, resolved according to the
    layer element's mapping and reference modes. Empty lists are returned
    when the mesh has no normal layer.
    """
    # eNone             The mapping is undetermined.
    # eByControlPoint   There will be one mapping coordinate for each surface control point/vertex.
    # eByPolygonVertex  There will be one mapping coordinate for each vertex, for every polygon of which it is a part. This means that a vertex will have as many mapping coordinates as polygons of which it is a part.
    # eByPolygon        There can be only one mapping coordinate for the whole polygon.
    # eByEdge           There will be one mapping coordinate for each unique edge in the mesh. This is meant to be used with smoothing layer elements.
    # eAllSame          There can be only one mapping coordinate for the whole surface.
    layered_normal_indices = []
    layered_normal_values = []

    poly_count = mesh.GetPolygonCount()
    control_points = mesh.GetControlPoints()

    for l in range(mesh.GetLayerCount()):
        mesh_normals = mesh.GetLayer(l).GetNormals()
        if not mesh_normals:
            continue

        normals_array = mesh_normals.GetDirectArray()
        normals_count = normals_array.GetCount()
        if normals_count == 0:
            continue

        normal_indices = []
        normal_values = []

        # values: copy the direct array into plain [x, y, z] lists
        for i in range(normals_count):
            normal = normals_array.GetAt(i)
            normal = [normal[0], normal[1], normal[2]]
            normal_values.append(normal)

        node = mesh.GetNode()
        if node:
            t = node.GeometricTranslation.Get()
            t = FbxVector4(t[0], t[1], t[2], 1)
            r = node.GeometricRotation.Get()
            r = FbxVector4(r[0], r[1], r[2], 1)
            s = node.GeometricScaling.Get()
            s = FbxVector4(s[0], s[1], s[2], 1)

            hasGeometricTransform = False
            if t[0] != 0 or t[1] != 0 or t[2] != 0 or \
               r[0] != 0 or r[1] != 0 or r[2] != 0 or \
               s[0] != 1 or s[1] != 1 or s[2] != 1:
                hasGeometricTransform = True

            if hasGeometricTransform:
                geo_transform = FbxMatrix(t,r,s)
            else:
                geo_transform = FbxMatrix()

            transform = None

            if option_geometry:
                # FbxMeshes are local to their node, we need the vertices in global space
                # when scene nodes are not exported
                transform = node.EvaluateGlobalTransform()
                transform = FbxMatrix(transform) * geo_transform
            elif hasGeometricTransform:
                transform = geo_transform

            if transform:
                # Zero the translation row so normals are only rotated/scaled,
                # then re-normalize after the transform.
                t = FbxVector4(0,0,0,1)
                transform.SetRow(3, t)

                for i in range(len(normal_values)):
                    n = normal_values[i]
                    normal = FbxVector4(n[0], n[1], n[2])
                    normal = transform.MultNormalize(normal)
                    normal.Normalize()
                    normal = [normal[0], normal[1], normal[2]]
                    normal_values[i] = normal

        # indices: resolve one normal index per polygon vertex
        vertexId = 0
        for p in range(poly_count):
            poly_size = mesh.GetPolygonSize(p)
            poly_normals = []

            for v in range(poly_size):
                control_point_index = mesh.GetPolygonVertex(p, v)

                # mapping mode is by control points. The mesh should be smooth and soft.
                # we can get normals by retrieving each control point
                if mesh_normals.GetMappingMode() == FbxLayerElement.eByControlPoint:
                    # reference mode is direct, the normal index is same as vertex index.
                    # get normals by the index of control vertex
                    if mesh_normals.GetReferenceMode() == FbxLayerElement.eDirect:
                        poly_normals.append(control_point_index)
                    elif mesh_normals.GetReferenceMode() == FbxLayerElement.eIndexToDirect:
                        index = mesh_normals.GetIndexArray().GetAt(control_point_index)
                        poly_normals.append(index)
                # mapping mode is by polygon-vertex.
                # we can get normals by retrieving polygon-vertex.
                elif mesh_normals.GetMappingMode() == FbxLayerElement.eByPolygonVertex:
                    if mesh_normals.GetReferenceMode() == FbxLayerElement.eDirect:
                        poly_normals.append(vertexId)
                    elif mesh_normals.GetReferenceMode() == FbxLayerElement.eIndexToDirect:
                        index = mesh_normals.GetIndexArray().GetAt(vertexId)
                        poly_normals.append(index)
                elif mesh_normals.GetMappingMode() == FbxLayerElement.eByPolygon or \
                     mesh_normals.GetMappingMode() == FbxLayerElement.eAllSame or \
                     mesh_normals.GetMappingMode() == FbxLayerElement.eNone:
                    print("unsupported normal mapping mode for polygon vertex")

                vertexId += 1
            normal_indices.append(poly_normals)

        layered_normal_values.append(normal_values)
        layered_normal_indices.append(normal_indices)

    normal_values = []
    normal_indices = []

    # Three.js only supports one layer of normals
    if len(layered_normal_values) > 0:
        normal_values = layered_normal_values[0]
        normal_indices = layered_normal_indices[0]

    return normal_values, normal_indices
def extract_fbx_vertex_colors(mesh):
    """Return (color_values, color_indices) for the mesh's first color layer.

    color_values is a list of [r, g, b, a] colors; color_indices is a list
    of per-polygon index lists into color_values, resolved according to the
    layer element's mapping and reference modes. Empty lists are returned
    when the mesh has no vertex-color layer.
    """
    # eNone             The mapping is undetermined.
    # eByControlPoint   There will be one mapping coordinate for each surface control point/vertex.
    # eByPolygonVertex  There will be one mapping coordinate for each vertex, for every polygon of which it is a part. This means that a vertex will have as many mapping coordinates as polygons of which it is a part.
    # eByPolygon        There can be only one mapping coordinate for the whole polygon.
    # eByEdge           There will be one mapping coordinate for each unique edge in the mesh. This is meant to be used with smoothing layer elements.
    # eAllSame          There can be only one mapping coordinate for the whole surface.
    layered_color_indices = []
    layered_color_values = []

    poly_count = mesh.GetPolygonCount()
    control_points = mesh.GetControlPoints()

    for l in range(mesh.GetLayerCount()):
        mesh_colors = mesh.GetLayer(l).GetVertexColors()
        if not mesh_colors:
            continue

        colors_array = mesh_colors.GetDirectArray()
        colors_count = colors_array.GetCount()
        if colors_count == 0:
            continue

        color_indices = []
        color_values = []

        # values: copy the direct array into plain [r, g, b, a] lists
        for i in range(colors_count):
            color = colors_array.GetAt(i)
            color = [color.mRed, color.mGreen, color.mBlue, color.mAlpha]
            color_values.append(color)

        # indices: resolve one color index per polygon vertex
        vertexId = 0
        for p in range(poly_count):
            poly_size = mesh.GetPolygonSize(p)
            poly_colors = []

            for v in range(poly_size):
                control_point_index = mesh.GetPolygonVertex(p, v)

                if mesh_colors.GetMappingMode() == FbxLayerElement.eByControlPoint:
                    if mesh_colors.GetReferenceMode() == FbxLayerElement.eDirect:
                        poly_colors.append(control_point_index)
                    elif mesh_colors.GetReferenceMode() == FbxLayerElement.eIndexToDirect:
                        index = mesh_colors.GetIndexArray().GetAt(control_point_index)
                        poly_colors.append(index)
                elif mesh_colors.GetMappingMode() == FbxLayerElement.eByPolygonVertex:
                    if mesh_colors.GetReferenceMode() == FbxLayerElement.eDirect:
                        poly_colors.append(vertexId)
                    elif mesh_colors.GetReferenceMode() == FbxLayerElement.eIndexToDirect:
                        index = mesh_colors.GetIndexArray().GetAt(vertexId)
                        poly_colors.append(index)
                elif mesh_colors.GetMappingMode() == FbxLayerElement.eByPolygon or \
                     mesh_colors.GetMappingMode() == FbxLayerElement.eAllSame or \
                     mesh_colors.GetMappingMode() == FbxLayerElement.eNone:
                    print("unsupported color mapping mode for polygon vertex")

                vertexId += 1
            color_indices.append(poly_colors)

        layered_color_indices.append( color_indices )
        layered_color_values.append( color_values )

    color_values = []
    color_indices = []

    # Three.js only supports one layer of colors
    if len(layered_color_values) > 0:
        color_values = layered_color_values[0]
        color_indices = layered_color_indices[0]

    '''
    # The Fbx SDK defaults mesh.Color to (0.8, 0.8, 0.8)
    # This causes most models to receive incorrect vertex colors
    if len(color_values) == 0:
        color = mesh.Color.Get()
        color_values = [[color[0], color[1], color[2]]]
        color_indices = []
        for p in range(poly_count):
            poly_size = mesh.GetPolygonSize(p)
            color_indices.append([0] * poly_size)
    '''

    return color_values, color_indices
def extract_fbx_vertex_uvs(mesh):
    """Return (layered_uv_values, layered_uv_indices) for ALL UV layers.

    Unlike normals and colors, every UV layer is returned (one entry per
    layer), since multiple UV channels are supported downstream.
    """
    # eNone             The mapping is undetermined.
    # eByControlPoint   There will be one mapping coordinate for each surface control point/vertex.
    # eByPolygonVertex  There will be one mapping coordinate for each vertex, for every polygon of which it is a part. This means that a vertex will have as many mapping coordinates as polygons of which it is a part.
    # eByPolygon        There can be only one mapping coordinate for the whole polygon.
    # eByEdge           There will be one mapping coordinate for each unique edge in the mesh. This is meant to be used with smoothing layer elements.
    # eAllSame          There can be only one mapping coordinate for the whole surface.
    layered_uv_indices = []
    layered_uv_values = []

    poly_count = mesh.GetPolygonCount()
    control_points = mesh.GetControlPoints()

    for l in range(mesh.GetLayerCount()):
        mesh_uvs = mesh.GetLayer(l).GetUVs()
        if not mesh_uvs:
            continue

        uvs_array = mesh_uvs.GetDirectArray()
        uvs_count = uvs_array.GetCount()
        if uvs_count == 0:
            continue

        uv_indices = []
        uv_values = []

        # values: copy the direct array into plain [u, v] lists
        for i in range(uvs_count):
            uv = uvs_array.GetAt(i)
            uv = [uv[0], uv[1]]
            uv_values.append(uv)

        # indices: resolve one uv index per polygon vertex
        vertexId = 0
        for p in range(poly_count):
            poly_size = mesh.GetPolygonSize(p)
            poly_uvs = []

            for v in range(poly_size):
                control_point_index = mesh.GetPolygonVertex(p, v)

                if mesh_uvs.GetMappingMode() == FbxLayerElement.eByControlPoint:
                    if mesh_uvs.GetReferenceMode() == FbxLayerElement.eDirect:
                        poly_uvs.append(control_point_index)
                    elif mesh_uvs.GetReferenceMode() == FbxLayerElement.eIndexToDirect:
                        index = mesh_uvs.GetIndexArray().GetAt(control_point_index)
                        poly_uvs.append(index)
                elif mesh_uvs.GetMappingMode() == FbxLayerElement.eByPolygonVertex:
                    # NOTE(review): the index array is consulted for both
                    # reference modes here (including eDirect) — TODO confirm
                    # this matches the SDK's expectations for eDirect.
                    uv_texture_index = mesh_uvs.GetIndexArray().GetAt(vertexId)
                    if mesh_uvs.GetReferenceMode() == FbxLayerElement.eDirect or \
                       mesh_uvs.GetReferenceMode() == FbxLayerElement.eIndexToDirect:
                        poly_uvs.append(uv_texture_index)
                elif mesh_uvs.GetMappingMode() == FbxLayerElement.eByPolygon or \
                     mesh_uvs.GetMappingMode() == FbxLayerElement.eAllSame or \
                     mesh_uvs.GetMappingMode() == FbxLayerElement.eNone:
                    print("unsupported uv mapping mode for polygon vertex")

                vertexId += 1
            uv_indices.append(poly_uvs)

        layered_uv_values.append(uv_values)
        layered_uv_indices.append(uv_indices)

    return layered_uv_values, layered_uv_indices
# #####################################################
# Process Mesh Geometry
# #####################################################
def generate_normal_key(normal):
    """Quantize a normal to 6 decimals so near-equal normals share a dict key."""
    return tuple(round(component, 6) for component in normal[:3])
def generate_color_key(color):
    """Colors are keyed by their packed hex value (see getHex)."""
    key = getHex(color)
    return key
def generate_uv_key(uv):
    """Quantize a UV pair to 6 decimals for use as a dictionary key."""
    return tuple(round(component, 6) for component in uv[:2])
def append_non_duplicate_uvs(source_uvs, dest_uvs, counts):
    """Merge per-layer UVs into dest_uvs, assigning fresh indices to new UVs.

    dest_uvs is a list of {uv_key: index} dicts (one per UV layer) and counts
    holds the next free index for each layer; both are grown in place when
    source_uvs has more layers. Returns the updated counts list.
    """
    for layer_index, source_uv_layer in enumerate(source_uvs):
        # Create the destination layer on first encounter.
        if len(dest_uvs) <= layer_index:
            dest_uvs.append({})
            counts.append(0)

        dest_uv_layer = dest_uvs[layer_index]
        count = counts[layer_index]

        for uv in source_uv_layer:
            key = generate_uv_key(uv)
            if key not in dest_uv_layer:
                dest_uv_layer[key] = count
                count += 1

        counts[layer_index] = count

    return counts
def generate_unique_normals_dictionary(mesh_list):
    """Map each distinct (rounded) normal across all meshes to a unique index."""
    normals_dictionary = {}

    # Merge meshes, remove duplicate data
    for mesh in mesh_list:
        normal_values, _normal_indices = extract_fbx_vertex_normals(mesh)
        for normal in normal_values:
            key = generate_normal_key(normal)
            if key not in normals_dictionary:
                normals_dictionary[key] = len(normals_dictionary)

    return normals_dictionary
def generate_unique_colors_dictionary(mesh_list):
    """Map each distinct color (by hex key) across all meshes to a unique index."""
    colors_dictionary = {}

    # Merge meshes, remove duplicate data
    for mesh in mesh_list:
        color_values, _color_indices = extract_fbx_vertex_colors(mesh)
        for color in color_values:
            key = generate_color_key(color)
            if key not in colors_dictionary:
                colors_dictionary[key] = len(colors_dictionary)

    return colors_dictionary
def generate_unique_uvs_dictionary_layers(mesh_list):
    """Build per-layer {uv_key: index} dictionaries across all meshes."""
    uvs_dictionary_layers = []
    nuvs_list = []

    # Merge meshes, remove duplicate data
    for mesh in mesh_list:
        uv_values, _uv_indices = extract_fbx_vertex_uvs(mesh)
        if uv_values:
            nuvs_list = append_non_duplicate_uvs(uv_values, uvs_dictionary_layers, nuvs_list)

    return uvs_dictionary_layers
def generate_normals_from_dictionary(normals_dictionary):
    """Return normal keys ordered by their assigned index."""
    return sorted(normals_dictionary, key=normals_dictionary.get)
def generate_colors_from_dictionary(colors_dictionary):
    """Return color keys (packed hex values) ordered by their assigned index."""
    return sorted(colors_dictionary, key=colors_dictionary.get)
def generate_uvs_from_dictionary_layers(uvs_dictionary_layers):
    """Return, for each UV layer, its UV keys ordered by their assigned index."""
    return [sorted(layer, key=layer.get) for layer in uvs_dictionary_layers]
def generate_normal_indices_for_poly(poly_index, mesh_normal_values, mesh_normal_indices, normals_to_indices):
    """Remap one polygon's per-vertex normal indices into the merged dictionary."""
    if not mesh_normal_indices:
        return []

    remapped = []
    for normal_index in mesh_normal_indices[poly_index]:
        key = generate_normal_key(mesh_normal_values[normal_index])
        remapped.append(normals_to_indices[key])
    return remapped
def generate_color_indices_for_poly(poly_index, mesh_color_values, mesh_color_indices, colors_to_indices):
    """Remap one polygon's per-vertex color indices into the merged dictionary."""
    if not mesh_color_indices:
        return []

    remapped = []
    for color_index in mesh_color_indices[poly_index]:
        key = generate_color_key(mesh_color_values[color_index])
        remapped.append(colors_to_indices[key])
    return remapped
def generate_uv_indices_for_poly(poly_index, mesh_uv_values, mesh_uv_indices, uvs_to_indices):
    """Remap one polygon's per-vertex uv indices into the merged dictionary."""
    if not mesh_uv_indices:
        return []

    remapped = []
    for uv_index in mesh_uv_indices[poly_index]:
        key = generate_uv_key(mesh_uv_values[uv_index])
        remapped.append(uvs_to_indices[key])
    return remapped
def process_mesh_vertices(mesh_list):
    """Concatenate vertex positions of all meshes.

    Returns (vertices, vertex_offset_list) where vertex_offset_list[i] is the
    index of mesh i's first vertex within the combined list (plus a final
    total-count entry).
    """
    vertices = []
    vertex_offset_list = [0]

    for mesh in mesh_list:
        mesh_vertices = extract_fbx_vertex_positions(mesh)
        vertices.extend(mesh_vertices)
        vertex_offset_list.append(len(vertices))

    return vertices, vertex_offset_list
def process_mesh_materials(mesh_list):
    """Collect the materials of every mesh.

    Returns (materials_list, material_offset_list) where
    material_offset_list[i] is the index of mesh i's first material within
    the combined list. NOTE: meshes with zero materials contribute no offset
    entry at all — callers must bounds-check (see process_mesh_polygons).
    """
    material_offset = 0
    material_offset_list = [0]
    materials_list = []

    #TODO: remove duplicate mesh references
    for mesh in mesh_list:
        node = mesh.GetNode()
        material_count = node.GetMaterialCount()

        if material_count > 0:
            # NOTE(review): materials are appended once per material layer,
            # so a mesh with several layers repeats its materials here while
            # the offset only advances by material_count.
            for l in range(mesh.GetLayerCount()):
                materials = mesh.GetLayer(l).GetMaterials()
                if materials:
                    if materials.GetReferenceMode() == FbxLayerElement.eIndex:
                        #Materials are in an undefined external table
                        continue
                    for i in range(material_count):
                        material = node.GetMaterial(i)
                        materials_list.append( material )

            material_offset += material_count
            material_offset_list.append(material_offset)

    return materials_list, material_offset_list
def process_mesh_polygons(mesh_list, normals_to_indices, colors_to_indices, uvs_to_indices_list, vertex_offset_list, material_offset_list):
    """Serialize every polygon of every mesh into Three.js face records.

    Polygon attribute indices are remapped into the merged dictionaries, and
    polygons with more than 4 vertices are fan-triangulated. Returns the flat
    list of face records produced by generate_mesh_face.
    """
    faces = []
    for mesh_index in range(len(mesh_list)):
        mesh = mesh_list[mesh_index]

        # A negative component in the node's local scale mirrors the mesh,
        # so the winding order must be flipped to keep faces front-facing.
        flipWindingOrder = False
        node = mesh.GetNode()
        if node:
            local_scale = node.EvaluateLocalScaling()
            if local_scale[0] < 0 or local_scale[1] < 0 or local_scale[2] < 0:
                flipWindingOrder = True

        poly_count = mesh.GetPolygonCount()
        control_points = mesh.GetControlPoints()

        normal_values, normal_indices = extract_fbx_vertex_normals(mesh)
        color_values, color_indices = extract_fbx_vertex_colors(mesh)
        uv_values_layers, uv_indices_layers = extract_fbx_vertex_uvs(mesh)

        for poly_index in range(poly_count):
            poly_size = mesh.GetPolygonSize(poly_index)

            # Remap this polygon's attribute indices into the merged,
            # deduplicated dictionaries.
            face_normals = generate_normal_indices_for_poly(poly_index, normal_values, normal_indices, normals_to_indices)
            face_colors = generate_color_indices_for_poly(poly_index, color_values, color_indices, colors_to_indices)

            face_uv_layers = []
            for l in range(len(uv_indices_layers)):
                uv_values = uv_values_layers[l]
                uv_indices = uv_indices_layers[l]
                face_uv_indices = generate_uv_indices_for_poly(poly_index, uv_values, uv_indices, uvs_to_indices_list[l])
                face_uv_layers.append(face_uv_indices)

            face_vertices = []
            for vertex_index in range(poly_size):
                control_point_index = mesh.GetPolygonVertex(poly_index, vertex_index)
                face_vertices.append(control_point_index)

            #TODO: assign a default material to any mesh without one
            # (meshes without materials get no entry in material_offset_list,
            # hence the bounds check).
            if len(material_offset_list) <= mesh_index:
                material_offset = 0
            else:
                material_offset = material_offset_list[mesh_index]

            vertex_offset = vertex_offset_list[mesh_index]

            if poly_size > 4:
                # Fan-triangulate n-gons: (v0, v[i+1], v[i+2]) for each i.
                new_face_normals = []
                new_face_colors = []
                new_face_uv_layers = []

                for i in range(poly_size - 2):
                    new_face_vertices = [face_vertices[0], face_vertices[i+1], face_vertices[i+2]]

                    if len(face_normals):
                        new_face_normals = [face_normals[0], face_normals[i+1], face_normals[i+2]]
                    if len(face_colors):
                        new_face_colors = [face_colors[0], face_colors[i+1], face_colors[i+2]]
                    if len(face_uv_layers):
                        new_face_uv_layers = []
                        for layer in face_uv_layers:
                            new_face_uv_layers.append([layer[0], layer[i+1], layer[i+2]])

                    face = generate_mesh_face(mesh,
                        poly_index,
                        new_face_vertices,
                        new_face_normals,
                        new_face_colors,
                        new_face_uv_layers,
                        vertex_offset,
                        material_offset,
                        flipWindingOrder)
                    faces.append(face)
            else:
                # Triangles and quads are emitted as-is.
                face = generate_mesh_face(mesh,
                    poly_index,
                    face_vertices,
                    face_normals,
                    face_colors,
                    face_uv_layers,
                    vertex_offset,
                    material_offset,
                    flipWindingOrder)
                faces.append(face)

    return faces
def generate_mesh_face(mesh, polygon_index, vertex_indices, normals, colors, uv_layers, vertex_offset, material_offset, flipOrder):
    """Serialize one triangle or quad into a flat Three.js (format 3) face record.

    The record starts with a bitmask face type, followed by vertex indices,
    then the optional sections (material index, per-vertex uvs/normals/colors)
    whose presence is flagged in the bitmask. When flipOrder is set (negative
    scale), the winding is reversed consistently across vertices, normals,
    colors and uvs.
    """
    isTriangle = ( len(vertex_indices) == 3 )
    nVertices = 3 if isTriangle else 4

    # A face references a material only if some layer of the mesh has
    # a material layer element.
    hasMaterial = False
    for l in range(mesh.GetLayerCount()):
        materials = mesh.GetLayer(l).GetMaterials()
        if materials:
            hasMaterial = True
            break

    # Per-face uvs/normals/colors are never emitted by this exporter; only
    # the per-face-vertex variants are used.
    hasFaceUvs = False
    hasFaceVertexUvs = len(uv_layers) > 0
    hasFaceNormals = False
    hasFaceVertexNormals = len(normals) > 0
    hasFaceColors = False
    hasFaceVertexColors = len(colors) > 0

    faceType = 0
    faceType = setBit(faceType, 0, not isTriangle)
    faceType = setBit(faceType, 1, hasMaterial)
    faceType = setBit(faceType, 2, hasFaceUvs)
    faceType = setBit(faceType, 3, hasFaceVertexUvs)
    faceType = setBit(faceType, 4, hasFaceNormals)
    faceType = setBit(faceType, 5, hasFaceVertexNormals)
    faceType = setBit(faceType, 6, hasFaceColors)
    faceType = setBit(faceType, 7, hasFaceVertexColors)

    faceData = []

    # order is important, must match order in JSONLoader
    # face type
    # vertex indices
    # material index
    # face uvs index
    # face vertex uvs indices
    # face color index
    # face vertex colors indices
    faceData.append(faceType)

    if flipOrder:
        if nVertices == 3:
            # Reverse triangle winding: [0, 1, 2] -> [0, 2, 1].
            vertex_indices = [vertex_indices[0], vertex_indices[2], vertex_indices[1]]
            if hasFaceVertexNormals:
                normals = [normals[0], normals[2], normals[1]]
            if hasFaceVertexColors:
                colors = [colors[0], colors[2], colors[1]]
            if hasFaceVertexUvs:
                tmp = []
                for polygon_uvs in uv_layers:
                    tmp.append([polygon_uvs[0], polygon_uvs[2], polygon_uvs[1]])
                uv_layers = tmp
        else:
            # Reverse quad winding: [0, 1, 2, 3] -> [0, 3, 2, 1].
            vertex_indices = [vertex_indices[0], vertex_indices[3], vertex_indices[2], vertex_indices[1]]
            if hasFaceVertexNormals:
                normals = [normals[0], normals[3], normals[2], normals[1]]
            if hasFaceVertexColors:
                colors = [colors[0], colors[3], colors[2], colors[1]]
            if hasFaceVertexUvs:
                tmp = []
                for polygon_uvs in uv_layers:
                    # BUGFIX: the last element was polygon_uvs[3] (duplicated),
                    # which desynchronized quad uvs from the [0, 3, 2, 1]
                    # reversal applied to vertices/normals/colors above.
                    tmp.append([polygon_uvs[0], polygon_uvs[3], polygon_uvs[2], polygon_uvs[1]])
                uv_layers = tmp

    for i in range(nVertices):
        index = vertex_indices[i] + vertex_offset
        faceData.append(index)

    if hasMaterial:
        material_id = 0
        for l in range(mesh.GetLayerCount()):
            materials = mesh.GetLayer(l).GetMaterials()
            if materials:
                material_id = materials.GetIndexArray().GetAt(polygon_index)
                break
        material_id += material_offset
        faceData.append( material_id )

    if hasFaceVertexUvs:
        for polygon_uvs in uv_layers:
            for i in range(nVertices):
                index = polygon_uvs[i]
                faceData.append(index)

    if hasFaceVertexNormals:
        for i in range(nVertices):
            index = normals[i]
            faceData.append(index)

    if hasFaceVertexColors:
        for i in range(nVertices):
            index = colors[i]
            faceData.append(index)

    return faceData
# #####################################################
# Generate Mesh Object (for scene output format)
# #####################################################
def generate_scene_output(node):
    """Build the Three.js geometry dict for a single mesh node (scene output).

    Mirrors generate_non_scene_output but operates on one node's mesh; the
    single-element mesh_list keeps both code paths identical downstream.
    """
    mesh = node.GetNodeAttribute()

    # This is done in order to keep the scene output and non-scene output code DRY
    mesh_list = [ mesh ]

    # Extract the mesh data into arrays
    vertices, vertex_offsets = process_mesh_vertices(mesh_list)
    materials, material_offsets = process_mesh_materials(mesh_list)

    normals_to_indices = generate_unique_normals_dictionary(mesh_list)
    colors_to_indices = generate_unique_colors_dictionary(mesh_list)
    uvs_to_indices_list = generate_unique_uvs_dictionary_layers(mesh_list)

    normal_values = generate_normals_from_dictionary(normals_to_indices)
    color_values = generate_colors_from_dictionary(colors_to_indices)
    uv_values = generate_uvs_from_dictionary_layers(uvs_to_indices_list)

    # Generate mesh faces for the Three.js file format
    faces = process_mesh_polygons(mesh_list,
        normals_to_indices,
        colors_to_indices,
        uvs_to_indices_list,
        vertex_offsets,
        material_offsets)

    # Generate counts for uvs, vertices, normals, colors, and faces
    nuvs = []
    for layer_index, uvs in enumerate(uv_values):
        nuvs.append(str(len(uvs)))

    nvertices = len(vertices)
    nnormals = len(normal_values)
    ncolors = len(color_values)
    nfaces = len(faces)

    # Flatten the arrays, currently they are in the form of [[0, 1, 2], [3, 4, 5], ...]
    vertices = [val for v in vertices for val in v]
    normal_values = [val for n in normal_values for val in n]
    color_values = [c for c in color_values]
    faces = [val for f in faces for val in f]
    uv_values = generate_uvs(uv_values)

    # Disable automatic json indenting when pretty printing for the arrays
    if option_pretty_print:
        nuvs = NoIndent(nuvs)
        vertices = ChunkedIndent(vertices, 15, True)
        normal_values = ChunkedIndent(normal_values, 15, True)
        color_values = ChunkedIndent(color_values, 15)
        faces = ChunkedIndent(faces, 30)

    metadata = {
        'vertices' : nvertices,
        'normals' : nnormals,
        'colors' : ncolors,
        'faces' : nfaces,
        'uvs' : nuvs
    }

    output = {
        'scale' : 1,
        'materials' : [],
        'vertices' : vertices,
        'normals' : [] if nnormals <= 0 else normal_values,
        'colors' : [] if ncolors <= 0 else color_values,
        'uvs' : uv_values,
        'faces' : faces
    }

    if option_pretty_print:
        # '0metadata' — presumably the '0' prefix makes metadata sort first
        # when the pretty printer orders keys; confirm against the printer.
        output['0metadata'] = metadata
    else:
        output['metadata'] = metadata

    return output
# #####################################################
# Generate Mesh Object (for non-scene output)
# #####################################################
def generate_non_scene_output(scene):
    """Serialize *scene* into a single Three.js geometry (format v3) dict.

    All meshes in the scene are pooled into shared vertex/normal/color/uv
    arrays; the *_to_indices dictionaries deduplicate values so faces can
    reference them by index.
    """
    mesh_list = generate_mesh_list(scene)
    # Extract the mesh data into arrays
    vertices, vertex_offsets = process_mesh_vertices(mesh_list)
    materials, material_offsets = process_mesh_materials(mesh_list)
    normals_to_indices = generate_unique_normals_dictionary(mesh_list)
    colors_to_indices = generate_unique_colors_dictionary(mesh_list)
    uvs_to_indices_list = generate_unique_uvs_dictionary_layers(mesh_list)
    normal_values = generate_normals_from_dictionary(normals_to_indices)
    color_values = generate_colors_from_dictionary(colors_to_indices)
    uv_values = generate_uvs_from_dictionary_layers(uvs_to_indices_list)
    # Generate mesh faces for the Three.js file format
    faces = process_mesh_polygons(mesh_list,
                                  normals_to_indices,
                                  colors_to_indices,
                                  uvs_to_indices_list,
                                  vertex_offsets,
                                  material_offsets)
    # Generate counts for uvs, vertices, normals, colors, and faces
    nuvs = []
    for layer_index, uvs in enumerate(uv_values):
        nuvs.append(str(len(uvs)))
    nvertices = len(vertices)
    nnormals = len(normal_values)
    ncolors = len(color_values)
    nfaces = len(faces)
    # Flatten the arrays, currently they are in the form of [[0, 1, 2], [3, 4, 5], ...]
    vertices = [val for v in vertices for val in v]
    normal_values = [val for n in normal_values for val in n]
    # NOTE(review): colors are only shallow-copied, not flattened — presumably
    # already scalar values; confirm against generate_colors_from_dictionary.
    color_values = [c for c in color_values]
    faces = [val for f in faces for val in f]
    uv_values = generate_uvs(uv_values)
    # Disable json indenting when pretty printing for the arrays
    if option_pretty_print:
        nuvs = NoIndent(nuvs)
        vertices = NoIndent(vertices)
        normal_values = NoIndent(normal_values)
        color_values = NoIndent(color_values)
        faces = NoIndent(faces)
    metadata = {
        'formatVersion' : 3,
        'type' : 'geometry',
        'generatedBy' : 'convert-to-threejs.py',
        'vertices' : nvertices,
        'normals' : nnormals,
        'colors' : ncolors,
        'faces' : nfaces,
        'uvs' : nuvs
    }
    output = {
        'scale' : 1,
        'materials' : [],
        'vertices' : vertices,
        'normals' : [] if nnormals <= 0 else normal_values,
        'colors' : [] if ncolors <= 0 else color_values,
        'uvs' : uv_values,
        'faces' : faces
    }
    # '0metadata' sorts first under json sort_keys=True; the leading '0' is
    # presumably stripped later by the pretty-print regex hacks — confirm.
    if option_pretty_print:
        output['0metadata'] = metadata
    else:
        output['metadata'] = metadata
    return output
def generate_mesh_list_from_hierarchy(node, mesh_list):
    """Depth-first walk appending every renderable geometry attribute to *mesh_list*.

    Nurbs/patch surfaces are triangulated in place first so that they can be
    exported as plain mesh geometry.
    """
    attribute = node.GetNodeAttribute()
    if attribute is not None:
        attribute_type = attribute.GetAttributeType()
        if attribute_type in (FbxNodeAttribute.eMesh,
                              FbxNodeAttribute.eNurbs,
                              FbxNodeAttribute.eNurbsSurface,
                              FbxNodeAttribute.ePatch):
            if attribute_type != FbxNodeAttribute.eMesh:
                converter.TriangulateInPlace(node)
            # Re-fetch the attribute: triangulation replaces it with a mesh.
            mesh_list.append(node.GetNodeAttribute())
    for i in range(node.GetChildCount()):
        generate_mesh_list_from_hierarchy(node.GetChild(i), mesh_list)
def generate_mesh_list(scene):
    """Collect every triangulatable geometry attribute found under the scene root."""
    collected = []
    root = scene.GetRootNode()
    if root:
        for child_index in range(root.GetChildCount()):
            generate_mesh_list_from_hierarchy(root.GetChild(child_index), collected)
    return collected
# #####################################################
# Generate Embed Objects
# #####################################################
def generate_embed_dict_from_hierarchy(node, embed_dict):
    """Depth-first walk storing an embedded-geometry payload per geometry node.

    Nurbs/patch surfaces are triangulated in place before serialization so
    they export as mesh data. Keys are the node's 'Embed'-prefixed names.
    """
    attribute = node.GetNodeAttribute()
    if attribute is not None:
        attribute_type = attribute.GetAttributeType()
        if attribute_type in (FbxNodeAttribute.eMesh,
                              FbxNodeAttribute.eNurbs,
                              FbxNodeAttribute.eNurbsSurface,
                              FbxNodeAttribute.ePatch):
            if attribute_type != FbxNodeAttribute.eMesh:
                converter.TriangulateInPlace(node)
            embed_object = generate_scene_output(node)
            embed_name = getPrefixedName(node, 'Embed')
            embed_dict[embed_name] = embed_object
    for i in range(node.GetChildCount()):
        generate_embed_dict_from_hierarchy(node.GetChild(i), embed_dict)
def generate_embed_dict(scene):
    """Map embed names to serialized geometry payloads for the whole scene."""
    embeds = {}
    root = scene.GetRootNode()
    if root:
        for child_index in range(root.GetChildCount()):
            generate_embed_dict_from_hierarchy(root.GetChild(child_index), embeds)
    return embeds
# #####################################################
# Generate Geometry Objects
# #####################################################
def generate_geometry_object(node):
    """Build a geometry stub that references the node's embedded geometry entry."""
    return {
        'type' : 'embedded',
        'id' : getPrefixedName( node, 'Embed' )
    }
def generate_geometry_dict_from_hierarchy(node, geometry_dict):
    """Depth-first walk registering a geometry stub for every mesh node.

    Keys are the node's 'Geometry'-prefixed names; values reference the
    corresponding embedded geometry payload.
    """
    attribute = node.GetNodeAttribute()
    if attribute is not None and attribute.GetAttributeType() == FbxNodeAttribute.eMesh:
        geometry_object = generate_geometry_object(node)
        geometry_name = getPrefixedName(node, 'Geometry')
        geometry_dict[geometry_name] = geometry_object
    for i in range(node.GetChildCount()):
        generate_geometry_dict_from_hierarchy(node.GetChild(i), geometry_dict)
def generate_geometry_dict(scene):
    """Map geometry names to geometry stubs for every mesh in the scene."""
    geometries = {}
    root = scene.GetRootNode()
    if root:
        for child_index in range(root.GetChildCount()):
            generate_geometry_dict_from_hierarchy(root.GetChild(child_index), geometries)
    return geometries
# #####################################################
# Generate Light Node Objects
# #####################################################
def generate_default_light():
    """Return a fallback white directional light for scenes exported with -l."""
    default_direction = (1, 1, 1)
    default_color = (1, 1, 1)
    default_intensity = 80.0
    return {
        'type': 'DirectionalLight',
        'color': getHex(default_color),
        # Intensity is stored as a percentage; Three.js expects 0..1.
        'intensity': default_intensity / 100.00,
        'direction': serializeVector3(default_direction),
        'target': getObjectName(None)
    }
def generate_light_object(node):
    """Serialize an FBX light node into a Three.js light dictionary.

    Handles directional, point and spot lights; for the remaining FBX
    light kinds (area, volume) this returns None.
    """
    light = node.GetNodeAttribute()
    # Index order mirrors the FBX light-type enum.
    light_types = ["point", "directional", "spot", "area", "volume"]
    light_type = light_types[light.LightType.Get()]
    transform = node.EvaluateLocalTransform()
    position = transform.GetT()
    output = None
    if light_type == "directional":
        # Three.js directional lights emit light from a point in 3d space to a target node or the origin.
        # When there is no target, we need to take a point, one unit away from the origin, and move it
        # into the right location so that the origin acts like the target
        if node.GetTarget():
            direction = position
        else:
            translation = FbxVector4(0,0,0,0)
            scale = FbxVector4(1,1,1,1)
            rotation = transform.GetR()
            # Rotation-only matrix (identity translation/scale) applied to the
            # +Y unit vector to recover the light's direction.
            matrix = FbxMatrix(translation, rotation, scale)
            direction = matrix.MultNormalize(FbxVector4(0,1,0,1))
        output = {
            'type': 'DirectionalLight',
            'color': getHex(light.Color.Get()),
            'intensity': light.Intensity.Get()/100.0,  # FBX stores a percentage
            'direction': serializeVector3( direction ),
            'target': getObjectName( node.GetTarget() )
        }
    elif light_type == "point":
        output = {
            'type': 'PointLight',
            'color': getHex(light.Color.Get()),
            'intensity': light.Intensity.Get()/100.0,
            'position': serializeVector3( position ),
            'distance': light.FarAttenuationEnd.Get()
        }
    elif light_type == "spot":
        output = {
            'type': 'SpotLight',
            'color': getHex(light.Color.Get()),
            'intensity': light.Intensity.Get()/100.0,
            'position': serializeVector3( position ),
            'distance': light.FarAttenuationEnd.Get(),
            'angle': light.OuterAngle.Get()*math.pi/180,  # degrees -> radians
            'exponent': light.DecayType.Get(),
            'target': getObjectName( node.GetTarget() )
        }
    return output
def generate_ambient_light(scene):
    """Serialize the scene's global ambient color, or None when it is pure black."""
    settings = scene.GetGlobalSettings()
    raw_color = settings.GetAmbientColor()
    rgb = (raw_color.mRed, raw_color.mGreen, raw_color.mBlue)
    # A completely black ambient term contributes nothing — skip it.
    if rgb == (0, 0, 0):
        return None
    return {
        'type': 'AmbientLight',
        'color': getHex(rgb)
    }
# #####################################################
# Generate Camera Node Objects
# #####################################################
def generate_default_camera():
    """Return a fallback perspective camera for scenes exported with -c."""
    default_position = (100, 100, 100)
    near_plane = 0.1
    far_plane = 1000
    field_of_view = 75
    return {
        'type': 'PerspectiveCamera',
        'fov': field_of_view,
        'near': near_plane,
        'far': far_plane,
        'position': serializeVector3(default_position)
    }
def generate_camera_object(node):
    """Serialize an FBX camera node into a Three.js camera dictionary.

    Perspective cameras map to 'PerspectiveCamera'; orthogonal cameras map to
    'OrthographicCamera'. Frustum bounds for the orthographic case are not
    extracted yet and stay as empty strings (preserved placeholder behavior).
    """
    camera = node.GetNodeAttribute()
    position = camera.Position.Get()
    # Index order mirrors the FBX projection-type enum.
    projection_types = [ "perspective", "orthogonal" ]
    projection = projection_types[camera.ProjectionType.Get()]
    near = camera.NearPlane.Get()
    far = camera.FarPlane.Get()
    output = {}
    if projection == "perspective":
        aspect = camera.PixelAspectRatio.Get()
        fov = camera.FieldOfView.Get()
        output = {
            'type': 'PerspectiveCamera',
            'fov': fov,
            'aspect': aspect,
            'near': near,
            'far': far,
            'position': serializeVector3( position )
        }
    elif projection == "orthogonal":
        # Bug fix: this branch previously emitted 'PerspectiveCamera' as the
        # type, which mislabels orthographic cameras in the exported scene.
        left = ""
        right = ""
        top = ""
        bottom = ""
        output = {
            'type': 'OrthographicCamera',
            'left': left,
            'right': right,
            'top': top,
            'bottom': bottom,
            'near': near,
            'far': far,
            'position': serializeVector3( position )
        }
    return output
# #####################################################
# Generate Camera Names
# #####################################################
def generate_camera_name_list_from_hierarchy(node, camera_list):
    """Depth-first walk appending the object name of every camera node."""
    attribute = node.GetNodeAttribute()
    if attribute is not None and attribute.GetAttributeType() == FbxNodeAttribute.eCamera:
        camera_list.append(getObjectName(node))
    for i in range(node.GetChildCount()):
        generate_camera_name_list_from_hierarchy(node.GetChild(i), camera_list)
def generate_camera_name_list(scene):
    """Return the object names of all camera nodes found in the scene."""
    names = []
    root = scene.GetRootNode()
    if root:
        for child_index in range(root.GetChildCount()):
            generate_camera_name_list_from_hierarchy(root.GetChild(child_index), names)
    return names
# #####################################################
# Generate Mesh Node Object
# #####################################################
def generate_mesh_object(node):
    """Serialize an FBX mesh node into a Three.js scene-object entry."""
    mesh = node.GetNodeAttribute()
    transform = node.EvaluateLocalTransform()
    position = transform.GetT()
    scale = transform.GetS()
    # NOTE(review): rotation is computed but never emitted — the quaternion
    # below is what goes into the output.
    rotation = getRadians(transform.GetR())
    quaternion = transform.GetQ()
    material_count = node.GetMaterialCount()
    material_name = ""
    if material_count > 0:
        material_names = []
        for l in range(mesh.GetLayerCount()):
            materials = mesh.GetLayer(l).GetMaterials()
            if materials:
                if materials.GetReferenceMode() == FbxLayerElement.eIndex:
                    #Materials are in an undefined external table
                    continue
                for i in range(material_count):
                    material = node.GetMaterial(i)
                    material_names.append( getMaterialName(material) )
        # Guard: a single-material mesh whose layers were all skipped still
        # needs one (empty) name so material_names[0] below cannot fail.
        if not material_count > 1 and not len(material_names) > 0:
            material_names.append('')
        #If this mesh has more than one material, use a proxy material
        material_name = getMaterialName( node, True) if material_count > 1 else material_names[0]
    output = {
        'geometry': getPrefixedName( node, 'Geometry' ),
        'material': material_name,
        'position': serializeVector3( position ),
        'quaternion': serializeVector4( quaternion ),
        'scale': serializeVector3( scale ),
        'visible': True,
    }
    return output
# #####################################################
# Generate Node Object
# #####################################################
def generate_object(node):
    """Serialize a generic (non mesh/light/camera) FBX node.

    The original FBX attribute type is preserved in 'fbx_type' so the loader
    can tell what kind of node this placeholder came from.
    """
    # Index order mirrors the FBX node-attribute enum.
    node_types = ["Unknown", "Null", "Marker", "Skeleton", "Mesh", "Nurbs", "Patch", "Camera",
                  "CameraStereo", "CameraSwitcher", "Light", "OpticalReference", "OpticalMarker", "NurbsCurve",
                  "TrimNurbsSurface", "Boundary", "NurbsSurface", "Shape", "LODGroup", "SubDiv", "CachedEffect", "Line"]
    transform = node.EvaluateLocalTransform()
    position = transform.GetT()
    scale = transform.GetS()
    quaternion = transform.GetQ()
    attribute = node.GetNodeAttribute()
    if attribute is None:
        node_type = "Null"
    else:
        node_type = node_types[attribute.GetAttributeType()]
    # (Unused locals removed: Euler rotation and object name were computed
    # but never emitted — the quaternion carries the orientation.)
    output = {
        'fbx_type': node_type,
        'position': serializeVector3( position ),
        'quaternion': serializeVector4( quaternion ),
        'scale': serializeVector3( scale ),
        'visible': True
    }
    return output
# #####################################################
# Parse Scene Node Objects
# #####################################################
def generate_object_hierarchy(node, object_dict):
    """Recursively serialize *node* and its children into *object_dict*.

    Returns the number of objects added for this subtree.
    NOTE(review): light serialization can return None for area/volume lights;
    the None is stored as-is — confirm downstream handles null objects.
    """
    object_count = 0
    if node.GetNodeAttribute() == None:
        object_data = generate_object(node)
    else:
        attribute_type = (node.GetNodeAttribute().GetAttributeType())
        if attribute_type == FbxNodeAttribute.eMesh:
            object_data = generate_mesh_object(node)
        elif attribute_type == FbxNodeAttribute.eLight:
            object_data = generate_light_object(node)
        elif attribute_type == FbxNodeAttribute.eCamera:
            object_data = generate_camera_object(node)
        else:
            object_data = generate_object(node)
    object_count += 1
    object_name = getObjectName(node)
    object_children = {}
    for i in range(node.GetChildCount()):
        object_count += generate_object_hierarchy(node.GetChild(i), object_children)
    if node.GetChildCount() > 0:
        # Having 'children' above other attributes is hard to read.
        # We can send it to the bottom using the last letter of the alphabet 'z'.
        # This letter is removed from the final output.
        if option_pretty_print:
            object_data['zchildren'] = object_children
        else:
            object_data['children'] = object_children
    object_dict[object_name] = object_data
    return object_count
def generate_scene_objects(scene):
    """Serialize every scene node plus the optional ambient/default light and camera.

    Returns (object_dict, object_count).
    """
    objects = {}
    count = 0
    ambient = generate_ambient_light(scene)
    if ambient:
        objects['AmbientLight'] = ambient
        count += 1
    if option_default_light:
        objects['DefaultLight'] = generate_default_light()
        count += 1
    if option_default_camera:
        objects['DefaultCamera'] = generate_default_camera()
        count += 1
    root = scene.GetRootNode()
    if root:
        for child_index in range(root.GetChildCount()):
            count += generate_object_hierarchy(root.GetChild(child_index), objects)
    return objects, count
# #####################################################
# Generate Scene Output
# #####################################################
def extract_scene(scene, filename):
    """Serialize the whole FBX *scene* into a Three.js scene (format 3.2) dict."""
    # NOTE(review): global_settings and scene_settings below are fetched but
    # never used.
    global_settings = scene.GetGlobalSettings()
    objects, nobjects = generate_scene_objects(scene)
    textures = generate_texture_dict(scene)
    materials = generate_material_dict(scene)
    geometries = generate_geometry_dict(scene)
    embeds = generate_embed_dict(scene)
    ntextures = len(textures)
    nmaterials = len(materials)
    ngeometries = len(geometries)
    # Root transform is identity; individual nodes carry their own transforms.
    position = serializeVector3( (0,0,0) )
    rotation = serializeVector3( (0,0,0) )
    scale = serializeVector3( (1,1,1) )
    camera_names = generate_camera_name_list(scene)
    scene_settings = scene.GetGlobalSettings()
    # This does not seem to be any help here
    # global_settings.GetDefaultCamera()
    # Default camera: first camera found in the scene, or empty string.
    # NOTE(review): generate_scene_objects registers the synthetic camera under
    # the key 'DefaultCamera' — the 'default_camera' name used here looks
    # inconsistent; confirm which one the loader expects.
    defcamera = camera_names[0] if len(camera_names) > 0 else ""
    if option_default_camera:
        defcamera = 'default_camera'
    metadata = {
        'formatVersion': 3.2,
        'type': 'scene',
        'generatedBy': 'convert-to-threejs.py',
        'objects': nobjects,
        'geometries': ngeometries,
        'materials': nmaterials,
        'textures': ntextures
    }
    transform = {
        'position' : position,
        'rotation' : rotation,
        'scale' : scale
    }
    defaults = {
        'bgcolor' : 0,
        'camera' : defcamera,
        'fog' : ''
    }
    output = {
        'objects': objects,
        'geometries': geometries,
        'materials': materials,
        'textures': textures,
        'embeds': embeds,
        'transform': transform,
        'defaults': defaults,
    }
    # '0metadata' sorts first under json sort_keys=True; the leading '0' is
    # presumably stripped later by the pretty-print regex hacks — confirm.
    if option_pretty_print:
        output['0metadata'] = metadata
    else:
        output['metadata'] = metadata
    return output
# #####################################################
# Generate Non-Scene Output
# #####################################################
def extract_geometry(scene, filename):
    """Produce the flattened (non-scene) geometry-only export for *scene*.

    *filename* is accepted for signature parity with extract_scene but is
    not used here.
    """
    return generate_non_scene_output(scene)
# #####################################################
# File Helpers
# #####################################################
def write_file(filepath, content):
    """Write *content* to *filepath* encoded as UTF-8.

    Unencodable characters are replaced rather than raising. The previous
    implementation computed the parent directory but never used it and left
    the file handle unguarded; `with` now guarantees the file is closed.
    """
    # NOTE: this is a Python 2 script — encode() yields a byte string, which
    # is what a 'w'-mode file expects there.
    with open(filepath, "w") as out:
        out.write(content.encode('utf8', 'replace'))
def read_file(filepath):
    """Return the file's contents as a list of lines (newlines preserved).

    Uses a context manager so the handle is closed even if reading fails.
    """
    with open(filepath) as fp:
        return fp.readlines()
def copy_textures(textures):
    """Copy every referenced texture file into a local 'maps' folder.

    *textures* maps texture keys to dicts carrying a 'fullpath' entry.
    Missing source files are reported and skipped, and each distinct source
    path is copied at most once.
    """
    copied = {}  # source path -> True once copied
    for key in textures:
        url = textures[key]['fullpath']
        if url in copied:  # texture has already been copied
            continue
        if not os.path.exists(url):
            print("copy_texture error: we can't find this texture at " + url)
            continue
        try:
            # Take the basename, accepting either path-separator style.
            index = url.rfind('/')
            if index == -1:
                index = url.rfind( '\\' )
            filename = url[index+1:len(url)]
            saveFolder = "maps"
            saveFilename = saveFolder + "/" + filename
            if not os.path.exists(saveFolder):
                os.makedirs(saveFolder)
            shutil.copyfile(url, saveFilename)
            copied[url] = True
        except IOError as e:
            # Bug fix: this was a Python-2-only `print` statement; the rest of
            # the script uses print() calls, so use a 2/3-compatible form.
            print("I/O error({0}): {1} {2}".format(e.errno, e.strerror, url))
def findFilesWithExt(directory, ext, include_path = True):
    """Recursively find files under *directory* whose extension matches *ext*.

    The comparison is case-insensitive; *ext* must include the dot (".png").
    Returns full paths unless include_path is False, in which case bare
    filenames are returned.
    """
    target_ext = ext.lower()
    matches = []
    for root, _dirs, names in os.walk(directory):
        for name in names:
            if os.path.splitext(name)[1].lower() == target_ext:
                matches.append(os.path.join(root, name) if include_path else name)
    return matches
# #####################################################
# main
# #####################################################
if __name__ == "__main__":
from optparse import OptionParser
try:
from FbxCommon import *
except ImportError:
import platform
msg = 'Could not locate the python FBX SDK!\n'
msg += 'You need to copy the FBX SDK into your python install folder such as '
if platform.system() == 'Windows' or platform.system() == 'Microsoft':
msg += '"Python26/Lib/site-packages"'
elif platform.system() == 'Linux':
msg += '"/usr/local/lib/python2.6/site-packages"'
elif platform.system() == 'Darwin':
msg += '"/Library/Frameworks/Python.framework/Versions/2.6/lib/python2.6/site-packages"'
msg += ' folder.'
print(msg)
sys.exit(1)
usage = "Usage: %prog [source_file.fbx] [output_file.js] [options]"
parser = OptionParser(usage=usage)
parser.add_option('-t', '--triangulate', action='store_true', dest='triangulate', help="force quad geometry into triangles", default=False)
parser.add_option('-x', '--ignore-textures', action='store_true', dest='notextures', help="don't include texture references in output file", default=False)
parser.add_option('-n', '--no-texture-copy', action='store_true', dest='notexturecopy', help="don't copy texture files", default=False)
parser.add_option('-u', '--force-prefix', action='store_true', dest='prefix', help="prefix all object names in output file to ensure uniqueness", default=False)
parser.add_option('-f', '--flatten-scene', action='store_true', dest='geometry', help="merge all geometries and apply node transforms", default=False)
parser.add_option('-y', '--force-y-up', action='store_true', dest='forceyup', help="ensure that the y axis shows up", default=False)
parser.add_option('-c', '--add-camera', action='store_true', dest='defcamera', help="include default camera in output scene", default=False)
parser.add_option('-l', '--add-light', action='store_true', dest='deflight', help="include default light in output scene", default=False)
parser.add_option('-p', '--pretty-print', action='store_true', dest='pretty', help="prefix all object names in output file", default=False)
(options, args) = parser.parse_args()
option_triangulate = options.triangulate
option_textures = True if not options.notextures else False
option_copy_textures = True if not options.notexturecopy else False
option_prefix = options.prefix
option_geometry = options.geometry
option_forced_y_up = options.forceyup
option_default_camera = options.defcamera
option_default_light = options.deflight
option_pretty_print = options.pretty
# Prepare the FBX SDK.
sdk_manager, scene = InitializeSdkObjects()
converter = FbxGeometryConverter(sdk_manager)
# The converter takes an FBX file as an argument.
if len(args) > 1:
print("\nLoading file: %s" % args[0])
result = LoadScene(sdk_manager, scene, args[0])
else:
result = False
print("\nUsage: convert_fbx_to_threejs [source_file.fbx] [output_file.js]\n")
if not result:
print("\nAn error occurred while loading the file...")
else:
if option_triangulate:
print("\nForcing geometry to triangles")
triangulate_scene(scene)
axis_system = FbxAxisSystem.MayaYUp
if not option_forced_y_up:
# According to asset's coordinate to convert scene
upVector = scene.GetGlobalSettings().GetAxisSystem().GetUpVector();
if upVector[0] == 3:
axis_system = FbxAxisSystem.MayaZUp
axis_system.ConvertScene(scene)
inputFolder = args[0].replace( "\\", "/" );
index = args[0].rfind( "/" );
inputFolder = inputFolder[:index]
outputFolder = args[1].replace( "\\", "/" );
index = args[1].rfind( "/" );
outputFolder = outputFolder[:index]
if option_geometry:
output_content = extract_geometry(scene, os.path.basename(args[0]))
else:
output_content = extract_scene(scene, os.path.basename(args[0]))
if option_pretty_print:
output_string = json.dumps(output_content, indent=4, cls=CustomEncoder, separators=(',', ': '), sort_keys=True)
output_string = executeRegexHacks(output_string)
else:
output_string = json.dumps(output_content, separators=(',', ': '), sort_keys=True)
output_path = os.path.join(os.getcwd(), args[1])
write_file(output_path, output_string)
if option_copy_textures:
copy_textures( output_content['textures'] )
print("\nExported Three.js file to:\n%s\n" % output_path)
# Destroy all objects created by the FBX SDK.
sdk_manager.Destroy()
sys.exit(0)
|
[
"rostislav.vel@gmail.com"
] |
rostislav.vel@gmail.com
|
19c58a6721b17bcbf087c8ba204fadc0af615155
|
11076a25f24d0185966de0551e4f18cb97f89efa
|
/138.复制带随机指针的链表.py
|
04cdf8aa9cc0dc0cd4728b22ca8676674a70d34c
|
[] |
no_license
|
YinWincher/leetcode
|
3b828b3570f8a445b5ae3e404ac439db6c3aae7f
|
bb4cb5eaef04578cdfbb4253fdf14a6f979ed55b
|
refs/heads/master
| 2021-07-02T15:42:35.563470
| 2020-12-11T11:30:04
| 2020-12-11T11:30:04
| 204,738,361
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,111
|
py
|
#
# @lc app=leetcode.cn id=138 lang=python3
#
# [138] 复制带随机指针的链表
#
"""
# Definition for a Node.
class Node:
def __init__(self, val, next, random):
self.val = val
self.next = next
self.random = random
"""
class Solution:
    def copyRandomList(self, head: 'Node') -> 'Node':
        """Deep-copy a linked list whose nodes carry an extra `random` pointer.

        Two passes keyed on node *identity* (id): the previous implementation
        cached clones by node *value*, which collapsed distinct nodes holding
        equal values, and the cache persisted across calls on the same
        Solution instance. Both defects are fixed here.
        """
        if not head:
            return None
        # Pass 1: clone every node, keyed by the original node's identity.
        clones = {}
        cur = head
        while cur:
            clones[id(cur)] = Node(cur.val, None, None)
            cur = cur.next
        # Pass 2: wire up the next/random pointers between the clones.
        cur = head
        while cur:
            clone = clones[id(cur)]
            if cur.next:
                clone.next = clones[id(cur.next)]
            if cur.random:
                clone.random = clones[id(cur.random)]
            cur = cur.next
        return clones[id(head)]

    def getList(self, val) -> 'Node':
        """Legacy value-keyed node cache, kept for backward compatibility.

        WARNING: keys nodes by value, so lists containing duplicate values
        collapse onto one node; copyRandomList no longer uses this.
        """
        if not hasattr(self, '_nodeHash'):
            self._nodeHash = {}
        if self._nodeHash.get(val):
            return self._nodeHash[val]
        node = Node(val, None, None)
        self._nodeHash[val] = node
        return node
|
[
"812480580@qq.com"
] |
812480580@qq.com
|
1105e9a282f259df8071dc3490a0639a31431369
|
489c946cb584e9fdc18ee7fd2ee013df6161aa0c
|
/ex16.py
|
dccb865a9ffcde385a996e5bbbcb5197ba6132af
|
[
"MIT"
] |
permissive
|
HninYamoneNwae/python-exercises
|
6990a68de5ad6cd2b54a886a5adcf37e1903ab56
|
2cd797746f4ee6b3c5d10e2579cc4840c4645f7e
|
refs/heads/master
| 2020-03-19T07:58:44.632119
| 2018-06-26T08:59:47
| 2018-06-26T08:59:47
| 136,165,434
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 641
|
py
|
from sys import argv

script, filename = argv

# Bug fix: the f-string had no placeholder (it printed the literal text
# "(unknown)"), so the user was never told which file would be erased.
print(f"We're going to erase {filename}.")
print("If you don't want that , hit CTRL-C (^C).")
print("If you do want that , hit RETURN.")

input("?")

print("Opening the file ...")
target = open(filename, 'w')

print("Truncating the file. Goodbye!")
target.truncate()

print("Now I'm going to ask you for three lines.")
line1 = input("line 1: ")
line2 = input("line 2: ")
line3 = input("line 3: ")

print("I'm going to write these to the file.")
# Write each captured line followed by a newline.
target.write(line1)
target.write("\n")
target.write(line2)
target.write("\n")
target.write(line3)
target.write("\n")

print("And finally , we close it.")
target.close()
|
[
"hninyamonenway@gmail.com"
] |
hninyamonenway@gmail.com
|
6a8021d4e8cba61d4921b7ae55b98bbc3f119c61
|
a7699b02adc473058b870a309408894e3ae15d89
|
/dice.py
|
4d22b88fecb99dcb42f032275d23d394eb13823e
|
[] |
no_license
|
Bezzadx/Pazaak
|
508c5638d28d9cd1c6d274df20e69c37bb3fa9e7
|
fd8d6d83450eac13bf70770f0e9907aecd8aeb33
|
refs/heads/main
| 2023-03-23T09:36:34.141519
| 2021-03-16T10:07:48
| 2021-03-16T10:07:48
| 337,674,102
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 591
|
py
|
import random as r
def roll(dice, number = 1, ad = 0):
    """Roll `number` dice with `dice` sides and return the total.

    ad > 0 rolls twice and keeps the higher total (advantage);
    ad < 0 keeps the lower total (disadvantage).
    A non-positive die size always scores 0.
    """
    if dice < 1:
        return 0
    if ad != 0:
        first = roll(dice, number)
        second = roll(dice, number)
        return max(first, second) if ad > 0 else min(first, second)
    return sum(r.randint(1, dice) for _ in range(number))
def rolln(dice, number = 1, repetitions = 1):
    """Return a list of `repetitions` independent roll() totals."""
    return [roll(dice, number) for _ in range(repetitions)]
|
[
"bmdawson97@gmail.com"
] |
bmdawson97@gmail.com
|
f9dbe7d7f96e28f3b3be69de19766f1734b17f4d
|
57a01c9c7a0e19ed6e1b48d2fe31fbded71fbbeb
|
/examples/tgmm/gibbs_tgmm.py
|
184d6653c3f1c6b1e807f62990aef71d2210b4ce
|
[
"MIT"
] |
permissive
|
hanyas/mimo
|
a5b6cefd470e09005d385118cc29e3922a7d64ab
|
589538cb7a05860a47c0b370db89086e2a189ca2
|
refs/heads/master
| 2023-06-08T15:38:12.098072
| 2023-05-29T07:27:34
| 2023-05-29T07:27:34
| 192,242,527
| 18
| 6
|
MIT
| 2023-05-19T15:50:32
| 2019-06-16T22:28:04
|
Python
|
UTF-8
|
Python
| false
| false
| 1,666
|
py
|
# Demo: fit a tied-covariance Gaussian mixture with Gibbs-style resampling
# on synthetic data drawn from a known four-component mixture.
import numpy as np
import numpy.random as npr

from mimo.distributions import TiedGaussiansWithPrecision
from mimo.distributions import TiedGaussiansWithNormalWisharts
from mimo.distributions import TiedNormalWisharts

from mimo.distributions import Categorical
from mimo.distributions import Dirichlet
from mimo.distributions import CategoricalWithDirichlet

from mimo.mixtures import MixtureOfGaussians
from mimo.mixtures import BayesianMixtureOfGaussians

from matplotlib import pyplot as plt

# npr.seed(1337)

# generate data
# Ground truth: four well-separated 2d components with identity covariance.
gating = Categorical(dim=4)

mus = np.stack([np.array([-3., 3.]),
                np.array([3., -3.]),
                np.array([5., 5.]),
                np.array([-5., -5.])])
lmbdas = np.array(4 * [np.eye(2)])
components = TiedGaussiansWithPrecision(size=4, dim=2,
                                        mus=mus, lmbdas=lmbdas)

gmm = MixtureOfGaussians(gating=gating, components=components)

obs, labels = gmm.rvs(500)
gmm.plot(obs)

# learn model
# Symmetric Dirichlet prior over mixture weights.
gating_prior = Dirichlet(dim=4, alphas=np.ones((4, )))
gating = CategoricalWithDirichlet(dim=4, prior=gating_prior)

# Weak Normal-Wishart prior: zero means, small kappa, near-minimal dof.
mus = np.zeros((4, 2))
kappas = 1e-2 * np.ones((4,))
psis = np.array(4 * [np.eye(2)])
nus = 3. * np.ones((4,)) + 1e-8
components_prior = TiedNormalWisharts(size=4, dim=2,
                                      mus=mus, kappas=kappas,
                                      psis=psis, nus=nus)
components = TiedGaussiansWithNormalWisharts(size=4, dim=2,
                                             prior=components_prior)

model = BayesianMixtureOfGaussians(gating=gating, components=components)
# Posterior inference by resampling (Gibbs sweeps).
model.resample(obs, maxiter=1000)

plt.figure()
model.plot(obs)
|
[
"abdulsamad@ias.informatik.tu-darmstadt.de"
] |
abdulsamad@ias.informatik.tu-darmstadt.de
|
de45c2010e1fd826b7a417faf0be7515a27d549f
|
54806fb5debdd52b09e2db74708f26c4791107ce
|
/scripts/SongCollection.py
|
8cba0f86ca291c0fa135ebff65c11f21030acf72
|
[] |
no_license
|
Luke-Jacobs/Radio-Song-Analysis
|
9e59236ae1e147654879c7fdebe1eb1c5f81cb47
|
530439b62fcdd2be916601727dddfbcd4db3a228
|
refs/heads/master
| 2020-06-15T13:43:27.852888
| 2019-07-14T14:59:19
| 2019-07-14T14:59:19
| 195,288,786
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,384
|
py
|
from scripts.StationInterfaces import *
from typing import List, Dict, Optional
import os, time
"""
SongCollection
This file contains classes and functions that collect PlayedSongs into a format that can be analyzed and graphed.
"""
class StationPlayCollection:
    """Accumulates the play history of several radio stations.

    Maps station name -> SongList and knows how to pickle itself to, and
    restore itself from, timestamp-named files inside a data folder.
    NOTE: SongList/PlayedSong/pickle/datetime come from the
    StationInterfaces star import — confirm.
    """
    def __init__(self):
        # station name -> SongList
        self.songLists = {}

    def __getstate__(self):
        # Pickle only the raw PlayedSong lists, keyed by station name.
        return {'stationData': dict([(item[0], item[1].songs) for item in self.songLists.items()])}

    def __setstate__(self, state):
        # Rebuild the SongList wrappers around the pickled raw lists.
        self.songLists = {}
        for stationName in state['stationData']:
            self.songLists[stationName] = SongList(state['stationData'][stationName])

    def __getitem__(self, item: str) -> SongList:
        return self.songLists[item]

    def __setitem__(self, key: str, value: SongList):
        self.songLists[key] = value

    def add(self, stationName, songs: List[PlayedSong]) -> None:
        """Merge freshly scraped *songs* into the station's history."""
        # Add first addition
        if self.songLists.get(stationName, None) is None:
            print('[%s] Starting collection with %d song%s' % (stationName, len(songs), '' if len(songs) == 1 else 's'))
            self.songLists[stationName] = SongList(songs)
        # Add to collection if not first addition
        else:
            # Stitch against the stored tail so overlapping scrapes are not duplicated.
            lastFewSongs = self.songLists[stationName].songs[-11:]
            newSongs = getNewSongs(lastFewSongs, songs)
            print('[%s] Adding %d new song%s' % (stationName, len(newSongs), '' if len(newSongs) == 1 else 's'))
            self.songLists[stationName].add(newSongs)

    def save(self, timestamp: Optional[int]=None, folder: Optional[str]=None):
        """Pickle this collection to <folder>/<timestamp> (defaults: 'data', now)."""
        if folder is None:
            folder = 'data'
        os.makedirs(folder, exist_ok=True)
        if timestamp is None:
            timestamp = int(time.time())
        with open(os.path.join(folder, str(timestamp)), 'wb+') as fp:
            pickle.dump(self, fp)

    def getStations(self) -> List[str]:
        """Return the names of all tracked stations."""
        return list(self.songLists.keys())

    def getAllSongs(self) -> SongList:
        """Return one SongList with every play from every station."""
        return SongList([song for songList in self.songLists.values() for song in songList.songs])

    def getLists(self) -> Dict[str, SongList]:
        return self.songLists

    def compareAllFrequencies(self, title: str = None):
        """Plot song-frequency comparison across all stations."""
        SongList.compareSongFrequencies(list(self.songLists.keys()), *self.songLists.values(), title=title)

    @staticmethod
    def restore(timestamp: int, folder: str = 'data'):
        """Unpickle a previously saved collection from <folder>/<timestamp>."""
        filepath = os.path.join(folder, str(timestamp))
        if not os.path.exists(filepath):
            raise RuntimeError('Collection does not exist')
        with open(filepath, 'rb') as fp:
            restoredCollection = pickle.load(fp)
        return restoredCollection

    @staticmethod
    def getMostRecentSaved(folder='data/', output=True) -> 'StationPlayCollection':
        """Retrieve the most recently saved collection from a specified folder. Pickle files must be in timestamp format."""
        savedCollections = os.listdir(folder)
        newest = 0
        for collectionName in savedCollections:
            try:
                timestamp = int(collectionName)
                if timestamp > newest:
                    newest = timestamp
            except ValueError:
                # Non-timestamp filenames are ignored.
                continue
        if output: print('[i] Loading newest collection: %s' % datetime.fromtimestamp(newest).strftime('%b %d, %I:%M'))
        return StationPlayCollection.restore(newest)

    def showStats(self) -> None:
        """Print the number of stored plays per station."""
        stationNames = self.songLists.keys()
        print('[i] Station Stats:')
        for station in stationNames:
            print('\t%s - %d' % (station, len(self.songLists[station])))
class RadioAnalysis:
    """Graphs and statistics over station play samples.

    NOTE: plt and SongList come from the StationInterfaces star import — confirm.
    """
    def __init__(self, samples: Dict[str, SongList]):
        # station name -> SongList of samples
        self.stationSamples = samples

    def showSongPopularityByHour(self, stationName: str=None) -> None:
        """Bar-chart the average song popularity for each hour of the day.

        When *stationName* is None, all stations' samples are pooled.
        NOTE(review): an hour with zero plays makes nSongsAtHour == 0 and the
        division below raises ZeroDivisionError — confirm inputs always cover
        every hour.
        """
        # Collect data
        if stationName is None:
            allSongs = SongList.fromLists(list(self.stationSamples.values()))
        else:
            allSongs = self.stationSamples[stationName]  # Extract one station's songs
        popularityIndex = dict(allSongs.getPopularityIndices())
        # Iterate over 24 hours
        avgPopularityByHour = []
        allHours = list(range(24))
        for hour in allHours:
            songsAtHour = allSongs.select(hours=hour)
            nSongsAtHour = len(songsAtHour)
            hourlyPopularityTotal = sum([popularityIndex[song] for song in songsAtHour.songs]) / nSongsAtHour
            avgPopularityByHour.append(hourlyPopularityTotal)
        plt.bar(allHours, avgPopularityByHour)
        plt.title('During which hour of the day is the most popular music played?')
        plt.xlabel('Hour')
        plt.ylabel('Average popularity index (sum of each song\'s total play frequency per hour)')
        plt.show()
def getNewSongs(laterSongs: List[PlayedSong], earlierSongs: List[PlayedSong]) -> Optional[List[PlayedSong]]:
    """Combine two PlayedSong lists while respecting the older timestamp values in the laterSongs list."""
    # The two scrapes are expected to share a tail/head overlap.
    shared = set(laterSongs) & set(earlierSongs)
    if not shared:
        print('[-] No overlap between songs!')
    # Drop one occurrence of each overlapping song; what is left is new.
    fresh = earlierSongs.copy()
    for song in shared:
        fresh.remove(song)
    # Sanity-check that the overlap sits at the end of the earlier list.
    if shared != set(earlierSongs[-len(shared):]):
        print('[-] Error in stitching! \n\tOverlap: %s \n\tLater: %s \n\tEarlier: %s \n\tAdding: %s' %
              (PlayedSong.showList(shared), PlayedSong.showList(laterSongs), PlayedSong.showList(earlierSongs), PlayedSong.showList(fresh)))
    return fresh
def collectSongsFromStations(collection: StationPlayCollection, wait: float=5*60, folder: str=None):
    """This function continuously checks the websites of Air1 and K-LOVE for new songs.

    Runs forever: each outer iteration polls every station twice (sleeping
    ``wait`` seconds after each poll), then saves the collection to ``folder``.

    collection: accumulator that the per-station writers append into.
    wait: seconds to sleep between polls (default 5 minutes).
    folder: passed through to collection.save().
    """
    while True:
        # Retrieve songs from Air1 and KLOVE
        for i in range(2):
            # Add more stations here <---
            writeSongsToCollectionAIR1(collection)
            writeSongsToCollectionKLOVE(collection)
            writeSongsToCollectionKISS(collection)
            # Sleep happens INSIDE the loop, so each of the two polling
            # rounds is followed by a full wait interval.
            print('[i] Waiting %d minutes' % (wait / 60))
            time.sleep(wait)
        # Every two additions, save collection
        collection.showStats()
        print('[i] Saving collection (%s)...' % (datetime.now().strftime("%I:%M")), end='')
        collection.save(folder=folder)
        print('saved!')
|
[
"lukedjacobs@yahoo.com"
] |
lukedjacobs@yahoo.com
|
1eb2488eab7713582f37b167ce82a022b6a313f2
|
d33d6642299b56ae04eac597de6859de1c258b17
|
/euler/euler_2.py
|
b2406a66202b39bbd60216fd0fca6a415937f764
|
[] |
no_license
|
laurieskelly/lrs-bin
|
276e85c032f80de806b1a2e653f0724fdd3eb509
|
d57ea623f5c91d82311aff2c24c174135197b538
|
refs/heads/master
| 2020-04-06T15:32:27.379451
| 2018-11-01T17:16:50
| 2018-11-01T17:16:50
| 20,307,944
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 493
|
py
|
# Project Euler, problem 2.
#
# Each new term in the Fibonacci sequence is generated by adding the previous
# two terms. By starting with 1 and 2, the first 10 terms will be:
# 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, ...
# By considering the terms in the Fibonacci sequence whose values do not exceed
# four million, find the sum of the even-valued terms.
#
# Fixes over the original:
#  * `sum` shadowed the builtin; renamed to `total`.
#  * the old loop tested only `fib2` but then added `fib1 + fib2`, so a term
#    above the limit could be counted (harmless here only because the first
#    Fibonacci number past four million happens to be odd).
#  * `print total` is Python 2 only; `print(total)` works on both 2 and 3.
total = 0
fib1, fib2 = 1, 2
while fib2 <= 4000000:
    # fib2 is the current term and is known to be within the limit here.
    if fib2 % 2 == 0:
        total += fib2
    fib1, fib2 = fib2, fib1 + fib2
print(total)
|
[
"laurieskelly@users.noreply.github.com"
] |
laurieskelly@users.noreply.github.com
|
0ccf0aed7349e27100d4353e23cb2991d49134ae
|
7fb1f77a604c8d694544621ea7967c61fe4d6184
|
/src/core/util/NetworkHandler.py
|
e439609477e3d10af48f389d94ad5192784272e3
|
[] |
no_license
|
HP-xyz/mediatracker
|
df92bbf24dba7690016e82738a5daf2f647b4609
|
867fdef4145ccaa5b1e9deb9e11932490b38ab1e
|
refs/heads/master
| 2016-08-06T11:48:38.099618
| 2014-08-19T09:52:05
| 2014-08-19T09:52:05
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,293
|
py
|
__author__="dvorak"
__date__ ="$Dec 17, 2011 10:45:35 PM$"
import logging
import urllib
from urllib import request
import threading
class NetworkHandler(threading.Thread):
    """Fetch a URL and hand the raw response bytes to a callback.

    NOTE(review): although this subclasses threading.Thread, no run() method
    is defined and callers invoke DoGet() directly, so requests execute
    synchronously on the caller's thread.
    """
    def __init__(self, callback):
        # callback: callable invoked with the raw response body of DoGet().
        threading.Thread.__init__(self)
        self.Callback = callback
        self.logger_network = logging.getLogger("NetworkHandler")
        self.logger_network.debug(" === NetworkHandler::__init__ ===")
        # self.config = configparser.ConfigParser()
        #self.config.read_file(open('./config.conf'))
    def DoGet(self, url, user=None, password=None):
        # SECURITY NOTE(review): proxy URL embeds hard-coded credentials for
        # a specific corporate proxy -- move these to configuration and rotate
        # the password, which is now exposed in source control.
        proxy = urllib.request.ProxyHandler({'http': r'bbdnet1289:aoeuidhtns8@proxy.bbdnet.bbd.co.za:8080'})
        # NOTE(review): `user`/`password` parameters are accepted but never
        # registered on this auth handler, so they have no effect.
        auth = urllib.request.HTTPBasicAuthHandler()
        opener = urllib.request.build_opener(proxy, auth, urllib.request.HTTPHandler)
        networkRequest = opener.open(url)
        response = networkRequest.read()
        # Deliver the raw (bytes) body to the registered callback.
        self.Callback(response)
if __name__ == "__main__":
    # Ad-hoc manual test: fire one request and print the raw response body.
    class ThreadManager():
        def Get(self):
            # NOTE(review): DoGet is called directly (synchronously);
            # start()/run() of the Thread base class are never used.
            networkHandle = NetworkHandler(self.threadComplete)
            networkHandle.DoGet('http://myanimelist.net/malappinfo.php?status=all&u=DvorakUser')
        def threadComplete(self, data):
            print(data)
    manager = ThreadManager()
    manager.Get()
|
[
"DvorakUser@gmail.com"
] |
DvorakUser@gmail.com
|
dbea09afa0ccdeb9f86e9d3e7d3a1b5e15bb9687
|
5e679637d492b216ce550761ce8014c6b30033cb
|
/Classifier_2/plot_hmmcopy.py
|
3f9a6a713be9823928ec200c91b287d799d17051
|
[] |
no_license
|
flywind2/CancerCellClassifier
|
c9159f60583487e10aa519fefae45298a53d669c
|
2ca33e0b9faa765d4394c8dc0e08195018d0cd83
|
refs/heads/master
| 2020-07-09T11:45:47.140157
| 2018-11-22T20:36:32
| 2018-11-22T20:36:32
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 13,821
|
py
|
'''
Created on Nov 16, 2016
@author: dgrewal
Updated on Feb 20, 2018
@updated by Nafis Abrar
'''
from __future__ import division
import argparse
import pandas as pd
import matplotlib
import os
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import seaborn as sns
import statsmodels.api as sm
from matplotlib.backends.backend_pdf import PdfPages
import utils as utl
import math
from argparse import Namespace
# Short alias for statsmodels' LOWESS smoother used in gen_reads_plot.
lowess = sm.nonparametric.lowess
# fonttype 42 embeds TrueType fonts so text in saved PDFs remains editable.
matplotlib.rcParams['pdf.fonttype'] = 42
# Global seaborn/matplotlib theme for all plots produced by this module.
sns.set(context='talk',
        style='ticks',
        font='Helvetica',
        rc={'axes.titlesize': 12,
            'axes.labelsize': 15,
            'xtick.labelsize': 15,
            'ytick.labelsize': 15,
            'legend.fontsize': 15})
def parse_args(*args):
    """Build the argument Namespace used by GenHmmPlots.

    NOTE(review): an argparse parser is fully declared below but its result
    is never used -- the function returns a hard-coded ``Namespace`` whose
    corrected_reads / quality_metrics / segments paths are taken from the
    positional ``args`` tuple (args[0], args[1], args[2]).  The parser
    declarations are retained as documentation of each field's meaning.
    Also changes the working directory into ``Classifier_2`` as a side effect.
    """
    # =======================================================================================================================
    # Args are passed from Classifier
    # =======================================================================================================================
    parser = argparse.ArgumentParser()
    parser.add_argument('--corrected_reads',
                        required=True,
                        help='''Path to HMMcopy corrected reads output .csv file.''')
    parser.add_argument('--segments',
                        required=True,
                        help='''Path to HMMcopy segments output .csv file.''')
    parser.add_argument('--quality_metrics',
                        required=True,
                        help='''Optional quality metrics file for the run, with 'mad_neutral_state' column.''')
    parser.add_argument('--ref_genome',
                        required=True,
                        help='''Path to reference genome used for alignment.''')
    parser.add_argument('--num_states', type=int, default=7,
                        help='''Number of states used to run HMMcopy, default 7.''')
    parser.add_argument('--mad_threshold', type=float, default=0,
                        help='''all cells that have low MAD won't be plotted''')
    parser.add_argument('--reads_output',
                        required=True,
                        help='''Path to HMMcopy corrected reads output .pdf file.''')
    parser.add_argument('--bias_output',
                        required=True,
                        help='''Path to HMMcopy bias reads output .pdf file.''')
    parser.add_argument('--segs_output',
                        required=True,
                        help='''Path to HMMcopy segs reads output .pdf file.''')
    parser.add_argument('--plot_title',
                        help='''title of the plots''')
    parser.add_argument('--samples',
                        nargs='*',
                        help='''title of the plots''')
    # print os.getcwd()
    if "Classifier_2" not in os.getcwd():
        os.chdir("Classifier_2")
    # args[0]=corrected reads csv, args[1]=quality metrics csv, args[2]=segments csv.
    args = Namespace(bias_output='output/A95724B_reads.pdf', corrected_reads=args[0], mad_threshold=0, num_states=7, plot_title='good_clonal_profiles', quality_metrics=args[1], reads_output='output/good_clonal_profiles_read.pdf', ref_genome='GRCh37-lite.fa', samples=None, segments=args[2], segs_output='output/good_clonal_profiles_segs.pdf')
    return args
class GenHmmPlots(object):
    """
    Generate per-cell HMMcopy copy-number segment plots (saved as numbered
    PNGs in the sibling ``Pdfs`` directory) plus a cell_id/sample_id CSV,
    from the HMMcopy corrected-reads, segments and quality-metrics tables.
    """
    def __init__(self, args):
        # args: Namespace produced by parse_args() (file paths and options).
        self.args = args
        self.segs_pdf = self.get_pdf_handles()
    def load_data_pandas(self, infile):
        """Read a CSV and return it grouped by its ``cell_id`` column."""
        data = pd.read_csv(infile,
                           sep=',')
        data = data.groupby('cell_id')
        return data
    def read_quality_metrics(self):
        """Load the quality-metrics table, grouped by cell_id."""
        df = self.load_data_pandas(self.args.quality_metrics)
        return df
    def read_corrected_reads(self):
        """Load the corrected-reads table, grouped by cell_id."""
        df = self.load_data_pandas(self.args.corrected_reads)
        return df
    def read_segments(self):
        """Load the segments table, grouped by cell_id."""
        df = self.load_data_pandas(self.args.segments)
        return df
    def get_sample_ids(self, df, metrics):
        """Return the cell ids present in a grouped dataframe."""
        samples = df.groups.keys()
        return samples
    def get_pdf_handles(self):
        """Open the segments PDF for writing (reads/bias PDFs are disabled)."""
        # reads_pdf = PdfPages(self.args.reads_output)
        # bias_pdf = PdfPages(self.args.bias_output)
        segs_pdf = PdfPages(self.args.segs_output)
        return segs_pdf
    def get_plot_title(self, sample_id, metrics):
        """Compose a plot title from the cell's quality metrics.

        Missing columns are reported as 'NA'; mad_neutral_state and
        MSRSI_non_integerness are formatted to three decimals.  The
        configured args.plot_title is appended at the end.
        """
        if 'cell_call' in metrics.get_group(sample_id):
            cellcall = metrics.get_group(sample_id)['cell_call'].iloc[0]
        else:
            cellcall = 'NA'
        if 'experimental_condition' in metrics.get_group(sample_id):
            cond = metrics.get_group(sample_id)['experimental_condition'].iloc[0]
        else:
            cond = 'NA'
        if 'sample_type' in metrics.get_group(sample_id):
            st = metrics.get_group(sample_id)['sample_type'].iloc[0]
            st = str(st)
        else:
            st = 'NA'
        mad = metrics.get_group(sample_id)['mad_neutral_state'].iloc[0]
        mad = str('%.3f' % mad)
        ni = metrics.get_group(sample_id)['MSRSI_non_integerness'].iloc[0]
        ni = str('%.3f' % ni)
        title_str = [sample_id, '(cell call', cellcall, ', condition',
                     cond, ', sample_type', st, ' neutral MAD ', mad, ', MSRSI Non Integerness ',
                     ni, ')']
        title_str = ' '.join(title_str) + self.args.plot_title
        return title_str
    def get_mad_score(self, sample_id, metrics):
        """Return the cell's mad_neutral_state metric value."""
        mad = metrics.get_group(sample_id)['mad_neutral_state'].iloc[0]
        return mad
    def gen_reads_plot(self, df, ax, typ='norm'):
        """Scatter reads-per-bin (column ``typ``) with a LOWESS trend line."""
        col = '#595959'
        ax = utl.create_chromosome_plot_axes(ax, self.args.ref_genome)
        # only attempt to plot if data is available
        if df is not None:
            plt.scatter(df['plot_coord'], df[typ], color=col, s=4)
            plt_lowess = lowess(df[typ], df['plot_coord'], frac=0.01, return_sorted=False)
            plt.plot(df['plot_coord'], plt_lowess, color='black', linewidth=1.2)
        if typ == 'norm':
            ax.set_ylabel('Normalized reads per bin')
        elif typ == 'cor_gc':
            ax.set_ylabel('GC corrected reads per bin')
        elif typ == 'cor_map':
            ax.set_ylabel('GC and mappability \n corrected reads per bin')
        ax.tick_params(axis='x', which='minor', pad=9.1)
        ax = utl.add_open_grid_lines(ax)
    # def plot_corrected_reads(self, df, sample_id, title):
    #     """
    #
    #     """
    #     fig = plt.figure(figsize=(15, 12))
    #
    #     plt.subplot(3, 1, 1)
    #     ax = fig.gca()
    #     self.gen_reads_plot(df, ax, typ='norm')
    #     ax.set_title(title)
    #     ax.set_xlabel('')
    #
    #     plt.subplot(3, 1, 2)
    #     ax = fig.gca()
    #     self.gen_reads_plot(df, ax, typ='cor_gc')
    #     ax.set_xlabel('')
    #
    #     plt.subplot(3, 1, 3)
    #     ax = fig.gca()
    #     self.gen_reads_plot(df, ax, typ='cor_map')
    #
    #     sns.despine(offset=10, trim=True)
    #     plt.tight_layout()
    #     self.reads_pdf.savefig(fig, pad_inches=0.2)
    #     plt.close()
    #
    # def plot_bias(self, df, sample_id, title):
    #     """
    #     """
    #     df_ideal = df[df['ideal'] == True]
    #
    #     col = '#006ba4'
    #
    #     fig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2, 2, sharex='col', sharey='row', figsize=(9, 9))
    #
    #     ax1.set_ylabel('Read count')
    #     ax1.set_title('Uncorrected')
    #     if df is not None:
    #         ax1.scatter(df_ideal['gc'], df_ideal['reads'], edgecolors=col, facecolors='none', alpha=0.1)
    #
    #     ax2.set_title('Uncorrected')
    #     if df is not None:
    #         ax2.scatter(df_ideal['map'], df_ideal['reads'], edgecolors=col, facecolors='none', alpha=0.1)
    #
    #     ax3.set_xlabel('GC content')
    #     ax3.set_ylabel('Normalized read count')
    #     ax3.set_title('GC corrected')
    #     not_null = df_ideal['cor_gc'].notnull()
    #     if df is not None:
    #         ax3.scatter(df_ideal['gc'][not_null], df_ideal['cor_gc'][not_null], edgecolors=col, facecolors='none',
    #                     alpha=0.1)
    #
    #     ax4.set_xlabel('Mappability')
    #     ax4.set_title('GC and mappability corrected')
    #     not_null = df_ideal['cor_map'].notnull()
    #     if df is not None:
    #         ax4.scatter(df_ideal['map'][not_null], df_ideal['cor_map'][not_null], edgecolors=col, facecolors='none',
    #                     alpha=0.1)
    #
    #     fig.suptitle(title)
    #     sns.despine(offset=10, trim=True)
    #
    #     plt.tight_layout(rect=(0, 0, 1, 0.95))
    #     self.bias_pdf.savefig(fig, pad_inches=0.2)
    #     plt.close()
    def plot_segments(self,count, df, segments, plot_title, num_states=7, remove_y=False):
        """Plot one cell's copy-number profile and save it as <count>.png
        in the sibling ``Pdfs`` directory (changes cwd there and back)."""
        if df is not None and remove_y:
            df = df[df['chr'] != 'Y']
        # standard: 15,4
        # SA501X3F xenograft heatmap: 20.4, 4
        fig = plt.figure(figsize=(15, 4))
        ax = fig.gca()
        ax = utl.create_chromosome_plot_axes(ax, self.args.ref_genome)
        ax.set_title(plot_title)
        ax.set_ylabel('Copy number')
        # HMMcopy states are 1-based; labels shown to the user are 0-based.
        segment_states = range(1, num_states + 1)
        segment_labels = [str(x) for x in range(num_states)]
        segment_labels[-1] = segment_labels[-1] + ' or more'
        segment_colours = ['#006ba4', '#5f9ed1', '#ababab', '#ffbc79', '#ff800e', '#c85200', '#8c3900']
        # we pass None if we don't have data
        if df is not None and segments is not None:
            cols = df['state']
            cols = cols.replace(segment_states, segment_colours)
            cols = cols[~df['copy'].isnull()]
            plt.scatter(df['plot_coord'], df['integer_copy_scale'], facecolors=cols, edgecolors='none', s=4)
            x, y = utl.get_segment_start_end(segments, remove_y)
            plt.plot(x, y, color='black', linewidth=1)
        ax.set_ylim((0, 14))
        sns.despine(offset=10, trim=True)
        ax.tick_params(axis='x', which='minor', pad=9.1)
        ax.legend = utl.add_legend(ax, segment_labels, segment_colours, num_states, type='rectangle',
                                   location='upper center')
        ax = utl.add_open_grid_lines(ax)
        # This is where the png are appended to the existing ones in Pdfs directory
        os.chdir("..")
        os.chdir("Pdfs")
        plt.tight_layout()
        plt.savefig(str(count) + ".png")
        os.chdir("..")
        os.chdir("Classifier_2")
        # self.segs_pdf.savefig(fig, pad_inches=0.2)
        plt.close()
    def get_sample_data(self, df, sample, norm=False):
        """Extract one cell's rows (None if absent); optionally normalize
        the reads and add genome-wide plotting coordinates."""
        if sample not in df.groups:
            return None
        df = df.get_group(sample)
        if norm:
            df = utl.normalize_reads(df)
        df = utl.compute_chromosome_coordinates(df, self.args.ref_genome)
        return df
    def check_mad_score(self, sample, metrics):
        """Return False when the cell's MAD is NaN or above the configured
        threshold; always True when mad_threshold is 0 (unset)."""
        mad = self.get_mad_score(sample, metrics)
        # if mad_threshold is set to nonzero.
        # zero is defaults and means mad_threshold is not set. so no filtering
        if self.args.mad_threshold:
            if math.isnan(mad):
                return False
            if mad > self.args.mad_threshold:
                return False
        return True
    def main(self):
        """
        Drive the whole run: plot every (non-filtered) cell's segments as a
        numbered PNG and write a cell_id/sample_id CSV one directory up.
        Note: takes no parameters besides self -- all inputs come from args.
        """
        metrics = self.read_quality_metrics()
        reads = self.read_corrected_reads()
        segs = self.read_segments()
        if self.args.samples:
            samples = self.args.samples
        else:
            samples = self.get_sample_ids(reads, metrics)
        os.chdir("..")
        os.chdir("Pdfs")
        count = 0
        # Currently at directory Pdfs to get the count which will be used to append the new png files created
        for file in os.listdir(str(os.getcwd())):
            if file.endswith(".png"):
                count += 1
        d_dic = {}
        df_cell_id = []
        df_sample_id = []
        os.chdir("..")
        os.chdir("Classifier_2")
        # Read the csv in Classifier identified by args.qiualitymetrics to plot
        df2 = pd.read_csv(self.args.quality_metrics)
        for sample in samples[:]:
            for d in df2.itertuples():
                if d.cell_id == sample:
                    cell_i = d.cell_id
                    sample_i = d.sample_id
                    print sample + " CSV-" + cell_i
                    df_cell_id.append(cell_i)
                    df_sample_id.append(sample_i)
                    # NOTE(review): rebuilt on every append; only the final
                    # value is used when the DataFrame is written below.
                    d_dic = {'cell_id': df_cell_id, 'sample_id':df_sample_id}
            plot_title = self.get_plot_title(sample, metrics)
            # If the check_mad returns false: filter it
            if not self.check_mad_score(sample, metrics):
                continue
            # extract the data for the sample we're plotting
            reads_samp = self.get_sample_data(reads, sample, norm=True)
            segs_samp = self.get_sample_data(segs, sample)
            # Count is the title of each plot
            self.plot_segments(count,reads_samp, segs_samp, plot_title,
                               num_states=self.args.num_states)
            count +=1
        output_csv = self.args.quality_metrics.split("_")[0]
        print output_csv
        os.chdir("..")
        df = pd.DataFrame(d_dic)
        # TODO change csv
        df.to_csv(output_csv+"test"+ ".csv")
        os.chdir("Classifier_2")
if __name__ == '__main__':
    args = parse_args()
    genhmm = GenHmmPlots(args)
    # main() takes no arguments besides self (GenHmmPlots already holds
    # `args`); calling genhmm.main(args) raised TypeError.
    genhmm.main()
|
[
"nafis_abrar@ymail.com"
] |
nafis_abrar@ymail.com
|
96ac30f1a003ff428f3114d2fa3a8ef208f66ed3
|
7fe8acb05e1436bae23ae73eb8ab02a4504b7ba0
|
/test/unit/mocker.py
|
89fd268dc127fdc6f5e04701d12cdd1b3b788ef7
|
[
"Apache-2.0"
] |
permissive
|
henrysher/opslib
|
1463749e952c30222d4d2077ad0efb3dbca9ea28
|
157be1342cf24297ef6b79cbe320207e610c9c81
|
refs/heads/master
| 2020-04-05T23:29:53.264552
| 2014-06-03T15:15:33
| 2014-06-03T15:15:33
| 14,837,390
| 1
| 2
|
Apache-2.0
| 2018-03-05T17:26:48
| 2013-12-01T11:12:30
|
Python
|
UTF-8
|
Python
| false
| false
| 83,725
|
py
|
"""
Mocker
Graceful platform for test doubles in Python: mocks, stubs, fakes, and dummies.
Copyright (c) 2007-2010, Gustavo Niemeyer <gustavo@niemeyer.net>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import __builtin__
import tempfile
import unittest
import inspect
import shutil
import types
import sys
import os
import re
import gc
if sys.version_info < (2, 4):
from sets import Set as set # pragma: nocover
__all__ = ["Mocker", "Expect", "expect", "IS", "CONTAINS", "IN", "MATCH",
"ANY", "ARGS", "KWARGS", "MockerTestCase"]
__credits__ = """
Written by Gustavo Niemeyer <gustavo@niemeyer.net>
Maintained by Zygmunt Krynicki <zygmunt.krynicki@linaro.org>"""
__license__ = "BSD"
__version__ = "1.1.1"
ERROR_PREFIX = "[Mocker] "
# --------------------------------------------------------------------
# Exceptions
class MatchError(AssertionError):
    """Raised when an unknown expression is seen in playback mode.

    Derives from AssertionError so that test runners report it as a test
    failure rather than an error.
    """
# --------------------------------------------------------------------
# Helper for chained-style calling.
class expect(object):
    """This is a simple helper that allows a different call-style.

    With this class one can comfortably do chaining of calls to the
    mocker object responsible by the object being handled. For instance::

        expect(obj.attr).result(3).count(1, 2)

    Is the same as::

        obj.attr
        mocker.result(3)
        mocker.count(1, 2)
    """
    # When set (see Expect()), forces which mocker receives the calls;
    # otherwise the mock's own __mocker__ is used.
    __mocker__ = None
    def __init__(self, mock, attr=None):
        # mock: the mock object whose mocker will receive the chained calls.
        # attr: name of the mocker method this instance will invoke.
        self._mock = mock
        self._attr = attr
    def __getattr__(self, attr):
        # Attribute access returns a NEW expect bound to that mocker-method
        # name, which is what keeps the chain going.
        return self.__class__(self._mock, attr)
    def __call__(self, *args, **kwargs):
        # Invoke the selected method on the mocker, then return self so
        # further calls can be chained.
        mocker = self.__mocker__
        if not mocker:
            mocker = self._mock.__mocker__
        getattr(mocker, self._attr)(*args, **kwargs)
        return self
def Expect(mocker):
    """Create an expect() "function" using the given Mocker instance.

    This helper allows defining an expect() "function" which works even
    in trickier cases such as:

        expect = Expect(mymocker)
        expect(iter(mock)).generate([1, 2, 3])
    """
    namespace = {"__mocker__": mocker}
    return type("Expect", (expect,), namespace)
# --------------------------------------------------------------------
# Extensions to Python's unittest.
class MockerTestCase(unittest.TestCase):
    """unittest.TestCase subclass with Mocker support.

    @ivar mocker: The mocker instance.

    This is a convenience only. Mocker may easily be used with the
    standard C{unittest.TestCase} class if wanted.

    Test methods have a Mocker instance available on C{self.mocker}.
    At the end of each test method, expectations of the mocker will
    be verified, and any requested changes made to the environment
    will be restored.

    In addition to the integration with Mocker, this class provides
    a few additional helper methods.
    """
    def __init__(self, methodName="runTest"):
        # So here is the trick: we take the real test method, wrap it on
        # a function that do the job we have to do, and insert it in the
        # *instance* dictionary, so that getattr() will return our
        # replacement rather than the class method.
        test_method = getattr(self, methodName, None)
        if test_method is not None:
            def test_method_wrapper():
                try:
                    result = test_method()
                except:
                    raise
                else:
                    # Fail fast if the test recorded events but forgot to
                    # switch the mocker into replay mode.
                    if (self.mocker.is_recording() and
                        self.mocker.get_events()):
                        raise RuntimeError("Mocker must be put in replay "
                                           "mode with self.mocker.replay()")
                    # Deferred-like results (Twisted) are verified from a
                    # callback; plain results are verified immediately.
                    if (hasattr(result, "addCallback") and
                        hasattr(result, "addErrback")):
                        def verify(result):
                            self.mocker.verify()
                            return result
                        result.addCallback(verify)
                    else:
                        self.mocker.verify()
                    # restore() runs in both branches, right away.
                    self.mocker.restore()
                    return result
            # Copy all attributes from the original method..
            for attr in dir(test_method):
                # .. unless they're present in our wrapper already.
                if not hasattr(test_method_wrapper, attr) or attr == "__doc__":
                    setattr(test_method_wrapper, attr,
                            getattr(test_method, attr))
            setattr(self, methodName, test_method_wrapper)
        # We could overload run() normally, but other well-known testing
        # frameworks do it as well, and some of them won't call the super,
        # which might mean that cleanup wouldn't happen. With that in mind,
        # we make integration easier by using the following trick.
        run_method = self.run
        def run_wrapper(*args, **kwargs):
            try:
                return run_method(*args, **kwargs)
            finally:
                self.__cleanup()
        self.run = run_wrapper
        self.mocker = Mocker()
        self.expect = Expect(self.mocker)
        self.__cleanup_funcs = []
        self.__cleanup_paths = []
        super(MockerTestCase, self).__init__(methodName)
    def __call__(self, *args, **kwargs):
        # This is necessary for Python 2.3 only, because it didn't use run(),
        # which is supported above.
        try:
            super(MockerTestCase, self).__call__(*args, **kwargs)
        finally:
            if sys.version_info < (2, 4):
                self.__cleanup()
    def __cleanup(self):
        # Remove temporary files/dirs, reset the mocker, then run any
        # user-registered cleanup callbacks.
        for path in self.__cleanup_paths:
            if os.path.isfile(path):
                os.unlink(path)
            elif os.path.isdir(path):
                shutil.rmtree(path)
        self.mocker.reset()
        for func, args, kwargs in self.__cleanup_funcs:
            func(*args, **kwargs)
    def addCleanup(self, func, *args, **kwargs):
        # Register a callback to run after the test (mirrors unittest 2.7+).
        self.__cleanup_funcs.append((func, args, kwargs))
    def makeFile(self, content=None, suffix="", prefix="tmp", basename=None,
                 dirname=None, path=None):
        """Create a temporary file and return the path to it.

        @param content: Initial content for the file.
        @param suffix: Suffix to be given to the file's basename.
        @param prefix: Prefix to be given to the file's basename.
        @param basename: Full basename for the file.
        @param dirname: Put file inside this directory.

        The file is removed after the test runs.
        """
        if path is not None:
            self.__cleanup_paths.append(path)
        elif basename is not None:
            if dirname is None:
                dirname = tempfile.mkdtemp()
                self.__cleanup_paths.append(dirname)
            path = os.path.join(dirname, basename)
        else:
            fd, path = tempfile.mkstemp(suffix, prefix, dirname)
            self.__cleanup_paths.append(path)
            os.close(fd)
            # With no content requested, only the path is wanted -- remove
            # the file mkstemp just created.
            if content is None:
                os.unlink(path)
        if content is not None:
            file = open(path, "w")
            file.write(content)
            file.close()
        return path
    def makeDir(self, suffix="", prefix="tmp", dirname=None, path=None):
        """Create a temporary directory and return the path to it.

        @param suffix: Suffix to be given to the file's basename.
        @param prefix: Prefix to be given to the file's basename.
        @param dirname: Put directory inside this parent directory.

        The directory is removed after the test runs.
        """
        if path is not None:
            os.makedirs(path)
        else:
            path = tempfile.mkdtemp(suffix, prefix, dirname)
        self.__cleanup_paths.append(path)
        return path
    def failUnlessIs(self, first, second, msg=None):
        """Assert that C{first} is the same object as C{second}."""
        if first is not second:
            raise self.failureException(msg or "%r is not %r" % (first, second))
    def failIfIs(self, first, second, msg=None):
        """Assert that C{first} is not the same object as C{second}."""
        if first is second:
            raise self.failureException(msg or "%r is %r" % (first, second))
    def failUnlessIn(self, first, second, msg=None):
        """Assert that C{first} is contained in C{second}."""
        if first not in second:
            raise self.failureException(msg or "%r not in %r" % (first, second))
    def failUnlessStartsWith(self, first, second, msg=None):
        """Assert that C{first} starts with C{second}."""
        if first[:len(second)] != second:
            raise self.failureException(msg or "%r doesn't start with %r" %
                                        (first, second))
    def failIfStartsWith(self, first, second, msg=None):
        """Assert that C{first} doesn't start with C{second}."""
        if first[:len(second)] == second:
            raise self.failureException(msg or "%r starts with %r" %
                                        (first, second))
    def failUnlessEndsWith(self, first, second, msg=None):
        """Assert that C{first} ends with C{second}."""
        if first[len(first)-len(second):] != second:
            raise self.failureException(msg or "%r doesn't end with %r" %
                                        (first, second))
    def failIfEndsWith(self, first, second, msg=None):
        """Assert that C{first} doesn't end with C{second}."""
        if first[len(first)-len(second):] == second:
            raise self.failureException(msg or "%r ends with %r" %
                                        (first, second))
    def failIfIn(self, first, second, msg=None):
        """Assert that C{first} is not contained in C{second}."""
        if first in second:
            raise self.failureException(msg or "%r in %r" % (first, second))
    def failUnlessApproximates(self, first, second, tolerance, msg=None):
        """Assert that C{first} is near C{second} by at most C{tolerance}."""
        if abs(first - second) > tolerance:
            raise self.failureException(msg or "abs(%r - %r) > %r" %
                                        (first, second, tolerance))
    def failIfApproximates(self, first, second, tolerance, msg=None):
        """Assert that C{first} is far from C{second} by at least C{tolerance}.
        """
        if abs(first - second) <= tolerance:
            raise self.failureException(msg or "abs(%r - %r) <= %r" %
                                        (first, second, tolerance))
    def failUnlessMethodsMatch(self, first, second):
        """Assert that public methods in C{first} are present in C{second}.

        This method asserts that all public methods found in C{first} are also
        present in C{second} and accept the same arguments. C{first} may
        have its own private methods, though, and may not have all methods
        found in C{second}. Note that if a private method in C{first} matches
        the name of one in C{second}, their specification is still compared.

        This is useful to verify if a fake or stub class have the same API as
        the real class being simulated.
        """
        first_methods = dict(inspect.getmembers(first, inspect.ismethod))
        second_methods = dict(inspect.getmembers(second, inspect.ismethod))
        for name, first_method in first_methods.iteritems():
            first_argspec = inspect.getargspec(first_method)
            first_formatted = inspect.formatargspec(*first_argspec)
            second_method = second_methods.get(name)
            if second_method is None:
                if name[:1] == "_":
                    continue # First may have its own private methods.
                raise self.failureException("%s.%s%s not present in %s" %
                    (first.__name__, name, first_formatted, second.__name__))
            second_argspec = inspect.getargspec(second_method)
            if first_argspec != second_argspec:
                second_formatted = inspect.formatargspec(*second_argspec)
                raise self.failureException("%s.%s%s != %s.%s%s" %
                    (first.__name__, name, first_formatted,
                     second.__name__, name, second_formatted))
    def failUnlessRaises(self, excClass, *args, **kwargs):
        """
        Fail unless an exception of class excClass is thrown by callableObj
        when invoked with arguments args and keyword arguments kwargs. If a
        different type of exception is thrown, it will not be caught, and the
        test case will be deemed to have suffered an error, exactly as for an
        unexpected exception. It returns the exception instance if it matches
        the given exception class.

        This may also be used as a context manager when provided with a single
        argument, as such:

        with self.failUnlessRaises(ExcClass):
            logic_which_should_raise()
        """
        return self.failUnlessRaisesRegexp(excClass, None, *args, **kwargs)
    def failUnlessRaisesRegexp(self, excClass, regexp, *args, **kwargs):
        """
        Fail unless an exception of class excClass is thrown by callableObj
        when invoked with arguments args and keyword arguments kwargs, and
        the str(error) value matches the provided regexp. If a different type
        of exception is thrown, it will not be caught, and the test case will
        be deemed to have suffered an error, exactly as for an unexpected
        exception. It returns the exception instance if it matches the given
        exception class.

        This may also be used as a context manager when provided with a single
        argument, as such:

        with self.failUnlessRaisesRegexp(ExcClass, "something like.*happened"):
            logic_which_should_raise()
        """
        def match_regexp(error):
            error_str = str(error)
            if regexp is not None and not re.search(regexp, error_str):
                raise self.failureException("%r doesn't match %r" %
                                            (error_str, regexp))
        excName = self.__class_name(excClass)
        if args:
            callableObj = args[0]
            try:
                result = callableObj(*args[1:], **kwargs)
            # NOTE: Python 2 'except' syntax below.
            except excClass, e:
                match_regexp(e)
                return e
            else:
                raise self.failureException("%s not raised (%r returned)" %
                                            (excName, result))
        else:
            test = self
            class AssertRaisesContextManager(object):
                def __enter__(self):
                    return self
                def __exit__(self, type, value, traceback):
                    self.exception = value
                    if value is None:
                        raise test.failureException("%s not raised" % excName)
                    elif isinstance(value, excClass):
                        match_regexp(value)
                        # Returning True suppresses the expected exception.
                        return True
            return AssertRaisesContextManager()
    def __class_name(self, cls):
        return getattr(cls, "__name__", str(cls))
    def failUnlessIsInstance(self, obj, cls, msg=None):
        """Assert that isinstance(obj, cls)."""
        if not isinstance(obj, cls):
            if msg is None:
                msg = "%r is not an instance of %s" % \
                      (obj, self.__class_name(cls))
            raise self.failureException(msg)
    def failIfIsInstance(self, obj, cls, msg=None):
        """Assert that isinstance(obj, cls) is False."""
        if isinstance(obj, cls):
            if msg is None:
                msg = "%r is an instance of %s" % \
                      (obj, self.__class_name(cls))
            raise self.failureException(msg)
    assertIs = failUnlessIs
    assertIsNot = failIfIs
    assertIn = failUnlessIn
    assertNotIn = failIfIn
    assertStartsWith = failUnlessStartsWith
    assertNotStartsWith = failIfStartsWith
    assertEndsWith = failUnlessEndsWith
    assertNotEndsWith = failIfEndsWith
    assertApproximates = failUnlessApproximates
    assertNotApproximates = failIfApproximates
    assertMethodsMatch = failUnlessMethodsMatch
    assertRaises = failUnlessRaises
    assertRaisesRegexp = failUnlessRaisesRegexp
    assertIsInstance = failUnlessIsInstance
    assertIsNotInstance = failIfIsInstance
    assertNotIsInstance = failIfIsInstance # Poor choice in 2.7/3.2+.
    # The following are missing in Python < 2.4.
    assertTrue = unittest.TestCase.failUnless
    assertFalse = unittest.TestCase.failIf
    # The following is provided for compatibility with Twisted's trial.
    assertIdentical = assertIs
    assertNotIdentical = assertIsNot
    failUnlessIdentical = failUnlessIs
    failIfIdentical = failIfIs
# --------------------------------------------------------------------
# Mocker.
class classinstancemethod(object):
    """Descriptor yielding a method bound to both its class and instance.

    Accessing the attribute returns a callable which invokes the wrapped
    function as ``method(cls, obj, *args, **kwargs)``.
    """

    def __init__(self, method):
        # The raw function; must accept (cls, obj, ...) as leading arguments.
        self.method = method

    def __get__(self, obj, cls=None):
        wrapped = self.method

        def call(*args, **kwargs):
            return wrapped(cls, obj, *args, **kwargs)

        return call
class MockerBase(object):
"""Controller of mock objects.
A mocker instance is used to command recording and replay of
expectations on any number of mock objects.
Expectations should be expressed for the mock object while in
record mode (the initial one) by using the mock object itself,
and using the mocker (and/or C{expect()} as a helper) to define
additional behavior for each event. For instance::
mock = mocker.mock()
mock.hello()
mocker.result("Hi!")
mocker.replay()
assert mock.hello() == "Hi!"
mock.restore()
mock.verify()
In this short excerpt a mock object is being created, then an
expectation of a call to the C{hello()} method was recorded, and
when called the method should return the value C{10}. Then, the
mocker is put in replay mode, and the expectation is satisfied by
calling the C{hello()} method, which indeed returns 10. Finally,
a call to the L{restore()} method is performed to undo any needed
changes made in the environment, and the L{verify()} method is
called to ensure that all defined expectations were met.
The same logic can be expressed more elegantly using the
C{with mocker:} statement, as follows::
mock = mocker.mock()
mock.hello()
mocker.result("Hi!")
with mocker:
assert mock.hello() == "Hi!"
Also, the MockerTestCase class, which integrates the mocker on
a unittest.TestCase subclass, may be used to reduce the overhead
of controlling the mocker. A test could be written as follows::
class SampleTest(MockerTestCase):
def test_hello(self):
mock = self.mocker.mock()
mock.hello()
self.mocker.result("Hi!")
self.mocker.replay()
self.assertEquals(mock.hello(), "Hi!")
"""
    # Class-level list of recorder functions; the metaclass below gives each
    # subclass its own independent copy.
    _recorders = []
    # For convenience only.
    on = expect
    class __metaclass__(type):
        # Python 2 metaclass hook (has no effect under Python 3).
        def __init__(self, name, bases, dict):
            # Make independent lists on each subclass, inheriting from parent.
            self._recorders = list(getattr(self, "_recorders", ()))
    def __init__(self):
        # Per-instance copies so one mocker's state never leaks into another.
        self._recorders = self._recorders[:]
        self._events = []
        self._recording = True
        self._ordering = False
        self._last_orderer = None
    def is_recording(self):
        """Return True if in recording mode, False if in replay mode.

        Recording is the initial state.
        """
        return self._recording
def replay(self):
"""Change to replay mode, where recorded events are reproduced.
If already in replay mode, the mocker will be restored, with all
expectations reset, and then put again in replay mode.
An alternative and more comfortable way to replay changes is
using the 'with' statement, as follows::
mocker = Mocker()
<record events>
with mocker:
<reproduce events>
The 'with' statement will automatically put mocker in replay
mode, and will also verify if all events were correctly reproduced
at the end (using L{verify()}), and also restore any changes done
in the environment (with L{restore()}).
Also check the MockerTestCase class, which integrates the
unittest.TestCase class with mocker.
"""
if not self._recording:
for event in self._events:
event.restore()
else:
self._recording = False
for event in self._events:
event.replay()
def restore(self):
"""Restore changes in the environment, and return to recording mode.
This should always be called after the test is complete (succeeding
or not). There are ways to call this method automatically on
completion (e.g. using a C{with mocker:} statement, or using the
L{MockerTestCase} class.
"""
if not self._recording:
self._recording = True
for event in self._events:
event.restore()
def reset(self):
"""Reset the mocker state.
This will restore environment changes, if currently in replay
mode, and then remove all events previously recorded.
"""
if not self._recording:
self.restore()
self.unorder()
del self._events[:]
def get_events(self):
"""Return all recorded events."""
return self._events[:]
def add_event(self, event):
"""Add an event.
This method is used internally by the implementation, and
shouldn't be needed on normal mocker usage.
"""
self._events.append(event)
if self._ordering:
orderer = event.add_task(Orderer(event.path))
if self._last_orderer:
orderer.add_dependency(self._last_orderer)
self._last_orderer = orderer
return event
def verify(self):
"""Check if all expectations were met, and raise AssertionError if not.
The exception message will include a nice description of which
expectations were not met, and why.
"""
errors = []
for event in self._events:
try:
event.verify()
except AssertionError, e:
error = str(e)
if not error:
raise RuntimeError("Empty error message from %r"
% event)
errors.append(error)
if errors:
message = [ERROR_PREFIX + "Unmet expectations:", ""]
for error in errors:
lines = error.splitlines()
message.append("=> " + lines.pop(0))
message.extend([" " + line for line in lines])
message.append("")
raise AssertionError(os.linesep.join(message))
def mock(self, spec_and_type=None, spec=None, type=None,
name=None, count=True):
"""Return a new mock object.
@param spec_and_type: Handy positional argument which sets both
spec and type.
@param spec: Method calls will be checked for correctness against
the given class.
@param type: If set, the Mock's __class__ attribute will return
the given type. This will make C{isinstance()} calls
on the object work.
@param name: Name for the mock object, used in the representation of
expressions. The name is rarely needed, as it's usually
guessed correctly from the variable name used.
@param count: If set to false, expressions may be executed any number
of times, unless an expectation is explicitly set using
the L{count()} method. By default, expressions are
expected once.
"""
if spec_and_type is not None:
spec = type = spec_and_type
return Mock(self, spec=spec, type=type, name=name, count=count)
def proxy(self, object, spec=True, type=True, name=None, count=True,
passthrough=True):
"""Return a new mock object which proxies to the given object.
Proxies are useful when only part of the behavior of an object
is to be mocked. Unknown expressions may be passed through to
the real implementation implicitly (if the C{passthrough} argument
is True), or explicitly (using the L{passthrough()} method
on the event).
@param object: Real object to be proxied, and replaced by the mock
on replay mode. It may also be an "import path",
such as C{"time.time"}, in which case the object
will be the C{time} function from the C{time} module.
@param spec: Method calls will be checked for correctness against
the given object, which may be a class or an instance
where attributes will be looked up. Defaults to the
the C{object} parameter. May be set to None explicitly,
in which case spec checking is disabled. Checks may
also be disabled explicitly on a per-event basis with
the L{nospec()} method.
@param type: If set, the Mock's __class__ attribute will return
the given type. This will make C{isinstance()} calls
on the object work. Defaults to the type of the
C{object} parameter. May be set to None explicitly.
@param name: Name for the mock object, used in the representation of
expressions. The name is rarely needed, as it's usually
guessed correctly from the variable name used.
@param count: If set to false, expressions may be executed any number
of times, unless an expectation is explicitly set using
the L{count()} method. By default, expressions are
expected once.
@param passthrough: If set to False, passthrough of actions on the
proxy to the real object will only happen when
explicitly requested via the L{passthrough()}
method.
"""
if isinstance(object, basestring):
if name is None:
name = object
import_stack = object.split(".")
attr_stack = []
while import_stack:
module_path = ".".join(import_stack)
try:
__import__(module_path)
except ImportError:
attr_stack.insert(0, import_stack.pop())
if not import_stack:
raise
continue
else:
object = sys.modules[module_path]
for attr in attr_stack:
object = getattr(object, attr)
break
if isinstance(object, types.UnboundMethodType):
object = object.im_func
if spec is True:
spec = object
if type is True:
type = __builtin__.type(object)
return Mock(self, spec=spec, type=type, object=object,
name=name, count=count, passthrough=passthrough)
def replace(self, object, spec=True, type=True, name=None, count=True,
passthrough=True):
"""Create a proxy, and replace the original object with the mock.
On replay, the original object will be replaced by the returned
proxy in all dictionaries found in the running interpreter via
the garbage collecting system. This should cover module
namespaces, class namespaces, instance namespaces, and so on.
@param object: Real object to be proxied, and replaced by the mock
on replay mode. It may also be an "import path",
such as C{"time.time"}, in which case the object
will be the C{time} function from the C{time} module.
@param spec: Method calls will be checked for correctness against
the given object, which may be a class or an instance
where attributes will be looked up. Defaults to the
the C{object} parameter. May be set to None explicitly,
in which case spec checking is disabled. Checks may
also be disabled explicitly on a per-event basis with
the L{nospec()} method.
@param type: If set, the Mock's __class__ attribute will return
the given type. This will make C{isinstance()} calls
on the object work. Defaults to the type of the
C{object} parameter. May be set to None explicitly.
@param name: Name for the mock object, used in the representation of
expressions. The name is rarely needed, as it's usually
guessed correctly from the variable name used.
@param passthrough: If set to False, passthrough of actions on the
proxy to the real object will only happen when
explicitly requested via the L{passthrough()}
method.
"""
mock = self.proxy(object, spec, type, name, count, passthrough)
event = self._get_replay_restore_event()
event.add_task(ProxyReplacer(mock))
return mock
def patch(self, object, spec=True):
"""Patch an existing object to reproduce recorded events.
@param object: Class or instance to be patched.
@param spec: Method calls will be checked for correctness against
the given object, which may be a class or an instance
where attributes will be looked up. Defaults to the
the C{object} parameter. May be set to None explicitly,
in which case spec checking is disabled. Checks may
also be disabled explicitly on a per-event basis with
the L{nospec()} method.
The result of this method is still a mock object, which can be
used like any other mock object to record events. The difference
is that when the mocker is put on replay mode, the *real* object
will be modified to behave according to recorded expectations.
Patching works in individual instances, and also in classes.
When an instance is patched, recorded events will only be
considered on this specific instance, and other instances should
behave normally. When a class is patched, the reproduction of
events will be considered on any instance of this class once
created (collectively).
Observe that, unlike with proxies which catch only events done
through the mock object, *all* accesses to recorded expectations
will be considered; even these coming from the object itself
(e.g. C{self.hello()} is considered if this method was patched).
While this is a very powerful feature, and many times the reason
to use patches in the first place, it's important to keep this
behavior in mind.
Patching of the original object only takes place when the mocker
is put on replay mode, and the patched object will be restored
to its original state once the L{restore()} method is called
(explicitly, or implicitly with alternative conventions, such as
a C{with mocker:} block, or a MockerTestCase class).
"""
if spec is True:
spec = object
patcher = Patcher()
event = self._get_replay_restore_event()
event.add_task(patcher)
mock = Mock(self, object=object, patcher=patcher,
passthrough=True, spec=spec)
patcher.patch_attr(object, '__mocker_mock__', mock)
return mock
def act(self, path):
"""This is called by mock objects whenever something happens to them.
This method is part of the interface between the mocker
and mock objects.
"""
if self._recording:
event = self.add_event(Event(path))
for recorder in self._recorders:
recorder(self, event)
return Mock(self, path)
else:
# First run events that may run, then run unsatisfied events, then
# ones not previously run. We put the index in the ordering tuple
# instead of the actual event because we want a stable sort
# (ordering between 2 events is undefined).
events = self._events
order = [(events[i].satisfied()*2 + events[i].has_run(), i)
for i in range(len(events))]
order.sort()
postponed = None
for weight, i in order:
event = events[i]
if event.matches(path):
if event.may_run(path):
return event.run(path)
elif postponed is None:
postponed = event
if postponed is not None:
return postponed.run(path)
raise MatchError(ERROR_PREFIX + "Unexpected expression: %s" % path)
def get_recorders(cls, self):
"""Return recorders associated with this mocker class or instance.
This method may be called on mocker instances and also on mocker
classes. See the L{add_recorder()} method for more information.
"""
return (self or cls)._recorders[:]
get_recorders = classinstancemethod(get_recorders)
def add_recorder(cls, self, recorder):
"""Add a recorder to this mocker class or instance.
@param recorder: Callable accepting C{(mocker, event)} as parameters.
This is part of the implementation of mocker.
All registered recorders are called for translating events that
happen during recording into expectations to be met once the state
is switched to replay mode.
This method may be called on mocker instances and also on mocker
classes. When called on a class, the recorder will be used by
all instances, and also inherited on subclassing. When called on
instances, the recorder is added only to the given instance.
"""
(self or cls)._recorders.append(recorder)
return recorder
add_recorder = classinstancemethod(add_recorder)
def remove_recorder(cls, self, recorder):
"""Remove the given recorder from this mocker class or instance.
This method may be called on mocker classes and also on mocker
instances. See the L{add_recorder()} method for more information.
"""
(self or cls)._recorders.remove(recorder)
remove_recorder = classinstancemethod(remove_recorder)
def result(self, value):
"""Make the last recorded event return the given value on replay.
@param value: Object to be returned when the event is replayed.
"""
self.call(lambda *args, **kwargs: value)
def generate(self, sequence):
"""Last recorded event will return a generator with the given sequence.
@param sequence: Sequence of values to be generated.
"""
def generate(*args, **kwargs):
for value in sequence:
yield value
self.call(generate)
def throw(self, exception):
"""Make the last recorded event raise the given exception on replay.
@param exception: Class or instance of exception to be raised.
"""
def raise_exception(*args, **kwargs):
raise exception
self.call(raise_exception)
def call(self, func, with_object=False):
"""Make the last recorded event cause the given function to be called.
@param func: Function to be called.
@param with_object: If True, the called function will receive the
patched or proxied object so that its state may be used or verified
in checks.
The result of the function will be used as the event result.
"""
event = self._events[-1]
if with_object and event.path.root_object is None:
raise TypeError("Mock object isn't a proxy")
event.add_task(FunctionRunner(func, with_root_object=with_object))
def count(self, min, max=False):
"""Last recorded event must be replayed between min and max times.
@param min: Minimum number of times that the event must happen.
@param max: Maximum number of times that the event must happen. If
not given, it defaults to the same value of the C{min}
parameter. If set to None, there is no upper limit, and
the expectation is met as long as it happens at least
C{min} times.
"""
event = self._events[-1]
for task in event.get_tasks():
if isinstance(task, RunCounter):
event.remove_task(task)
event.prepend_task(RunCounter(min, max))
def is_ordering(self):
"""Return true if all events are being ordered.
See the L{order()} method.
"""
return self._ordering
def unorder(self):
"""Disable the ordered mode.
See the L{order()} method for more information.
"""
self._ordering = False
self._last_orderer = None
def order(self, *path_holders):
"""Create an expectation of order between two or more events.
@param path_holders: Objects returned as the result of recorded events.
By default, mocker won't force events to happen precisely in
the order they were recorded. Calling this method will change
this behavior so that events will only match if reproduced in
the correct order.
There are two ways in which this method may be used. Which one
is used in a given occasion depends only on convenience.
If no arguments are passed, the mocker will be put in a mode where
all the recorded events following the method call will only be met
if they happen in order. When that's used, the mocker may be put
back in unordered mode by calling the L{unorder()} method, or by
using a 'with' block, like so::
with mocker.ordered():
<record events>
In this case, only expressions in <record events> will be ordered,
and the mocker will be back in unordered mode after the 'with' block.
The second way to use it is by specifying precisely which events
should be ordered. As an example::
mock = mocker.mock()
expr1 = mock.hello()
expr2 = mock.world
expr3 = mock.x.y.z
mocker.order(expr1, expr2, expr3)
This method of ordering only works when the expression returns
another object.
Also check the L{after()} and L{before()} methods, which are
alternative ways to perform this.
"""
if not path_holders:
self._ordering = True
return OrderedContext(self)
last_orderer = None
for path_holder in path_holders:
if type(path_holder) is Path:
path = path_holder
else:
path = path_holder.__mocker_path__
for event in self._events:
if event.path is path:
for task in event.get_tasks():
if isinstance(task, Orderer):
orderer = task
break
else:
orderer = Orderer(path)
event.add_task(orderer)
if last_orderer:
orderer.add_dependency(last_orderer)
last_orderer = orderer
break
def after(self, *path_holders):
"""Last recorded event must happen after events referred to.
@param path_holders: Objects returned as the result of recorded events
which should happen before the last recorded event
As an example, the idiom::
expect(mock.x).after(mock.y, mock.z)
is an alternative way to say::
expr_x = mock.x
expr_y = mock.y
expr_z = mock.z
mocker.order(expr_y, expr_x)
mocker.order(expr_z, expr_x)
See L{order()} for more information.
"""
last_path = self._events[-1].path
for path_holder in path_holders:
self.order(path_holder, last_path)
def before(self, *path_holders):
"""Last recorded event must happen before events referred to.
@param path_holders: Objects returned as the result of recorded events
which should happen after the last recorded event
As an example, the idiom::
expect(mock.x).before(mock.y, mock.z)
is an alternative way to say::
expr_x = mock.x
expr_y = mock.y
expr_z = mock.z
mocker.order(expr_x, expr_y)
mocker.order(expr_x, expr_z)
See L{order()} for more information.
"""
last_path = self._events[-1].path
for path_holder in path_holders:
self.order(last_path, path_holder)
def nospec(self):
"""Don't check method specification of real object on last event.
By default, when using a mock created as the result of a call to
L{proxy()}, L{replace()}, and C{patch()}, or when passing the spec
attribute to the L{mock()} method, method calls on the given object
are checked for correctness against the specification of the real
object (or the explicitly provided spec).
This method will disable that check specifically for the last
recorded event.
"""
event = self._events[-1]
for task in event.get_tasks():
if isinstance(task, SpecChecker):
event.remove_task(task)
def passthrough(self, result_callback=None):
"""Make the last recorded event run on the real object once seen.
@param result_callback: If given, this function will be called with
the result of the *real* method call as the only argument.
This can only be used on proxies, as returned by the L{proxy()}
and L{replace()} methods, or on mocks representing patched objects,
as returned by the L{patch()} method.
"""
event = self._events[-1]
if event.path.root_object is None:
raise TypeError("Mock object isn't a proxy")
event.add_task(PathExecuter(result_callback))
def __enter__(self):
"""Enter in a 'with' context. This will run replay()."""
self.replay()
return self
def __exit__(self, type, value, traceback):
"""Exit from a 'with' context.
This will run restore() at all times, but will only run verify()
if the 'with' block itself hasn't raised an exception. Exceptions
in that block are never swallowed.
"""
self.restore()
if type is None:
self.verify()
return False
def _get_replay_restore_event(self):
"""Return unique L{ReplayRestoreEvent}, creating if needed.
Some tasks only want to replay/restore. When that's the case,
they shouldn't act on other events during replay. Also, they
can all be put in a single event when that's the case. Thus,
we add a single L{ReplayRestoreEvent} as the first element of
the list.
"""
if not self._events or type(self._events[0]) != ReplayRestoreEvent:
self._events.insert(0, ReplayRestoreEvent())
return self._events[0]
class OrderedContext(object):
    """Context manager handed out by Mocker.order() when called with no
    arguments; leaving the C{with} block turns ordered mode back off.
    """
    def __init__(self, mocker):
        self._mocker = mocker
    def __enter__(self):
        # Ordering was already switched on by Mocker.order() itself, so
        # entering the block requires no further work.
        return None
    def __exit__(self, type, value, traceback):
        # Unconditionally leave ordered mode, even on exceptions; returning
        # None (falsy) lets any exception propagate.
        self._mocker.unorder()
class Mocker(MockerBase):
    # Concrete mocker class. Thanks to MockerBase's metaclass this class
    # gets its own _recorders list, so recorders registered through the
    # decorator below attach here rather than to MockerBase itself.
    __doc__ = MockerBase.__doc__
# Decorator to add recorders on the standard Mocker class.
recorder = Mocker.add_recorder
# --------------------------------------------------------------------
# Mock object.
class Mock(object):
    """Double that records expressions while the mocker is recording, and
    reproduces recorded behavior while it is replaying.
    All internal state is kept in C{__mocker_*__} attributes so that any
    other attribute access can be forwarded to the mocker through
    L{__mocker_act__}.
    """
    def __init__(self, mocker, path=None, name=None, spec=None, type=None,
                 object=None, passthrough=False, patcher=None, count=True):
        self.__mocker__ = mocker
        self.__mocker_path__ = path or Path(self, object)
        self.__mocker_name__ = name
        self.__mocker_spec__ = spec
        self.__mocker_object__ = object
        self.__mocker_passthrough__ = passthrough
        self.__mocker_patcher__ = patcher
        self.__mocker_replace__ = False
        self.__mocker_type__ = type
        self.__mocker_count__ = count
    def __mocker_act__(self, kind, args=(), kwargs={}, object=None):
        """Build an Action of the given kind, extend this mock's path with
        it, and hand the resulting path to the mocker for recording or
        replaying.
        """
        if self.__mocker_name__ is None:
            # Lazily guess a display name from the caller's local variables.
            self.__mocker_name__ = find_object_name(self, 2)
        action = Action(kind, args, kwargs, self.__mocker_path__)
        path = self.__mocker_path__ + action
        if object is not None:
            path.root_object = object
        try:
            return self.__mocker__.act(path)
        except MatchError, exception:
            root_mock = path.root_mock
            if (path.root_object is not None and
                root_mock.__mocker_passthrough__):
                # Unknown expression on a passthrough proxy: run it on the
                # real object instead of failing.
                return path.execute(path.root_object)
            # Reinstantiate to show raise statement on traceback, and
            # also to make the traceback shown shorter.
            raise MatchError(str(exception))
        except AssertionError, e:
            # Reformat task failures into a readable multi-line report.
            lines = str(e).splitlines()
            message = [ERROR_PREFIX + "Unmet expectation:", ""]
            message.append("=> " + lines.pop(0))
            message.extend([" " + line for line in lines])
            message.append("")
            raise AssertionError(os.linesep.join(message))
    def __getattribute__(self, name):
        # Internal state is served directly; everything else goes through
        # the mocker as a "getattr" action.
        if name.startswith("__mocker_"):
            return super(Mock, self).__getattribute__(name)
        if name == "__class__":
            # In replay mode, impersonate the configured type so that
            # isinstance() checks on the mock succeed.
            if self.__mocker__.is_recording() or self.__mocker_type__ is None:
                return type(self)
            return self.__mocker_type__
        if name == "__length_hint__":
            # This is used by Python 2.6+ to optimize the allocation
            # of arrays in certain cases. Pretend it doesn't exist.
            raise AttributeError("No __length_hint__ here!")
        return self.__mocker_act__("getattr", (name,))
    def __setattr__(self, name, value):
        if name.startswith("__mocker_"):
            return super(Mock, self).__setattr__(name, value)
        return self.__mocker_act__("setattr", (name, value))
    def __delattr__(self, name):
        return self.__mocker_act__("delattr", (name,))
    def __call__(self, *args, **kwargs):
        return self.__mocker_act__("call", args, kwargs)
    def __contains__(self, value):
        return self.__mocker_act__("contains", (value,))
    def __getitem__(self, key):
        return self.__mocker_act__("getitem", (key,))
    def __setitem__(self, key, value):
        return self.__mocker_act__("setitem", (key, value))
    def __delitem__(self, key):
        return self.__mocker_act__("delitem", (key,))
    def __len__(self):
        # MatchError is turned on an AttributeError so that list() and
        # friends act properly when trying to get length hints on
        # something that doesn't offer them.
        try:
            result = self.__mocker_act__("len")
        except MatchError, e:
            raise AttributeError(str(e))
        if type(result) is Mock:
            # No recorded result: return a harmless default length.
            return 0
        return result
    def __nonzero__(self):
        try:
            result = self.__mocker_act__("nonzero")
        except MatchError, e:
            # Truth testing an unexpected mock defaults to True.
            return True
        if type(result) is Mock:
            return True
        return result
    def __iter__(self):
        # XXX On py3k, when next() becomes __next__(), we'll be able
        # to return the mock itself because it will be considered
        # an iterator (we'll be mocking __next__ as well, which we
        # can't now).
        result = self.__mocker_act__("iter")
        if type(result) is Mock:
            return iter([])
        return result
    # When adding a new action kind here, also add support for it on
    # Action.execute() and Path.__str__().
# When adding a new action kind here, also add support for it on
# Action.execute() and Path.__str__().
def find_object_name(obj, depth=0):
    """Try to detect how the object is named on a previous scope.
    Scans the local variables of the frame C{depth+1} levels up the call
    stack (and, if that frame has a C{self}, its instance attributes) for
    a name bound to C{obj} by identity. Returns the name, or None when
    nothing matching is found or introspection isn't possible.
    """
    try:
        frame = sys._getframe(depth+1)
    except:
        # Best effort: sys._getframe() may be missing or fail on some
        # Python implementations; naming is purely cosmetic anyway.
        return None
    for name, frame_obj in frame.f_locals.iteritems():
        if frame_obj is obj:
            return name
    self = frame.f_locals.get("self")
    if self is not None:
        try:
            items = list(self.__dict__.iteritems())
        except:
            # Objects without a usable __dict__ simply can't be inspected.
            pass
        else:
            for name, self_obj in items:
                if self_obj is obj:
                    return name
    return None
# --------------------------------------------------------------------
# Action and path.
class Action(object):
    """A single recorded operation (attribute access, call, item access,
    and so on) on a mock, optionally linked to the path it extends.
    """
    def __init__(self, kind, args, kwargs, path=None):
        self.kind = kind
        self.args = args
        self.kwargs = kwargs
        self.path = path
        # Maps id(object) -> result so that re-executing the same action
        # on the same object always yields the very same value.
        self._execute_cache = {}
    def __repr__(self):
        details = (self.kind, self.args, self.kwargs)
        if self.path is not None:
            return "Action(%r, %r, %r, %r)" % (details + (self.path,))
        return "Action(%r, %r, %r)" % details
    def __eq__(self, other):
        # Actions compare equal when kind and both argument sets match
        # exactly; the path is deliberately ignored.
        if self.kind != other.kind:
            return False
        return self.args == other.args and self.kwargs == other.kwargs
    def __ne__(self, other):
        return not (self == other)
    def matches(self, other):
        # Like __eq__, but honoring special arguments (ANY, ARGS, ...).
        if self.kind != other.kind:
            return False
        return match_params(self.args, self.kwargs, other.args, other.kwargs)
    def execute(self, object):
        """Perform this action on the given object and return the result.
        Results are cached per object id; see the note below.
        """
        # This caching scheme may fail if the object gets deallocated before
        # the action, as the id might get reused. It's somewhat easy to fix
        # that with a weakref callback. For our uses, though, the object
        # should never get deallocated before the action itself, so we'll
        # just keep it simple.
        cache_key = id(object)
        if cache_key in self._execute_cache:
            return self._execute_cache[cache_key]
        execute = getattr(object, "__mocker_execute__", None)
        if execute is not None:
            # The object knows how to execute actions itself (e.g. a mock).
            result = execute(self, object)
        else:
            kind = self.kind
            args, kwargs = self.args, self.kwargs
            if kind == "getattr":
                result = getattr(object, args[0])
            elif kind == "setattr":
                result = setattr(object, args[0], args[1])
            elif kind == "delattr":
                result = delattr(object, args[0])
            elif kind == "call":
                result = object(*args, **kwargs)
            elif kind == "contains":
                result = args[0] in object
            elif kind == "getitem":
                result = object[args[0]]
            elif kind == "setitem":
                object[args[0]] = args[1]
                result = args[1]
            elif kind == "delitem":
                del object[args[0]]
                result = None
            elif kind == "len":
                result = len(object)
            elif kind == "nonzero":
                result = bool(object)
            elif kind == "iter":
                result = iter(object)
            else:
                raise RuntimeError("Don't know how to execute %r kind." % kind)
        self._execute_cache[cache_key] = result
        return result
class Path(object):
    """Sequence of Actions performed starting at a given mock.
    A path uniquely describes an expression such as C{mock.attr(1)[2]},
    and can be compared, matched against other paths, executed on a real
    object, or rendered back as source-like text.
    """
    def __init__(self, root_mock, root_object=None, actions=()):
        self.root_mock = root_mock
        self.root_object = root_object
        self.actions = tuple(actions)
        self.__mocker_replace__ = False
    def parent_path(self):
        # Path of this path minus its last action; None for a root path.
        if not self.actions:
            return None
        return self.actions[-1].path
    parent_path = property(parent_path)
    def __add__(self, action):
        """Return a new path which includes the given action at the end."""
        return self.__class__(self.root_mock, self.root_object,
                              self.actions + (action,))
    def __eq__(self, other):
        """Verify if the two paths are equal.
        Two paths are equal if they refer to the same mock object, and
        have the actions with equal kind, args and kwargs.
        """
        if (self.root_mock is not other.root_mock or
            self.root_object is not other.root_object or
            len(self.actions) != len(other.actions)):
            return False
        for action, other_action in zip(self.actions, other.actions):
            if action != other_action:
                return False
        return True
    def matches(self, other):
        """Verify if the two paths are equivalent.
        Two paths are equal if they refer to the same mock object, and
        have the same actions performed on them.
        """
        if (self.root_mock is not other.root_mock or
            len(self.actions) != len(other.actions)):
            return False
        for action, other_action in zip(self.actions, other.actions):
            # Unlike __eq__, this honors special arguments (ANY, ARGS, ...).
            if not action.matches(other_action):
                return False
        return True
    def execute(self, object):
        """Execute all actions sequentially on object, and return result.
        """
        for action in self.actions:
            object = action.execute(object)
        return object
    def __str__(self):
        """Transform the path into a nice string such as obj.x.y('z')."""
        result = self.root_mock.__mocker_name__ or "<mock>"
        for action in self.actions:
            if action.kind == "getattr":
                result = "%s.%s" % (result, action.args[0])
            elif action.kind == "setattr":
                result = "%s.%s = %r" % (result, action.args[0], action.args[1])
            elif action.kind == "delattr":
                result = "del %s.%s" % (result, action.args[0])
            elif action.kind == "call":
                args = [repr(x) for x in action.args]
                # Sort keyword arguments for a deterministic rendering.
                items = list(action.kwargs.iteritems())
                items.sort()
                for pair in items:
                    args.append("%s=%r" % pair)
                result = "%s(%s)" % (result, ", ".join(args))
            elif action.kind == "contains":
                result = "%r in %s" % (action.args[0], result)
            elif action.kind == "getitem":
                result = "%s[%r]" % (result, action.args[0])
            elif action.kind == "setitem":
                result = "%s[%r] = %r" % (result, action.args[0],
                                          action.args[1])
            elif action.kind == "delitem":
                result = "del %s[%r]" % (result, action.args[0])
            elif action.kind == "len":
                result = "len(%s)" % result
            elif action.kind == "nonzero":
                result = "bool(%s)" % result
            elif action.kind == "iter":
                result = "iter(%s)" % result
            else:
                raise RuntimeError("Don't know how to format kind %r" %
                                   action.kind)
        return result
class SpecialArgument(object):
    """Base class for the magic argument markers used when matching
    recorded parameters against replayed ones.
    """
    def __init__(self, object=None):
        self.object = object
    def __repr__(self):
        name = self.__class__.__name__
        if self.object is None:
            return name
        return "%s(%r)" % (name, self.object)
    def matches(self, other):
        # The base marker accepts anything; subclasses narrow this down.
        return True
    def __eq__(self, other):
        # Equal only to an instance of the exact same class wrapping an
        # equal object.
        return type(self) == type(other) and self.object == other.object
class ANY(SpecialArgument):
    """Matches any single argument."""
# Each class is immediately replaced by its singleton instance: the name
# is meant to be used directly inside recorded expressions.
ANY = ANY()
class ARGS(SpecialArgument):
    """Matches zero or more positional arguments."""
ARGS = ARGS()
class KWARGS(SpecialArgument):
    """Matches zero or more keyword arguments."""
KWARGS = KWARGS()
class IS(SpecialArgument):
    """Matches only the exact same object (identity comparison)."""
    def matches(self, other):
        return other is self.object
    def __eq__(self, other):
        if type(self) != type(other):
            return False
        return self.object is other.object
class CONTAINS(SpecialArgument):
    """Matches any container holding the wrapped object."""
    def matches(self, other):
        try:
            other.__contains__
        except AttributeError:
            try:
                iter(other)
            except TypeError:
                # Without a __contains__ hook and without being iterable,
                # the `in` test below would blow up; fail the match early
                # instead of letting unrelated errors escape.
                return False
        return self.object in other
class IN(SpecialArgument):
    """Matches any argument found inside the wrapped container."""
    def matches(self, other):
        return other in self.object
class MATCH(SpecialArgument):
    """Matches any argument accepted by the wrapped predicate callable."""
    def matches(self, other):
        return bool(self.object(other))
    def __eq__(self, other):
        if type(self) != type(other):
            return False
        return self.object is other.object
def match_params(args1, kwargs1, args2, kwargs2):
    """Match the two sets of parameters, considering special parameters.
    C{args1}/C{kwargs1} are the recorded (expected) parameters, possibly
    containing special markers (ANY, ARGS, KWARGS, ...); C{args2}/C{kwargs2}
    are the parameters of the replayed expression. Returns True when they
    are compatible.
    """
    has_args = ARGS in args1
    has_kwargs = KWARGS in args1
    # KWARGS is given positionally on recording; strip it from args1.
    if has_kwargs:
        args1 = [arg1 for arg1 in args1 if arg1 is not KWARGS]
    elif len(kwargs1) != len(kwargs2):
        return False
    if not has_args and len(args1) != len(args2):
        return False
    # Either we have the same number of kwargs, or unknown keywords are
    # accepted (KWARGS was used), so check just the ones in kwargs1.
    for key, arg1 in kwargs1.iteritems():
        if key not in kwargs2:
            return False
        arg2 = kwargs2[key]
        if isinstance(arg1, SpecialArgument):
            if not arg1.matches(arg2):
                return False
        elif arg1 != arg2:
            return False
    # Keywords match. Now either we have the same number of
    # arguments, or ARGS was used. If ARGS wasn't used, arguments
    # must match one-on-one necessarily.
    if not has_args:
        for arg1, arg2 in zip(args1, args2):
            if isinstance(arg1, SpecialArgument):
                if not arg1.matches(arg2):
                    return False
            elif arg1 != arg2:
                return False
        return True
    # Easy choice. Keywords are matching, and anything on args is accepted.
    if (ARGS,) == args1:
        return True
    # We have something different there. If we don't have positional
    # arguments on the original call, it can't match.
    if not args2:
        # Unless we have just several ARGS (which is bizarre, but..).
        for arg1 in args1:
            if arg1 is not ARGS:
                return False
        return True
    # Ok, all bets are lost. We have to actually do the more expensive
    # matching. This is an algorithm based on the idea of the Levenshtein
    # Distance between two strings, but heavily hacked for this purpose.
    # ARGS entries behave like a "*" wildcard; `array` holds, per position
    # of args2, whether a valid alignment reaching that position exists
    # (0 means yes).
    args2l = len(args2)
    if args1[0] is ARGS:
        args1 = args1[1:]
        array = [0]*args2l
    else:
        array = [1]*args2l
    for i in range(len(args1)):
        last = array[0]
        if args1[i] is ARGS:
            for j in range(1, args2l):
                last, array[j] = array[j], min(array[j-1], array[j], last)
        else:
            array[0] = i or int(args1[i] != args2[0])
            for j in range(1, args2l):
                last, array[j] = array[j], last or int(args1[i] != args2[j])
        if 0 not in array:
            # No alignment survives; fail early.
            return False
    if array[-1] != 0:
        return False
    return True
# --------------------------------------------------------------------
# Event and task base.
class Event(object):
    """Aggregation of tasks that keep track of a recorded action.

    An event represents something that may or may not happen while the
    mocked environment is running, such as an attribute access, or a
    method call. The event is composed of several tasks that are
    orchestrated together to create a composed meaning for the event,
    including for which actions it should be run, what happens when it
    runs, and what's the expectations about the actions run.
    """
    def __init__(self, path=None):
        # Path of the recorded expression this event stands for.
        self.path = path
        self._tasks = []
        self._has_run = False
    def add_task(self, task):
        """Append a new task to this event and return it."""
        self._tasks.append(task)
        return task
    def prepend_task(self, task):
        """Add a task at the front of the list."""
        self._tasks.insert(0, task)
        return task
    def remove_task(self, task):
        """Remove the given task from this event."""
        self._tasks.remove(task)
    def replace_task(self, old_task, new_task):
        """Replace old_task with new_task, in the same position."""
        for i in range(len(self._tasks)):
            if self._tasks[i] is old_task:
                self._tasks[i] = new_task
        return new_task
    def get_tasks(self):
        # Shallow copy so callers may mutate the event while iterating.
        return self._tasks[:]
    def matches(self, path):
        """Return true if *all* tasks match the given path.

        An event with no tasks at all never matches.
        """
        for task in self._tasks:
            if not task.matches(path):
                return False
        return bool(self._tasks)
    def has_run(self):
        """Return true if this event has run since the last replay()."""
        return self._has_run
    def may_run(self, path):
        """Verify if any task would certainly raise an error if run.
        This will call the C{may_run()} method on each task and return
        false if any of them returns false.
        """
        for task in self._tasks:
            if not task.may_run(path):
                return False
        return True
    def run(self, path):
        """Run all tasks with the given action.
        @param path: The path of the expression run.
        Running an event means running all of its tasks individually and in
        order. An event should only ever be run if all of its tasks claim to
        match the given action.
        The result of this method will be the last result of a task
        which isn't None, or None if they're all None.

        @raise AssertionError: aggregating every task failure, one per line.
        """
        self._has_run = True
        result = None
        errors = []
        for task in self._tasks:
            # Once an error was recorded, skip tasks that may run user
            # code: their failures would be side effects, not the cause.
            if not errors or not task.may_run_user_code():
                try:
                    task_result = task.run(path)
                except AssertionError, e:
                    error = str(e)
                    if not error:
                        raise RuntimeError("Empty error message from %r" % task)
                    errors.append(error)
                else:
                    # XXX That's actually a bit weird. What if a call() really
                    # returned None? This would improperly change the semantic
                    # of this process without any good reason. Test that with two
                    # call()s in sequence.
                    if task_result is not None:
                        result = task_result
        if errors:
            message = [str(self.path)]
            if str(path) != message[0]:
                message.append("- Run: %s" % path)
            for error in errors:
                lines = error.splitlines()
                message.append("- " + lines.pop(0))
                message.extend([" " + line for line in lines])
            raise AssertionError(os.linesep.join(message))
        return result
    def satisfied(self):
        """Return true if all tasks are satisfied.
        Being satisfied means that there are no unmet expectations.
        """
        for task in self._tasks:
            try:
                task.verify()
            except AssertionError:
                return False
        return True
    def verify(self):
        """Run verify on all tasks.
        The verify method is supposed to raise an AssertionError if the
        task has unmet expectations, with a one-line explanation about
        why this item is unmet. This method should be safe to be called
        multiple times without side effects.
        """
        errors = []
        for task in self._tasks:
            try:
                task.verify()
            except AssertionError, e:
                error = str(e)
                if not error:
                    raise RuntimeError("Empty error message from %r" % task)
                errors.append(error)
        if errors:
            message = [str(self.path)]
            for error in errors:
                lines = error.splitlines()
                message.append("- " + lines.pop(0))
                message.extend([" " + line for line in lines])
            raise AssertionError(os.linesep.join(message))
    def replay(self):
        """Put all tasks in replay mode."""
        self._has_run = False
        for task in self._tasks:
            task.replay()
    def restore(self):
        """Restore the state of all tasks."""
        for task in self._tasks:
            task.restore()
class ReplayRestoreEvent(Event):
    """Event that never matches any action path.

    Useful for tasks which only need the replay/restore hooks of the
    event machinery without ever participating in action matching.
    """
    def matches(self, path):
        # Intentionally refuse every path.
        return False
class Task(object):
    """Base element used to track one specific aspect of an event.

    A task adds some kind of logic to an event: counting how many
    times the event happened, checking call parameters, and so on.
    Subclasses override whichever hooks they need; the defaults here
    are permissive no-ops.
    """
    def matches(self, path):
        """Return true if the task is supposed to be run for the given path."""
        return True
    def may_run(self, path):
        """Return false if running this task would certainly raise an error."""
        return True
    def may_run_user_code(self):
        """Return true if there's a chance this task may run custom code.

        Whenever errors are detected, running user code should be
        avoided, because the situation is already known to be
        incorrect, and any errors in the user code are side effects
        rather than the cause.
        """
        return False
    def run(self, path):
        """Perform the task item, considering that the given action happened."""
    def verify(self):
        """Raise AssertionError if expectations for this item are unmet.

        Must be safe to call multiple times without side effects.
        """
    def replay(self):
        """Put the task in replay mode; any expectations are reset."""
    def restore(self):
        """Restore any environmental changes made by the task.

        verify() must keep working after this is called.
        """
# --------------------------------------------------------------------
# Task implementations.
class OnRestoreCaller(Task):
    """Task that invokes a user-supplied callback when restoring."""
    def __init__(self, callback):
        self._on_restore = callback
    def restore(self):
        # Delegate restoration work to the provided callable.
        self._on_restore()
class PathMatcher(Task):
    """Task matching the acted path against the recorded one."""
    def __init__(self, path):
        self.path = path
    def matches(self, path):
        # The recorded path object implements its own matching logic.
        return self.path.matches(path)
def path_matcher_recorder(mocker, event):
    """Default recorder: every event must match its own recorded path."""
    event.add_task(PathMatcher(event.path))
# Register so every new event gets a PathMatcher automatically.
Mocker.add_recorder(path_matcher_recorder)
class RunCounter(Task):
    """Task which verifies if the number of runs are within given boundaries.

    The max argument uses sentinel values: False (the default) means
    "exactly min times"; None means "min or more, unbounded"; any
    other value is the literal maximum.
    """
    def __init__(self, min, max=False):
        self.min = min
        if max is None:
            self.max = sys.maxint
        elif max is False:
            self.max = min
        else:
            self.max = max
        self._runs = 0
    def replay(self):
        # Counting starts over each time the mocker enters replay mode.
        self._runs = 0
    def may_run(self, path):
        return self._runs < self.max
    def run(self, path):
        self._runs += 1
        if self._runs > self.max:
            # Fail eagerly as soon as the maximum is exceeded.
            self.verify()
    def verify(self):
        if not self.min <= self._runs <= self.max:
            if self._runs < self.min:
                raise AssertionError("Performed fewer times than expected.")
            raise AssertionError("Performed more times than expected.")
class ImplicitRunCounter(RunCounter):
    """RunCounter inserted by default on any event.

    This is a way to differentiate explicitly added counters from
    implicit ones, so that implicit counters can later be removed
    (see run_counter_removal_recorder).
    """
def run_counter_recorder(mocker, event):
    """Any event may be repeated once, unless disabled by default."""
    # __mocker_count__ is the per-mock switch for implicit counting.
    if event.path.root_mock.__mocker_count__:
        # Rather than appending the task, we prepend it so that the
        # issue is raised before any other side-effects happen.
        event.prepend_task(ImplicitRunCounter(1))
Mocker.add_recorder(run_counter_recorder)
def run_counter_removal_recorder(mocker, event):
    """
    Events created by getattr actions which lead to other events
    may be repeated any number of times. For that, we remove implicit
    run counters of any getattr actions leading to the current one.
    """
    parent_path = event.path.parent_path
    # Renamed from 'event' to avoid shadowing the parameter above.
    for earlier_event in mocker.get_events()[::-1]:
        if (earlier_event.path is parent_path and
            earlier_event.path.actions[-1].kind == "getattr"):
            # get_tasks() returns a copy, so removal while looping is safe.
            for task in earlier_event.get_tasks():
                if type(task) is ImplicitRunCounter:
                    earlier_event.remove_task(task)
Mocker.add_recorder(run_counter_removal_recorder)
class MockReturner(Task):
    """Return a mock based on the action path."""
    def __init__(self, mocker):
        self.mocker = mocker
    def run(self, path):
        # Hand back a child mock so that chained expressions keep
        # being recorded under the same mocker.
        return Mock(self.mocker, path)
def mock_returner_recorder(mocker, event):
    """Events that lead to other events must return mock objects."""
    parent_path = event.path.parent_path
    for event in mocker.get_events():
        if event.path is parent_path:
            for task in event.get_tasks():
                if isinstance(task, MockReturner):
                    break
            else:
                # for/else: reached only when no MockReturner exists
                # yet on the parent event.
                event.add_task(MockReturner(mocker))
            break
Mocker.add_recorder(mock_returner_recorder)
class FunctionRunner(Task):
    """Task that runs a function every time the event runs.

    Arguments of the last action in the path are passed to the function,
    and the function result is also returned.
    """
    def __init__(self, func, with_root_object=False):
        self._func = func
        # When true, the real/root object is prepended to the call args.
        self._with_root_object = with_root_object
    def may_run_user_code(self):
        # This task executes arbitrary user-provided code.
        return True
    def run(self, path):
        action = path.actions[-1]
        if self._with_root_object:
            return self._func(path.root_object, *action.args, **action.kwargs)
        else:
            return self._func(*action.args, **action.kwargs)
class PathExecuter(Task):
    """Task that executes a path in the real object, and returns the result."""
    def __init__(self, result_callback=None):
        # Optional hook invoked with the result of every execution.
        self._result_callback = result_callback
    def get_result_callback(self):
        return self._result_callback
    def run(self, path):
        result = path.execute(path.root_object)
        if self._result_callback is not None:
            self._result_callback(result)
        return result
class Orderer(Task):
    """Task to establish an order relation between two events.

    An orderer task only matches once every one of its dependency
    orderers has already been run.
    """
    def __init__(self, path):
        self.path = path
        self._run = False
        self._dependencies = []
    def replay(self):
        self._run = False
    def has_run(self):
        return self._run
    def may_run(self, path):
        # Runnable only when every dependency has already fired.
        return all(dep.has_run() for dep in self._dependencies)
    def run(self, path):
        for dep in self._dependencies:
            if not dep.has_run():
                raise AssertionError("Should be after: %s" % dep.path)
        self._run = True
    def add_dependency(self, orderer):
        self._dependencies.append(orderer)
    def get_dependencies(self):
        return self._dependencies
class SpecChecker(Task):
    """Task to check if arguments of the last action conform to a real method.
    """
    def __init__(self, method):
        self._method = method
        self._unsupported = False
        if method:
            try:
                self._args, self._varargs, self._varkwargs, self._defaults = \
                    inspect.getargspec(method)
            except TypeError:
                # getargspec() fails for builtins; mark as uncheckable.
                self._unsupported = True
            else:
                if self._defaults is None:
                    self._defaults = ()
                if type(method) is type(self.run):
                    # Bound method: drop the implicit 'self' parameter.
                    self._args = self._args[1:]
    def get_method(self):
        return self._method
    def _raise(self, message):
        # Include the real signature in the failure for easier debugging.
        spec = inspect.formatargspec(self._args, self._varargs,
                                     self._varkwargs, self._defaults)
        raise AssertionError("Specification is %s%s: %s" %
                             (self._method.__name__, spec, message))
    def verify(self):
        if not self._method:
            raise AssertionError("Method not found in real specification")
    def may_run(self, path):
        # Probe run(); SpecChecker.run has no side effects, so this is safe.
        try:
            self.run(path)
        except AssertionError:
            return False
        return True
    def run(self, path):
        if not self._method:
            raise AssertionError("Method not found in real specification")
        if self._unsupported:
            return # Can't check it. Happens with builtin functions. :-(
        action = path.actions[-1]
        obtained_len = len(action.args)
        obtained_kwargs = action.kwargs.copy()
        # Index of the first parameter that has a default value.
        nodefaults_len = len(self._args) - len(self._defaults)
        for i, name in enumerate(self._args):
            if i < obtained_len and name in action.kwargs:
                self._raise("%r provided twice" % name)
            if (i >= obtained_len and i < nodefaults_len and
                name not in action.kwargs):
                self._raise("%r not provided" % name)
            obtained_kwargs.pop(name, None)
        if obtained_len > len(self._args) and not self._varargs:
            self._raise("too many args provided")
        if obtained_kwargs and not self._varkwargs:
            self._raise("unknown kwargs: %s" % ", ".join(obtained_kwargs))
def spec_checker_recorder(mocker, event):
    """Attach a SpecChecker when the mock was created with a spec.

    A lone "call" action is checked against the spec's __call__; a
    "getattr" followed by "call" is checked against the named method.
    """
    spec = event.path.root_mock.__mocker_spec__
    if spec:
        actions = event.path.actions
        if len(actions) == 1:
            if actions[0].kind == "call":
                method = getattr(spec, "__call__", None)
                event.add_task(SpecChecker(method))
        elif len(actions) == 2:
            if actions[0].kind == "getattr" and actions[1].kind == "call":
                method = getattr(spec, actions[0].args[0], None)
                event.add_task(SpecChecker(method))
Mocker.add_recorder(spec_checker_recorder)
class ProxyReplacer(Task):
    """Task which installs and deinstalls proxy mocks.
    This task will replace a real object by a mock in all dictionaries
    found in the running interpreter via the garbage collecting system.
    """
    def __init__(self, mock):
        self.mock = mock
        # Opt this instance's own __dict__ out of global_replace(), so
        # the reference kept here is never swapped.
        self.__mocker_replace__ = False
    def replay(self):
        global_replace(self.mock.__mocker_object__, self.mock)
    def restore(self):
        global_replace(self.mock, self.mock.__mocker_object__)
def global_replace(remove, install):
    """Replace object 'remove' with object 'install' on all dictionaries."""
    for referrer in gc.get_referrers(remove):
        # Only touch plain dicts (module/class/instance namespaces) that
        # have not opted out by setting __mocker_replace__ to False.
        if (type(referrer) is dict and
            referrer.get("__mocker_replace__", True)):
            # Copy items first: the dict is mutated inside the loop.
            for key, value in list(referrer.iteritems()):
                if value is remove:
                    referrer[key] = install
class Undefined(object):
    """Sentinel meaning "no value"; distinct from None, which may be real."""
    def __repr__(self):
        return "Undefined"
# Replace the class with its single instance: only one sentinel exists.
Undefined = Undefined()
class Patcher(Task):
    """Task that patches special methods on classes to intercept actions."""
    def __init__(self):
        super(Patcher, self).__init__()
        self._monitored = {} # {kind: {id(object): object}}
        self._patched = {}
    def is_monitoring(self, obj, kind):
        """Return true if obj (or a class in its MRO) is monitored for kind."""
        monitored = self._monitored.get(kind)
        if monitored:
            if id(obj) in monitored:
                return True
            cls = type(obj)
            if issubclass(cls, type):
                # obj is itself a class; inspect its own MRO.
                cls = obj
            bases = set([id(base) for base in cls.__mro__])
            bases.intersection_update(monitored)
            return bool(bases)
        return False
    def monitor(self, obj, kind):
        """Register obj so actions of the given kind are intercepted."""
        if kind not in self._monitored:
            self._monitored[kind] = {}
        self._monitored[kind][id(obj)] = obj
    def patch_attr(self, obj, attr, value):
        # Remember the previous value (or Undefined) so restore() can undo.
        original = obj.__dict__.get(attr, Undefined)
        self._patched[id(obj), attr] = obj, attr, original
        setattr(obj, attr, value)
    def get_unpatched_attr(self, obj, attr):
        """Look up attr as it was before patching, walking the MRO."""
        cls = type(obj)
        if issubclass(cls, type):
            cls = obj
        result = Undefined
        for mro_cls in cls.__mro__:
            key = (id(mro_cls), attr)
            if key in self._patched:
                # Use the saved original for classes we patched.
                result = self._patched[key][2]
                if result is not Undefined:
                    break
            elif attr in mro_cls.__dict__:
                result = mro_cls.__dict__.get(attr, Undefined)
                break
        if isinstance(result, object) and hasattr(type(result), "__get__"):
            # Bind descriptors (e.g. functions) before returning.
            if cls is obj:
                obj = None
            return result.__get__(obj, cls)
        return result
    def _get_kind_attr(self, kind):
        # Map an action kind to the special method implementing it;
        # "getattr" is intercepted via __getattribute__.
        if kind == "getattr":
            return "__getattribute__"
        return "__%s__" % kind
    def replay(self):
        # Install a PatchedMethod on each monitored class, once per class.
        for kind in self._monitored:
            attr = self._get_kind_attr(kind)
            seen = set()
            for obj in self._monitored[kind].itervalues():
                cls = type(obj)
                if issubclass(cls, type):
                    cls = obj
                if cls not in seen:
                    seen.add(cls)
                    unpatched = getattr(cls, attr, Undefined)
                    self.patch_attr(cls, attr,
                                    PatchedMethod(kind, unpatched,
                                                  self.is_monitoring))
                    self.patch_attr(cls, "__mocker_execute__",
                                    self.execute)
    def restore(self):
        # Put back (or delete) every attribute we replaced.
        for obj, attr, original in self._patched.itervalues():
            if original is Undefined:
                delattr(obj, attr)
            else:
                setattr(obj, attr, original)
        self._patched.clear()
    def execute(self, action, object):
        """Run action against the real (unpatched) implementation."""
        attr = self._get_kind_attr(action.kind)
        unpatched = self.get_unpatched_attr(object, attr)
        try:
            return unpatched(*action.args, **action.kwargs)
        except AttributeError:
            type, value, traceback = sys.exc_info()
            if action.kind == "getattr":
                # The normal behavior of Python is to try __getattribute__,
                # and if it raises AttributeError, try __getattr__. We've
                # tried the unpatched __getattribute__ above, and we'll now
                # try __getattr__.
                try:
                    __getattr__ = unpatched("__getattr__")
                except AttributeError:
                    pass
                else:
                    return __getattr__(*action.args, **action.kwargs)
            raise type, value, traceback
class PatchedMethod(object):
    """Descriptor installed by Patcher in place of a special method.

    Monitored instances are dispatched to their mock; everything else
    falls through to the original (unpatched) implementation.
    """
    def __init__(self, kind, unpatched, is_monitoring):
        self._kind = kind
        self._unpatched = unpatched
        self._is_monitoring = is_monitoring
    def __get__(self, obj, cls=None):
        object = obj or cls
        if not self._is_monitoring(object, self._kind):
            return self._unpatched.__get__(obj, cls)
        def method(*args, **kwargs):
            # Internal mocker attributes must bypass interception to
            # avoid infinite recursion through the mock itself.
            if self._kind == "getattr" and args[0].startswith("__mocker_"):
                return self._unpatched.__get__(obj, cls)(args[0])
            mock = object.__mocker_mock__
            return mock.__mocker_act__(self._kind, args, kwargs, object)
        return method
    def __call__(self, obj, *args, **kwargs):
        # At least with __getattribute__, Python seems to use *both* the
        # descriptor API and also call the class attribute directly. It
        # looks like an interpreter bug, or at least an undocumented
        # inconsistency. Coverage tests may show this uncovered, because
        # it depends on the Python version.
        return self.__get__(obj)(*args, **kwargs)
def patcher_recorder(mocker, event):
    """For patched mocks, monitor the real object on single-action events."""
    mock = event.path.root_mock
    if mock.__mocker_patcher__ and len(event.path.actions) == 1:
        patcher = mock.__mocker_patcher__
        patcher.monitor(mock.__mocker_object__, event.path.actions[0].kind)
Mocker.add_recorder(patcher_recorder)
|
[
"henry.s.huang@gmail.com"
] |
henry.s.huang@gmail.com
|
dfba60468f1f40421de06709e086d2707141c685
|
7513276b34a2c99d081376961c3b8482a3c1c072
|
/ex11.py
|
d3c84f6501f5842e6727e4e312b7c7ffc012ca06
|
[] |
no_license
|
AyeNandar/python-tutorials
|
5ad0a4e6b2c1893a7cc9c97ad100896835e1cd16
|
12c54b69b414e6ce1adce66f92065c8d4e0c49e8
|
refs/heads/master
| 2020-03-21T08:49:17.119862
| 2018-06-24T01:08:44
| 2018-06-24T01:08:44
| 138,367,279
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 236
|
py
|
# Prompt for three personal details, then echo them back in one sentence.
print("How old are you?", end=' ')
age = input()
print("How tall are you?", end=' ')
height = input()
print("How much do you weigh?", end=' ')
weight = input()
# Comma-separated print() args are joined by single spaces, which is why
# the literals below carry their own leading/trailing spaces.
print("So, you're ", age, " old, ", height," tall and ", weight," heavy.")
|
[
"andhandh1381@gmail.com"
] |
andhandh1381@gmail.com
|
019509507bc3b794d7db41c52c23548000aa61d3
|
317e0c1f1b9511ea47ab6c4c902cc8de3c316cc8
|
/5place.py
|
12f30a9dd7866a39a667f953b50874952a3c81f0
|
[] |
no_license
|
manigowtham19/manigowtham
|
63d7c90209a6bd791b67810cf93ce6208a2a36c8
|
159f99c8342659b4bb630148149a4263751c21bc
|
refs/heads/master
| 2021-09-10T00:22:47.230952
| 2018-03-20T10:55:40
| 2018-03-20T10:55:40
| 116,801,273
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 84
|
py
|
# Read a rectangle's dimensions and print its area rounded to 5 places.
length = float(input("Enter length:"))
breadth = float(input("Enter Breath:"))
print(round(length * breadth, 5))
|
[
"noreply@github.com"
] |
manigowtham19.noreply@github.com
|
934cc88a5a43e7fdf4fcb3bb9f9a0069f6465f0e
|
921c6ff6f7f31e0349314bc40426fff12107a705
|
/GeneralPython/PyDataStructure/pairsWithGivenSumInAList.py
|
9a348f8832808e9e31098e639ed458531bd2668a
|
[
"BSD-2-Clause"
] |
permissive
|
prashantas/MyDataScience
|
0828d5e443de9633fe1199ef4d13a7699b2ebffa
|
8db4f288b30840161c6422bde7c7a7770f85c09d
|
refs/heads/master
| 2021-06-02T12:06:45.609262
| 2019-04-25T03:03:25
| 2019-04-25T03:03:25
| 96,012,955
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,153
|
py
|
from collections import Counter
def getPairsCount(arr, sum):
    """Return how many index pairs (i, j), i < j, satisfy arr[i] + arr[j] == sum."""
    counts = dict(Counter(arr))
    doubled = 0
    for value in arr:
        complement = sum - value
        if counts.get(complement) is not None:
            doubled += counts[complement]
            # A value pairing with itself counted the (i, i) "pair" once;
            # subtract it so only distinct-index pairs remain.
            if complement == value:
                doubled -= 1
    # Every pair was counted from both ends, so halve the total.
    return int(doubled / 2)
def getPairs(arr, sum):
    """Return the (value, complement) pairs found while scanning arr.

    For each element, if its complement (sum - value) appeared earlier
    in the list, the pair is recorded; duplicates are kept in scan order.

    Bug fix: the original skipped negative complements (``temp >= 0``),
    silently missing pairs such as (8, -2) for sum 6.
    See https://www.geeksforgeeks.org/given-an-array-a-and-a-number-x-check-for-pair-in-a-with-sum-as-x/
    """
    seen = set()
    pair_list = []
    for value in arr:
        complement = sum - value
        if complement in seen:
            pair_list.append((value, complement))
        seen.add(value)
    return pair_list
if __name__ == "__main__":
    # Demo run over a small sample array with target sum 6.
    arr = [1, 5, 7, -1,2, 5,4 ]
    sum =6
    print("Count of pairs is", getPairsCount(arr, sum))
    print("#########################################")
    print("Pairs are::{}".format(getPairs(arr,sum)))
|
[
"noreply@github.com"
] |
prashantas.noreply@github.com
|
80bea6afe90cb72a4319b21a0ab980eb359be224
|
b1d3876aafc8730e8f3ec709561d83ff622cb63a
|
/2015/day15.py
|
711d70d08e3e90d1e18b19c06e3dc9e91875709d
|
[
"MIT"
] |
permissive
|
XtinaSchelin/adventofcode
|
da7d68a7ae71d4e0198b285a66c489fd02c2eb6f
|
41aa36c8226310ab35a61703cba6b8bbbffe8281
|
refs/heads/master
| 2022-11-11T23:00:49.042943
| 2020-06-15T00:59:30
| 2020-06-15T00:59:30
| 272,014,182
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,784
|
py
|
import itertools
import re
def do_day(day="a"):
    """Solve Advent of Code 2015 day 15 for part ``day`` ("a" or "b").

    Reads ingredient properties from ``d15.txt``, tries every split of
    100 teaspoons across the (exactly four) ingredients, and prints the
    day label and the best score.  Part "b" additionally requires the
    recipe to total exactly 500 calories.

    Improvements over the original: the file handle is closed via a
    ``with`` block, blank input lines no longer crash ``re.match``, and
    the fourth amount is derived from the first three (w+x+y+z must be
    100), replacing an O(100^4) loop with an O(100^3) one.  Unused
    ``amt_*`` trackers were dropped; output is unchanged.
    """
    with open("d15.txt", "r") as handle:
        inputs = handle.read().split("\n")
    ingredients = {}
    all_items = []
    patt = "^([^:]+): capacity ([^,]+), durability ([^,]+), flavor ([^,]+), texture ([^,]+), calories ([^,]+)$"
    for line in inputs:
        line = line.strip()
        if not line:
            # Skip blank lines (e.g. a trailing newline in the file).
            continue
        pieces = re.match(patt, line).groups()
        ingredients[pieces[0]] = {
            "capacity": int(pieces[1]),
            "durability": int(pieces[2]),
            "flavor": int(pieces[3]),
            "texture": int(pieces[4]),
            "calories": int(pieces[5]),
        }
        all_items.append(pieces[0])
    max_total = 0
    amounts = {}
    for w in range(1, 101):
        for x in range(1, 101):
            for y in range(1, 101):
                # The fourth amount is fully determined by the other three.
                z = 100 - w - x - y
                if z < 1:
                    continue
                amounts[all_items[0]] = w
                amounts[all_items[1]] = x
                amounts[all_items[2]] = y
                amounts[all_items[3]] = z
                total_cap = total_dur = total_flv = total_txt = total_cal = 0
                for item in all_items:
                    props = ingredients[item]
                    amt = amounts[item]
                    total_cap += props["capacity"] * amt
                    total_dur += props["durability"] * amt
                    total_flv += props["flavor"] * amt
                    total_txt += props["texture"] * amt
                    total_cal += props["calories"] * amt
                # Negative partial scores are clamped to zero per the puzzle.
                total_cap = max(total_cap, 0)
                total_dur = max(total_dur, 0)
                total_flv = max(total_flv, 0)
                total_txt = max(total_txt, 0)
                if total_cal == 500 or day == "a":
                    current_total = total_cap * total_dur * total_flv * total_txt
                    if current_total > max_total:
                        max_total = current_total
    print(day, max_total)
# Run both puzzle parts.
do_day(day="a")
do_day(day="b")
|
[
"xtina.schelin@gmail.com"
] |
xtina.schelin@gmail.com
|
405603cd97d46988b25e40dc010c932d72b492ff
|
49c21b230dd6e6ba9e0bd188e0481d09a7413e42
|
/readthedocs/organizations/signals.py
|
909d71316cf197e31e5f6a3e2ffc9cbda2620dc2
|
[
"MIT"
] |
permissive
|
InfinitzHost/readthedocs.org
|
1d828702d81ea5a25c52e571c819859237fb8faa
|
b311c732320a4469b474d7f859380bc4ee8cd432
|
refs/heads/master
| 2023-08-05T07:13:17.317371
| 2021-02-23T15:23:01
| 2021-02-23T15:23:01
| 341,595,013
| 1
| 0
|
MIT
| 2021-09-23T06:30:38
| 2021-02-23T15:13:15
|
Python
|
UTF-8
|
Python
| false
| false
| 2,151
|
py
|
"""Organization signals."""
import logging
from allauth.account.signals import user_signed_up
from django.db.models import Count
from django.db.models.signals import pre_delete
from django.dispatch import receiver
from readthedocs.builds.models import Version
from readthedocs.organizations.models import (
Organization,
Team,
TeamInvite,
TeamMember,
)
from readthedocs.projects.models import Project
log = logging.getLogger(__name__)
# pylint: disable=unused-argument
@receiver(user_signed_up)
def attach_org(sender, request, user, **kwargs):
    """Attach a freshly signed-up user to the team they were invited to.

    The invitation flow stores the team slug in the session under
    'team'; when present, the matching membership is created.
    """
    invited_slug = request.session.get('team')
    if not invited_slug:
        return
    invited_team = Team.objects.get(slug=invited_slug)
    TeamMember.objects.create(team=invited_team, member=user)
# pylint: disable=unused-argument
@receiver(pre_delete, sender=Organization)
def remove_organization_completely(sender, instance, using, **kwargs):
    """
    Remove Organization left-overs before the organization row is deleted.

    This includes:

    - Projects
    - Versions
    - Builds (deleted on cascade)
    - Teams
    - Team Invitations
    - Team Memberships
    - Artifacts (HTML, PDF, etc)
    """
    organization = instance
    log.info('Removing Organization %s completely', organization.slug)
    # ``Project`` has a ManyToMany relationship with ``Organization``. We need
    # to be sure that the projects we are deleting here belong only to the
    # organization being deleted (organizations__count=1 filters out
    # projects shared with other organizations).
    projects = Project.objects.annotate(
        Count('organizations'),
    ).filter(
        organizations__in=[organization],
        organizations__count=1,
    )
    versions = Version.objects.filter(project__in=projects)
    teams = Team.objects.filter(organization=organization)
    team_invites = TeamInvite.objects.filter(organization=organization)
    team_memberships = TeamMember.objects.filter(team__in=teams)
    # Bulk delete (no per-row signals needed for these).
    team_memberships.delete()
    team_invites.delete()
    teams.delete()
    # Granular delete that triggers other complex tasks.
    for version in versions:
        # Triggers a task to remove artifacts from storage.
        version.delete()
    projects.delete()
|
[
"stsewd@protonmail.com"
] |
stsewd@protonmail.com
|
b4c0a53258584a9d7357750b05cfeb7f6f680772
|
39fbf2339fc5f50d048f5b636aea896aa64ff231
|
/models.py
|
650b7619d47f48581a692968a3170a739b78bd40
|
[] |
no_license
|
jeong-tae/MovieRating-TF
|
de2f0b7c15c06abfd353d48b54893d9c6e0e279a
|
ca131bfd28656c4e484a9b258bf4509edeac8563
|
refs/heads/master
| 2021-01-01T18:20:07.476756
| 2017-07-27T10:51:11
| 2017-07-27T10:51:11
| 98,308,939
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 761
|
py
|
import numpy as np
import tensorflow as tf
def linear_fn(x, K = 20, stddev = 0., name = None):
    """Multiply ``x`` by a freshly created weight matrix of shape [dim, K].

    Args:
        x: Input tensor; must be at most rank 2 (assumed [batch, dim] —
           TODO confirm with callers).
        K: Output width of the learned projection.
        stddev: Stddev of the normal initializer for the weights.
        name: Optional name for the created variable.

    Returns:
        ``tf.matmul(x, w)`` with a trainable weight ``w``.

    Raises:
        ValueError: If ``x`` has rank greater than 2.  (The original
        raised a bare ``ValueError`` with no message.)
    """
    initializer = tf.random_normal_initializer(stddev = stddev)
    _shape = tf.cast(x, tf.float32).get_shape().as_list()
    if len(_shape) > 2:
        raise ValueError("linear_fn expects a tensor of rank <= 2, got shape %r" % (_shape,))
    w = tf.Variable(initializer([_shape[1], K]), name = name)
    return tf.matmul(x, w)
def bilinear(user, item):
    """Predicted rating: dot(U.user, V.item) plus per-side bias terms."""
    user_vec = linear_fn(user, K = 20, stddev = 0.02, name = 'U')
    user_bias = linear_fn(user, K = 1, stddev = 0.02, name = 'u')
    item_vec = linear_fn(item, K = 20, stddev = 0.02, name = 'V')
    item_bias = linear_fn(item, K = 1, stddev = 0.02, name = 'v')
    bias = user_bias + item_bias
    # Row-wise dot product of the two 20-d embeddings.
    rating = tf.reduce_sum(user_vec * item_vec, reduction_indices = 1)
    rating = tf.reshape(rating, [-1, 1])
    return rating + bias
|
[
"make8286@naver.com"
] |
make8286@naver.com
|
a416d2ebc401874a4eea8d07fd5b4d2df53f92e2
|
66a826499f82e59b6a001ae4682e1fbd867ddb60
|
/sandbox/apps/comment/models.py
|
16a048ec7b8ad271ad6f342223d21cd012c949cb
|
[
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference",
"BSD-2-Clause"
] |
permissive
|
aiegoo/django-blog
|
6c6e79828b2973045360752ef1880a44cd1f6588
|
f1c1de7435336825c97d6d1d030c53df400af73c
|
refs/heads/master
| 2023-06-14T01:45:43.639716
| 2020-09-29T21:18:28
| 2020-09-29T21:18:28
| 385,572,369
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,699
|
py
|
from django.db import models
from django.conf import settings
from apps.blog.models import Article
import markdown
import emoji
class Comment(models.Model):
    """Abstract base for comments: author, timestamp, content and threading."""
    author=models.ForeignKey(settings.AUTH_USER_MODEL, related_name='%(class) s_related', verbose_name='reviewer', on_delete=models.CASCADE)
    create_date=models.DateTimeField('Creation time', auto_now_add=True)
    content=models.TextField('Comment Content')
    # parent: root of the thread this comment belongs to (null for roots);
    # rep_to: the specific comment being replied to (may differ from parent).
    parent=models.ForeignKey('self', verbose_name='parent comment', related_name='%(class) s_child_comments', blank=True,
                             null=True, on_delete=models.CASCADE)
    rep_to=models.ForeignKey('self', verbose_name='Reply', related_name='%(class) s_rep_comments', blank=True, null=True, on_delete=models.CASCADE)
    class Meta:
        """Abstract model options: concrete comment models inherit these fields."""
        abstract=True
    def __str__(self):
        return self.content [:20]
    def content_to_markdown(self):
        # Convert emoji aliases first, then render markdown.  'escape'
        # safe_mode means any raw HTML in the comment is escaped, not
        # rendered (note: safe_mode is deprecated in newer markdown libs).
        to_emoji_content=emoji.emojize(self.content, use_aliases=True)
        to_md=markdown.markdown(to_emoji_content,
                                safe_mode='escape',
                                extensions=[
                                    'markdown.extensions.extra',
                                    'markdown.extensions.codehilite',
                                ])
        return to_md
class ArticleComment(Comment):
    """Concrete comment attached to a blog Article."""
    belong=models.ForeignKey(Article, related_name='article_comments', verbose_name='belonging articles', on_delete=models.CASCADE)
    class Meta:
        verbose_name='Post Comment'
        verbose_name_plural=verbose_name
        # Oldest first, so threads read top-down chronologically.
        ordering=['create_date']
class Notification(models.Model):
    """Read/unread notification created when a comment gets a reply."""
    create_p=models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name='Prompt creator', related_name='notification_create', on_delete=models.CASCADE)
    get_p=models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name='Prompt Recipient', related_name='notification_get', on_delete=models.CASCADE)
    comment=models.ForeignKey(ArticleComment, verbose_name='Owned Comment', related_name='the_comment', on_delete=models.CASCADE)
    create_date=models.DateTimeField('Hint time', auto_now_add=True)
    is_read=models.BooleanField('Whether read', default=False)
    def mark_to_read(self):
        """Mark this notification as read, saving only the changed column."""
        self.is_read=True
        self.save(update_fields=['is_read'])
    class Meta:
        verbose_name='Prompt Message'
        verbose_name_plural=verbose_name
        ordering=['-create_date']
    def __str__(self):
        # Bug fix: the original format string '() @ 了 {}' contained a
        # single placeholder for two arguments, so the recipient was
        # dropped and a literal "()" was rendered.
        return '{} @ {}'.format(self.create_p, self.get_p)
|
[
"pawanpaudel93@gmail.com"
] |
pawanpaudel93@gmail.com
|
3f4e4132225aa9e938818ad6e47cc6db321f9891
|
fa1a0c6285e18eabdb6d1dc4ee7584a4b9d7176b
|
/logistic regression for two classes.py
|
11d9a97aa12f981eb3ba40ab13bafebf840009bf
|
[] |
no_license
|
Mostafa202/logistic-regression-from-scratch
|
1294f399b7118e856ccb2dbe4bc6a69fd7b0cf73
|
f69b8d01ba022341cc49cbc1398ce9d8d9dc8c53
|
refs/heads/master
| 2023-02-14T18:07:44.392348
| 2021-01-11T17:46:35
| 2021-01-11T17:46:35
| 328,745,207
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,902
|
py
|
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
# Load the dataset: every column but the last is a feature, last is the label.
dataset=pd.read_csv("logistic.csv")
x=dataset.iloc[:,:-1].values
y=dataset.iloc[:,-1].values
from sklearn.preprocessing import *
#en=LabelEncoder()
#x[:,1]=en.fit_transform(x[:,1])
# Scale every feature into [0, 1] so gradient descent converges smoothly.
m=MinMaxScaler()
x=m.fit_transform(x)
def sigmoid(theta,x):
    """Element-wise logistic function of the linear scores x . theta^T."""
    scores = np.dot(x, theta.T)
    return 1.0 / (1.0 + np.exp(-scores))
def calc_val(theta,x,y):
    """Gradient of the logistic loss: (sigmoid(x.theta^T) - y)^T . x."""
    predictions = sigmoid(theta, x)
    residual = predictions - y.reshape(len(y), 1)
    return np.dot(residual.T, x)
def cost_func(theta,x,y):
    """Mean binary cross-entropy of the logistic model on (x, y)."""
    probs = sigmoid(theta, x)
    labels = np.squeeze(y)
    labels = labels.reshape(len(labels), 1)
    # -[y*log(p) + (1-y)*log(1-p)], averaged over samples.
    losses = -(labels * np.log(probs)) - ((1 - labels) * np.log(1 - probs))
    return np.mean(losses)
def grad(theta,x,y):
    """Batch gradient descent until the cost improvement drops below 1e-5.

    Returns the fitted theta and the number of iterations performed.
    """
    learning_rate = 0.01
    cost = cost_func(theta, x, y)
    improvement = 1
    iterations = 1
    while improvement > 0.00001:
        previous_cost = cost
        theta = theta - learning_rate * calc_val(theta, x, y)
        cost = cost_func(theta, x, y)
        improvement = previous_cost - cost
        iterations += 1
    return theta, iterations
# Prepend a bias column of ones to the feature matrix.
X=np.append(np.ones((x.shape[0],1)),x,axis=1)
from sklearn.model_selection import *
train_x,test_x,train_y,test_y=train_test_split(X,y,test_size=0.3,random_state=0)
# Random initial parameters, then fit by gradient descent.
theta=np.random.rand(1,X.shape[1])
theta,num=grad(theta,train_x,train_y)
#
# NOTE(review): this thresholds the raw linear score at 0.5, which is not
# the same as sigmoid(score) >= 0.5 (i.e. score >= 0) — confirm intended.
get_val=np.dot(test_x,theta.T)
y_pred=np.where(get_val>=0.5,1,0)
# Scatter the two true classes and draw the fitted decision boundary.
x0=test_x[np.where(test_y==0)]
x1=test_x[np.where(test_y==1)]
plt.scatter([x1[:,1]],[x1[:,2]],color="G")
plt.scatter([x0[:,1]],[x0[:,2]],color="B")
# Boundary line: theta0 + theta1*x1 + theta2*x2 = 0, solved for x2.
x1 = np.arange(0, 1, 0.1)
x2 = -(theta[0,0] + theta[0,1]*x1)/theta[0,2]
plt.plot(x1, x2, c='k', label='reg line')
plt.xlabel('input values')
plt.ylabel('predicted values')
plt.title('classification using logistic regression')
plt.show()
accuracy=np.sum(test_y.reshape(-1,1)==y_pred)/len(y_pred)*100
print('Accuracy: ',accuracy,' %')
|
[
"mostafa.mi1000@gmail.com"
] |
mostafa.mi1000@gmail.com
|
02a8803e807fc9deff5bb500cd7bbe1ec1e6e8f5
|
a4ea525e226d6c401fdb87a6e9adfdc5d07e6020
|
/src/azure-cli/azure/cli/command_modules/network/aaz/latest/network/vnet_gateway/packet_capture/_stop.py
|
f2ea3ecffeeb97633ad77215f9c0c9a9fb75268d
|
[
"MIT",
"BSD-3-Clause",
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MPL-2.0",
"LGPL-2.1-only",
"Apache-2.0",
"LGPL-2.1-or-later",
"BSD-2-Clause"
] |
permissive
|
Azure/azure-cli
|
13340eeca2e288e66e84d393fa1c8a93d46c8686
|
a40fd14ad0b6e89720a2e58d4d9be3a6ce1535ca
|
refs/heads/dev
| 2023-08-17T06:25:37.431463
| 2023-08-17T06:00:10
| 2023-08-17T06:00:10
| 51,040,886
| 4,018
| 3,310
|
MIT
| 2023-09-14T11:11:05
| 2016-02-04T00:21:51
|
Python
|
UTF-8
|
Python
| false
| false
| 6,463
|
py
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
#
# Code generated by aaz-dev-tools
# --------------------------------------------------------------------------------------------
# pylint: skip-file
# flake8: noqa
from azure.cli.core.aaz import *
@register_command(
"network vnet-gateway packet-capture stop",
is_preview=True,
)
class Stop(AAZCommand):
    """Stop packet capture on a virtual network gateway.
    :example: Stop packet capture on a virtual network gateway.
        az network vnet-gateway packet-capture stop -g MyResourceGroup -n MyVnetGateway --sas-url https://myStorageAct.blob.azure.com/artifacts?st=2019-04-10T22%3A12Z&se=2019-04-11T09%3A12Z&sp=rl&sv=2018-03-28&sr=c&sig=0000000000
    """
    # Maps the command to its ARM resource path and API version (used by the aaz runtime).
    _aaz_info = {
        "version": "2022-01-01",
        "resources": [
            ["mgmt-plane", "/subscriptions/{}/resourcegroups/{}/providers/microsoft.network/virtualnetworkgateways/{}/stoppacketcapture", "2022-01-01"],
        ]
    }
    # Enables the --no-wait flag for this long-running operation.
    AZ_SUPPORT_NO_WAIT = True
    def _handler(self, command_args):
        # Entry point: drive _execute_operations under an LRO poller, then format via _output.
        super()._handler(command_args)
        return self.build_lro_poller(self._execute_operations, self._output)
    _args_schema = None
    @classmethod
    def _build_arguments_schema(cls, *args, **kwargs):
        # Build the CLI argument schema once and cache it on the class.
        if cls._args_schema is not None:
            return cls._args_schema
        cls._args_schema = super()._build_arguments_schema(*args, **kwargs)
        # define Arg Group ""
        _args_schema = cls._args_schema
        _args_schema.resource_group = AAZResourceGroupNameArg(
            required=True,
        )
        _args_schema.name = AAZStrArg(
            options=["-n", "--name"],
            help="Name of the VNet gateway.",
            required=True,
            id_part="name",
        )
        _args_schema.sas_url = AAZStrArg(
            options=["--sas-url"],
            help="The SAS url to be used for packet capture.",
        )
        return cls._args_schema
    def _execute_operations(self):
        # Generator consumed by the LRO poller: hooks around the single HTTP operation.
        self.pre_operations()
        yield self.VirtualNetworkGatewaysStopPacketCapture(ctx=self.ctx)()
        self.post_operations()
    @register_callback
    def pre_operations(self):
        pass
    @register_callback
    def post_operations(self):
        pass
    def _output(self, *args, **kwargs):
        # Deserialize the result that on_200 stored in ctx.vars.instance.
        result = self.deserialize_output(self.ctx.vars.instance, client_flatten=False)
        return result
    class VirtualNetworkGatewaysStopPacketCapture(AAZHttpOperation):
        CLIENT_TYPE = "MgmtClient"
        def __call__(self, *args, **kwargs):
            request = self.make_request()
            session = self.client.send_request(request=request, stream=False, **kwargs)
            # NOTE(review): the 202 and 200 branches below are byte-identical; this is
            # generated code (aaz-dev-tools), so the duplication is left as-is.
            if session.http_response.status_code in [202]:
                return self.client.build_lro_polling(
                    self.ctx.args.no_wait,
                    session,
                    self.on_200,
                    self.on_error,
                    lro_options={"final-state-via": "location"},
                    path_format_arguments=self.url_parameters,
                )
            if session.http_response.status_code in [200]:
                return self.client.build_lro_polling(
                    self.ctx.args.no_wait,
                    session,
                    self.on_200,
                    self.on_error,
                    lro_options={"final-state-via": "location"},
                    path_format_arguments=self.url_parameters,
                )
            return self.on_error(session.http_response)
        @property
        def url(self):
            return self.client.format_url(
                "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/stopPacketCapture",
                **self.url_parameters
            )
        @property
        def method(self):
            return "POST"
        @property
        def error_format(self):
            return "ODataV4Format"
        @property
        def url_parameters(self):
            parameters = {
                **self.serialize_url_param(
                    "resourceGroupName", self.ctx.args.resource_group,
                    required=True,
                ),
                **self.serialize_url_param(
                    "subscriptionId", self.ctx.subscription_id,
                    required=True,
                ),
                **self.serialize_url_param(
                    "virtualNetworkGatewayName", self.ctx.args.name,
                    required=True,
                ),
            }
            return parameters
        @property
        def query_parameters(self):
            parameters = {
                **self.serialize_query_param(
                    "api-version", "2022-01-01",
                    required=True,
                ),
            }
            return parameters
        @property
        def header_parameters(self):
            parameters = {
                **self.serialize_header_param(
                    "Content-Type", "application/json",
                ),
                **self.serialize_header_param(
                    "Accept", "application/json",
                ),
            }
            return parameters
        @property
        def content(self):
            # Request body: {"sasUrl": <--sas-url>}; the property is omitted when unset.
            _content_value, _builder = self.new_content_builder(
                self.ctx.args,
                typ=AAZObjectType,
                typ_kwargs={"flags": {"required": True, "client_flatten": True}}
            )
            _builder.set_prop("sasUrl", AAZStrType, ".sas_url")
            return self.serialize_content(_content_value)
        def on_200(self, session):
            # Stash the deserialized 200 response for _output().
            data = self.deserialize_http_content(session)
            self.ctx.set_var(
                "instance",
                data,
                schema_builder=self._build_schema_on_200
            )
        _schema_on_200 = None
        @classmethod
        def _build_schema_on_200(cls):
            # Response schema is a plain string (AAZStrType), built once and cached.
            if cls._schema_on_200 is not None:
                return cls._schema_on_200
            cls._schema_on_200 = AAZStrType()
            return cls._schema_on_200
class _StopHelper:
    """Helper class for Stop"""
# Public API of this generated module.
__all__ = ["Stop"]
|
[
"noreply@github.com"
] |
Azure.noreply@github.com
|
97fec5b390f449af99c02b67c5b116a7a433ff93
|
09e57dd1374713f06b70d7b37a580130d9bbab0d
|
/benchmark/startCirq1611.py
|
1633204ade17fab90965b8f69b217ad38da07ded
|
[
"BSD-3-Clause"
] |
permissive
|
UCLA-SEAL/QDiff
|
ad53650034897abb5941e74539e3aee8edb600ab
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
refs/heads/main
| 2023-08-05T04:52:24.961998
| 2021-09-19T02:56:16
| 2021-09-19T02:56:16
| 405,159,939
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,244
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 5/15/20 4:49 PM
# @File : grover.py
# qubit number=5
# total number=60
import cirq
import cirq.google as cg
from typing import Optional
import sys
from math import log2
import numpy as np
#thatsNoCode
from cirq.contrib.svg import SVGCircuit
# Symbols for the rotation angles in the QAOA circuit.
def make_circuit(n: int, input_qubit):
    """Build the generated benchmark circuit on the given qubits and append a
    full measurement (key 'result').
    NOTE(review): the parameter n is unused here — the gate list below is fixed
    and acts on input_qubit directly; confirm whether n should bound anything.
    The trailing "# number=K" comments are generation indices from the tool
    that produced this circuit.
    """
    c = cirq.Circuit() # circuit begin
    c.append(cirq.H.on(input_qubit[0])) # number=3
    c.append(cirq.H.on(input_qubit[1])) # number=4
    c.append(cirq.H.on(input_qubit[2])) # number=5
    c.append(cirq.H.on(input_qubit[3])) # number=6
    c.append(cirq.H.on(input_qubit[0])) # number=38
    c.append(cirq.CZ.on(input_qubit[1],input_qubit[0])) # number=39
    c.append(cirq.H.on(input_qubit[0])) # number=40
    c.append(cirq.H.on(input_qubit[0])) # number=51
    c.append(cirq.CZ.on(input_qubit[1],input_qubit[0])) # number=52
    c.append(cirq.H.on(input_qubit[0])) # number=53
    c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=54
    c.append(cirq.Z.on(input_qubit[1])) # number=55
    c.append(cirq.H.on(input_qubit[0])) # number=57
    c.append(cirq.CZ.on(input_qubit[1],input_qubit[0])) # number=58
    c.append(cirq.H.on(input_qubit[0])) # number=59
    c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=50
    c.append(cirq.H.on(input_qubit[0])) # number=32
    c.append(cirq.CZ.on(input_qubit[1],input_qubit[0])) # number=33
    c.append(cirq.H.on(input_qubit[0])) # number=34
    c.append(cirq.H.on(input_qubit[4])) # number=21
    # Two repetitions of the oracle/diffusion-style layer below.
    for i in range(2):
        c.append(cirq.H.on(input_qubit[0])) # number=1
        c.append(cirq.H.on(input_qubit[1])) # number=2
        c.append(cirq.H.on(input_qubit[2])) # number=7
        c.append(cirq.H.on(input_qubit[3])) # number=8
        c.append(cirq.CNOT.on(input_qubit[3],input_qubit[0])) # number=41
        c.append(cirq.Z.on(input_qubit[3])) # number=42
        c.append(cirq.CNOT.on(input_qubit[3],input_qubit[0])) # number=43
        c.append(cirq.CNOT.on(input_qubit[1],input_qubit[3])) # number=44
        c.append(cirq.CNOT.on(input_qubit[3],input_qubit[2])) # number=45
        c.append(cirq.H.on(input_qubit[0])) # number=17
        c.append(cirq.H.on(input_qubit[1])) # number=18
        c.append(cirq.H.on(input_qubit[2])) # number=19
        c.append(cirq.H.on(input_qubit[3])) # number=20
        c.append(cirq.X.on(input_qubit[0])) # number=9
        c.append(cirq.X.on(input_qubit[1])) # number=10
        c.append(cirq.X.on(input_qubit[2])) # number=11
        c.append(cirq.CNOT.on(input_qubit[0],input_qubit[3])) # number=35
        c.append(cirq.X.on(input_qubit[3])) # number=36
        c.append(cirq.CNOT.on(input_qubit[0],input_qubit[3])) # number=37
        c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=24
        c.append(cirq.X.on(input_qubit[0])) # number=25
        c.append(cirq.CNOT.on(input_qubit[1],input_qubit[0])) # number=26
        c.append(cirq.X.on(input_qubit[1])) # number=14
        c.append(cirq.X.on(input_qubit[2])) # number=15
        c.append(cirq.X.on(input_qubit[3])) # number=16
        c.append(cirq.X.on(input_qubit[3])) # number=46
        c.append(cirq.Y.on(input_qubit[1])) # number=47
        c.append(cirq.X.on(input_qubit[1])) # number=22
        c.append(cirq.X.on(input_qubit[1])) # number=23
    # circuit end
    c.append(cirq.measure(*input_qubit, key='result'))
    return c
def bitstring(bits):
    """Render a sequence of measurement bits as a string of '0'/'1' chars."""
    chars = []
    for bit in bits:
        chars.append(str(int(bit)))
    return ''.join(chars)
if __name__ == '__main__':
    # Simulate the 5-qubit benchmark circuit and dump results to CSV.
    qubit_count = 5
    input_qubits = [cirq.GridQubit(i, 0) for i in range(qubit_count)]
    circuit = make_circuit(qubit_count,input_qubits)
    # Compile for Google's Sycamore gate set (sqrt-iSWAP based).
    circuit = cg.optimized_for_sycamore(circuit, optimizer_type='sqrt_iswap')
    circuit_sample_count =2000
    simulator = cirq.Simulator()
    result = simulator.run(circuit, repetitions=circuit_sample_count)
    # Histogram of measured bitstrings over all repetitions.
    frequencies = result.histogram(key='result', fold_func=bitstring)
    # Report: frequencies, circuit depth (moment count), and the circuit itself.
    writefile = open("../data/startCirq1611.csv","w+")
    print(format(frequencies),file=writefile)
    print("results end", file=writefile)
    print(circuit.__len__(), file=writefile)
    print(circuit,file=writefile)
    writefile.close()
|
[
"wangjiyuan123@yeah.net"
] |
wangjiyuan123@yeah.net
|
4bb9946ae758e3b022aaf6ff1e5265b84c6b3e6e
|
828668102c8783b2c057c97493243b9b8efb6d60
|
/src/API/cgi-bin/entrance/sample/test4.py
|
9c716a1e42b55bab24d07837892585c02ca6c6dd
|
[] |
no_license
|
Hirabayashi623/Python
|
edd597d00ace9dc1f94c457a6dc00c9afafaf46a
|
e3023cff3676db21003c6bb0f7609f682e734b8e
|
refs/heads/master
| 2020-03-09T01:06:56.779864
| 2018-07-16T08:33:27
| 2018-07-16T08:33:27
| 127,905,898
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 334
|
py
|
import codecs
import cgi

# Simple CGI endpoint: renders test4.html with the submitted "check" values.
# NOTE: the cgi module is legacy (removed in Python 3.13); this script targets
# the classic CGI stack it was written for.

# Read the UTF-8 template in one go; `with` guarantees the handle is closed
# (the original left the file open and concatenated it line by line).
with codecs.open('./html/entrance/sample/test4.html', 'r', 'utf_8') as fin:
    text_html = fin.read()

# Collect the "check" form field: first value (with fallback) and the full list.
# Mapping renamed from `dict`, which shadowed the builtin.
values = {}
form = cgi.FieldStorage()
values['first'] = form.getfirst("check", "not found")
values['list'] = form.getlist("check")

# CGI response: content-type header, blank line, rendered body.
print("Content-type: text/html\n")
print(text_html % values)
|
[
"ryo662233@gmail.com"
] |
ryo662233@gmail.com
|
8ca43076c3d8bf38e308bed9a671d3351a317c0b
|
86ecd287939bbe097061feaca427bbf5f287c35c
|
/test/node/node_keyword.py
|
dfb088395ed10993ad80d112acca64c34b9507a3
|
[
"MIT"
] |
permissive
|
lwzSoviet/finale
|
2e76d00c77cc2e089c7973fa71502da66c4d492b
|
6166bee0ddbd1d886abccee0c1c699eaf8f53040
|
refs/heads/main
| 2023-06-16T02:18:39.854643
| 2021-07-13T12:06:31
| 2021-07-13T12:06:31
| 385,586,266
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 26
|
py
|
# keyword
# NOTE(review): test fixture exercising a keyword-argument call; `add` is
# presumably supplied by the harness that loads this snippet — confirm.
add(name="123")
|
[
"pakecalvs@outlook.com"
] |
pakecalvs@outlook.com
|
3f7eacd8ddc425dd484677b87b667097a8ed18b6
|
3821860187e15a4235d541e7db510732c18212b0
|
/frontend/urls.py
|
0cf036fb26e20305a4ec85a41c131ecde41076b5
|
[] |
no_license
|
memadd/todo
|
15fb5901a87b162bb793c0b9f4c73674e38bab8f
|
3ed0acc15596964c50eca863b01fdddff7f5586d
|
refs/heads/master
| 2021-04-02T18:33:27.582092
| 2020-03-31T22:18:29
| 2020-03-31T22:18:29
| 248,308,247
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 118
|
py
|
from django.urls import path
from . import views
# URL routes for this app: GET /list/ is handled by views.list.
urlpatterns = [
    path ('list/', views.list, name='list'),
]
|
[
"memad632@gmail.com"
] |
memad632@gmail.com
|
bfdef2e2d892417a7098972ee128fe5199ee9ef1
|
9b43b36f36d9375073c8f1c409c0a7eb03eb6703
|
/day_2_advent_2020.py
|
5f84969de6c72f2f0c27edbc01369e5092d25977
|
[
"MIT"
] |
permissive
|
consentfactory/adventOfCode2020
|
68aa4161d9d6f353a8c0735f29216902f59af89b
|
b2896811a82ee9e6fd28fea92079614e851f7854
|
refs/heads/master
| 2023-01-27T22:52:06.587765
| 2020-12-04T09:21:30
| 2020-12-04T09:21:30
| 318,086,093
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,020
|
py
|
# Advent of Code - Day 2
# day_2_advent_2020.py
# 2020.12.02
# Jimmy Taylor
# Reading data from text file, spliting each value as separate lines without line break
input_data = open('inputs/day2.txt').read().splitlines()
# Cleaning up the data in each row to make it easier to parse later as individual rows
modified_data = [line.replace('-',' ').replace(':','').replace(' ',',') for line in input_data]
# Tracking good and bad passwords
valid_passwords_part_1 = 0
bad_passwords_part_1 = 0
valid_passwords_part_2 = 0
bad_passwords_part_2 = 0
for row in modified_data:
row = list(row.split(','))
# Part 1
min_val = int(row[0])
max_val = int(row[1])
letter = row[2]
password = row[3]
# Counting instances of the letter in the password
letter_count = password.count(letter)
# Checking if letter count is within the range
if (letter_count >= min_val) and (letter_count <= max_val):
valid_passwords_part_1 += 1
else:
bad_passwords_part_1 +=1
# Part 2
# Subtracting by 1 to calibrate character position
first_pos = int(row[0]) - 1
second_pos = int(row[1]) - 1
letter = row[2]
password = row[3]
# Looking through the characters and capturing their positions
positions = [pos for pos, char in enumerate(password) if char == letter]
# Looking if letter is in both positions; if so, bad password
if (first_pos in positions) and (second_pos in positions):
bad_passwords_part_2 +=1
# If letter in one position, valid password
elif (first_pos in positions):
valid_passwords_part_2 += 1
elif (second_pos in positions):
valid_passwords_part_2 += 1
# If letter is not in any position, bad password
else:
bad_passwords_part_2 +=1
print(f"Part 1 Valid Passwords: {valid_passwords_part_1}")
print(f"Part 1 Bad Passwords: {bad_passwords_part_1}")
print(f"Part 2 Valid Passwords: {valid_passwords_part_2}")
print(f"Part 2 Bad Passwords: {bad_passwords_part_2}")
|
[
"jimmy@consentfactory.com"
] |
jimmy@consentfactory.com
|
d5277fa235023a86b95aefe76be2b4347c8fc468
|
5d5f9afb73389b44d4dabcefa4cf3c9a652719af
|
/counter.py
|
76a8d6839406a58327663a0a95f3bdeea7f7fd32
|
[
"MIT"
] |
permissive
|
im085/chibi
|
c171eb3edfd31e5d88b5f2496dd52521e66feacc
|
2caa74efcc9c335b6db086a25b94c61a7475ebe0
|
refs/heads/master
| 2020-08-03T14:56:21.056556
| 2019-12-16T07:40:59
| 2019-12-16T07:40:59
| 211,792,919
| 0
| 0
|
MIT
| 2019-09-30T06:42:07
| 2019-09-30T06:42:07
| null |
UTF-8
|
Python
| false
| false
| 354
|
py
|
class Counter(object):
    """A tiny counter that advances in steps of two."""

    def __init__(self):  # constructor
        self.cnt = 0

    def doublecount(self):
        """Advance the counter by two."""
        self.cnt = self.cnt + 2

    def reset(self):
        """Set the counter back to zero."""
        self.cnt = 0

    def show(self):
        """Print the current count to stdout."""
        print(self.cnt)

    def __repr__(self):
        return "%d" % self.cnt
# Demo: exercise the Counter class (prints 0, then 2, then the type and repr).
c = Counter()
c.show()
c.doublecount()
c.show()
print(type(c))
print(c)
|
[
"mo8ri5@im085ztto.local"
] |
mo8ri5@im085ztto.local
|
bf3ad5e2691dc69e21891663d552faace47a5481
|
448d70d59cd2ebf5c6f183de227d66180cef3029
|
/scripts/addons_extern/AF_add_objects_specials/add_light_template.py
|
47339cb5f513c23bf7b908b8e1667ac3dc7d0029
|
[] |
no_license
|
motorsep/blenderpython
|
804fb149f7a8d22425e057bf3ae91f36fecb047f
|
f69bc448ed44875d30fcfda1acb33962015386c9
|
refs/heads/master
| 2021-01-18T02:01:54.117018
| 2016-04-10T06:06:36
| 2016-04-10T06:06:36
| 56,364,677
| 1
| 0
| null | 2016-04-16T04:00:56
| 2016-04-16T04:00:55
| null |
UTF-8
|
Python
| false
| false
| 5,084
|
py
|
# Blender add-on metadata, read by Blender's add-on manager.
bl_info = {
    "name": "Light Template",
    "author": "Rebellion",
    "version": (1, 0),
    "blender": (2, 7, 0),  # NOTE(review): Blender versions are usually (2, 70, 0)-style — confirm.
    "location": "View3D > Add > Lights Template",
    "description": "Adds a light/camera template to your scene",
    "warning": "",
    "wiki_url": "",
    "category": "Camera"}
import bpy
from bpy.types import Operator
from bpy.props import BoolProperty
from bpy_extras.object_utils import AddObjectHelper, object_data_add
from mathutils import Vector
def _new_tracking_spot(name, camera, target, z_rotation, energy=None):
    """Create and link one SPOT lamp that follows `camera` and aims at `target`.

    The lamp copies the camera's location (with offset, in local space) and
    tracks the target with -Z forward / X up, then is rotated about local Z by
    `z_rotation` radians to offset it from the camera axis.  `energy`, when
    given, overrides the lamp's default intensity.
    """
    lamp_data = bpy.data.lamps.new(name=name, type="SPOT")
    ob = bpy.data.objects.new(name, lamp_data)
    follow = ob.constraints.new(type='COPY_LOCATION')
    follow.use_offset = True
    follow.owner_space = 'LOCAL'
    follow.target = camera
    aim = ob.constraints.new(type='TRACK_TO')
    aim.target = target
    aim.track_axis = 'TRACK_NEGATIVE_Z'
    aim.up_axis = 'UP_X'
    aim.owner_space = 'LOCAL'
    bpy.context.scene.objects.link(ob)
    ob.rotation_euler[2] = z_rotation
    if energy is not None:
        ob.data.energy = energy
    return ob


def add_lamps(self, context):
    """Build the enabled three-point-lighting lamps around self.camera/self.target.

    The key/fill/back lights are toggled by the operator's Bool properties;
    the three near-identical creation blocks of the original are factored
    into _new_tracking_spot.  Optionally constrains the camera itself to
    keep tracking the target.
    """
    if self.bKeyLight:
        # Key light: 45 degrees (-0.785398 rad) off the camera axis, full power.
        _new_tracking_spot("Key_Light", self.camera, self.target, -0.785398)
    if self.bFillLight:
        # Fill light: mirrored 45 degrees on the other side, dimmer.
        _new_tracking_spot("Fill_Light", self.camera, self.target, 0.785398, energy=0.3)
    if self.bBackLight:
        # Back light: opposite the camera (pi rad), dimmest.
        _new_tracking_spot("Back_Light", self.camera, self.target, 3.14159, energy=0.2)
    if self.camera_constraint:
        # Keep the camera itself pointed at the target.
        constraint = self.camera.constraints.new(type='TRACK_TO')
        constraint.target = self.target
        constraint.track_axis = 'TRACK_NEGATIVE_Z'
        constraint.up_axis = 'UP_Y'
class OBJECT_OT_add_light_template(Operator):
    """Add light template"""
    bl_idname = "object.add_light_template"
    bl_label = "Add Light Template"
    bl_options = {'REGISTER', 'UNDO'}
    # Resolved in execute() from the current selection / scene.
    camera = None
    target = None
    bKeyLight = BoolProperty(name = "Key Light" ,default = True)
    bFillLight = BoolProperty(name = "Fill Light")
    bBackLight = BoolProperty(name = "Back Light")
    camera_constraint = BoolProperty(name = "Camera Constraint")
    def execute(self, context):
        # Pick the camera and target from the selection:
        #  - two objects selected: the CAMERA-type one is the camera, the other the target
        #  - one camera selected: add a new empty as the target
        #  - one non-camera selected: scene camera + the selection as target
        #  - nothing selected: scene camera + a new empty as target
        objects = bpy.context.selected_objects
        if len(objects) == 2:
            for ob in objects:
                if ob.type == 'CAMERA':
                    self.camera = ob
                else:
                    self.target = ob
        elif len(objects) == 1:
            if objects[0].type == 'CAMERA':
                self.camera = objects[0]
                bpy.ops.object.empty_add()
                self.target = context.active_object
            else:
                self.camera = context.scene.camera
                self.target = context.active_object
        elif len(objects)==0:
            bpy.ops.object.empty_add()
            self.target = context.active_object
            self.camera = context.scene.camera
        add_lamps(self, context)
        return {'FINISHED'}
# Registration
def add_object_button(self, context):
    """Draw the "Light Template" entry in the Add menu."""
    layout = self.layout
    layout.operator(
        OBJECT_OT_add_light_template.bl_idname,
        text="Light Template",
        icon='PLUGIN',
    )
# This allows you to right click on a button and link to the manual
#def add_object_manual_map():
# url_manual_prefix = "http://wiki.blender.org/index.php/Doc:2.6/Manual/"
# url_manual_mapping = (
# ("bpy.ops.mesh.add_object", "Modeling/Objects"),
# )
# return url_manual_prefix, url_manual_mapping
#
def register():
    # Register the operator class and hook the menu entry into the Add menu.
    bpy.utils.register_class(OBJECT_OT_add_light_template)
    #bpy.utils.register_manual_map(add_object_manual_map)
    bpy.types.INFO_MT_add.append(add_object_button)
def unregister():
    """Undo register(): remove the operator class and the Add-menu entry."""
    bpy.utils.unregister_class(OBJECT_OT_add_light_template)
    #bpy.utils.unregister_manual_map(add_object_manual_map)
    # Bug fix: register() appends the button to INFO_MT_add, so it must be
    # removed from that same menu (it was previously removed from
    # INFO_MT_mesh_add, leaving a dangling entry after disabling the add-on).
    bpy.types.INFO_MT_add.remove(add_object_button)
# Allow running the file directly from Blender's text editor.
if __name__ == "__main__":
    register()
|
[
"meta.androcto1@gmail.com"
] |
meta.androcto1@gmail.com
|
d3a15eeb5b219cb7402dabe0936aa000fa9631a2
|
dafe53f683a521389c66fc77dddb5f15aa9d0783
|
/M101P/week-1/pyhon/python-simple-print.py
|
043261de35c34f81e0ebb1256fb9c6eaa07ecd25
|
[] |
no_license
|
bozhink/mongodb-university
|
a56fde17d2f0af325875c9ad5e50b62c0b379abc
|
ec0de61cd658980f61ed8054fd1516814fc63896
|
refs/heads/master
| 2020-04-04T10:40:21.194742
| 2017-08-05T12:17:47
| 2017-08-05T12:17:47
| 54,334,262
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 26
|
py
|
# Print a greeting assembled from two literals.
message = "Hello " + "World"
print(message)
|
[
"bozhink@gmail.com"
] |
bozhink@gmail.com
|
739e625fd56795bf80d7234bd869372ae0cb58a3
|
6c8f3ab5f952d986a17edda582c5a039bf65c632
|
/python_fundamentals/insertion_sort.py
|
a4b9dc2aee91a839ad7a73818c07f803a09bc758
|
[] |
no_license
|
phillipn/coding_bootcamp_projects
|
3d3bd697728dd4502267e0cd2be7a090952029a8
|
278f96df9d256364583654a00fe585d474ea86a1
|
refs/heads/master
| 2021-01-17T17:30:14.607944
| 2017-03-19T18:12:32
| 2017-03-19T18:12:32
| 82,971,619
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 531
|
py
|
import random
import time
def insertion_sort(arr):
    """Sort `arr` in place with insertion sort, print the elapsed time, and
    return the (same) list.

    Fixes vs. the original: the inner while now stops as soon as the element
    is in place (the original kept decrementing through the whole sorted
    prefix even after no more swaps were possible), and the timing report is
    a single-argument print() that behaves identically under Python 2 and 3
    (the original used a Python-2-only print statement).
    """
    start = time.time()
    for i, item in enumerate(arr):
        if i == 0:
            continue
        # Bubble arr[i] left while it is smaller than its predecessor.
        while i > 0 and arr[i] < arr[i - 1]:
            arr[i - 1], arr[i] = arr[i], arr[i - 1]
            i -= 1
    end = time.time()
    time_elapsed = end - start
    print("List sorted in " + str(time_elapsed) + " seconds.")
    return arr
# Demo: sort 100 random integers in [0, 10000].
# NOTE: xrange and the bare print statement below are Python 2 only.
rand_vals = []
for i in xrange(100):
    rand_vals.append(int(random.randint(0,10000)))
print insertion_sort(rand_vals)
|
[
"phillipn101@gmail.com"
] |
phillipn101@gmail.com
|
7d8989ff8a6cd72c2a4c0e4dc6940fe9a50e6adc
|
86897de26d2e38ccb6209f18beeac779b95f2f8b
|
/setup.py
|
76dea5c25f0b6c3a4edf3cac6446aa451a0a7203
|
[
"BSD-2-Clause"
] |
permissive
|
CivicKnowledge/jupyterhub-authenticators
|
0319220a531fd391779d1112e7447cfd8e9b046d
|
d14c49b0fe44035d66c4ef67ccf4aec84ff79815
|
refs/heads/master
| 2020-03-30T02:50:13.091033
| 2019-05-08T00:11:20
| 2019-05-08T00:11:20
| 150,653,339
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 403
|
py
|
from setuptools import setup
# Packaging metadata for the jh-auth distribution: custom JupyterHub
# authenticators shipped as the single `jhauth` package.
setup(
    name='jh-auth',
    version='0.1.5',
    description='Custom authenticators for JupyterHub',
    url='https://github.com/CivicKnowledge/jupyterhub-authenticators.git',
    author='Eric Busboom',
    author_email='eric@civicknowledge.com',
    license='2 Clause BSD',
    packages=['jhauth'],
    install_requires=['oauthenticator','requests', 'dockerspawner']
)
|
[
"eric@civicknowledge.com"
] |
eric@civicknowledge.com
|
5186c72e558ed5421e9b77e651b89b5baeeee988
|
98f1d2c94812f43e62afbb238c41b1761615b08c
|
/Paint/paint.py
|
8c43422ff509f5fcf863714ed646a4212c16bab7
|
[] |
no_license
|
aPro313/WebScraping
|
efaf47e505c38c03ac6762fd6b5561f1675ae8aa
|
ee2a15b4fc9ea9a617430881f86d858787493449
|
refs/heads/master
| 2022-12-12T05:52:57.917242
| 2020-09-01T03:24:54
| 2020-09-01T03:24:54
| 282,470,501
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,996
|
py
|
import requests
from selenium.webdriver.common.keys import Keys
from selenium import webdriver
from selenium.webdriver.support.select import Select
import time
from parsel import Selector
import sys
import xlsxwriter
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as ec
from selenium.webdriver.common.by import By
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.common.action_chains import ActionChains
import csv
import json
import xlrd
# Default sheen list; replaced per-colour when the page reports its own
# available sheens, and reset back to this after each colour.
avaSheens =',Matt,Low Sheen,Semi Gloss,Gloss'
# Accumulates one dict per (colour, sheen) combination scraped.
Results =[]
#Loading page
driver = webdriver.Chrome('chromedriver.exe')
driver.get('https://www.inspirationspaint.com.au/house-paint/interior-paint/walls/dulux-wash-and-wear')
driver.maximize_window()
#Reading excel file
wb = xlrd.open_workbook('Colours.xlsx')
sheet = wb.sheet_by_index(0)
sheet.cell_value(0, 0)
# Main scrape: for each colour name in the spreadsheet, search the site,
# then for each matching colour swatch record product name and sizes for
# every available sheen.
# Change color range to scrape desired colors
for i in range(883,884):
    print(sheet.cell_value(i, 0))
    #Finding and clicking Add Color
    wait = WebDriverWait(driver,10)
    (wait.until(ec.visibility_of_element_located((By.XPATH, "//div[@class='col-3-4']")))).click()
    #Clicking Search
    (wait.until(ec.visibility_of_element_located((By.XPATH, "//span[@class='cv-ico-general-search ico-right']")))).click()
    #Find input box and search color
    searchBox = wait.until(ec.visibility_of_element_located((By.XPATH, "//div[@class='colour-search-box']//input[@class='form-text k-input']")))
    listColor = sheet.cell_value(i, 0) #Color from excel sheet
    searchBox.clear()
    searchBox.send_keys(listColor) #listColor
    searchBox.send_keys(Keys.ENTER)
    #Finding searched colors list and iterating throug each color
    if not (wait.until(ec.visibility_of_element_located((By.XPATH, "//div[@class='colour-groups']//div[@class='colour-tile']/div[@class='colour-swatch-label']")))):
        continue
    colors = driver.find_elements_by_xpath("//div[@class='colour-groups']//div[@class='colour-tile']/div[@class='colour-swatch-label']")
    for color in colors:
        #Selecting searched colors
        Scolor= color.text
        color.click()
        time.sleep(1)
        #check is available sheens appears
        try:
            driver.find_element_by_xpath('//div[@data-bind="invisible: sheenMatch"][@style=""]')
            #saving all available sheen and clicking first one
            avaSheens = driver.find_elements_by_xpath("//span[@class='cart-product-title']/a")
            avaSheens = ','+','.join(avaSheen.text for avaSheen in avaSheens)
            btnAvaSheen = driver.find_element_by_xpath('//button[@class="btn cv-apply"]')
            btnAvaSheen.click()
            time.sleep(1)
        except:
            pass
        #Clicking Sheen dropdown
        (wait.until(ec.visibility_of_element_located((By.XPATH,'//span[@class="k-select"][1]')))).click()
        # Getting Sheen List items
        time.sleep(1)
        sheenList = driver.find_elements_by_xpath('//div[4]/div/div[2]/ul/li')
        # First loop to iterate throug Sheen items
        for sheen in sheenList:
            sheenName = sheen.text
            # Skip sheens not in the available list (prefixed comma avoids substring hits).
            searchSheen = avaSheens.find(','+sheenName)
            if searchSheen == -1:
                continue
            sheen.click()
            time.sleep(2)
            #Checking notification box
            try:
                #Notification box click
                driver.find_element_by_xpath("//button[@class='btn small']").click()
                # Change color link click
                (wait.until(ec.visibility_of_element_located((By.XPATH, "//div[@class='change-colour-link']")))).click()
                #Finding color list and click current color
                wait.until(ec.visibility_of_element_located((By.XPATH, "//div[@class='colour-groups']//div[@class='colour-tile']/div[@class='colour-swatch-label'][1]")))
                color.click()
                time.sleep(1)
                #saving all available sheen and clicking first one
                wait.until(ec.visibility_of_element_located((By.XPATH,'//div[@id="box-slide"][@style="right: 0px;"]')))
                avaSheens = driver.find_elements_by_xpath("//span[@class='cart-product-title']/a")
                avaSheens = ','+','.join(avaSheen.text for avaSheen in avaSheens)
                btnAvaSheen = driver.find_element_by_xpath('//button[@class="btn cv-apply"]')
                btnAvaSheen.click()
                time.sleep(1)
                #Clicking Sheen dropdown
                (wait.until(ec.visibility_of_element_located((By.XPATH,'//span[@class="k-select"][1]')))).click()
                time.sleep(1)
                continue
            except:
                pass
            #Product Name
            ProName = driver.find_element_by_xpath('//h1[contains(@class,"widget-product-title")]').text
            # Clicking Size dropdown
            (wait.until(ec.visibility_of_element_located((By.XPATH,'//span[1]/div/span/div[3]/span[1]//span[@class="k-select"]')))).click()
            # Getting Size list items
            time.sleep(1)
            sizes = driver.find_elements_by_xpath('//*[@id="body"]/div[18]/div/div[2]/ul/li')
            # NOTE(review): str.strip strips any of these *characters* from both
            # ends, not the literal prefix 'Please Select,' — verify intent.
            sizeJoin = (','.join(size.text for size in sizes)).strip('Please Select,')
            #Apending fields to list
            Results.append({
                'ColorGoogleSheet' : listColor,
                'ColorSearchResult' : Scolor,
                'Sheen' : sheenName,
                'ProductName': ProName,
                'Size': sizeJoin,
            })
            #Clicking Sheen dropdown
            (wait.until(ec.visibility_of_element_located((By.XPATH,'//span[@class="k-select"][1]')))).click()
            time.sleep(1)
        #Pressing change color link and select next color
        avaSheens =',Matt,Low Sheen,Semi Gloss,Gloss'
        #Press Esc to close Sheen dropdown
        webdriver.ActionChains(driver).send_keys(Keys.ESCAPE).perform()
        #Clicking add color link
        (wait.until(ec.visibility_of_element_located((By.XPATH, "//div[@class='change-colour-link']")))).click()
        wait.until(ec.visibility_of_element_located((By.XPATH, "//div[@class='colour-groups']//div[@class='colour-tile']/div[@class='colour-swatch-label'][1]")))
        #Check if change color slide open then close it
        time.sleep(2)
        if(driver.find_elements_by_xpath('//div[@id="box-slide"][@style="right: 0px;"]')):
            print("window open")
            driver.find_element_by_xpath('//div[@id="box-slide"]/button').click()
            time.sleep(1)
        webdriver.ActionChains(driver).move_by_offset(20,20).perform()
#Printing results in json format
print(json.dumps(Results, indent=2))
# Writing to CSV
# NOTE(review): Results[0] raises IndexError when nothing was scraped — confirm
# the range above always yields at least one row.
with open('colors.csv', 'w', newline='') as csv_file:
    writer = csv.DictWriter(csv_file, fieldnames=Results[0].keys())
    writer.writeheader()
    for row in Results:
        writer.writerow(row)
|
[
"Aprogrammer313@gmail.com"
] |
Aprogrammer313@gmail.com
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.