blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
281
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
57
| license_type
stringclasses 2
values | repo_name
stringlengths 6
116
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 313
values | visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 18.2k
668M
⌀ | star_events_count
int64 0
102k
| fork_events_count
int64 0
38.2k
| gha_license_id
stringclasses 17
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 107
values | src_encoding
stringclasses 20
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 4
6.02M
| extension
stringclasses 78
values | content
stringlengths 2
6.02M
| authors
listlengths 1
1
| author
stringlengths 0
175
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
6b54a465ce0fb3d99b380b2741c436f2a04aba50
|
d5ab31874dd279656d7f24780e102b352f7f1e08
|
/reports/configs/only_logd_dmpnn8_2/only_logd_dmpnn8_2
|
2bdd9de08945ebddf565738ef8cab7e248ea5be7
|
[
"MIT"
] |
permissive
|
WangYitian123/graph_networks
|
77f76ab9ffa74bb4d52df52b1a17867c0c86be25
|
542f2a59b1b9708abdc718d77db7111f3ba2df96
|
refs/heads/main
| 2023-07-08T22:43:11.775430
| 2021-07-29T13:01:35
| 2021-07-29T13:01:35
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,815
|
from dataclasses import dataclass, field
from typing import List
import tensorflow as tf
from graph_networks.utilities import *
import logging
import os
# Feature dimensionalities for the DGIN8 featurization; the DGIN8_* values
# come from the star-import of graph_networks.utilities above.
ATOM_FEATURE_DIM = DGIN8_ATOM_FEATURE_DIM
EDGE_FEATURE_DIM = DGIN8_EDGE_FEATURE_DIM
@dataclass
class BasicModelConfig:
    """
    Config for model1/2/3 run file.
    General model parameters: naming, data locations, logging/output
    directories and the train/test/retrain switches.

    NOTE(review): all path defaults below are evaluated once at class
    definition time from the other field defaults (e.g. project_path,
    retrain_model_name).  Overriding those fields on an instance does NOT
    recompute the derived path fields - confirm this is intended.
    """
    model_name: str = 'only_logd_dmpnn8_2' # without h_w in DGIN gin part - added h_v_0 instead
    # whole train/eval split - no more double split within train data set
    # random train/test split in get_data_sd - only change overall_seed
    # CHANGES dgin3 10.02.2021:
    # *added new bondFeaturesDGIN2 and atomFeaturesDGIN2; DGIN2_ATOM_FEATURE_DIM; DGIN2_EDGE_FEATURE_DIM
    # *from project_path+'data/processed/lipo/pickled/train_frags3/' to project_path+'data/processed/lipo/pickled/test_frags3/'
    # CHANGES dgin3 16.02.2021:
    # *added new bondFeaturesDGIN3 and atomFeaturesDGIN3; DGIN3_ATOM_FEATURE_DIM; DGIN3_EDGE_FEATURE_DIM
    # *from project_path+'data/processed/lipo/pickled/train_frags_dgin3/' to project_path+'data/processed/lipo/pickled/test_frags_dgin3/'
    # CHANGES dgin4 16.02.2021:
    # *added add_species bool in model1 config - previously not there; for dgin2 featurization adds the species type after the dgin
    # encoding before logD prediction
    # test_frags_dgin4 was added for species inclusion in model2 call()
    batch_size: int =15
    # overwrite existing run artifacts for the same model_name
    override_if_exists: bool = True
    overall_seed: int = 2
    # path to the project folder
    project_path:str = "./"
    # retraining: continue from previously saved weights
    retrain_model: bool = False
    retrain_model_name: str = ''
    retrain_model_epoch: str = ''
    retrain_model_weights_dir: str = project_path+'reports/model_weights/'+retrain_model_name+'/epoch_'+retrain_model_epoch+'/checkp_'+retrain_model_epoch
    train_data_dir: str = project_path+'data/processed/lipo/pickled/train_dgin8_logd/'
    test_data_dir: str = project_path+'data/processed/lipo/pickled/test_dgin8_logd/'
    # when True, the additional (logS) data set below is merged in
    combined_dataset: bool = False
    add_train_data_dir: str = project_path+'data/processed/lipo/pickled/train_dgin8_logs/'
    add_test_data_dir: str = project_path+'data/processed/lipo/pickled/test_dgin8_logs/'
    # testing: evaluate a saved checkpoint instead of training
    test_model: bool = False
    test_model_epoch: str = '887'
    # define the number or test runs for the CI.
    # the mean and std of the RMSE and r^2 of the combined runs are taken as the output.
    test_n_times: int = 1
    # do you want to test the model with consensus mode?
    # if yes, a defined ML model will be included in the consensus predictions during the testing.
    consensus: bool = False
    # include dropout during testing?
    include_dropout: bool = False
    test_model_weights_dir: str = project_path+'reports/model_weights/'+model_name+'/epoch_'+test_model_epoch+'/checkp_'+test_model_epoch
    # To save the prediction values for each property set to True
    # When this flag is True - the whole test dataset is taken an test_n_times is set to zero!
    save_predictions: bool = False
    # define the folder where you want to save the predictions.
    # For each property, a file is created under the property name ("./logd.txt","./logs.txt","./logp.txt","./others.txt")
    test_prediction_output_folder: str = project_path+"reports/predictions/"+model_name+"/"
    encode_hidden: bool = False
    log_dir: str = project_path+'reports/logs/'+model_name+'.log'
    # NOTE: no annotation, so this is a shared class attribute, not a dataclass field
    verbosity_level = logging.INFO
    model_type: str = 'DMPNN' # added 31.03.2021 to compare models like 'GIN' 'DMPNN' 'DGIN' 'MLP'
    # output directories, all derived from model_name at class definition time
    plot_dir: str = project_path+'reports/figures/'+model_name+'/'
    tensorboard_log_dir: str = project_path+'reports/tensorboard/'+model_name+'/'
    config_log_dir: str = project_path+'reports/configs/'+model_name+'/'
    model_weights_dir: str = project_path+'reports/model_weights/'+model_name+'/'
    stats_log_dir: str = project_path+'reports/stats/'+model_name+'/'
@dataclass
class DGINConfig:
    """
    Config for the directed-MPNN (D-MPNN) / GIN sub-model.

    Dropout/layernorm switches exist separately for the aggregate and
    passing stages of both the D-MPNN and the GIN parts.
    """
    dropout_aggregate_dmpnn: bool = False
    layernorm_aggregate_dmpnn: bool = True
    dropout_passing_dmpnn: bool = False
    layernorm_passing_dmpnn: bool = True
    dropout_aggregate_gin: bool = False
    layernorm_aggregate_gin: bool = True
    dropout_passing_gin: bool = False
    layernorm_passing_gin: bool = True
    gin_aggregate_bias: bool = False
    dmpnn_passing_bias: bool = False
    init_bias: bool = False
    # NOTE(review): "massge" is a typo for "message", but the field name is
    # part of the public interface, so it is kept for backward compatibility.
    massge_iteration_dmpnn: int = 4
    message_iterations_gin: int = 4
    dropout_rate: float = 0.15
    input_size: int = (ATOM_FEATURE_DIM+EDGE_FEATURE_DIM) # combination of node feature len (33) and edge feature len (12)
    passing_hidden_size: int = 56 # this can be changed
    input_size_gin: int = (ATOM_FEATURE_DIM) # changed 31.03.2021
    return_hv: bool = True # model3 parameter
@dataclass
class Model1Config:
    """
    Config model1 class - no subclass configs are defined here.

    Holds training hyperparameters, shared loss/metric callables and the
    readout-architecture settings for model1.
    """
    # fraction of the data used for training (the rest is validation)
    validation_split: float = 0.90
    learning_rate: float = 0.004
    # gradient clipping rate
    clip_rate: float = 0.6
    # NOTE: the attributes below have no type annotation, so they are plain
    # class attributes shared by all instances - not dataclass fields.
    optimizer = tf.keras.optimizers.Adam(learning_rate)
    lipo_loss_mse = tf.keras.losses.mse
    lipo_loss_mae = tf.keras.losses.mae
    logP_loss_mse = tf.keras.losses.mse
    logS_loss_mse = tf.keras.losses.mse
    other_loss_mse = tf.keras.losses.mse
    mw_loss_mse = tf.keras.losses.mse
    metric = tf.keras.losses.mae
    epochs: int = 1600
    # define the number of epochs for each test run.
    save_after_epoch: int = 3
    # dropout rate for the general model - mainly the MLP for the different
    # log predictions (the overall dropout rate of the readout functions)
    dropout_rate: float = 0.15
    # the seed to shuffle the training/validation dataset; For the same
    # dataset, even when combined_dataset is True, it is the same
    # training/validation instances
    train_data_seed: int = 0
    # (redundant duplicate definitions of dropout_rate and train_data_seed
    # with identical values were removed here - behavior is unchanged.)
    hidden_readout_1: int = 32
    hidden_readout_2: int = 14
    activation_func_readout = tf.nn.relu
    # which endpoints are included in training/evaluation
    include_logD: bool = True
    include_logS: bool = False
    include_logP: bool = False
    include_other: bool = False
    include_mw: bool = False
    include_rot_bond: bool = False
    include_HBA: bool = False
    include_HBD: bool = False
    # define the starting threshold for the RMSE of the model. When the
    # combined RMSE is below this threshold, the model weights are being saved
    # and a new threshold is set. It only serves as a starting threshold so
    # that not too many models are being saved. Depends on how many log
    # endpoints are being taken into consideration - as three endpoints have a
    # higher combined RMSE as only one endpoint.
    best_evaluation_threshold: float = 2.45 #was introduced on the 25.03.2021/
    # define the individual thresholds. If one model is better, the
    # corresponding model weights are being saved.
    best_evaluation_threshold_logd: float = 1.85
    best_evaluation_threshold_logp: float = 1.65
    best_evaluation_threshold_logs: float = 2.15
    best_evaluation_threshold_other: float = 2.15
    # reference combined-RMSE starting points:
    # 2.45 for all_logs
    # 0.70 logP
    # 0.75 logD
    # 1.00 logS
    # 1.75 logSD
    # 1.70 logSP
    # 1.45 logDP
    include_fragment_conv: bool = False # was introduced on the 4.12.2020
    use_rmse: bool = True # uses RMSE instead of MSE for only lipo_loss
    shuffle_inside: bool = True # reshuffles the train/valid test seach in each epoch (generalizes)
    add_species: bool = False # 16.02 introduction; previously not there; for dgin3 adds the species type after the dgin encoding before logD prediction
@dataclass
class FrACConfig:
    """
    Config fragment aggregation class - no subclass configs are defined here.
    """
    input_size_gin: int = 28
    layernorm_aggregate: bool = True
    # aggregation mode for fragment features
    reduce_mean: bool = True # when false -> reduce_sum
@dataclass
class MLConfig:
    """
    Configs for the ML algorithm used in consensus predictions.
    """
    # which algorithm do you want to use for the consensus?
    # possibilities are: "SVM", "RF", "KNN" or "LR" - all are regression models!
    # SVM: Support Vector Machine; RF: Random Forest; KNN: K-Nearest Neighbors; LR: Linear Regression
    algorithm: str = "SVM"
    # which fingerprint to use - possibilities are: "ECFP" or "MACCS"
    fp_types: str = "ECFP"
    # If 'ECFP' fingerprint is used, define the number of bits - maximum is 2048!
    n_bits: int = 2048
    # If "ECFP" fingerprint is used, define the radius
    radius: int = 4
    # define if descriptors should be included into the non-GNN molecular representation
    include_descriptors: bool = True
    # define if the descriptors should be standardized by scaling and centering (Sklearn)
    standardize: bool = True
@dataclass
class Config():
    """
    Overall config class for model2 and run file.
    Includes all submodels config.
    """
    basic_model_config: BasicModelConfig
    model1_config: Model1Config
    d_gin_config: DGINConfig
    frag_acc_config: FrACConfig
    ml_config: MLConfig
    # run-file model selector; presumably dispatched on by the run script -
    # TODO confirm valid values against the caller
    model: str = 'model11'
|
[
"wieder.oliver@gmail.com"
] |
wieder.oliver@gmail.com
|
|
90d59540d8e2afccaf99b13f80cc0a735d81e0a3
|
85a7dde9c48945972a7f521f0fbb2eb56b323aa2
|
/obsolete_files/old/listening_eyes.py
|
69a61d1a1a20e04408df1df5513166b7f89f27b3
|
[] |
no_license
|
jwmcgettigan/renegade
|
1e8f61a14d6a5a7aff5c410f0c26bb166f95bd03
|
ef76bebc6867683e1fb3201be547f42aa6e65881
|
refs/heads/master
| 2021-04-06T13:53:12.945602
| 2018-07-17T22:09:13
| 2018-07-17T22:09:13
| 124,680,527
| 1
| 0
| null | 2018-07-17T22:09:14
| 2018-03-10T17:33:52
|
Makefile
|
UTF-8
|
Python
| false
| false
| 752
|
py
|
#!/usr/bin/env python
import rospy as rp
import cv2
from sensor_msgs.msg import Image
from cv_bridge import CvBridge, CvBridgeError
# Shared converter between ROS Image messages and OpenCV arrays.
bridge = CvBridge()
def left_callback(data):
    """Display the latest left-eye frame in an OpenCV window."""
    frame = bridge.imgmsg_to_cv2(data, desired_encoding="passthrough")
    cv2.imshow("left_eye", frame)
    # waitKey pumps the GUI event loop; the 'q' keypress is deliberately ignored.
    key = cv2.waitKey(20) & 0xFF
    if key == ord('q'):
        pass
def right_callback(data):
    """Display the latest right-eye frame in an OpenCV window."""
    frame = bridge.imgmsg_to_cv2(data, desired_encoding="passthrough")
    cv2.imshow("right_eye", frame)
    # waitKey pumps the GUI event loop; the 'q' keypress is deliberately ignored.
    key = cv2.waitKey(20) & 0xFF
    if key == ord('q'):
        pass
def listener():
    """Initialise the ROS node and subscribe to both eye image topics."""
    rp.init_node('listener', anonymous=True)
    # Each incoming frame is handed to the matching display callback.
    rp.Subscriber("left_eye", Image, left_callback)
    rp.Subscriber("right_eye", Image, right_callback)
    # Block until the node is shut down.
    rp.spin()
if __name__ == '__main__':
    listener()
    # Close the OpenCV display windows once the node stops spinning.
    cv2.destroyAllWindows()
|
[
"jwmcgettigan@gmail.com"
] |
jwmcgettigan@gmail.com
|
5780ccdc240b7902df73907fa6fcdfde16add904
|
234375d9ea3eeeacbfbd4b9dbcd508c7c88455ec
|
/setup.py
|
bcac9084a4162ce6e282d0d68d382f3095154453
|
[] |
no_license
|
shinyy98/Riskfolio-Lib
|
6962043f2ca4a592fb8f86dd5dcc6328345ea2b2
|
33979a32885de8a24c09847ca5a20af584ab54d4
|
refs/heads/master
| 2023-04-20T08:07:39.402230
| 2021-05-06T07:48:05
| 2021-05-06T07:48:05
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,517
|
py
|
# Copyright (C) 2020-2021 Dany Cajas
# Distribution metadata consumed by the setup() call in the __main__ block.
DESCRIPTION = "Portfolio Optimization and Quantitative Strategic Asset Allocation in Python"
# The long description (shown on PyPI) is the README contents.
with open("README.md", encoding='UTF-8') as fh:
    LONG_DESCRIPTION = fh.read()
DISTNAME = 'Riskfolio-Lib'
MAINTAINER = 'Dany Cajas'
MAINTAINER_EMAIL = 'dany.cajas.n@uni.pe'
URL = 'https://github.com/dcajasn/Riskfolio-Lib'
LICENSE = 'BSD (3-clause)'
KEYWORDS = 'finance, portfolio, optimization, quant, asset, allocation, investing'
DOWNLOAD_URL = 'https://github.com/dcajasn/Riskfolio-Lib.git'
VERSION = '0.2.0.1'
PYTHON_REQUIRES = ">=3.7"
# Runtime dependencies with minimum versions.
INSTALL_REQUIRES = [
    'numpy>=1.17.0',
    'scipy>=1.0.1',
    'pandas>=1.0.0',
    'matplotlib>=3.3.0',
    'cvxpy>=1.0.25',
    'scikit-learn>=0.22.0',
    'statsmodels>=0.10.1',
    'arch>=4.15',
    'xlsxwriter>=1.3.7',
]
PACKAGES = [
    'riskfolio',
]
# PyPI trove classifiers.
CLASSIFIERS = [
    'Intended Audience :: Financial and Insurance Industry',
    'Intended Audience :: Science/Research',
    'Programming Language :: Python :: 3.7',
    'Programming Language :: Python :: 3.8',
    'Programming Language :: Python :: 3.9',
    'License :: OSI Approved :: BSD License',
    'Topic :: Office/Business :: Financial :: Investment',
    'Topic :: Office/Business :: Financial',
    'Topic :: Scientific/Engineering :: Mathematics',
    'Operating System :: Microsoft',
    'Operating System :: Unix',
    'Operating System :: MacOS'
]
if __name__ == "__main__":
    from setuptools import setup
    import sys

    # Parse the minimum (major, minor) version out of PYTHON_REQUIRES
    # (e.g. ">=3.7" -> (3, 7)).  The previous check used
    # int(PYTHON_REQUIRES[-1]), which only looks at the LAST character and
    # silently breaks for two-digit minor versions such as ">=3.10"
    # (it would have required (3, 0)).
    min_version = tuple(int(part) for part in PYTHON_REQUIRES.lstrip(">=").split("."))
    if sys.version_info[:len(min_version)] < min_version:
        raise RuntimeError("Riskfolio-Lib requires python " + PYTHON_REQUIRES)

    # All metadata comes from the module-level constants defined above.
    setup(
        name=DISTNAME,
        author=MAINTAINER,
        author_email=MAINTAINER_EMAIL,
        maintainer=MAINTAINER,
        maintainer_email=MAINTAINER_EMAIL,
        description=DESCRIPTION,
        long_description=LONG_DESCRIPTION,
        long_description_content_type='text/markdown; charset=UTF-8; variant=GFM',
        license=LICENSE,
        keywords=KEYWORDS,
        url=URL,
        version=VERSION,
        download_url=DOWNLOAD_URL,
        python_requires=PYTHON_REQUIRES,
        install_requires=INSTALL_REQUIRES,
        packages=PACKAGES,
        classifiers=CLASSIFIERS,
        project_urls={"Documentation": "https://riskfolio-lib.readthedocs.io/en/latest/",
                      "Issues": "https://github.com/dcajasn/Riskfolio-Lib/issues",
                      "Personal website": "http://financioneroncios.wordpress.com",
                      },
    )
|
[
"61527316+dcajasn@users.noreply.github.com"
] |
61527316+dcajasn@users.noreply.github.com
|
3ce2bc9fc56982061e585feab4245d388dd09ad7
|
da489e1e388643174101981fbbdf12fd498a3ba0
|
/ihome13/ihome/api_1_0/houses.py
|
04060d610fb1249835258fd7910565bf95ce96a1
|
[] |
no_license
|
zb14755456464/home
|
f5344b90e91a538283524dbd21fecf51cdfdbe50
|
3ec478083c2f5792ddfbfdb92e8bd43f51d6242d
|
refs/heads/master
| 2023-01-04T16:37:28.869627
| 2018-03-11T08:25:38
| 2018-03-11T08:25:38
| 124,736,942
| 0
| 0
| null | 2022-12-27T14:57:48
| 2018-03-11T08:23:36
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 16,644
|
py
|
# coding=utf-8
import logging
import json
from . import api
from ihome import redis_store, constants, db
from ihome.models import Area
from flask import request, jsonify, g, session, current_app
from ihome.response_code import RET
from ihome.models import House, Facility, HouseImage, User, Order
from ihome.utils.commons import login_required
from ihome.utils.image_storage import storage
from datetime import datetime
@api.route('/areas/')
def get_area_info():
    """Return the list of city areas as JSON, using redis as a cache.

    Flow: try the redis cache; on a miss query MySQL, serialize the rows to
    JSON, write the JSON back into redis, and return it.  The cached value is
    already a JSON string, so the response is built by string formatting
    instead of jsonify().
    """
    # -- business logic --
    # 1. Try to read the cached JSON from redis.
    try:
        # The cache stores ready-made JSON so it can be returned to the front
        # end without re-serialization.
        areas_json = redis_store.get('area_info')
    except Exception as e:
        logging.error(e)
        # Treat any redis failure as a plain cache miss.
        areas_json = None
    # 2. Cache miss: query MySQL.
    if areas_json is None:
        # Fetch all area rows.
        areas_list = Area.query.all()
        # 3. Convert the model objects to plain dicts.
        areas = []
        for area in areas_list:
            # to_dict() is the model's own dict-conversion helper.
            areas.append(area.to_dict())
        # Serialize once; the same string is cached and returned.
        areas_json = json.dumps(areas)
        # 4. Store the JSON in redis with an expiry.
        try:
            redis_store.setex('area_info', constants.AREA_INFO_REDIS_EXPIRES, areas_json)
            db.session.commit()
        except Exception as e:
            logging.error(e)
            db.session.rollback()
            # A failed cache write is not fatal: the next request falls back
            # to MySQL and tries to cache again.
    # 5. Cache hit: use the cached string as-is.
    else:
        logging.info('当前数据从redis中读取的')
    # -- response --
    # areas_json is already a JSON string, so jsonify() (which expects Python
    # objects) cannot be used; return the raw string with an explicit
    # Content-Type header instead.
    return '{"errno": 0, "errmsg": "查询城区信息成功", "data":{"areas": %s}}' % areas_json, 200, \
           {"Content-Type": "application/json"}
@api.route("/houses/info", methods=["POST"])
@login_required
def save_house_info():
    """Save the basic information of a house for the logged-in user.

    The front end POSTs a JSON body of the form:
    {
        "title":"",
        "price":"",
        "area_id":"1",
        "address":"",
        "room_count":"",
        "acreage":"",
        "unit":"",
        "capacity":"",
        "beds":"",
        "deposit":"",
        "min_days":"",
        "max_days":"",
        "facility":["7","8"]
    }
    """
    # -- 1. fetch parameters --
    house_data = request.get_json()
    if house_data is None:
        return jsonify(errno=RET.PARAMERR, errmsg="参数错误")
    title = house_data.get("title")  # house title
    price = house_data.get("price")  # price per night
    area_id = house_data.get("area_id")  # id of the area the house belongs to
    address = house_data.get("address")  # house address
    room_count = house_data.get("room_count")  # number of rooms
    acreage = house_data.get("acreage")  # floor area
    unit = house_data.get("unit")  # layout (rooms/halls)
    capacity = house_data.get("capacity")  # maximum number of guests
    beds = house_data.get("beds")  # number of beds
    deposit = house_data.get("deposit")  # deposit
    min_days = house_data.get("min_days")  # minimum stay in days
    max_days = house_data.get("max_days")  # maximum stay in days
    # -- 2. validate parameters --
    if not all((title, price, area_id, address, room_count,acreage, unit, capacity, beds, deposit, min_days, max_days)):
        return jsonify(errno=RET.PARAMERR, errmsg="参数不完整")
    # Validate price and deposit: the front end sends yuan as a float string,
    # while the database stores fen (1/100 yuan) as an integer.
    try:
        price = int(float(price) * 100)
        deposit = int(float(deposit) * 100)
    except Exception as e:
        return jsonify(errno=RET.DATAERR, errmsg="参数有误")
    # -- 3. save the data --
    # 3.1 build the House object
    user_id = g.user_id
    house = House(
        user_id=user_id,
        area_id=area_id,
        title=title,
        price=price,
        address=address,
        room_count=room_count,
        acreage=acreage,
        unit=unit,
        capacity=capacity,
        beds=beds,
        deposit=deposit,
        min_days=min_days,
        max_days=max_days
    )
    # 3.2 handle the facility list
    facility_id_list = house_data.get("facility")
    if facility_id_list:
        # The user ticked some facilities; drop any ids that do not exist:
        # select * from facility where id in (facility_id_list)
        try:
            facility_list = Facility.query.filter(Facility.id.in_(facility_id_list)).all()
        except Exception as e:
            logging.error(e)
            return jsonify(errno=RET.DBERR, errmsg="数据库异常")
        # attach the validated facilities to the house
        if facility_list:
            house.facilities = facility_list
    # 3.3 commit to the database
    try:
        db.session.add(house)
        db.session.commit()
    except Exception as e:
        logging.error(e)
        db.session.rollback()
        return jsonify(errno=RET.DBERR, errmsg="保存数据失败")
    # -- 4. respond --
    return jsonify(errno=RET.OK, errmsg="保存成功", data={"house_id": house.id})
@api.route("/houses/image", methods=["POST"])
@login_required
def save_house_image():
    """Save an uploaded image for a house."""
    # parameters: the house id and the image file
    house_id = request.form.get("house_id")
    image_file = request.files.get("house_image")
    # validate
    if not all([house_id, image_file]):
        return jsonify(errno=RET.PARAMERR, errmsg="参数不完整")
    # Steps:
    # 1. check that the house exists
    # 2. upload the image to Qiniu (object storage)
    # 3. store the image record in the database
    # 4. set the house's main picture if it has none yet
    # 5. commit everything in one transaction
    # 1. check that the house exists
    try:
        house = House.query.get(house_id)
    except Exception as e:
        logging.error(e)
        return jsonify(errno=RET.DBERR, errmsg="数据库异常")
    if house is None:
        return jsonify(errno=RET.NODATA, errmsg="房屋不存在")
    # 2. upload the image bytes to Qiniu; storage() returns the file name
    image_data = image_file.read()
    try:
        file_name = storage(image_data)
    except Exception as e:
        logging.error(e)
        return jsonify(errno=RET.THIRDERR, errmsg="保存房屋图片失败")
    # 3. store the image record in the database
    house_image = HouseImage(
        house_id=house_id,
        url=file_name
    )
    db.session.add(house_image)
    # 4. use this image as the house's main picture if none is set yet
    if not house.index_image_url:
        house.index_image_url = file_name
        db.session.add(house)
    # 5. commit both records together
    try:
        db.session.commit()
    except Exception as e:
        logging.error(e)
        db.session.rollback()
        return jsonify(errno=RET.DBERR, errmsg="保存图片信息失败")
    image_url = constants.QINIU_URL_DOMAIN + file_name
    return jsonify(errno=RET.OK, errmsg="保存图片成功", data={"image_url": image_url})
@api.route("/users/houses", methods=["GET"])
@login_required
def get_user_houses():
    """Return the listing entries published by the logged-in landlord."""
    landlord_id = g.user_id
    try:
        landlord = User.query.get(landlord_id)
        houses = landlord.houses
    except Exception as e:
        logging.error(e)
        return jsonify(errno=RET.DBERR, errmsg="获取数据失败")
    # Serialise each house to its basic-info dict (empty list when none).
    houses_list = [house.to_basic_dict() for house in houses] if houses else []
    return jsonify(errno=RET.OK, errmsg="OK", data={"houses": houses_list})
@api.route("/houses/index", methods=["GET"])
def get_house_index():
    """Return basic info of the houses shown in the home-page carousel."""
    # try the redis cache first
    try:
        ret = redis_store.get("home_page_data")
    except Exception as e:
        logging.error(e)
        ret = None
    if ret:
        logging.info("hit house index info redis")
        # redis stores a ready-made JSON string, so the response is built by
        # plain string formatting instead of jsonify()
        return '{"errno":0, "errmsg":"OK", "data":%s}' % ret, 200, {"Content-Type": "application/json"}
    else:
        try:
            # query the database for the houses with the most orders
            houses = House.query.order_by(House.order_count.desc()).limit(constants.HOME_PAGE_MAX_HOUSES)
        except Exception as e:
            logging.error(e)
            return jsonify(errno=RET.DBERR, errmsg="查询数据失败")
        # NOTE(review): limit() returns a lazy query object here, so this
        # truthiness check may not reflect the row count - confirm intended
        if not houses:
            return jsonify(errno=RET.NODATA, errmsg="查询无数据")
        houses_list = []
        for house in houses:
            # skip houses that have no main picture set
            if not house.index_image_url:
                continue
            houses_list.append(house.to_basic_dict())
        # serialize to JSON and cache it in redis
        json_houses = json.dumps(houses_list)
        try:
            redis_store.setex("home_page_data", constants.HOME_PAGE_DATA_REDIS_EXPIRES, json_houses)
        except Exception as e:
            logging.error(e)
            # a failed cache write is not fatal; the data is still returned
        return '{"errno":0, "errmsg":"OK", "data":%s}' % json_houses, 200, {"Content-Type": "application/json"}
@api.route("/houses/<int:house_id>", methods=["GET"])
def get_house_detail(house_id):
    """Return the full detail of one house.

    The detail page only shows the booking button to viewers who are NOT the
    landlord of this house, so the response must include the viewer's
    user_id.  If the viewer is not logged in, user_id is "-1".
    """
    user_id = session.get("user_id", "-1")
    # validate
    if not house_id:
        return jsonify(errno=RET.PARAMERR, errmsg="参数缺失")
    # try the redis cache first
    try:
        ret = redis_store.get("house_info_%s" % house_id)
    except Exception as e:
        logging.error(e)
        ret = None
    if ret:
        logging.info("hit house info redis")
        return '{"errno":"0", "errmsg":"OK", "data":{"user_id":%s, "house":%s}}' % (user_id, ret), 200, {"Content-Type": "application/json"}
    # cache miss: query the database
    try:
        house = House.query.get(house_id)
    except Exception as e:
        logging.error(e)
        return jsonify(errno=RET.DBERR, errmsg="查询数据失败")
    if not house:
        return jsonify(errno=RET.NODATA, errmsg="房屋不存在")
    # convert the model object to its full-detail dict
    try:
        house_data = house.to_full_dict()
    except Exception as e:
        logging.error(e)
        return jsonify(errno=RET.DATAERR, errmsg="数据出错")
    # cache the JSON in redis for the next request (failure is non-fatal)
    json_house = json.dumps(house_data)
    try:
        redis_store.setex("house_info_%s" % house_id, constants.HOUSE_DETAIL_REDIS_EXPIRE_SECOND, json_house)
    except Exception as e:
        current_app.logger.error(e)
    resp = '{"errno":"0", "errmsg":"OK", "data":{"user_id":%s, "house":%s}}' % (user_id, json_house), 200, {"Content-Type": "application/json"}
    return resp
# /api/v1_0/houses?sd=xxxx-xx-xx&ed=xxxx-xx-xx&aid=xx&sk=new&p=1
@api.route("/houses", methods=["GET"])
def get_house_list():
    """Return a filtered, sorted, paginated list of houses.

    Query string: sd/ed = desired start/end date, aid = area id,
    sk = sort key, p = page number.  Pages are cached per filter
    combination in a redis hash.
    """
    # -- 1. fetch parameters --
    start_date_str = request.args.get("sd", "")  # desired start date
    end_date_str = request.args.get("ed", "")  # desired end date
    area_id = request.args.get("aid", "")  # area id
    sort_key = request.args.get("sk", "new")  # sort key
    page = request.args.get("p", 1)  # page number
    # -- 2. validate parameters --
    # 2.1 dates
    try:
        start_date = None
        if start_date_str:
            start_date = datetime.strptime(start_date_str, "%Y-%m-%d")
        end_date = None
        if end_date_str:
            end_date = datetime.strptime(end_date_str, "%Y-%m-%d")
        if start_date and end_date:
            assert start_date <= end_date
    except Exception as e:
        return jsonify(errno=RET.PARAMERR, errmsg="日期参数有误")
    # 2.2 page number (fall back to page 1 on bad input)
    try:
        page = int(page)
    except Exception:
        page = 1
    # -- 3. business logic --
    # 3.1 try the redis cache first: one hash per filter combination,
    # one field per page
    try:
        redis_key = "houses_%s_%s_%s_%s" % (start_date_str, end_date_str, area_id, sort_key)
        resp_json = redis_store.hget(redis_key, page)
    except Exception as e:
        current_app.logger.error(e)
        resp_json = None
    if resp_json:
        # cache hit: the stored value is already a JSON string
        return resp_json, 200, {"Content-Type": "application/json"}
    # 3.2 collect query filter conditions here
    filter_params = []
    # 3.3 area filter
    if area_id:
        filter_params.append(House.area_id == area_id)
    # 3.4 date filter: find orders that conflict with the requested interval
    # and exclude their houses
    try:
        conflict_orders_li = []
        if start_date and end_date:
            # orders overlapping [start_date, end_date]
            conflict_orders_li = Order.query.filter(Order.begin_date <= end_date, Order.end_date >= start_date).all()
        elif start_date:
            # orders ending on/after the requested start
            conflict_orders_li = Order.query.filter(Order.end_date >= start_date).all()
        elif end_date:
            # orders beginning on/before the requested end
            conflict_orders_li = Order.query.filter(Order.begin_date <= end_date).all()
    except Exception as e:
        current_app.logger.error(e)
        return jsonify(errno=RET.DBERR, errmsg="数据库异常")
    if conflict_orders_li:
        conflict_house_id_li = [order.house_id for order in conflict_orders_li]
        # keep only houses NOT involved in a conflicting order
        filter_params.append(House.id.notin_(conflict_house_id_li))
    # 3.5 sorting
    if sort_key == "booking":
        house_query = House.query.filter(*filter_params).order_by(House.order_count.desc())
    elif sort_key == "price-inc":
        house_query = House.query.filter(*filter_params).order_by(House.price.asc())
    elif sort_key == "price-des":
        house_query = House.query.filter(*filter_params).order_by(House.price.desc())
    else:
        house_query = House.query.filter(*filter_params).order_by(House.create_time.desc())
    # 3.6 sqlalchemy pagination
    try:
        # arguments: page number, items per page, no-404-on-error
        house_page = house_query.paginate(page, constants.HOUSE_LIST_PAGE_CAPACITY, False)
    except Exception as e:
        current_app.logger.error(e)
        return jsonify(errno=RET.DBERR, errmsg="数据库异常")
    # 3.7 serialise the current page to JSON
    house_li = house_page.items  # rows on the current page
    total_page = house_page.pages  # total number of pages
    houses = []
    for house in house_li:
        houses.append(house.to_basic_dict())
    resp = dict(errno=RET.OK, errmsg="查询成功", data={"houses": houses, "total_page": total_page, "current_page":page})
    resp_json = json.dumps(resp)
    # 3.8 cache the page in redis (only pages that actually exist)
    if page <= total_page:
        redis_key = "houses_%s_%s_%s_%s" % (start_date_str, end_date_str, area_id, sort_key)
        try:
            # use a redis pipeline transaction so hset+expire apply together
            pipeline = redis_store.pipeline()
            # begin the transaction
            pipeline.multi()
            pipeline.hset(redis_key, page, resp_json)
            pipeline.expire(redis_key, constants.HOUSE_LIST_PAGE_REDIS_EXPIRES)
            # execute the transaction
            pipeline.execute()
        except Exception as e:
            current_app.logger.error(e)
    # -- 4. respond --
    return resp_json, 200, {"Content-Type": "application/json"}
|
[
"1273844671@qq.com"
] |
1273844671@qq.com
|
ec282d154faabb3d27915f38c3c13d823ae008c8
|
39de3097fb024c67a00c8d0e57c937d91f8b2cc9
|
/Graphs/first_depth_first_search.py
|
d08ac89c8316ae345b61554a0dbaf65cbb800397
|
[] |
no_license
|
srajsonu/InterviewBit-Solution-Python
|
4f41da54c18b47db19c3c0ad0e5efa165bfd0cd0
|
6099a7b02ad0d71e08f936b7ac35fe035738c26f
|
refs/heads/master
| 2023-03-07T05:49:15.597928
| 2021-02-24T18:20:07
| 2021-02-24T18:20:07
| 249,359,666
| 0
| 2
| null | 2020-10-06T10:54:07
| 2020-03-23T07:09:53
|
Python
|
UTF-8
|
Python
| false
| false
| 558
|
py
|
from _collections import defaultdict
class Solution:
    """Breadth-first reachability on a 1-indexed directed graph.

    The graph is given implicitly: A[i] is the node that node i+1 attaches
    to, i.e. there is a directed edge A[i] -> i+1 for every index i.
    """

    def __init__(self):
        # Stdlib 'collections', not the private '_collections' accelerator
        # module the original file imported.
        from collections import defaultdict
        # Adjacency list: node -> list of successor nodes.
        self.graph = defaultdict(list)

    def Solve(self, A, B, C):
        """Return 1 if node B is reachable from node C, else 0.

        A: parent list (A[i] -> i+1 edges), B: target node, C: start node.
        """
        from collections import deque
        n = len(A)
        # Build the adjacency list: edge A[i] -> (i + 1).
        for i in range(n):
            self.graph[A[i]].append(i + 1)
        visited = [0] * (n + 1)
        # Standard BFS; deque.popleft() is O(1), unlike list.pop(0)'s O(n).
        queue = deque([C])
        visited[C] = 1
        while queue:
            node = queue.popleft()
            for nxt in self.graph[node]:
                if not visited[nxt]:
                    queue.append(nxt)
                    visited[nxt] = 1
        return visited[B]
# Quick manual check: node 9 should be reachable from node 1, printing 1.
A=[1,1,1,3,3,2,2,7,6]
B=9
C=1
D=Solution()
print(D.Solve(A,B,C))
|
[
"srajsonu02@gmail.com"
] |
srajsonu02@gmail.com
|
d9d896fb355e176f32778e810f56eebdbe390e59
|
f775d4763d10e84d682d2122b2cb87ec18be77b8
|
/setup.py
|
cf72f0ac32fe380f6028da1217fe4f08d6fce348
|
[
"MIT"
] |
permissive
|
chrysn-pull-requests/aiodnsprox
|
d23b8de88c47cd3f20baadcc565805bde4a0b931
|
e53843015fb22a6c0327e467b2956a45c82678e1
|
refs/heads/main
| 2023-08-17T17:21:22.375237
| 2021-10-01T10:21:33
| 2021-10-01T10:32:44
| 413,811,566
| 0
| 0
|
MIT
| 2021-10-05T12:37:27
| 2021-10-05T12:37:26
| null |
UTF-8
|
Python
| false
| false
| 1,992
|
py
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright (C) 2021 Freie Universität Berlin
#
# Distributed under terms of the MIT license.
import os
from setuptools import setup, find_packages
# Distribution metadata consumed by the setup() call below.
PACKAGE = "aiodnsprox"
DESCRIPTION = "A Python-based DNS-over-X proxy based on aiocoap "
AUTHOR = "Martine S. Lenders"
AUTHOR_EMAIL = "m.lenders@fu-berlin.de"
URL = "https://github.com/anr-bmbf-pivot/aiodnsprox"
def get_requirements():
    """Yield the requirement strings listed in requirements.txt.

    Blank lines and ``#`` comment lines are skipped, so stray whitespace or
    annotations in the file do not end up as (invalid) empty requirements in
    ``install_requires``.
    """
    with open("requirements.txt") as req_file:
        for line in req_file:
            requirement = line.strip()
            if requirement and not requirement.startswith("#"):
                yield requirement
def get_version(package):
    """ Extract package version without importing file

    Importing cause issues with coverage,
        (modules can be removed from sys.modules to prevent this)
    Importing __init__.py triggers importing rest and then requests too

    Inspired from pep8 setup.py

    Returns the version string, or None when no __version__ line is found.
    """
    import ast
    with open(os.path.join(package, '__init__.py')) as init_fd:
        for line in init_fd:
            if line.startswith('__version__'):
                # literal_eval only evaluates literals, unlike eval which
                # would execute arbitrary code read from the file.
                return ast.literal_eval(line.split('=')[-1].strip())
    return None
# Build/register the distribution.  Metadata comes from the module-level
# constants; the version is read from the package's __init__.py and the
# dependencies from requirements.txt.
setup(
    name=PACKAGE,
    version=get_version(PACKAGE),
    description=DESCRIPTION,
    long_description=open('README.rst').read(),
    long_description_content_type="text/x-rst",
    author=AUTHOR,
    author_email=AUTHOR_EMAIL,
    url=URL,
    license='MIT',
    download_url=URL,
    packages=find_packages(),
    classifiers=['Development Status :: 2 - Pre-Alpha',
                 'Programming Language :: Python',
                 'Programming Language :: Python :: 3',
                 'Programming Language :: Python :: 3.9',
                 'Intended Audience :: End Users/Desktop',
                 'Environment :: Console',
                 'Topic :: Utilities', ],
    install_requires=list(get_requirements()),
    entry_points= {
        'console_scripts': [
            'aiodns-proxy = aiodnsprox.cli.proxy:sync_main',
        ],
    },
    python_requires='>=3.7',
)
|
[
"m.lenders@fu-berlin.de"
] |
m.lenders@fu-berlin.de
|
783970d24e1d97381791c087441d406eeaf61ecf
|
03c27d0d9c64905b0e52856f78d995256ef1f7c1
|
/details/migrations/0005_auto_20210719_2318.py
|
2fc8a788de4e914b66ed596d5820c7a201da4084
|
[] |
no_license
|
Anish06-crypto/loan-application
|
c7f0ef77aeeac9d6c4dc38b1f1b67c31a732736b
|
34d1275490b8f94fc58c34c6c65d8f41209ebbc9
|
refs/heads/master
| 2023-06-23T00:49:42.152419
| 2021-07-20T06:00:01
| 2021-07-20T06:00:01
| 387,684,923
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,031
|
py
|
# Generated by Django 3.2.5 on 2021-07-19 17:48
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
# Auto-generated Django migration: strips eight personal-detail fields off
# PersonalDetails and re-creates them on a new MoreDetails model that points
# back at PersonalDetails via a ForeignKey.
# NOTE(review): RemoveField drops the existing column data before the new
# model is created -- existing values are NOT migrated; confirm this is
# intentional before running against production data.
class Migration(migrations.Migration):
    dependencies = [
        ('details', '0004_auto_20210719_2313'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='personaldetails',
            name='aadhar_number',
        ),
        migrations.RemoveField(
            model_name='personaldetails',
            name='address',
        ),
        migrations.RemoveField(
            model_name='personaldetails',
            name='country',
        ),
        migrations.RemoveField(
            model_name='personaldetails',
            name='mailing_address',
        ),
        migrations.RemoveField(
            model_name='personaldetails',
            name='marital_status',
        ),
        migrations.RemoveField(
            model_name='personaldetails',
            name='pan_number',
        ),
        migrations.RemoveField(
            model_name='personaldetails',
            name='pin_code',
        ),
        migrations.RemoveField(
            model_name='personaldetails',
            name='state',
        ),
        # The removed fields reappear here, plus a FK ('name') to the
        # original PersonalDetails row.
        migrations.CreateModel(
            name='MoreDetails',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('marital_status', models.CharField(choices=[('Married', 'married'), ('Unmarried', 'unmarried'), ('Separated', 'separated')], max_length=10, null=True)),
                ('address', models.CharField(max_length=150, null=True)),
                ('state', models.CharField(choices=[('Karnataka', 'karnataka'), ('Maharashtra', 'maharashtra'), ('New York', 'new york'), ('Texas', 'texas'), ('Melbourne', 'melbourne')], max_length=30, null=True)),
                ('pin_code', models.CharField(blank=True, max_length=6, null=True, validators=[django.core.validators.RegexValidator(message="Pin number must be entered in the format: '99999'. Up to 6 digits allowed.", regex='^\\d{6,6}$')])),
                ('country', models.CharField(choices=[('India', 'India'), ('USA', 'USA'), ('Australia', 'Australia')], max_length=30, null=True)),
                ('mailing_address', models.BooleanField(null=True)),
                ('aadhar_number', models.CharField(blank=True, max_length=12, null=True, validators=[django.core.validators.RegexValidator(message="Aadhar number must be entered in the format: '1111 1111 1111 1111'. Up to 12 digits allowed.", regex='^\\d{12,12}$')])),
                ('pan_number', models.CharField(blank=True, max_length=10, null=True, validators=[django.core.validators.RegexValidator(message="Pan number must be entered in the format: 'AhsGr2783d'. Up to 10 digits allowed.", regex='^[A-Za-z]{5}\\d{4}[A-Za-z]{1}$')])),
                ('name', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='details.personaldetails')),
            ],
        ),
    ]
|
[
"anishr62gmail.comgit config --global user.name Anish06-crypto/git config --global user.email anishr62gmail.com"
] |
anishr62gmail.comgit config --global user.name Anish06-crypto/git config --global user.email anishr62gmail.com
|
75cc35602ae659ea024b658db136fe838acb3ec8
|
dae4ab4882080344e5f505def7e2e59e0ed888b4
|
/polyaxon/libs/unique_urls.py
|
9a1268f47539af0e3fffc4d92358250465c22ab1
|
[
"MPL-2.0"
] |
permissive
|
vfdev-5/polyaxon
|
8c3945604e8eaa25ba8b3a39ed0838d0b9f39a28
|
3e1511a993dc1a03e0a0827de0357f4adcc0015f
|
refs/heads/master
| 2021-07-09T22:27:23.272591
| 2018-11-01T23:44:44
| 2018-11-01T23:44:44
| 154,320,634
| 0
| 0
|
MIT
| 2018-10-23T12:01:34
| 2018-10-23T12:01:33
| null |
UTF-8
|
Python
| false
| false
| 1,467
|
py
|
def get_user_url(username):
    """Return the profile URL path for *username*."""
    return f'/{username}'
def get_project_url(unique_name):
    """Build the project URL from a '<user>.<project>' unique name."""
    parts = unique_name.split('.')
    return f'{get_user_url(parts[0])}/{parts[1]}'
def get_user_project_url(username, project_name):
    """Return the URL of *project_name* under *username*'s profile."""
    return f'{get_user_url(username)}/{project_name}'
def get_experiment_url(unique_name):
    """Return the experiment URL for a '<user>.<project>...<id>' unique name."""
    parts = unique_name.split('.')
    base = get_user_project_url(username=parts[0], project_name=parts[1])
    return f'{base}/experiments/{parts[-1]}'
def get_experiment_health_url(unique_name):
    """Return the heartbeat endpoint of the given experiment."""
    return f'{get_experiment_url(unique_name=unique_name)}/_heartbeat'
def get_experiment_group_url(unique_name):
    """Return the group URL for a '<user>.<project>...<id>' unique name."""
    parts = unique_name.split('.')
    base = get_user_project_url(username=parts[0], project_name=parts[1])
    return f'{base}/groups/{parts[-1]}'
def get_job_url(unique_name):
    """Return the job URL for a '<user>.<project>...<id>' unique name."""
    parts = unique_name.split('.')
    base = get_user_project_url(username=parts[0], project_name=parts[1])
    return f'{base}/jobs/{parts[-1]}'
def get_job_health_url(unique_name):
    """Return the heartbeat endpoint of the given job."""
    return f'{get_job_url(unique_name=unique_name)}/_heartbeat'
def get_build_url(unique_name):
    """Return the build URL for a '<user>.<project>...<id>' unique name."""
    parts = unique_name.split('.')
    base = get_user_project_url(username=parts[0], project_name=parts[1])
    return f'{base}/builds/{parts[-1]}'
|
[
"mouradmourafiq@gmail.com"
] |
mouradmourafiq@gmail.com
|
24624a2a8ec05dc33a29232d475a8b8132ad41cb
|
df3c1c48abfc3892c7c6d21df5ecbe738ead6b38
|
/app.py
|
dbca7180cf9a9b0d9781eb40ac8188ed9d7a4030
|
[] |
no_license
|
timshik/Store
|
881e489d385c7db84f8edf2600b3c22f09b1847c
|
087f831cf9326b4dfce2648584911e59758228ef
|
refs/heads/master
| 2022-04-15T06:07:15.863061
| 2020-04-14T13:00:00
| 2020-04-14T13:00:00
| 255,578,160
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 957
|
py
|
import os
from flask import Flask
from flask_restful import Api
from flask_jwt import JWT
from security import authenticate, identity
from resources.user import UserRegister
from resources.item import Item, ItemList
from db import db
from resources.store import Store, StoreList

app = Flask(__name__)
# Flask-SQLAlchemy only reads the upper-case SQLALCHEMY_* keys; the previous
# mixed-case spellings ("SQLAlchemy_DATABASE_URI") were silently ignored.
app.config["SQLALCHEMY_DATABASE_URI"] = os.environ.get("DATABASE_URL", "sqlite:///data.db")
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
app.config['PROPAGATE_EXCEPTIONS'] = True
app.secret_key = 'jose'  # NOTE(review): move the secret to an env var

# Bug fix: db.init_app(app) previously ran *before* `app` was defined,
# raising NameError at import time. It must follow app creation/config.
db.init_app(app)

api = Api(app)


@app.before_first_request
def create_tables():
    """Create all database tables before the first request is served."""
    db.create_all()


jwt = JWT(app, authenticate, identity)  # JWT exposes the /auth endpoint

api.add_resource(Store, "/store/<string:name>")
api.add_resource(StoreList, "/stores")
api.add_resource(Item, '/item/<string:name>')
api.add_resource(ItemList, '/items')
api.add_resource(UserRegister, '/register')

if __name__ == '__main__':
    app.run(debug=True)  # important to mention debug=True
|
[
"timshik@users.noreply.github.com"
] |
timshik@users.noreply.github.com
|
539846eac1b2f133d9cd8effb4190a5c233a6adb
|
1a5d7882b9e89b821851be328256211c65f9c1a2
|
/simple_settings/strategies/__init__.py
|
7d95f88acb1141987187d45f54d012d3e2e30de8
|
[
"MIT"
] |
permissive
|
matthewh/simple-settings
|
2644f3032e5fc7ffa50dc8fa164bf79f640e5641
|
dbddf8d5be7096ee7c4c3cc6d82824befa9b714f
|
refs/heads/master
| 2022-11-04T22:25:55.398073
| 2020-06-22T19:25:03
| 2020-06-22T19:25:03
| 274,223,776
| 0
| 0
|
MIT
| 2020-06-22T19:21:30
| 2020-06-22T19:21:29
| null |
UTF-8
|
Python
| false
| false
| 805
|
py
|
# -*- coding: utf-8 -*-
from .cfg import SettingsLoadStrategyCfg
from .environ import SettingsLoadStrategyEnviron
from .json_file import SettingsLoadStrategyJson
from .python import SettingsLoadStrategyPython

# Optional strategies: available only when their parser library is installed.
yaml_strategy = None
try:
    from .yaml_file import SettingsLoadStrategyYaml
    yaml_strategy = SettingsLoadStrategyYaml
except ImportError:  # pragma: no cover
    pass

toml_strategy = None
try:
    from .toml_file import SettingsLoadStrategyToml
    toml_strategy = SettingsLoadStrategyToml
except ImportError:  # pragma: no cover
    pass

# Assemble the lookup order; optional strategies are appended (yaml before
# toml) only when their import succeeded above.
_available = [
    SettingsLoadStrategyPython,
    SettingsLoadStrategyCfg,
    SettingsLoadStrategyJson,
    SettingsLoadStrategyEnviron,
]
for _optional in (yaml_strategy, toml_strategy):
    if _optional:
        _available.append(_optional)
strategies = tuple(_available)
|
[
"drgarcia1986@gmail.com"
] |
drgarcia1986@gmail.com
|
ed2954bdd2ec5424da580a3dbdf86056e9c9e612
|
dd1e2ed53fec3dca0fa60042c04ad8cf6019ed89
|
/python/functions/arguments_passed_as_dictionary/arguments_passed_as_dictionary.py
|
bd77e7ed569887e6547b03ab831fdd645d5f53b0
|
[] |
no_license
|
cloudavail/snippets
|
9be4ee285789ff3cff1a3a71e1f505a1b1697500
|
340f5c2735d6ec88b793f1eea91f2b026c24586e
|
refs/heads/main
| 2023-08-03T10:30:13.976947
| 2023-05-15T04:46:32
| 2023-05-15T04:46:32
| 12,838,293
| 22
| 24
| null | 2023-09-07T03:33:17
| 2013-09-15T00:40:49
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 668
|
py
|
#!/usr/bin/env python
# objective: pass arguments as dictionary


def argument_catcher(city, population, size, state):
    """Print the received keyword values (demonstrates **kwargs unpacking)."""
    # Python 3 fix: print is a function, not a statement (the original
    # `print '...'` syntax is a SyntaxError on Python 3).
    print('city: {!s}'.format(city))
    print('state: {!s}'.format(state))
    print('population: {!s}'.format(population))
    print('size: {!s} miles'.format(size))


# creates the dictionary to be passed to the "argument_catcher" function
arguments_dict = {'city': 'San Francisco', 'population': 800000, 'size': 49,
                  'state': 'California'}

# calls the function "argument_catcher" with the previously created dictionary
argument_catcher(**arguments_dict)
|
[
"colinbjohnson@gmail.com"
] |
colinbjohnson@gmail.com
|
bf74ca17e3732d2678260260cb83b4e680409be7
|
239c573e6d5c478bfd848f813f9d3760db2a6ac4
|
/applyuni/Portal/views/universityportal.py
|
8807ec819c995213404da803604d9e29dbf6d642
|
[] |
no_license
|
Harsha2001-creater/project_2020
|
8b0acf11d02e825cf0b07afedd2b1e06021f855f
|
a784e620e208c112df2477ac20e955aa45d819fc
|
refs/heads/main
| 2023-03-05T05:40:03.807723
| 2021-02-19T10:20:20
| 2021-02-19T10:20:20
| 326,559,622
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,720
|
py
|
from django.shortcuts import render
from django.shortcuts import render,redirect,HttpResponseRedirect
from django.http import HttpResponse
from django.views import View
from Portal.models.newcourse import Newcourse
class Universityportal(View):
    """Class-based view for a university to register a new course."""
    def get(self,request):
        # Render the empty new-course form.
        return render(request,'University_portal/newcourse.html')
    def post(self,request):
        """Read the submitted course form and persist it as a Newcourse row.

        NOTE(review): no validation -- all fields are taken as-is from POST
        and may be None; request.session['university_Email'] raises KeyError
        if the university is not logged in. Confirm upstream guards exist.
        """
        Coursename=request.POST.get('coursename')
        Coursetype=request.POST.get('coursetype')
        Facultyname=request.POST.get('facultyname')
        Courseapproval=request.POST.get('courseapproval')
        Approvalauthority=request.POST.get('approval')
        Tutionfee=request.POST.get('fee')
        Amount1=request.POST.get('amount1')
        Sem1=request.POST.get('sem1')
        Sem2=request.POST.get('sem2')
        Sem3=request.POST.get('sem3')
        Sem4=request.POST.get('sem4')
        Sem5=request.POST.get('sem5')
        Sem6=request.POST.get('sem6')
        Duration1=request.POST.get('duration1')
        Noofsems=request.POST.get('noofsems')
        Criteria1=request.POST.get('criteria1')
        Criteria2=request.POST.get('criteria2')
        Criteria3=request.POST.get('criteria3')
        # The course is tied to the logged-in university via its session email.
        Universitymail=request.session['university_Email']
        newcourse=Newcourse(Coursename=Coursename,Coursetype=Coursetype,Facultyname=Facultyname,Courseapproval=Courseapproval,
        Approvalauthority=Approvalauthority,Tutionfee=Tutionfee,Amount1=Amount1,Sem1=Sem1,Sem2=Sem2,Sem3=Sem3,Sem4=Sem4,
        Sem5=Sem5,Sem6=Sem6,Duration1=Duration1,Noofsems=Noofsems,Criteria1=Criteria1,Criteria2=Criteria2,Criteria3=Criteria3,Universitymail=Universitymail)
        # register() is the model's own persistence helper (defined on Newcourse).
        newcourse.register()
        return redirect('Unisettings')
|
[
"niharikareddy561@gmail.com"
] |
niharikareddy561@gmail.com
|
94c209d3d25c989f349ccd38025fa4dd3e3dbd18
|
7f35d7d1b8f203217f47a615ca8efdb5e17976db
|
/algo/second/p693_binary_number_with_alternating_bits.py
|
1c70b23a02fcb9375c33a53430168b55fc331bdc
|
[] |
no_license
|
thinkreed/lc.py
|
767dd61f4c9454f09e66e48b2974b8d049d6e448
|
ba6b2500b86489cc34852ff73ba0915e57aa0275
|
refs/heads/master
| 2020-05-16T14:49:18.261246
| 2019-07-16T23:42:12
| 2019-07-16T23:42:12
| 183,113,318
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 196
|
py
|
class Solution(object):
    def hasAlternatingBits(self, n):
        """Return True when the binary digits of n strictly alternate (1010...).

        :type n: int
        :rtype: bool
        """
        # Bug fix: `n / 2` is float division on Python 3, so `n ^ (n / 2)`
        # raised TypeError; a right shift floor-divides by two on both
        # Python 2 and 3.
        a = n ^ (n >> 1)
        # If n's bits alternate, `a` is a run of all ones, in which case
        # a & (a + 1) == 0.
        b = a + 1
        return not (a & b)
|
[
"thinkreed2017@outlook.com"
] |
thinkreed2017@outlook.com
|
5a45bf84ab969517a17806532492d907662c8f93
|
844e0cd4ffbe1ead05b844508276f66cc20953d5
|
/test/testseqdiagbuilder.py
|
111bfcc7200d6457de50b6bc106cb2bee15747bd
|
[] |
no_license
|
Archanciel/cryptopricer
|
a256fa793bb1f2d65b5c032dd81a266ee5be79cc
|
00c0911fe1c25c1da635dbc9b26d45be608f0cc5
|
refs/heads/master
| 2022-06-29T13:13:22.435670
| 2022-05-11T20:37:43
| 2022-05-11T20:37:43
| 100,196,449
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 66,316
|
py
|
import unittest
import os, sys, inspect
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(currentdir)
sys.path.insert(0, parentdir)
sys.path.insert(0,currentdir) # this instruction is necessary for successful importation of utilityfortest module when
# the test is executed standalone
from testclasses.isolatedclass import IsolatedClass
from testclasses.subtestpackage.isolatedclasssub import IsolatedClassSub
from testclasses.isolatedclasswithinstancevariables import IsolatedClassWithInstanceVariables
from testclasses.foobarclasses import *
from testclasses.subtestpackage.dsub import DSub
from testclasses.subtestpackage.caller import Caller
# Test fixture: each method starts a distinct call chain whose control flow
# SeqDiagBuilder records; the tests below assert the exact resulting diagram,
# so the call structure here must not change.
class Client:
    def do(self):
        c1 = ChildOne()
        c1.getCoordinate()
    def make(self):
        c1 = ChildOne()
        c1.compute()
    def perform(self):
        c1 = ChildOne()
        c1.computeTwo()
    def doCall(self):
        c1 = ChildOne()
        c1.computeThree()
    def doProcess(self):
        # Uses ChildOfChildTwo rather than ChildOne/ChildTwo on purpose.
        c1 = ChildOfChildTwo()
        c1.computeFour()
# Test fixture base class. The ':seqdiag_return' / ':seqdiag_select_method'
# docstring tags below are parsed by SeqDiagBuilder at runtime -- they are
# metadata, not documentation, and must stay byte-identical.
class Parent:
    def getCoordinate(self, location=''):
        '''
        :param location:
        :seqdiag_return Coord
        :return:
        '''
        SeqDiagBuilder.recordFlow()
    def getCoordinateNoneSelected(self, location=''):
        '''
        :param location:
        :seqdiag_return Coord
        :return:
        '''
        SeqDiagBuilder.recordFlow()
    def compute(self, size = 0):
        '''
        This a dummy merhod.
        :seqdiag_return Analysis
        :return:
        '''
        pass
    def computeTwo(self, size = 0):
        '''
        This a dummy merhod.
        :seqdiag_select_method
        :seqdiag_return Analysis
        :return:
        '''
        pass
    def computeThree(self, size = 0):
        '''
        This a dummy merhod.
        :seqdiag_select_method
        :seqdiag_return Analysis
        :return:
        '''
        iso = IsolatedClass()
        iso.analyse()
    def computeFour(self, size = 0):
        '''
        This a dummy merhod.
        :seqdiag_return Analysis
        :return:
        '''
        pass
    def inheritedMethod(self, inhArg):
        '''
        This a dummy merhod.
        :seqdiag_return inhMethResult
        :return:
        '''
        SeqDiagBuilder.recordFlow()
# Fixture subclass: overrides without docstring tags (getCoordinate*) versus
# overrides carrying :seqdiag_select_method tags (compute/computeTwo) let the
# tests exercise SeqDiagBuilder's method-selection logic.
class ChildOne(Parent):
    def getCoordinate(self, location=''):
        iso = IsolatedClass()
        iso.analyse()
    def getCoordinateNoneSelected(self, location=''):
        iso = IsolatedClass()
        iso.analyse()
    def m(self):
        pass
    def compute(self, size = 0):
        '''
        This a dummy merhod.
        :seqdiag_select_method
        :seqdiag_return Analysis
        :return:
        '''
        super().compute(size)
        iso = IsolatedClass()
        iso.analyse()
    def computeTwo(self, size = 0):
        '''
        This a dummy merhod.
        :seqdiag_select_method
        :seqdiag_return Analysis
        :return:
        '''
        super().compute(size)
        iso = IsolatedClass()
        iso.analyse()
# Fixture subclass of Parent; computeFour carries the select tag so this
# implementation (not a sibling's) is chosen when flows are recorded.
class ChildTwo(Parent):
    def l(self):
        pass
    def computeFour(self, size = 0):
        '''
        This a dummy merhod.
        :seqdiag_select_method
        :seqdiag_return Analysis
        :return:
        '''
        iso = IsolatedClass()
        iso.analyse()
    def getCoordinateNoneSelected(self, location=''):
        SeqDiagBuilder.recordFlow()
# Fixture subclass whose getCoordinate is tagged both with a distinct return
# type (CoordSel) and :seqdiag_select_method.
class ChildThree(Parent):
    def getCoordinate(self, location=''):
        '''
        :param location:
        :seqdiag_return CoordSel
        :seqdiag_select_method
        :return:
        '''
        SeqDiagBuilder.recordFlow()
# Fixture subclass whose computeFour has NO :seqdiag_select_method tag,
# unlike ChildTwo's -- the tests use this contrast.
class ChildOfChildTwo(Parent):
    def l(self):
        pass
    def computeFour(self, size = 0):
        '''
        This a dummy merhod.
        :seqdiag_return Analysis
        :return:
        '''
        iso = IsolatedClass()
        iso.analyse()
# Fixture: chains through its own methods and ClassB so nested/self calls
# appear in the recorded flow.
class ClassA:
    def doWork(self):
        '''
        :seqdiag_return ClassAdoWorkRes
        :return:
        '''
        self.internalCall()
    def internalCall(self):
        '''
        :seqdiag_return ResultPrice
        :return:
        '''
        pr = self.internalInnerCall()
        b = ClassB()
        res = b.createRequest(1, 2)
    def internalInnerCall(self):
        '''
        :seqdiag_return ResultPrice
        :return:
        '''
        b = ClassB()
        res = b.createInnerRequest(1)
    def aMethod(self, aMarg):
        '''
        :seqdiag_return ResultAmeth
        :return:
        '''
        child = ChildTwo()
        child.inheritedMethod(aMarg)
# Fixture leaf class: both methods terminate a chain by recording the flow.
class ClassB:
    def createInnerRequest(self, parm1):
        '''
        :seqdiag_return Bool
        :param parm1:
        :return:
        '''
        SeqDiagBuilder.recordFlow()
    def createRequest(self, parm1, parm2):
        '''
        :seqdiag_return Bool
        :param parm1:
        :return:
        '''
        SeqDiagBuilder.recordFlow()
# Fixture: third level of the A -> B -> C -> DSub call chains; includes a
# self-call (c3 -> c4) and a recursive method (fibonaci).
class C:
    def c1(self, c1_p1):
        '''
        :param c1_p1:
        :seqdiag_return Cc1Return
        :return:
        '''
        SeqDiagBuilder.recordFlow()
    def c2(self, c2_p1):
        '''
        :param c2_p1:
        :seqdiag_return Cc2Return
        :return:
        '''
        d = DSub()
        d.d1(1)
    def c3(self, c3_p1):
        '''
        :param c3_p1:
        :seqdiag_return Cc3Return
        :return:
        '''
        d = DSub()
        d.d2(1)
        SeqDiagBuilder.recordFlow()
        self.c4(1)
    def c4(self, c4_p1):
        '''
        :param c4_p1:
        :seqdiag_return Cc4Return
        :return:
        '''
        d = DSub()
        d.d2(1)
        SeqDiagBuilder.recordFlow()
    def c5(self, c5_p1):
        '''
        :param c5_p1:
        :seqdiag_return Cc5Return
        :return:
        '''
        d = DSub()
        d.d3(1)
    def fibonaci(self, number):
        '''
        :param number:
        :seqdiag_return CfibonaciReturn
        :return:
        '''
        # Recursion fixture: only the base case records the flow.
        if number == 1:
            SeqDiagBuilder.recordFlow()
            return 1
        else:
            return number + self.fibonaci(number - 1)
# Fixture: second level of the call chains. The class docstring's
# ':seqdiag_note' tag is rendered as a participant note in the diagrams,
# so it must stay byte-identical.
class B:
    '''
    :seqdiag_note Test class note for class B
    '''
    def b0(self, b1_p1):
        '''
        :param b1_p1:
        :seqdiag_return Bb1Return
        :return:
        '''
        pass
    def b1(self, b1_p1):
        '''
        :param b1_p1:
        :seqdiag_return Bb1Return
        :return:
        '''
        SeqDiagBuilder.recordFlow()
    def b2(self, b2_p1):
        '''
        :param b2_p1:
        :seqdiag_return Bb2Return
        :return:
        '''
        c = C()
        c.c1(1)
    def b3(self, b3_p1):
        '''
        :param b3_p1:
        :seqdiag_return Bb3Return
        :return:
        '''
        c = C()
        c.c1(1)
        c.c1(1)
    def b4(self, b4_p1):
        '''
        :param b4_p1:
        :seqdiag_return Bb4Return
        :return:
        '''
        SeqDiagBuilder.recordFlow()
    def b5(self, b5_p1):
        '''
        :param b5_p1:
        :seqdiag_return Bb5Return
        :return:
        '''
        SeqDiagBuilder.recordFlow()
    def b6(self, b6_p1):
        '''
        :param b6_p1:
        :seqdiag_return Bb6Return
        :return:
        '''
        c = C()
        c.c2(1)
    def b7(self, b7_p1):
        '''
        :param b7_p1:
        :seqdiag_return Bb7Return
        :return:
        '''
        c = C()
        c.c3(1)
        SeqDiagBuilder.recordFlow()
        d = DSub()
        d.d2(1)
    def b8(self, b8_p1):
        '''
        :param b8_p1:
        :seqdiag_return Bb8Return
        :return:
        '''
        c = C()
        c.c5(1)
        d = DSub()
        d.d2(1)
# Fixture: the entry-point class used by every test below; each aN method is
# an activation target for SeqDiagBuilder.activate(..., 'A', 'aN'). The
# class docstring's ':seqdiag_note' tag is rendered into the diagrams.
class A:
    '''
    :seqdiag_note Test class note for class A
    '''
    def a0(self, a1_p1, a1_p2):
        '''
        :param a1_p1:
        :param a1_p2:
        :seqdiag_return Aa1Return
        :return:
        '''
        pass
    def a1(self, a1_p1, a1_p2):
        '''
        :param a1_p1:
        :param a1_p2:
        :seqdiag_return Aa1Return
        :return:
        '''
        SeqDiagBuilder.recordFlow()
    def a2(self, a2_p1):
        '''
        :param a2_p1:
        :seqdiag_return Aa2Return
        :return:
        '''
        b = B()
        b.b1(1)
    def a3(self, a3_p1):
        '''
        :param a3_p1:
        :seqdiag_return Aa3Return
        :return:
        '''
        b = B()
        b.b2(1)
    def a4(self, a4_p1):
        '''
        :param a4_p1:
        :seqdiag_return Aa4Return
        :return:
        '''
        b = B()
        b.b1(1)
        b.b1(1)
    def a5(self, a5_p1):
        '''
        :param a5_p1:
        :seqdiag_return Aa5Return
        :return:
        '''
        b = B()
        b.b1(1)
        b.b1(1)
        b.b1(1)
    def a6(self, a6_p1):
        '''
        :param a6_p1:
        :seqdiag_return Aa6Return
        :return:
        '''
        b = B()
        b.b2(1)
        b.b2(1)
    def a7(self, a7_p1):
        '''
        :param a7_p1:
        :seqdiag_return Aa6Return
        :return:
        '''
        b = B()
        b.b3(1)
    def a8(self, a8_p1, a8_p2):
        '''
        :param a8_p1:
        :param a8_p2:
        :seqdiag_return Aa8Return
        :return:
        '''
        SeqDiagBuilder.recordFlow()
    def a9(self, a9_p1):
        '''
        :param a9_p1:
        :seqdiag_return Aa9Return
        :return:
        '''
        SeqDiagBuilder.recordFlow()
    def a10(self, a10_p1):
        '''
        :param a10_p1:
        :seqdiag_return Aa10Return
        :return:
        '''
        b = B()
        b.b4(1)
        b.b5(1)
    def a11(self, a11_p1):
        '''
        :param a11_p1:
        :seqdiag_return Aa11Return
        :return:
        '''
        b = B()
        b.b6(1)
        b.b6(1)
    def a12(self, a12_p1):
        '''
        :param a12_p1:
        :seqdiag_return Aa12Return
        :return:
        '''
        b = B()
        b.b7(1)
        b.b7(1)
        SeqDiagBuilder.recordFlow()
    def a13(self, a13_p1):
        '''
        :param a13_p1:
        :seqdiag_return Aa13Return
        :return:
        '''
        b = B()
        b.b8(1)
        b.b8(1)
class TestSeqDiagBuilder(unittest.TestCase):
def setUp(self):
SeqDiagBuilder.deactivate()
def testCreateSeqDiagCommandsOnSimplestCallWithoutRecordFlowCallInLeafMethod(self):
entryPoint = A()
SeqDiagBuilder.activate(parentdir, 'A', 'a0') # activate sequence diagram building
entryPoint.a0(1, 2)
commands = SeqDiagBuilder.createSeqDiaqCommands('USER')
with open("c:\\temp\\ess.txt", "w") as f:
f.write(commands)
self.assertEqual(len(SeqDiagBuilder.getWarningList()), 1)
self.assertEqual(
'''@startuml
center header
<b><font color=red size=20> Warnings</font></b>
<b><font color=red size=14> No control flow recorded.</font></b>
<b><font color=red size=14> Method activate() called with arguments projectPath=<{}>, entryClass=<A>, entryMethod=<a0>, classArgDic=<None>: True.</font></b>
<b><font color=red size=14> Method recordFlow() called: False.</font></b>
<b><font color=red size=14> Specified entry point: A.a0 reached: False.</font></b>
endheader
actor USER
@enduml'''.format(parentdir), commands) # using format() instead og replace fails !
SeqDiagBuilder.deactivate() # deactivate sequence diagram building
def testCreateSeqDiagCommandsOnSimplestCall(self):
entryPoint = A()
SeqDiagBuilder.activate(parentdir, 'A', 'a1') # activate sequence diagram building
entryPoint.a1(1, 2)
commands = SeqDiagBuilder.createSeqDiaqCommands('USER')
with open("c:\\temp\\ess.txt", "w") as f:
f.write(commands)
self.assertEqual(len(SeqDiagBuilder.getWarningList()), 0)
self.assertEqual(
'''@startuml
actor USER
participant TestSeqDiagBuilder
participant A
/note over of A
Test class note for class A
end note
USER -> A: a1(a1_p1, a1_p2)
activate A
USER <-- A: return Aa1Return
deactivate A
@enduml''', commands)
SeqDiagBuilder.deactivate() # deactivate sequence diagram building
def testCreateSeqDiagCommandsOnSimplestCallNotPassingProjectDir(self):
entryPoint = A()
SeqDiagBuilder.activate(None, 'A', 'a1') # activate sequence diagram building
entryPoint.a1(1, 2)
commands = SeqDiagBuilder.createSeqDiaqCommands('USER')
with open("c:\\temp\\ess.txt", "w") as f:
f.write(commands)
self.assertEqual(len(SeqDiagBuilder.getWarningList()), 1)
self.assertEqual(
'''@startuml
center header
<b><font color=red size=20> Warnings</font></b>
<b><font color=red size=14> No control flow recorded.</font></b>
<b><font color=red size=14> Method activate() called with arguments projectPath=<None>, entryClass=<A>, entryMethod=<a1>, classArgDic=<None>: True.</font></b>
<b><font color=red size=14> Method recordFlow() called: True.</font></b>
<b><font color=red size=14> Specified entry point: A.a1 reached: False.</font></b>
endheader
actor USER
@enduml''', commands)
SeqDiagBuilder.deactivate() # deactivate sequence diagram building
def testCreateSeqDiagCommandsOnSimplestCallPassingEmptyProjectDir(self):
entryPoint = A()
SeqDiagBuilder.activate('', 'A', 'a1') # activate sequence diagram building
entryPoint.a1(1, 2)
commands = SeqDiagBuilder.createSeqDiaqCommands('USER')
with open("c:\\temp\\ess.txt", "w") as f:
f.write(commands)
self.assertEqual(len(SeqDiagBuilder.getWarningList()), 1)
self.assertEqual(
'''@startuml
center header
<b><font color=red size=20> Warnings</font></b>
<b><font color=red size=14> No control flow recorded.</font></b>
<b><font color=red size=14> Method activate() called with arguments projectPath=<>, entryClass=<A>, entryMethod=<a1>, classArgDic=<None>: True.</font></b>
<b><font color=red size=14> Method recordFlow() called: True.</font></b>
<b><font color=red size=14> Specified entry point: A.a1 reached: False.</font></b>
endheader
actor USER
@enduml''', commands)
SeqDiagBuilder.deactivate() # deactivate sequence diagram building
def testCreateSeqDiagCommandsTwoLevelCallTwoDiffMethods(self):
entryPoint = A()
SeqDiagBuilder.activate(parentdir, 'A', 'a10') # activate sequence diagram building
entryPoint.a10(1)
commands = SeqDiagBuilder.createSeqDiaqCommands('USER')
with open("c:\\temp\\ess.txt", "w") as f:
f.write(commands)
self.assertEqual(len(SeqDiagBuilder.getWarningList()), 0)
self.assertEqual(
'''@startuml
actor USER
participant TestSeqDiagBuilder
participant A
/note over of A
Test class note for class A
end note
participant B
/note over of B
Test class note for class B
end note
USER -> A: a10(a10_p1)
activate A
A -> B: b4(b4_p1)
activate B
A <-- B: return Bb4Return
deactivate B
A -> B: b5(b5_p1)
activate B
A <-- B: return Bb5Return
deactivate B
USER <-- A: return Aa10Return
deactivate A
@enduml''', commands)
SeqDiagBuilder.deactivate() # deactivate sequence diagram building
def testCreateSeqDiagCommandsOnTwoLevelCall(self):
entryPoint = A()
SeqDiagBuilder.activate(parentdir, 'A', 'a2') # activate sequence diagram building
entryPoint.a2(1)
commands = SeqDiagBuilder.createSeqDiaqCommands('USER')
self.assertEqual(len(SeqDiagBuilder.getWarningList()), 0)
with open("c:\\temp\\ess.txt", "w") as f:
f.write(commands)
self.assertEqual(
'''@startuml
actor USER
participant TestSeqDiagBuilder
participant A
/note over of A
Test class note for class A
end note
participant B
/note over of B
Test class note for class B
end note
USER -> A: a2(a2_p1)
activate A
A -> B: b1(b1_p1)
activate B
A <-- B: return Bb1Return
deactivate B
USER <-- A: return Aa2Return
deactivate A
@enduml''', commands)
SeqDiagBuilder.deactivate() # deactivate sequence diagram building
def testCreateSeqDiagCommandsOnThreeLevelCallingMidLevelMethodTwice(self):
entryPoint = A()
SeqDiagBuilder.activate(parentdir, 'A', 'a6') # activate sequence diagram building
entryPoint.a6(1)
commands = SeqDiagBuilder.createSeqDiaqCommands('USER')
self.assertEqual(len(SeqDiagBuilder.getWarningList()), 0)
with open("c:\\temp\\ess.txt", "w") as f:
f.write(commands)
self.assertEqual(
'''@startuml
actor USER
participant TestSeqDiagBuilder
participant A
/note over of A
Test class note for class A
end note
participant B
/note over of B
Test class note for class B
end note
participant C
USER -> A: a6(a6_p1)
activate A
A -> B: b2(b2_p1)
activate B
B -> C: c1(c1_p1)
activate C
B <-- C: return Cc1Return
deactivate C
A <-- B: return Bb2Return
deactivate B
A -> B: b2(b2_p1)
activate B
B -> C: c1(c1_p1)
activate C
B <-- C: return Cc1Return
deactivate C
A <-- B: return Bb2Return
deactivate B
USER <-- A: return Aa6Return
deactivate A
@enduml''', commands)
SeqDiagBuilder.deactivate() # deactivate sequence diagram building
def testCreateSeqDiagCommandsOnFiveLevelCallingSecondLevelMethodTwice(self):
entryPoint = A()
SeqDiagBuilder.activate(parentdir, 'A', 'a11') # activate sequence diagram building
entryPoint.a11(1)
commands = SeqDiagBuilder.createSeqDiaqCommands(actorName='USER', title='Sequence diagram title')
self.assertEqual(len(SeqDiagBuilder.getWarningList()), 0)
with open("c:\\temp\\ess.txt", "w") as f:
f.write(commands)
self.assertEqual(
'''@startuml
title Sequence diagram title
actor USER
participant TestSeqDiagBuilder
participant A
/note over of A
Test class note for class A
end note
participant B
/note over of B
Test class note for class B
end note
participant C
participant DSub
/note over of DSub
Short note DSub
end note
USER -> A: a11(a11_p1)
activate A
A -> B: b6(b6_p1)
activate B
B -> C: c2(c2_p1)
activate C
C -> DSub: d1(d1_p1)
activate DSub
C <-- DSub: return Dd1Return
deactivate DSub
B <-- C: return Cc2Return
deactivate C
A <-- B: return Bb6Return
deactivate B
A -> B: b6(b6_p1)
activate B
B -> C: c2(c2_p1)
activate C
C -> DSub: d1(d1_p1)
activate DSub
C <-- DSub: return Dd1Return
deactivate DSub
B <-- C: return Cc2Return
deactivate C
A <-- B: return Bb6Return
deactivate B
USER <-- A: return Aa11Return
deactivate A
@enduml''', commands)
SeqDiagBuilder.deactivate() # deactivate sequence diagram building
def testCreateSeqDiagCommandsOnFiveLevelCallingSecondLevelMethodTwiceProjectPathUnixLike(self):
entryPoint = A()
SeqDiagBuilder.activate(parentdir.replace('\\','/'), 'A', 'a11') # activate sequence diagram building
entryPoint.a11(1)
commands = SeqDiagBuilder.createSeqDiaqCommands('USER')
self.assertEqual(len(SeqDiagBuilder.getWarningList()), 0)
with open("c:\\temp\\ess.txt", "w") as f:
f.write(commands)
self.assertEqual(
'''@startuml
actor USER
participant TestSeqDiagBuilder
participant A
/note over of A
Test class note for class A
end note
participant B
/note over of B
Test class note for class B
end note
participant C
participant DSub
/note over of DSub
Short note DSub
end note
USER -> A: a11(a11_p1)
activate A
A -> B: b6(b6_p1)
activate B
B -> C: c2(c2_p1)
activate C
C -> DSub: d1(d1_p1)
activate DSub
C <-- DSub: return Dd1Return
deactivate DSub
B <-- C: return Cc2Return
deactivate C
A <-- B: return Bb6Return
deactivate B
A -> B: b6(b6_p1)
activate B
B -> C: c2(c2_p1)
activate C
C -> DSub: d1(d1_p1)
activate DSub
C <-- DSub: return Dd1Return
deactivate DSub
B <-- C: return Cc2Return
deactivate C
A <-- B: return Bb6Return
deactivate B
USER <-- A: return Aa11Return
deactivate A
@enduml''', commands)
SeqDiagBuilder.deactivate() # deactivate sequence diagram building
def testCreateSeqDiagCommandsOnFiveLevelCallingSecondLevelMethodTwiceWithRecordFlowInEveryMethod(self):
entryPoint = A()
SeqDiagBuilder.activate(parentdir, 'A', 'a12') # activate sequence diagram building
entryPoint.a12(1)
commands = SeqDiagBuilder.createSeqDiaqCommands('USER')
self.assertEqual(len(SeqDiagBuilder.getWarningList()), 0)
with open("c:\\temp\\ess.txt", "w") as f:
f.write(commands)
self.assertEqual(
'''@startuml
actor USER
participant TestSeqDiagBuilder
participant A
/note over of A
Test class note for class A
end note
participant B
/note over of B
Test class note for class B
end note
participant C
participant DSub
/note over of DSub
Short note DSub
end note
USER -> A: a12(a12_p1)
activate A
A -> B: b7(b7_p1)
activate B
B -> C: c3(c3_p1)
activate C
C -> DSub: d2(d2_p1)
activate DSub
C <-- DSub: return Dd2Return
deactivate DSub
C -> C: c4(c4_p1)
activate C
C -> DSub: d2(d2_p1)
activate DSub
C <-- DSub: return Dd2Return
deactivate DSub
C <-- C: return Cc4Return
deactivate C
B <-- C: return Cc3Return
deactivate C
B -> DSub: d2(d2_p1)
activate DSub
B <-- DSub: return Dd2Return
deactivate DSub
A <-- B: return Bb7Return
deactivate B
A -> B: b7(b7_p1)
activate B
B -> C: c3(c3_p1)
activate C
C -> DSub: d2(d2_p1)
activate DSub
C <-- DSub: return Dd2Return
deactivate DSub
C -> C: c4(c4_p1)
activate C
C -> DSub: d2(d2_p1)
activate DSub
C <-- DSub: return Dd2Return
deactivate DSub
C <-- C: return Cc4Return
deactivate C
B <-- C: return Cc3Return
deactivate C
B -> DSub: d2(d2_p1)
activate DSub
B <-- DSub: return Dd2Return
deactivate DSub
A <-- B: return Bb7Return
deactivate B
USER <-- A: return Aa12Return
deactivate A
@enduml''', commands)
SeqDiagBuilder.deactivate() # deactivate sequence diagram building
def testCreateSeqDiagCommandsOnFiveLevelCallingSecondLevelMethodTwiceWithRecordFlowInOnePlaceOnlySpecifyingNoteLengthLimit(self):
entryPoint = A()
SeqDiagBuilder.activate(parentdir, 'A', 'a13') # activate sequence diagram building
entryPoint.a13(1)
commands = SeqDiagBuilder.createSeqDiaqCommands(actorName='USER', title=None, maxSigArgNum=None, maxSigCharLen=200, maxNoteCharLen=15)
self.assertEqual(len(SeqDiagBuilder.getWarningList()), 0)
with open("c:\\temp\\ess.txt", "w") as f:
f.write(commands)
self.assertEqual(
'''@startuml
actor USER
participant TestSeqDiagBuilder
participant A
/note over of A
Test class note for
class A
end note
participant B
/note over of B
Test class note for
class B
end note
participant DSub
/note over of DSub
Short note DSub
end note
USER -> A: a13(a13_p1)
activate A
A -> B: b8(b8_p1)
activate B
B -> DSub: d2(d2_p1)
activate DSub
B <-- DSub: return Dd2Return
deactivate DSub
A <-- B: return Bb8Return
deactivate B
A -> B: b8(b8_p1)
activate B
B -> DSub: d2(d2_p1)
activate DSub
B <-- DSub: return Dd2Return
deactivate DSub
A <-- B: return Bb8Return
deactivate B
USER <-- A: return Aa13Return
deactivate A
@enduml''', commands)
SeqDiagBuilder.deactivate() # deactivate sequence diagram building
def testCreateSeqDiagCommandsOnThreeLevelCallingLastLevelMethodTwice(self):
'''
Calling two level deep method which calls last Level method twice
:return:
'''
entryPoint = A()
SeqDiagBuilder.activate(parentdir, 'A', 'a7') # activate sequence diagram building
entryPoint.a7(1)
commands = SeqDiagBuilder.createSeqDiaqCommands('USER')
self.assertEqual(len(SeqDiagBuilder.getWarningList()), 0)
with open("c:\\temp\\ess.txt", "w") as f:
f.write(commands)
self.assertEqual(
'''@startuml
actor USER
participant TestSeqDiagBuilder
participant A
/note over of A
Test class note for class A
end note
participant B
/note over of B
Test class note for class B
end note
participant C
USER -> A: a7(a7_p1)
activate A
A -> B: b3(b3_p1)
activate B
B -> C: c1(c1_p1)
activate C
B <-- C: return Cc1Return
deactivate C
B -> C: c1(c1_p1)
activate C
B <-- C: return Cc1Return
deactivate C
A <-- B: return Bb3Return
deactivate B
USER <-- A: return Aa6Return
deactivate A
@enduml''', commands)
SeqDiagBuilder.deactivate() # deactivate sequence diagram building
def testCreateSeqDiagCommandsOnTwoLevelCallCallingMethodTwice(self):
    '''
    A.a4 calls B.b1 twice; the diagram must contain two successive
    B activations at the same nesting level.
    '''
    entryPoint = A()
    SeqDiagBuilder.activate(parentdir, 'A', 'a4') # activate sequence diagram building
    entryPoint.a4(1)
    commands = SeqDiagBuilder.createSeqDiaqCommands('USER')
    self.assertEqual(len(SeqDiagBuilder.getWarningList()), 0)
    # NOTE(review): hardcoded Windows-only debug dump path; fails on other OSes — TODO use tempfile
    with open("c:\\temp\\ess.txt", "w") as f:
        f.write(commands)
    self.assertEqual(
'''@startuml
actor USER
participant TestSeqDiagBuilder
participant A
/note over of A
Test class note for class A
end note
participant B
/note over of B
Test class note for class B
end note
USER -> A: a4(a4_p1)
activate A
A -> B: b1(b1_p1)
activate B
A <-- B: return Bb1Return
deactivate B
A -> B: b1(b1_p1)
activate B
A <-- B: return Bb1Return
deactivate B
USER <-- A: return Aa4Return
deactivate A
@enduml''', commands)
    SeqDiagBuilder.deactivate() # deactivate sequence diagram building
def testCreateSeqDiagCommandsOnTwoLevelCallCallingMethodThreeTimes(self):
    '''
    A.a5 calls B.b1 three times; the diagram must contain three successive
    B activations at the same nesting level.
    '''
    entryPoint = A()
    SeqDiagBuilder.activate(parentdir, 'A', 'a5') # activate sequence diagram building
    entryPoint.a5(1)
    commands = SeqDiagBuilder.createSeqDiaqCommands('USER')
    self.assertEqual(len(SeqDiagBuilder.getWarningList()), 0)
    # NOTE(review): hardcoded Windows-only debug dump path; fails on other OSes — TODO use tempfile
    with open("c:\\temp\\ess.txt", "w") as f:
        f.write(commands)
    self.assertEqual(
'''@startuml
actor USER
participant TestSeqDiagBuilder
participant A
/note over of A
Test class note for class A
end note
participant B
/note over of B
Test class note for class B
end note
USER -> A: a5(a5_p1)
activate A
A -> B: b1(b1_p1)
activate B
A <-- B: return Bb1Return
deactivate B
A -> B: b1(b1_p1)
activate B
A <-- B: return Bb1Return
deactivate B
A -> B: b1(b1_p1)
activate B
A <-- B: return Bb1Return
deactivate B
USER <-- A: return Aa5Return
deactivate A
@enduml''', commands)
    SeqDiagBuilder.deactivate() # deactivate sequence diagram building
def testCreateSeqDiagCommandsOnThreeLevelCall(self):
    '''
    Simple three-level flow A.a3 -> B.b2 -> C.c1; checks the class notes
    and the fully nested activate/deactivate structure.
    '''
    entryPoint = A()
    SeqDiagBuilder.activate(parentdir, 'A', 'a3') # activate sequence diagram building
    entryPoint.a3(1)
    commands = SeqDiagBuilder.createSeqDiaqCommands('USER')
    # NOTE(review): hardcoded Windows-only debug dump path; fails on other OSes — TODO use tempfile
    with open("c:\\temp\\ess.txt", "w") as f:
        f.write(commands)
    self.assertEqual(len(SeqDiagBuilder.getWarningList()), 0)
    self.assertEqual(
'''@startuml
actor USER
participant TestSeqDiagBuilder
participant A
/note over of A
Test class note for class A
end note
participant B
/note over of B
Test class note for class B
end note
participant C
USER -> A: a3(a3_p1)
activate A
A -> B: b2(b2_p1)
activate B
B -> C: c1(c1_p1)
activate C
B <-- C: return Cc1Return
deactivate C
A <-- B: return Bb2Return
deactivate B
USER <-- A: return Aa3Return
deactivate A
@enduml''', commands)
    SeqDiagBuilder.deactivate() # deactivate sequence diagram building
def test_instanciateClassInitTwoArgs(self):
    """_instanciateClass must build an IsolatedClassWithInstanceVariables
    object although its __init__ requires two arguments."""
    createdObject = SeqDiagBuilder._instanciateClass(
        'IsolatedClassWithInstanceVariables',
        'testclasses.',
        'isolatedclasswithinstancevariables')
    self.assertIsInstance(createdObject, IsolatedClassWithInstanceVariables)
def test_instanciateClassInitNoArgs(self):
    """_instanciateClass must build an IsolatedClass object whose
    __init__ takes no argument."""
    createdObject = SeqDiagBuilder._instanciateClass(
        'IsolatedClass',
        'testclasses.',
        'isolatedclass')
    self.assertIsInstance(createdObject, IsolatedClass)
def test_instanciateClassInitNoArgsSubPackageSpec(self):
    """_instanciateClass must resolve a class located in a nested
    sub-package ('testclasses.subtestpackage.')."""
    createdObject = SeqDiagBuilder._instanciateClass(
        'IsolatedClassSub',
        'testclasses.subtestpackage.',
        'isolatedclasssub')
    self.assertIsInstance(createdObject, IsolatedClassSub)
def test_instanciateClassInitNoArgsEmptyPackageSpec(self):
    """_instanciateClass must resolve a class defined in this very test
    module when the package spec is empty."""
    createdObject = SeqDiagBuilder._instanciateClass(
        'Client',
        '',
        'testseqdiagbuilder')
    self.assertIsInstance(createdObject, Client)
def test_instanciateClassInitNoArgsEmptyPackageSpecClassInProjectRoot(self):
    """_instanciateClass must resolve a class whose module lives at the
    project root (empty package spec)."""
    createdObject = SeqDiagBuilder._instanciateClass(
        'SeqDiagBuilder',
        '',
        'seqdiagbuilder')
    self.assertIsInstance(createdObject, SeqDiagBuilder)
def testRecordFlowWhereMulitpleClassesSupportSameMethodAndOneIsSelected(self):
    '''
    Several classes in this module implement getCoordinate(); the class
    marked with the :seqdiag_select_method tag (ChildThree) must be the
    one shown in the generated diagram, with no warning emitted.
    '''
    import os
    import tempfile

    entryPoint = ChildThree()
    SeqDiagBuilder.activate(parentdir, 'ChildThree', 'getCoordinate') # activate sequence diagram building
    entryPoint.getCoordinate()
    commands = SeqDiagBuilder.createSeqDiaqCommands('USER')
    # Fix: dump the generated commands into the platform temp directory
    # instead of the hardcoded Windows-only "c:\temp" path, which raised
    # FileNotFoundError on non-Windows hosts (and on Windows machines
    # lacking a c:\temp directory).
    with open(os.path.join(tempfile.gettempdir(), "ess.txt"), "w") as f:
        f.write(commands)
    self.assertEqual(
'''@startuml
actor USER
participant TestSeqDiagBuilder
participant ChildThree
USER -> ChildThree: getCoordinate(location='')
activate ChildThree
USER <-- ChildThree: return CoordSel
deactivate ChildThree
@enduml''', commands)
    SeqDiagBuilder.deactivate()
def testRecordFlowWhereMulitpleClassesSupportSameMethodAndOneIsSelectedInOtherClass(self):
    '''
    The :seqdiag_select_method tag selects a class different from the
    declared entry class (ChildTwo), so the entry point is never reached
    and the diagram only contains warning header lines.
    '''
    entryPoint = ChildTwo()
    SeqDiagBuilder.activate(parentdir, 'ChildTwo', 'getCoordinate') # activate sequence diagram building
    entryPoint.getCoordinate()
    commands = SeqDiagBuilder.createSeqDiaqCommands('USER')
    # NOTE(review): hardcoded Windows-only debug dump path; fails on other OSes — TODO use tempfile
    with open("c:\\temp\\ess.txt", "w") as f:
        f.write(commands)
    self.assertEqual(
'''@startuml
center header
<b><font color=red size=20> Warnings</font></b>
<b><font color=red size=14> No control flow recorded.</font></b>
<b><font color=red size=14> Method activate() called with arguments projectPath=<{}>, entryClass=<ChildTwo>, entryMethod=<getCoordinate>, classArgDic=<None>: True.</font></b>
<b><font color=red size=14> Method recordFlow() called: True.</font></b>
<b><font color=red size=14> Specified entry point: ChildTwo.getCoordinate reached: False.</font></b>
endheader
actor USER
@enduml'''.format(parentdir), commands) # expected header embeds parentdir via format(); str.replace() fails here
    SeqDiagBuilder.deactivate()
def testRecordFlowWhereMulitpleClassesSupportSameMethodAndNoneIsSelected(self):
    '''
    No class carries the :seqdiag_select_method tag for the ambiguous
    method, so SeqDiagBuilder falls back to the first candidate (Parent)
    and emits two warnings: the ambiguity warning and the
    entry-point-not-reached warning.
    '''
    entryPoint = ChildTwo()
    SeqDiagBuilder.activate(parentdir, 'ChildTwo', 'getCoordinateNoneSelected') # activate sequence diagram building
    entryPoint.getCoordinateNoneSelected()
    commands = SeqDiagBuilder.createSeqDiaqCommands('USER')
    # NOTE(review): hardcoded Windows-only debug dump path; fails on other OSes — TODO use tempfile
    with open("c:\\temp\\ess.txt", "w") as f:
        f.write(commands)
    self.assertEqual(
'''@startuml
center header
<b><font color=red size=20> Warnings</font></b>
<b><font color=red size=20> 1</font></b>
<b><font color=red size=14> More than one class ['Parent', 'ChildOne', 'ChildTwo', 'ChildThree', 'ChildOfChildTwo'] found in module testseqdiagbuilder do support method getCoordinateNoneSelected(location='').</font></b>
<b><font color=red size=14> Since Python provides no way to determine the exact target class, class Parent was chosen by default for building the sequence diagram.</font></b>
<b><font color=red size=14> To override this selection, put tag :seqdiag_select_method somewhere in the target method documentation or define every class of the hierarchy in its own file.</font></b>
<b><font color=red size=14> See help for more information.</font></b>
<b><font color=red size=20> 2</font></b>
<b><font color=red size=14> No control flow recorded.</font></b>
<b><font color=red size=14> Method activate() called with arguments projectPath=<{}>, entryClass=<ChildTwo>, entryMethod=<getCoordinateNoneSelected>, classArgDic=<None>: True.</font></b>
<b><font color=red size=14> Method recordFlow() called: True.</font></b>
<b><font color=red size=14> Specified entry point: ChildTwo.getCoordinateNoneSelected reached: False.</font></b>
endheader
actor USER
@enduml'''.format(parentdir), commands) # expected header embeds parentdir via format(); str.replace() fails here
    SeqDiagBuilder.deactivate()
def testRecordFlowWhereMulitpleClassesSupportInheritedMethodAndNoneIsSelected(self):
    '''
    The inherited method is ambiguous among several classes and no
    :seqdiag_select_method tag resolves it; a warning is emitted but
    the diagram is still produced, attributing the call to Parent.
    '''
    entryPoint = ClassA()
    SeqDiagBuilder.activate(parentdir, 'ClassA', 'aMethod') # activate sequence diagram building
    entryPoint.aMethod(1)
    commands = SeqDiagBuilder.createSeqDiaqCommands('USER')
    # NOTE(review): hardcoded Windows-only debug dump path; fails on other OSes — TODO use tempfile
    with open("c:\\temp\\ess.txt", "w") as f:
        f.write(commands)
    self.assertEqual(
'''@startuml
center header
<b><font color=red size=20> Warnings</font></b>
<b><font color=red size=14> More than one class ['Parent', 'ChildOne', 'ChildTwo', 'ChildThree', 'ChildOfChildTwo'] found in module testseqdiagbuilder do support method inheritedMethod(inhArg).</font></b>
<b><font color=red size=14> Since Python provides no way to determine the exact target class, class Parent was chosen by default for building the sequence diagram.</font></b>
<b><font color=red size=14> To override this selection, put tag :seqdiag_select_method somewhere in the target method documentation or define every class of the hierarchy in its own file.</font></b>
<b><font color=red size=14> See help for more information.</font></b>
endheader
actor USER
participant TestSeqDiagBuilder
participant ClassA
participant Parent
USER -> ClassA: aMethod(aMarg)
activate ClassA
ClassA -> Parent: inheritedMethod(inhArg)
activate Parent
ClassA <-- Parent: return inhMethResult
deactivate Parent
USER <-- ClassA: return ResultAmeth
deactivate ClassA
@enduml''', commands)
    SeqDiagBuilder.deactivate()
def testCreateSeqDiagCommandsOnFullRequestHistoDayPrice(self):
    '''
    Generates a sequence diagram on a typical CryptoPricer request.
    Integration test: only runs when executed inside the CryptoPricer
    project (guard below); exercises the full Controller -> Requester ->
    CommandPrice -> Processor -> PriceRequester chain with unlimited
    signature length.
    :return:
    '''
    # skip silently when this test module is not hosted in the CryptoPricer project
    if not 'CryptoPricer' in parentdir:
        return
    # deferred imports: these modules only exist in the CryptoPricer project
    from datetimeutil import DateTimeUtil
    from utilityfortest import UtilityForTest
    from pricerequester import PriceRequester
    from configurationmanager import ConfigurationManager
    from gui.guioutputformatter import GuiOutputFormatter
    from controller import Controller
    SeqDiagBuilder.activate(parentdir, 'Controller', 'getPrintableResultForInput') # activate sequence diagram building
    if os.name == 'posix':
        FILE_PATH = '/sdcard/cryptopricer.ini'
    else:
        FILE_PATH = 'c:\\temp\\cryptopricer.ini'
    configMgr = ConfigurationManager(FILE_PATH)
    self.controller = Controller(GuiOutputFormatter(configMgr), configMgr, PriceRequester())
    timezoneStr = 'Europe/Zurich'
    now = DateTimeUtil.localNow(timezoneStr)
    # request a price 8 days back so the histo-day (close) code path is used
    eightDaysBeforeArrowDate = now.shift(days=-8)
    # skip around New Year: the request date format has no year component
    if eightDaysBeforeArrowDate.year < now.year:
        print('{} skipped due to current date {}'.format('testControllerRTThenHistoMinuteThenRThenNewUnit()', now))
        SeqDiagBuilder.deactivate()
        return
    eightDaysBeforeYearStr, eightDaysBeforeMonthStr, eightDaysBeforeDayStr, eightDaysBeforeHourStr, eightDaysBeforeMinuteStr = UtilityForTest.getFormattedDateTimeComponentsForArrowDateTimeObj(eightDaysBeforeArrowDate)
    requestYearStr = eightDaysBeforeYearStr
    requestDayStr = eightDaysBeforeDayStr
    requestMonthStr = eightDaysBeforeMonthStr
    inputStr = 'eth btc {}/{} all'.format(requestDayStr, requestMonthStr)
    printResult, fullCommandStr, fullCommandStrWithOptions, fullCommandStrWithSaveModeOptions, fullCommandStrForStatusBar = self.controller.getPrintableResultForInput(
        inputStr)
    # older than 7 days -> day close price ('C'), otherwise minute price ('M')
    if DateTimeUtil.isDateOlderThan(eightDaysBeforeArrowDate, 7):
        hourStr = '00'
        minuteStr = '00'
        priceType = 'C'
    else:
        hourStr = eightDaysBeforeHourStr
        minuteStr = eightDaysBeforeMinuteStr
        priceType = 'M'
    self.assertEqual(
        'ETH/BTC on AVG: ' + '{}/{}/{} {}:{}{}'.format(requestDayStr, requestMonthStr, requestYearStr, hourStr, minuteStr, priceType),
        UtilityForTest.removeOneEndPriceFromResult(printResult))
    self.assertEqual('eth btc {}/{}/{} {}:{} all'.format(requestDayStr, requestMonthStr, requestYearStr, hourStr, minuteStr), fullCommandStr)
    self.assertEqual(None, fullCommandStrWithSaveModeOptions)
    self.assertEqual(len(SeqDiagBuilder.getWarningList()), 0)
    commands = SeqDiagBuilder.createSeqDiaqCommands('GUI')
    # NOTE(review): hardcoded Windows-only debug dump path; fails on other OSes — TODO use tempfile
    with open("c:\\temp\\sqCryptoPricerFullSig.txt","w") as f:
        f.write(commands)
    SeqDiagBuilder.deactivate()
    self.assertEqual(
'''@startuml
actor GUI
participant Controller
/note over of Controller
Client in the GOF Command pattern. Entry point of the business layer. Instanciates the business layer classes.
end note
participant Requester
/note over of Requester
Parses the user requests, storing the request parms into the the appropriate Command.
end note
participant CommandPrice
/note over of CommandPrice
Command in the GOF Command pattern. Stores all the request parms parsed by the Requester. Stores aswell the currently active request parms which will
be used in case of partial request. Validates part of the request elements and computes the int request date/time components. Calls the receiver, i.e.
the Processor, passing to it the required request parms.
end note
participant Processor
/note over of Processor
Receiver in the GOF Command pattern. Validates and obtains real exchange name for crypto/unit and unit/fiat pairs. Determines if RT or historical
price must be asked to the PriceRequester. After getting the price, computes the fiat (-f) and value (-v) option values and add them to the returned
ResultData. In case a crypto/unit or a fiat/unit pair is not supported by the pair exchange, try to obtain a unit/crypto, respectively a unit/fiat
pair price.
end note
participant PriceRequester
/note over of PriceRequester
Obtains the RT or historical rates from the cryptocompare.com web site. For historical rates, determines if a minute or close rate is to be obtained.
end note
participant GuiOutputFormatter
/note over of GuiOutputFormatter
Formats the result data printed to the output zone of the application and to the status bar.
end note
GUI -> Controller: getPrintableResultForInput(inputStr)
activate Controller
Controller -> Requester: getCommand(inputStr)
activate Requester
Requester -> Requester: _parseAndFillCommandPrice(inputStr)
activate Requester
Requester -> Requester: _buildFullCommandPriceOrderFreeParmsDic(orderFreeParmList)
activate Requester
Requester <-- Requester: return optionalParsedParmDataDic
deactivate Requester
Requester <-- Requester: return CommandPrice or CommandError
deactivate Requester
Controller <-- Requester: return AbstractCommand
deactivate Requester
note right
May return a CommandError in case of parsing problem.
end note
Controller -> CommandPrice: execute()
activate CommandPrice
CommandPrice -> Processor: getCryptoPrice(crypto, unit, exchange, day, month, year, hour, minute, optionValueSymbol=None, ...)
activate Processor
Processor -> Processor: _getPrice(currency, targetCurrency, exchange, year, month, day, hour, minute, dateTimeFormat, localTz, ...)
activate Processor
Processor -> PriceRequester: getHistoricalPriceAtUTCTimeStamp(crypto, unit, timeStampLocalForHistoMinute, localTz, timeStampUTCNoHHMMForHistoDay, exchange)
activate PriceRequester
note right
Obtainins a minute price if request date < 7 days from now, else a day close price.
end note
PriceRequester -> PriceRequester: _getHistoDayPriceAtUTCTimeStamp(crypto, unit, timeStampUTC, exchange, resultData)
activate PriceRequester
PriceRequester <-- PriceRequester: return ResultData
deactivate PriceRequester
Processor <-- PriceRequester: return ResultData
deactivate PriceRequester
Processor <-- Processor: return ResultData
deactivate Processor
CommandPrice <-- Processor: return ResultData
deactivate Processor
Controller <-- CommandPrice: return ResultData or False
deactivate CommandPrice
Controller -> GuiOutputFormatter: getFullCommandString(resultData)
activate GuiOutputFormatter
GuiOutputFormatter -> GuiOutputFormatter: _buildFullDateAndTimeStrings(commandDic, timezoneStr)
activate GuiOutputFormatter
GuiOutputFormatter <-- GuiOutputFormatter: return requestDateDMY, requestDateHM
deactivate GuiOutputFormatter
Controller <-- GuiOutputFormatter: return printResult, fullCommandStrNoOptions, fullCommandStrWithNoSaveOptions, ...
deactivate GuiOutputFormatter
GUI <-- Controller: return printResult, fullCommandStrNoOptions, fullCommandStrWithNoSaveOptions, ...
deactivate Controller
@enduml''', commands)
def testCreateSeqDiagCommandsOnFullRequestHistoDayPriceWithSignatureLimitation(self):
    '''
    Generates a sequence diagram on a typical CryptoPricer request
    with specifying a maximum size for the method signatures
    (maxSigCharLen=20) and notes (maxNoteCharLen=20); long signatures
    and return values are truncated with '...'.
    Integration test: only runs inside the CryptoPricer project.
    :return:
    '''
    # skip silently when this test module is not hosted in the CryptoPricer project
    if not 'CryptoPricer' in parentdir:
        return
    # deferred imports: these modules only exist in the CryptoPricer project
    from datetimeutil import DateTimeUtil
    from utilityfortest import UtilityForTest
    from pricerequester import PriceRequester
    from configurationmanager import ConfigurationManager
    from gui.guioutputformatter import GuiOutputFormatter
    from controller import Controller
    SeqDiagBuilder.activate(parentdir, 'Controller', 'getPrintableResultForInput') # activate sequence diagram building
    if os.name == 'posix':
        FILE_PATH = '/sdcard/cryptopricer.ini'
    else:
        FILE_PATH = 'c:\\temp\\cryptopricer.ini'
    configMgr = ConfigurationManager(FILE_PATH)
    self.controller = Controller(GuiOutputFormatter(configMgr), configMgr, PriceRequester())
    timezoneStr = 'Europe/Zurich'
    now = DateTimeUtil.localNow(timezoneStr)
    # request a price 8 days back so the histo-day (close) code path is used
    eightDaysBeforeArrowDate = now.shift(days=-8)
    eightDaysBeforeYearStr, eightDaysBeforeMonthStr, eightDaysBeforeDayStr, eightDaysBeforeHourStr, eightDaysBeforeMinuteStr = UtilityForTest.getFormattedDateTimeComponentsForArrowDateTimeObj(
        eightDaysBeforeArrowDate)
    requestYearStr = eightDaysBeforeYearStr
    requestDayStr = eightDaysBeforeDayStr
    requestMonthStr = eightDaysBeforeMonthStr
    inputStr = 'btc usd {}/{} all'.format(requestDayStr, requestMonthStr)
    printResult, fullCommandStr, fullCommandStrWithOptions, fullCommandStrWithSaveModeOptions, fullCommandStrForStatusBar = self.controller.getPrintableResultForInput(
        inputStr)
    commands = SeqDiagBuilder.createSeqDiaqCommands(actorName='GUI', title='CryptoPricer sequence diagram', maxSigArgNum=None, maxSigCharLen=20, maxNoteCharLen=20)
    # NOTE(review): hardcoded Windows-only output dir; fails on other OSes — TODO use tempfile
    plantUmlOutputDir = "c:\\temp\\"
    plantUmlOutputFileName = 'sqCryptoPricerShortSig.txt'
    plantUmlOutputFilePathName = plantUmlOutputDir + plantUmlOutputFileName
    with open(plantUmlOutputFilePathName, "w") as f:
        f.write(commands)
    # NOTE(review): the try/except TypeError below swallows failures of the
    # assertEqual — presumably a leftover debugging aid; verify intent
    try:
        self.assertEqual(
'''@startuml
title CryptoPricer sequence diagram
actor GUI
participant Controller
/note over of Controller
Client in the GOF Command
pattern. Entry point of the
business layer. Instanciates
the business layer classes.
end note
participant Requester
/note over of Requester
Parses the user requests,
storing the request parms into
the the appropriate Command.
end note
participant CommandPrice
/note over of CommandPrice
Command in the GOF Command
pattern. Stores all the
request parms parsed by the
Requester. Stores aswell the
currently active request parms
which will be used in case of
partial request. Validates
part of the request elements
and computes the int request
date/time components. Calls
the receiver, i.e. the
Processor, passing to it the
required request parms.
end note
participant Processor
/note over of Processor
Receiver in the GOF Command
pattern. Validates and obtains
real exchange name for
crypto/unit and unit/fiat
pairs. Determines if RT or
historical price must be asked
to the PriceRequester. After
getting the price, computes
the fiat (-f) and value (-v)
option values and add them to
the returned ResultData. In
case a crypto/unit or a
fiat/unit pair is not
supported by the pair
exchange, try to obtain a
unit/crypto, respectively a
unit/fiat pair price.
end note
participant PriceRequester
/note over of PriceRequester
Obtains the RT or historical
rates from the
cryptocompare.com web site.
For historical rates,
determines if a minute or
close rate is to be obtained.
end note
participant GuiOutputFormatter
/note over of GuiOutputFormatter
Formats the result data
printed to the output zone of
the application and to the
status bar.
end note
GUI -> Controller: getPrintableResultForInput(inputStr)
activate Controller
Controller -> Requester: getCommand(inputStr)
activate Requester
Requester -> Requester: _parseAndFillCommandPrice(inputStr)
activate Requester
Requester -> Requester: _buildFullCommandPriceOrderFreeParmsDic(orderFreeParmList)
activate Requester
Requester <-- Requester: return ...
deactivate Requester
Requester <-- Requester: return ...
deactivate Requester
Controller <-- Requester: return AbstractCommand
deactivate Requester
note right
May return a CommandError in
case of parsing problem.
end note
Controller -> CommandPrice: execute()
activate CommandPrice
CommandPrice -> Processor: getCryptoPrice(crypto, unit, ...)
activate Processor
Processor -> Processor: _getPrice(currency, ...)
activate Processor
Processor -> PriceRequester: getHistoricalPriceAtUTCTimeStamp(crypto, unit, ...)
activate PriceRequester
note right
Obtainins a minute price if
request date < 7 days from
now, else a day close price.
end note
PriceRequester -> PriceRequester: _getHistoDayPriceAtUTCTimeStamp(crypto, unit, ...)
activate PriceRequester
PriceRequester <-- PriceRequester: return ResultData
deactivate PriceRequester
Processor <-- PriceRequester: return ResultData
deactivate PriceRequester
Processor <-- Processor: return ResultData
deactivate Processor
CommandPrice <-- Processor: return ResultData
deactivate Processor
Controller <-- CommandPrice: return ResultData or False
deactivate CommandPrice
Controller -> GuiOutputFormatter: getFullCommandString(resultData)
activate GuiOutputFormatter
GuiOutputFormatter -> GuiOutputFormatter: _buildFullDateAndTimeStrings(commandDic, ...)
activate GuiOutputFormatter
GuiOutputFormatter <-- GuiOutputFormatter: return requestDateDMY, ...
deactivate GuiOutputFormatter
Controller <-- GuiOutputFormatter: return printResult, ...
deactivate GuiOutputFormatter
GUI <-- Controller: return printResult, ...
deactivate Controller
@enduml''' \
            , commands)
    except TypeError as e:
        print(e)
        pass
    SeqDiagBuilder.deactivate()
    print('In order to generate the sequence diagram image file, open a command window on {}\nand execute the command java -jar plantuml.jar -tsvg {}\n'.format(plantUmlOutputDir, plantUmlOutputFileName))
def testCreateSeqDiagCommandsOnClassesWithEmbededSelfCalls(self):
    '''
    ClassA.doWork calls its own methods (internalCall -> internalInnerCall),
    each of which also calls ClassB; self-calls must appear as nested
    ClassA -> ClassA activations in the diagram.
    '''
    entryPoint = ClassA()
    SeqDiagBuilder.activate(parentdir,'ClassA', 'doWork') # activate sequence diagram building
    entryPoint.doWork()
    commands = SeqDiagBuilder.createSeqDiaqCommands('USER')
    # NOTE(review): hardcoded Windows-only debug dump path; fails on other OSes — TODO use tempfile
    with open("c:\\temp\\ess.txt","w") as f:
        f.write(commands)
    self.assertEqual(
'''@startuml
actor USER
participant TestSeqDiagBuilder
participant ClassA
participant ClassB
USER -> ClassA: doWork()
activate ClassA
ClassA -> ClassA: internalCall()
activate ClassA
ClassA -> ClassA: internalInnerCall()
activate ClassA
ClassA -> ClassB: createInnerRequest(parm1)
activate ClassB
ClassA <-- ClassB: return Bool
deactivate ClassB
ClassA <-- ClassA: return ResultPrice
deactivate ClassA
ClassA -> ClassB: createRequest(parm1, parm2)
activate ClassB
ClassA <-- ClassB: return Bool
deactivate ClassB
ClassA <-- ClassA: return ResultPrice
deactivate ClassA
USER <-- ClassA: return ClassAdoWorkRes
deactivate ClassA
@enduml''', commands)
    self.assertEqual(len(SeqDiagBuilder.getWarningList()), 0)
    SeqDiagBuilder.deactivate() # deactivate sequence diagram building
def testCreateSeqDiagCommandsWithoutActivatingSeqDiagBuilder(self):
    """Running a flow while recording is deactivated must produce exactly
    one warning explaining that no control flow was recorded."""
    worker = ClassA()
    SeqDiagBuilder.deactivate()  # ensure recording is off before the flow executes
    worker.doWork()
    SeqDiagBuilder.createSeqDiaqCommands('USER')
    warnings = SeqDiagBuilder.getWarningList()
    self.assertEqual(1, len(warnings))
    expectedWarning = ('No control flow recorded.\n'
                       'Method activate() called: False.\n'
                       'Method recordFlow() called: True.\n'
                       'Specified entry point: None.None reached: False.')
    self.assertEqual(expectedWarning, warnings[0])
def testCreateSeqDiagCommandsOnClassLocatedInPackage(self):
    '''
    The entry class (IsolatedClass) lives in the 'testclasses' package;
    SeqDiagBuilder must still locate it and build a one-participant diagram.
    '''
    import os
    import tempfile

    entryPoint = IsolatedClass()
    SeqDiagBuilder.activate(parentdir, 'IsolatedClass', 'analyse') # activate sequence diagram building
    entryPoint.analyse()
    commands = SeqDiagBuilder.createSeqDiaqCommands('USER')
    # Fix: dump the generated commands into the platform temp directory
    # instead of the hardcoded Windows-only "c:\temp" path, which raised
    # FileNotFoundError on non-Windows hosts.
    with open(os.path.join(tempfile.gettempdir(), "ess.txt"), "w") as f:
        f.write(commands)
    self.assertEqual(len(SeqDiagBuilder.getWarningList()), 0)
    self.assertEqual(
'''@startuml
actor USER
participant IsolatedClass
USER -> IsolatedClass: analyse()
activate IsolatedClass
USER <-- IsolatedClass: return Analysis
deactivate IsolatedClass
@enduml''', commands)
    SeqDiagBuilder.deactivate() # deactivate sequence diagram building
def testCreateSeqDiagCommandsOnClassLocatedInSubPackage(self):
    '''
    The entry class (IsolatedClassSub) lives in the nested
    'testclasses.subtestpackage' package; SeqDiagBuilder must still
    locate it and build a one-participant diagram.
    '''
    import os
    import tempfile

    entryPoint = IsolatedClassSub()
    SeqDiagBuilder.activate(parentdir, 'IsolatedClassSub', 'analyse') # activate sequence diagram building
    entryPoint.analyse()
    commands = SeqDiagBuilder.createSeqDiaqCommands('USER')
    # Fix: dump the generated commands into the platform temp directory
    # instead of the hardcoded Windows-only "c:\temp" path, which raised
    # FileNotFoundError on non-Windows hosts.
    with open(os.path.join(tempfile.gettempdir(), "ess.txt"), "w") as f:
        f.write(commands)
    self.assertEqual(len(SeqDiagBuilder.getWarningList()), 0)
    self.assertEqual(
'''@startuml
actor USER
participant IsolatedClassSub
USER -> IsolatedClassSub: analyse()
activate IsolatedClassSub
USER <-- IsolatedClassSub: return Analysis
deactivate IsolatedClassSub
@enduml''', commands)
    SeqDiagBuilder.deactivate() # deactivate sequence diagram building
def testCallingMethodOnClassRequiringNonNoneConstructorParmWithoutPassingClassArgsDic(self):
    '''
    FileReader's __init__ requires a file name; since no class argument
    dictionary is passed to activate(), instanciation fails and the
    diagram carries an explanatory warning header while still showing
    the Caller activation.
    '''
    entryPoint = Caller()
    SeqDiagBuilder.activate(parentdir, 'Caller', 'call') # activate sequence diagram building
    entryPoint.call()
    commands = SeqDiagBuilder.createSeqDiaqCommands('USER')
    # NOTE(review): hardcoded Windows-only debug dump path; fails on other OSes — TODO use tempfile
    with open("c:\\temp\\ess.txt", "w") as f:
        f.write(commands)
    self.assertEqual(len(SeqDiagBuilder.getWarningList()), 1)
    self.assertEqual(
'''@startuml
center header
<b><font color=red size=20> Warnings</font></b>
<b><font color=red size=14> ERROR - constructor for class FileReader in module testclasses.subtestpackage.filereader failed due to invalid argument(s).</font></b>
<b><font color=red size=14> To solve the problem, pass a class argument dictionary with an entry for FileReader to the SeqDiagBuilder.activate() method.</font></b>
endheader
actor USER
participant Caller
USER -> Caller: call()
activate Caller
USER <-- Caller:
deactivate Caller
@enduml''', commands)
    SeqDiagBuilder.deactivate() # deactivate sequence diagram building
def testCallingMethodOnClassRequiringNonNoneConstructotParmWithPassingClassArgsDic(self):
    '''
    Same scenario as the test above, but the required FileReader ctor
    argument is supplied via the class argument dictionary, so the flow
    into FileReader is recorded without warning.
    '''
    entryPoint = Caller()
    # maps the FileReader class name to its ctor argument list
    classArgDic = {'FileReader': ['testfile.txt']}
    SeqDiagBuilder.activate(parentdir, 'Caller', 'call', classArgDic) # activate sequence diagram building
    entryPoint.call()
    commands = SeqDiagBuilder.createSeqDiaqCommands('USER')
    # NOTE(review): hardcoded Windows-only debug dump path; fails on other OSes — TODO use tempfile
    with open("c:\\temp\\ess.txt", "w") as f:
        f.write(commands)
    self.assertEqual(len(SeqDiagBuilder.getWarningList()), 0)
    self.assertEqual(
'''@startuml
actor USER
participant Caller
participant FileReader
USER -> Caller: call()
activate Caller
Caller -> FileReader: getContentAsList()
activate FileReader
Caller <-- FileReader:
deactivate FileReader
USER <-- Caller:
deactivate Caller
@enduml''', commands)
    SeqDiagBuilder.deactivate() # deactivate sequence diagram building
def testCallingMethodOnClassRequiringNonNoneConstructotParmWithPassingClassArgsDicWithTwoEntries(self):
    '''
    Test case where the flow requires to instanciate the same class (FileReader) twice with
    different values passed to the ctor at each instanciation. The '_1'/'_2'
    key suffixes select a distinct ctor argument list per instanciation.
    '''
    entryPoint = Caller()
    classArgDic = {'FileReader_1': ['testfile.txt'], 'FileReader_2': ['testfile2.txt']}
    SeqDiagBuilder.activate(parentdir, 'Caller', 'callUsingTwoFileReaders',
                            classArgDic) # activate sequence diagram building
    entryPoint.callUsingTwoFileReaders()
    commands = SeqDiagBuilder.createSeqDiaqCommands('USER')
    # NOTE(review): hardcoded Windows-only debug dump path; fails on other OSes — TODO use tempfile
    with open("c:\\temp\\ess.txt", "w") as f:
        f.write(commands)
    self.assertEqual(len(SeqDiagBuilder.getWarningList()), 0)
    self.assertEqual(
'''@startuml
actor USER
participant Caller
participant FileReader
USER -> Caller: callUsingTwoFileReaders()
activate Caller
Caller -> FileReader: getContentAsList()
activate FileReader
Caller <-- FileReader:
deactivate FileReader
Caller -> FileReader: getContentAsList()
activate FileReader
Caller <-- FileReader:
deactivate FileReader
USER <-- Caller:
deactivate Caller
@enduml''', commands)
    SeqDiagBuilder.deactivate() # deactivate sequence diagram building
def testCallingMethodOnClassRequiringNonNoneConstructotParmWithPassingClassArgsDicWithOneEntry(self):
    '''
    Test case where the flow requires to instanciate the same class (FileReader) twice with
    the same value passed to the ctor at each instanciation, so a single
    dictionary entry (no key suffix) is sufficient.
    '''
    entryPoint = Caller()
    classArgDic = {'FileReader': ['testfile.txt']}
    SeqDiagBuilder.activate(parentdir, 'Caller', 'callUsingTwoFileReaders',
                            classArgDic) # activate sequence diagram building
    entryPoint.callUsingTwoFileReaders()
    commands = SeqDiagBuilder.createSeqDiaqCommands('USER')
    # NOTE(review): hardcoded Windows-only debug dump path; fails on other OSes — TODO use tempfile
    with open("c:\\temp\\ess.txt", "w") as f:
        f.write(commands)
    self.assertEqual(len(SeqDiagBuilder.getWarningList()), 0)
    self.assertEqual(
'''@startuml
actor USER
participant Caller
participant FileReader
USER -> Caller: callUsingTwoFileReaders()
activate Caller
Caller -> FileReader: getContentAsList()
activate FileReader
Caller <-- FileReader:
deactivate FileReader
Caller -> FileReader: getContentAsList()
activate FileReader
Caller <-- FileReader:
deactivate FileReader
USER <-- Caller:
deactivate Caller
@enduml''', commands)
    SeqDiagBuilder.deactivate() # deactivate sequence diagram building
def testCallingMethodOnClassRequiringNonNoneConstructotParmWithPassingClassArgsDicWithOneEntryOneBooleanArg(self):
    '''
    Test case where the flow requires to instanciate a class (FileReaderSupportingVerboseMode) whose ctor
    requires a string and a boolean value. To handle this situation, a class ctor arg dictionary
    must be passed to the SeqDiagBuilder.activate() method.
    '''
    entryPoint = Caller()
    # ctor takes (fileName, verbose) — both supplied through the dictionary
    classArgDic = {'FileReaderSupportingVerboseMode': ['testfile.txt', False]}
    SeqDiagBuilder.activate(parentdir, 'Caller', 'callUsingVerboseFileReader',
                            classArgDic) # activate sequence diagram building
    entryPoint.callUsingVerboseFileReader()
    commands = SeqDiagBuilder.createSeqDiaqCommands('USER')
    # NOTE(review): hardcoded Windows-only debug dump path; fails on other OSes — TODO use tempfile
    with open("c:\\temp\\ess.txt", "w") as f:
        f.write(commands)
    self.assertEqual(len(SeqDiagBuilder.getWarningList()), 0)
    self.assertEqual(
'''@startuml
actor USER
participant Caller
participant FileReaderSupportingVerboseMode
USER -> Caller: callUsingVerboseFileReader()
activate Caller
Caller -> FileReaderSupportingVerboseMode: getContentAsList()
activate FileReaderSupportingVerboseMode
Caller <-- FileReaderSupportingVerboseMode:
deactivate FileReaderSupportingVerboseMode
USER <-- Caller:
deactivate Caller
@enduml''', commands)
    SeqDiagBuilder.deactivate() # deactivate sequence diagram building
def testCallingMethodOnClassRequiringNonNoneConstructorParmWithPassingClassArgsDicWithOneEntryOneBooleanArgCallSuperClassMethod(self):
    '''
    Test case where the flow requires to instanciate a class (FileReaderSupportingVerboseMode) whose ctor
    requires a string and a boolean value. To handle this situation, a class ctor arg dictionary
    must be passed to the SeqDiagBuilder.activate() method. But here, since the method
    FileReaderSupportingVerboseMode.getContentAsListFromSuper() calls a method of its parent
    class, the class ctor arg dictionary must also contain an entry for the parent class (FileReader)
    since its ctor __init__ method also requires an argument (a file name) !
    This test deliberately omits the FileReader entry to verify the warning.
    '''
    entryPoint = Caller()
    # this is the argument dictionary which should be defined for successful sequence
    # diagram generation:
    #classArgDic = {'FileReaderSupportingVerboseMode': ['testfile.txt', False],
    #			   'FileReader': ['testfile.txt']}
    # but we forget to add an entry for the FileReader base class ctor in order
    # to ensure a correct warning will be added to the generated sequence diagram
    classArgDic = {'FileReaderSupportingVerboseMode': ['testfile.txt', False]}
    SeqDiagBuilder.activate(parentdir, 'Caller', 'callUsingVerboseFileReaderWithCallToSuper',
                            classArgDic) # activate sequence diagram building
    entryPoint.callUsingVerboseFileReaderWithCallToSuper()
    commands = SeqDiagBuilder.createSeqDiaqCommands('USER')
    # NOTE(review): hardcoded Windows-only debug dump path; fails on other OSes — TODO use tempfile
    with open("c:\\temp\\ess.txt", "w") as f:
        f.write(commands)
    self.assertEqual(len(SeqDiagBuilder.getWarningList()), 1)
    self.assertEqual(
'''@startuml
center header
<b><font color=red size=20> Warnings</font></b>
<b><font color=red size=14> ERROR - constructor for class FileReader in module testclasses.subtestpackage.filereader failed due to invalid argument(s).</font></b>
<b><font color=red size=14> To solve the problem, pass a class argument dictionary with an entry for FileReader to the SeqDiagBuilder.activate() method.</font></b>
endheader
actor USER
participant Caller
participant FileReaderSupportingVerboseMode
USER -> Caller: callUsingVerboseFileReaderWithCallToSuper()
activate Caller
Caller -> FileReaderSupportingVerboseMode: getContentAsListFromSuper()
activate FileReaderSupportingVerboseMode
Caller <-- FileReaderSupportingVerboseMode:
deactivate FileReaderSupportingVerboseMode
USER <-- Caller:
deactivate Caller
@enduml''', commands)
    SeqDiagBuilder.deactivate() # deactivate sequence diagram building
def testCallingMethodOnClassRequiringNonNoneConstructotParmWithPassingClassArgsDicWithOneEntryOneBooleanArgCallSuperClassMethodEntryAddedForParentClass(
        self):
        '''
        Test case where the flow requires to instantiate a class (FileReaderSupportingVerboseMode) whose ctor
        requires a string and a boolean value. To handle this situation, a class ctor arg dictionary
        must be passed to the SeqDiagBuilder.activate() method. But here, since the method
        FileReaderSupportingVerboseMode.getContentAsListFromSuper() calls a method of its parent
        class, the class ctor arg dictionary must also contain an entry for the parent class since
        its ctor __init__ method also requires arguments. In this test case, this requirement has
        been satisfied !
        '''
        entryPoint = Caller()
        # Both the subclass and its parent class ctor arguments are supplied,
        # so no warning is expected and FileReader appears in the diagram.
        classArgDic = {'FileReaderSupportingVerboseMode': ['testfile.txt', False], 'FileReader': ['testfile.txt']}
        SeqDiagBuilder.activate(parentdir, 'Caller', 'callUsingVerboseFileReaderWithCallToSuper',
                                classArgDic) # activate sequence diagram building
        entryPoint.callUsingVerboseFileReaderWithCallToSuper()
        commands = SeqDiagBuilder.createSeqDiaqCommands('USER')
        # NOTE(review): debug dump of the generated PlantUML commands;
        # presumably a leftover development aid -- consider removing.
        with open("c:\\temp\\ess.txt", "w") as f:
            f.write(commands)
        self.assertEqual(len(SeqDiagBuilder.getWarningList()), 0)
        self.assertEqual(
'''@startuml
actor USER
participant Caller
participant FileReaderSupportingVerboseMode
participant FileReader
USER -> Caller: callUsingVerboseFileReaderWithCallToSuper()
activate Caller
Caller -> FileReaderSupportingVerboseMode: getContentAsListFromSuper()
activate FileReaderSupportingVerboseMode
FileReaderSupportingVerboseMode -> FileReader: getContentAsList()
activate FileReader
FileReaderSupportingVerboseMode <-- FileReader:
deactivate FileReader
Caller <-- FileReaderSupportingVerboseMode:
deactivate FileReaderSupportingVerboseMode
USER <-- Caller:
deactivate Caller
@enduml''', commands)
        SeqDiagBuilder.deactivate() # deactivate sequence diagram building
def testCallingMethodOnClassRequiringNonNoneConstructotParmWithPassingClassArgsDicWithTwoEntriesSpecifyingWrongMethodName(self):
        '''
        Test case where the flow requires to instantiate the same class (FileReader) twice with
        different values passed to the ctor at each instantiation. But here, we do not specify
        the right method for the SeqDiagBuilder.activate() method: 'callUsingTwoFileReaders'
        should be specified, not 'call' !
        :return:
        '''
        entryPoint = Caller()
        classArgDic = {'FileReader_1': ['testfile.txt'], 'FileReader_2': ['testfile2.txt']}
        SeqDiagBuilder.activate(parentdir, 'Caller', 'call',
                                classArgDic) # activate sequence diagram building
        entryPoint.callUsingTwoFileReaders()
        commands = SeqDiagBuilder.createSeqDiaqCommands('USER')
        # NOTE(review): debug dump -- presumably a leftover development aid.
        with open("c:\\temp\\ess.txt", "w") as f:
            f.write(commands)
        # Wrong entry method name -> the recorded flow never matches, so the
        # diagram only contains the warning header.
        self.assertEqual(len(SeqDiagBuilder.getWarningList()), 1)
        self.assertEqual(
"""@startuml
center header
<b><font color=red size=20> Warnings</font></b>
<b><font color=red size=14> No control flow recorded.</font></b>
<b><font color=red size=14> Method activate() called with arguments projectPath=<{}>, entryClass=<Caller>, entryMethod=<call>, classArgDic=<{{'FileReader_1': ['testfile.txt'], 'FileReader_2': ['testfile2.txt']}}>: True.</font></b>
<b><font color=red size=14> Method recordFlow() called: True.</font></b>
<b><font color=red size=14> Specified entry point: Caller.call reached: False.</font></b>
endheader
actor USER
@enduml""".format(parentdir), commands) # using format() instead of replace fails !
        SeqDiagBuilder.deactivate() # deactivate sequence diagram building
def testCallingMethodOnClassRequiringNonNoneConstructotParmWithPassingInvalidClassArgsDic(self):
        '''
        Verify that supplying a ctor argument list with invalid content for FileReader
        produces exactly one warning, embedded in the generated diagram header.
        '''
        entryPoint = Caller()
        # 'inval arg' is not a valid FileReader ctor argument on purpose.
        classArgDic = {'FileReader': ['testfile.txt', 'inval arg']}
        SeqDiagBuilder.activate(parentdir, 'Caller', 'call', classArgDic) # activate sequence diagram building
        entryPoint.call()
        commands = SeqDiagBuilder.createSeqDiaqCommands('USER')
        # NOTE(review): debug dump -- presumably a leftover development aid.
        with open("c:\\temp\\ess.txt", "w") as f:
            f.write(commands)
        self.assertEqual(len(SeqDiagBuilder.getWarningList()), 1)
        self.assertEqual(
'''@startuml
center header
<b><font color=red size=20> Warnings</font></b>
<b><font color=red size=14> ERROR - constructor for class FileReader in module testclasses.subtestpackage.filereader failed due to invalid argument(s) (['testfile.txt', 'inval arg']) defined in the class argument dictionary passed to the SeqDiagBuilder.activate() method.</font></b>
endheader
actor USER
participant Caller
USER -> Caller: call()
activate Caller
USER <-- Caller:
deactivate Caller
@enduml''', commands)
        SeqDiagBuilder.deactivate() # deactivate sequence diagram building
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
|
[
"jp.schnyder@gmail.com"
] |
jp.schnyder@gmail.com
|
eac1ab541e86524ea94c08d6969cdedaa9046529
|
cf8e42cc0846e07c391fad400265ae7579299e7e
|
/App/musixscore/views.py
|
5f55a02212bbe6cf3c828f87d5edfddce58ec5e6
|
[] |
no_license
|
breezyrush/group1
|
0e39057665d6cda942af5e874fe55873f738df52
|
b9b259ad95c56512533f75101e77d35e7f6905c8
|
refs/heads/master
| 2021-01-21T14:01:36.752975
| 2015-12-17T13:50:57
| 2015-12-17T13:50:57
| 49,046,435
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,317
|
py
|
from django.shortcuts import render
from django.core.urlresolvers import reverse
from django.http import HttpResponse, HttpResponseRedirect
from .models import *
def home(request):
    """Site root: send visitors straight to the browse page."""
    browse_url = reverse('browse')
    return HttpResponseRedirect(browse_url)
def clear(request):
    """Empty the shopping cart, then return to the browse page."""
    # Delete each cart entry individually (keeps per-object delete semantics).
    for item in Cart.objects.all():
        item.delete()
    return HttpResponseRedirect(reverse('browse'))
def browse(request):
    """Render the music browsing page.

    GET parameters ``genre``, ``performer`` and ``cd`` progressively narrow
    the lists shown (performers for a genre, CDs for a performer, songs for
    a CD).  A POST adds the selected songs to the cart before re-rendering.
    """
    genre = request.GET.get('genre', '')
    performer = request.GET.get('performer', '')
    cd = request.GET.get('cd', '')

    if request.method == 'POST':
        # Each posted value is a Song primary key; add the song to the cart.
        for song_pk in request.POST.getlist('songs'):
            song = Song.objects.get(pk=song_pk)
            Cart(song=song).save()

    genre_list = Genre.objects.all()
    performer_list = []
    cd_list = []
    songs_list = []
    # Each level of the drill-down is only populated once its parent has
    # been selected.  (Redundant `.all()` before `.filter()` removed; empty
    # GET values are falsy strings, so truthiness matches the old `== ''`
    # comparisons exactly.)
    if genre:
        performer_list = Performer.objects.filter(genre=genre)
    if genre and performer:
        cd_list = CD.objects.filter(performer=performer)
    if genre and performer and cd:
        songs_list = Song.objects.filter(cd=cd)
    cart = Cart.objects.all()
    return render(request, 'browse/browse.html', {
        'genre_list': genre_list,
        'performer_list': performer_list,
        'cd_list': cd_list,
        'songs_list': songs_list,
        'genre': genre,
        'performer': performer,
        'cd': cd,
        'cart': cart,
    })
|
[
"roselle.ebarle04@gmail.com"
] |
roselle.ebarle04@gmail.com
|
d3ef66b13c17f8fe1ee580b188cfbdc448362ae2
|
8a2736b2f6ff848d0296aaf64f615ffab10d657d
|
/b_NaiveBayes/Original/Basic.py
|
c43274031e68abacbf14c82fc4271fc557f866f9
|
[] |
no_license
|
amorfortune/MachineLearning
|
4d73edee44941da517f19ff0947dfcc2aab80bb1
|
1923557870002e1331306f651ad7fc7a1c1c1344
|
refs/heads/master
| 2021-01-09T06:02:56.852816
| 2017-02-03T07:22:22
| 2017-02-03T07:22:22
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,477
|
py
|
import numpy as np
from math import pi, exp
sqrt_pi = (2 * pi) ** 0.5  # normalization constant sqrt(2*pi)


class NBFunctions:
    """Helper functions for the Gaussian Naive Bayes components."""

    @staticmethod
    def gaussian(x, mu, sigma):
        """Normal density at ``x`` with mean ``mu`` and scale ``sigma``."""
        exponent = -((x - mu) ** 2) / (2 * sigma ** 2)
        return exp(exponent) / (sqrt_pi * sigma)

    @staticmethod
    def gaussian_maximum_likelihood(labelled_x, n_category, dim):
        """Per-category ML estimates of feature ``dim``.

        Returns one callable per category that evaluates the fitted density.
        NOTE(review): ``sigma`` here is the mean squared deviation (variance)
        fed directly into ``gaussian`` as the scale -- TODO confirm intended.
        """
        mu = [np.sum(labelled_x[c][dim]) / len(labelled_x[c][dim])
              for c in range(n_category)]
        sigma = [np.sum((labelled_x[c][dim] - mu[c]) ** 2) / len(labelled_x[c][dim])
                 for c in range(n_category)]

        def make_density(idx):
            # Bind idx now so every returned callable keeps its own category.
            def density(value):
                return NBFunctions.gaussian(value, mu[idx], sigma[idx])
            return density

        return [make_density(c) for c in range(n_category)]
class NaiveBayes:
    """Abstract base class for Naive Bayes classifiers.

    Subclasses are expected to implement ``feed_data``, ``_fit`` and
    ``_transfer_x``; this base class holds the shared state and provides the
    generic prior/predict/estimate machinery.
    """

    def __init__(self):
        # raw training inputs
        self._x = self._y = None
        # preprocessed data and the fitted likelihood function
        self._data = self._func = None
        # number of possible values per (discrete) feature
        self._n_possibilities = None
        # samples grouped by label, and the (sample, label) pairing
        self._labelled_x = self._label_zip = None
        # per-category counts and per-(category, feature) counts
        self._cat_counter = self._con_counter = None
        # category index -> original label; feature-value dictionaries
        self.label_dic = self._feat_dics = None

    def __getitem__(self, item):
        # Convenience accessor: obj["x"] -> obj._x (string keys only;
        # any other key type silently yields None).
        if isinstance(item, str):
            return getattr(self, "_" + item)

    def feed_data(self, x, y, sample_weights=None):
        """Hook for subclasses: ingest the training data."""
        pass

    def feed_sample_weights(self, sample_weights=None):
        """Hook for subclasses: apply per-sample weights."""
        pass

    def get_prior_probability(self, lb=1):
        """Laplace-smoothed class priors (``lb`` is the smoothing constant)."""
        denominator = len(self._y) + lb * len(self._cat_counter)
        return [(count + lb) / denominator for count in self._cat_counter]

    def fit(self, x=None, y=None, sample_weights=None, lb=1):
        """Optionally feed data, then build the likelihood function."""
        if x is not None and y is not None:
            self.feed_data(x, y, sample_weights)
        self._func = self._fit(lb)

    def _fit(self, lb):
        """Hook for subclasses: return the fitted likelihood function."""
        pass

    def predict_one(self, x, get_raw_result=False):
        """Classify a single sample.

        Returns the predicted label, or the winning (unnormalized) score
        when ``get_raw_result`` is True.  The input is copied so the caller's
        data is never mutated.
        """
        sample = x.tolist() if isinstance(x, np.ndarray) else x[:]
        sample = self._transfer_x(sample)
        best_category, best_probability = 0, 0
        for category in range(len(self._cat_counter)):
            probability = self._func(sample, category)
            # strict '>' keeps the earliest category on ties, as before
            if probability > best_probability:
                best_category, best_probability = category, probability
        if get_raw_result:
            return best_probability
        return self.label_dic[best_category]

    def predict(self, x, get_raw_result=False):
        """Apply ``predict_one`` to every sample in ``x``."""
        return np.array([self.predict_one(sample, get_raw_result) for sample in x])

    def estimate(self, x, y):
        """Print classification accuracy on the labelled set ``(x, y)``."""
        y_pred = self.predict(x)
        print("Acc: {:12.6} %".format(100 * np.sum(y_pred == y) / len(y)))

    def _transfer_x(self, x):
        """Hook for subclasses: map raw feature values to indices."""
        return x
|
[
"syameimaru_kurumi@pku.edu.cn"
] |
syameimaru_kurumi@pku.edu.cn
|
9f5e24ce50922fcb8e5c47d264958237365389b8
|
8540cb55d0e353f89e7656beae6b60f9db5b2bc6
|
/COURSE 2 Improving Deep Neural Networks_Hyperparameter tuning, Regularization and Optimization/Optimization+methods.py
|
a98da16ee119ab200ebbf26637279ce020e19d63
|
[] |
no_license
|
zzw95/deeplearning.ai
|
2c6687481bbde423f52223462d7124d20e6b50d8
|
6fcdb44f0c2aa685a9a0e9acc85be8cbd81d65b3
|
refs/heads/master
| 2018-12-09T22:22:20.534618
| 2018-09-12T03:03:55
| 2018-09-12T03:03:55
| 119,778,333
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 43,010
|
py
|
# coding: utf-8
# # Optimization Methods
#
# Until now, you've always used Gradient Descent to update the parameters and minimize the cost. In this notebook, you will learn more advanced optimization methods that can speed up learning and perhaps even get you to a better final value for the cost function. Having a good optimization algorithm can be the difference between waiting days vs. just a few hours to get a good result.
#
# Gradient descent goes "downhill" on a cost function $J$. Think of it as trying to do this:
# <img src="images/cost.jpg" style="width:650px;height:300px;">
# <caption><center> <u> **Figure 1** </u>: **Minimizing the cost is like finding the lowest point in a hilly landscape**<br> At each step of the training, you update your parameters following a certain direction to try to get to the lowest possible point. </center></caption>
#
# **Notations**: As usual, $\frac{\partial J}{\partial a } = $ `da` for any variable `a`.
#
# To get started, run the following code to import the libraries you will need.
# In[3]:
import numpy as np
import matplotlib.pyplot as plt
import scipy.io
import math
import sklearn
import sklearn.datasets
from opt_utils import load_params_and_grads, initialize_parameters, forward_propagation, backward_propagation
from opt_utils import compute_cost, predict, predict_dec, plot_decision_boundary, load_dataset
from testCases import *
# Notebook setup: enable inline plotting and set matplotlib defaults.
get_ipython().magic('matplotlib inline')
plt.rcParams['figure.figsize'] = (7.0, 4.0) # set default size of plots
plt.rcParams['image.interpolation'] = 'nearest'
plt.rcParams['image.cmap'] = 'gray'
# ## 1 - Gradient Descent
#
# A simple optimization method in machine learning is gradient descent (GD). When you take gradient steps with respect to all $m$ examples on each step, it is also called Batch Gradient Descent.
#
# **Warm-up exercise**: Implement the gradient descent update rule. The gradient descent rule is, for $l = 1, ..., L$:
# $$ W^{[l]} = W^{[l]} - \alpha \text{ } dW^{[l]} \tag{1}$$
# $$ b^{[l]} = b^{[l]} - \alpha \text{ } db^{[l]} \tag{2}$$
#
# where L is the number of layers and $\alpha$ is the learning rate. All parameters should be stored in the `parameters` dictionary. Note that the iterator `l` starts at 0 in the `for` loop while the first parameters are $W^{[1]}$ and $b^{[1]}$. You need to shift `l` to `l+1` when coding.
# In[4]:
# GRADED FUNCTION: update_parameters_with_gd
def update_parameters_with_gd(parameters, grads, learning_rate):
    """
    Update parameters using one step of gradient descent.

    Arguments:
    parameters -- python dictionary containing your parameters to be updated:
                    parameters['W' + str(l)] = Wl
                    parameters['b' + str(l)] = bl
    grads -- python dictionary containing your gradients to update each parameters:
                    grads['dW' + str(l)] = dWl
                    grads['db' + str(l)] = dbl
    learning_rate -- the learning rate, scalar.

    Returns:
    parameters -- python dictionary containing your updated parameters
    """
    num_layers = len(parameters) // 2  # each layer contributes a W and a b

    # Gradient-descent step W := W - alpha*dW, b := b - alpha*db per layer.
    # Layers are 1-indexed in the dictionary keys.
    for layer in range(1, num_layers + 1):
        w_key, b_key = "W" + str(layer), "b" + str(layer)
        parameters[w_key] = parameters[w_key] - learning_rate * grads["d" + w_key]
        parameters[b_key] = parameters[b_key] - learning_rate * grads["d" + b_key]

    return parameters
# In[5]:
# Smoke-test update_parameters_with_gd on the course-provided fixture
# (update_parameters_with_gd_test_case comes from testCases).
parameters, grads, learning_rate = update_parameters_with_gd_test_case()

parameters = update_parameters_with_gd(parameters, grads, learning_rate)
print("W1 = " + str(parameters["W1"]))
print("b1 = " + str(parameters["b1"]))
print("W2 = " + str(parameters["W2"]))
print("b2 = " + str(parameters["b2"]))
# **Expected Output**:
#
# <table>
# <tr>
# <td > **W1** </td>
# <td > [[ 1.63535156 -0.62320365 -0.53718766]
# [-1.07799357 0.85639907 -2.29470142]] </td>
# </tr>
#
# <tr>
# <td > **b1** </td>
# <td > [[ 1.74604067]
# [-0.75184921]] </td>
# </tr>
#
# <tr>
# <td > **W2** </td>
# <td > [[ 0.32171798 -0.25467393 1.46902454]
# [-2.05617317 -0.31554548 -0.3756023 ]
# [ 1.1404819 -1.09976462 -0.1612551 ]] </td>
# </tr>
#
# <tr>
# <td > **b2** </td>
# <td > [[-0.88020257]
# [ 0.02561572]
# [ 0.57539477]] </td>
# </tr>
# </table>
#
# A variant of this is Stochastic Gradient Descent (SGD), which is equivalent to mini-batch gradient descent where each mini-batch has just 1 example. The update rule that you have just implemented does not change. What changes is that you would be computing gradients on just one training example at a time, rather than on the whole training set. The code examples below illustrate the difference between stochastic gradient descent and (batch) gradient descent.
#
# - **(Batch) Gradient Descent**:
#
# ``` python
# X = data_input
# Y = labels
# parameters = initialize_parameters(layers_dims)
# for i in range(0, num_iterations):
# # Forward propagation
# a, caches = forward_propagation(X, parameters)
# # Compute cost.
# cost = compute_cost(a, Y)
# # Backward propagation.
# grads = backward_propagation(a, caches, parameters)
# # Update parameters.
# parameters = update_parameters(parameters, grads)
#
# ```
#
# - **Stochastic Gradient Descent**:
#
# ```python
# X = data_input
# Y = labels
# parameters = initialize_parameters(layers_dims)
# for i in range(0, num_iterations):
# for j in range(0, m):
# # Forward propagation
# a, caches = forward_propagation(X[:,j], parameters)
# # Compute cost
# cost = compute_cost(a, Y[:,j])
# # Backward propagation
# grads = backward_propagation(a, caches, parameters)
# # Update parameters.
# parameters = update_parameters(parameters, grads)
# ```
#
# In Stochastic Gradient Descent, you use only 1 training example before updating the gradients. When the training set is large, SGD can be faster. But the parameters will "oscillate" toward the minimum rather than converge smoothly. Here is an illustration of this:
#
# <img src="images/kiank_sgd.png" style="width:750px;height:250px;">
# <caption><center> <u> <font color='purple'> **Figure 1** </u><font color='purple'> : **SGD vs GD**<br> "+" denotes a minimum of the cost. SGD leads to many oscillations to reach convergence. But each step is a lot faster to compute for SGD than for GD, as it uses only one training example (vs. the whole batch for GD). </center></caption>
#
# **Note** also that implementing SGD requires 3 for-loops in total:
# 1. Over the number of iterations
# 2. Over the $m$ training examples
# 3. Over the layers (to update all parameters, from $(W^{[1]},b^{[1]})$ to $(W^{[L]},b^{[L]})$)
#
# In practice, you'll often get faster results if you do not use neither the whole training set, nor only one training example, to perform each update. Mini-batch gradient descent uses an intermediate number of examples for each step. With mini-batch gradient descent, you loop over the mini-batches instead of looping over individual training examples.
#
# <img src="images/kiank_minibatch.png" style="width:750px;height:250px;">
# <caption><center> <u> <font color='purple'> **Figure 2** </u>: <font color='purple'> **SGD vs Mini-Batch GD**<br> "+" denotes a minimum of the cost. Using mini-batches in your optimization algorithm often leads to faster optimization. </center></caption>
#
# <font color='blue'>
# **What you should remember**:
# - The difference between gradient descent, mini-batch gradient descent and stochastic gradient descent is the number of examples you use to perform one update step.
# - You have to tune a learning rate hyperparameter $\alpha$.
# - With a well-tuned mini-batch size, usually it outperforms either gradient descent or stochastic gradient descent (particularly when the training set is large).
# ## 2 - Mini-Batch Gradient descent
#
# Let's learn how to build mini-batches from the training set (X, Y).
#
# There are two steps:
# - **Shuffle**: Create a shuffled version of the training set (X, Y) as shown below. Each column of X and Y represents a training example. Note that the random shuffling is done synchronously between X and Y. Such that after the shuffling the $i^{th}$ column of X is the example corresponding to the $i^{th}$ label in Y. The shuffling step ensures that examples will be split randomly into different mini-batches.
#
# <img src="images/kiank_shuffle.png" style="width:550px;height:300px;">
#
# - **Partition**: Partition the shuffled (X, Y) into mini-batches of size `mini_batch_size` (here 64). Note that the number of training examples is not always divisible by `mini_batch_size`. The last mini batch might be smaller, but you don't need to worry about this. When the final mini-batch is smaller than the full `mini_batch_size`, it will look like this:
#
# <img src="images/kiank_partition.png" style="width:550px;height:300px;">
#
# **Exercise**: Implement `random_mini_batches`. We coded the shuffling part for you. To help you with the partitioning step, we give you the following code that selects the indexes for the $1^{st}$ and $2^{nd}$ mini-batches:
# ```python
# first_mini_batch_X = shuffled_X[:, 0 : mini_batch_size]
# second_mini_batch_X = shuffled_X[:, mini_batch_size : 2 * mini_batch_size]
# ...
# ```
#
# Note that the last mini-batch might end up smaller than `mini_batch_size=64`. Let $\lfloor s \rfloor$ represents $s$ rounded down to the nearest integer (this is `math.floor(s)` in Python). If the total number of examples is not a multiple of `mini_batch_size=64` then there will be $\lfloor \frac{m}{mini\_batch\_size}\rfloor$ mini-batches with a full 64 examples, and the number of examples in the final mini-batch will be ($m-mini_\_batch_\_size \times \lfloor \frac{m}{mini\_batch\_size}\rfloor$).
# In[6]:
# GRADED FUNCTION: random_mini_batches
def random_mini_batches(X, Y, mini_batch_size = 64, seed = 0):
    """
    Creates a list of random minibatches from (X, Y).

    Arguments:
    X -- input data, of shape (input size, number of examples)
    Y -- true "label" array of shape (label rows, number of examples);
         the classic single-row (1, m) vector still works unchanged
    mini_batch_size -- size of the mini-batches, integer
    seed -- RNG seed so the shuffle is reproducible

    Returns:
    mini_batches -- list of synchronous (mini_batch_X, mini_batch_Y)
    """
    np.random.seed(seed)            # make the "random" minibatches reproducible
    m = X.shape[1]                  # number of training examples
    mini_batches = []

    # Step 1: shuffle the columns of X and Y with the SAME permutation so
    # example i in shuffled_X still matches label column i in shuffled_Y.
    permutation = list(np.random.permutation(m))
    shuffled_X = X[:, permutation]
    # Generalized from the hard-coded reshape((1, m)): keep however many
    # label rows Y has (e.g. one-hot labels), backward compatible for (1, m).
    shuffled_Y = Y[:, permutation].reshape((Y.shape[0], m))

    # Step 2: partition (shuffled_X, shuffled_Y), minus the end case.
    num_complete_minibatches = math.floor(m / mini_batch_size)  # number of full mini-batches in the partitioning
    for k in range(0, num_complete_minibatches):
        mini_batch_X = shuffled_X[:, k * mini_batch_size:(k + 1) * mini_batch_size]
        mini_batch_Y = shuffled_Y[:, k * mini_batch_size:(k + 1) * mini_batch_size]
        mini_batch = (mini_batch_X, mini_batch_Y)
        mini_batches.append(mini_batch)

    # Handle the end case (last mini-batch < mini_batch_size).
    if m % mini_batch_size != 0:
        mini_batch_X = shuffled_X[:, num_complete_minibatches * mini_batch_size:]
        mini_batch_Y = shuffled_Y[:, num_complete_minibatches * mini_batch_size:]
        mini_batch = (mini_batch_X, mini_batch_Y)
        mini_batches.append(mini_batch)

    return mini_batches
# In[7]:
# Smoke-test random_mini_batches: print batch shapes and a few values so
# they can be compared against the expected output in the markdown below.
X_assess, Y_assess, mini_batch_size = random_mini_batches_test_case()
mini_batches = random_mini_batches(X_assess, Y_assess, mini_batch_size)

print ("shape of the 1st mini_batch_X: " + str(mini_batches[0][0].shape))
print ("shape of the 2nd mini_batch_X: " + str(mini_batches[1][0].shape))
print ("shape of the 3rd mini_batch_X: " + str(mini_batches[2][0].shape))
print ("shape of the 1st mini_batch_Y: " + str(mini_batches[0][1].shape))
print ("shape of the 2nd mini_batch_Y: " + str(mini_batches[1][1].shape))
print ("shape of the 3rd mini_batch_Y: " + str(mini_batches[2][1].shape))
print ("mini batch sanity check: " + str(mini_batches[0][0][0][0:3]))
# **Expected Output**:
#
# <table style="width:50%">
# <tr>
# <td > **shape of the 1st mini_batch_X** </td>
# <td > (12288, 64) </td>
# </tr>
#
# <tr>
# <td > **shape of the 2nd mini_batch_X** </td>
# <td > (12288, 64) </td>
# </tr>
#
# <tr>
# <td > **shape of the 3rd mini_batch_X** </td>
# <td > (12288, 20) </td>
# </tr>
# <tr>
# <td > **shape of the 1st mini_batch_Y** </td>
# <td > (1, 64) </td>
# </tr>
# <tr>
# <td > **shape of the 2nd mini_batch_Y** </td>
# <td > (1, 64) </td>
# </tr>
# <tr>
# <td > **shape of the 3rd mini_batch_Y** </td>
# <td > (1, 20) </td>
# </tr>
# <tr>
# <td > **mini batch sanity check** </td>
# <td > [ 0.90085595 -0.7612069 0.2344157 ] </td>
# </tr>
#
# </table>
# <font color='blue'>
# **What you should remember**:
# - Shuffling and Partitioning are the two steps required to build mini-batches
# - Powers of two are often chosen to be the mini-batch size, e.g., 16, 32, 64, 128.
# ## 3 - Momentum
#
# Because mini-batch gradient descent makes a parameter update after seeing just a subset of examples, the direction of the update has some variance, and so the path taken by mini-batch gradient descent will "oscillate" toward convergence. Using momentum can reduce these oscillations.
#
# Momentum takes into account the past gradients to smooth out the update. We will store the 'direction' of the previous gradients in the variable $v$. Formally, this will be the exponentially weighted average of the gradient on previous steps. You can also think of $v$ as the "velocity" of a ball rolling downhill, building up speed (and momentum) according to the direction of the gradient/slope of the hill.
#
# <img src="images/opt_momentum.png" style="width:400px;height:250px;">
# <caption><center> <u><font color='purple'>**Figure 3**</u><font color='purple'>: The red arrows shows the direction taken by one step of mini-batch gradient descent with momentum. The blue points show the direction of the gradient (with respect to the current mini-batch) on each step. Rather than just following the gradient, we let the gradient influence $v$ and then take a step in the direction of $v$.<br> <font color='black'> </center>
#
#
# **Exercise**: Initialize the velocity. The velocity, $v$, is a python dictionary that needs to be initialized with arrays of zeros. Its keys are the same as those in the `grads` dictionary, that is:
# for $l =1,...,L$:
# ```python
# v["dW" + str(l+1)] = ... #(numpy array of zeros with the same shape as parameters["W" + str(l+1)])
# v["db" + str(l+1)] = ... #(numpy array of zeros with the same shape as parameters["b" + str(l+1)])
# ```
# **Note** that the iterator l starts at 0 in the for loop while the first parameters are v["dW1"] and v["db1"] (that's a "one" on the superscript). This is why we are shifting l to l+1 in the `for` loop.
# In[8]:
# GRADED FUNCTION: initialize_velocity
def initialize_velocity(parameters):
    """
    Initializes the momentum velocity as a python dictionary with:
                - keys: "dW1", "db1", ..., "dWL", "dbL"
                - values: numpy arrays of zeros of the same shape as the
                  corresponding gradients/parameters.

    Arguments:
    parameters -- python dictionary containing your parameters.
                    parameters['W' + str(l)] = Wl
                    parameters['b' + str(l)] = bl

    Returns:
    v -- python dictionary containing the current velocity.
                    v['dW' + str(l)] = velocity of dWl
                    v['db' + str(l)] = velocity of dbl
    """
    num_layers = len(parameters) // 2  # each layer has a W and a b entry
    velocity = {}

    # Zero velocity per parameter: same shape as the parameter itself, so
    # the first momentum update is well-defined.
    for layer in range(1, num_layers + 1):
        velocity["dW" + str(layer)] = np.zeros(parameters["W" + str(layer)].shape)
        velocity["db" + str(layer)] = np.zeros(parameters["b" + str(layer)].shape)

    return velocity
# In[9]:
# Smoke-test initialize_velocity on the course-provided fixture.
parameters = initialize_velocity_test_case()

v = initialize_velocity(parameters)
print("v[\"dW1\"] = " + str(v["dW1"]))
print("v[\"db1\"] = " + str(v["db1"]))
print("v[\"dW2\"] = " + str(v["dW2"]))
print("v[\"db2\"] = " + str(v["db2"]))
# **Expected Output**:
#
# <table style="width:40%">
# <tr>
# <td > **v["dW1"]** </td>
# <td > [[ 0. 0. 0.]
# [ 0. 0. 0.]] </td>
# </tr>
#
# <tr>
# <td > **v["db1"]** </td>
# <td > [[ 0.]
# [ 0.]] </td>
# </tr>
#
# <tr>
# <td > **v["dW2"]** </td>
# <td > [[ 0. 0. 0.]
# [ 0. 0. 0.]
# [ 0. 0. 0.]] </td>
# </tr>
#
# <tr>
# <td > **v["db2"]** </td>
# <td > [[ 0.]
# [ 0.]
# [ 0.]] </td>
# </tr>
# </table>
#
# **Exercise**: Now, implement the parameters update with momentum. The momentum update rule is, for $l = 1, ..., L$:
#
# $$ \begin{cases}
# v_{dW^{[l]}} = \beta v_{dW^{[l]}} + (1 - \beta) dW^{[l]} \\
# W^{[l]} = W^{[l]} - \alpha v_{dW^{[l]}}
# \end{cases}\tag{3}$$
#
# $$\begin{cases}
# v_{db^{[l]}} = \beta v_{db^{[l]}} + (1 - \beta) db^{[l]} \\
# b^{[l]} = b^{[l]} - \alpha v_{db^{[l]}}
# \end{cases}\tag{4}$$
#
# where L is the number of layers, $\beta$ is the momentum and $\alpha$ is the learning rate. All parameters should be stored in the `parameters` dictionary. Note that the iterator `l` starts at 0 in the `for` loop while the first parameters are $W^{[1]}$ and $b^{[1]}$ (that's a "one" on the superscript). So you will need to shift `l` to `l+1` when coding.
# In[10]:
# GRADED FUNCTION: update_parameters_with_momentum
def update_parameters_with_momentum(parameters, grads, v, beta, learning_rate):
    """
    Update parameters using Momentum.

    Arguments:
    parameters -- python dictionary containing your parameters:
                    parameters['W' + str(l)] = Wl
                    parameters['b' + str(l)] = bl
    grads -- python dictionary containing your gradients for each parameters:
                    grads['dW' + str(l)] = dWl
                    grads['db' + str(l)] = dbl
    v -- python dictionary containing the current velocity:
                    v['dW' + str(l)] = ...
                    v['db' + str(l)] = ...
    beta -- the momentum hyperparameter, scalar
    learning_rate -- the learning rate, scalar

    Returns:
    parameters -- python dictionary containing your updated parameters
    v -- python dictionary containing your updated velocities
    """
    num_layers = len(parameters) // 2  # each layer has a W and a b entry

    for layer in range(1, num_layers + 1):
        w_key, b_key = "W" + str(layer), "b" + str(layer)
        dw_key, db_key = "dW" + str(layer), "db" + str(layer)
        # exponentially weighted average of the past gradients
        v[dw_key] = beta * v[dw_key] + (1 - beta) * grads[dw_key]
        v[db_key] = beta * v[db_key] + (1 - beta) * grads[db_key]
        # step in the direction of the velocity rather than the raw gradient
        parameters[w_key] = parameters[w_key] - learning_rate * v[dw_key]
        parameters[b_key] = parameters[b_key] - learning_rate * v[db_key]

    return parameters, v
# In[11]:
# Smoke-test update_parameters_with_momentum on the course-provided fixture.
parameters, grads, v = update_parameters_with_momentum_test_case()

parameters, v = update_parameters_with_momentum(parameters, grads, v, beta = 0.9, learning_rate = 0.01)
print("W1 = " + str(parameters["W1"]))
print("b1 = " + str(parameters["b1"]))
print("W2 = " + str(parameters["W2"]))
print("b2 = " + str(parameters["b2"]))
print("v[\"dW1\"] = " + str(v["dW1"]))
print("v[\"db1\"] = " + str(v["db1"]))
print("v[\"dW2\"] = " + str(v["dW2"]))
print("v[\"db2\"] = " + str(v["db2"]))
# **Expected Output**:
#
# <table style="width:90%">
# <tr>
# <td > **W1** </td>
# <td > [[ 1.62544598 -0.61290114 -0.52907334]
# [-1.07347112 0.86450677 -2.30085497]] </td>
# </tr>
#
# <tr>
# <td > **b1** </td>
# <td > [[ 1.74493465]
# [-0.76027113]] </td>
# </tr>
#
# <tr>
# <td > **W2** </td>
# <td > [[ 0.31930698 -0.24990073 1.4627996 ]
# [-2.05974396 -0.32173003 -0.38320915]
# [ 1.13444069 -1.0998786 -0.1713109 ]] </td>
# </tr>
#
# <tr>
# <td > **b2** </td>
# <td > [[-0.87809283]
# [ 0.04055394]
# [ 0.58207317]] </td>
# </tr>
#
# <tr>
# <td > **v["dW1"]** </td>
# <td > [[-0.11006192 0.11447237 0.09015907]
# [ 0.05024943 0.09008559 -0.06837279]] </td>
# </tr>
#
# <tr>
# <td > **v["db1"]** </td>
# <td > [[-0.01228902]
# [-0.09357694]] </td>
# </tr>
#
# <tr>
# <td > **v["dW2"]** </td>
# <td > [[-0.02678881 0.05303555 -0.06916608]
# [-0.03967535 -0.06871727 -0.08452056]
# [-0.06712461 -0.00126646 -0.11173103]] </td>
# </tr>
#
# <tr>
# <td > **v["db2"]** </td>
# <td > [[ 0.02344157]
# [ 0.16598022]
# [ 0.07420442]]</td>
# </tr>
# </table>
#
#
# **Note** that:
# - The velocity is initialized with zeros. So the algorithm will take a few iterations to "build up" velocity and start to take bigger steps.
# - If $\beta = 0$, then this just becomes standard gradient descent without momentum.
#
# **How do you choose $\beta$?**
#
# - The larger the momentum $\beta$ is, the smoother the update because the more we take the past gradients into account. But if $\beta$ is too big, it could also smooth out the updates too much.
# - Common values for $\beta$ range from 0.8 to 0.999. If you don't feel inclined to tune this, $\beta = 0.9$ is often a reasonable default.
# - Tuning the optimal $\beta$ for your model might need trying several values to see what works best in term of reducing the value of the cost function $J$.
# <font color='blue'>
# **What you should remember**:
# - Momentum takes past gradients into account to smooth out the steps of gradient descent. It can be applied with batch gradient descent, mini-batch gradient descent or stochastic gradient descent.
# - You have to tune a momentum hyperparameter $\beta$ and a learning rate $\alpha$.
# ## 4 - Adam
#
# Adam is one of the most effective optimization algorithms for training neural networks. It combines ideas from RMSProp (described in lecture) and Momentum.
#
# **How does Adam work?**
# 1. It calculates an exponentially weighted average of past gradients, and stores it in variables $v$ (before bias correction) and $v^{corrected}$ (with bias correction).
# 2. It calculates an exponentially weighted average of the squares of the past gradients, and stores it in variables $s$ (before bias correction) and $s^{corrected}$ (with bias correction).
# 3. It updates parameters in a direction based on combining information from "1" and "2".
#
# The update rule is, for $l = 1, ..., L$:
#
# $$\begin{cases}
# v_{dW^{[l]}} = \beta_1 v_{dW^{[l]}} + (1 - \beta_1) \frac{\partial \mathcal{J} }{ \partial W^{[l]} } \\
# v^{corrected}_{dW^{[l]}} = \frac{v_{dW^{[l]}}}{1 - (\beta_1)^t} \\
# s_{dW^{[l]}} = \beta_2 s_{dW^{[l]}} + (1 - \beta_2) (\frac{\partial \mathcal{J} }{\partial W^{[l]} })^2 \\
# s^{corrected}_{dW^{[l]}} = \frac{s_{dW^{[l]}}}{1 - (\beta_1)^t} \\
# W^{[l]} = W^{[l]} - \alpha \frac{v^{corrected}_{dW^{[l]}}}{\sqrt{s^{corrected}_{dW^{[l]}}} + \varepsilon}
# \end{cases}$$
# where:
# - t counts the number of steps taken of Adam
# - L is the number of layers
# - $\beta_1$ and $\beta_2$ are hyperparameters that control the two exponentially weighted averages.
# - $\alpha$ is the learning rate
# - $\varepsilon$ is a very small number to avoid dividing by zero
#
# As usual, we will store all parameters in the `parameters` dictionary
# **Exercise**: Initialize the Adam variables $v, s$ which keep track of the past information.
#
# **Instruction**: The variables $v, s$ are python dictionaries that need to be initialized with arrays of zeros. Their keys are the same as for `grads`, that is:
# for $l = 1, ..., L$:
# ```python
# v["dW" + str(l+1)] = ... #(numpy array of zeros with the same shape as parameters["W" + str(l+1)])
# v["db" + str(l+1)] = ... #(numpy array of zeros with the same shape as parameters["b" + str(l+1)])
# s["dW" + str(l+1)] = ... #(numpy array of zeros with the same shape as parameters["W" + str(l+1)])
# s["db" + str(l+1)] = ... #(numpy array of zeros with the same shape as parameters["b" + str(l+1)])
#
# ```
# In[14]:
# GRADED FUNCTION: initialize_adam
def initialize_adam(parameters) :
    """
    Create the two Adam moment accumulators, one zero array per gradient.

    Both returned dictionaries mirror the gradient dictionary: for every
    layer l they hold arrays of zeros shaped like W[l] and b[l].

    Arguments:
    parameters -- dictionary of network weights:
                  parameters["W" + str(l)] = Wl
                  parameters["b" + str(l)] = bl

    Returns:
    v -- dictionary of zeros for the first-moment (mean) estimates,
         keyed "dW1", "db1", ..., "dWL", "dbL"
    s -- dictionary of zeros for the second-moment (uncentered variance)
         estimates, with the same keys as v
    """
    num_layers = len(parameters) // 2  # each layer contributes one W and one b entry
    v = {}
    s = {}
    # Each accumulator gets its own freshly allocated array (no aliasing
    # between v and s, since both are updated independently later).
    for layer in range(1, num_layers + 1):
        for prefix in ("W", "b"):
            template = parameters[prefix + str(layer)]
            key = "d" + prefix + str(layer)
            v[key] = np.zeros(template.shape)
            s[key] = np.zeros(template.shape)
    return v, s
# In[15]:
# Sanity check: build a small test network and confirm every Adam cache
# starts as zeros. initialize_adam_test_case() is presumably supplied by
# the notebook's testCases helper module — it is not defined in this chunk.
parameters = initialize_adam_test_case()
v, s = initialize_adam(parameters)
print("v[\"dW1\"] = " + str(v["dW1"]))
print("v[\"db1\"] = " + str(v["db1"]))
print("v[\"dW2\"] = " + str(v["dW2"]))
print("v[\"db2\"] = " + str(v["db2"]))
print("s[\"dW1\"] = " + str(s["dW1"]))
print("s[\"db1\"] = " + str(s["db1"]))
print("s[\"dW2\"] = " + str(s["dW2"]))
print("s[\"db2\"] = " + str(s["db2"]))
# **Expected Output**:
#
# <table style="width:40%">
# <tr>
# <td > **v["dW1"]** </td>
# <td > [[ 0. 0. 0.]
# [ 0. 0. 0.]] </td>
# </tr>
#
# <tr>
# <td > **v["db1"]** </td>
# <td > [[ 0.]
# [ 0.]] </td>
# </tr>
#
# <tr>
# <td > **v["dW2"]** </td>
# <td > [[ 0. 0. 0.]
# [ 0. 0. 0.]
# [ 0. 0. 0.]] </td>
# </tr>
#
# <tr>
# <td > **v["db2"]** </td>
# <td > [[ 0.]
# [ 0.]
# [ 0.]] </td>
# </tr>
# <tr>
# <td > **s["dW1"]** </td>
# <td > [[ 0. 0. 0.]
# [ 0. 0. 0.]] </td>
# </tr>
#
# <tr>
# <td > **s["db1"]** </td>
# <td > [[ 0.]
# [ 0.]] </td>
# </tr>
#
# <tr>
# <td > **s["dW2"]** </td>
# <td > [[ 0. 0. 0.]
# [ 0. 0. 0.]
# [ 0. 0. 0.]] </td>
# </tr>
#
# <tr>
# <td > **s["db2"]** </td>
# <td > [[ 0.]
# [ 0.]
# [ 0.]] </td>
# </tr>
#
# </table>
#
# **Exercise**: Now, implement the parameters update with Adam. Recall the general update rule is, for $l = 1, ..., L$:
#
# $$\begin{cases}
# v_{W^{[l]}} = \beta_1 v_{W^{[l]}} + (1 - \beta_1) \frac{\partial J }{ \partial W^{[l]} } \\
# v^{corrected}_{W^{[l]}} = \frac{v_{W^{[l]}}}{1 - (\beta_1)^t} \\
# s_{W^{[l]}} = \beta_2 s_{W^{[l]}} + (1 - \beta_2) (\frac{\partial J }{\partial W^{[l]} })^2 \\
# s^{corrected}_{W^{[l]}} = \frac{s_{W^{[l]}}}{1 - (\beta_2)^t} \\
# W^{[l]} = W^{[l]} - \alpha \frac{v^{corrected}_{W^{[l]}}}{\sqrt{s^{corrected}_{W^{[l]}}}+\varepsilon}
# \end{cases}$$
#
#
# **Note** that the iterator `l` starts at 0 in the `for` loop while the first parameters are $W^{[1]}$ and $b^{[1]}$. You need to shift `l` to `l+1` when coding.
# In[16]:
# GRADED FUNCTION: update_parameters_with_adam
def update_parameters_with_adam(parameters, grads, v, s, t, learning_rate = 0.01,
                                beta1 = 0.9, beta2 = 0.999, epsilon = 1e-8):
    """
    Perform one Adam optimization step over every layer of the network.

    For each parameter, the first-moment (v) and second-moment (s) moving
    averages are refreshed from the current gradient, bias-corrected with
    the step counter t, and combined into the Adam update rule.

    Arguments:
    parameters -- dict of weights:
                  parameters['W' + str(l)] = Wl
                  parameters['b' + str(l)] = bl
    grads -- dict of gradients:
             grads['dW' + str(l)] = dWl
             grads['db' + str(l)] = dbl
    v -- moving average of the gradients (mutated in place and returned)
    s -- moving average of the squared gradients (mutated in place and returned)
    t -- Adam step counter; must be >= 1 so the bias corrections are defined
    learning_rate -- step size, scalar
    beta1 -- decay rate for the first-moment estimates
    beta2 -- decay rate for the second-moment estimates
    epsilon -- small constant preventing division by zero

    Returns:
    parameters -- dict of updated weights
    v -- updated first-moment dict
    s -- updated second-moment dict
    """
    num_layers = len(parameters) // 2
    v_corrected = {}   # bias-corrected first-moment estimates
    s_corrected = {}   # bias-corrected second-moment estimates
    # The bias-correction denominators depend only on t, so hoist them.
    v_scale = 1 - beta1 ** t
    s_scale = 1 - beta2 ** t
    for layer in range(1, num_layers + 1):
        for grad_key, param_key in (("dW" + str(layer), "W" + str(layer)),
                                    ("db" + str(layer), "b" + str(layer))):
            g = grads[grad_key]
            # Exponentially weighted averages of the gradient and its square.
            v[grad_key] = beta1 * v[grad_key] + (1 - beta1) * g
            s[grad_key] = beta2 * s[grad_key] + (1 - beta2) * (g ** 2)
            # Bias correction compensates for the zero initialization.
            v_corrected[grad_key] = v[grad_key] / v_scale
            s_corrected[grad_key] = s[grad_key] / s_scale
            # Adam update rule.
            parameters[param_key] = (parameters[param_key]
                                     - learning_rate * v_corrected[grad_key]
                                     / (s_corrected[grad_key] ** 0.5 + epsilon))
    return parameters, v, s
# In[17]:
# Sanity check: run a single Adam step (t=2) on fixture data and print every
# updated quantity. update_parameters_with_adam_test_case() is presumably
# supplied by the notebook's testCases helper — it is not defined in this chunk.
parameters, grads, v, s = update_parameters_with_adam_test_case()
parameters, v, s  = update_parameters_with_adam(parameters, grads, v, s, t = 2)
print("W1 = " + str(parameters["W1"]))
print("b1 = " + str(parameters["b1"]))
print("W2 = " + str(parameters["W2"]))
print("b2 = " + str(parameters["b2"]))
print("v[\"dW1\"] = " + str(v["dW1"]))
print("v[\"db1\"] = " + str(v["db1"]))
print("v[\"dW2\"] = " + str(v["dW2"]))
print("v[\"db2\"] = " + str(v["db2"]))
print("s[\"dW1\"] = " + str(s["dW1"]))
print("s[\"db1\"] = " + str(s["db1"]))
print("s[\"dW2\"] = " + str(s["dW2"]))
print("s[\"db2\"] = " + str(s["db2"]))
# **Expected Output**:
#
# <table>
# <tr>
# <td > **W1** </td>
# <td > [[ 1.63178673 -0.61919778 -0.53561312]
# [-1.08040999 0.85796626 -2.29409733]] </td>
# </tr>
#
# <tr>
# <td > **b1** </td>
# <td > [[ 1.75225313]
# [-0.75376553]] </td>
# </tr>
#
# <tr>
# <td > **W2** </td>
# <td > [[ 0.32648046 -0.25681174 1.46954931]
# [-2.05269934 -0.31497584 -0.37661299]
# [ 1.14121081 -1.09245036 -0.16498684]] </td>
# </tr>
#
# <tr>
# <td > **b2** </td>
# <td > [[-0.88529978]
# [ 0.03477238]
# [ 0.57537385]] </td>
# </tr>
# <tr>
# <td > **v["dW1"]** </td>
# <td > [[-0.11006192 0.11447237 0.09015907]
# [ 0.05024943 0.09008559 -0.06837279]] </td>
# </tr>
#
# <tr>
# <td > **v["db1"]** </td>
# <td > [[-0.01228902]
# [-0.09357694]] </td>
# </tr>
#
# <tr>
# <td > **v["dW2"]** </td>
# <td > [[-0.02678881 0.05303555 -0.06916608]
# [-0.03967535 -0.06871727 -0.08452056]
# [-0.06712461 -0.00126646 -0.11173103]] </td>
# </tr>
#
# <tr>
# <td > **v["db2"]** </td>
# <td > [[ 0.02344157]
# [ 0.16598022]
# [ 0.07420442]] </td>
# </tr>
# <tr>
# <td > **s["dW1"]** </td>
# <td > [[ 0.00121136 0.00131039 0.00081287]
# [ 0.0002525 0.00081154 0.00046748]] </td>
# </tr>
#
# <tr>
# <td > **s["db1"]** </td>
# <td > [[ 1.51020075e-05]
# [ 8.75664434e-04]] </td>
# </tr>
#
# <tr>
# <td > **s["dW2"]** </td>
# <td > [[ 7.17640232e-05 2.81276921e-04 4.78394595e-04]
# [ 1.57413361e-04 4.72206320e-04 7.14372576e-04]
# [ 4.50571368e-04 1.60392066e-07 1.24838242e-03]] </td>
# </tr>
#
# <tr>
# <td > **s["db2"]** </td>
# <td > [[ 5.49507194e-05]
# [ 2.75494327e-03]
# [ 5.50629536e-04]] </td>
# </tr>
# </table>
#
# You now have three working optimization algorithms (mini-batch gradient descent, Momentum, Adam). Let's implement a model with each of these optimizers and observe the difference.
# ## 5 - Model with different optimization algorithms
#
# Lets use the following "moons" dataset to test the different optimization methods. (The dataset is named "moons" because the data from each of the two classes looks a bit like a crescent-shaped moon.)
# In[18]:
# Load the two-class "moons" toy dataset. load_dataset() is presumably the
# notebook's opt_utils helper — it is not defined in this chunk.
train_X, train_Y = load_dataset()
# We have already implemented a 3-layer neural network. You will train it with:
# - Mini-batch **Gradient Descent**: it will call your function:
# - `update_parameters_with_gd()`
# - Mini-batch **Momentum**: it will call your functions:
# - `initialize_velocity()` and `update_parameters_with_momentum()`
# - Mini-batch **Adam**: it will call your functions:
# - `initialize_adam()` and `update_parameters_with_adam()`
# In[19]:
def model(X, Y, layers_dims, optimizer, learning_rate = 0.0007, mini_batch_size = 64, beta = 0.9,
          beta1 = 0.9, beta2 = 0.999, epsilon = 1e-8, num_epochs = 10000, print_cost = True):
    """
    Train a 3-layer neural network with a selectable optimization method.

    Arguments:
    X -- input data, of shape (2, number of examples)
    Y -- true "label" vector (1 for blue dot / 0 for red dot), of shape (1, number of examples)
    layers_dims -- python list containing the size of each layer
    optimizer -- one of "gd", "momentum", "adam"
    learning_rate -- step size, scalar
    mini_batch_size -- number of examples per mini-batch
    beta -- Momentum hyperparameter
    beta1 -- Adam decay rate for the first-moment estimates
    beta2 -- Adam decay rate for the second-moment estimates
    epsilon -- Adam division-by-zero guard
    num_epochs -- number of full passes over the dataset
    print_cost -- True to print the cost every 1000 epochs

    Returns:
    parameters -- python dictionary containing the trained weights
    """
    costs = []     # cost history, sampled every 100 epochs for plotting
    adam_t = 0     # Adam step counter, incremented once per mini-batch
    seed = 10      # fixed for grading so the "random" mini-batches reproduce
    parameters = initialize_parameters(layers_dims)
    # Set up the optimizer state; plain gradient descent keeps none.
    v = s = None
    if optimizer == "momentum":
        v = initialize_velocity(parameters)
    elif optimizer == "adam":
        v, s = initialize_adam(parameters)
    for epoch in range(num_epochs):
        # Bump the seed so each epoch reshuffles the dataset differently.
        seed = seed + 1
        for minibatch_X, minibatch_Y in random_mini_batches(X, Y, mini_batch_size, seed):
            # Forward pass, cost, backward pass on this mini-batch.
            a3, caches = forward_propagation(minibatch_X, parameters)
            cost = compute_cost(a3, minibatch_Y)
            grads = backward_propagation(minibatch_X, minibatch_Y, caches)
            # Apply the selected update rule.
            if optimizer == "gd":
                parameters = update_parameters_with_gd(parameters, grads, learning_rate)
            elif optimizer == "momentum":
                parameters, v = update_parameters_with_momentum(parameters, grads, v, beta, learning_rate)
            elif optimizer == "adam":
                adam_t = adam_t + 1
                parameters, v, s = update_parameters_with_adam(parameters, grads, v, s,
                                                               adam_t, learning_rate, beta1, beta2, epsilon)
        # Report/record the cost of the last mini-batch of the epoch.
        if print_cost and epoch % 1000 == 0:
            print ("Cost after epoch %i: %f" %(epoch, cost))
        if print_cost and epoch % 100 == 0:
            costs.append(cost)
    # Plot the sampled learning curve.
    plt.plot(costs)
    plt.ylabel('cost')
    plt.xlabel('epochs (per 100)')
    plt.title("Learning rate = " + str(learning_rate))
    plt.show()
    return parameters
# You will now run this 3 layer neural network with each of the 3 optimization methods.
#
# ### 5.1 - Mini-batch Gradient descent
#
# Run the following code to see how the model does with mini-batch gradient descent.
# In[20]:
# train 3-layer model with plain mini-batch gradient descent
layers_dims = [train_X.shape[0], 5, 2, 1]
parameters = model(train_X, train_Y, layers_dims, optimizer = "gd")
# Predict (predict / plot_decision_boundary / predict_dec presumably come
# from the notebook's opt_utils helper — not defined in this chunk)
predictions = predict(train_X, train_Y, parameters)
# Plot decision boundary
plt.title("Model with Gradient Descent optimization")
axes = plt.gca()
axes.set_xlim([-1.5,2.5])
axes.set_ylim([-1,1.5])
plot_decision_boundary(lambda x: predict_dec(parameters, x.T), train_X, train_Y)
# ### 5.2 - Mini-batch gradient descent with momentum
#
# Run the following code to see how the model does with momentum. Because this example is relatively simple, the gains from using momentum are small; but for more complex problems you might see bigger gains.
# In[21]:
# train 3-layer model with mini-batch gradient descent plus Momentum (beta=0.9)
layers_dims = [train_X.shape[0], 5, 2, 1]
parameters = model(train_X, train_Y, layers_dims, beta = 0.9, optimizer = "momentum")
# Predict
predictions = predict(train_X, train_Y, parameters)
# Plot decision boundary
plt.title("Model with Momentum optimization")
axes = plt.gca()
axes.set_xlim([-1.5,2.5])
axes.set_ylim([-1,1.5])
plot_decision_boundary(lambda x: predict_dec(parameters, x.T), train_X, train_Y)
# ### 5.3 - Mini-batch with Adam mode
#
# Run the following code to see how the model does with Adam.
# In[22]:
# train 3-layer model with mini-batch Adam
layers_dims = [train_X.shape[0], 5, 2, 1]
parameters = model(train_X, train_Y, layers_dims, optimizer = "adam")
# Predict
predictions = predict(train_X, train_Y, parameters)
# Plot decision boundary
plt.title("Model with Adam optimization")
axes = plt.gca()
axes.set_xlim([-1.5,2.5])
axes.set_ylim([-1,1.5])
plot_decision_boundary(lambda x: predict_dec(parameters, x.T), train_X, train_Y)
# ### 5.4 - Summary
#
# <table>
# <tr>
# <td>
# **optimization method**
# </td>
# <td>
# **accuracy**
# </td>
# <td>
# **cost shape**
# </td>
#
# </tr>
# <td>
# Gradient descent
# </td>
# <td>
# 79.7%
# </td>
# <td>
# oscillations
# </td>
# <tr>
# <td>
# Momentum
# </td>
# <td>
# 79.7%
# </td>
# <td>
# oscillations
# </td>
# </tr>
# <tr>
# <td>
# Adam
# </td>
# <td>
# 94%
# </td>
# <td>
# smoother
# </td>
# </tr>
# </table>
#
# Momentum usually helps, but given the small learning rate and the simplistic dataset, its impact is almost negligible. Also, the huge oscillations you see in the cost come from the fact that some minibatches are more difficult than others for the optimization algorithm.
#
# Adam on the other hand, clearly outperforms mini-batch gradient descent and Momentum. If you run the model for more epochs on this simple dataset, all three methods will lead to very good results. However, you've seen that Adam converges a lot faster.
#
# Some advantages of Adam include:
# - Relatively low memory requirements (though higher than gradient descent and gradient descent with momentum)
# - Usually works well even with little tuning of hyperparameters (except $\alpha$)
# **References**:
#
# - Adam paper: https://arxiv.org/pdf/1412.6980.pdf
|
[
"zhuzhenwei95@gmail.com"
] |
zhuzhenwei95@gmail.com
|
8a3336a7bcdd604e74b83c6d88392be64fce7e97
|
daa727ab2f9415e345c19dd722e6de82945281b5
|
/app.py
|
4a8641ba0bb7a05df8464ac7694ac0679330054f
|
[] |
no_license
|
Zeeshan75/docker_demo_71
|
e629753f2eea6a94519eeea9e3aaf2a9c95d042a
|
edb9717757793134cbacb0c0c63a7fb45911e3fc
|
refs/heads/main
| 2023-07-07T17:48:59.373387
| 2021-08-31T08:41:26
| 2021-08-31T08:41:26
| 401,631,570
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 250
|
py
|
# Minimal Flask demo app (used for Docker experiments).
from flask import Flask
import os  # NOTE(review): imported but never used in this chunk
app = Flask(__name__)
# Root endpoint: returns a fixed greeting string.
@app.route("/")
def hello():
    return "getting started docker new change the file ........................!!"
if __name__ == "__main__":
    # Listen on all interfaces so container port mapping works; debug mode
    # is on, which is not suitable for production.
    app.run(host='0.0.0.0',debug=True, port=4000)
|
[
"noreply@github.com"
] |
noreply@github.com
|
65a0dc2550757ed063b5e95fc41794c30d082456
|
5cb5b774b1f5c5f9b503ed5da863f9c7e3773f5e
|
/6 - ZigZag Conversion/main.py
|
60f09ef05958c656df971bd1fcacd79155773e6b
|
[] |
no_license
|
ewalldo/LeetCode-Problems
|
92a5325c8bdb79fc69ea5dff1768f769fc13cc83
|
caba88c5c2b709625e1915554cf3b8a3a73ed2b4
|
refs/heads/master
| 2022-11-16T06:25:32.817039
| 2020-07-12T02:58:16
| 2020-07-12T02:58:16
| 277,758,915
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 508
|
py
|
class Solution:
    def convert(self, s: str, numRows: int) -> str:
        """Return *s* read row-by-row after writing it in a zigzag over
        numRows rows (LeetCode #6, "ZigZag Conversion").
        """
        if numRows == 0 or numRows == 1:
            return s
        length = len(s)
        cycle = 2 * numRows - 2  # one full down-then-up traversal
        pieces = []
        for row in range(numRows):
            # The vertical characters of this row repeat every `cycle` positions.
            for base in range(row, length, cycle):
                pieces.append(s[base])
                # Middle rows also contain one diagonal character per cycle.
                diagonal = base + cycle - 2 * row
                if 0 < row < numRows - 1 and diagonal < length:
                    pieces.append(s[diagonal])
        return "".join(pieces)
|
[
"noreply@github.com"
] |
noreply@github.com
|
2ac6fd37659ab6b940ecee9fbc7eff07fd8ca0db
|
4ebad384d7964d38f31f0f21e79243e1a461c019
|
/Tugas.py
|
cf4664fea8597ce6978d8b8915fc08077aa6bd5f
|
[] |
no_license
|
azrilizha/TugasAkhir
|
7027c13e7eacb5550817c5a08a0266d99c7c531c
|
404bd4e668fcee2d1f5f45ef2243cb1c72f5172c
|
refs/heads/main
| 2023-02-09T23:13:43.910051
| 2021-01-03T09:16:36
| 2021-01-03T09:16:36
| 326,343,994
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,350
|
py
|
import csv
import os
import datetime
# Snapshot of today's date at import time (handlers rebind their own local `date`).
date = datetime.date.today()
# CSV file that stores all service records.
nama_file = "data servis Virtual Komputer.csv"
def blank_screen():
    """Clear the terminal, picking the right shell command for the platform."""
    if os.name == 'nt':
        os.system('cls')    # Windows
    else:
        os.system('clear')  # POSIX
barang = []
# Import-time side effect: (re)create the CSV file and write its header row.
# NOTE(review): mode 'w' truncates any existing data every time the module
# runs — confirm that losing previous records on restart is intended.
with open('data servis Virtual Komputer.csv', 'w', newline='') as csv_file:
    barang = ["Tanggal","Nama Barang","Jumlah","Estimasi","Biaya Total"]
    tulis = csv.writer(csv_file, delimiter=';')
    tulis.writerow(barang)
    tulis = csv.writer(csv_file)  # NOTE(review): rebinds the writer without a delimiter and appears unused afterwards
#1 : Menambah atau menulis file data baru
def tambahkan():
    """Prompt for one service item and append it to the CSV data file."""
    blank_screen()
    # Append mode preserves earlier rows written in this session.
    with open(nama_file, 'a', newline='') as csv_file:
        print("TAMBAHKAN BARANG".center(90))
        print("SERVIS ELEKTRONIK DAN KOMPUTER".center(90))
        print("="*90)
        date = datetime.date.today()
        tanggal = date.strftime('%d-%m-%Y')
        nama = input('Nama Barang : ')
        jumlah = int(input('Jumlah : '))  # NOTE(review): raises ValueError on non-numeric input
        biaya = int(input('Estimasi : Rp.'))
        # Total estimate = quantity * per-item estimate.
        biaya_sementara = jumlah*biaya
        print("Estimasi Biaya Servis Anda : Rp.", biaya_sementara)
        data = [tanggal,nama,jumlah,biaya,biaya_sementara]
        tulis = csv.writer(csv_file, delimiter=';')
        tulis.writerow(data)
        print("Data sukses ditambahkan..")
    kembali()
#2 : Menampilkan data dari file sebelumnya
def tampil_data():
    """Read the CSV file and print every stored service record as a table."""
    blank_screen()
    barang = []
    with open(nama_file) as csv_file:
        read_data = csv.reader(csv_file, delimiter=";")
        for row in read_data:
            barang.append(row)
    print("TAMPILKAN BARANG".center(90))
    print("SERVIS ELEKTRONIK DAN KOMPUTER".center(90))
    print("_"*90)
    # More than one row means the file holds data beyond the header.
    if len(barang) > 1:
        # Counter starts at -1 so the header row prints as number 0.
        number = -1
        for dt in barang:
            number += 1
            print (f"{str(number)} \t {dt[0]:<15} {dt[1]:<18} {dt[2]:^17} {dt[3]:<19} {dt[4]:<18}")
        print ("="*85)
        kembali()
    elif len(barang) <=1 :
        print("TIDAK ADA BARANG".center(90))
        kembali()
#3 : Menghapus data yang ingin dihapus
def hapus():
    """Show all stored rows and delete the one the user selects, then
    rewrite the CSV file without it.
    """
    blank_screen()
    barang = []
    with open(nama_file) as csv_file:
        read_data = csv.reader(csv_file, delimiter=";")
        for row in read_data:
            barang.append(row)
    print("HAPUS DATA BARANG".center(90))
    print("SERVIS ELEKTRONIK DAN KOMPUTER".center(90))
    print("="*90)
    if len(barang)>1:
        number = 0
        for dt in barang:
            # NOTE(review): row 0 is the CSV header; choosing 0 below would
            # delete the header line itself — confirm this is intended.
            print (f"{str(number)} \t {dt[0]:<15} {dt[1]:<18} {dt[2]:^17} {dt[3]:<19} {dt[4]:<18}")
            number += 1
        print ("_"*90)
    elif len(barang) <= 1:
        print("TIDAK ADA BARANG".center(90))
        kembali()
        # Bug fix: without this return, control falls through to the deletion
        # prompt below and indexes an (effectively) empty table.
        return
    number = int(input('Pilih nomer : '))
    del barang[number]
    # Rewrite the whole file with the remaining rows.
    with open(nama_file, 'w', newline='') as csv_file:
        tulis = csv.writer(csv_file, delimiter=';')
        for dt in barang:
            data = [dt[0],dt[1],dt[2],dt[3],dt[4]]
            tulis.writerow(data)
    print("Data sukses dihapus..")
    kembali()
#4 mengedit data
def edit_data():
    """Let the user pick a row, re-enter all of its fields, and rewrite the file."""
    blank_screen()
    barang = []
    with open(nama_file, mode="r") as csv_file:
        read_data = csv.reader(csv_file, delimiter=";")
        for row in read_data:
            barang.append(row)
    print("UBAH DATA BARANG".center(90))
    print("SERVIS ELEKTRONIK DAN KOMPUTER".center(90))
    print("="*90)
    # More than one row means the file holds data beyond the header.
    if len(barang) > 1:
        number = 0
        for dt in barang:
            # NOTE(review): row 0 is the CSV header; selecting 0 below would
            # overwrite the header with data — confirm this is intended.
            print (f"{str(number)} \t {dt[0]:<15} {dt[1]:<18} {dt[2]:^17} {dt[3]:<19} {dt[4]:<18}")
            number += 1
        print ("_"*90)
        number = int(input('Pilih nomer : '))
        # Re-stamp the row with today's date and the freshly entered values.
        date = datetime.date.today()
        tanggal = date.strftime('%d-%m-%Y')
        nama = input('Nama Barang : ')
        jumlah = int(input('Jumlah : '))
        biaya = int(input('Estimasi : Rp.'))
        biaya_sementara = jumlah*biaya
        barang[number][0] = tanggal
        barang[number][1] = nama
        barang[number][2] = jumlah
        barang[number][3] = biaya
        barang[number][4] = biaya_sementara
        # Rewrite the whole file with the edited table.
        with open(nama_file, 'w', newline='') as csv_file:
            tulis = csv.writer(csv_file, delimiter=';')
            for dt in barang:
                data = [dt[0],dt[1],dt[2],dt[3],dt[4]]
                tulis.writerow(data)
        print("Data sukses diubah..")
        kembali()
    elif len(barang) <= 1:
        print("TIDAK ADA BARANG".center(90))
        kembali()
def beranda():
    """Show the main menu and dispatch to the handler the user picks."""
    blank_screen()
    print("="*72)
    print("SELAMAT DATANG".center(72))
    print('APLIKASI PENCATATAN SERVIS ELEKTRONIK DAN KOMPUTER'.center(72))
    print("="*72)
    print('''
    |99| Keluar
    |1| Tambah data Servis
    |2| Lihat data Servis
    |3| Hapus data Servis (Jika sudah)
    |4| Edit data Servis''')
    print("="*72)
    pilihan_menu = input(" Pilih tindakan : ")
    # Dispatch: 1=add, 2=list, 3=delete, 4=edit, 99=quit; anything else
    # re-prompts via kembali().
    if(pilihan_menu == "1"):
        tambahkan()
    elif(pilihan_menu == "2"):
        tampil_data()
    elif(pilihan_menu == "3"):
        hapus()
    elif(pilihan_menu == "4"):
        edit_data()
    elif(pilihan_menu == "99"):
        exit()
    else:
        kembali()
def kembali():
    """Wait for Enter, then show the main menu again.

    NOTE(review): this recurses into beranda() instead of returning to the
    caller's loop, so the call stack grows with every menu round-trip.
    """
    input("\n Tekan enter untuk kembali ke beranda..")
    beranda()
if __name__== "__main__":
    # Entry point: show the menu forever; the program ends via option 99 (exit()).
    while True:
        beranda()
|
[
"noreply@github.com"
] |
noreply@github.com
|
39c3141c70b4a3fe7f93408a9993d754ec1d4bd5
|
e2c6f262bb4ea12e3adb4534b3d7e3451c416dc4
|
/slarson/pywikipedia/maintcont.py
|
b55f806b04bc8e108737425fb4b8a8401c72cf48
|
[
"MIT",
"Python-2.0",
"LicenseRef-scancode-mit-old-style"
] |
permissive
|
slarson/ncmir-semantic-sandbox
|
c48e8c9dd5a6f5769d4422c80ca58c370786bfab
|
d6a02a5cf4415796f25d191d541ebaccaab53e7f
|
refs/heads/master
| 2016-09-06T04:10:21.136714
| 2009-03-31T09:49:59
| 2009-03-31T09:49:59
| 32,129,001
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,925
|
py
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
The controller bot for maintainer.py
Exactly one instance should be running of it. To check, use /whois maintcont on irc.freenode.net
This script requires the Python IRC library http://python-irclib.sourceforge.net/
Warning: experimental software, use at your own risk
"""
__version__ = '$Id$'
# Author: Balasyum
# http://hu.wikipedia.org/wiki/User:Balasyum
from ircbot import SingleServerIRCBot
from irclib import nm_to_n
import threading
import time
import math
# Default pipe-separated task list handed out to workers.
tasks = 'rciw|censure'
# Per-project overrides of the task list (project name -> pipe-separated tasks).
projtasks = {}
# Registered workers as [nickname, project] pairs.
mainters = []
# Last-heard-from timestamp (time.time()) per worker nickname.
activity = {}
class MaintcontBot(SingleServerIRCBot):
    """IRC controller bot (Python 2) that registers maintainer workers and
    divides the task list among the workers of each project group.
    """
    def __init__(self, nickname, server, port=6667):
        SingleServerIRCBot.__init__(self, [(server, port)], nickname, nickname)
    def on_nicknameinuse(self, c, e):
        # Append underscores until a free nickname is found.
        c.nick(c.get_nickname() + "_")
    def on_welcome(self, c, e):
        # Start the background worker-liveness loop once connected.
        t = threading.Thread(target=self.lister)
        t.setDaemon(True)
        t.start()
    def on_privmsg(self, c, e):
        """Handle worker protocol messages: 'workerjoin <project>' registers
        a worker; 'active' refreshes its liveness timestamp.
        """
        nick = nm_to_n(e.source())
        c = self.connection
        cmd = e.arguments()[0]
        do = cmd.split()
        if do[0] == "workerjoin":
            c.privmsg(nick, "accepted")
            mainters.append([nick, do[1]])
            activity[nick] = time.time()
            print "worker got, name:", nick, "job:", do[1]
            # Re-split the project's tasks across its (now larger) worker pool.
            self.retasker(do[1])
        elif do[0] == "active":
            activity[nick] = time.time()
    def on_dccmsg(self, c, e):
        pass
    def on_dccchat(self, c, e):
        pass
    def lister(self):
        """Background loop: every minute, drop workers silent for >30s and
        print the current worker roster.
        """
        while True:
            print
            print "worker list:"
            for mainter in mainters:
                if time.time() - activity[mainter[0]] > 30:
                    print "*", mainter[0], "has been removed"
                    # NOTE(review): removing from `mainters` while iterating it
                    # can skip the next element — confirm this is acceptable here.
                    mainters.remove(mainter)
                    del activity[mainter[0]]
                    self.retasker(mainter[1])
                    continue
                print "mainter name:", mainter[0], "job:", mainter[1]
            print "--------------------"
            print
            time.sleep(1*60)
    def retasker(self, group, optask = ''):
        """Split the group's task list evenly across its workers and privmsg
        each worker its slice as 'tasklist a|b|...'.
        """
        ingroup = 0
        for mainter in mainters:
            if mainter[1] == group:
                ingroup += 1
        if ingroup == 0:
            return
        # Project-specific task list if one exists, else the global default.
        # (dict.has_key is Python 2 only.)
        if projtasks.has_key(group):
            grt = projtasks[group]
        else:
            grt = tasks
        tpc = grt.split('|')
        tpcn = round(len(tpc) / ingroup)  # tasks per worker (Python 2 integer division)
        i = 0
        for mainter in mainters:
            if mainter[1] != group:
                continue
            tts = '|'.join(tpc[int(round(i * tpcn)):int(round((i + 1) * tpcn))])
            # NOTE(review): tts is a string, so `tts != False` is always True
            # (even for ''); this was probably meant to be `if tts:`.
            if tts != False:
                self.connection.privmsg(mainter[0], "tasklist " + tts)
            i += 1
def main():
    """Connect the controller bot to freenode and run its event loop forever."""
    bot = MaintcontBot("maintcont", "irc.freenode.net")
    bot.start()
if __name__ == "__main__":
    main()
|
[
"stephen.larson@933566eb-c141-0410-b91b-f3a7fcfc7766"
] |
stephen.larson@933566eb-c141-0410-b91b-f3a7fcfc7766
|
327b9720cf90db9b40e56d118b4b15e2dd54d0ca
|
4668d0c19f483cf40d37c0d3b3b22ec4a273eb3c
|
/intercept/plain_tcp_bridge.py
|
93d518a7187ec58284a1d8991a4415fff18a18f7
|
[] |
no_license
|
jevinskie/proxytap
|
ee423996c5bd8a516312d0a20a3ee4656f7e2067
|
1f4dadf68247959fbc56f5e49b7f5d6a836ad7a9
|
refs/heads/master
| 2016-09-05T19:20:07.796643
| 2012-01-19T21:24:14
| 2012-01-19T21:24:14
| 2,572,088
| 4
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 561
|
py
|
from scapy.all import Ether,TCP
import socket
from tcp_state import *
class PlainTcpBridge(object):
    """Bridges intercepted TCP connections to plain outbound sockets (Python 2)."""
    def __init__(self):
        # Maps an intercepted-socket key to its bridge state dict.
        self.sockets = {}
    def set_tcp_interceptor(self, tcp_int):
        # Back-reference to the interceptor that hands us connections.
        self.tcp_int = tcp_int
    def connect(self, sock):
        # NOTE(review): sock appears to be a tuple whose elements [2:4] are the
        # (host, port) to dial — confirm against the interceptor's call site.
        try:
            print "start con"
            # NOTE(review): brsock is created but never stored or closed — the
            # bridged socket is dropped here; confirm whether that is intended.
            brsock = socket.create_connection(sock[2:4])
            print "finished con"
            if self.tcp_int.finish_connect(sock):
                self.sockets[sock] = {'state': tcp_states.ESTABLISHED}
        except Exception, e:
            # Best-effort: any failure is printed and swallowed (Python 2 syntax).
            print e
|
[
"jevinsweval@gmail.com"
] |
jevinsweval@gmail.com
|
7f097c9f8210d4dbb9dbbe2375d00e98ab669c30
|
773904616b39d54e8c8188903ae2f0f520d64c72
|
/recipes/masterlist.py
|
cac93cc35df29407fac234458c0fa288e2be3978
|
[] |
no_license
|
youoldmiyoung/recipe-aggregator
|
fe3170bfd83583a9ec34e1cb1d09819a81e2bf7a
|
ae3699e6efd1903a972ec8a0fe5c3a070e2efd11
|
refs/heads/main
| 2023-04-01T08:24:52.394182
| 2021-04-11T18:40:23
| 2021-04-11T18:40:23
| 347,492,226
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 159,562
|
py
|
masterList = [['Cauliflower and Mushroom Curry', 'https://olivesfordinner.com/2020/05/cauliflower-and-mushroom-curry.html', 'https://olivesfordinner.com/wp-content/uploads/2020/05/Cauliflower-and-Mushroom-Curry-cover-320x320.jpg'], ['Easy Homemade Pizza Dough', 'https://olivesfordinner.com/2020/04/easy-homemade-pizza-dough.html', 'https://olivesfordinner.com/wp-content/uploads/2020/04/2-ingredient-pizza-dough-320x320.jpg'], ['Social Distancing Soup', 'https://olivesfordinner.com/2020/04/social-distancing-soup.html', 'https://olivesfordinner.com/wp-content/uploads/2020/04/social-distancing-soup-cover-320x320.jpg'], ['The Best Buffalo Cauliflower | Crispy + Spicy + Air Fried!', 'https://olivesfordinner.com/2019/11/the-best-buffalo-cauliflower-crispy-spicy-air-fried.html', 'https://olivesfordinner.com/wp-content/uploads/2019/11/Olives-for-Dinner-The-Best-Buffalo-Cauliflower-cover-320x320.jpg'], ['Oyster Mushroom Tacos with Chipotle + Lime Sauce', 'https://olivesfordinner.com/2019/08/oyster-mushroom-tacos-with-chipotle-lime-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2019/08/Oyster-Mushroom-Tacos-cover-320x320.jpg'], ['Vegan Lox', 'https://olivesfordinner.com/2019/08/vegan-lox.html', 'https://olivesfordinner.com/wp-content/uploads/2019/08/Carrot-Lox-cover-320x320.jpg'], ['Red Lentil Fritters with Mint-Garlic Yogurt Sauce', 'https://olivesfordinner.com/2019/07/red-lentil-fritters-with-mint-garlic-yogurt-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2019/07/Olives-for-Dinner-Red-Lentil-Fritters-cover-320x320.jpg'], ['Tofu Satay with Spicy Peanut Sauce', 'https://olivesfordinner.com/2019/07/tofu-satay-with-spicy-peanut-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2019/07/Grilled-Tofu-with-Peanut-Sauce-cover-320x320.jpg'], ['Vegan Reuben with Mandolined Portobello', 'https://olivesfordinner.com/2019/04/vegan-reuben-with-mandolined-portobello.html', 
'https://olivesfordinner.com/wp-content/uploads/2019/04/Vegan-Reuben-cover-320x320.jpg'], ['Deep-fried Jackfruit with Garlicky-Dill Mayo', 'https://olivesfordinner.com/2019/04/deep-fried-jackfruit-with-garlicky-dill-mayo.html', 'https://olivesfordinner.com/wp-content/uploads/2019/04/Deep-fried-Jackfruit-cover-320x320.jpg'], ['Cauliflower Puree with Roasted Mushrooms and Caramelized Onions', 'https://olivesfordinner.com/2019/03/cauliflower-puree-with-roasted-mushrooms-and-caramelized-onions.html', 'https://olivesfordinner.com/wp-content/uploads/2019/03/Olives-for-Dinner-Cauliflower-Puree-with-Roasted-Mushrooms-and-Caramelized-Onions-320x320.jpg'], ['Bang Bang Tofu Taco Bowl', 'https://olivesfordinner.com/2019/02/bang-bang-tofu-taco-bowl.html', 'https://olivesfordinner.com/wp-content/uploads/2019/02/Bang-Bang-Tofu-Taco-Bowl-cover-320x320.jpg'], ['Pasta with Roasted Red Pepper Sauce and Caramelized Shallots', 'https://olivesfordinner.com/2012/08/pasta-with-roasted-red-pepper-sauce-and.html', 'https://olivesfordinner.com/wp-content/uploads/2012/08/7762912426_494a4743ce_z-320x320.jpg'], ['Toasted Farro with Roasted Shiitake, Shallots and Pine Nuts', 'https://olivesfordinner.com/2018/11/toasted-farro-with-roasted-shiitake-shallots-and-pine-nuts.html', 'https://olivesfordinner.com/wp-content/uploads/2018/11/Toasted-Farro-with-Roasted-Shiitake-Shallots-and-Pine-Nuts-cover-320x320.jpg'], ['Sambal + Ginger Tofu with Air-Fried Bok Choy', 'https://olivesfordinner.com/2018/10/sambal-ginger-tofu-with-air-fried-bok-choy.html', 'https://olivesfordinner.com/wp-content/uploads/2018/10/Sambal-Ginger-Tofu-with-Air-Fried-Bok-Choy-_-cover-320x320.jpg'], ['Roasted Parsnip, Cauliflower + Garlic Soup', 'https://olivesfordinner.com/2018/10/roasted-parsnip-cauliflower-garlic-soup.html', 'https://olivesfordinner.com/wp-content/uploads/2018/10/Roasted-Parsnip-Cauliflower-Garlic-Soup-cover-320x320.jpg'], ['Riced Cauliflower + Pressed Portos', 
'https://olivesfordinner.com/2018/09/riced-cauliflower-pressed-portos.html', 'https://olivesfordinner.com/wp-content/uploads/2018/09/Riced-Cauliflower-Pressed-Porto-cover-320x320.jpg'], ['Farro + Beet Burgers with Kimchi Mayo', 'https://olivesfordinner.com/2018/09/farro-beet-burgers-with-kimchi-mayo.html', 'https://olivesfordinner.com/wp-content/uploads/2018/09/Farro-Beet-Burgers-13236-320x320.jpg'], ['Firecracker Cauliflower', 'https://olivesfordinner.com/2018/08/firecracker-cauliflower.html', 'https://olivesfordinner.com/wp-content/uploads/2018/08/Firecracker-Cauliflower-cover-320x320.jpg'], ['Vegan Egg Drop Soup', 'https://olivesfordinner.com/2018/08/vegan-egg-drop-soup.html', 'https://olivesfordinner.com/wp-content/uploads/2018/08/Vegan-Eggdrop-cover-320x320.jpg'], ['Seitan Bourguignon', 'https://olivesfordinner.com/2018/01/seitan-bourguignon.html', 'https://olivesfordinner.com/wp-content/uploads/2018/01/Seitan-Bourginon-cover1-320x320.jpg'], ['Quick + Easy Focaccia Pizza', 'https://olivesfordinner.com/2018/01/quick-easy-focaccia-pizza.html', 'https://olivesfordinner.com/wp-content/uploads/2018/01/Focaccia-cover-320x320.jpg'], ['New England Vegan Chowder', 'https://olivesfordinner.com/2017/12/new-england-vegan-chowder.html', 'https://olivesfordinner.com/wp-content/uploads/2017/12/New-England-Vegan-Chowder-cover-320x320.jpg'], ['Coconut-Chickpea Crepes with Smoky Herbed Mushrooms', 'https://olivesfordinner.com/2013/02/coconut-chickpea-crepes-with-smoky.html', 'https://olivesfordinner.com/wp-content/uploads/2013/02/8455834235_7bfba45015_z-320x320.jpg'], ['Vegan Scallops in a Bacon-Cashew Cream Sauce', 'https://olivesfordinner.com/2017/11/vegan-scallops-in-a-bacon-cashew-cream-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2017/10/King-Oyster-Mushroom-Scallops-in-a-Bacon-Cashew-Cream-Sauce-cover-320x320.jpg'], ['Grilled Bok Choy with Salty + Spicy Oyster Mushrooms', 
'https://olivesfordinner.com/2017/10/grilled-bok-choy-with-salty-spicy-oyster-mushrooms.html', 'https://olivesfordinner.com/wp-content/uploads/2017/10/Grilled-Bok-Choy-with-Salty-Spicy-Oyster-Mushrooms-cover-320x320.jpg'], ['Air-Fried Buffalo Cauliflower Steaks', 'https://olivesfordinner.com/2017/10/air-fried-buffalo-cauliflower-steaks.html', 'https://olivesfordinner.com/wp-content/uploads/2017/10/Air-Fried-Buffalo-Cauliflower-Steaks-cover-320x320.jpg'], ['Pasta with Roasted Tomatoes + Chickpeas', 'https://olivesfordinner.com/2017/10/pasta-with-roasted-tomatoes-chickpeas.html', 'https://olivesfordinner.com/wp-content/uploads/2017/09/Pasta-with-Roasted-Tomatoes-and-Chickpeas-cover-320x320.jpg'], ['Shroom Stroganoff', 'https://olivesfordinner.com/2017/09/shroom-stroganoff.html', 'https://olivesfordinner.com/wp-content/uploads/2017/08/Shroom-Stroganoff-cover-320x320.jpg'], ['Gobi Manchurian', 'https://olivesfordinner.com/2017/09/gobi-manchurian.html', 'https://olivesfordinner.com/wp-content/uploads/2017/09/Gobi-Manchurian-cover1-320x320.jpg'], ['Silky Bell Pepper Sushi', 'https://olivesfordinner.com/2017/08/silky-bell-pepper-sushi.html', 'https://olivesfordinner.com/wp-content/uploads/2017/08/Bell-Pepper-Sushi-cover-320x320.jpg'], ['Sweet Potato Tempura and Avocado Rolls with Teriyaki Glaze', 'https://olivesfordinner.com/2017/05/sweet-potato-tempura-and-avocado-rolls-with-teriyaki-glaze.html', 'https://olivesfordinner.com/wp-content/uploads/2017/05/Sweet-Potato-Tempura-and-Avocado-Rolls-with-Teriyaki-Glaze-cover-320x320.jpg'], ['Vegan Lobster Roll', 'https://olivesfordinner.com/2017/05/vegan-lobster-roll.html', 'https://olivesfordinner.com/wp-content/uploads/2017/05/Vegan-Lobster-Roll-cover-320x320.jpg'], ['Vegan Shrimp', 'https://olivesfordinner.com/2017/05/vegan-shrimp.html', 'https://olivesfordinner.com/wp-content/uploads/2017/04/Vegan-Shrimp-cover-320x320.jpg'], ['Korean-Style Tacos, made with Sweet Earth Brand Traditional Seitan (+bonus news!)', 
'https://olivesfordinner.com/2017/04/korean-style-tacos-made-with-sweet-earth-brand-traditional-seitan-bonus-news.html', 'https://olivesfordinner.com/wp-content/uploads/2017/04/Korean-Style-Tacos-cover-320x320.jpg'], ['Maitake + Oyster Mushroom Wontons in a Roasted Ginger Broth', 'https://olivesfordinner.com/2017/03/maitake-oyster-mushroom-wontons-in-a-roasted-ginger-broth.html', 'https://olivesfordinner.com/wp-content/uploads/2017/03/Maitake-Oyster-Mushroom-Wontons-cover-320x320.jpg'], ['Mock Eel with a Sticky+Sweet Ginger Sauce', 'https://olivesfordinner.com/2017/03/mock-eel-with-a-stickysweet-ginger-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2017/02/Mock-Eel-cover-320x320.jpg'], ['Vegan Po Boy', 'https://olivesfordinner.com/2017/03/vegan-po-boy.html', 'https://olivesfordinner.com/wp-content/uploads/2017/03/Vegan-Po-Boy-cover-320x320.jpg'], ['Carrot Lox Stuffed + Fried Ravioli', 'https://olivesfordinner.com/2017/03/carrot-lox-stuffed-fried-ravioli.html', 'https://olivesfordinner.com/wp-content/uploads/2017/02/Carrot-Lox-Stuffed-Fried-Ravioli-cover-320x320.jpg'], ['Japanese-Style Breakfast Bowl', 'https://olivesfordinner.com/2017/02/japanese-style-breakfast-bowl.html', 'https://olivesfordinner.com/wp-content/uploads/2017/02/Japanese-Style-Breakfast-Bowl-cover-320x320.jpg'], ['Buttermilk-Battered + Air-Fried Tofu', 'https://olivesfordinner.com/2017/02/buttermilk-battered-air-fried-tofu.html', 'https://olivesfordinner.com/wp-content/uploads/2017/01/Buttermilk-Battered-and-Air-Fried-Tofu_cover-1-320x320.jpg'], ['Veggie Grill Copycat Recipe | Koreatown Tacos', 'https://olivesfordinner.com/2017/01/veggie-grill-copycat-recipe-koreatown-tacos.html', 'https://olivesfordinner.com/wp-content/uploads/2017/01/Veggie-Grill-Copycat-Recipe-Koreatown-Tacos-320x320.png'], ['Roasted Balsamic Beets + Rutabaga', 'https://olivesfordinner.com/2017/01/roasted-balsamic-beets-rutabaga.html', 
'https://olivesfordinner.com/wp-content/uploads/2017/01/Roasted-Balsamic-Beets-and-Rutabaga-cover-320x320.jpg'], ['Vegan Ramen', 'https://olivesfordinner.com/2017/01/vegan-ramen.html', 'https://olivesfordinner.com/wp-content/uploads/2017/01/Vegan-Ramen-cover-320x320.jpg'], ['Roasted Ginger and Coconut Soup', 'https://olivesfordinner.com/2016/11/roasted-ginger-and-coconut-soup.html', 'https://olivesfordinner.com/wp-content/uploads/2016/11/Roasted-Ginger-320x320.jpg'], ['Farro Sausages', 'https://olivesfordinner.com/2016/11/farro-sausages.html', 'https://olivesfordinner.com/wp-content/uploads/2016/11/Farro-Sausage-320x320.jpg'], ['Savory + Crispy Vietnamese Crepes', 'https://olivesfordinner.com/2016/10/savory-crispy-vietnamese-crepes.html', 'https://olivesfordinner.com/wp-content/uploads/2016/10/Banh-Xeo-cover-320x320.jpg'], ['Savory + Crispy Vietnamese Crepes', 'https://olivesfordinner.com/2016/10/savory-crispy-vietnamese-crepes.html', 'https://olivesfordinner.com/wp-content/uploads/2016/10/Banh-Xeo-cover-320x320.jpg'], ['Buffalo Cauliflower Pizza', 'https://olivesfordinner.com/2016/10/buffalo-cauliflower-pizza.html', 'https://olivesfordinner.com/wp-content/uploads/2016/10/Buffalo-Cauliflower-Pizza-320x320.jpg'], ['Seared King Oyster Mushrooms + a Homemade Teriyaki Glaze', 'https://olivesfordinner.com/2016/10/seared-king-oyster-mushrooms-a-homemade-teriyaki-glaze.html', 'https://olivesfordinner.com/wp-content/uploads/2016/09/Seared-King-Oyster-Mushrooms-320x320.jpg'], ['Crispy Kung Pao Brussels Sprouts', 'https://olivesfordinner.com/2016/09/crispy-kung-pao-brussels-sprouts.html', 'https://olivesfordinner.com/wp-content/uploads/2016/09/kung-pao-cover-320x320.jpg'], ['Pressed Maitake Buns with Gochujang-Hoisin Glaze', 'https://olivesfordinner.com/2016/09/pressed-maitake-buns-with-gochujang-hoisin-glaze.html', 'https://olivesfordinner.com/wp-content/uploads/2016/08/Pressed-Maitake-Buns-320x320.jpg'], ['Tofu Banh Mi', 
'https://olivesfordinner.com/2016/08/tofu-banh-mi.html', 'https://olivesfordinner.com/wp-content/uploads/2016/08/tofu-banh-mi-cover-320x320.jpg'], ['Crispy Buffalo Cauliflower Salad', 'https://olivesfordinner.com/2016/08/crispy-buffalo-cauliflower-salad.html', 'https://olivesfordinner.com/wp-content/uploads/2016/08/coverphoto-320x320.jpg'], ['Crispy + Spicy Enoki Mushroom Roll', 'https://olivesfordinner.com/2016/07/crispy-spicy-enoki-mushroom-roll.html', 'https://olivesfordinner.com/wp-content/uploads/2016/07/28327021802_8fcd205138_z-320x320.jpg'], ['Mâche and Mint Salad with Buttermilk-Ponzu Dressing', 'https://olivesfordinner.com/2016/06/mache-and-mint-salad-with-buttermilk-ponzu-dressing.html', 'https://olivesfordinner.com/wp-content/uploads/2016/05/mache-cover-320x320.jpg'], ['Crispy Vegan Shrimp Toast', 'https://olivesfordinner.com/2016/06/crispy-vegan-shrimp-toast.html', 'https://olivesfordinner.com/wp-content/uploads/2016/06/shrimptoastcover-320x320.jpg'], ['Savory + Seared Watermelon', 'https://olivesfordinner.com/2016/06/savoryseared-watermelon.html', 'https://olivesfordinner.com/wp-content/uploads/2016/05/watermelon-cover-320x320.jpg'], ['Cauliflower and Cashew Cream Soup', 'https://olivesfordinner.com/2013/12/cauliflower-and-cashew-cream-soup.html', 'https://olivesfordinner.com/wp-content/uploads/2013/12/AA4A6617-320x320.jpg'], ['Bang Bang Cauliflower', 'https://olivesfordinner.com/2016/05/bang-bang-cauliflower.html', 'https://olivesfordinner.com/wp-content/uploads/2016/05/Bang-Bang-Cauliflower-320x320.jpg'], ['Crispy Hearts of Palm Tacos', 'https://olivesfordinner.com/2016/05/crispy-hearts-of-palm-tacos.html', 'https://olivesfordinner.com/wp-content/uploads/2016/05/tacocover-320x320.jpg'], ['Watermelon Tuna Poke Bowl', 'https://olivesfordinner.com/2016/04/watermelon-tuna-poke-bowl.html', 'https://olivesfordinner.com/wp-content/uploads/2016/04/26505343176_1dc831866c_z-320x320.jpg'], ['Spaghetti with Vegan Scallops, Toasted Walnuts and Plum Tomatoes', 
'https://olivesfordinner.com/2016/04/spaghetti-with-scallops-toasted-walnuts.html', 'https://olivesfordinner.com/wp-content/uploads/2016/04/Spaghetti-with-vegan-scallops-toasted-walnuts-and-plum-tomatoes-320x320.jpg'], ['Pasta with Seaweed-Matcha Butter and Vegan Scallops', 'https://olivesfordinner.com/2016/04/pasta-with-seaweed-matcha-butter-and-vegan-scallops.html', 'https://olivesfordinner.com/wp-content/uploads/2016/04/Pasta-with-Seaweed-Matcha-Butter-320x320.jpg'], ['Creamy + Chunky Pasta Sauce', 'https://olivesfordinner.com/2016/04/creamy-chunky-pasta-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2016/04/pastamisecover-320x320.jpg'], ['Vegan Tom Kha Gai made with Fysh™ Sauce + an Interview with product founder Zach Grossman [closed]', 'https://olivesfordinner.com/2016/03/vegan-tom-kha-gai-made-with-fysh-sauce-an-interview-with-product-founder-zach-grossman.html', 'https://olivesfordinner.com/wp-content/uploads/2016/03/TGKcover-320x320.jpg'], ['Spicy Vegan Shrimp Cakes', 'https://olivesfordinner.com/2016/03/spicy-vegan-shrimp-cakes.html', 'https://olivesfordinner.com/wp-content/uploads/2016/03/Shrimp-Cake-Cover-320x320.jpg'], ['Spicy Carrot Lox and Avocado Sushi', 'https://olivesfordinner.com/2016/03/spicy-carrot-lox-and-avocado-sushi.html', 'https://olivesfordinner.com/wp-content/uploads/2016/03/Spicy-Carrot-Lox-Sushi-320x320.jpg'], ['Mongolian Soy Curls', 'https://olivesfordinner.com/2016/03/mongolian-soy-curls.html', 'https://olivesfordinner.com/wp-content/uploads/2016/03/MongolianSoyCurls-320x320.jpg'], ['Super-Versatile Crispy Tofu Cutlets', 'https://olivesfordinner.com/2016/01/super-versatile-crispy-tofu-cutlets.html', 'https://olivesfordinner.com/wp-content/uploads/2016/01/tofucutlets_0011-Copy-320x320.jpg'], ['Vegan Dynamite Rolls', 'https://olivesfordinner.com/2016/01/vegan-dynamite-rolls.html', 'https://olivesfordinner.com/wp-content/uploads/2016/01/vegandynamiterollscover_0044-320x320.jpg'], ['Split Pea Soup with Toasted Sesame Oil', 
'https://olivesfordinner.com/2011/01/split-pea-soup-with-toasted-sesame-oi.html', 'https://olivesfordinner.com/wp-content/uploads/2011/01/5352648559_44f01a7575-320x320.jpg'], ['Vegan Pork Buns with Oyster Mushroom Bacon', 'https://olivesfordinner.com/2015/11/vegan-pork-buns-with-oyster-mushroo.html', 'https://olivesfordinner.com/wp-content/uploads/2015/11/22575913837_9fea97e04f_o-320x320.jpg'], ['Spicy Thai-Style Pizza with Peanut Sauce', 'https://olivesfordinner.com/2015/11/spicy-thai-style-pizza-with-peanut-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2015/11/22663496301_2f58089eb9_c-320x320.jpg'], ['Vegan Sausage-Stuffed Homemade Ravioli', 'https://olivesfordinner.com/2015/10/vegan-sausage-stuffed-homemade-ravioli.html', 'https://olivesfordinner.com/wp-content/uploads/2015/10/21948265815_36b2850a74_b-320x320.jpg'], ['Beet Tartare with Mango Yolk', 'https://olivesfordinner.com/2015/10/beet-tartare-with-mango-yolk.html', 'https://olivesfordinner.com/wp-content/uploads/2015/10/tartarecover2-320x320.jpg'], ['Pulled Jackfruit Sandwiches with Red Cabbage Slaw', 'https://olivesfordinner.com/2015/08/pulled-jackfruit-sandwiches-with-red.html', 'https://olivesfordinner.com/wp-content/uploads/2015/08/19558130643_8a4333c2bd_c-320x320.jpg'], ['The Abundance Diet | Review + Recipe!', 'https://olivesfordinner.com/2015/07/the-abundance-diet-review-recipe.html', 'https://olivesfordinner.com/wp-content/uploads/2015/07/19685670870_0e2090a8ce_c-320x320.jpg'], ['Easy Miso Soup', 'https://olivesfordinner.com/2015/04/easy-miso-soup.html', 'https://olivesfordinner.com/wp-content/uploads/2015/04/easycover-320x320.jpg'], ['Fire Noodles with Crispy Tofu', 'https://olivesfordinner.com/2015/04/fire-noodles-with-crispy-tofu.html', 'https://olivesfordinner.com/wp-content/uploads/2015/04/FN-final2-320x320.jpg'], ['Carrot Lox', 'https://olivesfordinner.com/2015/03/carrot-lox.html', 'https://olivesfordinner.com/wp-content/uploads/2015/03/16549307059_ba959fc04a_z-320x320.jpg'], 
['Salt-Roasted Golden Beets with Teriyaki Sauce and Nori Dust', 'https://olivesfordinner.com/2015/02/salt-roasted-golden-beets-with-teriyaki.html', 'https://olivesfordinner.com/wp-content/uploads/2015/02/goldenbeetcover2-320x320.jpg'], ['Roasted Garlic and Sriracha Hummus', 'https://olivesfordinner.com/2011/09/roasted-garlic-and-sriracha-hummus.html', 'https://olivesfordinner.com/wp-content/uploads/2011/09/6116553668_6f1645590d-320x320.jpg'], ['Beer-Battered Hearts of Palm with Dill Slaw and Quick Pickles', 'https://olivesfordinner.com/2015/02/beer-battered-hearts-of-palm-with-di.html', 'https://olivesfordinner.com/wp-content/uploads/2015/02/HOPcover-320x320.jpg'], ['Purple Cauliflower Crust Pizza with Garlic Oil', 'https://olivesfordinner.com/2015/02/purple-cauliflower-crust-pizza-wi.html', 'https://olivesfordinner.com/wp-content/uploads/2015/02/16288187119_b3f3aea0fe_z-320x320.jpg'], ['Sambal and Peanut Butter Ramen Noodles with Tofu', 'https://olivesfordinner.com/2014/07/sambal-and-peanut-butter-ramen-noodles.html', 'https://olivesfordinner.com/wp-content/uploads/2014/07/14580181994_358e518082_z-320x320.jpg'], ['Faux Pork Wonton Soup with Bok Choy', 'https://olivesfordinner.com/2014/06/faux-pork-wonton-soup-with-bok-choy.html', 'https://olivesfordinner.com/wp-content/uploads/2014/06/fauxporkwithscript-320x320.jpg'], ['Roasted Thai Eggplant with Cherry Tomatoes and Basil', 'https://olivesfordinner.com/2014/06/roasted-thai-eggplant-with-cherry.html', 'https://olivesfordinner.com/wp-content/uploads/2014/06/12893805454_064acd1b54_z-320x320.jpg'], ['Caramelized Vegan Scallops in Pasta with a Minted Pea Puree', 'https://olivesfordinner.com/2014/05/caramelized-vegan-scallops-in-pasta.html', 'https://olivesfordinner.com/wp-content/uploads/2014/05/14220916831_b3cf856bd6_z-320x320.jpg'], ['General Tso’s Cauliflower', 'https://olivesfordinner.com/2014/05/general-tsos-cauliflower.html', 
'https://olivesfordinner.com/wp-content/uploads/2014/05/14030895357_a5df788e37_z-320x320.jpg'], ['Focaccia Topped with Mâche and Fresh Tomatoes + Some News!', 'https://olivesfordinner.com/2014/04/focaccia-topped-with-mache-and-fres.html', 'https://olivesfordinner.com/wp-content/uploads/2014/04/AA4A4808-320x320.jpg'], ['Miso-Scented Portobello with Garlic Cauliflower Mash', 'https://olivesfordinner.com/2014/04/miso-scented-portobello-with-garlic.html', 'https://olivesfordinner.com/wp-content/uploads/2014/04/AA4A8890-320x320.jpg'], ['Springtime Green Tea Noodle Salad', 'https://olivesfordinner.com/2014/03/springtime-green-tea-noodle-salad.html', 'https://olivesfordinner.com/wp-content/uploads/2014/03/13312619903_1c48fff9b1_c-320x320.jpg'], ['Sesame Crusted Fu with Soy-Agave Sauce', 'https://olivesfordinner.com/2014/03/sesame-crusted-fu-with-soy-agave-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2014/03/11393685196_1b23d255a4_c-320x320.jpg'], ['How to Towel Press Tofu for Marinating', 'https://olivesfordinner.com/2014/02/how-to-towel-press-tofu-for-marinating.html', 'https://olivesfordinner.com/wp-content/uploads/2014/02/12501525913_6544d0f2f6_c-320x320.jpg'], ['Peppery Tofu with Chinese 5-Spice Powder and Black Bean Sauce', 'https://olivesfordinner.com/2014/01/peppery-tofu-with-chinese-5-spice.html', 'https://olivesfordinner.com/wp-content/uploads/2014/01/11609819593_f650d4e6b1_c-320x320.jpg'], ['Baked Oyster Mushrooms with Dynamite Sauce', 'https://olivesfordinner.com/2014/01/baked-oyster-mushrooms-with-dynamite.html', 'https://olivesfordinner.com/wp-content/uploads/2014/01/11915675694_4308cf083b_c-320x320.jpg'], ['15-Minute Quick and Easy Tofu', 'https://olivesfordinner.com/2014/01/15-minute-quick-and-easy-tofu.html', 'https://olivesfordinner.com/wp-content/uploads/2014/01/11918810306_4ae3a50c28-320x320.jpg'], ['Shiitake Risotto', 'https://olivesfordinner.com/2014/01/shiitake-risotto.html', 
'https://olivesfordinner.com/wp-content/uploads/2014/01/11816804405_82cb53837a-320x320.jpg'], ['Curry-Scented Soy Curls with Sesame-Cinnamon Dressing', 'https://olivesfordinner.com/2014/01/curry-scented-soy-curls-with-sesame.html', 'https://olivesfordinner.com/wp-content/uploads/2014/01/11715133245_e7cf858e43-320x320.jpg'], ['Oyster Mushroom Wonton Soup with Wilted Kale', 'https://olivesfordinner.com/2013/12/oyster-mushroom-wonton-soup-with-wilted.html', 'https://olivesfordinner.com/wp-content/uploads/2013/12/11275548074_38b8089957_o-320x320.jpg'], ['Cauliflower and Cashew Cream Soup', 'https://olivesfordinner.com/2013/12/cauliflower-and-cashew-cream-soup.html', 'https://olivesfordinner.com/wp-content/uploads/2013/12/AA4A6617-320x320.jpg'], ['Chickpea Salad Sandwich', 'https://olivesfordinner.com/2011/04/chickpea-salad-sandwic.html', 'https://olivesfordinner.com/wp-content/uploads/2011/04/5587319628_af0b053223-320x320.jpg'], ['Roasted Cauliflower Steaks with Oyster Mushroom Gravy', 'https://olivesfordinner.com/2013/11/roasted-cauliflower-steaks-with-oyster.html', 'https://olivesfordinner.com/wp-content/uploads/2013/11/10788586614_17f292d810_c-320x320.jpg'], ['Baked Pumpkin Ravioli with Rubbed Sage Cream', 'https://olivesfordinner.com/2013/11/baked-pumpkin-ravioli-with-rubbed-sage.html', 'https://olivesfordinner.com/wp-content/uploads/2013/11/10632300434_87bb849788_o-320x320.jpg'], ['Toasted Ravioli Stuffed with Cultured Cashew Cheese', 'https://olivesfordinner.com/2013/10/toasted-ravioli-stuffed-with-cultured.html', 'https://olivesfordinner.com/wp-content/uploads/2013/10/9886909214_1a4a7510ab_z-320x320.jpg'], ['Vegan Tuna Salad', 'https://olivesfordinner.com/2013/10/vegan-tuna-salad.html', 'https://olivesfordinner.com/wp-content/uploads/2013/10/10370170004_c010d3a743_o-320x320.jpg'], ['Quick and Easy Fragrant Coconut Soup', 'https://olivesfordinner.com/2013/10/quick-and-easy-fragrant-coconut-soup.html', 
'https://olivesfordinner.com/wp-content/uploads/2013/10/10005529404_41114c01d5_z-320x320.jpg'], ['Panisse with Garlic-Ginger Sauce', 'https://olivesfordinner.com/2013/09/panisse-with-garlic-ginger-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2013/09/9824246796_06a9bf4e84_z-320x320.jpg'], ['Shiitake Nigiri', 'https://olivesfordinner.com/2013/09/shiitake-nigiri.html', 'https://olivesfordinner.com/wp-content/uploads/2013/09/eelcropped1-320x320.jpg'], ['Hearts of Palm Crab Cakes', 'https://olivesfordinner.com/2013/09/hearts-of-palm-crab-cakes.html', 'https://olivesfordinner.com/wp-content/uploads/2013/09/9489200230_b25a587ca4_z-320x320.jpg'], ['Quick and Easy Carrot-Ginger Tofu', 'https://olivesfordinner.com/2013/09/quick-and-easy-carrot-ginger-tofu.html', 'https://olivesfordinner.com/wp-content/uploads/2013/09/9704514050_96a7646c51_z-320x320.jpg'], ['Green Lentil, Pistachio and Walnut Fauxlafel', 'https://olivesfordinner.com/2013/09/green-lentil-pistachio-and-walnu.html', 'https://olivesfordinner.com/wp-content/uploads/2013/09/8999832064_98efa1b8ea_z-320x320.jpg'], ['Vegan Scallops with Pea Puree and Watercress', 'https://olivesfordinner.com/2013/09/vegan-scallops-with-pea-puree-and.html', 'https://olivesfordinner.com/wp-content/uploads/2013/09/9499486405_a84cbab602_z-320x320.jpg'], ['Easy Overnight Crockpot Oatmeal', 'https://olivesfordinner.com/2011/12/easy-overnight-crockpot-oatmea.html', 'https://olivesfordinner.com/wp-content/uploads/2011/12/6454925599_1a5f5946fa_z-320x320.jpg'], ['Jackfruit Soft Tacos', 'https://olivesfordinner.com/2013/08/jackfruit-soft-tacos.html', 'https://olivesfordinner.com/wp-content/uploads/2013/08/9539382971_352c7d72a8_z-320x320.jpg'], ['Farro Crab Cakes', 'https://olivesfordinner.com/2013/08/farro-crab-cakes.html', 'https://olivesfordinner.com/wp-content/uploads/2013/08/9480782318_360b947023_z-320x320.jpg'], ['Tofu with General Tso’s Sauce', 'https://olivesfordinner.com/2013/07/tofu-with-general-tsos-sauce.html', 
'https://olivesfordinner.com/wp-content/uploads/2013/07/9287074976_d03c30e8f2_z-320x320.jpg'], ['Sambal Seitan Skewers', 'https://olivesfordinner.com/2013/07/sambal-seitan-skewers.html', 'https://olivesfordinner.com/wp-content/uploads/2013/07/9223979993_d60294d84d_z-320x320.jpg'], ['Vegan Crab Rangoon', 'https://olivesfordinner.com/2013/06/vegan-crab-rangoon.html', 'https://olivesfordinner.com/wp-content/uploads/2013/06/9175256982_e66acedacb_z-320x320.jpg'], ['Quick and Easy Tofu with Ramen Noodles', 'https://olivesfordinner.com/2013/06/quick-and-easy-tofu-with-ramen-noodles.html', 'https://olivesfordinner.com/wp-content/uploads/2013/06/9063055880_7247de15fd_z-320x320.jpg'], ['Sweet Potato, Farro and Walnut Burgers with Homemade Pickles', 'https://olivesfordinner.com/2013/06/sweet-potato-farro-and-walnut-burgers.html', 'https://olivesfordinner.com/wp-content/uploads/2013/06/9051266379_2945ce81ff_z-320x320.jpg'], ['Fresh Spring Rolls with Soy Curls, Mango and Mint', 'https://olivesfordinner.com/2013/06/fresh-spring-rolls-with-soy-curls-mango.html', 'https://olivesfordinner.com/wp-content/uploads/2013/06/AA4A3748-320x320.jpg'], ['Sesame Roasted Cauliflower with Sriracha-Vegenaise Dressing', 'https://olivesfordinner.com/2013/05/sesame-roasted-cauliflower-wi.html', 'https://olivesfordinner.com/wp-content/uploads/2013/05/8732183749_75abe9d9d4_z-320x320.jpg'], ['Red Quinoa and Sweet Potato Croquettes', 'https://olivesfordinner.com/2013/05/red-quinoa-and-sweet-potato-croquettes.html', 'https://olivesfordinner.com/wp-content/uploads/2013/05/8732186825_ab08084ec0_z-320x320.jpg'], ['Buffalo Chickpea Soft Tacos with Avocado Sour Cream', 'https://olivesfordinner.com/2013/05/buffalo-chickpea-soft-tacos-wi.html', 'https://olivesfordinner.com/wp-content/uploads/2013/05/8652263659_135227c089_z-320x320.jpg'], ['Sweet and Sticky Cashew Tofu', 'https://olivesfordinner.com/2013/04/sweet-and-sticky-cashew-tofu.html', 
'https://olivesfordinner.com/wp-content/uploads/2013/04/8647185538_0fe8eeb9d7_z-320x320.jpg'], ['Farro + Beet Burgers with Kimchi Mayo', 'https://olivesfordinner.com/2018/09/farro-beet-burgers-with-kimchi-mayo.html', 'https://olivesfordinner.com/wp-content/uploads/2018/09/Farro-Beet-Burgers-13236-320x320.jpg'], ['Vegan Grilled Cheese with Shiitake Bacon and Tomato', 'https://olivesfordinner.com/2013/04/vegan-grilled-cheese-with-shiitake.html', 'https://olivesfordinner.com/wp-content/uploads/2013/04/8669439780_841b866c5e_z-320x320.jpg'], ['Glazed Tofu with Fiery Sriracha Pearls', 'https://olivesfordinner.com/2013/04/glazed-tofu-with-fiery-sriracha-pearls.html', 'https://olivesfordinner.com/wp-content/uploads/2013/04/8646152741_dff8613fc7_z-320x320.jpg'], ['Four Vegan Grilled Cheese Sandwiches', 'https://olivesfordinner.com/2013/04/four-vegan-grilled-cheese-sandwiches.html', 'https://olivesfordinner.com/wp-content/uploads/2013/04/IMG_3382-320x320.jpg'], ['Mint, Basil and Cilantro Udon Noodle Bowl', 'https://olivesfordinner.com/2013/04/mint-basil-and-cilantro-udon-noodle-bow.html', 'https://olivesfordinner.com/wp-content/uploads/2013/04/8614208893_90e8e16d04_z-320x320.jpg'], ['Pistachio-Crusted Tofu with Horseradish Cream', 'https://olivesfordinner.com/2013/03/pistachio-crusted-tofu-with-horseradis.html', 'https://olivesfordinner.com/wp-content/uploads/2013/03/8542590853_d4b66ecea0_z-320x320.jpg'], ['Homemade Vegan Sausage and Cheese Calzones', 'https://olivesfordinner.com/2013/03/homemade-vegan-sausage-and-cheese.html', 'https://olivesfordinner.com/wp-content/uploads/2013/03/8495059876_03242ab476_z-320x320.jpg'], ['Tofu and Shiitake Stack with Bok Choy-Ginger Puree', 'https://olivesfordinner.com/2013/03/tofu-and-shiitake-stack-with-bok-choy.html', 'https://olivesfordinner.com/wp-content/uploads/2013/03/8525187049_08c1c0e9c4_z-320x320.jpg'], ['Kale, Pearled Barley and Faux Roe Stack with Sweet Soy Glaze', 
'https://olivesfordinner.com/2013/02/kale-pearled-barley-and-faux-roe-stack.html', 'https://olivesfordinner.com/wp-content/uploads/2013/02/8505006984_158bf7f337_z-320x320.jpg'], ['Maifun Noodles in a Toasted Sesame-Ginger Broth', 'https://olivesfordinner.com/2013/02/maifun-noodles-in-toasted-sesame-ginger.html', 'https://olivesfordinner.com/wp-content/uploads/2013/02/8478465251_74975b32fc_z-320x320.jpg'], ['Socca Pizza Crust with Caramelized Shallots and Kale', 'https://olivesfordinner.com/2013/02/socca-pizza-crust-with-caramelized.html', 'https://olivesfordinner.com/wp-content/uploads/2013/02/8446596405_1d37fa4f92_z-320x320.jpg'], ['Spicy Vegan Scallop Roll', 'https://olivesfordinner.com/2013/01/spicy-vegan-scallop-ro.html', 'https://olivesfordinner.com/wp-content/uploads/2013/01/8421544856_6542d6a4b3_z-320x320.jpg'], ['Vegan Reuben Sandwich', 'https://olivesfordinner.com/2013/01/vegan-reuben-sandwic.html', 'https://olivesfordinner.com/wp-content/uploads/2013/01/8395416871_c2e6155d3b_z-320x320.jpg'], ['Soft Pretzel Bites', 'https://olivesfordinner.com/2012/10/soft-pretzel-bites.html', 'https://olivesfordinner.com/wp-content/uploads/2012/10/8076551157_4fd577e190_z-320x320.jpg'], ['Sriracha-Habanero Vegan Buffalo Wings', 'https://olivesfordinner.com/2013/01/sriracha-habanero-vegan-buffalo-wings.html', 'https://olivesfordinner.com/wp-content/uploads/2013/01/8335188506_7e166e2f61_z-1-320x320.jpg'], ['Easy Sesame Glazed Tofu', 'https://olivesfordinner.com/2013/01/easy-sesame-glazed-tofu.html', 'https://olivesfordinner.com/wp-content/uploads/2013/01/8326773028_c169becd58_z-320x320.jpg'], ['Sweet and Sour Crispy Mock Chicken Stir Fry', 'https://olivesfordinner.com/2012/12/sweet-and-sour-crispy-mock-chicken-stir.html', 'https://olivesfordinner.com/wp-content/uploads/2012/12/8279367354_b0f80b3b2d_z-320x320.jpg'], ['Vegan Palak Paneer', 'https://olivesfordinner.com/2012/12/vegan-palak-paneer.html', 
'https://olivesfordinner.com/wp-content/uploads/2012/12/8258313683_3b2437bbd3_z-320x320.jpg'], ['Black Rice Noodles with Fresh Kale, Mint and Basil', 'https://olivesfordinner.com/2012/12/black-rice-noodles-with-fresh-kale-min.html', 'https://olivesfordinner.com/wp-content/uploads/2012/12/8255864336_008310cdd6_z-320x320.jpg'], ['Vegan French Onion Soup Sandwich', 'https://olivesfordinner.com/2012/12/vegan-french-onion-soup-sandwic.html', 'https://olivesfordinner.com/wp-content/uploads/2012/12/8235083245_0d2d4cb46a_z-320x320.jpg'], ['Vegan Pot Pie with Portobella, Green Garbanzo and Shallots', 'https://olivesfordinner.com/2012/11/vegan-pot-pie-with-portobella-green.html', 'https://olivesfordinner.com/wp-content/uploads/2012/11/8151769754_a15689bd03_z-320x320.jpg'], ['Sage Stuffing and Shiitake-Shallot Gravy', 'https://olivesfordinner.com/2012/11/sage-stuffing-and-shiitake-shallot-gravy.html', 'https://olivesfordinner.com/wp-content/uploads/2012/11/8196192203_461d3a4b8e_z-320x320.jpg'], ['Shallot and Shiitake Seitan Wellington', 'https://olivesfordinner.com/2012/11/shallot-and-shiitake-seitan-wellington.html', 'https://olivesfordinner.com/wp-content/uploads/2012/11/8176377771_bebfa1ab0d_z-320x320.jpg'], ['Protein-Happy Quinoa Wraps from Vegan Sandwiches Save the Day!', 'https://olivesfordinner.com/2012/11/protein-happy-quinoa-wraps-from-vegan.html', 'https://olivesfordinner.com/wp-content/uploads/2012/11/8155643856_4a48f6617a_z-320x320.jpg'], ['Tofu Tikka Masala', 'https://olivesfordinner.com/2012/10/tofu-tikka-masala.html', 'https://olivesfordinner.com/wp-content/uploads/2012/10/8110173470_6644257513_z-320x320.jpg'], ['Vegan Scallops in a White Wine Cream Sauce over Pasta', 'https://olivesfordinner.com/2012/10/vegan-scallops-in-wine-wine-cream-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2012/10/8106318112_c63658492a_z-320x320.jpg'], ['PPK Chocolate Chip Cookies', 'https://olivesfordinner.com/2011/04/ppk-chocolate-chip-cookies.html', 
'https://olivesfordinner.com/wp-content/uploads/2011/04/5639003638_504f52bbf9-320x320.jpg'], ['White Bean and Mushroom Soup with Dill-Pepper Dumplings', 'https://olivesfordinner.com/2012/10/white-bean-and-mushroom-soup-with-di.html', 'https://olivesfordinner.com/wp-content/uploads/2012/10/8087081755_464006e571_z-320x320.jpg'], ['Roasted Red Pepper Mac and Cheese', 'https://olivesfordinner.com/2012/10/roasted-red-pepper-mac-and-cheese.html', 'https://olivesfordinner.com/wp-content/uploads/2012/10/8060234137_cfb80515f8_z-320x320.jpg'], ['Sesame-Ginger Soba Noodles', 'https://olivesfordinner.com/2012/10/sesame-ginger-soba-noodles.html', 'https://olivesfordinner.com/wp-content/uploads/2012/10/8064732272_38b0391bfa_z-320x320.jpg'], ['Homemade Thai Sweet Chili Sauce with Fried Tofu', 'https://olivesfordinner.com/2012/10/homemade-thai-sweet-chili-sauce-wi.html', 'https://olivesfordinner.com/wp-content/uploads/2012/10/8017588970_dd4a45740c-320x320.jpg'], ['Vegan Bacon-Wrapped Scallops with Paprika Cream Sauce', 'https://olivesfordinner.com/2012/10/vegan-bacon-wrapped-scallops-wi.html', 'https://olivesfordinner.com/wp-content/uploads/2012/10/8044749163_c4e55e1c58_z-320x320.jpg'], ['Raw Tahini and Cashew Dressing', 'https://olivesfordinner.com/2012/09/raw-tahini-and-cashew-dressing.html', 'https://olivesfordinner.com/wp-content/uploads/2012/09/IMG_1464-320x320.jpg'], ['Vegan Grilled Cheese with Smoky Tomato Soup', 'https://olivesfordinner.com/2012/09/vegan-grilled-cheese-with-smoky-tomato.html', 'https://olivesfordinner.com/wp-content/uploads/2012/09/7993534714_73858b07d3-320x320.jpg'], ['Ratatouille (Confit Byaldi)', 'https://olivesfordinner.com/2012/09/ratatouille-confit-byaldi.html', 'https://olivesfordinner.com/wp-content/uploads/2012/09/12193750456_691804522f_c-320x320.jpg'], ['Cheese-Stuffed Homemade Ravioli with White Wine Sauce', 'https://olivesfordinner.com/2012/09/cheese-stuffed-homemade-ravioli-wi.html', 
'https://olivesfordinner.com/wp-content/uploads/2012/09/7906875742_ab4cd8a52a-320x320.jpg'], ['Kale, Tofu and Cashew Stir-Fry with Cold Soba Noodles', 'https://olivesfordinner.com/2012/08/kale-tofu-and-cashew-stir-fry-with-cold.html', 'https://olivesfordinner.com/wp-content/uploads/2012/08/7883972382_e7373658bb-320x320.jpg'], ['Za’atar Tofu', 'https://olivesfordinner.com/2012/08/zaatar-tofu.html', 'https://olivesfordinner.com/wp-content/uploads/2012/08/7866869068_5cef523a33_z-320x320.jpg'], ['Savory Corn Pancakes with IKEA Vegan Caviar', 'https://olivesfordinner.com/2012/08/savory-corn-pancakes-with-ikea-vegan.html', 'https://olivesfordinner.com/wp-content/uploads/2012/08/7808905436_5fb2b5f1a8_z-320x320.jpg'], ['Savory + Seared Watermelon', 'https://olivesfordinner.com/2016/06/savoryseared-watermelon.html', 'https://olivesfordinner.com/wp-content/uploads/2016/05/watermelon-cover-320x320.jpg'], ['Grilled Sambal Oelek Tofu with Peanut Butter Sauce', 'https://olivesfordinner.com/2012/08/grilled-sambal-oelek-tofu-with-peanu.html', 'https://olivesfordinner.com/wp-content/uploads/2012/08/7768265260_78510a0aea_z-320x320.jpg'], ['Pasta with Roasted Red Pepper Sauce and Caramelized Shallots', 'https://olivesfordinner.com/2012/08/pasta-with-roasted-red-pepper-sauce-and.html', 'https://olivesfordinner.com/wp-content/uploads/2012/08/7762912426_494a4743ce_z-320x320.jpg'], ['Deep-Fried Vegan Mac and Cheese', 'https://olivesfordinner.com/2012/07/deep-fried-vegan-mac-and-cheese.html', 'https://olivesfordinner.com/wp-content/uploads/2012/07/7665545816_98fb6efe94-320x320.jpg'], ['Minted Red Quinoa, Fava Bean and Cashew Salad', 'https://olivesfordinner.com/2012/07/minted-red-quinoa-fava-bean-and-cashew.html', 'https://olivesfordinner.com/wp-content/uploads/2012/07/7622670174_b39801759b-320x320.jpg'], ['Gingerade Kombucha Caviar', 'https://olivesfordinner.com/2012/06/gingerade-kombucha-caviar.html', 
'https://olivesfordinner.com/wp-content/uploads/2012/06/7426883562_bff8f197b4-320x320.jpg'], ['Oyster Mushroom Wonton and Lemongrass Soup', 'https://olivesfordinner.com/2012/06/oyster-mushroom-wonton-and-lemongrass.html', 'https://olivesfordinner.com/wp-content/uploads/2012/06/7382263230_d73039a21b-320x320.jpg'], ['Black Lentil, Pistachio and Shiitake Mushroom Burger', 'https://olivesfordinner.com/2012/05/black-lentil-pistachio-and-shiitake.html', 'https://olivesfordinner.com/wp-content/uploads/2012/05/7289963524_945a2a56b9-320x320.jpg'], ['Black Pepper and Thyme Crusted Tofu', 'https://olivesfordinner.com/2012/05/black-pepper-and-thyme-crusted-tofu.html', 'https://olivesfordinner.com/wp-content/uploads/2012/05/7237306376_63d9934023-320x320.jpg'], ['Spicy Jackfruit Salad', 'https://olivesfordinner.com/2012/05/spicy-jackfruit-salad.html', 'https://olivesfordinner.com/wp-content/uploads/2012/05/7183288902_62c2826f8d-320x320.jpg'], ['Vegan Sushi: Faux-Roe Gunkanmaki with Pickled Daikon', 'https://olivesfordinner.com/2012/04/vegan-sushi-faux-roe-gunkanmaki-with_16.html', 'https://olivesfordinner.com/wp-content/uploads/2012/04/6936157558_aaf4a19acb-320x320.jpg'], ['Steamed Tofu with Spicy Black Bean Sauce', 'https://olivesfordinner.com/2012/04/steamed-tofu-with-spicy-black-bean.html', 'https://olivesfordinner.com/wp-content/uploads/2012/04/7051589731_bbdc77e088-320x320.jpg'], ['Fresh Vegan Mozzarella Pizza', 'https://olivesfordinner.com/2012/03/fresh-vegan-mozzarella-pizza.html', 'https://olivesfordinner.com/wp-content/uploads/2012/03/7012328143_2537723fed_z-320x320.jpg'], ['Peppery Tofu with Chinese 5-Spice Powder and Black Bean Sauce', 'https://olivesfordinner.com/2014/01/peppery-tofu-with-chinese-5-spice.html', 'https://olivesfordinner.com/wp-content/uploads/2014/01/11609819593_f650d4e6b1_c-320x320.jpg'], ['Roasted Eggplant and Hummus Sandwich', 'https://olivesfordinner.com/2012/03/roasted-egglant-and-hummus-sandwic.html', 
'https://olivesfordinner.com/wp-content/uploads/2012/03/6847432936_61d6d916a1_z-320x320.jpg'], ['Garlic-Sriracha Vegan Buffalo Wings', 'https://olivesfordinner.com/2012/03/garlic-sriracha-vegan-buffalo-wings.html', 'https://olivesfordinner.com/wp-content/uploads/2012/03/6973941693_4db60198cb_z-320x320.jpg'], ['Molecular Vegan: Scallops with Carrot-Ginger Caviar', 'https://olivesfordinner.com/2012/02/molecular-vegan-scallops-with-carro.html', 'https://olivesfordinner.com/wp-content/uploads/2012/02/6786920836_eb7688d3fd_z-320x320.jpg'], ['Galangal Tofu with Lemongrass-Scented Broth', 'https://olivesfordinner.com/2012/02/galangal-tofu-with-lemongrass-scented.html', 'https://olivesfordinner.com/wp-content/uploads/2012/02/6866617493_299419355a_z-320x320.jpg'], ['Vegan Macaroni and Cheese', 'https://olivesfordinner.com/2012/02/vegan-macaroni-and-cheese.html', 'https://olivesfordinner.com/wp-content/uploads/2012/02/6864169847_3f2925df44_z-320x320.jpg'], ['Portabello and Shallot Ravioli with Toasted Walnuts', 'https://olivesfordinner.com/2012/01/portabello-and-shallot-ravioli-wi.html', 'https://olivesfordinner.com/wp-content/uploads/2012/01/6784743515_db35f73545_z-320x320.jpg'], ['Ginger-Garlic Tofu with Fiery Chili Oil', 'https://olivesfordinner.com/2012/01/ginger-garlic-tofu-with-fiery-chili-oi.html', 'https://olivesfordinner.com/wp-content/uploads/2012/01/6745971851_5d4457be90_z-320x320.jpg'], ['Spicy Shiitake Mushroom Roll', 'https://olivesfordinner.com/2011/12/spicy-shiitake-mushroom-ro.html', 'https://olivesfordinner.com/wp-content/uploads/2011/12/6489807867_ce5d312d01_z-320x320.jpg'], ['Black Bean and Corn Quesadillas with Smoked Paprika', 'https://olivesfordinner.com/2011/12/black-bean-and-corn-quesadillas-wi.html', 'https://olivesfordinner.com/wp-content/uploads/2011/12/6405288727_73a635e8d3_z-320x320.jpg'], ['Spicy Pumpkin Soup with Lemongrass and Coconut Milk', 'https://olivesfordinner.com/2011/11/spicy-pumpkin-soup-with-lemongrass-and.html', 
'https://olivesfordinner.com/wp-content/uploads/2011/11/6370959129_cc4e826b6e_z-320x320.jpg'], ['Seitan Roulade with Sage and Sweet Onion Stuffing', 'https://olivesfordinner.com/2011/11/seitan-roulade-with-sage-and-swee.html', 'https://olivesfordinner.com/wp-content/uploads/2011/11/6338385248_d552579dd6_z-320x320.jpg'], ['Grilled Tofu with Lemongrass and Cilantro Stuffing', 'https://olivesfordinner.com/2011/10/grilled-tofu-with-lemongrass-and.html', 'https://olivesfordinner.com/wp-content/uploads/2011/10/6251395078_631e498749-320x320.jpg'], ['Sriracha-Habanero Vegan Buffalo Wings', 'https://olivesfordinner.com/2013/01/sriracha-habanero-vegan-buffalo-wings.html', 'https://olivesfordinner.com/wp-content/uploads/2013/01/8335188506_7e166e2f61_z-1-320x320.jpg'], ['Faux Fish and Real Chips', 'https://olivesfordinner.com/2011/10/faux-fish-and-real-chips.html', 'https://olivesfordinner.com/wp-content/uploads/2011/10/6205471514_3aebc3ce03-320x320.jpg'], ['Sriracha and Peanut Butter Tofu', 'https://olivesfordinner.com/2011/09/sriracha-and-peanut-butter-tofu.html', 'https://olivesfordinner.com/wp-content/uploads/2011/09/6177319593_1790a4c72a-320x320.jpg'], ['Spicy Fava Bean Falafel', 'https://olivesfordinner.com/2011/09/spicy-fava-bean-falafe.html', 'https://olivesfordinner.com/wp-content/uploads/2011/09/6116560812_4e068eeb74-320x320.jpg'], ['Lemongrass and Garlic Stuffed Tofu', 'https://olivesfordinner.com/2011/08/lemongrass-and-garlic-stuffed-tofu.html', 'https://olivesfordinner.com/wp-content/uploads/2011/08/5995896848_e75fd561f1-320x320.jpg'], ['Minted Pea Soup with Cashew Cream and Basil Oil', 'https://olivesfordinner.com/2011/07/minted-pea-soup-with-cashew-cream-and.html', 'https://olivesfordinner.com/wp-content/uploads/2011/07/5901979517_8694056f1a_z-320x320.jpg'], ['Vegan Crab Cakes with Sriracha-Vegenaise Dressing', 'https://olivesfordinner.com/2011/06/vegan-crab-cakes-with-sriracha.html', 
'https://olivesfordinner.com/wp-content/uploads/2011/06/5874981554_d77f709a58_z-320x320.jpg'], ['Simple Sauteed Tofu with Crispy Sesame Kale', 'https://olivesfordinner.com/2011/05/simple-sauteed-tofu-with-crispy-sesame.html', 'https://olivesfordinner.com/wp-content/uploads/2011/05/5773981218_9c803c3c7e_z-320x320.jpg'], ['Vegan French Onion Soup', 'https://olivesfordinner.com/2011/05/vegan-french-onion-soup.html', 'https://olivesfordinner.com/wp-content/uploads/2011/05/5750417424_685a60cc8c_z-320x320.jpg'], ['Lemongrass Tofu with Satay Sauce', 'https://olivesfordinner.com/2011/05/lemongrass-tofu-with-satay-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2011/05/5772886868_33b2b5f461_z-320x320.jpg'], ['Tempeh Reuben with Sriracha-Vegenaise Dressing', 'https://olivesfordinner.com/2011/04/tempeh-reuben-with-sriracha-vegenaise.html', 'https://olivesfordinner.com/wp-content/uploads/2011/04/5631338611_eb6d6dc5d0-320x320.jpg'], ['Vegan Clam Chowder', 'https://olivesfordinner.com/2011/04/vegan-clam-chowder.html', 'https://olivesfordinner.com/wp-content/uploads/2011/04/5610900417_d191a862c1-320x320.jpg'], ['Spinach, Mushroom and Soysage Tart', 'https://olivesfordinner.com/2011/04/spinach-mushroom-and-soysage-tar.html', 'https://olivesfordinner.com/wp-content/uploads/2011/04/5586726243_1c4cf386f6-320x320.jpg'], ['Raw Kale with Tofu Croutons and Pomegranate-Sesame Dressing', 'https://olivesfordinner.com/2011/03/raw-kale-with-tofu-croutons-and.html', 'https://olivesfordinner.com/wp-content/uploads/2011/03/8196256924_5a9c7eeaf4_z-320x320.jpg'], ['Chickpea Salad Sandwich', 'https://olivesfordinner.com/2011/04/chickpea-salad-sandwic.html', 'https://olivesfordinner.com/wp-content/uploads/2011/04/5587319628_af0b053223-320x320.jpg'], ['Raw Kale with Tofu Croutons and Pomegranate-Sesame Dressing', 'https://olivesfordinner.com/2011/03/raw-kale-with-tofu-croutons-and.html', 'https://olivesfordinner.com/wp-content/uploads/2011/03/8196256924_5a9c7eeaf4_z-320x320.jpg'], 
['Minted Quinoa Spring Rolls with Toasted Cashews and Tahini', 'https://olivesfordinner.com/2011/03/minted-quinoa-spring-rolls-with-toasted.html', 'https://olivesfordinner.com/wp-content/uploads/2011/03/5507119039_254cb9c4dc-320x320.jpg'], ['Roasted Garlic Soup with Carmelized Shallots and Sage Croutons', 'https://olivesfordinner.com/2011/02/roasted-garlic-soup-with-carmelized.html', 'https://olivesfordinner.com/wp-content/uploads/2011/02/5480624870_19cda005b7-320x320.jpg'], ['Rosemary Soysage, Spinach and Mushroom with Farfalle', 'https://olivesfordinner.com/2011/02/rosemary-soysage-spinach-and-mushroo.html', 'https://olivesfordinner.com/wp-content/uploads/2011/02/5462958033_6aae203183-320x320.jpg'], ['Vegan Tom Kha Gai', 'https://olivesfordinner.com/2011/02/vegan-tom-kha-gai.html', 'https://olivesfordinner.com/wp-content/uploads/2011/02/5439733635_127c9ba730-320x320.jpg'], ['Field Roast, Oyster Mushrooms and Tiny Potatoes Over Polenta', 'https://olivesfordinner.com/2011/02/field-roast-oyster-mushrooms-and-tiny.html', 'https://olivesfordinner.com/wp-content/uploads/2011/02/5403091742_e1b485701c_z-320x320.jpg'], ['Balsamic-Agave Tofu with Beet Infused Couscous', 'https://olivesfordinner.com/2011/01/balsamic-agave-tofu-with-beet-infused.html', 'https://olivesfordinner.com/wp-content/uploads/2011/01/5399578900_9e450f412b-320x320.jpg'], ['Vcon Chickpea Noodle Soup', 'https://olivesfordinner.com/2011/01/vcon-chickpea-noodle-soup.html', 'https://olivesfordinner.com/wp-content/uploads/2011/01/5383052832_f75d36fd4f-320x320.jpg'], ['Vcon Chickpea Cutlets with Mustard Roasted Potatoes', 'https://olivesfordinner.com/2011/01/vcon-chickpea-cutlets-with-mustard.html', 'https://olivesfordinner.com/wp-content/uploads/2011/01/5380883994_541579e3b8-320x320.jpg'], ['Falafel with Vegenaise Tahini', 'https://olivesfordinner.com/2011/01/falafe.html', 'https://olivesfordinner.com/wp-content/uploads/2011/01/5371628748_227f845e97-320x320.jpg'], ['General Tso’s Tofu', 
'https://olivesfordinner.com/2011/01/general-tsos-tofu.html', 'https://olivesfordinner.com/wp-content/uploads/2011/01/5362522702_3323f1f486-320x320.jpg'], ['Buttermilk-Battered + Air-Fried Tofu', 'https://olivesfordinner.com/2017/02/buttermilk-battered-air-fried-tofu.html', 'https://olivesfordinner.com/wp-content/uploads/2017/01/Buttermilk-Battered-and-Air-Fried-Tofu_cover-1-320x320.jpg'], ['Pasta with a Garlic, Butter and White Wine Sauce', 'https://olivesfordinner.com/2021/02/pasta-with-a-garlic-butter-and-white-wine-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2021/02/pasta-in-garlic-butter-white-wine-sauce-320x320.jpg'], ['Viral Tortilla Wrap Hack!', 'https://olivesfordinner.com/2021/01/viral-tortilla-wrap-hack.html', 'https://olivesfordinner.com/wp-content/uploads/2021/01/viral-wrap-hack-cover-320x320.jpg'], ['Vegan Mapo Tofu (Braised Tofu in a Spicy+Savory Sauce)', 'https://olivesfordinner.com/2021/01/vegan-mapo-tofu-braised-tofu-in-a-spicysavory-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2021/01/Vegan-Mapo-Tofu-cover-320x320.jpg'], ['Thai-Style Coconut + Crabless Soup', 'https://olivesfordinner.com/2021/01/thai-style-coconut-crabless-soup.html', 'https://olivesfordinner.com/wp-content/uploads/2021/01/Thai-Style-Coconut-Crabless-Soup-cover-320x320.jpg'], ['French Onion and Seitan Ramen', 'https://olivesfordinner.com/2021/01/french-onion-and-seitan-ramen.html', 'https://olivesfordinner.com/wp-content/uploads/2021/01/vegan-ramen-cover-shot-320x320.jpg'], ['Lobster Mushroom Tempura, Avocado and Kimchi+Mayo Sushi Rolls', 'https://olivesfordinner.com/2020/12/lobster-mushroom-tempura-avocado-and-kimchimayo-sushi-rolls.html', 'https://olivesfordinner.com/wp-content/uploads/2020/12/Lobster-Mushroom-Tempura-Sushi-Rolls-cover-dec-320x320.jpg'], ['Easy Focaccia', 'https://olivesfordinner.com/2020/12/easy-focaccia.html', 'https://olivesfordinner.com/wp-content/uploads/2020/12/focaccia-cover-1-320x320.jpg'], ['How to Make Homemade 
Ravioli', 'https://olivesfordinner.com/2020/11/how-to-make-homemade-ravioli.html', 'https://olivesfordinner.com/wp-content/uploads/2020/11/How-to-Make-Homemade-Ravioli-320x320.jpg'], ['Bang Bang Cauliflower Tacos', 'https://olivesfordinner.com/2020/09/bang-bang-cauliflower-tacos.html', 'https://olivesfordinner.com/wp-content/uploads/2020/08/Bang-Bang-Taco-cover-320x320.jpg'], ['Sweet Potato, Barley + Bean Burgers with Sloppy Sauce', 'https://olivesfordinner.com/2020/07/sweet-potato-barley-bean-burgers-with-sloppy-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2020/07/Sweet-Potato-Barley-Bean-Burgers-with-sloppy-Sauce-cover-320x320.jpg'], ['Crispy Filet No Fish', 'https://olivesfordinner.com/2020/06/crispy-filet-no-fish.html', 'https://olivesfordinner.com/wp-content/uploads/2020/06/Filet-No-Fish-cover-2-320x320.jpg'], ['Sweet Potato and Kidney Bean Smothered Burritos', 'https://olivesfordinner.com/2020/05/sweet-potato-and-kidney-bean-smothered-burritos.html', 'https://olivesfordinner.com/wp-content/uploads/2020/05/Smothered-Burrito-cover-320x320.jpg'], ['Spicy Garlic Tofu, https://www.pickuplimes.com/recipe/spicy-garlic-tofu-3, https://cdn.pickuplimes.com/cache/9c/9f/9c9f13ad4a9ef78bd0f2a9eae20547e3.jpg'], ['Buffalo Style Tofu Strips, https://www.pickuplimes.com/recipe/buffalo-style-tofu-strips-4, https://cdn.pickuplimes.com/cache/a1/30/a1307710aaf6c5babcb4a306339fa499.jpg'], ['Oven Roasted Sweet Potato & Tofu Veggie Skewers, https://www.pickuplimes.com/recipe/oven-roasted-sweet-potato-tofu-veggie-skewers-1, https://cdn.pickuplimes.com/cache/64/e7/64e73b49fe995bffa331e62f55c939bc.jpg'], ['Homemade Vegan Kimchi, https://www.pickuplimes.com/recipe/homemade-vegan-kimchi-9, https://cdn.pickuplimes.com/cache/cf/62/cf621ccd83ef84a81356d3c8860a6e81.jpg'], ['Herb & Garlic Homemade Vegan Naan, https://www.pickuplimes.com/recipe/herb-garlic-homemade-vegan-naan-64, https://cdn.pickuplimes.com/cache/ab/b4/abb4898bbd62c92714b560dad2dff577.jpg'], ['Baked Zucchini 
Fries with Two Dips, https://www.pickuplimes.com/recipe/baked-zucchini-fries-with-two-dips-70, https://cdn.pickuplimes.com/cache/64/1c/641cf5625fcaa58d861cbe7fffc868c3.jpg'], ['Sweet & Spicy Miso Roasted Sweet Potatoes with Sambal Tahini Mayo, https://www.pickuplimes.com/recipe/sweet-spicy-miso-roasted-sweet-potatoes-with-sambal-tahini-mayo-68, https://cdn.pickuplimes.com/cache/0a/1e/0a1e1b756c5256e0abb6675e90539e5d.jpg'], ['Seaweed Baked Fries with Hoisin Mayo & Sriracha Mayo, https://www.pickuplimes.com/recipe/seaweed-baked-fries-with-hoisin-mayo-sriracha-mayo-66, https://cdn.pickuplimes.com/cache/15/f0/15f001c3b4c39c530786256eb3126e9d.jpg'], ['Loaded Chili Sweet Potato Fries, https://www.pickuplimes.com/recipe/loaded-chili-sweet-potato-fries-65, https://cdn.pickuplimes.com/cache/92/b3/92b3936f77ff25b46e427245e15ecf04.jpg'], ['Crispy Oven-Roasted Sweet Potato Fries, https://www.pickuplimes.com/recipe/crispy-oven-roasted-sweet-potato-fries-78, https://cdn.pickuplimes.com/cache/13/0c/130c1e0c73485016f13837568eb33eaf.jpg'], ['Ranch Roasted Potato Wedges, https://www.pickuplimes.com/recipe/ranch-roasted-potato-wedges-142, https://cdn.pickuplimes.com/cache/82/9c/829c7334b8631856007d1f05d658129e.jpg'], ['Roasted Potatoes, https://www.pickuplimes.com/recipe/roasted-potatoes-144, https://cdn.pickuplimes.com/cache/ed/2c/ed2c6b4e244c276620f85f326eb6433a.jpg'], ['Sushi Rice, https://www.pickuplimes.com/recipe/sushi-rice-168, https://cdn.pickuplimes.com/cache/68/8b/688badd22d3b4ac1a057b9512e266a15.jpg'], ['Zucchini Rolls with Vegan Ricotta Cheese, https://www.pickuplimes.com/recipe/zucchini-rolls-with-vegan-ricotta-cheese-181, https://cdn.pickuplimes.com/cache/17/8a/178a338a01abec07f8107eb0a5d8f11b.jpg'], ['Vegan Tofu Chorizo, https://www.pickuplimes.com/recipe/vegan-tofu-chorizo-205, https://cdn.pickuplimes.com/cache/e7/c2/e7c2a733c0d8aaa269500ec71c3af072.jpg'], ['Protein-Packed Lentil & Quinoa Salad, 
https://www.pickuplimes.com/recipe/protein-packed-lentil-quinoa-salad-208, https://cdn.pickuplimes.com/cache/15/04/1504d4ea2b07ba79258a82cc030ddbc3.jpg'], ['Quinoa & Red Lentil Stuffed Sweet Potatoes, https://www.pickuplimes.com/recipe/quinoa-red-lentil-stuffed-sweet-potatoes-243, https://cdn.pickuplimes.com/cache/aa/c4/aac41bf2fb65d098acd6c69289030af6.jpg'], ['Brown Rice Miso Salad, https://www.pickuplimes.com/recipe/brown-rice-miso-salad-218, https://cdn.pickuplimes.com/cache/53/37/53377f3955ff8accf7363016ae77b34d.jpg'], ['Baked Dill Falafels, https://www.pickuplimes.com/recipe/baked-dill-falafels-225, https://cdn.pickuplimes.com/cache/6f/b0/6fb0b1590a306754de5c341f60e5f3dd.jpg'], ['Dill Pasta Salad with a Tahini Dressing, https://www.pickuplimes.com/recipe/dill-pasta-salad-with-a-tahini-dressing-267, https://cdn.pickuplimes.com/cache/17/fb/17fbfa1b010ba0135be4c8cff6188358.jpg'], ['Creamy Coleslaw with Tahini Dijon Dressing, https://www.pickuplimes.com/recipe/creamy-coleslaw-with-tahini-dijon-dressing-280, https://cdn.pickuplimes.com/cache/8c/99/8c998c4727043752bd1a1ba298b5000b.jpg'], ['Beetroot & Red Cabbage Sauerkraut, https://www.pickuplimes.com/recipe/beetroot-red-cabbage-sauerkraut-281, https://cdn.pickuplimes.com/cache/d2/30/d2306b343c15e9def90ab66418ef4116.jpg'], ['Warm Lentil & Potato Salad, https://www.pickuplimes.com/recipe/warm-lentil-potato-salad-300, https://cdn.pickuplimes.com/cache/ed/8c/ed8ce962058ba56ecd6e3c8616465e7c.jpg'], ['Potato Crisps with Creamy Dill Dip, https://www.pickuplimes.com/recipe/potato-crisps-with-creamy-dill-dip-302, https://cdn.pickuplimes.com/cache/e0/e9/e0e9fc98f352d10d017ef791175ed29a.jpg'], ['Coconut Thai Green Curry Noodle Soup, https://www.pickuplimes.com/recipe/coconut-thai-green-curry-noodle-soup-377, https://cdn.pickuplimes.com/cache/53/e8/53e89e4f540d2b815e2c661693ee3085.jpg'], ['One-Pot Golden Red Lentil and Mushroom Curry Soup, 
https://www.pickuplimes.com/recipe/one-pot-golden-red-lentil-and-mushroom-curry-soup-375, https://cdn.pickuplimes.com/cache/23/38/2338263f32972e4df27253106a3c0e50.jpg'], ['Peanut Butter Curry Soup, https://www.pickuplimes.com/recipe/peanut-butter-curry-soup-97, https://cdn.pickuplimes.com/cache/d7/89/d789fef1057ff95baf9973dc21f551db.jpg'], ['Crispy Croutons, https://www.pickuplimes.com/recipe/crispy-croutons-96, https://cdn.pickuplimes.com/cache/55/d0/55d0c851ede5e28e2fd461c0015cbae3.jpg'], ['Roasted Vegetable Harvest Soup, https://www.pickuplimes.com/recipe/roasted-vegetable-harvest-soup-95, https://cdn.pickuplimes.com/cache/7e/e0/7ee0e714e3f739b264d2861d99438059.jpg'], ['One-Pot Veggie Lentil Soup, https://www.pickuplimes.com/recipe/one-pot-veggie-lentil-soup-94, https://cdn.pickuplimes.com/cache/f5/82/f582268ae361a5b375c35cdf084fbe9f.jpg'], ['Borscht Soup, https://www.pickuplimes.com/recipe/borscht-soup-93, https://cdn.pickuplimes.com/cache/aa/c4/aac446ea0f7ee2f512b2c7e851591993.jpg'], ['Green Pea Soup, https://www.pickuplimes.com/recipe/green-pea-soup-92, https://cdn.pickuplimes.com/cache/ee/9d/ee9dc2a116fa7f9b5a4f774239d09d20.jpg'], ['White Asparagus & Potato Soup, https://www.pickuplimes.com/recipe/white-asparagus-potato-soup-149, https://cdn.pickuplimes.com/cache/83/f0/83f008c20089b9454f96850e0b3d39d3.jpg'], ['To-Go Miso Noodle Soup, https://www.pickuplimes.com/recipe/to-go-miso-noodle-soup-171, https://cdn.pickuplimes.com/cache/13/d4/13d4d28182a330de98d8671bcb3ca12d.jpg'], ['Thai Red Curry Noodle Soup with Crispy Tofu, https://www.pickuplimes.com/recipe/thai-red-curry-noodle-soup-with-crispy-tofu-175, https://cdn.pickuplimes.com/cache/83/8a/838a9bb7db57ae234505816bc06db22f.jpg'], ['One-Pot Lasagna Soup, https://www.pickuplimes.com/recipe/one-pot-lasagna-soup-184, https://cdn.pickuplimes.com/cache/e1/1d/e11da20477208c0f830f6d35944c704a.jpg'], ['Creamy Coconut Paprika Zucchini Soup, https://www.pickuplimes.com/recipe/creamy-coconut-paprika-zucchini-soup-191, 
https://cdn.pickuplimes.com/cache/83/03/83032368b6546a850bcbe5bf47b1a2c6.jpg'], ['Curry Lentil Soup, https://www.pickuplimes.com/recipe/curry-lentil-soup-241, https://cdn.pickuplimes.com/cache/6b/d9/6bd9891c6fa47cc12c9d87b3272c8cc9.jpg'], ['Vegan Pho - Vietnamese Noodle Soup, https://www.pickuplimes.com/recipe/vegan-pho-vietnamese-noodle-soup-258, https://cdn.pickuplimes.com/cache/90/3c/903c1ef58713599a7a3ed6c14c256e69.jpg'], ['Creamy Beetroot & Sweet Potato Soup, https://www.pickuplimes.com/recipe/creamy-beetroot-sweet-potato-soup-260, https://cdn.pickuplimes.com/cache/1b/d5/1bd565ec88ba59ec5c7ad74c7cbe4f7e.jpg'], ['Curried Carrot & Potato Soup, https://www.pickuplimes.com/recipe/curried-carrot-potato-soup-266, https://cdn.pickuplimes.com/cache/fc/b0/fcb0666a4071a8f90b9b2334b81c8789.jpg'], ['Coconut Curry Roasted Squash Soup, https://www.pickuplimes.com/recipe/coconut-curry-roasted-squash-soup-271, https://cdn.pickuplimes.com/cache/d0/7d/d07d61c7085554ca3724e6fe54d041c1.jpg'], ['Cream of Mushroom & Potato Soup, https://www.pickuplimes.com/recipe/cream-of-mushroom-potato-soup-283, https://cdn.pickuplimes.com/cache/e5/54/e554acb3a706b014f6ecc582193621c2.jpg'], ['Sweet Potato & Zucchini Soup, https://www.pickuplimes.com/recipe/sweet-potato-zucchini-soup-285, https://cdn.pickuplimes.com/cache/69/63/6963b656badb6a1fe77efbe778895963.jpg'], ['Turkish Red Lentil Soup, https://www.pickuplimes.com/recipe/turkish-red-lentil-soup-26, https://cdn.pickuplimes.com/cache/09/f9/09f97c231822621adeb89792c99e094e.jpg'], ['Easy Bulgur Salad in a Jar, https://www.pickuplimes.com/recipe/easy-bulgur-salad-in-a-jar-10, https://cdn.pickuplimes.com/cache/8f/dc/8fdcd4ee953efb0468319a0c7ead9e2a.jpg'], ['Spiced Black Bean & Rice Salad in a Jar, https://www.pickuplimes.com/recipe/spiced-black-bean-rice-salad-in-a-jar-11, https://cdn.pickuplimes.com/cache/fd/60/fd606e0af7c1aff85d27e893ec9637f7.jpg'], ['Quinoa Salad with Roasted Garlic Dressing, 
https://www.pickuplimes.com/recipe/quinoa-salad-with-roasted-garlic-dressing-129, https://cdn.pickuplimes.com/cache/ee/95/ee95277ccaf3ee618ae427dd1ac84eea.jpg'], ['Bold & Filling Lentil Taco Salad, https://www.pickuplimes.com/recipe/bold-filling-lentil-taco-salad-135, https://cdn.pickuplimes.com/cache/d6/72/d672bf6f158d745737f4f81bca1785c3.jpg'], ['Chorizo Taco Salad Bowl, https://www.pickuplimes.com/recipe/chorizo-taco-salad-bowl-204, https://cdn.pickuplimes.com/cache/72/27/72278a91ccb69cd17084d4882eccc8bb.jpg'], ['Protein-Packed Lentil & Quinoa Salad, https://www.pickuplimes.com/recipe/protein-packed-lentil-quinoa-salad-208, https://cdn.pickuplimes.com/cache/15/04/1504d4ea2b07ba79258a82cc030ddbc3.jpg'], ['Massaged Umami Kale Salad, https://www.pickuplimes.com/recipe/massaged-umami-kale-salad-237, https://cdn.pickuplimes.com/cache/dc/2d/dc2dee08b92225470022f90ffea682fd.jpg'], ['Sweet Lime Summer Fruit Salad, https://www.pickuplimes.com/recipe/sweet-lime-summer-fruit-salad-216, https://cdn.pickuplimes.com/cache/55/4f/554fcf65fb4a9fd85aecf1bce6540d65.jpg'], ['Brown Rice Miso Salad, https://www.pickuplimes.com/recipe/brown-rice-miso-salad-218, https://cdn.pickuplimes.com/cache/53/37/53377f3955ff8accf7363016ae77b34d.jpg'], ['Dill Pasta Salad with a Tahini Dressing, https://www.pickuplimes.com/recipe/dill-pasta-salad-with-a-tahini-dressing-267, https://cdn.pickuplimes.com/cache/17/fb/17fbfa1b010ba0135be4c8cff6188358.jpg'], ['Beetroot Quinoa Salad with an Orange Ginger Dressing, https://www.pickuplimes.com/recipe/beetroot-quinoa-salad-with-an-orange-ginger-dressing-278, https://cdn.pickuplimes.com/cache/ad/ab/adabb18ab0ed27ae84c6f441f22b226a.jpg'], ['Creamy Coleslaw with Tahini Dijon Dressing, https://www.pickuplimes.com/recipe/creamy-coleslaw-with-tahini-dijon-dressing-280, https://cdn.pickuplimes.com/cache/8c/99/8c998c4727043752bd1a1ba298b5000b.jpg'], ['Cauliflower and Mushroom Curry', 'https://olivesfordinner.com/2020/05/cauliflower-and-mushroom-curry.html', 
'https://olivesfordinner.com/wp-content/uploads/2020/05/Cauliflower-and-Mushroom-Curry-cover-320x320.jpg'], ['Easy Homemade Pizza Dough', 'https://olivesfordinner.com/2020/04/easy-homemade-pizza-dough.html', 'https://olivesfordinner.com/wp-content/uploads/2020/04/2-ingredient-pizza-dough-320x320.jpg'], ['Social Distancing Soup', 'https://olivesfordinner.com/2020/04/social-distancing-soup.html', 'https://olivesfordinner.com/wp-content/uploads/2020/04/social-distancing-soup-cover-320x320.jpg'], ['The Best Buffalo Cauliflower | Crispy + Spicy + Air Fried!', 'https://olivesfordinner.com/2019/11/the-best-buffalo-cauliflower-crispy-spicy-air-fried.html', 'https://olivesfordinner.com/wp-content/uploads/2019/11/Olives-for-Dinner-The-Best-Buffalo-Cauliflower-cover-320x320.jpg'], ['Oyster Mushroom Tacos with Chipotle + Lime Sauce', 'https://olivesfordinner.com/2019/08/oyster-mushroom-tacos-with-chipotle-lime-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2019/08/Oyster-Mushroom-Tacos-cover-320x320.jpg'], ['Vegan Lox', 'https://olivesfordinner.com/2019/08/vegan-lox.html', 'https://olivesfordinner.com/wp-content/uploads/2019/08/Carrot-Lox-cover-320x320.jpg'], ['Red Lentil Fritters with Mint-Garlic Yogurt Sauce', 'https://olivesfordinner.com/2019/07/red-lentil-fritters-with-mint-garlic-yogurt-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2019/07/Olives-for-Dinner-Red-Lentil-Fritters-cover-320x320.jpg'], ['Tofu Satay with Spicy Peanut Sauce', 'https://olivesfordinner.com/2019/07/tofu-satay-with-spicy-peanut-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2019/07/Grilled-Tofu-with-Peanut-Sauce-cover-320x320.jpg'], ['Vegan Reuben with Mandolined Portobello', 'https://olivesfordinner.com/2019/04/vegan-reuben-with-mandolined-portobello.html', 'https://olivesfordinner.com/wp-content/uploads/2019/04/Vegan-Reuben-cover-320x320.jpg'], ['Deep-fried Jackfruit with Garlicky-Dill Mayo', 
'https://olivesfordinner.com/2019/04/deep-fried-jackfruit-with-garlicky-dill-mayo.html', 'https://olivesfordinner.com/wp-content/uploads/2019/04/Deep-fried-Jackfruit-cover-320x320.jpg'], ['Cauliflower Puree with Roasted Mushrooms and Caramelized Onions', 'https://olivesfordinner.com/2019/03/cauliflower-puree-with-roasted-mushrooms-and-caramelized-onions.html', 'https://olivesfordinner.com/wp-content/uploads/2019/03/Olives-for-Dinner-Cauliflower-Puree-with-Roasted-Mushrooms-and-Caramelized-Onions-320x320.jpg'], ['Bang Bang Tofu Taco Bowl', 'https://olivesfordinner.com/2019/02/bang-bang-tofu-taco-bowl.html', 'https://olivesfordinner.com/wp-content/uploads/2019/02/Bang-Bang-Tofu-Taco-Bowl-cover-320x320.jpg'], ['Pasta with Roasted Red Pepper Sauce and Caramelized Shallots', 'https://olivesfordinner.com/2012/08/pasta-with-roasted-red-pepper-sauce-and.html', 'https://olivesfordinner.com/wp-content/uploads/2012/08/7762912426_494a4743ce_z-320x320.jpg'], ['Toasted Farro with Roasted Shiitake, Shallots and Pine Nuts', 'https://olivesfordinner.com/2018/11/toasted-farro-with-roasted-shiitake-shallots-and-pine-nuts.html', 'https://olivesfordinner.com/wp-content/uploads/2018/11/Toasted-Farro-with-Roasted-Shiitake-Shallots-and-Pine-Nuts-cover-320x320.jpg'], ['Sambal + Ginger Tofu with Air-Fried Bok Choy', 'https://olivesfordinner.com/2018/10/sambal-ginger-tofu-with-air-fried-bok-choy.html', 'https://olivesfordinner.com/wp-content/uploads/2018/10/Sambal-Ginger-Tofu-with-Air-Fried-Bok-Choy-_-cover-320x320.jpg'], ['Roasted Parsnip, Cauliflower + Garlic Soup', 'https://olivesfordinner.com/2018/10/roasted-parsnip-cauliflower-garlic-soup.html', 'https://olivesfordinner.com/wp-content/uploads/2018/10/Roasted-Parsnip-Cauliflower-Garlic-Soup-cover-320x320.jpg'], ['Riced Cauliflower + Pressed Portos', 'https://olivesfordinner.com/2018/09/riced-cauliflower-pressed-portos.html', 'https://olivesfordinner.com/wp-content/uploads/2018/09/Riced-Cauliflower-Pressed-Porto-cover-320x320.jpg'], 
['Farro + Beet Burgers with Kimchi Mayo', 'https://olivesfordinner.com/2018/09/farro-beet-burgers-with-kimchi-mayo.html', 'https://olivesfordinner.com/wp-content/uploads/2018/09/Farro-Beet-Burgers-13236-320x320.jpg'], ['Firecracker Cauliflower', 'https://olivesfordinner.com/2018/08/firecracker-cauliflower.html', 'https://olivesfordinner.com/wp-content/uploads/2018/08/Firecracker-Cauliflower-cover-320x320.jpg'], ['Vegan Egg Drop Soup', 'https://olivesfordinner.com/2018/08/vegan-egg-drop-soup.html', 'https://olivesfordinner.com/wp-content/uploads/2018/08/Vegan-Eggdrop-cover-320x320.jpg'], ['Seitan Bourguignon', 'https://olivesfordinner.com/2018/01/seitan-bourguignon.html', 'https://olivesfordinner.com/wp-content/uploads/2018/01/Seitan-Bourginon-cover1-320x320.jpg'], ['Quick + Easy Focaccia Pizza', 'https://olivesfordinner.com/2018/01/quick-easy-focaccia-pizza.html', 'https://olivesfordinner.com/wp-content/uploads/2018/01/Focaccia-cover-320x320.jpg'], ['New England Vegan Chowder', 'https://olivesfordinner.com/2017/12/new-england-vegan-chowder.html', 'https://olivesfordinner.com/wp-content/uploads/2017/12/New-England-Vegan-Chowder-cover-320x320.jpg'], ['Coconut-Chickpea Crepes with Smoky Herbed Mushrooms', 'https://olivesfordinner.com/2013/02/coconut-chickpea-crepes-with-smoky.html', 'https://olivesfordinner.com/wp-content/uploads/2013/02/8455834235_7bfba45015_z-320x320.jpg'], ['Vegan Scallops in a Bacon-Cashew Cream Sauce', 'https://olivesfordinner.com/2017/11/vegan-scallops-in-a-bacon-cashew-cream-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2017/10/King-Oyster-Mushroom-Scallops-in-a-Bacon-Cashew-Cream-Sauce-cover-320x320.jpg'], ['Grilled Bok Choy with Salty + Spicy Oyster Mushrooms', 'https://olivesfordinner.com/2017/10/grilled-bok-choy-with-salty-spicy-oyster-mushrooms.html', 'https://olivesfordinner.com/wp-content/uploads/2017/10/Grilled-Bok-Choy-with-Salty-Spicy-Oyster-Mushrooms-cover-320x320.jpg'], ['Air-Fried Buffalo Cauliflower Steaks', 
'https://olivesfordinner.com/2017/10/air-fried-buffalo-cauliflower-steaks.html', 'https://olivesfordinner.com/wp-content/uploads/2017/10/Air-Fried-Buffalo-Cauliflower-Steaks-cover-320x320.jpg'], ['Pasta with Roasted Tomatoes + Chickpeas', 'https://olivesfordinner.com/2017/10/pasta-with-roasted-tomatoes-chickpeas.html', 'https://olivesfordinner.com/wp-content/uploads/2017/09/Pasta-with-Roasted-Tomatoes-and-Chickpeas-cover-320x320.jpg'], ['Shroom Stroganoff', 'https://olivesfordinner.com/2017/09/shroom-stroganoff.html', 'https://olivesfordinner.com/wp-content/uploads/2017/08/Shroom-Stroganoff-cover-320x320.jpg'], ['Gobi Manchurian', 'https://olivesfordinner.com/2017/09/gobi-manchurian.html', 'https://olivesfordinner.com/wp-content/uploads/2017/09/Gobi-Manchurian-cover1-320x320.jpg'], ['Silky Bell Pepper Sushi', 'https://olivesfordinner.com/2017/08/silky-bell-pepper-sushi.html', 'https://olivesfordinner.com/wp-content/uploads/2017/08/Bell-Pepper-Sushi-cover-320x320.jpg'], ['Sweet Potato Tempura and Avocado Rolls with Teriyaki Glaze', 'https://olivesfordinner.com/2017/05/sweet-potato-tempura-and-avocado-rolls-with-teriyaki-glaze.html', 'https://olivesfordinner.com/wp-content/uploads/2017/05/Sweet-Potato-Tempura-and-Avocado-Rolls-with-Teriyaki-Glaze-cover-320x320.jpg'], ['Vegan Lobster Roll', 'https://olivesfordinner.com/2017/05/vegan-lobster-roll.html', 'https://olivesfordinner.com/wp-content/uploads/2017/05/Vegan-Lobster-Roll-cover-320x320.jpg'], ['Vegan Shrimp', 'https://olivesfordinner.com/2017/05/vegan-shrimp.html', 'https://olivesfordinner.com/wp-content/uploads/2017/04/Vegan-Shrimp-cover-320x320.jpg'], ['Korean-Style Tacos, made with Sweet Earth Brand Traditional Seitan (+bonus news!)', 'https://olivesfordinner.com/2017/04/korean-style-tacos-made-with-sweet-earth-brand-traditional-seitan-bonus-news.html', 'https://olivesfordinner.com/wp-content/uploads/2017/04/Korean-Style-Tacos-cover-320x320.jpg'], ['Maitake + Oyster Mushroom Wontons in a Roasted Ginger Broth', 
'https://olivesfordinner.com/2017/03/maitake-oyster-mushroom-wontons-in-a-roasted-ginger-broth.html', 'https://olivesfordinner.com/wp-content/uploads/2017/03/Maitake-Oyster-Mushroom-Wontons-cover-320x320.jpg'], ['Mock Eel with a Sticky+Sweet Ginger Sauce', 'https://olivesfordinner.com/2017/03/mock-eel-with-a-stickysweet-ginger-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2017/02/Mock-Eel-cover-320x320.jpg'], ['Vegan Po Boy', 'https://olivesfordinner.com/2017/03/vegan-po-boy.html', 'https://olivesfordinner.com/wp-content/uploads/2017/03/Vegan-Po-Boy-cover-320x320.jpg'], ['Carrot Lox Stuffed + Fried Ravioli', 'https://olivesfordinner.com/2017/03/carrot-lox-stuffed-fried-ravioli.html', 'https://olivesfordinner.com/wp-content/uploads/2017/02/Carrot-Lox-Stuffed-Fried-Ravioli-cover-320x320.jpg'], ['Japanese-Style Breakfast Bowl', 'https://olivesfordinner.com/2017/02/japanese-style-breakfast-bowl.html', 'https://olivesfordinner.com/wp-content/uploads/2017/02/Japanese-Style-Breakfast-Bowl-cover-320x320.jpg'], ['Buttermilk-Battered + Air-Fried Tofu', 'https://olivesfordinner.com/2017/02/buttermilk-battered-air-fried-tofu.html', 'https://olivesfordinner.com/wp-content/uploads/2017/01/Buttermilk-Battered-and-Air-Fried-Tofu_cover-1-320x320.jpg'], ['Veggie Grill Copycat Recipe | Koreatown Tacos', 'https://olivesfordinner.com/2017/01/veggie-grill-copycat-recipe-koreatown-tacos.html', 'https://olivesfordinner.com/wp-content/uploads/2017/01/Veggie-Grill-Copycat-Recipe-Koreatown-Tacos-320x320.png'], ['Roasted Balsamic Beets + Rutabaga', 'https://olivesfordinner.com/2017/01/roasted-balsamic-beets-rutabaga.html', 'https://olivesfordinner.com/wp-content/uploads/2017/01/Roasted-Balsamic-Beets-and-Rutabaga-cover-320x320.jpg'], ['Vegan Ramen', 'https://olivesfordinner.com/2017/01/vegan-ramen.html', 'https://olivesfordinner.com/wp-content/uploads/2017/01/Vegan-Ramen-cover-320x320.jpg'], ['Roasted Ginger and Coconut Soup', 
'https://olivesfordinner.com/2016/11/roasted-ginger-and-coconut-soup.html', 'https://olivesfordinner.com/wp-content/uploads/2016/11/Roasted-Ginger-320x320.jpg'], ['Farro Sausages', 'https://olivesfordinner.com/2016/11/farro-sausages.html', 'https://olivesfordinner.com/wp-content/uploads/2016/11/Farro-Sausage-320x320.jpg'], ['Savory + Crispy Vietnamese Crepes', 'https://olivesfordinner.com/2016/10/savory-crispy-vietnamese-crepes.html', 'https://olivesfordinner.com/wp-content/uploads/2016/10/Banh-Xeo-cover-320x320.jpg'], ['Savory + Crispy Vietnamese Crepes', 'https://olivesfordinner.com/2016/10/savory-crispy-vietnamese-crepes.html', 'https://olivesfordinner.com/wp-content/uploads/2016/10/Banh-Xeo-cover-320x320.jpg'], ['Buffalo Cauliflower Pizza', 'https://olivesfordinner.com/2016/10/buffalo-cauliflower-pizza.html', 'https://olivesfordinner.com/wp-content/uploads/2016/10/Buffalo-Cauliflower-Pizza-320x320.jpg'], ['Seared King Oyster Mushrooms + a Homemade Teriyaki Glaze', 'https://olivesfordinner.com/2016/10/seared-king-oyster-mushrooms-a-homemade-teriyaki-glaze.html', 'https://olivesfordinner.com/wp-content/uploads/2016/09/Seared-King-Oyster-Mushrooms-320x320.jpg'], ['Crispy Kung Pao Brussels Sprouts', 'https://olivesfordinner.com/2016/09/crispy-kung-pao-brussels-sprouts.html', 'https://olivesfordinner.com/wp-content/uploads/2016/09/kung-pao-cover-320x320.jpg'], ['Pressed Maitake Buns with Gochujang-Hoisin Glaze', 'https://olivesfordinner.com/2016/09/pressed-maitake-buns-with-gochujang-hoisin-glaze.html', 'https://olivesfordinner.com/wp-content/uploads/2016/08/Pressed-Maitake-Buns-320x320.jpg'], ['Tofu Banh Mi', 'https://olivesfordinner.com/2016/08/tofu-banh-mi.html', 'https://olivesfordinner.com/wp-content/uploads/2016/08/tofu-banh-mi-cover-320x320.jpg'], ['Crispy Buffalo Cauliflower Salad', 'https://olivesfordinner.com/2016/08/crispy-buffalo-cauliflower-salad.html', 'https://olivesfordinner.com/wp-content/uploads/2016/08/coverphoto-320x320.jpg'], ['Crispy + Spicy 
Enoki Mushroom Roll', 'https://olivesfordinner.com/2016/07/crispy-spicy-enoki-mushroom-roll.html', 'https://olivesfordinner.com/wp-content/uploads/2016/07/28327021802_8fcd205138_z-320x320.jpg'], ['Mâche and Mint Salad with Buttermilk-Ponzu Dressing', 'https://olivesfordinner.com/2016/06/mache-and-mint-salad-with-buttermilk-ponzu-dressing.html', 'https://olivesfordinner.com/wp-content/uploads/2016/05/mache-cover-320x320.jpg'], ['Crispy Vegan Shrimp Toast', 'https://olivesfordinner.com/2016/06/crispy-vegan-shrimp-toast.html', 'https://olivesfordinner.com/wp-content/uploads/2016/06/shrimptoastcover-320x320.jpg'], ['Savory + Seared Watermelon', 'https://olivesfordinner.com/2016/06/savoryseared-watermelon.html', 'https://olivesfordinner.com/wp-content/uploads/2016/05/watermelon-cover-320x320.jpg'], ['Cauliflower and Cashew Cream Soup', 'https://olivesfordinner.com/2013/12/cauliflower-and-cashew-cream-soup.html', 'https://olivesfordinner.com/wp-content/uploads/2013/12/AA4A6617-320x320.jpg'], ['Bang Bang Cauliflower', 'https://olivesfordinner.com/2016/05/bang-bang-cauliflower.html', 'https://olivesfordinner.com/wp-content/uploads/2016/05/Bang-Bang-Cauliflower-320x320.jpg'], ['Crispy Hearts of Palm Tacos', 'https://olivesfordinner.com/2016/05/crispy-hearts-of-palm-tacos.html', 'https://olivesfordinner.com/wp-content/uploads/2016/05/tacocover-320x320.jpg'], ['Watermelon Tuna Poke Bowl', 'https://olivesfordinner.com/2016/04/watermelon-tuna-poke-bowl.html', 'https://olivesfordinner.com/wp-content/uploads/2016/04/26505343176_1dc831866c_z-320x320.jpg'], ['Spaghetti with Vegan Scallops, Toasted Walnuts and Plum Tomatoes', 'https://olivesfordinner.com/2016/04/spaghetti-with-scallops-toasted-walnuts.html', 'https://olivesfordinner.com/wp-content/uploads/2016/04/Spaghetti-with-vegan-scallops-toasted-walnuts-and-plum-tomatoes-320x320.jpg'], ['Pasta with Seaweed-Matcha Butter and Vegan Scallops', 
'https://olivesfordinner.com/2016/04/pasta-with-seaweed-matcha-butter-and-vegan-scallops.html', 'https://olivesfordinner.com/wp-content/uploads/2016/04/Pasta-with-Seaweed-Matcha-Butter-320x320.jpg'], ['Creamy + Chunky Pasta Sauce', 'https://olivesfordinner.com/2016/04/creamy-chunky-pasta-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2016/04/pastamisecover-320x320.jpg'], ['Vegan Tom Kha Gai made with Fysh™ Sauce + an Interview with product founder Zach Grossman [closed]', 'https://olivesfordinner.com/2016/03/vegan-tom-kha-gai-made-with-fysh-sauce-an-interview-with-product-founder-zach-grossman.html', 'https://olivesfordinner.com/wp-content/uploads/2016/03/TGKcover-320x320.jpg'], ['Spicy Vegan Shrimp Cakes', 'https://olivesfordinner.com/2016/03/spicy-vegan-shrimp-cakes.html', 'https://olivesfordinner.com/wp-content/uploads/2016/03/Shrimp-Cake-Cover-320x320.jpg'], ['Spicy Carrot Lox and Avocado Sushi', 'https://olivesfordinner.com/2016/03/spicy-carrot-lox-and-avocado-sushi.html', 'https://olivesfordinner.com/wp-content/uploads/2016/03/Spicy-Carrot-Lox-Sushi-320x320.jpg'], ['Mongolian Soy Curls', 'https://olivesfordinner.com/2016/03/mongolian-soy-curls.html', 'https://olivesfordinner.com/wp-content/uploads/2016/03/MongolianSoyCurls-320x320.jpg'], ['Super-Versatile Crispy Tofu Cutlets', 'https://olivesfordinner.com/2016/01/super-versatile-crispy-tofu-cutlets.html', 'https://olivesfordinner.com/wp-content/uploads/2016/01/tofucutlets_0011-Copy-320x320.jpg'], ['Vegan Dynamite Rolls', 'https://olivesfordinner.com/2016/01/vegan-dynamite-rolls.html', 'https://olivesfordinner.com/wp-content/uploads/2016/01/vegandynamiterollscover_0044-320x320.jpg'], ['Split Pea Soup with Toasted Sesame Oil', 'https://olivesfordinner.com/2011/01/split-pea-soup-with-toasted-sesame-oi.html', 'https://olivesfordinner.com/wp-content/uploads/2011/01/5352648559_44f01a7575-320x320.jpg'], ['Vegan Pork Buns with Oyster Mushroom Bacon', 
'https://olivesfordinner.com/2015/11/vegan-pork-buns-with-oyster-mushroo.html', 'https://olivesfordinner.com/wp-content/uploads/2015/11/22575913837_9fea97e04f_o-320x320.jpg'], ['Spicy Thai-Style Pizza with Peanut Sauce', 'https://olivesfordinner.com/2015/11/spicy-thai-style-pizza-with-peanut-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2015/11/22663496301_2f58089eb9_c-320x320.jpg'], ['Vegan Sausage-Stuffed Homemade Ravioli', 'https://olivesfordinner.com/2015/10/vegan-sausage-stuffed-homemade-ravioli.html', 'https://olivesfordinner.com/wp-content/uploads/2015/10/21948265815_36b2850a74_b-320x320.jpg'], ['Beet Tartare with Mango Yolk', 'https://olivesfordinner.com/2015/10/beet-tartare-with-mango-yolk.html', 'https://olivesfordinner.com/wp-content/uploads/2015/10/tartarecover2-320x320.jpg'], ['Pulled Jackfruit Sandwiches with Red Cabbage Slaw', 'https://olivesfordinner.com/2015/08/pulled-jackfruit-sandwiches-with-red.html', 'https://olivesfordinner.com/wp-content/uploads/2015/08/19558130643_8a4333c2bd_c-320x320.jpg'], ['The Abundance Diet | Review + Recipe!', 'https://olivesfordinner.com/2015/07/the-abundance-diet-review-recipe.html', 'https://olivesfordinner.com/wp-content/uploads/2015/07/19685670870_0e2090a8ce_c-320x320.jpg'], ['Easy Miso Soup', 'https://olivesfordinner.com/2015/04/easy-miso-soup.html', 'https://olivesfordinner.com/wp-content/uploads/2015/04/easycover-320x320.jpg'], ['Fire Noodles with Crispy Tofu', 'https://olivesfordinner.com/2015/04/fire-noodles-with-crispy-tofu.html', 'https://olivesfordinner.com/wp-content/uploads/2015/04/FN-final2-320x320.jpg'], ['Carrot Lox', 'https://olivesfordinner.com/2015/03/carrot-lox.html', 'https://olivesfordinner.com/wp-content/uploads/2015/03/16549307059_ba959fc04a_z-320x320.jpg'], ['Salt-Roasted Golden Beets with Teriyaki Sauce and Nori Dust', 'https://olivesfordinner.com/2015/02/salt-roasted-golden-beets-with-teriyaki.html', 
'https://olivesfordinner.com/wp-content/uploads/2015/02/goldenbeetcover2-320x320.jpg'], ['Roasted Garlic and Sriracha Hummus', 'https://olivesfordinner.com/2011/09/roasted-garlic-and-sriracha-hummus.html', 'https://olivesfordinner.com/wp-content/uploads/2011/09/6116553668_6f1645590d-320x320.jpg'], ['Beer-Battered Hearts of Palm with Dill Slaw and Quick Pickles', 'https://olivesfordinner.com/2015/02/beer-battered-hearts-of-palm-with-di.html', 'https://olivesfordinner.com/wp-content/uploads/2015/02/HOPcover-320x320.jpg'], ['Purple Cauliflower Crust Pizza with Garlic Oil', 'https://olivesfordinner.com/2015/02/purple-cauliflower-crust-pizza-wi.html', 'https://olivesfordinner.com/wp-content/uploads/2015/02/16288187119_b3f3aea0fe_z-320x320.jpg'], ['Sambal and Peanut Butter Ramen Noodles with Tofu', 'https://olivesfordinner.com/2014/07/sambal-and-peanut-butter-ramen-noodles.html', 'https://olivesfordinner.com/wp-content/uploads/2014/07/14580181994_358e518082_z-320x320.jpg'], ['Faux Pork Wonton Soup with Bok Choy', 'https://olivesfordinner.com/2014/06/faux-pork-wonton-soup-with-bok-choy.html', 'https://olivesfordinner.com/wp-content/uploads/2014/06/fauxporkwithscript-320x320.jpg'], ['Roasted Thai Eggplant with Cherry Tomatoes and Basil', 'https://olivesfordinner.com/2014/06/roasted-thai-eggplant-with-cherry.html', 'https://olivesfordinner.com/wp-content/uploads/2014/06/12893805454_064acd1b54_z-320x320.jpg'], ['Caramelized Vegan Scallops in Pasta with a Minted Pea Puree', 'https://olivesfordinner.com/2014/05/caramelized-vegan-scallops-in-pasta.html', 'https://olivesfordinner.com/wp-content/uploads/2014/05/14220916831_b3cf856bd6_z-320x320.jpg'], ['General Tso’s Cauliflower', 'https://olivesfordinner.com/2014/05/general-tsos-cauliflower.html', 'https://olivesfordinner.com/wp-content/uploads/2014/05/14030895357_a5df788e37_z-320x320.jpg'], ['Focaccia Topped with Mâche and Fresh Tomatoes + Some News!', 
'https://olivesfordinner.com/2014/04/focaccia-topped-with-mache-and-fres.html', 'https://olivesfordinner.com/wp-content/uploads/2014/04/AA4A4808-320x320.jpg'], ['Miso-Scented Portobello with Garlic Cauliflower Mash', 'https://olivesfordinner.com/2014/04/miso-scented-portobello-with-garlic.html', 'https://olivesfordinner.com/wp-content/uploads/2014/04/AA4A8890-320x320.jpg'], ['Springtime Green Tea Noodle Salad', 'https://olivesfordinner.com/2014/03/springtime-green-tea-noodle-salad.html', 'https://olivesfordinner.com/wp-content/uploads/2014/03/13312619903_1c48fff9b1_c-320x320.jpg'], ['Sesame Crusted Fu with Soy-Agave Sauce', 'https://olivesfordinner.com/2014/03/sesame-crusted-fu-with-soy-agave-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2014/03/11393685196_1b23d255a4_c-320x320.jpg'], ['How to Towel Press Tofu for Marinating', 'https://olivesfordinner.com/2014/02/how-to-towel-press-tofu-for-marinating.html', 'https://olivesfordinner.com/wp-content/uploads/2014/02/12501525913_6544d0f2f6_c-320x320.jpg'], ['Peppery Tofu with Chinese 5-Spice Powder and Black Bean Sauce', 'https://olivesfordinner.com/2014/01/peppery-tofu-with-chinese-5-spice.html', 'https://olivesfordinner.com/wp-content/uploads/2014/01/11609819593_f650d4e6b1_c-320x320.jpg'], ['Baked Oyster Mushrooms with Dynamite Sauce', 'https://olivesfordinner.com/2014/01/baked-oyster-mushrooms-with-dynamite.html', 'https://olivesfordinner.com/wp-content/uploads/2014/01/11915675694_4308cf083b_c-320x320.jpg'], ['15-Minute Quick and Easy Tofu', 'https://olivesfordinner.com/2014/01/15-minute-quick-and-easy-tofu.html', 'https://olivesfordinner.com/wp-content/uploads/2014/01/11918810306_4ae3a50c28-320x320.jpg'], ['Shiitake Risotto', 'https://olivesfordinner.com/2014/01/shiitake-risotto.html', 'https://olivesfordinner.com/wp-content/uploads/2014/01/11816804405_82cb53837a-320x320.jpg'], ['Curry-Scented Soy Curls with Sesame-Cinnamon Dressing', 
'https://olivesfordinner.com/2014/01/curry-scented-soy-curls-with-sesame.html', 'https://olivesfordinner.com/wp-content/uploads/2014/01/11715133245_e7cf858e43-320x320.jpg'], ['Oyster Mushroom Wonton Soup with Wilted Kale', 'https://olivesfordinner.com/2013/12/oyster-mushroom-wonton-soup-with-wilted.html', 'https://olivesfordinner.com/wp-content/uploads/2013/12/11275548074_38b8089957_o-320x320.jpg'], ['Cauliflower and Cashew Cream Soup', 'https://olivesfordinner.com/2013/12/cauliflower-and-cashew-cream-soup.html', 'https://olivesfordinner.com/wp-content/uploads/2013/12/AA4A6617-320x320.jpg'], ['Chickpea Salad Sandwich', 'https://olivesfordinner.com/2011/04/chickpea-salad-sandwic.html', 'https://olivesfordinner.com/wp-content/uploads/2011/04/5587319628_af0b053223-320x320.jpg'], ['Roasted Cauliflower Steaks with Oyster Mushroom Gravy', 'https://olivesfordinner.com/2013/11/roasted-cauliflower-steaks-with-oyster.html', 'https://olivesfordinner.com/wp-content/uploads/2013/11/10788586614_17f292d810_c-320x320.jpg'], ['Baked Pumpkin Ravioli with Rubbed Sage Cream', 'https://olivesfordinner.com/2013/11/baked-pumpkin-ravioli-with-rubbed-sage.html', 'https://olivesfordinner.com/wp-content/uploads/2013/11/10632300434_87bb849788_o-320x320.jpg'], ['Toasted Ravioli Stuffed with Cultured Cashew Cheese', 'https://olivesfordinner.com/2013/10/toasted-ravioli-stuffed-with-cultured.html', 'https://olivesfordinner.com/wp-content/uploads/2013/10/9886909214_1a4a7510ab_z-320x320.jpg'], ['Vegan Tuna Salad', 'https://olivesfordinner.com/2013/10/vegan-tuna-salad.html', 'https://olivesfordinner.com/wp-content/uploads/2013/10/10370170004_c010d3a743_o-320x320.jpg'], ['Quick and Easy Fragrant Coconut Soup', 'https://olivesfordinner.com/2013/10/quick-and-easy-fragrant-coconut-soup.html', 'https://olivesfordinner.com/wp-content/uploads/2013/10/10005529404_41114c01d5_z-320x320.jpg'], ['Panisse with Garlic-Ginger Sauce', 'https://olivesfordinner.com/2013/09/panisse-with-garlic-ginger-sauce.html', 
'https://olivesfordinner.com/wp-content/uploads/2013/09/9824246796_06a9bf4e84_z-320x320.jpg'], ['Shiitake Nigiri', 'https://olivesfordinner.com/2013/09/shiitake-nigiri.html', 'https://olivesfordinner.com/wp-content/uploads/2013/09/eelcropped1-320x320.jpg'], ['Hearts of Palm Crab Cakes', 'https://olivesfordinner.com/2013/09/hearts-of-palm-crab-cakes.html', 'https://olivesfordinner.com/wp-content/uploads/2013/09/9489200230_b25a587ca4_z-320x320.jpg'], ['Quick and Easy Carrot-Ginger Tofu', 'https://olivesfordinner.com/2013/09/quick-and-easy-carrot-ginger-tofu.html', 'https://olivesfordinner.com/wp-content/uploads/2013/09/9704514050_96a7646c51_z-320x320.jpg'], ['Green Lentil, Pistachio and Walnut Fauxlafel', 'https://olivesfordinner.com/2013/09/green-lentil-pistachio-and-walnu.html', 'https://olivesfordinner.com/wp-content/uploads/2013/09/8999832064_98efa1b8ea_z-320x320.jpg'], ['Vegan Scallops with Pea Puree and Watercress', 'https://olivesfordinner.com/2013/09/vegan-scallops-with-pea-puree-and.html', 'https://olivesfordinner.com/wp-content/uploads/2013/09/9499486405_a84cbab602_z-320x320.jpg'], ['Easy Overnight Crockpot Oatmeal', 'https://olivesfordinner.com/2011/12/easy-overnight-crockpot-oatmea.html', 'https://olivesfordinner.com/wp-content/uploads/2011/12/6454925599_1a5f5946fa_z-320x320.jpg'], ['Jackfruit Soft Tacos', 'https://olivesfordinner.com/2013/08/jackfruit-soft-tacos.html', 'https://olivesfordinner.com/wp-content/uploads/2013/08/9539382971_352c7d72a8_z-320x320.jpg'], ['Farro Crab Cakes', 'https://olivesfordinner.com/2013/08/farro-crab-cakes.html', 'https://olivesfordinner.com/wp-content/uploads/2013/08/9480782318_360b947023_z-320x320.jpg'], ['Tofu with General Tso’s Sauce', 'https://olivesfordinner.com/2013/07/tofu-with-general-tsos-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2013/07/9287074976_d03c30e8f2_z-320x320.jpg'], ['Sambal Seitan Skewers', 'https://olivesfordinner.com/2013/07/sambal-seitan-skewers.html', 
'https://olivesfordinner.com/wp-content/uploads/2013/07/9223979993_d60294d84d_z-320x320.jpg'], ['Vegan Crab Rangoon', 'https://olivesfordinner.com/2013/06/vegan-crab-rangoon.html', 'https://olivesfordinner.com/wp-content/uploads/2013/06/9175256982_e66acedacb_z-320x320.jpg'], ['Quick and Easy Tofu with Ramen Noodles', 'https://olivesfordinner.com/2013/06/quick-and-easy-tofu-with-ramen-noodles.html', 'https://olivesfordinner.com/wp-content/uploads/2013/06/9063055880_7247de15fd_z-320x320.jpg'], ['Sweet Potato, Farro and Walnut Burgers with Homemade Pickles', 'https://olivesfordinner.com/2013/06/sweet-potato-farro-and-walnut-burgers.html', 'https://olivesfordinner.com/wp-content/uploads/2013/06/9051266379_2945ce81ff_z-320x320.jpg'], ['Fresh Spring Rolls with Soy Curls, Mango and Mint', 'https://olivesfordinner.com/2013/06/fresh-spring-rolls-with-soy-curls-mango.html', 'https://olivesfordinner.com/wp-content/uploads/2013/06/AA4A3748-320x320.jpg'], ['Sesame Roasted Cauliflower with Sriracha-Vegenaise Dressing', 'https://olivesfordinner.com/2013/05/sesame-roasted-cauliflower-wi.html', 'https://olivesfordinner.com/wp-content/uploads/2013/05/8732183749_75abe9d9d4_z-320x320.jpg'], ['Red Quinoa and Sweet Potato Croquettes', 'https://olivesfordinner.com/2013/05/red-quinoa-and-sweet-potato-croquettes.html', 'https://olivesfordinner.com/wp-content/uploads/2013/05/8732186825_ab08084ec0_z-320x320.jpg'], ['Buffalo Chickpea Soft Tacos with Avocado Sour Cream', 'https://olivesfordinner.com/2013/05/buffalo-chickpea-soft-tacos-wi.html', 'https://olivesfordinner.com/wp-content/uploads/2013/05/8652263659_135227c089_z-320x320.jpg'], ['Sweet and Sticky Cashew Tofu', 'https://olivesfordinner.com/2013/04/sweet-and-sticky-cashew-tofu.html', 'https://olivesfordinner.com/wp-content/uploads/2013/04/8647185538_0fe8eeb9d7_z-320x320.jpg'], ['Farro + Beet Burgers with Kimchi Mayo', 'https://olivesfordinner.com/2018/09/farro-beet-burgers-with-kimchi-mayo.html', 
'https://olivesfordinner.com/wp-content/uploads/2018/09/Farro-Beet-Burgers-13236-320x320.jpg'], ['Vegan Grilled Cheese with Shiitake Bacon and Tomato', 'https://olivesfordinner.com/2013/04/vegan-grilled-cheese-with-shiitake.html', 'https://olivesfordinner.com/wp-content/uploads/2013/04/8669439780_841b866c5e_z-320x320.jpg'], ['Glazed Tofu with Fiery Sriracha Pearls', 'https://olivesfordinner.com/2013/04/glazed-tofu-with-fiery-sriracha-pearls.html', 'https://olivesfordinner.com/wp-content/uploads/2013/04/8646152741_dff8613fc7_z-320x320.jpg'], ['Four Vegan Grilled Cheese Sandwiches', 'https://olivesfordinner.com/2013/04/four-vegan-grilled-cheese-sandwiches.html', 'https://olivesfordinner.com/wp-content/uploads/2013/04/IMG_3382-320x320.jpg'], ['Mint, Basil and Cilantro Udon Noodle Bowl', 'https://olivesfordinner.com/2013/04/mint-basil-and-cilantro-udon-noodle-bow.html', 'https://olivesfordinner.com/wp-content/uploads/2013/04/8614208893_90e8e16d04_z-320x320.jpg'], ['Pistachio-Crusted Tofu with Horseradish Cream', 'https://olivesfordinner.com/2013/03/pistachio-crusted-tofu-with-horseradis.html', 'https://olivesfordinner.com/wp-content/uploads/2013/03/8542590853_d4b66ecea0_z-320x320.jpg'], ['Homemade Vegan Sausage and Cheese Calzones', 'https://olivesfordinner.com/2013/03/homemade-vegan-sausage-and-cheese.html', 'https://olivesfordinner.com/wp-content/uploads/2013/03/8495059876_03242ab476_z-320x320.jpg'], ['Tofu and Shiitake Stack with Bok Choy-Ginger Puree', 'https://olivesfordinner.com/2013/03/tofu-and-shiitake-stack-with-bok-choy.html', 'https://olivesfordinner.com/wp-content/uploads/2013/03/8525187049_08c1c0e9c4_z-320x320.jpg'], ['Kale, Pearled Barley and Faux Roe Stack with Sweet Soy Glaze', 'https://olivesfordinner.com/2013/02/kale-pearled-barley-and-faux-roe-stack.html', 'https://olivesfordinner.com/wp-content/uploads/2013/02/8505006984_158bf7f337_z-320x320.jpg'], ['Maifun Noodles in a Toasted Sesame-Ginger Broth', 
'https://olivesfordinner.com/2013/02/maifun-noodles-in-toasted-sesame-ginger.html', 'https://olivesfordinner.com/wp-content/uploads/2013/02/8478465251_74975b32fc_z-320x320.jpg'], ['Socca Pizza Crust with Caramelized Shallots and Kale', 'https://olivesfordinner.com/2013/02/socca-pizza-crust-with-caramelized.html', 'https://olivesfordinner.com/wp-content/uploads/2013/02/8446596405_1d37fa4f92_z-320x320.jpg'], ['Spicy Vegan Scallop Roll', 'https://olivesfordinner.com/2013/01/spicy-vegan-scallop-ro.html', 'https://olivesfordinner.com/wp-content/uploads/2013/01/8421544856_6542d6a4b3_z-320x320.jpg'], ['Vegan Reuben Sandwich', 'https://olivesfordinner.com/2013/01/vegan-reuben-sandwic.html', 'https://olivesfordinner.com/wp-content/uploads/2013/01/8395416871_c2e6155d3b_z-320x320.jpg'], ['Soft Pretzel Bites', 'https://olivesfordinner.com/2012/10/soft-pretzel-bites.html', 'https://olivesfordinner.com/wp-content/uploads/2012/10/8076551157_4fd577e190_z-320x320.jpg'], ['Sriracha-Habanero Vegan Buffalo Wings', 'https://olivesfordinner.com/2013/01/sriracha-habanero-vegan-buffalo-wings.html', 'https://olivesfordinner.com/wp-content/uploads/2013/01/8335188506_7e166e2f61_z-1-320x320.jpg'], ['Easy Sesame Glazed Tofu', 'https://olivesfordinner.com/2013/01/easy-sesame-glazed-tofu.html', 'https://olivesfordinner.com/wp-content/uploads/2013/01/8326773028_c169becd58_z-320x320.jpg'], ['Sweet and Sour Crispy Mock Chicken Stir Fry', 'https://olivesfordinner.com/2012/12/sweet-and-sour-crispy-mock-chicken-stir.html', 'https://olivesfordinner.com/wp-content/uploads/2012/12/8279367354_b0f80b3b2d_z-320x320.jpg'], ['Vegan Palak Paneer', 'https://olivesfordinner.com/2012/12/vegan-palak-paneer.html', 'https://olivesfordinner.com/wp-content/uploads/2012/12/8258313683_3b2437bbd3_z-320x320.jpg'], ['Black Rice Noodles with Fresh Kale, Mint and Basil', 'https://olivesfordinner.com/2012/12/black-rice-noodles-with-fresh-kale-min.html', 
'https://olivesfordinner.com/wp-content/uploads/2012/12/8255864336_008310cdd6_z-320x320.jpg'], ['Vegan French Onion Soup Sandwich', 'https://olivesfordinner.com/2012/12/vegan-french-onion-soup-sandwic.html', 'https://olivesfordinner.com/wp-content/uploads/2012/12/8235083245_0d2d4cb46a_z-320x320.jpg'], ['Vegan Pot Pie with Portobella, Green Garbanzo and Shallots', 'https://olivesfordinner.com/2012/11/vegan-pot-pie-with-portobella-green.html', 'https://olivesfordinner.com/wp-content/uploads/2012/11/8151769754_a15689bd03_z-320x320.jpg'], ['Sage Stuffing and Shiitake-Shallot Gravy', 'https://olivesfordinner.com/2012/11/sage-stuffing-and-shiitake-shallot-gravy.html', 'https://olivesfordinner.com/wp-content/uploads/2012/11/8196192203_461d3a4b8e_z-320x320.jpg'], ['Shallot and Shiitake Seitan Wellington', 'https://olivesfordinner.com/2012/11/shallot-and-shiitake-seitan-wellington.html', 'https://olivesfordinner.com/wp-content/uploads/2012/11/8176377771_bebfa1ab0d_z-320x320.jpg'], ['Protein-Happy Quinoa Wraps from Vegan Sandwiches Save the Day!', 'https://olivesfordinner.com/2012/11/protein-happy-quinoa-wraps-from-vegan.html', 'https://olivesfordinner.com/wp-content/uploads/2012/11/8155643856_4a48f6617a_z-320x320.jpg'], ['Tofu Tikka Masala', 'https://olivesfordinner.com/2012/10/tofu-tikka-masala.html', 'https://olivesfordinner.com/wp-content/uploads/2012/10/8110173470_6644257513_z-320x320.jpg'], ['Vegan Scallops in a White Wine Cream Sauce over Pasta', 'https://olivesfordinner.com/2012/10/vegan-scallops-in-wine-wine-cream-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2012/10/8106318112_c63658492a_z-320x320.jpg'], ['PPK Chocolate Chip Cookies', 'https://olivesfordinner.com/2011/04/ppk-chocolate-chip-cookies.html', 'https://olivesfordinner.com/wp-content/uploads/2011/04/5639003638_504f52bbf9-320x320.jpg'], ['White Bean and Mushroom Soup with Dill-Pepper Dumplings', 'https://olivesfordinner.com/2012/10/white-bean-and-mushroom-soup-with-di.html', 
'https://olivesfordinner.com/wp-content/uploads/2012/10/8087081755_464006e571_z-320x320.jpg'], ['Roasted Red Pepper Mac and Cheese', 'https://olivesfordinner.com/2012/10/roasted-red-pepper-mac-and-cheese.html', 'https://olivesfordinner.com/wp-content/uploads/2012/10/8060234137_cfb80515f8_z-320x320.jpg'], ['Sesame-Ginger Soba Noodles', 'https://olivesfordinner.com/2012/10/sesame-ginger-soba-noodles.html', 'https://olivesfordinner.com/wp-content/uploads/2012/10/8064732272_38b0391bfa_z-320x320.jpg'], ['Homemade Thai Sweet Chili Sauce with Fried Tofu', 'https://olivesfordinner.com/2012/10/homemade-thai-sweet-chili-sauce-wi.html', 'https://olivesfordinner.com/wp-content/uploads/2012/10/8017588970_dd4a45740c-320x320.jpg'], ['Vegan Bacon-Wrapped Scallops with Paprika Cream Sauce', 'https://olivesfordinner.com/2012/10/vegan-bacon-wrapped-scallops-wi.html', 'https://olivesfordinner.com/wp-content/uploads/2012/10/8044749163_c4e55e1c58_z-320x320.jpg'], ['Raw Tahini and Cashew Dressing', 'https://olivesfordinner.com/2012/09/raw-tahini-and-cashew-dressing.html', 'https://olivesfordinner.com/wp-content/uploads/2012/09/IMG_1464-320x320.jpg'], ['Vegan Grilled Cheese with Smoky Tomato Soup', 'https://olivesfordinner.com/2012/09/vegan-grilled-cheese-with-smoky-tomato.html', 'https://olivesfordinner.com/wp-content/uploads/2012/09/7993534714_73858b07d3-320x320.jpg'], ['Ratatouille (Confit Byaldi)', 'https://olivesfordinner.com/2012/09/ratatouille-confit-byaldi.html', 'https://olivesfordinner.com/wp-content/uploads/2012/09/12193750456_691804522f_c-320x320.jpg'], ['Cheese-Stuffed Homemade Ravioli with White Wine Sauce', 'https://olivesfordinner.com/2012/09/cheese-stuffed-homemade-ravioli-wi.html', 'https://olivesfordinner.com/wp-content/uploads/2012/09/7906875742_ab4cd8a52a-320x320.jpg'], ['Kale, Tofu and Cashew Stir-Fry with Cold Soba Noodles', 'https://olivesfordinner.com/2012/08/kale-tofu-and-cashew-stir-fry-with-cold.html', 
'https://olivesfordinner.com/wp-content/uploads/2012/08/7883972382_e7373658bb-320x320.jpg'], ['Za’atar Tofu', 'https://olivesfordinner.com/2012/08/zaatar-tofu.html', 'https://olivesfordinner.com/wp-content/uploads/2012/08/7866869068_5cef523a33_z-320x320.jpg'], ['Savory Corn Pancakes with IKEA Vegan Caviar', 'https://olivesfordinner.com/2012/08/savory-corn-pancakes-with-ikea-vegan.html', 'https://olivesfordinner.com/wp-content/uploads/2012/08/7808905436_5fb2b5f1a8_z-320x320.jpg'], ['Savory + Seared Watermelon', 'https://olivesfordinner.com/2016/06/savoryseared-watermelon.html', 'https://olivesfordinner.com/wp-content/uploads/2016/05/watermelon-cover-320x320.jpg'], ['Grilled Sambal Oelek Tofu with Peanut Butter Sauce', 'https://olivesfordinner.com/2012/08/grilled-sambal-oelek-tofu-with-peanu.html', 'https://olivesfordinner.com/wp-content/uploads/2012/08/7768265260_78510a0aea_z-320x320.jpg'], ['Pasta with Roasted Red Pepper Sauce and Caramelized Shallots', 'https://olivesfordinner.com/2012/08/pasta-with-roasted-red-pepper-sauce-and.html', 'https://olivesfordinner.com/wp-content/uploads/2012/08/7762912426_494a4743ce_z-320x320.jpg'], ['Deep-Fried Vegan Mac and Cheese', 'https://olivesfordinner.com/2012/07/deep-fried-vegan-mac-and-cheese.html', 'https://olivesfordinner.com/wp-content/uploads/2012/07/7665545816_98fb6efe94-320x320.jpg'], ['Minted Red Quinoa, Fava Bean and Cashew Salad', 'https://olivesfordinner.com/2012/07/minted-red-quinoa-fava-bean-and-cashew.html', 'https://olivesfordinner.com/wp-content/uploads/2012/07/7622670174_b39801759b-320x320.jpg'], ['Gingerade Kombucha Caviar', 'https://olivesfordinner.com/2012/06/gingerade-kombucha-caviar.html', 'https://olivesfordinner.com/wp-content/uploads/2012/06/7426883562_bff8f197b4-320x320.jpg'], ['Oyster Mushroom Wonton and Lemongrass Soup', 'https://olivesfordinner.com/2012/06/oyster-mushroom-wonton-and-lemongrass.html', 'https://olivesfordinner.com/wp-content/uploads/2012/06/7382263230_d73039a21b-320x320.jpg'], 
['Black Lentil, Pistachio and Shiitake Mushroom Burger', 'https://olivesfordinner.com/2012/05/black-lentil-pistachio-and-shiitake.html', 'https://olivesfordinner.com/wp-content/uploads/2012/05/7289963524_945a2a56b9-320x320.jpg'], ['Black Pepper and Thyme Crusted Tofu', 'https://olivesfordinner.com/2012/05/black-pepper-and-thyme-crusted-tofu.html', 'https://olivesfordinner.com/wp-content/uploads/2012/05/7237306376_63d9934023-320x320.jpg'], ['Spicy Jackfruit Salad', 'https://olivesfordinner.com/2012/05/spicy-jackfruit-salad.html', 'https://olivesfordinner.com/wp-content/uploads/2012/05/7183288902_62c2826f8d-320x320.jpg'], ['Vegan Sushi: Faux-Roe Gunkanmaki with Pickled Daikon', 'https://olivesfordinner.com/2012/04/vegan-sushi-faux-roe-gunkanmaki-with_16.html', 'https://olivesfordinner.com/wp-content/uploads/2012/04/6936157558_aaf4a19acb-320x320.jpg'], ['Steamed Tofu with Spicy Black Bean Sauce', 'https://olivesfordinner.com/2012/04/steamed-tofu-with-spicy-black-bean.html', 'https://olivesfordinner.com/wp-content/uploads/2012/04/7051589731_bbdc77e088-320x320.jpg'], ['Fresh Vegan Mozzarella Pizza', 'https://olivesfordinner.com/2012/03/fresh-vegan-mozzarella-pizza.html', 'https://olivesfordinner.com/wp-content/uploads/2012/03/7012328143_2537723fed_z-320x320.jpg'], ['Peppery Tofu with Chinese 5-Spice Powder and Black Bean Sauce', 'https://olivesfordinner.com/2014/01/peppery-tofu-with-chinese-5-spice.html', 'https://olivesfordinner.com/wp-content/uploads/2014/01/11609819593_f650d4e6b1_c-320x320.jpg'], ['Roasted Eggplant and Hummus Sandwich', 'https://olivesfordinner.com/2012/03/roasted-egglant-and-hummus-sandwic.html', 'https://olivesfordinner.com/wp-content/uploads/2012/03/6847432936_61d6d916a1_z-320x320.jpg'], ['Garlic-Sriracha Vegan Buffalo Wings', 'https://olivesfordinner.com/2012/03/garlic-sriracha-vegan-buffalo-wings.html', 'https://olivesfordinner.com/wp-content/uploads/2012/03/6973941693_4db60198cb_z-320x320.jpg'], ['Molecular Vegan: Scallops with Carrot-Ginger 
Caviar', 'https://olivesfordinner.com/2012/02/molecular-vegan-scallops-with-carro.html', 'https://olivesfordinner.com/wp-content/uploads/2012/02/6786920836_eb7688d3fd_z-320x320.jpg'], ['Galangal Tofu with Lemongrass-Scented Broth', 'https://olivesfordinner.com/2012/02/galangal-tofu-with-lemongrass-scented.html', 'https://olivesfordinner.com/wp-content/uploads/2012/02/6866617493_299419355a_z-320x320.jpg'], ['Vegan Macaroni and Cheese', 'https://olivesfordinner.com/2012/02/vegan-macaroni-and-cheese.html', 'https://olivesfordinner.com/wp-content/uploads/2012/02/6864169847_3f2925df44_z-320x320.jpg'], ['Portabello and Shallot Ravioli with Toasted Walnuts', 'https://olivesfordinner.com/2012/01/portabello-and-shallot-ravioli-wi.html', 'https://olivesfordinner.com/wp-content/uploads/2012/01/6784743515_db35f73545_z-320x320.jpg'], ['Ginger-Garlic Tofu with Fiery Chili Oil', 'https://olivesfordinner.com/2012/01/ginger-garlic-tofu-with-fiery-chili-oi.html', 'https://olivesfordinner.com/wp-content/uploads/2012/01/6745971851_5d4457be90_z-320x320.jpg'], ['Spicy Shiitake Mushroom Roll', 'https://olivesfordinner.com/2011/12/spicy-shiitake-mushroom-ro.html', 'https://olivesfordinner.com/wp-content/uploads/2011/12/6489807867_ce5d312d01_z-320x320.jpg'], ['Black Bean and Corn Quesadillas with Smoked Paprika', 'https://olivesfordinner.com/2011/12/black-bean-and-corn-quesadillas-wi.html', 'https://olivesfordinner.com/wp-content/uploads/2011/12/6405288727_73a635e8d3_z-320x320.jpg'], ['Spicy Pumpkin Soup with Lemongrass and Coconut Milk', 'https://olivesfordinner.com/2011/11/spicy-pumpkin-soup-with-lemongrass-and.html', 'https://olivesfordinner.com/wp-content/uploads/2011/11/6370959129_cc4e826b6e_z-320x320.jpg'], ['Seitan Roulade with Sage and Sweet Onion Stuffing', 'https://olivesfordinner.com/2011/11/seitan-roulade-with-sage-and-swee.html', 'https://olivesfordinner.com/wp-content/uploads/2011/11/6338385248_d552579dd6_z-320x320.jpg'], ['Grilled Tofu with Lemongrass and Cilantro Stuffing', 
'https://olivesfordinner.com/2011/10/grilled-tofu-with-lemongrass-and.html', 'https://olivesfordinner.com/wp-content/uploads/2011/10/6251395078_631e498749-320x320.jpg'], ['Sriracha-Habanero Vegan Buffalo Wings', 'https://olivesfordinner.com/2013/01/sriracha-habanero-vegan-buffalo-wings.html', 'https://olivesfordinner.com/wp-content/uploads/2013/01/8335188506_7e166e2f61_z-1-320x320.jpg'], ['Faux Fish and Real Chips', 'https://olivesfordinner.com/2011/10/faux-fish-and-real-chips.html', 'https://olivesfordinner.com/wp-content/uploads/2011/10/6205471514_3aebc3ce03-320x320.jpg'], ['Sriracha and Peanut Butter Tofu', 'https://olivesfordinner.com/2011/09/sriracha-and-peanut-butter-tofu.html', 'https://olivesfordinner.com/wp-content/uploads/2011/09/6177319593_1790a4c72a-320x320.jpg'], ['Spicy Fava Bean Falafel', 'https://olivesfordinner.com/2011/09/spicy-fava-bean-falafe.html', 'https://olivesfordinner.com/wp-content/uploads/2011/09/6116560812_4e068eeb74-320x320.jpg'], ['Lemongrass and Garlic Stuffed Tofu', 'https://olivesfordinner.com/2011/08/lemongrass-and-garlic-stuffed-tofu.html', 'https://olivesfordinner.com/wp-content/uploads/2011/08/5995896848_e75fd561f1-320x320.jpg'], ['Minted Pea Soup with Cashew Cream and Basil Oil', 'https://olivesfordinner.com/2011/07/minted-pea-soup-with-cashew-cream-and.html', 'https://olivesfordinner.com/wp-content/uploads/2011/07/5901979517_8694056f1a_z-320x320.jpg'], ['Vegan Crab Cakes with Sriracha-Vegenaise Dressing', 'https://olivesfordinner.com/2011/06/vegan-crab-cakes-with-sriracha.html', 'https://olivesfordinner.com/wp-content/uploads/2011/06/5874981554_d77f709a58_z-320x320.jpg'], ['Simple Sauteed Tofu with Crispy Sesame Kale', 'https://olivesfordinner.com/2011/05/simple-sauteed-tofu-with-crispy-sesame.html', 'https://olivesfordinner.com/wp-content/uploads/2011/05/5773981218_9c803c3c7e_z-320x320.jpg'], ['Vegan French Onion Soup', 'https://olivesfordinner.com/2011/05/vegan-french-onion-soup.html', 
'https://olivesfordinner.com/wp-content/uploads/2011/05/5750417424_685a60cc8c_z-320x320.jpg'], ['Lemongrass Tofu with Satay Sauce', 'https://olivesfordinner.com/2011/05/lemongrass-tofu-with-satay-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2011/05/5772886868_33b2b5f461_z-320x320.jpg'], ['Tempeh Reuben with Sriracha-Vegenaise Dressing', 'https://olivesfordinner.com/2011/04/tempeh-reuben-with-sriracha-vegenaise.html', 'https://olivesfordinner.com/wp-content/uploads/2011/04/5631338611_eb6d6dc5d0-320x320.jpg'], ['Vegan Clam Chowder', 'https://olivesfordinner.com/2011/04/vegan-clam-chowder.html', 'https://olivesfordinner.com/wp-content/uploads/2011/04/5610900417_d191a862c1-320x320.jpg'], ['Spinach, Mushroom and Soysage Tart', 'https://olivesfordinner.com/2011/04/spinach-mushroom-and-soysage-tar.html', 'https://olivesfordinner.com/wp-content/uploads/2011/04/5586726243_1c4cf386f6-320x320.jpg'], ['Raw Kale with Tofu Croutons and Pomegranate-Sesame Dressing', 'https://olivesfordinner.com/2011/03/raw-kale-with-tofu-croutons-and.html', 'https://olivesfordinner.com/wp-content/uploads/2011/03/8196256924_5a9c7eeaf4_z-320x320.jpg'], ['Chickpea Salad Sandwich', 'https://olivesfordinner.com/2011/04/chickpea-salad-sandwic.html', 'https://olivesfordinner.com/wp-content/uploads/2011/04/5587319628_af0b053223-320x320.jpg'], ['Raw Kale with Tofu Croutons and Pomegranate-Sesame Dressing', 'https://olivesfordinner.com/2011/03/raw-kale-with-tofu-croutons-and.html', 'https://olivesfordinner.com/wp-content/uploads/2011/03/8196256924_5a9c7eeaf4_z-320x320.jpg'], ['Minted Quinoa Spring Rolls with Toasted Cashews and Tahini', 'https://olivesfordinner.com/2011/03/minted-quinoa-spring-rolls-with-toasted.html', 'https://olivesfordinner.com/wp-content/uploads/2011/03/5507119039_254cb9c4dc-320x320.jpg'], ['Roasted Garlic Soup with Carmelized Shallots and Sage Croutons', 'https://olivesfordinner.com/2011/02/roasted-garlic-soup-with-carmelized.html', 
'https://olivesfordinner.com/wp-content/uploads/2011/02/5480624870_19cda005b7-320x320.jpg'], ['Rosemary Soysage, Spinach and Mushroom with Farfalle', 'https://olivesfordinner.com/2011/02/rosemary-soysage-spinach-and-mushroo.html', 'https://olivesfordinner.com/wp-content/uploads/2011/02/5462958033_6aae203183-320x320.jpg'], ['Vegan Tom Kha Gai', 'https://olivesfordinner.com/2011/02/vegan-tom-kha-gai.html', 'https://olivesfordinner.com/wp-content/uploads/2011/02/5439733635_127c9ba730-320x320.jpg'], ['Field Roast, Oyster Mushrooms and Tiny Potatoes Over Polenta', 'https://olivesfordinner.com/2011/02/field-roast-oyster-mushrooms-and-tiny.html', 'https://olivesfordinner.com/wp-content/uploads/2011/02/5403091742_e1b485701c_z-320x320.jpg'], ['Balsamic-Agave Tofu with Beet Infused Couscous', 'https://olivesfordinner.com/2011/01/balsamic-agave-tofu-with-beet-infused.html', 'https://olivesfordinner.com/wp-content/uploads/2011/01/5399578900_9e450f412b-320x320.jpg'], ['Vcon Chickpea Noodle Soup', 'https://olivesfordinner.com/2011/01/vcon-chickpea-noodle-soup.html', 'https://olivesfordinner.com/wp-content/uploads/2011/01/5383052832_f75d36fd4f-320x320.jpg'], ['Vcon Chickpea Cutlets with Mustard Roasted Potatoes', 'https://olivesfordinner.com/2011/01/vcon-chickpea-cutlets-with-mustard.html', 'https://olivesfordinner.com/wp-content/uploads/2011/01/5380883994_541579e3b8-320x320.jpg'], ['Falafel with Vegenaise Tahini', 'https://olivesfordinner.com/2011/01/falafe.html', 'https://olivesfordinner.com/wp-content/uploads/2011/01/5371628748_227f845e97-320x320.jpg'], ['General Tso’s Tofu', 'https://olivesfordinner.com/2011/01/general-tsos-tofu.html', 'https://olivesfordinner.com/wp-content/uploads/2011/01/5362522702_3323f1f486-320x320.jpg'], ['Buttermilk-Battered + Air-Fried Tofu', 'https://olivesfordinner.com/2017/02/buttermilk-battered-air-fried-tofu.html', 'https://olivesfordinner.com/wp-content/uploads/2017/01/Buttermilk-Battered-and-Air-Fried-Tofu_cover-1-320x320.jpg'], ['Pasta with a 
Garlic, Butter and White Wine Sauce', 'https://olivesfordinner.com/2021/02/pasta-with-a-garlic-butter-and-white-wine-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2021/02/pasta-in-garlic-butter-white-wine-sauce-320x320.jpg'], ['Viral Tortilla Wrap Hack!', 'https://olivesfordinner.com/2021/01/viral-tortilla-wrap-hack.html', 'https://olivesfordinner.com/wp-content/uploads/2021/01/viral-wrap-hack-cover-320x320.jpg'], ['Vegan Mapo Tofu (Braised Tofu in a Spicy+Savory Sauce)', 'https://olivesfordinner.com/2021/01/vegan-mapo-tofu-braised-tofu-in-a-spicysavory-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2021/01/Vegan-Mapo-Tofu-cover-320x320.jpg'], ['Thai-Style Coconut + Crabless Soup', 'https://olivesfordinner.com/2021/01/thai-style-coconut-crabless-soup.html', 'https://olivesfordinner.com/wp-content/uploads/2021/01/Thai-Style-Coconut-Crabless-Soup-cover-320x320.jpg'], ['French Onion and Seitan Ramen', 'https://olivesfordinner.com/2021/01/french-onion-and-seitan-ramen.html', 'https://olivesfordinner.com/wp-content/uploads/2021/01/vegan-ramen-cover-shot-320x320.jpg'], ['Lobster Mushroom Tempura, Avocado and Kimchi+Mayo Sushi Rolls', 'https://olivesfordinner.com/2020/12/lobster-mushroom-tempura-avocado-and-kimchimayo-sushi-rolls.html', 'https://olivesfordinner.com/wp-content/uploads/2020/12/Lobster-Mushroom-Tempura-Sushi-Rolls-cover-dec-320x320.jpg'], ['Easy Focaccia', 'https://olivesfordinner.com/2020/12/easy-focaccia.html', 'https://olivesfordinner.com/wp-content/uploads/2020/12/focaccia-cover-1-320x320.jpg'], ['How to Make Homemade Ravioli', 'https://olivesfordinner.com/2020/11/how-to-make-homemade-ravioli.html', 'https://olivesfordinner.com/wp-content/uploads/2020/11/How-to-Make-Homemade-Ravioli-320x320.jpg'], ['Bang Bang Cauliflower Tacos', 'https://olivesfordinner.com/2020/09/bang-bang-cauliflower-tacos.html', 'https://olivesfordinner.com/wp-content/uploads/2020/08/Bang-Bang-Taco-cover-320x320.jpg'], ['Sweet Potato, Barley + Bean 
Burgers with Sloppy Sauce', 'https://olivesfordinner.com/2020/07/sweet-potato-barley-bean-burgers-with-sloppy-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2020/07/Sweet-Potato-Barley-Bean-Burgers-with-sloppy-Sauce-cover-320x320.jpg'], ['Crispy Filet No Fish', 'https://olivesfordinner.com/2020/06/crispy-filet-no-fish.html', 'https://olivesfordinner.com/wp-content/uploads/2020/06/Filet-No-Fish-cover-2-320x320.jpg'], ['Sweet Potato and Kidney Bean Smothered Burritos', 'https://olivesfordinner.com/2020/05/sweet-potato-and-kidney-bean-smothered-burritos.html', 'https://olivesfordinner.com/wp-content/uploads/2020/05/Smothered-Burrito-cover-320x320.jpg'], ['Vegan Gingerbread Latte, https://www.pickuplimes.com/recipe/vegan-gingerbread-latte-42, https://cdn.pickuplimes.com/cache/cd/1b/cd1b630dfbd8c70547f1703f094aa5e5.jpg'], ['Cinnamon Coconut Cream Latte, https://www.pickuplimes.com/recipe/cinnamon-coconut-cream-latte-40, https://cdn.pickuplimes.com/cache/f3/61/f36125862947d51c7fda813b838ac905.jpg'], ['Refreshing Melon Cucumber Soda, https://www.pickuplimes.com/recipe/refreshing-melon-cucumber-soda-21, https://cdn.pickuplimes.com/cache/90/7b/907b86bf8924151801e4eba06ce4d002.jpg'], ['Peach Thyme Iced Tea, https://www.pickuplimes.com/recipe/peach-thyme-iced-tea-13, https://cdn.pickuplimes.com/cache/99/86/9986da7cbe0970cfbb7d6f4f989138fd.jpg'], ['Pink Lemonade, https://www.pickuplimes.com/recipe/pink-lemonade-24, https://cdn.pickuplimes.com/cache/93/2c/932c518862f8d5081db87bd4af27296a.jpg'], ['Grapefruit Rosemary Spritzer, https://www.pickuplimes.com/recipe/grapefruit-rosemary-spritzer-12, https://cdn.pickuplimes.com/cache/59/29/5929176f62ba9c11c9535ec41451d032.jpg'], ['Beetroot Latte, https://www.pickuplimes.com/recipe/beetroot-latte-105, https://cdn.pickuplimes.com/cache/e0/43/e0438bfc0583e1f307cc526942bd35a7.jpg'], ['Golden Milk, https://www.pickuplimes.com/recipe/golden-milk-104, 
https://cdn.pickuplimes.com/cache/2c/23/2c2366af7169773c9f1f11a1349b003c.jpg'], ['Peanut Butter Caramel Latte, https://www.pickuplimes.com/recipe/peanut-butter-caramel-latte-103, https://cdn.pickuplimes.com/cache/13/06/1306afd34ab76f363db8ea3e68df9814.jpg'], ['Mulled Spiced Apple Cider, https://www.pickuplimes.com/recipe/mulled-spiced-apple-cider-102, https://cdn.pickuplimes.com/cache/2d/7c/2d7c6dcc13a4b6c17b4713863b17e376.jpg'], ['Chai Hot Chocolate, https://www.pickuplimes.com/recipe/chai-hot-chocolate-101, https://cdn.pickuplimes.com/cache/3d/9c/3d9cae8cd5c4662bdca50f3c5717b76e.jpg'], ['Refreshing Ginger Mint Lemonade, https://www.pickuplimes.com/recipe/refreshing-ginger-mint-lemonade-151, https://cdn.pickuplimes.com/cache/47/2f/472f5af93bb847ab97af82741692f102.jpg'], ['Golden Vegan Hot Chocolate, https://www.pickuplimes.com/recipe/golden-vegan-hot-chocolate-179, https://cdn.pickuplimes.com/cache/a3/b0/a3b01ee32454747585a45a30d2dac952.jpg'], ['Creamy & Rich London Fog, https://www.pickuplimes.com/recipe/creamy-rich-london-fog-183, https://cdn.pickuplimes.com/cache/6f/54/6f54574bbaa67f93ae89aa8ac5c5b905.jpg'], ['Coconut Matcha Latte, https://www.pickuplimes.com/recipe/coconut-matcha-latte-235, https://cdn.pickuplimes.com/cache/bd/7e/bd7e40e9f2a6ceb48bb31cec03873c0c.jpg'], ['Tropical Pineapple Green Juice, https://www.pickuplimes.com/recipe/tropical-pineapple-green-juice-253, https://cdn.pickuplimes.com/cache/e2/ff/e2ffe8d6959dde160cd3259e908849fd.jpg'], ['Glowing Green Juice, https://www.pickuplimes.com/recipe/glowing-green-juice-256, https://cdn.pickuplimes.com/cache/2c/bb/2cbbd3c93e53fd756e7d576c7bd55a9d.jpg'], ['Blood Red Green Juice, https://www.pickuplimes.com/recipe/blood-red-green-juice-287, https://cdn.pickuplimes.com/cache/6b/1c/6b1c46977e8bf4890f9d946c74de361e.jpg'], ['Carrot Cantaloupe Juice, https://www.pickuplimes.com/recipe/carrot-cantaloupe-juice-27, https://cdn.pickuplimes.com/cache/6e/ea/6eea8b49a7294cb6bcf6b6ef774ac7ae.jpg'], ['Homemade 
Kombucha: Continuous Brew, https://www.pickuplimes.com/recipe/homemade-kombucha-continuous-brew-29, https://cdn.pickuplimes.com/cache/2f/a9/2fa9e5d7ad0aa24076bbc2d30392ea2a.jpg'], ['Raw Berry Tarts for Valentine’s Day', 'https://olivesfordinner.com/2014/02/raw-berry-tarts-for-valentines-day.html', 'https://olivesfordinner.com/wp-content/uploads/2014/02/12162219205_39e8e1cc35_c-320x320.jpg'], ['Warmed Turmeric-Ginger Cashew Milk', 'https://olivesfordinner.com/2014/01/warmed-turmeric-ginger-cashew-milk.html', 'https://olivesfordinner.com/wp-content/uploads/2014/01/AA4A8035-320x320.jpg'], ['Cherry and Blackberry Skillet Cobbler', 'https://olivesfordinner.com/2013/12/cherry-and-blackberry-skillet-cobbler.html', 'https://olivesfordinner.com/wp-content/uploads/2013/12/AA4A7024-320x320.jpg'], ['Whisky-Sriracha Caramel Corn', 'https://olivesfordinner.com/2013/12/whisky-sriracha-caramel-corn.html', 'https://olivesfordinner.com/wp-content/uploads/2013/12/AA4A6710-320x320.jpg'], ['Vegan Halloween: Eyeball Ravioli', 'https://olivesfordinner.com/2013/10/vegan-halloween-eyeball-ravioli.html', 'https://olivesfordinner.com/wp-content/uploads/2013/10/10106060445_9cd0c74bac_o-320x320.jpg'], ['Basic Cashew Cream', 'https://olivesfordinner.com/2013/09/basic-cashew-crea.html', 'https://olivesfordinner.com/wp-content/uploads/2013/09/cashew-straighter-320x320.jpg'], ['German Chocolate Cake Frosting Truffles', 'https://olivesfordinner.com/2013/06/german-chocolate-cake-frosting-truffles.html', 'https://olivesfordinner.com/wp-content/uploads/2013/06/9118577981_4121eab42b_z-320x320.jpg'], ['Practically Raw Desserts: Review and Sample Recipe', 'https://olivesfordinner.com/2013/06/practically-raw-desserts-review-and.html', 'https://olivesfordinner.com/wp-content/uploads/2013/06/8915055884_18c9684789_z-320x320.jpg'], ['Raw Chocolate Cups with Blueberries and Cashews', 'https://olivesfordinner.com/2013/02/raw-chocolate-cups-with-blueberries-and.html', 
'https://olivesfordinner.com/wp-content/uploads/2013/02/8510146072_0698990f0c_z-320x320.jpg'], ['Sesame Roasted Cauliflower with Sriracha-Vegenaise Dressing', 'https://olivesfordinner.com/2013/05/sesame-roasted-cauliflower-wi.html', 'https://olivesfordinner.com/wp-content/uploads/2013/05/8732183749_75abe9d9d4_z-320x320.jpg'], ['Whisky-Sriracha Candy', 'https://olivesfordinner.com/2013/02/whisky-sriracha-candy.html', 'https://olivesfordinner.com/wp-content/uploads/2013/02/8442805202_95912e4193_z-320x320.jpg'], ['Chocolate-Covered Peanut Brittle with Habanero', 'https://olivesfordinner.com/2012/12/chocolate-covered-peanut-brittle-wi.html', 'https://olivesfordinner.com/wp-content/uploads/2012/12/8275644135_100da263b7_z-320x320.jpg'], ['Coconut, Pomegranate and Lime Kanten', 'https://olivesfordinner.com/2012/12/coconut-pomegranate-and-lime-kanten.html', 'https://olivesfordinner.com/wp-content/uploads/2012/12/8238121569_601c854c11_z-320x320.jpg'], ['Leftover Pumpkin Pie Ravioli Spheres', 'https://olivesfordinner.com/2012/11/leftover-pumpkin-pie-ravioli-spheres.html', 'https://olivesfordinner.com/wp-content/uploads/2012/11/8216881065_d9eecd1a36_z-320x320.jpg'], ['Peanut Butter and Sambal Wontons with Chocolate Sauce', 'https://olivesfordinner.com/2012/10/peanut-butter-and-sambal-wontons-wi.html', 'https://olivesfordinner.com/wp-content/uploads/2012/10/8131856981_67fc7216e9_z-320x320.jpg'], ['Raw Tahini Butter Chocolate Cups', 'https://olivesfordinner.com/2012/10/raw-tahini-butter-chocolate-cups.html', 'https://olivesfordinner.com/wp-content/uploads/2012/10/8108454922_98596c23bf_z-320x320.jpg'], ['Clementine, Basil and Lime Sorbet', 'https://olivesfordinner.com/2012/09/clementine-basil-and-lime-sorbe.html', 'https://olivesfordinner.com/wp-content/uploads/2012/09/7975866571_2279b2ae96-320x320.jpg'], ['Lychee-Vanilla Coconut Cooler', 'https://olivesfordinner.com/2012/08/lychee-vanilla-coconut-cooler.html', 
'https://olivesfordinner.com/wp-content/uploads/2012/08/7817622504_37ce8b9b6d_z-320x320.jpg'], ['Deconstructed Taro Root Bubble Tea', 'https://olivesfordinner.com/2012/08/deconstructed-taro-root-bubble-tea.html', 'https://olivesfordinner.com/wp-content/uploads/2012/08/7761936724_2427ff3c14_z-320x320.jpg'], ['Product Review: The Vegg (and a Vegan Creme Brulee)', 'https://olivesfordinner.com/2012/07/product-review-vegg-and-vegan-creme.html', 'https://olivesfordinner.com/wp-content/uploads/2012/07/IMG_8781orig-320x320.jpg'], ['Spiced Taro Root Wontons with Salted Coconut Cream', 'https://olivesfordinner.com/2012/06/spiced-taro-root-wontons-with-salted.html', 'https://olivesfordinner.com/wp-content/uploads/2012/06/7434752192_9ecca0e475-320x320.jpg'], ['Chocolate-Cardamom Chia Pudding', 'https://olivesfordinner.com/2012/06/chocolate-cardamom-chia-pudding.html', 'https://olivesfordinner.com/wp-content/uploads/2012/06/7328945478_941bde9d08-320x320.jpg'], ['Farro + Beet Burgers with Kimchi Mayo', 'https://olivesfordinner.com/2018/09/farro-beet-burgers-with-kimchi-mayo.html', 'https://olivesfordinner.com/wp-content/uploads/2018/09/Farro-Beet-Burgers-13236-320x320.jpg'], ['Vegan Panna Cotta with Brown Sugar Sauce', 'https://olivesfordinner.com/2012/06/vegan-panna-cotta-with-brown-sugar.html', 'https://olivesfordinner.com/wp-content/uploads/2012/06/7368386934_1a206dee6c-320x320.jpg'], ['Tea-Smoked Lychees from Herbivoracious', 'https://olivesfordinner.com/2012/06/tea-smoked-lychees-from-herbivoracious.html', 'https://olivesfordinner.com/wp-content/uploads/2012/06/7159216503_47fac83987-320x320.jpg'], ['Sweet Basil and Lemongrass Tea', 'https://olivesfordinner.com/2012/04/sweet-basil-and-lemongrass-tea.html', 'https://olivesfordinner.com/wp-content/uploads/2012/04/7103669783_d7a0ac9aea-315x320.jpg'], ['Cardamom-Pistachio Coconut Macaroons', 'https://olivesfordinner.com/2012/03/cardamom-pistachio-coconut-macaroons.html', 
'https://olivesfordinner.com/wp-content/uploads/2012/03/6869304208_fd4f13b25e_z-320x320.jpg'], ['Basil, Kumquat and Blueberry Canapés', 'https://olivesfordinner.com/2012/01/basil-kumquat-and-blueberry-canapes.html', 'https://olivesfordinner.com/wp-content/uploads/2012/01/6743821965_44199385a0_z-320x320.jpg'], ['Chocolate-Sriracha Shortbread Cookies', 'https://olivesfordinner.com/2012/01/chocolate-sriracha-shortbread-cookies.html', 'https://olivesfordinner.com/wp-content/uploads/2012/01/6662090307_3009516266_z-320x320.jpg'], ['Chia Chai Tea', 'https://olivesfordinner.com/2011/12/chia-chai-tea.html', 'https://olivesfordinner.com/wp-content/uploads/2011/12/6504614837_fa0eab46da_z-320x320.jpg'], ['Spiced and Sugared Cranberries', 'https://olivesfordinner.com/2011/11/spiced-and-sugared-cranberries.html', 'https://olivesfordinner.com/wp-content/uploads/2011/11/6411419331_630f217898_z-320x320.jpg'], ['Fresh Sage Tea', 'https://olivesfordinner.com/2011/11/fresh-sage-tea.html', 'https://olivesfordinner.com/wp-content/uploads/2011/11/6349316090_2411a5c48d_z-320x320.jpg'], ['Vegan Doughnuts with Cardamom-Pistachio Glaze', 'https://olivesfordinner.com/2011/10/vegan-doughnuts-with-cardamom-pistachio.html', 'https://olivesfordinner.com/wp-content/uploads/2011/10/6292640811_47701a0972-320x320.jpg'], ['Star Anise and Cinnamon Tea', 'https://olivesfordinner.com/2011/10/star-anise-and-cinnamontea.html', 'https://olivesfordinner.com/wp-content/uploads/2011/10/6251510748_e2f58dbd22-320x320.jpg'], ['Whisky Butterscotch Pudding', 'https://olivesfordinner.com/2011/10/whisky-butterscotch-pudding.html', 'https://olivesfordinner.com/wp-content/uploads/2011/10/6247118308_588fb781a1-320x320.jpg'], ['Vegan Blizzard with Salted Cashew-Caramel Sauce', 'https://olivesfordinner.com/2020/07/vegan-blizzard-with-salted-cashew-caramel-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2020/03/Vegan-Blizzard-cover-1-320x320.jpg'], ['Homegrown Organic Farms Freeze-Dried Fruit', 
'https://olivesfordinner.com/2017/10/homegrown-organic-farms-freeze-dried-fruit.html', 'https://olivesfordinner.com/wp-content/uploads/2017/09/Homegrown-Organic-Farms-Freeze-Dried-Fruit-320x320.jpg'], ['Mixed Berry Compote + Creamy Chia Pudding', 'https://olivesfordinner.com/2017/07/mixed-berry-compote-creamy-chia-pudding.html', 'https://olivesfordinner.com/wp-content/uploads/2017/07/Mixed-Berry-and-Chia-Pudding-cover-320x320.jpg'], ['Lemon-Saffron Custard Tarts', 'https://olivesfordinner.com/2017/02/lemon-saffron-custard-tarts.html', 'https://olivesfordinner.com/wp-content/uploads/2017/02/Lemon-Saffron-Custard-Tart-cover-320x320.jpg'], ['Vegan Macarons', 'https://olivesfordinner.com/2015/09/vegan-macarons.html', 'https://olivesfordinner.com/wp-content/uploads/2015/09/21228263699_ea188c43aa_z-320x320.jpg'], ['Vegan S’mores', 'https://olivesfordinner.com/2015/05/vegan-smores.html', 'https://olivesfordinner.com/wp-content/uploads/2015/05/18010657515_6426d8ef34_z-320x320.jpg'], ['Easy Whiskey-Sriracha Caramel Sauce', 'https://olivesfordinner.com/2014/06/easy-whiskey-sriracha-caramel-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2014/06/wssauce-320x320.jpg'], ['Lotus Root Tempura', 'https://olivesfordinner.com/2012/05/lotus-root-tempura.html', 'https://olivesfordinner.com/wp-content/uploads/2012/05/7190707114_e694432351-320x320.jpg'], ['Vegan Chick’n and Waffles with Sriracha-Maple Syrup', 'https://olivesfordinner.com/2015/06/vegan-chickn-and-waffles-with-sriracha.html', 'https://olivesfordinner.com/wp-content/uploads/2015/06/AA4A0618-320x320.jpg'], ['Carrot Lox', 'https://olivesfordinner.com/2015/03/carrot-lox.html', 'https://olivesfordinner.com/wp-content/uploads/2015/03/16549307059_ba959fc04a_z-320x320.jpg'], ['Vegan Brunch\xa0: Chocolate Beer Waffles', 'https://olivesfordinner.com/2014/09/vegan-brunch-chocolate-beer-waffles.html', 'https://olivesfordinner.com/wp-content/uploads/2014/09/15192832479_563b57fc3f_c-320x320.jpg'], ['Vegan Finger Foods\xa0: 
Kimchi-Stuffed Sausages', 'https://olivesfordinner.com/2014/09/vegan-finger-foods-kimchi-stuffed.html', 'https://olivesfordinner.com/wp-content/uploads/2014/09/15166329550_2500c99f85_c-320x320.jpg'], ['Rosewater Pistachio Quinoa + 10 reasons I love Redondo Beach', 'https://olivesfordinner.com/2014/07/rosewater-pistachio-quinoa-10-reasons-i.html', 'https://olivesfordinner.com/wp-content/uploads/2014/07/14711296855_ce1d55979d_z-320x320.jpg'], ['Sweet Potato and Rosemary Beignets', 'https://olivesfordinner.com/2014/04/sweet-potato-and-rosemary-beignets.html', 'https://olivesfordinner.com/wp-content/uploads/2014/04/12071009784_527d4b2407_c-320x320.jpg'], ['Thyme-Scented Pearled Barley with Brown Beech Mushrooms', 'https://olivesfordinner.com/2014/03/thyme-scented-pearled-barley-with-brown.html', 'https://olivesfordinner.com/wp-content/uploads/2014/03/10910925933_247240e78f_c-320x320.jpg'], ['Perfect Hash Browns', 'https://olivesfordinner.com/2013/09/perfect-hash-browns.html', 'https://olivesfordinner.com/wp-content/uploads/2013/09/9578876774_e0382952ee_z-320x320.jpg'], ['Hearts of Palm Crab Cakes', 'https://olivesfordinner.com/2013/09/hearts-of-palm-crab-cakes.html', 'https://olivesfordinner.com/wp-content/uploads/2013/09/9489200230_b25a587ca4_z-320x320.jpg'], ['Farro Crab Cakes', 'https://olivesfordinner.com/2013/08/farro-crab-cakes.html', 'https://olivesfordinner.com/wp-content/uploads/2013/08/9480782318_360b947023_z-320x320.jpg'], ['Shiitake Bacon', 'https://olivesfordinner.com/2013/03/shiitake-bacon.html', 'https://olivesfordinner.com/wp-content/uploads/2013/03/8563120276_d9c5551aa3_z-320x320.jpg'], ['Raw Almond Milk', 'https://olivesfordinner.com/2011/05/raw-almond-milk.html', 'https://olivesfordinner.com/wp-content/uploads/2011/05/5673752446_806eca0da3-320x320.jpg'], ['Vegan Sawmill Gravy and Biscuits with TVP-Shiitake Hash', 'https://olivesfordinner.com/2013/02/vegan-sawmill-gravy-and-biscuits-wi.html', 
'https://olivesfordinner.com/wp-content/uploads/2013/02/8458736025_2192b22a4b_z-320x320.jpg'], ['Coconut-Chickpea Crepes with Smoky Herbed Mushrooms', 'https://olivesfordinner.com/2013/02/coconut-chickpea-crepes-with-smoky.html', 'https://olivesfordinner.com/wp-content/uploads/2013/02/8455834235_7bfba45015_z-320x320.jpg'], ['Horseradish Potato Pancakes with Raw Apple Salad', 'https://olivesfordinner.com/2012/12/horseradish-potato-pancakes-with-raw.html', 'https://olivesfordinner.com/wp-content/uploads/2012/12/8297466105_76cfa32ffe_z-320x320.jpg'], ['Balsamic Roasted Figs and Shallots with Herbed Socca', 'https://olivesfordinner.com/2012/10/balsamic-roasted-figs-and-shallots-wi.html', 'https://olivesfordinner.com/wp-content/uploads/2012/10/8060623919_9efa9ea90f_z-320x320.jpg'], ['Zucchini and Lemongrass Fritters', 'https://olivesfordinner.com/2012/09/zucchini-and-lemongrass-fritters.html', 'https://olivesfordinner.com/wp-content/uploads/2012/09/7992669464_39f87b7c9e_z-320x320.jpg'], ['Savory Corn Pancakes with IKEA Vegan Caviar', 'https://olivesfordinner.com/2012/08/savory-corn-pancakes-with-ikea-vegan.html', 'https://olivesfordinner.com/wp-content/uploads/2012/08/7808905436_5fb2b5f1a8_z-320x320.jpg'], ['Vegan Sausages with Sriracha and Five-Spice Powder', 'https://olivesfordinner.com/2012/08/vegan-sausages-with-sriracha-and-five.html', 'https://olivesfordinner.com/wp-content/uploads/2012/08/7719591422_63c5625fdf_z-320x320.jpg'], ['Creme Brulee French Toast', 'https://olivesfordinner.com/2012/07/creme-brulee-french-toas.html', 'https://olivesfordinner.com/wp-content/uploads/2012/07/7615392824_c211a3028a-320x320.jpg'], ['Chocolate-Cardamom Chia Pudding', 'https://olivesfordinner.com/2012/06/chocolate-cardamom-chia-pudding.html', 'https://olivesfordinner.com/wp-content/uploads/2012/06/7328945478_941bde9d08-320x320.jpg'], ['Tea-Smoked Lychees from Herbivoracious', 'https://olivesfordinner.com/2012/06/tea-smoked-lychees-from-herbivoracious.html', 
'https://olivesfordinner.com/wp-content/uploads/2012/06/7159216503_47fac83987-320x320.jpg'], ['Ricotta-Stuffed French Toast with Salted Butterscotch Sauce', 'https://olivesfordinner.com/2012/05/ricotta-stuffed-french-toast-wi.html', 'https://olivesfordinner.com/wp-content/uploads/2012/05/7149109439_1ff5676ee4-320x320.jpg'], ['Breakfast Quinoa with Toasted Coconut and Pistachios', 'https://olivesfordinner.com/2012/02/breakfast-quinoa-with-toasted-coconu.html', 'https://olivesfordinner.com/wp-content/uploads/2012/02/6905234549_f96d28c267_z-320x320.jpg'], ['Field Roast, Oyster Mushrooms and Tiny Potatoes Over Polenta', 'https://olivesfordinner.com/2011/02/field-roast-oyster-mushrooms-and-tiny.html', 'https://olivesfordinner.com/wp-content/uploads/2011/02/5403091742_e1b485701c_z-320x320.jpg'], ['Celery Root Hashbrowns with Basil Cream', 'https://olivesfordinner.com/2012/01/celery-root-hashbrowns-with-basil-crea.html', 'https://olivesfordinner.com/wp-content/uploads/2012/01/6608971293_f8f9c59ed7_z-320x320.jpg'], ['Easy Overnight Crockpot Oatmeal', 'https://olivesfordinner.com/2011/12/easy-overnight-crockpot-oatmea.html', 'https://olivesfordinner.com/wp-content/uploads/2011/12/6454925599_1a5f5946fa_z-320x320.jpg'], ['Tofu Scramble with Seared Shiitake and Caramelized Shallots', 'https://olivesfordinner.com/2011/11/tofu-scramble-with-seared-shiitake-and.html', 'https://olivesfordinner.com/wp-content/uploads/2011/11/6318603030_17d169c762_z-320x320.jpg'], ['Homemade Bagels with Tofutti, Capers and Caramelized Shallots', 'https://olivesfordinner.com/2011/10/homemade-bagels-with-toffuti-capers-and.html', 'https://olivesfordinner.com/wp-content/uploads/2011/10/6274808782_a135c76540-320x320.jpg'], ['Vegan Okonomiyaki', 'https://olivesfordinner.com/2011/10/vegan-okonomiyaki.html', 'https://olivesfordinner.com/wp-content/uploads/2011/10/6232687596_2b04238a86-320x320.jpg'], ['Spinach and Olive Soccattata', 'https://olivesfordinner.com/2011/09/spinach-and-olive-soccattata.html', 
'https://olivesfordinner.com/wp-content/uploads/2011/09/6182288101_4594d4d763-320x320.jpg'], ['PPK Pumpkin Muffins with Tofutti Cream Cheese', 'https://olivesfordinner.com/2011/09/ppk-pumpkin-muffins-with-tofutti-crea.html', 'https://olivesfordinner.com/wp-content/uploads/2011/09/6117368726_aa92b1a5c9-320x320.jpg'], ['Vegan Crab Cakes with Sriracha-Vegenaise Dressing', 'https://olivesfordinner.com/2011/06/vegan-crab-cakes-with-sriracha.html', 'https://olivesfordinner.com/wp-content/uploads/2011/06/5874981554_d77f709a58_z-320x320.jpg'], ['Homemade Granola with Dried Blueberries', 'https://olivesfordinner.com/2011/06/homemade-granola-with-dried-blueberries.html', 'https://olivesfordinner.com/wp-content/uploads/2011/06/5849573505_02fb8167b9_z-320x320.jpg'], ['Raw Almond Matcha Cakes', 'https://olivesfordinner.com/2011/05/raw-almond-matcha-cakes.html', 'https://olivesfordinner.com/wp-content/uploads/2011/05/5724084935_5604d176c6_z-320x320.jpg'], ['Spinach, Mushroom and Soysage Tart', 'https://olivesfordinner.com/2011/04/spinach-mushroom-and-soysage-tar.html', 'https://olivesfordinner.com/wp-content/uploads/2011/04/5586726243_1c4cf386f6-320x320.jpg'], ['Tofutti, Capers and Red Onion Beggars Purses', 'https://olivesfordinner.com/2011/02/tofutti-capers-and-red-onion-beggars.html', 'https://olivesfordinner.com/wp-content/uploads/2011/02/5462974450_d97bb81d5e-320x320.jpg'], ['Beer-Battered Hearts of Palm with Dill Slaw and Quick Pickles', 'https://olivesfordinner.com/2015/02/beer-battered-hearts-of-palm-with-di.html', 'https://olivesfordinner.com/wp-content/uploads/2015/02/HOPcover-320x320.jpg'], ['Vegan Sausage Wafflewich', 'https://olivesfordinner.com/2020/09/vegan-sausage-wafflewich.html', 'https://olivesfordinner.com/wp-content/uploads/2020/09/Vegan-Sausage-Wafflewich-cv-cover-320x320.jpg'], ['Homemade Coconut Yogurt', 'https://olivesfordinner.com/2020/04/homemade-coconut-yogurt.html', 
'https://olivesfordinner.com/wp-content/uploads/2020/04/Coconut-Yogurt-cover-320x320.jpg'], ['Vegan Lox', 'https://olivesfordinner.com/2019/08/vegan-lox.html', 'https://olivesfordinner.com/wp-content/uploads/2019/08/Carrot-Lox-cover-320x320.jpg'], ['Homegrown Organic Farms Freeze-Dried Fruit', 'https://olivesfordinner.com/2017/10/homegrown-organic-farms-freeze-dried-fruit.html', 'https://olivesfordinner.com/wp-content/uploads/2017/09/Homegrown-Organic-Farms-Freeze-Dried-Fruit-320x320.jpg'], ['Mixed Berry Compote + Creamy Chia Pudding', 'https://olivesfordinner.com/2017/07/mixed-berry-compote-creamy-chia-pudding.html', 'https://olivesfordinner.com/wp-content/uploads/2017/07/Mixed-Berry-and-Chia-Pudding-cover-320x320.jpg'], ['Japanese-Style Breakfast Bowl', 'https://olivesfordinner.com/2017/02/japanese-style-breakfast-bowl.html', 'https://olivesfordinner.com/wp-content/uploads/2017/02/Japanese-Style-Breakfast-Bowl-cover-320x320.jpg'], ['Savory + Crispy Vietnamese Crepes', 'https://olivesfordinner.com/2016/10/savory-crispy-vietnamese-crepes.html', 'https://olivesfordinner.com/wp-content/uploads/2016/10/Banh-Xeo-cover-320x320.jpg'], ['Vegan Bacon, Egg and Cheese Biscuit', 'https://olivesfordinner.com/2015/10/vegan-bacon-egg-and-cheese-biscui.html', 'https://olivesfordinner.com/wp-content/uploads/2015/10/BECcover2-320x320.jpg'], ['Loaded Baked Potato Hash Brown Waffles', 'https://olivesfordinner.com/2015/08/loaded-baked-potato-hash-brown-waffles.html', 'https://olivesfordinner.com/wp-content/uploads/2015/08/20833106406_f5a71e8a4b_z-320x320.jpg'], ['Bang Bang Cauliflower', 'https://olivesfordinner.com/2016/05/bang-bang-cauliflower.html', 'https://olivesfordinner.com/wp-content/uploads/2016/05/Bang-Bang-Cauliflower-320x320.jpg'], ['Farro Sausages', 'https://olivesfordinner.com/2016/11/farro-sausages.html', 'https://olivesfordinner.com/wp-content/uploads/2016/11/Farro-Sausage-320x320.jpg'], ['Pressed Maitake Buns with Gochujang-Hoisin Glaze', 
'https://olivesfordinner.com/2016/09/pressed-maitake-buns-with-gochujang-hoisin-glaze.html', 'https://olivesfordinner.com/wp-content/uploads/2016/08/Pressed-Maitake-Buns-320x320.jpg'], ['Crispy + Spicy Enoki Mushroom Roll', 'https://olivesfordinner.com/2016/07/crispy-spicy-enoki-mushroom-roll.html', 'https://olivesfordinner.com/wp-content/uploads/2016/07/28327021802_8fcd205138_z-320x320.jpg'], ['Mâche and Mint Salad with Buttermilk-Ponzu Dressing', 'https://olivesfordinner.com/2016/06/mache-and-mint-salad-with-buttermilk-ponzu-dressing.html', 'https://olivesfordinner.com/wp-content/uploads/2016/05/mache-cover-320x320.jpg'], ['Crispy Vegan Shrimp Toast', 'https://olivesfordinner.com/2016/06/crispy-vegan-shrimp-toast.html', 'https://olivesfordinner.com/wp-content/uploads/2016/06/shrimptoastcover-320x320.jpg'], ['Broccoli Tots + Quick Curry Ketchup', 'https://olivesfordinner.com/2016/06/broccoli-tots-quick-curry-ketchup.html', 'https://olivesfordinner.com/wp-content/uploads/2016/06/broccolitotscover-320x320.jpg'], ['Creamy Poblano + Cilantro Sauce', 'https://olivesfordinner.com/2016/06/creamy-poblano-cilantro-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2016/06/poblano-cover-1-320x320.jpg'], ['Savory + Seared Watermelon', 'https://olivesfordinner.com/2016/06/savoryseared-watermelon.html', 'https://olivesfordinner.com/wp-content/uploads/2016/05/watermelon-cover-320x320.jpg'], ['Bang Bang Cauliflower', 'https://olivesfordinner.com/2016/05/bang-bang-cauliflower.html', 'https://olivesfordinner.com/wp-content/uploads/2016/05/Bang-Bang-Cauliflower-320x320.jpg'], ['Gochujang + Peanut Butter Broccoli', 'https://olivesfordinner.com/2016/04/gochujangpeanut-butter-broccoli.html', 'https://olivesfordinner.com/wp-content/uploads/2016/04/gochujangpbbroccoli-320x320.jpg'], ['Deep-Fried Vegan Ricotta Puffs', 'https://olivesfordinner.com/2012/02/molecular-vegan-deep-fried-ricotta.html', 
'https://olivesfordinner.com/wp-content/uploads/2012/02/6817901961_cc588ef74d_z-320x320.jpg'], ['Pasta with Seaweed-Matcha Butter and Vegan Scallops', 'https://olivesfordinner.com/2016/04/pasta-with-seaweed-matcha-butter-and-vegan-scallops.html', 'https://olivesfordinner.com/wp-content/uploads/2016/04/Pasta-with-Seaweed-Matcha-Butter-320x320.jpg'], ['Creamy + Chunky Pasta Sauce', 'https://olivesfordinner.com/2016/04/creamy-chunky-pasta-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2016/04/pastamisecover-320x320.jpg'], ['Spicy Carrot Lox and Avocado Sushi', 'https://olivesfordinner.com/2016/03/spicy-carrot-lox-and-avocado-sushi.html', 'https://olivesfordinner.com/wp-content/uploads/2016/03/Spicy-Carrot-Lox-Sushi-320x320.jpg'], ['Vegan Salsa Con Queso', 'https://olivesfordinner.com/2016/01/vegan-salsa-con-queso.html', 'https://olivesfordinner.com/wp-content/uploads/2016/01/0041wFONTS-320x320.jpg'], ['Vegan Dynamite Rolls', 'https://olivesfordinner.com/2016/01/vegan-dynamite-rolls.html', 'https://olivesfordinner.com/wp-content/uploads/2016/01/vegandynamiterollscover_0044-320x320.jpg'], ['Red Curry and Ginger Coconut Sauce', 'https://olivesfordinner.com/2015/03/red-curry-and-ginger-coconut-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2015/03/saucyfinal-320x320.jpg'], ['Gochujang Queso + Sweet Potato and Kidney Bean Quesadillas', 'https://olivesfordinner.com/2015/01/gochujang-queso-sweet-potato-and-kidney.html', 'https://olivesfordinner.com/wp-content/uploads/2015/01/gochu1-320x320.jpg'], ['Maitake Bacon', 'https://olivesfordinner.com/2015/01/maitake-bacon.html', 'https://olivesfordinner.com/wp-content/uploads/2015/01/shroom-320x320.jpg'], ['Cashew-Basil Stuffed and Beer-Battered Zucchini Blossoms', 'https://olivesfordinner.com/2014/10/cashew-basil-stuffed-and-beer-battered.html', 'https://olivesfordinner.com/wp-content/uploads/2014/10/15602855231_099f550ed8_z-320x320.jpg'], ['Panisse Bruschetta', 
'https://olivesfordinner.com/2014/06/panisse-bruschetta.html', 'https://olivesfordinner.com/wp-content/uploads/2014/06/14311183838_ac67faa97e_b-320x320.jpg'], ['How to Towel Press Tofu for Marinating', 'https://olivesfordinner.com/2014/02/how-to-towel-press-tofu-for-marinating.html', 'https://olivesfordinner.com/wp-content/uploads/2014/02/12501525913_6544d0f2f6_c-320x320.jpg'], ['Roasted Maitake Mushrooms in Sesame-Miso Broth', 'https://olivesfordinner.com/2014/02/roasted-maitake-mushrooms-in-sesame.html', 'https://olivesfordinner.com/wp-content/uploads/2014/02/11372632066_4a4decb60b_c-320x320.jpg'], ['How to Fold a Wonton Dumpling', 'https://olivesfordinner.com/2012/06/how-to-fold-wonton-dumpling.html', 'https://olivesfordinner.com/wp-content/uploads/2012/06/7381609018_078ffb781a-320x320.jpg'], ['Baked Oyster Mushrooms with Dynamite Sauce', 'https://olivesfordinner.com/2014/01/baked-oyster-mushrooms-with-dynamite.html', 'https://olivesfordinner.com/wp-content/uploads/2014/01/11915675694_4308cf083b_c-320x320.jpg'], ['15-Minute Quick and Easy Tofu', 'https://olivesfordinner.com/2014/01/15-minute-quick-and-easy-tofu.html', 'https://olivesfordinner.com/wp-content/uploads/2014/01/11918810306_4ae3a50c28-320x320.jpg'], ['Seaweed-Tofu Beignets with Jalapeño and Shikuwasa Jam Paste', 'https://olivesfordinner.com/2013/10/seaweed-tofu-beignets-with-jalapeno-and.html', 'https://olivesfordinner.com/wp-content/uploads/2013/10/10518471494_de3c97d767_c-320x320.jpg'], ['Basic Cashew Cream', 'https://olivesfordinner.com/2013/09/basic-cashew-crea.html', 'https://olivesfordinner.com/wp-content/uploads/2013/09/cashew-straighter-320x320.jpg'], ['Shiitake Nigiri', 'https://olivesfordinner.com/2013/09/shiitake-nigiri.html', 'https://olivesfordinner.com/wp-content/uploads/2013/09/eelcropped1-320x320.jpg'], ['Sriracha-Nooch Seasoning', 'https://olivesfordinner.com/2013/09/sriracha-nooch-seasoning.html', 
'https://olivesfordinner.com/wp-content/uploads/2013/09/9644767299_9bf4fc3a10_z-320x320.jpg'], ['Vegan Scallops with Pea Puree and Watercress', 'https://olivesfordinner.com/2013/09/vegan-scallops-with-pea-puree-and.html', 'https://olivesfordinner.com/wp-content/uploads/2013/09/9499486405_a84cbab602_z-320x320.jpg'], ['Cultured Cashew Cheese', 'https://olivesfordinner.com/2013/08/cultured-cashew-cheese.html', 'https://olivesfordinner.com/wp-content/uploads/2013/08/9507153235_0a9e55b33f_z-320x320.jpg'], ['Cauliflower Jerky', 'https://olivesfordinner.com/2013/08/cauliflower-jerky.html', 'https://olivesfordinner.com/wp-content/uploads/2013/08/9206570993_c1c4ec4b13_z-320x320.jpg'], ['Beer-Battered Cauliflower', 'https://olivesfordinner.com/2013/07/beer-battered-cauliflower.html', 'https://olivesfordinner.com/wp-content/uploads/2013/07/9118210040_d2ecc43c91_z-320x320.jpg'], ['Vegan Crab Rangoon', 'https://olivesfordinner.com/2013/06/vegan-crab-rangoon.html', 'https://olivesfordinner.com/wp-content/uploads/2013/06/9175256982_e66acedacb_z-320x320.jpg'], ['Everything Sauce', 'https://olivesfordinner.com/2013/06/everything-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2013/06/8526319102_48ae2fe4e5_z-320x320.jpg'], ['Vegan Sushi: Faux-Roe Gunkanmaki with Pickled Daikon', 'https://olivesfordinner.com/2012/04/vegan-sushi-faux-roe-gunkanmaki-with_16.html', 'https://olivesfordinner.com/wp-content/uploads/2012/04/6936157558_aaf4a19acb-320x320.jpg'], ['Fresh Spring Rolls with Soy Curls, Mango and Mint', 'https://olivesfordinner.com/2013/06/fresh-spring-rolls-with-soy-curls-mango.html', 'https://olivesfordinner.com/wp-content/uploads/2013/06/AA4A3748-320x320.jpg'], ['Vegan Grilled Cheese with Shiitake Bacon and Tomato', 'https://olivesfordinner.com/2013/04/vegan-grilled-cheese-with-shiitake.html', 'https://olivesfordinner.com/wp-content/uploads/2013/04/8669439780_841b866c5e_z-320x320.jpg'], ['Glazed Tofu with Fiery Sriracha Pearls', 
'https://olivesfordinner.com/2013/04/glazed-tofu-with-fiery-sriracha-pearls.html', 'https://olivesfordinner.com/wp-content/uploads/2013/04/8646152741_dff8613fc7_z-320x320.jpg'], ['Vegan Port Wine Cheese', 'https://olivesfordinner.com/2013/03/vegan-port-wine-cheese.html', 'https://olivesfordinner.com/wp-content/uploads/2013/03/8584101760_7f06763901_z-320x320.jpg'], ['Miso and Brown Sugar Glazed Shiitake Caps', 'https://olivesfordinner.com/2013/03/miso-and-brown-sugar-glazed-shiitake.html', 'https://olivesfordinner.com/wp-content/uploads/2013/03/8566688264_a510e02bcb_z-320x320.jpg'], ['Shiitake Bacon', 'https://olivesfordinner.com/2013/03/shiitake-bacon.html', 'https://olivesfordinner.com/wp-content/uploads/2013/03/8563120276_d9c5551aa3_z-320x320.jpg'], ['Tofu and Shiitake Stack with Bok Choy-Ginger Puree', 'https://olivesfordinner.com/2013/03/tofu-and-shiitake-stack-with-bok-choy.html', 'https://olivesfordinner.com/wp-content/uploads/2013/03/8525187049_08c1c0e9c4_z-320x320.jpg'], ['Spicy Vegan Scallop Roll', 'https://olivesfordinner.com/2013/01/spicy-vegan-scallop-ro.html', 'https://olivesfordinner.com/wp-content/uploads/2013/01/8421544856_6542d6a4b3_z-320x320.jpg'], ['Sriracha-Cashew Kale Chips', 'https://olivesfordinner.com/2013/01/sriracha-cashew-kale-chips.html', 'https://olivesfordinner.com/wp-content/uploads/2013/01/8402198913_f1c3592067_z-320x320.jpg'], ['Curried Quinoa Triangles with Cilantro-Ginger Sauce', 'https://olivesfordinner.com/2013/01/curried-quinoa-samosas-with-cilantro.html', 'https://olivesfordinner.com/wp-content/uploads/2013/01/8373065343_4b12dc47fc_z-320x320.jpg'], ['Sriracha-Habanero Vegan Buffalo Wings', 'https://olivesfordinner.com/2013/01/sriracha-habanero-vegan-buffalo-wings.html', 'https://olivesfordinner.com/wp-content/uploads/2013/01/8335188506_7e166e2f61_z-1-320x320.jpg'], ['Chickpea Panisse with Roasted Garlic Aioli', 'https://olivesfordinner.com/2013/01/chickpea-panisse-with-roasted-garlic.html', 
'https://olivesfordinner.com/wp-content/uploads/2013/01/8322257525_e1360e918b_z-320x320.jpg'], ['Cardamom-Pistachio Coconut Macaroons', 'https://olivesfordinner.com/2012/03/cardamom-pistachio-coconut-macaroons.html', 'https://olivesfordinner.com/wp-content/uploads/2012/03/6869304208_fd4f13b25e_z-320x320.jpg'], ['How to Chiffonade Collard Greens', 'https://olivesfordinner.com/2012/11/how-to-chiffonade-collard-greens.html', 'https://olivesfordinner.com/wp-content/uploads/2012/11/IMG_2817-320x320.jpg'], ['Roasted Red Pepper Mac and Cheese', 'https://olivesfordinner.com/2012/10/roasted-red-pepper-mac-and-cheese.html', 'https://olivesfordinner.com/wp-content/uploads/2012/10/8060234137_cfb80515f8_z-320x320.jpg'], ['Soft Pretzel Bites', 'https://olivesfordinner.com/2012/10/soft-pretzel-bites.html', 'https://olivesfordinner.com/wp-content/uploads/2012/10/8076551157_4fd577e190_z-320x320.jpg'], ['Homemade Thai Sweet Chili Sauce with Fried Tofu', 'https://olivesfordinner.com/2012/10/homemade-thai-sweet-chili-sauce-wi.html', 'https://olivesfordinner.com/wp-content/uploads/2012/10/8017588970_dd4a45740c-320x320.jpg'], ['Vegan Bacon-Wrapped Scallops with Paprika Cream Sauce', 'https://olivesfordinner.com/2012/10/vegan-bacon-wrapped-scallops-wi.html', 'https://olivesfordinner.com/wp-content/uploads/2012/10/8044749163_c4e55e1c58_z-320x320.jpg'], ['Raw Tahini and Cashew Dressing', 'https://olivesfordinner.com/2012/09/raw-tahini-and-cashew-dressing.html', 'https://olivesfordinner.com/wp-content/uploads/2012/09/IMG_1464-320x320.jpg'], ['Zucchini and Lemongrass Fritters', 'https://olivesfordinner.com/2012/09/zucchini-and-lemongrass-fritters.html', 'https://olivesfordinner.com/wp-content/uploads/2012/09/7992669464_39f87b7c9e_z-320x320.jpg'], ['Vegan Grilled Cheese with Smoky Tomato Soup', 'https://olivesfordinner.com/2012/09/vegan-grilled-cheese-with-smoky-tomato.html', 'https://olivesfordinner.com/wp-content/uploads/2012/09/7993534714_73858b07d3-320x320.jpg'], ['Cheese-Stuffed Homemade 
Ravioli with White Wine Sauce', 'https://olivesfordinner.com/2012/09/cheese-stuffed-homemade-ravioli-wi.html', 'https://olivesfordinner.com/wp-content/uploads/2012/09/7906875742_ab4cd8a52a-320x320.jpg'], ['Grilled Sambal Oelek Tofu with Peanut Butter Sauce', 'https://olivesfordinner.com/2012/08/grilled-sambal-oelek-tofu-with-peanu.html', 'https://olivesfordinner.com/wp-content/uploads/2012/08/7768265260_78510a0aea_z-320x320.jpg'], ['Pasta with Roasted Red Pepper Sauce and Caramelized Shallots', 'https://olivesfordinner.com/2012/08/pasta-with-roasted-red-pepper-sauce-and.html', 'https://olivesfordinner.com/wp-content/uploads/2012/08/7762912426_494a4743ce_z-320x320.jpg'], ['Vegan Sausages with Sriracha and Five-Spice Powder', 'https://olivesfordinner.com/2012/08/vegan-sausages-with-sriracha-and-five.html', 'https://olivesfordinner.com/wp-content/uploads/2012/08/7719591422_63c5625fdf_z-320x320.jpg'], ['Our Boston Kitchen | Before and After', 'https://olivesfordinner.com/2017/08/our-boston-kitchen-before-and-after.html', 'https://olivesfordinner.com/wp-content/uploads/2017/08/kitchen-after-cover-320x320.jpg'], ['White Bean and Roasted Garlic Spread', 'https://olivesfordinner.com/2012/08/white-bean-and-roasted-garlic-spread.html', 'https://olivesfordinner.com/wp-content/uploads/2012/08/7711734956_7041d38c3f_b-320x320.jpg'], ['Spring Onion and Chili Pepper Sauce', 'https://olivesfordinner.com/2012/08/spring-onion-and-chili-pepper-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2012/08/7676246688_9bfa403c7d_z-320x320.jpg'], ['Deep-Fried Vegan Mac and Cheese', 'https://olivesfordinner.com/2012/07/deep-fried-vegan-mac-and-cheese.html', 'https://olivesfordinner.com/wp-content/uploads/2012/07/7665545816_98fb6efe94-320x320.jpg'], ['Flowering Chive and Garlic Dumplings', 'https://olivesfordinner.com/2012/07/flowering-chive-and-garlic-dumplings.html', 'https://olivesfordinner.com/wp-content/uploads/2012/07/7582186006_74a1e56585-320x320.jpg'], ['Muhammara', 
'https://olivesfordinner.com/2012/07/muhammara.html', 'https://olivesfordinner.com/wp-content/uploads/2012/07/7576216232_2cce8c7f20-320x320.jpg'], ['Dried and Fried Sriracha Chickpeas', 'https://olivesfordinner.com/2012/07/dried-and-fried-sriracha-chickpeas.html', 'https://olivesfordinner.com/wp-content/uploads/2012/07/7528146660_0b8fd339ef-320x320.jpg'], ['Vegan Mozzarella, Cherry Tomato and Basil Skewers', 'https://olivesfordinner.com/2012/07/vegan-mozzarella-cherry-tomato-and.html', 'https://olivesfordinner.com/wp-content/uploads/2012/07/7502952152_a665bf8a95-320x320.jpg'], ['Easy Refrigerator Pickles with Flowering Chives', 'https://olivesfordinner.com/2012/07/easy-refrigerator-pickles-wi.html', 'https://olivesfordinner.com/wp-content/uploads/2012/07/7480723494_e1118328c9-320x320.jpg'], ['Gingerade Kombucha Caviar', 'https://olivesfordinner.com/2012/06/gingerade-kombucha-caviar.html', 'https://olivesfordinner.com/wp-content/uploads/2012/06/7426883562_bff8f197b4-320x320.jpg'], ['Homemade Basil and Garlic Oil', 'https://olivesfordinner.com/2012/06/homemade-basil-and-garlic-oi.html', 'https://olivesfordinner.com/wp-content/uploads/2012/06/7388511020_7acacbaca4-320x296.jpg'], ['How to Fold a Wonton Dumpling', 'https://olivesfordinner.com/2012/06/how-to-fold-wonton-dumpling.html', 'https://olivesfordinner.com/wp-content/uploads/2012/06/7381609018_078ffb781a-320x320.jpg'], ['Tea-Smoked Lychees from Herbivoracious', 'https://olivesfordinner.com/2012/06/tea-smoked-lychees-from-herbivoracious.html', 'https://olivesfordinner.com/wp-content/uploads/2012/06/7159216503_47fac83987-320x320.jpg'], ['Loaded Baked Potato Hash Brown Waffles', 'https://olivesfordinner.com/2015/08/loaded-baked-potato-hash-brown-waffles.html', 'https://olivesfordinner.com/wp-content/uploads/2015/08/20833106406_f5a71e8a4b_z-320x320.jpg'], ['Smoky Baba Ghanoush with Roasted Garlic', 'https://olivesfordinner.com/2012/05/smoky-baba-ghanoush-with-roasted-garlic.html', 
'https://olivesfordinner.com/wp-content/uploads/2012/05/7282267428_b102aac59e-320x320.jpg'], ['Lotus Root Tempura', 'https://olivesfordinner.com/2012/05/lotus-root-tempura.html', 'https://olivesfordinner.com/wp-content/uploads/2012/05/7190707114_e694432351-320x320.jpg'], ['Ricotta-Stuffed French Toast with Salted Butterscotch Sauce', 'https://olivesfordinner.com/2012/05/ricotta-stuffed-french-toast-wi.html', 'https://olivesfordinner.com/wp-content/uploads/2012/05/7149109439_1ff5676ee4-320x320.jpg'], ['Raw Collard Greens Sushi Rolls', 'https://olivesfordinner.com/2012/05/raw-collard-greens-sushi.html', 'https://olivesfordinner.com/wp-content/uploads/2012/05/7126218555_e617847dee-320x320.jpg'], ['Curried Chickpea and Onion Fritters', 'https://olivesfordinner.com/2012/04/curried-chickpea-and-onion-fritters.html', 'https://olivesfordinner.com/wp-content/uploads/2012/04/6976063048_387e956ec6-320x320.jpg'], ['Vegan Sushi: Faux-Roe Gunkanmaki with Pickled Daikon', 'https://olivesfordinner.com/2012/04/vegan-sushi-faux-roe-gunkanmaki-with_16.html', 'https://olivesfordinner.com/wp-content/uploads/2012/04/6936157558_aaf4a19acb-320x320.jpg'], ['Vegan Queso Blanco Dip', 'https://olivesfordinner.com/2012/04/vegan-queso-blanco-dip.html', 'https://olivesfordinner.com/wp-content/uploads/2012/04/6887430116_20f9c97740-320x320.jpg'], ['Fresh Vegan Mozzarella Pizza', 'https://olivesfordinner.com/2012/03/fresh-vegan-mozzarella-pizza.html', 'https://olivesfordinner.com/wp-content/uploads/2012/03/7012328143_2537723fed_z-320x320.jpg'], ['Garlic-Sriracha Vegan Buffalo Wings', 'https://olivesfordinner.com/2012/03/garlic-sriracha-vegan-buffalo-wings.html', 'https://olivesfordinner.com/wp-content/uploads/2012/03/6973941693_4db60198cb_z-320x320.jpg'], ['Sweet Potato and Lemongrass Gyozas', 'https://olivesfordinner.com/2012/03/sweet-potato-and-lemongrass-gyozas.html', 'https://olivesfordinner.com/wp-content/uploads/2012/03/6807135216_ef28a5c2b8_z-320x320.jpg'], ['Spiced Taro Root Croquettes', 
'https://olivesfordinner.com/2012/03/spiced-taro-root-croquettes.html', 'https://olivesfordinner.com/wp-content/uploads/2012/03/6804125084_b162b29fdc_z-320x320.jpg'], ['Molecular Vegan: Scallops with Carrot-Ginger Caviar', 'https://olivesfordinner.com/2012/02/molecular-vegan-scallops-with-carro.html', 'https://olivesfordinner.com/wp-content/uploads/2012/02/6786920836_eb7688d3fd_z-320x320.jpg'], ['Steamed Tofu with Spicy Black Bean Sauce', 'https://olivesfordinner.com/2012/04/steamed-tofu-with-spicy-black-bean.html', 'https://olivesfordinner.com/wp-content/uploads/2012/04/7051589731_bbdc77e088-320x320.jpg'], ['Cheesy Kale Chips', 'https://olivesfordinner.com/2012/02/cheesy-kale-chips.html', 'https://olivesfordinner.com/wp-content/uploads/2012/02/6912551587_7f9ff3ea85_z-320x320.jpg'], ['Vegan Macaroni and Cheese', 'https://olivesfordinner.com/2012/02/vegan-macaroni-and-cheese.html', 'https://olivesfordinner.com/wp-content/uploads/2012/02/6864169847_3f2925df44_z-320x320.jpg'], ['Deep-Fried Vegan Ricotta Puffs', 'https://olivesfordinner.com/2012/02/molecular-vegan-deep-fried-ricotta.html', 'https://olivesfordinner.com/wp-content/uploads/2012/02/6817901961_cc588ef74d_z-320x320.jpg'], ['Raw Almond and Chickpea Miso Spread', 'https://olivesfordinner.com/2012/01/raw-almond-and-chickpea-miso-spread.html', 'https://olivesfordinner.com/wp-content/uploads/2012/01/6709230867_2bd4b61bd9_z-320x320.jpg'], ['Lemongrass and Cilantro Shumai', 'https://olivesfordinner.com/2011/12/lemongrass-and-cilantro-shumai.html', 'https://olivesfordinner.com/wp-content/uploads/2011/12/6513396407_89164d4b84_z-320x320.jpg'], ['Spicy Shiitake Mushroom Roll', 'https://olivesfordinner.com/2011/12/spicy-shiitake-mushroom-ro.html', 'https://olivesfordinner.com/wp-content/uploads/2011/12/6489807867_ce5d312d01_z-320x320.jpg'], ['Scallion Pancakes', 'https://olivesfordinner.com/2011/10/scallion-pancakes.html', 'https://olivesfordinner.com/wp-content/uploads/2011/10/6273005342_1584ec7d3b-320x320.jpg'], 
['Grilled Tofu with Lemongrass and Cilantro Stuffing', 'https://olivesfordinner.com/2011/10/grilled-tofu-with-lemongrass-and.html', 'https://olivesfordinner.com/wp-content/uploads/2011/10/6251395078_631e498749-320x320.jpg'], ['Homemade Sriracha Sauce', 'https://olivesfordinner.com/2011/10/homemade-sriracha-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2011/10/6223899243_3fb9918b77-320x320.jpg'], ['Mattie’s Vegan Butter', 'https://olivesfordinner.com/2011/09/matties-vegan-butter.html', 'https://olivesfordinner.com/wp-content/uploads/2011/09/6163784411_e74c8b4423-320x320.jpg'], ['Roasted Garlic and Sriracha Hummus', 'https://olivesfordinner.com/2011/09/roasted-garlic-and-sriracha-hummus.html', 'https://olivesfordinner.com/wp-content/uploads/2011/09/6116553668_6f1645590d-320x320.jpg'], ['Smoky Cashew Cheese', 'https://olivesfordinner.com/2011/08/smoky-cashew-cheese.html', 'https://olivesfordinner.com/wp-content/uploads/2011/08/6042115879_be419da111-320x320.jpg'], ['Lemongrass and Garlic Stuffed Tofu', 'https://olivesfordinner.com/2011/08/lemongrass-and-garlic-stuffed-tofu.html', 'https://olivesfordinner.com/wp-content/uploads/2011/08/5995896848_e75fd561f1-320x320.jpg'], ['Mushroom Walnut Paté', 'https://olivesfordinner.com/2011/07/mushroom-walnut-pate.html', 'https://olivesfordinner.com/wp-content/uploads/2011/07/5971239886_495d3367a8-320x320.jpg'], ['Vegan Crab Cakes with Sriracha-Vegenaise Dressing', 'https://olivesfordinner.com/2011/06/vegan-crab-cakes-with-sriracha.html', 'https://olivesfordinner.com/wp-content/uploads/2011/06/5874981554_d77f709a58_z-320x320.jpg'], ['Fresh Basil and Kale Pesto', 'https://olivesfordinner.com/2011/06/basil-and-kale-pesto.html', 'https://olivesfordinner.com/wp-content/uploads/2011/06/5801443438_574de3e29e_z-320x320.jpg'], ['Deep Fried Daiya Cheeze Sticks', 'https://olivesfordinner.com/2011/05/deep-fried-daiya-cheeze-sticks.html', 
'https://olivesfordinner.com/wp-content/uploads/2011/05/5723853436_4b1af122ce_z-320x320.jpg'], ['Raw Almond Milk', 'https://olivesfordinner.com/2011/05/raw-almond-milk.html', 'https://olivesfordinner.com/wp-content/uploads/2011/05/5673752446_806eca0da3-320x320.jpg'], ['Onion Rings with Thai Dipping Sauce', 'https://olivesfordinner.com/2011/04/onion-rings-with-thai-dipping-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2011/04/5673750696_e9389c8390-320x320.jpg'], ['Spicy Thai Hummus', 'https://olivesfordinner.com/2011/03/spicy-thai-hummus.html', 'https://olivesfordinner.com/wp-content/uploads/2011/03/5523599970_e6ac1f5b04-320x320.jpg'], ['Minted Quinoa Spring Rolls with Toasted Cashews and Tahini', 'https://olivesfordinner.com/2011/03/minted-quinoa-spring-rolls-with-toasted.html', 'https://olivesfordinner.com/wp-content/uploads/2011/03/5507119039_254cb9c4dc-320x320.jpg'], ['Raw Cashew Cheese with Oil Cured Olives and Fresh Tarragon', 'https://olivesfordinner.com/2011/02/raw-cashew-cheese-with-oil-cured-olives.html', 'https://olivesfordinner.com/wp-content/uploads/2011/02/5469946814_24e6de812c-320x320.jpg'], ['Tofutti, Capers and Red Onion Beggars Purses', 'https://olivesfordinner.com/2011/02/tofutti-capers-and-red-onion-beggars.html', 'https://olivesfordinner.com/wp-content/uploads/2011/02/5462974450_d97bb81d5e-320x320.jpg'], ['How to Press Tofu: A Primer', 'https://olivesfordinner.com/2011/01/how-to-press-tofu-primer.html', 'https://olivesfordinner.com/wp-content/uploads/2011/01/5347246937_e568d31c5c-320x281.jpg'], ['Cultured Cashew Cheese', 'https://olivesfordinner.com/2013/08/cultured-cashew-cheese.html', 'https://olivesfordinner.com/wp-content/uploads/2013/08/9507153235_0a9e55b33f_z-320x320.jpg'], ['Lobster Mushroom Tempura, Avocado and Kimchi+Mayo Sushi Rolls', 'https://olivesfordinner.com/2020/12/lobster-mushroom-tempura-avocado-and-kimchimayo-sushi-rolls.html', 
'https://olivesfordinner.com/wp-content/uploads/2020/12/Lobster-Mushroom-Tempura-Sushi-Rolls-cover-dec-320x320.jpg'], ['Carrot Lox Crackers', 'https://olivesfordinner.com/2020/11/carrot-lox-crackers.html', 'https://olivesfordinner.com/wp-content/uploads/2020/10/Carrot-Lox-Crackers-Appetizer-FI-320x320.jpg'], ['Homemade Coconut Yogurt', 'https://olivesfordinner.com/2020/04/homemade-coconut-yogurt.html', 'https://olivesfordinner.com/wp-content/uploads/2020/04/Coconut-Yogurt-cover-320x320.jpg'], ['Buffalo Tots', 'https://olivesfordinner.com/2019/12/buffalo-tots.html', 'https://olivesfordinner.com/wp-content/uploads/2019/12/Buffalo-Bleu-Tots_20548-cover-320x320.jpg'], ['The Best Buffalo Cauliflower | Crispy + Spicy + Air Fried!', 'https://olivesfordinner.com/2019/11/the-best-buffalo-cauliflower-crispy-spicy-air-fried.html', 'https://olivesfordinner.com/wp-content/uploads/2019/11/Olives-for-Dinner-The-Best-Buffalo-Cauliflower-cover-320x320.jpg'], ['Deep-fried Brussels Sprouts with Chipotle-Bacon Mayo', 'https://olivesfordinner.com/2019/10/deep-fried-brussels-sprouts-with-chipotle-bacon-mayo.html', 'https://olivesfordinner.com/wp-content/uploads/2019/10/Deep-fried-Brussels-Sprouts-with-Chipotle-Bacon-Mayo_cover-320x320.jpg'], ['Smashed + Loaded Tiny Baked Potatoes', 'https://olivesfordinner.com/2019/10/smashed-loaded-tiny-baked-potatoes.html', 'https://olivesfordinner.com/wp-content/uploads/2019/10/Smashed-and-Loaded-tiny-potato-cover-320x320.jpg'], ['Sriracha Pea Crusted Mushrooms with Celery-Garlic Mayo', 'https://olivesfordinner.com/2019/05/sriracha-pea-crusted-mushrooms-with-celery-garlic-mayo.html', 'https://olivesfordinner.com/wp-content/uploads/2019/05/Sriracha-Pea-Crusted-Mushrooms-with-Celery-Garlic-Mayo-cover-320x320.jpg'], ['Chickpea Fries with Yogurt-Tahini Sauce', 'https://olivesfordinner.com/2018/07/chickpea-fries-with-yogurt-tahini-sauce.html', 'https://olivesfordinner.com/wp-content/uploads/2018/07/Chickpea-Fries-Cover-1-320x320.jpg'], ['Aquafaba Mayo', 
'https://olivesfordinner.com/2017/12/aquafaba-mayo.html', 'https://olivesfordinner.com/wp-content/uploads/2017/11/AF-cover-final-320x320.jpg'], ['Air-Fried Buffalo Cauliflower Steaks', 'https://olivesfordinner.com/2017/10/air-fried-buffalo-cauliflower-steaks.html', 'https://olivesfordinner.com/wp-content/uploads/2017/10/Air-Fried-Buffalo-Cauliflower-Steaks-cover-320x320.jpg'], ['Carrot Lox Stuffed + Fried Ravioli', 'https://olivesfordinner.com/2017/03/carrot-lox-stuffed-fried-ravioli.html', 'https://olivesfordinner.com/wp-content/uploads/2017/02/Carrot-Lox-Stuffed-Fried-Ravioli-cover-320x320.jpg'], ['Soft Pretzel Bites', 'https://olivesfordinner.com/2012/10/soft-pretzel-bites.html', 'https://olivesfordinner.com/wp-content/uploads/2012/10/8076551157_4fd577e190_z-320x320.jpg']]
|
[
"mtgayle17@gmail.com"
] |
mtgayle17@gmail.com
|
58feb52c37f7bcf23a6be186845452d2879973a3
|
0ce487a00f82c262e8624fc5094e0d4b2b6556d6
|
/testdir/test.py
|
22ce659815e5774c69a8bbddfe41ec475a3feee6
|
[] |
no_license
|
netpan/netpan
|
a56ca88d82a44e759fe6deff81e58f77ab192c61
|
3d2ed9921c06026fc3484c3e8261e3519f21dfc5
|
refs/heads/master
| 2020-05-02T18:05:22.139693
| 2019-03-29T08:09:00
| 2019-03-29T08:09:00
| 178,118,032
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 312
|
py
|
a=int(input("请输入你家狗儿的年龄:"))
if a<0:
print("你是来搞笑的吧!")
elif a==1:
print("相当于人类14岁的年龄!")
elif a==2:
print("相当于人类22岁的年龄!")
else:
b=22+(a-2)*5
print("相当于人类 %d 岁的年龄" %(b))
input("press enter to exit!")
|
[
"pjzjxpf@163.com"
] |
pjzjxpf@163.com
|
f5bb27e1af65281e82c2d2612b64ea120e971722
|
30109f5f173f4e51a20cfcaf6ec41628b177f553
|
/fhir/resources/STU3/documentmanifest.py
|
94a0436357d396ba1018fdc3d460cc56e2befff9
|
[
"BSD-3-Clause"
] |
permissive
|
arkhn/fhir.resources
|
82c8f705c8f19e15621f2bb59fd17600c0ef3697
|
122e89c8599c4034bb3075b31d1a1188e377db91
|
refs/heads/master
| 2022-12-16T07:58:19.448071
| 2020-08-13T03:59:37
| 2020-08-13T03:59:37
| 288,683,730
| 1
| 0
|
NOASSERTION
| 2020-08-19T09:01:02
| 2020-08-19T09:01:01
| null |
UTF-8
|
Python
| false
| false
| 12,206
|
py
|
# -*- coding: utf-8 -*-
"""
Profile: http://hl7.org/fhir/StructureDefinition/DocumentManifest
Release: STU3
Version: 3.0.2
Revision: 11917
Last updated: 2019-10-24T11:53:00+11:00
"""
from typing import Any, Dict
from typing import List as ListType
from pydantic import Field, root_validator
from . import backboneelement, domainresource, fhirtypes
class DocumentManifest(domainresource.DomainResource):
"""Disclaimer: Any field name ends with ``__ext`` does't part of
Resource StructureDefinition, instead used to enable Extensibility feature
for FHIR Primitive Data Types.
A list that defines a set of documents.
A collection of documents compiled for a purpose together with metadata
that applies to the collection.
"""
resource_type = Field("DocumentManifest", const=True)
author: ListType[fhirtypes.ReferenceType] = Field(
None,
alias="author",
title="Who and/or what authored the manifest",
description=(
"Identifies who is responsible for creating the manifest, and adding "
"documents to it."
),
# if property is element of this resource.
element_property=True,
# note: Listed Resource Type(s) should be allowed as Reference.
enum_reference_types=[
"Practitioner",
"Organization",
"Device",
"Patient",
"RelatedPerson",
],
)
content: ListType[fhirtypes.DocumentManifestContentType] = Field(
...,
alias="content",
title="The items included",
description="The list of Documents included in the manifest.",
# if property is element of this resource.
element_property=True,
)
created: fhirtypes.DateTime = Field(
None,
alias="created",
title="When this document manifest created",
description=(
"When the document manifest was created for submission to the server "
"(not necessarily the same thing as the actual resource last modified "
"time, since it may be modified, replicated, etc.)."
),
# if property is element of this resource.
element_property=True,
)
created__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
None, alias="_created", title="Extension field for ``created``."
)
description: fhirtypes.String = Field(
None,
alias="description",
title="Human-readable description (title)",
description=(
"Human-readable description of the source document. This is sometimes "
'known as the "title".'
),
# if property is element of this resource.
element_property=True,
)
description__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
None, alias="_description", title="Extension field for ``description``."
)
identifier: ListType[fhirtypes.IdentifierType] = Field(
None,
alias="identifier",
title="Other identifiers for the manifest",
description=(
"Other identifiers associated with the document manifest, including "
"version independent identifiers."
),
# if property is element of this resource.
element_property=True,
)
masterIdentifier: fhirtypes.IdentifierType = Field(
None,
alias="masterIdentifier",
title="Unique Identifier for the set of documents",
description=(
"A single identifier that uniquely identifies this manifest. "
"Principally used to refer to the manifest in non-FHIR contexts."
),
# if property is element of this resource.
element_property=True,
)
recipient: ListType[fhirtypes.ReferenceType] = Field(
None,
alias="recipient",
title="Intended to get notified about this set of documents",
description=(
"A patient, practitioner, or organization for which this set of "
"documents is intended."
),
# if property is element of this resource.
element_property=True,
# note: Listed Resource Type(s) should be allowed as Reference.
enum_reference_types=[
"Patient",
"Practitioner",
"RelatedPerson",
"Organization",
],
)
related: ListType[fhirtypes.DocumentManifestRelatedType] = Field(
None,
alias="related",
title="Related things",
description="Related identifiers or resources associated with the DocumentManifest.",
# if property is element of this resource.
element_property=True,
)
source: fhirtypes.Uri = Field(
None,
alias="source",
title="The source system/application/software",
description=(
"Identifies the source system, application, or software that produced "
"the document manifest."
),
# if property is element of this resource.
element_property=True,
)
source__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
None, alias="_source", title="Extension field for ``source``."
)
status: fhirtypes.Code = Field(
...,
alias="status",
title="current | superseded | entered-in-error",
description="The status of this document manifest.",
# if property is element of this resource.
element_property=True,
# note: Enum values can be used in validation,
# but use in your own responsibilities, read official FHIR documentation.
enum_values=["current", "superseded", "entered-in-error"],
)
status__ext: fhirtypes.FHIRPrimitiveExtensionType = Field(
None, alias="_status", title="Extension field for ``status``."
)
subject: fhirtypes.ReferenceType = Field(
None,
alias="subject",
title="The subject of the set of documents",
description=(
"Who or what the set of documents is about. The documents can be about "
"a person, (patient or healthcare practitioner), a device (i.e. "
"machine) or even a group of subjects (such as a document about a herd "
"of farm animals, or a set of patients that share a common exposure). "
"If the documents cross more than one subject, then more than one "
"subject is allowed here (unusual use case)."
),
# if property is element of this resource.
element_property=True,
# note: Listed Resource Type(s) should be allowed as Reference.
enum_reference_types=["Patient", "Practitioner", "Group", "Device"],
)
type: fhirtypes.CodeableConceptType = Field(
None,
alias="type",
title="Kind of document set",
description=(
"Specifies the kind of this set of documents (e.g. Patient Summary, "
"Discharge Summary, Prescription, etc.). The type of a set of documents"
" may be the same as one of the documents in it - especially if there "
"is only one - but it may be wider."
),
# if property is element of this resource.
element_property=True,
)
class DocumentManifestContent(backboneelement.BackboneElement):
    """The items included.

    The list of Documents included in the manifest.

    Disclaimer: any field name ending with ``__ext`` is not part of the
    resource StructureDefinition; it only enables the Extensibility feature
    for FHIR primitive data types.
    """
    resource_type = Field("DocumentManifestContent", const=True)
    # p[x] choice element: exactly one of pAttachment / pReference must be
    # populated (enforced by ``validate_one_of_many`` below).
    pAttachment: fhirtypes.AttachmentType = Field(
        None,
        alias="pAttachment",
        title="Contents of this set of documents",
        description=(
            "The list of references to document content, or Attachment that consist"
            " of the parts of this document manifest. Usually, these would be "
            "document references, but direct references to Media or Attachments are"
            " also allowed."
        ),
        # if property is element of this resource.
        element_property=True,
        # Choice of Data Types. i.e p[x]
        one_of_many="p",
        one_of_many_required=True,
    )
    pReference: fhirtypes.ReferenceType = Field(
        None,
        alias="pReference",
        title="Contents of this set of documents",
        description=(
            "The list of references to document content, or Attachment that consist"
            " of the parts of this document manifest. Usually, these would be "
            "document references, but direct references to Media or Attachments are"
            " also allowed."
        ),
        # if property is element of this resource.
        element_property=True,
        # Choice of Data Types. i.e p[x]
        one_of_many="p",
        one_of_many_required=True,
        # note: Listed Resource Type(s) should be allowed as Reference.
        enum_reference_types=["Resource"],
    )
    @root_validator(pre=True)
    def validate_one_of_many(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        """https://www.hl7.org/fhir/formats.html#choice
        A few elements have a choice of more than one data type for their content.
        All such elements have a name that takes the form nnn[x].
        The "nnn" part of the name is constant, and the "[x]" is replaced with
        the title-cased name of the type that is actually used.
        The table view shows each of these names explicitly.
        Elements that have a choice of data type cannot repeat - they must have a
        maximum cardinality of 1. When constructing an instance of an element with a
        choice of types, the authoring system must create a single element with a
        data type chosen from among the list of permitted data types.
        """
        # Maps the choice-element prefix to its concrete typed fields.
        one_of_many_fields = {"p": ["pAttachment", "pReference"]}
        for prefix, fields in one_of_many_fields.items():
            # Sanity check: the field metadata must agree with the table above.
            assert cls.__fields__[fields[0]].field_info.extra["one_of_many"] == prefix
            required = (
                cls.__fields__[fields[0]].field_info.extra["one_of_many_required"]
                is True
            )
            found = False
            # Reject more than one populated variant of the same choice element.
            for field in fields:
                if field in values and values[field] is not None:
                    if found is True:
                        raise ValueError(
                            "Any of one field value is expected from "
                            f"this list {fields}, but got multiple!"
                        )
                    else:
                        found = True
            if required is True and found is False:
                raise ValueError(f"Expect any of field value from this list {fields}.")
        return values
class DocumentManifestRelated(backboneelement.BackboneElement):
    """Related things.

    Related identifiers or resources associated with the DocumentManifest.

    Disclaimer: any field name ending with ``__ext`` is not part of the
    resource StructureDefinition; it only enables the Extensibility feature
    for FHIR primitive data types.
    """
    resource_type = Field("DocumentManifestRelated", const=True)
    # Business identifier related to this manifest (order / accession /
    # XDW workflow numbers).
    identifier: fhirtypes.IdentifierType = Field(
        None,
        alias="identifier",
        title="Identifiers of things that are related",
        description=(
            "Related identifier to this DocumentManifest. For example, Order "
            "numbers, accession numbers, XDW workflow numbers."
        ),
        # if property is element of this resource.
        element_property=True,
    )
    # Reference to any related resource (unconstrained: "Resource").
    ref: fhirtypes.ReferenceType = Field(
        None,
        alias="ref",
        title="Related Resource",
        description=(
            "Related Resource to this DocumentManifest. For example, Order, "
            "ProcedureRequest, Procedure, EligibilityRequest, etc."
        ),
        # if property is element of this resource.
        element_property=True,
        # note: Listed Resource Type(s) should be allowed as Reference.
        enum_reference_types=["Resource"],
    )
|
[
"connect2nazrul@gmail.com"
] |
connect2nazrul@gmail.com
|
4525aa767d23f6bb83dd9dc9727e3368900f2e47
|
01f321b011953de639030b010249ec721446e71b
|
/virtual/bin/easy_install-3.6
|
5395a95d1f956a344d068bdc788dada7c9a1edfe
|
[] |
no_license
|
gabrielcoder247/myportfolio
|
a5b37fd809eeb46926f72d9409d31f29f842d179
|
e7e08045d6cea0f8393379bc2feb878cef25ff63
|
refs/heads/master
| 2020-03-30T03:37:15.111963
| 2018-09-28T06:55:31
| 2018-09-28T06:55:31
| 150,698,201
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 277
|
6
|
#!/home/gabrielcoder/Documents/portfolio/virtual/bin/python3.6
# -*- coding: utf-8 -*-
# Auto-generated console-script shim for setuptools' ``easy_install`` command.
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
    # Strip a trailing "-script.pyw" or ".exe" from argv[0] (Windows launcher
    # convention) so the command sees its bare name, then run easy_install
    # and propagate its return value as the process exit code.
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
|
[
"gabrielcoder247@gmail.com"
] |
gabrielcoder247@gmail.com
|
47607374b412d3c3ea3a567915b7d2377f77190a
|
27bccae0a0651dec824eb40633448928f3fa87a7
|
/masterpc/apps/user/templatetags/sum.py
|
d6c4b1d6a806d71909cda61bda0e4857984ca5f1
|
[] |
no_license
|
samir04m/MasterPC
|
1d2a2955b491d99afa14a94fc9955140cdbfb9bd
|
be115e2b38e518c2e8d0aa185f53c02ffd56dac8
|
refs/heads/master
| 2022-09-30T01:35:40.792649
| 2020-06-09T03:49:21
| 2020-06-09T03:49:21
| 262,657,403
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 107
|
py
|
from django import template
register = template.Library()
@register.filter
def sum(a, b):
    """Template filter returning ``a + b`` (usage: ``{{ a|sum:b }}``).

    NOTE(review): the name shadows the builtin ``sum`` inside this module,
    but the filter name is part of the template API, so it is kept as-is.
    """
    return a + b
|
[
"samir04m@gmail.com"
] |
samir04m@gmail.com
|
3902c35a3082958cebda9eb01b43d71151829312
|
948f254e75e7d0f816c0944d636234f1668eb7b8
|
/meaniceinbox_old.py
|
fafa56764682dcf81c0fcd5a071bfd515871debf
|
[] |
no_license
|
NOAA-Strausz/EcoFOCI_ssmi_ice
|
30f2398ba0e87c5b710cd57e933d3fb885ddb540
|
b0501df633f419a0aebf45308b98919a6d2a5b9e
|
refs/heads/master
| 2021-12-09T11:51:57.638964
| 2021-12-01T21:42:40
| 2021-12-01T21:42:40
| 162,646,458
| 1
| 2
| null | 2020-10-22T22:47:00
| 2018-12-21T00:45:39
|
Python
|
UTF-8
|
Python
| false
| false
| 8,088
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Compute mean SSMI sea-ice concentration in a box (or radius) around a
point or named mooring, over a range of years, and write a CSV with a
day-of-year column plus one column per year.
Created on Fri Dec 20 13:15:11 2019
@author: strausz
"""
import argparse
import glob
import numpy as np
import datetime as dt
import pandas as pd
import math
import sys
from haversine import haversine
parser = argparse.ArgumentParser(description='Get ice concentration around a point')
parser.add_argument('-latlon', '--latlon', nargs=2,
                    help='latitude and longitude of desired point, W lon must be negative', type=float)
parser.add_argument('-y', '--years', nargs=2, help='year range ie "2015 2019"',
                    type=int)
parser.add_argument('-a', '--name', help='optional name of point', type=str)
parser.add_argument('-d', '--distance', help='size of box around point',
                    type=float)
parser.add_argument('-n', '--nm', help='use nautical miles instead of km',
                    action="store_true")
parser.add_argument('-r', '--radius', help='use distance as radius around point instead of box',
                    action="store_true")
parser.add_argument('-m', '--mooring', help='Mooring name, choose from ck1-9, or bs2-8')
parser.add_argument('-v', '--verbose', help='Print some details while processing files',
                    action="store_true")
args=parser.parse_args()
# Flat binary lat/lon coordinate grids for the 25 km polar stereographic grid.
latfile='/home/makushin/strausz/ecofoci_github/EcoFOCI_ssmi_ice/psn25lats_v3.dat'
lonfile='/home/makushin/strausz/ecofoci_github/EcoFOCI_ssmi_ice/psn25lons_v3.dat'
#locations of ice files
bootstrap = '/home/akutan/strausz/ssmi_ice/data/bootstrap/'
nrt = '/home/akutan/strausz/ssmi_ice/data/nrt/'
#latest available bootstrap year; later years fall back to near-real-time files
boot_year = 2020
#mooring locations taken from 'https://www.pmel.noaa.gov/foci/foci_moorings/mooring_info/mooring_location_info.html'
moorings = {'bs2':[56.869,-164.050], 'bs4':[57.895,-168.878], 'bs5':[59.911,-171.73],
            'bs8':[62.194,-174.688], 'bs14':[64.00,-167.933], 'ck1':[70.838,-163.125], 'ck2':[71.231,-164.223],
            'ck3':[71.828,-166.070], 'ck4':[71.038,-160.514], 'ck5':[71.203,-158.011],
            'ck9':[72.464,-156.548], 'ck10':[70.211,-167.787],
            'ck11':[70.013,-166.855], 'ck12':[67.911,-168.195]}
# A mooring name overrides an explicit --latlon pair; the chosen label is
# later embedded in the output CSV filename.
if args.mooring:
    if args.mooring in moorings:
        inlat = moorings[args.mooring][0]
        inlon = moorings[args.mooring][1]
    else:
        sys.exit("Mooring not listed")
    mooring = args.mooring + "_"
else:
    inlat = args.latlon[0]
    inlon = args.latlon[1]
    mooring = ''
# Unit label used only in the output filename.
if args.nm:
    units = 'nm'
else:
    units = 'km'
if args.name:
    pointname = args.name + "_"
else:
    pointname = ''
def decode_datafile(filename):
    """Decode one SSMI ice concentration file into percent concentration.

    Two formats are recognised from the basename prefix:
      * ``nt`` (near-real-time): 300-byte header, then uint8 cells where
        values >= 253 are sentinels (mapped to 0) and data scales by 1/2.5.
      * ``bt`` (bootstrap): uint16 cells scaled by 1/10; 110 is the polar
        hole (treated as 100 % ice) and 120 is land (NaN).

    Returns a 1-D float numpy array, or ``np.nan`` for an unknown prefix
    (preserved from the original behaviour).
    """
    # Determine format from the filename prefix (path stripped first).
    prefix = filename.split('/')[-1:][0][:2]
    # Use a context manager so the handle is closed even if np.fromfile
    # raises (the original leaked the handle on that path).
    with open(filename, 'rb') as icefile:
        if prefix == 'nt':
            # remove the header
            icefile.seek(300)
            ice = np.fromfile(icefile, dtype=np.uint8)
            ice[ice >= 253] = 0
            ice = ice / 2.5
        elif prefix == 'bt':
            ice = np.fromfile(icefile, dtype=np.uint16)
            ice = ice / 10.
            ice[ice == 110] = 100  # 110 is polar hole
            ice[ice == 120] = np.nan  # 120 is land
        else:
            ice = np.nan
    return ice
def get_date(filename):
    """Extract the observation date encoded in an ice-file name.

    The basename looks like ``nt_YYYYMMDD...`` / ``bt_YYYYMMDD...``:
    characters 3-10 hold the date. Any leading path is ignored.
    """
    basename = filename.split('/')[-1]
    return dt.datetime.strptime(basename[3:11], "%Y%m%d")
def decode_latlon(filename):
    """Read a flat binary coordinate grid file.

    The file holds little-endian int32 values of degrees * 1e5; the result
    is a 1-D float numpy array of degrees.
    """
    # Context manager closes the handle even if np.fromfile raises
    # (the original leaked the handle on that path).
    with open(filename, 'rb') as latlon_file:
        output = np.fromfile(latlon_file, dtype='<i4')
    return output / 100000.0
def find_box(lat1, lon1, dist, nm):
    """Return ``[nlat, slat, wlon, elon]`` (degrees, rounded to 4 places)
    of a box of side ``dist`` centred on ``(lat1, lon1)``.

    Destination-point formula from
    http://www.movable-type.co.uk/scripts/latlong.html
    ``nm`` selects nautical miles (Earth radius 3440) over km (6371).
    """
    # Half the box side expressed as an angular distance d/R.
    radius = 3440 if nm else 6371
    half = dist / 2
    ratio = half / radius
    sin_ratio = math.sin(ratio)
    cos_ratio = math.cos(ratio)
    phi = math.radians(lat1)
    sin_phi = math.sin(phi)
    cos_phi = math.cos(phi)
    # East/west edges: destination longitude for bearings 270 and 90 degrees
    # (the destination latitude is approximated by lat1, as in the original).
    denom = cos_ratio - sin_phi * sin_phi
    lam = math.radians(lon1)
    wlon = lam + math.atan2(math.sin(math.radians(270)) * sin_ratio * cos_phi, denom)
    elon = lam + math.atan2(math.sin(math.radians(90)) * sin_ratio * cos_phi, denom)
    # North/south edges: destination latitude for bearings 0 and 180 degrees.
    nlat = math.asin(sin_phi * cos_ratio + cos_phi * sin_ratio * math.cos(math.radians(0)))
    slat = math.asin(sin_phi * cos_ratio + cos_phi * sin_ratio * math.cos(math.radians(180)))
    return [round(math.degrees(nlat), 4), round(math.degrees(slat), 4),
            round(math.degrees(wlon), 4), round(math.degrees(elon), 4)]
# Radius mode first selects a bounding box twice the radius wide; points are
# later filtered to the true radius with a haversine distance check.
if args.radius:
    nlat, slat, wlon, elon = find_box(inlat, inlon, 2*args.distance, nm=args.nm)
    dist_type = 'Radius'
else:
    nlat, slat, wlon, elon = find_box(inlat, inlon, args.distance, nm=args.nm)
    dist_type = 'Box'
#put desired years in list
years = list(range(args.years[0],args.years[1]+1))
files = []
# Bootstrap archive is organised in per-year subdirectories; NRT files all
# live in one directory and are matched on the year in the filename.
for i in years:
    year = str(i)
    if i <= boot_year:
        path = bootstrap + year + '/'
        files = files + glob.glob(path + '*.bin')
    else:
        path = nrt
        files = files + glob.glob(path + '*' + year + '*.bin')
output_date = []
output_ice = []
for i in files:
    #print('decoding filename: ' + i)
    # Pair every grid cell's coordinates with its concentration value.
    data_ice={'latitude':decode_latlon(latfile), 'longitude':decode_latlon(lonfile),
              'ice_conc':decode_datafile(i)}
    df_ice=pd.DataFrame(data_ice)
    df_ice_chopped = df_ice[(df_ice.latitude <= nlat) & (df_ice.latitude >= slat) &
                            (df_ice.longitude >= wlon) & (df_ice.longitude <= elon)]
    date = get_date(i)
    if args.radius:
        # Refine the box selection to cells within the requested radius.
        fn = lambda x: haversine((inlat, inlon),(x.latitude,x.longitude))
        distance = df_ice_chopped.apply(fn, axis=1)
        df_ice_chopped = df_ice_chopped.assign(dist=distance.values)
        df_ice_chopped = df_ice_chopped.loc[df_ice_chopped.dist < args.distance]
    #date_string = date.strftime("%Y,%j")
    # Mean concentration over the selected cells, rounded to 0.1.
    ice = df_ice_chopped.ice_conc.mean().round(decimals=1)
    #print(date_string+','+str(ice))
    if args.verbose:
        print("Working on File: " + i)
        print("For " + date.strftime("%Y-%m-%d") + " ice concentration was "
              + str(ice) + "%")
    output_date.append(date)
    output_ice.append(ice)
data = {'date':output_date, 'ice_concentration': output_ice}
df = pd.DataFrame(data)
df.set_index(['date'], inplace=True)
years_grouped = df.groupby(df.index.year)
dummy_list = []
output={'DOY':range(1,367)}
# Build one column per year. Pre-1989 years are resampled to a daily index
# and padded with leading NaNs so each column aligns on day-of-year 1..366.
for name, group in years_grouped:
    year = str(name)
    if name <= 1988:
        group = group.resample('d').mean()
        if name == 1978:
            dummy_list = [np.nan]*304
        elif name in [1979, 1982, 1984, 1985, 1987]:
            dummy_list = [np.nan]
        elif name == 1988:
            dummy_list = [np.nan]*13
        else:
            dummy_list = []
        output[year] = dummy_list + list(group.ice_concentration)
    else:
        output[year] = list(group.ice_concentration)
df_out = pd.DataFrame.from_dict(output, orient='index').transpose()
df_out['DOY']=df_out.DOY.astype(int)
# get longitude suffix; latitude is assumed north
lat_suffix = 'N'
if inlon < 0:
    lon_suffix = 'W'
else:
    lon_suffix = 'E'
filename = ("meaniceinbox_" + mooring + pointname + str(inlat) + lat_suffix + "_" +
            str(abs(inlon)) + lon_suffix + "_" + str(args.distance) + units +
            "_" + dist_type + "_" + str(args.years[0]) + "-" + str(args.years[1]) + ".csv")
df_out.to_csv(filename, index=False)
|
[
"david.strausz@noaa.gov"
] |
david.strausz@noaa.gov
|
78fce8646714678588c09f4232057ee8abcbf17f
|
075e491540481578d68665bc7679c68a0281e6e2
|
/finance/stock.py
|
00575fbd38c0bd30e6404c3483f99c259ca44e8d
|
[] |
no_license
|
ChenChihChiang/Hahow-Python-Web
|
4a4505d37fa49721b1528c7f726cf0b38a40d2da
|
3b5feeb65d88090d8f663f8ba7e84e024d26c1aa
|
refs/heads/master
| 2021-06-14T22:19:57.938509
| 2017-04-18T03:53:22
| 2017-04-18T03:53:22
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,151
|
py
|
import requests
from bs4 import BeautifulSoup
import pandas as pd
import time
# TWSE (Taiwan Stock Exchange) 'BWIBBU' monthly report endpoint queried by
# parserTSE() below.
url = 'http://www.twse.com.tw/ch/trading/exchange/BWIBBU/BWIBBU.php'
def parserTSE(year, month, no):
    """Fetch one month of TWSE BWIBBU data for stock ``no`` and append it
    to a per-year CSV file.

    Side effects: writes ``./<year><month>_tst_<no>.html``, appends to
    ``./<year>_tse_<no>.csv``, and prints the scraped table as CSV.
    """
    year, month, no = str(year), str(month), str(no)
    payload = {
        'query_year': year,
        'query_month': month,
        'CO_ID': no,
        'query-button':'%E6%9F%A5%E8%A9%A2'
    }
    headers = {
        'User-Agent':'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/57.0.2987.133 Safari/537.36'
    }
    response = requests.post(url, headers=headers, data=payload)
    # The first <table> in the response holds the monthly figures.
    first_table = BeautifulSoup(response.text, 'html.parser').find_all('table')[0]
    # NOTE(review): '_tst_' looks like a typo for '_tse_' but is kept for
    # behaviour parity.
    html_path = './' + year + month + '_tst_' + no + '.html'
    with open(html_path, 'w') as html_file:
        html_file.write(str(first_table))
    table = pd.read_html(html_path)[0]
    csv_text = table.to_csv(header=False, index=False)
    print(csv_text)
    with open('./' + year + '_tse_' + no + '.csv', 'a') as csv_file:
        csv_file.write(str(csv_text))
# Fetch all twelve months of 2015 for stock no. 2317, sleeping 5 s between
# requests to avoid hammering the TWSE server.
for m in range(1, 13):
    time.sleep(5)
    parserTSE(2015, m, 2317)
|
[
"chihchinag@gmail.com"
] |
chihchinag@gmail.com
|
af504185ba8ca0856e0f15dcd57e1301d574f6b8
|
5a0bc261e7ed7418a1237c01ba25155c519e5909
|
/Chapter7/Chapter7part23.py
|
05c0d9dfe38692f6b9c422f5e843cbef73ccd55f
|
[] |
no_license
|
jsigner/ESC411
|
b4af0e3ee09e75c1df5b2879d1a792b1cb8ab029
|
4481449c649e755264605ba619f5a8b9a47894bc
|
refs/heads/master
| 2020-12-20T15:02:59.783210
| 2020-01-29T11:19:28
| 2020-01-29T11:19:28
| 236,115,351
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 688
|
py
|
import numpy as np
def diff2(u):
    """Discrete second difference of ``u``.

    Interior points use the centred stencil ``u[i+1] + u[i-1] - 2*u[i]``;
    the two boundary entries fall back to one-sided first differences.
    Returns a numpy array the same length as ``u`` (requires len(u) >= 2).
    """
    size = len(u)
    interior = [u[i + 1] + u[i - 1] - 2 * u[i] for i in range(1, size - 1)]
    edges_and_interior = [u[1] - u[0]] + interior + [u[size - 2] - u[size - 1]]
    return np.array(edges_and_interior)
def RHS(u, dx):
    """Right-hand side of the discretised PDE: reaction ``u*(1-u)`` plus
    diffusion ``diff2(u)/dx**2``.

    Vectorised with numpy (the original filled an array element-by-element
    in a Python loop); returns a float array the same length as ``u``.
    """
    u = np.asarray(u)
    return u * (1 - u) + diff2(u) / dx**2
def numsolFK(u0, dx, dt, N):
    """Integrate the PDE for ``N`` steps with the midpoint (RK2) rule.

    Returns a list of ``N`` solution snapshots, starting with ``u0``.
    """
    size = len(u0)
    snapshots = [np.zeros(size) for _ in range(N)]
    snapshots[0] = u0
    for t in range(1, N):
        prev = snapshots[t - 1]
        # Midpoint rule: evaluate the RHS at a half Euler step.
        midpoint = prev + (dt / 2) * RHS(prev, dx)
        snapshots[t] = prev + dt * RHS(midpoint, dx)
    return snapshots
# Demo: 100 midpoint steps on a 7-point grid with a localised initial bump.
print(numsolFK(np.array([0,0,0,2.5,1,0,0]),0.1,0.1,100))
|
[
"jens.signer@uzh.ch"
] |
jens.signer@uzh.ch
|
71d1c10d5510aae57aba8c5b966cb8abcea7333d
|
11daa29e6234611fdff7002c0bd6359ee89187f6
|
/lab 4/PlanetWars lab(1)/PlanetWars lab/bots/Blanko.py
|
494b9632e83016dfb28d61692d1fa8afeed5e2b0
|
[] |
no_license
|
TomLatimer92/Artificial-Intelligence-for-Games
|
fef960fe4aacd347272961f3c625a72aed46b2e8
|
b9a6f754be4df33ad836ff423b00bae2448843cd
|
refs/heads/master
| 2020-04-16T01:45:19.644230
| 2017-06-09T02:44:52
| 2017-06-09T02:44:52
| 83,385,863
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,548
|
py
|
''' Simple "Blank" PlanetWars controller bot.
The Bot does nothing, but shows the minimum a bot needs to have.
See the `update` method which is where your code goes.
The `PlanetWars` `Player` object (see players.py), will contain your bot
controller instance. The Player will provide a current `GameInfo` instance
to your bot `update` method each time it is called.
The `gameinfo` instance is a facade of the state of the game for the `Player`,
and includes all planets and fleets that currently exist. Note that the
details are limited by "fog of war" vision and only match what you can see. If
you want to know more you'll need to scout!
A gameinfo instance has various (possibly useful) dict's for you to use:
# all planet and fleets (that you own or can see):
planets.
fleets.
# dict's of just your planets/fleets:
my_planets.
my_fleets.
# dict's of both neutral and enemy planets/fleets:
not_my_planets.
not_my_fleets.
# dict's of just the enemy planet/fleets (fog limited):
enemy_planets.
enemy_fleets.
You issue orders from your bot using the methods of the gameinfo instance.
gameinfo.planet_order(src, dest, ships)
gameinfo.fleet_order(src, dest, ships)
For example, to send 10 ships from planet src to planet dest, you would
say `gameinfo.planet_order(src, dest, 10)`.
There is also a player specific log if you want to leave a message.
gameinfo.log("Here's a message from the bot")
'''
class Blanko(object):
    """Minimal do-nothing PlanetWars bot (see the module docstring)."""
    def update(self, gameinfo):
        # Called each tick with the current (fog-limited) GameInfo; a real
        # bot would issue planet_order/fleet_order calls here.
        pass
|
[
"9718648@student.swin.edu.au"
] |
9718648@student.swin.edu.au
|
77f5e2718963f38e6f8d3b4f94db63d867327aac
|
fa074f02d654df1a60e5f5d6cc0e53279f352ba3
|
/Pilot3/P3B7/metrics.py
|
2e3b8e8867ce592d35fdca05cce30c73ebec6bb8
|
[
"MIT"
] |
permissive
|
samcom12/Benchmarks-3
|
2ff5b943df7a0b4f20f8cfa6a9373383a74687e5
|
a48c85a4d4d76905c3392b18e42bea4bd28c518c
|
refs/heads/master
| 2023-08-29T19:44:27.455414
| 2021-08-02T14:34:52
| 2021-08-02T14:34:52
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 867
|
py
|
from pytorch_lightning.metrics.classification import F1
class F1Meter:
    """Tracks a per-task F1 metric across batches."""

    def __init__(self, tasks, average='micro'):
        # One stateful F1 metric object per task, keyed by task name.
        self.metrics = self._create_metrics(tasks, average)

    def _create_metrics(self, tasks, avg):
        """Build an F1 metric for every task.

        Args:
            tasks: mapping of task name -> number of classes
            avg: averaging mode, either 'micro' or 'macro'
        """
        metrics = {}
        for name, num_classes in tasks.items():
            metrics[name] = F1(num_classes, average=avg)
        return metrics

    def f1(self, y_hat, y):
        """Return this batch's F1 score for every predicted task."""
        return {task: self.metrics[task](pred, y[task])
                for task, pred in y_hat.items()}

    def compute(self):
        """Aggregate the F1 score over all batches seen so far, per task."""
        results = {}
        for task, metric in self.metrics.items():
            results[task] = metric.compute().item()
        return results
|
[
"young.todd.mk@gmail.com"
] |
young.todd.mk@gmail.com
|
05dec73c900f0b0c9e402c0259bf21ec5f4c80fb
|
91f5e289978190e5ae673aeef64da6d2eea003b7
|
/Python/run_manual_prcrawler.py
|
eec0b1162c7d0cd14b14743dd25802c1477c1031
|
[] |
no_license
|
sdownin/mmc-pharma
|
59699e715a7d09057b520d98747523ab7e15c0fa
|
2605a749a697b62cd1f5402f6566296fd6c83113
|
refs/heads/master
| 2020-04-19T01:04:15.674176
| 2019-06-07T06:58:46
| 2019-06-07T06:58:46
| 167,863,040
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,501
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Mon Apr 15 12:34:08 2019
@author: T430
"""
import os
import pandas as pd
from argparse import ArgumentParser
from manual_prcrawler import ManualPRCrawler
def run():
    """Run the manual industry press-release crawler.

    Parses ``--files``/``--names`` CLI options, then for each selected data
    file in the analysis directory runs a ManualPRCrawler over every
    (selected, non-pdf) firm row, fetching article URLs and articles.

    BUG FIX: the original used bare ``next`` expression statements where
    ``continue`` was intended. A bare name is a no-op in Python 3, so the
    name filter and the pdf skip never actually skipped anything.
    """
    work_dir = 'C:\\Users\\T430\\Google Drive\\PhD\\Research\\MMC\\pharma_encounters\\mmc-pharma'
    data_dir = os.path.join(work_dir, 'analysis_data')
    ## parse arguments
    par = ArgumentParser(description="Run Manual Industry Press Release Crawler")
    par.add_argument('-f','--files', type=str, help="The data files to process (comma separated)")
    par.add_argument('-n','--names', type=str, help="The firm names to crawl (comma separated)")
    args = par.parse_args()
    files = args.files.split(',') if args.files is not None else []
    names = args.names.split(',') if args.names is not None else []
    ## if no files specified, run all files in data dir
    if not files:
        files = os.listdir(data_dir)
    print('files to crawl:')
    print(files)
    ## run crawlers for each data file
    for file in files:
        ## check data file
        if file not in os.listdir(data_dir):
            print('skipping missing file %s' % file)
            continue  # was a no-op bare ``next`` in the original
        print('processing file: %s' % file)
        ## load data
        df = pd.read_csv(os.path.join(data_dir, file), na_values=[""])
        ## run web crawlers per domain in data file
        for _, row in df.iterrows():
            ## check firm name
            # NOTE(review): ``row.name`` is the pandas row label (the integer
            # index here), not a 'name' column — confirm the intended field.
            if names and row.name not in names:
                continue  # was a no-op bare ``next``
            ## dynamic content: temporarily skip
            if int(row.pdf):
                print(' skipping %s for dynamic pdf content' % row.name)
                continue  # was a no-op bare ``next``: pdf rows were NOT skipped
            ## run crawl spider
            print(' running firm %s' % row.name)
            clr = ManualPRCrawler(row.to_dict(), data_dir)
            clr.fetch_article_urls()
            clr.fetch_articles()
            print(clr.article_urls)
# Script entry point.
if __name__ == '__main__':
    run()
|
[
"stephendowning2008@gmail.com"
] |
stephendowning2008@gmail.com
|
864fc05a6117b18a00d629f8d80d7a448f6517f9
|
4c7750cd7d645ce0704032bf4ca9afd33b37020d
|
/configs/top_down/resnet/mpii/res50_mpii_256x256.py
|
e83aa9400a85c3017812fb8ffb293601bde131e0
|
[
"Apache-2.0"
] |
permissive
|
Nabin-Subedi/mmpose
|
c5239bb8d6155c724030736e51a44aba87e34657
|
586cea709a407c4e9cdd5d2d5adc23f8caec924b
|
refs/heads/master
| 2023-01-11T06:48:57.703848
| 2020-11-18T03:11:24
| 2020-11-18T03:11:24
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,346
|
py
|
# mmpose training config: top-down pose estimation on MPII at 256x256 input.
log_level = 'INFO'
load_from = None
resume_from = None
dist_params = dict(backend='nccl')
workflow = [('train', 1)]
checkpoint_config = dict(interval=10)
# Evaluate every 10 epochs using the PCKh keypoint metric.
evaluation = dict(interval=10, metric='PCKh', key_indicator='PCKh')
optimizer = dict(
    type='Adam',
    lr=5e-4,
)
optimizer_config = dict(grad_clip=None)
# learning policy: linear warmup, then step decay at epochs 170 and 200.
lr_config = dict(
    policy='step',
    warmup='linear',
    warmup_iters=500,
    warmup_ratio=0.001,
    step=[170, 200])
total_epochs = 210
log_config = dict(
    interval=50, hooks=[
        dict(type='TextLoggerHook'),
    ])
# 16 keypoints; all channels used for both training and inference.
channel_cfg = dict(
    num_output_channels=16,
    dataset_joints=16,
    dataset_channel=list(range(16)),
    inference_channel=list(range(16)))
# model settings: ResNet-50 backbone with a simple top-down keypoint head.
model = dict(
    type='TopDown',
    pretrained='torchvision://resnet50',
    backbone=dict(type='ResNet', depth=50),
    keypoint_head=dict(
        type='TopDownSimpleHead',
        in_channels=2048,
        out_channels=channel_cfg['num_output_channels'],
    ),
    train_cfg=dict(),
    test_cfg=dict(
        flip_test=True,
        post_process=True,
        shift_heatmap=True,
        unbiased_decoding=False,
        modulate_kernel=11),
    loss_pose=dict(type='JointsMSELoss', use_target_weight=True))
data_cfg = dict(
    image_size=[256, 256],
    heatmap_size=[64, 64],
    num_output_channels=channel_cfg['num_output_channels'],
    num_joints=channel_cfg['dataset_joints'],
    dataset_channel=channel_cfg['dataset_channel'],
    inference_channel=channel_cfg['inference_channel'],
    use_gt_bbox=True,
    bbox_file=None,
)
# Training augmentation: random flip plus random scale/rotation.
train_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(type='TopDownRandomFlip', flip_prob=0.5),
    dict(
        type='TopDownGetRandomScaleRotation', rot_factor=40, scale_factor=0.5),
    dict(type='TopDownAffine'),
    dict(type='ToTensor'),
    dict(
        type='NormalizeTensor',
        mean=[0.485, 0.456, 0.406],
        std=[0.229, 0.224, 0.225]),
    dict(type='TopDownGenerateTarget', sigma=2),
    dict(
        type='Collect',
        keys=['img', 'target', 'target_weight'],
        meta_keys=[
            'image_file', 'joints_3d', 'joints_3d_visible', 'center', 'scale',
            'rotation', 'flip_pairs'
        ]),
]
val_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(type='TopDownAffine'),
    dict(type='ToTensor'),
    dict(
        type='NormalizeTensor',
        mean=[0.485, 0.456, 0.406],
        std=[0.229, 0.224, 0.225]),
    dict(
        type='Collect',
        keys=['img'],
        meta_keys=['image_file', 'center', 'scale', 'rotation', 'flip_pairs']),
]
test_pipeline = val_pipeline
data_root = 'data/mpii'
data = dict(
    samples_per_gpu=64,
    workers_per_gpu=2,
    train=dict(
        type='TopDownMpiiDataset',
        ann_file=f'{data_root}/annotations/mpii_train.json',
        img_prefix=f'{data_root}/images/',
        data_cfg=data_cfg,
        pipeline=train_pipeline),
    val=dict(
        type='TopDownMpiiDataset',
        ann_file=f'{data_root}/annotations/mpii_val.json',
        img_prefix=f'{data_root}/images/',
        data_cfg=data_cfg,
        pipeline=val_pipeline),
    # Test split reuses the validation annotations and pipeline.
    test=dict(
        type='TopDownMpiiDataset',
        ann_file=f'{data_root}/annotations/mpii_val.json',
        img_prefix=f'{data_root}/images/',
        data_cfg=data_cfg,
        pipeline=test_pipeline),
)
|
[
"noreply@github.com"
] |
noreply@github.com
|
5b7118aae6780676a9b1e5dd9aaf4e5714a2dd11
|
36a094a44450d1353e9dfc8242a54e2bb70bb9b5
|
/src/ebonite/build/helpers.py
|
4d914e3f781459de8dab388a9fd817735874b529
|
[
"Apache-2.0"
] |
permissive
|
zyfra/ebonite
|
52843ce847a3fd28e4ba8ab64d986dcfb23671c0
|
b01b662c43709d152940f488574d78ff25f89ecf
|
refs/heads/master
| 2022-11-29T21:20:02.358797
| 2020-10-19T12:22:49
| 2020-10-19T12:22:49
| 221,721,146
| 275
| 18
|
Apache-2.0
| 2022-11-21T22:44:02
| 2019-11-14T14:49:47
|
Python
|
UTF-8
|
Python
| false
| false
| 1,239
|
py
|
from functools import wraps
from typing import List, Callable
from pyjackson.utils import get_function_fields, turn_args_to_kwargs
def validate_kwargs(f=None, *, allowed: List[str] = None, allowed_funcs: List[Callable] = None):
def inner(func):
all_fields = allowed or []
if allowed_funcs is not None:
all_fields += [field.name for func in allowed_funcs for field in get_function_fields(func, False)]
all_fields = set(all_fields)
fields = get_function_fields(func, False)
all_fields.update(field.name for field in fields)
@wraps(func)
def inner_inner(self, *args, **kwargs):
if len(args) > len(fields):
raise TypeError(
f'{func.__name__}() takes {len(fields)} positional arguments but {len(args)} were given')
kwargs = turn_args_to_kwargs(func, args, kwargs, False)
extra_kwargs = set(kwargs.keys()).difference(all_fields)
if len(extra_kwargs) > 0:
raise TypeError(f'{extra_kwargs} are an invalid keyword arguments for this function')
return func(self, **kwargs)
return inner_inner
if f is None:
return inner
return inner(f)
|
[
"noreply@github.com"
] |
noreply@github.com
|
70a8dd326fb2ca09e7b9dafc697a919fc5f4956e
|
e56214188faae8ebfb36a463e34fc8324935b3c2
|
/test/test_hcl_firmware.py
|
773860479af00219eb4688debc01670faea8c88d
|
[
"Apache-2.0"
] |
permissive
|
CiscoUcs/intersight-python
|
866d6c63e0cb8c33440771efd93541d679bb1ecc
|
a92fccb1c8df4332ba1f05a0e784efbb4f2efdc4
|
refs/heads/master
| 2021-11-07T12:54:41.888973
| 2021-10-25T16:15:50
| 2021-10-25T16:15:50
| 115,440,875
| 25
| 18
|
Apache-2.0
| 2020-03-02T16:19:49
| 2017-12-26T17:14:03
|
Python
|
UTF-8
|
Python
| false
| false
| 1,857
|
py
|
# coding: utf-8
"""
Cisco Intersight
Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. # noqa: E501
The version of the OpenAPI document: 1.0.9-1295
Contact: intersight@cisco.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import intersight
from intersight.models.hcl_firmware import HclFirmware # noqa: E501
from intersight.rest import ApiException
class TestHclFirmware(unittest.TestCase):
    """HclFirmware unit test stubs (generated by openapi-generator)."""
    def setUp(self):
        # No fixtures needed for these generated stubs.
        pass
    def tearDown(self):
        pass
    def testHclFirmware(self):
        """Test HclFirmware construction (placeholder)."""
        # FIXME: construct object with mandatory attributes with example values
        # model = intersight.models.hcl_firmware.HclFirmware() # noqa: E501
        pass
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
[
"ucs-build@github.com"
] |
ucs-build@github.com
|
f6a5c9b594417257ba6c45214cb08941d6ed3a86
|
66c3ff83c3e3e63bf8642742356f6c1817a30eca
|
/.vim/tmp/neocomplete/tags_output/=+home=+abel=+.virtualenvs=+django=+lib=+python2.7=+site-packages=+django=+views=+generic=+detail.py
|
a3f8b315eb315bda85117f9e7f2d3232d007aa1d
|
[] |
no_license
|
pacifi/vim
|
0a708e8bc741b4510a8da37da0d0e1eabb05ec83
|
22e706704357b961acb584e74689c7080e86a800
|
refs/heads/master
| 2021-05-20T17:18:10.481921
| 2020-08-06T12:38:58
| 2020-08-06T12:38:58
| 30,074,530
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,602
|
py
|
!_TAG_FILE_FORMAT 2 /extended format; --format=1 will not append ;" to lines/
!_TAG_FILE_SORTED 1 /0=unsorted, 1=sorted, 2=foldcase/
!_TAG_PROGRAM_AUTHOR Darren Hiebert /dhiebert@users.sourceforge.net/
!_TAG_PROGRAM_NAME Exuberant Ctags //
!_TAG_PROGRAM_URL http://ctags.sourceforge.net /official site/
!_TAG_PROGRAM_VERSION 5.9~svn20110310 //
BaseDetailView /home/abel/.virtualenvs/django/lib/python2.7/site-packages/django/views/generic/detail.py /^class BaseDetailView(SingleObjectMixin, View):$/;" c
DetailView /home/abel/.virtualenvs/django/lib/python2.7/site-packages/django/views/generic/detail.py /^class DetailView(SingleObjectTemplateResponseMixin, BaseDetailView):$/;" c
SingleObjectMixin /home/abel/.virtualenvs/django/lib/python2.7/site-packages/django/views/generic/detail.py /^class SingleObjectMixin(ContextMixin):$/;" c
SingleObjectTemplateResponseMixin /home/abel/.virtualenvs/django/lib/python2.7/site-packages/django/views/generic/detail.py /^class SingleObjectTemplateResponseMixin(TemplateResponseMixin):$/;" c
context_object_name /home/abel/.virtualenvs/django/lib/python2.7/site-packages/django/views/generic/detail.py /^ context_object_name = None$/;" v class:SingleObjectMixin
get /home/abel/.virtualenvs/django/lib/python2.7/site-packages/django/views/generic/detail.py /^ def get(self, request, *args, **kwargs):$/;" m class:BaseDetailView
get_context_data /home/abel/.virtualenvs/django/lib/python2.7/site-packages/django/views/generic/detail.py /^ def get_context_data(self, **kwargs):$/;" m class:SingleObjectMixin
get_context_object_name /home/abel/.virtualenvs/django/lib/python2.7/site-packages/django/views/generic/detail.py /^ def get_context_object_name(self, obj):$/;" m class:SingleObjectMixin
get_object /home/abel/.virtualenvs/django/lib/python2.7/site-packages/django/views/generic/detail.py /^ def get_object(self, queryset=None):$/;" m class:SingleObjectMixin
get_queryset /home/abel/.virtualenvs/django/lib/python2.7/site-packages/django/views/generic/detail.py /^ def get_queryset(self):$/;" m class:SingleObjectMixin
get_slug_field /home/abel/.virtualenvs/django/lib/python2.7/site-packages/django/views/generic/detail.py /^ def get_slug_field(self):$/;" m class:SingleObjectMixin
get_template_names /home/abel/.virtualenvs/django/lib/python2.7/site-packages/django/views/generic/detail.py /^ def get_template_names(self):$/;" m class:SingleObjectTemplateResponseMixin
model /home/abel/.virtualenvs/django/lib/python2.7/site-packages/django/views/generic/detail.py /^ model = None$/;" v class:SingleObjectMixin
pk_url_kwarg /home/abel/.virtualenvs/django/lib/python2.7/site-packages/django/views/generic/detail.py /^ pk_url_kwarg = 'pk'$/;" v class:SingleObjectMixin
queryset /home/abel/.virtualenvs/django/lib/python2.7/site-packages/django/views/generic/detail.py /^ queryset = None$/;" v class:SingleObjectMixin
slug_field /home/abel/.virtualenvs/django/lib/python2.7/site-packages/django/views/generic/detail.py /^ slug_field = 'slug'$/;" v class:SingleObjectMixin
slug_url_kwarg /home/abel/.virtualenvs/django/lib/python2.7/site-packages/django/views/generic/detail.py /^ slug_url_kwarg = 'slug'$/;" v class:SingleObjectMixin
template_name_field /home/abel/.virtualenvs/django/lib/python2.7/site-packages/django/views/generic/detail.py /^ template_name_field = None$/;" v class:SingleObjectTemplateResponseMixin
template_name_suffix /home/abel/.virtualenvs/django/lib/python2.7/site-packages/django/views/generic/detail.py /^ template_name_suffix = '_detail'$/;" v class:SingleObjectTemplateResponseMixin
|
[
"pacifi.bnr@gmail.com"
] |
pacifi.bnr@gmail.com
|
58245768cd84f9603e8f689a19db03e72f3ccfd5
|
343e2805922596f5f7028624cbe293dc26cfe0a7
|
/binary_search_tree_tests.py
|
d9f4485c115b7167840fdc6ceaadee166fe9e083
|
[] |
no_license
|
cdrn/Algorithms
|
73e556fe7f9e7116760879ee279ceec1ec3b527d
|
5a4d7621466ae86bbeafb137f77652c736f2d5d3
|
refs/heads/master
| 2021-09-15T16:13:57.215421
| 2018-06-06T12:28:33
| 2018-06-06T12:28:33
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 787
|
py
|
import pytest
import binary_search_tree
# setup
def init_bst():
    """Fixture: build a small tree rooted at 25 with values on both sides."""
    bst = binary_search_tree.BinarySearchTree(25)
    bst.insert(bst.root, 10)
    bst.insert(bst.root, 15)
    bst.insert(bst.root, 500)
    bst.insert(bst.root, 1000)
    bst.insert(bst.root, 4)
    return bst
def test_bst_init():
    """The constructor stores the initial value at the root."""
    bst = init_bst()
    assert bst.root.value == 25
def test_add_node_bst():
    """Inserting a value smaller than the root places it on the left.

    NOTE(review): this inserts a duplicate 10 (already in the fixture);
    the assertion assumes duplicates are ignored or kept left — confirm
    against the BinarySearchTree implementation.
    """
    bst = init_bst()
    bst.insert(bst.root, 10)
    print(bst.root.left)
    assert bst.root.left.value == 10
def test_bst_contains():
    """contains() finds a value that was inserted."""
    bst = init_bst()
    assert bst.contains(bst.root, 500)
def test_bst_contains_false():
    """contains() is False for a value that was never inserted."""
    bst = init_bst()
    print(bst.contains(bst.root, 12512))
    assert bst.contains(bst.root, 12512) == False
def test_bst_get_parent():
    """find_parent() returns the node whose child holds the value."""
    bst = init_bst()
    assert bst.find_parent(bst.root, 1000).value == 500
|
[
"chrisdoran@protonmail.com"
] |
chrisdoran@protonmail.com
|
c026325912bbc226f2020f4804cb3964da43e858
|
4252102a1946b2ba06d3fa914891ec7f73570287
|
/pylearn2/linear/linear_transform.py
|
657282a1c1dbc8111ae74b874623568fcce31f81
|
[] |
no_license
|
lpigou/chalearn2014
|
21d487f314c4836dd1631943e20f7ab908226771
|
73b99cdbdb609fecff3cf85e500c1f1bfd589930
|
refs/heads/master
| 2020-05-17T00:08:11.764642
| 2014-09-24T14:42:00
| 2014-09-24T14:42:00
| 24,418,815
| 2
| 3
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,812
|
py
|
"""
.. todo::
WRITEME
"""
__authors__ = "Ian Goodfellow"
__copyright__ = "Copyright 2010-2012, Universite de Montreal"
__credits__ = ["Ian Goodfellow"]
__license__ = "3-clause BSD"
__maintainer__ = "Ian Goodfellow"
__email__ = "goodfeli@iro"
class LinearTransform(object):
    """Abstract description of a linear transformation.

    Concrete subclasses may realize the transform as a dense matrix
    multiply, a convolution, etc. Subclasses are also expected to inherit
    from TheanoLinear's LinearTransform; this class deliberately does not,
    because most pylearn2 transforms already reach TheanoLinear through a
    derived class and should not inherit it twice. It exists purely as a
    placeholder documenting the extra methods a TheanoLinear
    LinearTransform needs in order to work with pylearn2.
    """

    def get_params(self):
        """Return the list of parameters governing the transformation.

        Must be overridden by subclasses.
        """
        raise NotImplementedError()

    def get_weights_topo(self):
        """Return a batch of filters, formatted topologically.

        Only meaningful for transforms defined on a topological space
        (e.g. a convolution operator). If the transform is defined on a
        VectorSpace, converting the vector into a topological space is the
        job of some other class (such as a ViewConverter), not this one.
        Must be overridden by subclasses.
        """
        raise NotImplementedError()

    def set_batch_size(self, batch_size):
        """Change the fixed batch size, for transforms (e.g. Conv2D) that
        have one. The default implementation is a no-op.
        """
        pass
|
[
"lionelpigou@gmail.com"
] |
lionelpigou@gmail.com
|
79d7698a4437041440511147e14d336945d9fffe
|
e942cafaf64f6354e1f9ebd4a84bcf236ad93004
|
/yawast/commands/ssl.py
|
bbfe7b60ff039aab4923e020844fa135c88a4fb5
|
[
"MIT"
] |
permissive
|
Prodject/yawast
|
9a441a0576012dc5f0664cd23cfa0a803fd7a477
|
044309709cf3782de75a35f77297f2d2850d8e1c
|
refs/heads/master
| 2020-03-23T02:32:12.357082
| 2020-01-21T18:13:19
| 2020-01-21T18:13:19
| 140,978,938
| 0
| 0
|
BSD-3-Clause
| 2020-01-21T18:13:20
| 2018-07-14T21:23:05
|
Ruby
|
UTF-8
|
Python
| false
| false
| 1,974
|
py
|
# Copyright (c) 2013 - 2019 Adam Caudill and Contributors.
# This file is part of YAWAST which is released under the MIT license.
# See the LICENSE file or go to https://yawast.org/license/ for full license details.
import socket
from yawast.commands import utils as cutils
from yawast.scanner.cli import ssl_internal, ssl_sweet32, ssl_labs
from yawast.scanner.session import Session
from yawast.shared import utils, output
def start(session: Session):
    """Top-level SSL/TLS scan flow for *session*.

    Resolves the target hostname, follows redirects, then scans HTTPS
    targets: public hostnames on port 443 go through SSL Labs (with an
    SSLyze fallback on failure); internal/IP/non-443 targets go straight
    to the internal SSLyze scanner. Optionally runs the SWEET32 3DES
    session-count check.
    """
    print(f"Scanning: {session.url}")

    # The hostname must resolve before any scanning is attempted.
    try:
        socket.gethostbyname(session.domain)
    except socket.gaierror as err:
        output.debug_exception()
        output.error(f"Fatal Error: Unable to resolve {session.domain} ({str(err)})")
        return

    try:
        cutils.check_redirect(session)
    except Exception as err:
        output.debug_exception()
        output.error(f"Unable to continue: {str(err)}")
        return

    if session.url_parsed.scheme == "https":
        # SSL Labs can only scan public hostnames on the default port;
        # anything else must use the internal SSLyze-based scanner.
        needs_internal = (
            session.args.internalssl
            or utils.is_ip(session.domain)
            or utils.get_port(session.url) != 443
        )
        if needs_internal:
            try:
                ssl_internal.scan(session)
            except Exception as err:
                output.error(f"Error running scan with SSLyze: {str(err)}")
        else:
            try:
                ssl_labs.scan(session)
            except Exception as err:
                output.debug_exception()
                output.error(f"Error running scan with SSL Labs: {str(err)}")
                output.norm("Switching to internal SSL scanner...")
                try:
                    ssl_internal.scan(session)
                except Exception as err:
                    output.error(f"Error running scan with SSLyze: {str(err)}")

    if session.args.tdessessioncount:
        ssl_sweet32.scan(session)
|
[
"adam@adamcaudill.com"
] |
adam@adamcaudill.com
|
d9ebb9da6703c60aa1b6bae5d27a4646a86c8585
|
9405aa570ede31a9b11ce07c0da69a2c73ab0570
|
/aliyun-python-sdk-petadata/aliyunsdkpetadata/request/v20160101/DescribeInstanceInfoRequest.py
|
9e54e048432bd24eba7c3baff1ba1d512898802c
|
[
"Apache-2.0"
] |
permissive
|
liumihust/aliyun-openapi-python-sdk
|
7fa3f5b7ea5177a9dbffc99e73cf9f00e640b72b
|
c7b5dd4befae4b9c59181654289f9272531207ef
|
refs/heads/master
| 2020-09-25T12:10:14.245354
| 2019-12-04T14:43:27
| 2019-12-04T14:43:27
| 226,002,339
| 1
| 0
|
NOASSERTION
| 2019-12-05T02:50:35
| 2019-12-05T02:50:34
| null |
UTF-8
|
Python
| false
| false
| 2,128
|
py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class DescribeInstanceInfoRequest(RpcRequest):
    """RPC request for the PetaData ``DescribeInstanceInfo`` API
    (version 2016-01-01).

    Auto-generated SDK style: each query parameter gets a paired
    getter/setter that reads from / writes into the request's query
    parameter map.
    """
    def __init__(self):
        # Product, API version, action name, endpoint-resolution key.
        RpcRequest.__init__(self, 'PetaData', '2016-01-01', 'DescribeInstanceInfo','petadata')
    def get_ResourceOwnerId(self):
        return self.get_query_params().get('ResourceOwnerId')
    def set_ResourceOwnerId(self,ResourceOwnerId):
        self.add_query_param('ResourceOwnerId',ResourceOwnerId)
    def get_InstanceId(self):
        return self.get_query_params().get('InstanceId')
    def set_InstanceId(self,InstanceId):
        self.add_query_param('InstanceId',InstanceId)
    def get_SecurityToken(self):
        return self.get_query_params().get('SecurityToken')
    def set_SecurityToken(self,SecurityToken):
        self.add_query_param('SecurityToken',SecurityToken)
    def get_ResourceOwnerAccount(self):
        return self.get_query_params().get('ResourceOwnerAccount')
    def set_ResourceOwnerAccount(self,ResourceOwnerAccount):
        self.add_query_param('ResourceOwnerAccount',ResourceOwnerAccount)
    def get_OwnerAccount(self):
        return self.get_query_params().get('OwnerAccount')
    def set_OwnerAccount(self,OwnerAccount):
        self.add_query_param('OwnerAccount',OwnerAccount)
    def get_OwnerId(self):
        return self.get_query_params().get('OwnerId')
    def set_OwnerId(self,OwnerId):
        self.add_query_param('OwnerId',OwnerId)
|
[
"yixiong.jxy@alibaba-inc.com"
] |
yixiong.jxy@alibaba-inc.com
|
d52046d70e0b72fb5f903072c94bfbe2ee87fac2
|
35bb363d97e33861c76106251991410311f193ca
|
/maskrcnn_benchmark/utils/checkpoint.py
|
e095937236eff225b5eb9cb0c40bc3e18b666bd2
|
[
"MIT"
] |
permissive
|
krumo/Domain-Adaptive-Faster-RCNN-PyTorch
|
82c17dda3d133d6cbae8b16d5a6653e8d1c38df5
|
0da7af8ae4e62d86fb97239026ef1875470e4ca0
|
refs/heads/master
| 2022-07-25T02:39:28.775147
| 2022-07-11T15:04:13
| 2022-07-11T15:04:13
| 211,833,935
| 298
| 77
|
MIT
| 2019-10-27T22:09:33
| 2019-09-30T10:22:00
|
Python
|
UTF-8
|
Python
| false
| false
| 4,810
|
py
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import logging
import os
import torch
from maskrcnn_benchmark.utils.model_serialization import load_state_dict
from maskrcnn_benchmark.utils.c2_model_loading import load_c2_format
from maskrcnn_benchmark.utils.imports import import_file
from maskrcnn_benchmark.utils.model_zoo import cache_url
class Checkpointer(object):
    """Saves and restores model/optimizer/scheduler state under ``save_dir``.

    ``save`` writes ``<save_dir>/<name>.pth`` and records it in a
    ``last_checkpoint`` marker file; ``load`` restores state from an
    explicit path (the automatic resume-from-last-checkpoint lookup is
    currently commented out).
    """
    def __init__(
        self,
        model,
        optimizer=None,
        scheduler=None,
        save_dir="",
        save_to_disk=None,
        logger=None,
    ):
        self.model = model
        self.optimizer = optimizer
        self.scheduler = scheduler
        self.save_dir = save_dir
        self.save_to_disk = save_to_disk
        if logger is None:
            logger = logging.getLogger(__name__)
        self.logger = logger
    def save(self, name, **kwargs):
        """Serialize model (+optimizer/scheduler if present, +kwargs) to
        ``<save_dir>/<name>.pth`` and tag it as the last checkpoint.

        Silently does nothing unless both ``save_dir`` and
        ``save_to_disk`` are set (e.g. only rank 0 saves in DDP setups —
        presumably; confirm against callers).
        """
        if not self.save_dir:
            return
        if not self.save_to_disk:
            return
        data = {}
        data["model"] = self.model.state_dict()
        if self.optimizer is not None:
            data["optimizer"] = self.optimizer.state_dict()
        if self.scheduler is not None:
            data["scheduler"] = self.scheduler.state_dict()
        data.update(kwargs)
        save_file = os.path.join(self.save_dir, "{}.pth".format(name))
        self.logger.info("Saving checkpoint to {}".format(save_file))
        torch.save(data, save_file)
        self.tag_last_checkpoint(save_file)
    def load(self, f=None):
        """Load checkpoint file *f* into model/optimizer/scheduler.

        Returns a dict of any extra data stored in the checkpoint; the
        consumed keys are ``pop``-ed so they do not leak back to the
        caller. With no *f*, logs and returns an empty dict (fresh init).
        """
        # if self.has_checkpoint():
        #     # override argument with existing checkpoint
        #     f = self.get_checkpoint_file()
        if not f:
            # no checkpoint could be found
            self.logger.info("No checkpoint found. Initializing model from scratch")
            return {}
        self.logger.info("Loading checkpoint from {}".format(f))
        checkpoint = self._load_file(f)
        self._load_model(checkpoint)
        if "optimizer" in checkpoint and self.optimizer:
            self.logger.info("Loading optimizer from {}".format(f))
            self.optimizer.load_state_dict(checkpoint.pop("optimizer"))
        if "scheduler" in checkpoint and self.scheduler:
            self.logger.info("Loading scheduler from {}".format(f))
            self.scheduler.load_state_dict(checkpoint.pop("scheduler"))
        # return any further checkpoint data
        return checkpoint
    def has_checkpoint(self):
        """True when a ``last_checkpoint`` marker exists in ``save_dir``."""
        save_file = os.path.join(self.save_dir, "last_checkpoint")
        return os.path.exists(save_file)
    def get_checkpoint_file(self):
        """Return the path recorded in the ``last_checkpoint`` marker,
        or "" if the marker is missing."""
        save_file = os.path.join(self.save_dir, "last_checkpoint")
        try:
            with open(save_file, "r") as f:
                last_saved = f.read()
                last_saved = last_saved.strip()
        except IOError:
            # if file doesn't exist, maybe because it has just been
            # deleted by a separate process
            last_saved = ""
        return last_saved
    def tag_last_checkpoint(self, last_filename):
        """Record *last_filename* as the most recent checkpoint."""
        save_file = os.path.join(self.save_dir, "last_checkpoint")
        with open(save_file, "w") as f:
            f.write(last_filename)
    def _load_file(self, f):
        # Always map to CPU so checkpoints load regardless of the saving
        # device; callers move tensors to their device afterwards.
        return torch.load(f, map_location=torch.device("cpu"))
    def _load_model(self, checkpoint):
        load_state_dict(self.model, checkpoint.pop("model"))
class DetectronCheckpointer(Checkpointer):
    """Checkpointer that also understands Detectron-style weight sources:
    ``catalog://`` model-zoo names, ``http(s)`` URLs (downloaded and
    cached), and Caffe2 ``.pkl`` checkpoints.
    """
    def __init__(
        self,
        cfg,
        model,
        optimizer=None,
        scheduler=None,
        save_dir="",
        save_to_disk=None,
        logger=None,
    ):
        super(DetectronCheckpointer, self).__init__(
            model, optimizer, scheduler, save_dir, save_to_disk, logger
        )
        # clone() so later mutations of the caller's cfg don't affect us
        self.cfg = cfg.clone()
    def _load_file(self, f):
        """Resolve *f* through catalog/URL/pkl indirections, then load.

        Resolution order matters: catalog names expand to URLs, URLs are
        downloaded to a local cache, and only then is the file format
        (pkl vs native torch) examined.
        """
        # catalog lookup
        if f.startswith("catalog://"):
            paths_catalog = import_file(
                "maskrcnn_benchmark.config.paths_catalog", self.cfg.PATHS_CATALOG, True
            )
            catalog_f = paths_catalog.ModelCatalog.get(f[len("catalog://") :])
            self.logger.info("{} points to {}".format(f, catalog_f))
            f = catalog_f
        # download url files
        if f.startswith("http"):
            # if the file is a url path, download it and cache it
            cached_f = cache_url(f)
            self.logger.info("url {} cached in {}".format(f, cached_f))
            f = cached_f
        # convert Caffe2 checkpoint from pkl
        if f.endswith(".pkl"):
            return load_c2_format(self.cfg, f)
        # load native detectron.pytorch checkpoint
        loaded = super(DetectronCheckpointer, self)._load_file(f)
        # Raw state dicts (no "model" key) are wrapped so load() can
        # treat every checkpoint uniformly.
        if "model" not in loaded:
            loaded = dict(model=loaded)
        return loaded
|
[
"whrzxzero@gmail.com"
] |
whrzxzero@gmail.com
|
25b320acb305f818264ed9fbc78710bc50781607
|
744771e7ee537d9f13018f2158cc85ea2ba4331c
|
/opencv/camera/cvimport.py
|
1761733a694fe55c926e28fa1b21d6034fb6a52f
|
[] |
no_license
|
Miragecore/python_study
|
b654269e9c0a202dd72aa341380f6bbecccbe394
|
475477590afd12a7e93444bea2329db9f74d9e10
|
refs/heads/master
| 2023-01-18T14:56:00.233073
| 2023-01-09T01:46:52
| 2023-01-09T01:46:52
| 59,117,762
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 102
|
py
|
import site
site.addsitedir("/usr/local/lib/python2.7/site-packages")
import cv2
import numpy as np
|
[
"MirageKimYs@Gmail.com"
] |
MirageKimYs@Gmail.com
|
3e0ab987c99c4e799ac09dce61e5b0b24b715612
|
6ed8b908eac26b029214078a58b2791dadd2c968
|
/2048puzzle/PlayerAI_3.py
|
9f9ffb04d2f8d00473afb317937339c7096e81b1
|
[] |
no_license
|
Kunapalli/leetcode
|
d73d6b4abec93da4a3a2dc92709dbc7b14144bbc
|
5fbaac2dd6d5f74b20de6da8a41bb6af4c73ec30
|
refs/heads/master
| 2020-04-25T20:02:18.396671
| 2019-05-07T04:53:16
| 2019-05-07T04:53:16
| 173,041,225
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,509
|
py
|
from random import randint
from BaseAI import BaseAI
import time
import Grid_3
import math
def terminal(state):
    """Return True when *state* has no legal moves left (game over)."""
    can_continue = state.canMove()
    return not can_continue
def getChild(state, dir):
    """Clone *state*, apply move *dir*; return (clone, whether it moved)."""
    successor = state.clone()
    did_move = successor.move(dir)
    return successor, did_move
def children(state):
    """All (successor, direction) pairs for moves that change *state*."""
    result = []
    for direction in state.getAvailableMoves():
        successor, did_move = getChild(state, direction)
        if did_move:
            result.append((successor, direction))
    return result
class PlayerAI(BaseAI):
    """2048 player: depth-limited minimax with alpha-beta pruning, where
    the adversary places 2/4 tiles in empty cells, under a ~0.2 s/move
    time budget. The heuristic blends emptiness, smoothness,
    monotonicity, and (log of) the max tile.
    """

    def __init__(self):
        self.previous_max_score = 0
        self.previous_min_score = float('inf')
        self.available_cells = 14

    def getMove(self, state):
        """Return the best move direction found within the time limit."""
        start = time.process_time()
        limit = 4  # maximum search depth (plies)
        (dir, utility) = self.maximize(state, float('-inf'), float('inf'), limit, start)
        return dir

    def emptyness(self, state):
        """Percentage of the 16 cells that are empty."""
        return (len(state.getAvailableCells()) / 16.0) * 100

    def smoothness(self, state):
        """Count adjacent equal tiles (mergeable pairs), scaled to %."""
        u2 = 0
        for i in range(state.size):
            for j in range(state.size - 1):
                if state.map[i][j] == state.map[i][j + 1]:
                    u2 += 1
        for j in range(state.size):
            for i in range(state.size - 1):
                if state.map[i][j] == state.map[i + 1][j]:
                    u2 += 1
        return (u2 / 16.0) * 100.0

    def edge_bonus(self, state):
        """Bonus for border tiles that dwarf the largest 2x2-center tile."""
        e = 0
        m = max(max(state.map[1][1], state.map[1][2]), max(state.map[2][1], state.map[2][2]))
        for x, y in [(0, 0), (0, 1), (0, 2), (0, 3), (1, 0), (1, 3),
                     (2, 0), (2, 3), (3, 0), (3, 1), (3, 2), (3, 3)]:
            for v in range(1, 13, 1):
                if state.map[x][y] > (2 ** v) * m:
                    e += v
                    break
        return e

    def monotonicity(self, state):
        """Reward monotonic rows/columns, scaled to a percentage."""
        m = 0
        a1 = True
        a2 = True
        for i in range(state.size):
            a1 = a1 and (state.map[i][0] <= state.map[i][1] <= state.map[i][2] <= state.map[i][3])
            a2 = a2 and (state.map[i][0] >= state.map[i][1] >= state.map[i][2] >= state.map[i][3])
        if a1 or a2:
            m += 10
        a3 = True
        a4 = True
        for j in range(state.size):
            # Bug fix: was ``a3 = a4 and (... state.map[j][3])`` — a
            # copy/paste typo that clobbered a3 and indexed the wrong cell.
            a3 = a3 and (state.map[0][j] <= state.map[1][j] <= state.map[2][j] <= state.map[3][j])
            a4 = a4 and (state.map[0][j] >= state.map[1][j] >= state.map[2][j] >= state.map[3][j])
        if a3 or a4:
            m += 10
        if (a1 and a4) or (a1 and a3) or (a2 and a4) or (a2 and a3):
            m += 10
        return (m / 40.0) * 100

    def max_weight(self, state):
        """Max tile as a fraction of 32768, scaled to a percentage."""
        return (state.getMaxTile() / 32768) * 100.0

    def eval(self, state):
        """Heuristic value of *state*, returned as a (move, utility) pair
        so terminal nodes unpack like recursive results.
        Note: edge_bonus (h3) is computed but deliberately left out of
        the weighted sum.
        """
        h1 = self.emptyness(state)
        h2 = self.smoothness(state)
        h3 = self.edge_bonus(state)
        h4 = self.monotonicity(state)
        h5 = math.log(self.max_weight(state), 2)
        return (0, 3 * h1 + h2 + h4 + h5)  # 1024, 512, 256, 128

    def maximize(self, state, alpha, beta, depth, start):
        """Max (player) node of alpha-beta search; returns (move, utility)."""
        if terminal(state) or depth == 0 or (time.process_time() - start > 0.195):
            return self.eval(state)
        (maxDir, maxUtility) = (None, float('-inf'))
        for (child, dir) in children(state):
            (_, utility) = self.minimize(child, alpha, beta, depth - 1, start)
            if utility > maxUtility:
                (maxDir, maxUtility) = (dir, utility)
            if maxUtility >= beta:
                break
            if maxUtility > alpha:
                alpha = maxUtility
        return (maxDir, maxUtility)

    def minimize(self, state, alpha, beta, depth, start):
        """Min (tile-spawn adversary) node: considers a 2 and a 4 in every
        empty cell; returns (0, utility)."""
        if terminal(state) or depth == 0 or (time.process_time() - start > 0.195):
            return self.eval(state)
        (minDir, minUtility) = (None, float('inf'))
        L = state.getAvailableCells()
        spawn_states = []
        for pos in L:
            grid2 = state.clone()
            grid4 = state.clone()
            grid2.insertTile(pos, 2)
            # Bug fix: grid4 previously inserted a 2 as well, so the
            # adversary never actually considered 4-tile spawns.
            grid4.insertTile(pos, 4)
            spawn_states.append(grid2)
            spawn_states.append(grid4)
        for child in spawn_states:
            (_, utility) = self.maximize(child, alpha, beta, depth - 1, start)
            if utility < minUtility:
                minUtility = utility
            if minUtility <= alpha:
                break
            if minUtility < beta:
                beta = minUtility
        return (0, minUtility)
|
[
"sharma@Sharmas-MacBook-Pro.local"
] |
sharma@Sharmas-MacBook-Pro.local
|
ff363c8bbbbcb10df1b0550fdc8d72a9e1003e47
|
e5793284d9327bfef1b9b8b4bdb3a012fa721def
|
/plataforma/sujetos/migrations/0020_auto__add_field_persona_telefono.py
|
e0d8fbb9a152c103b14cecf3f2c61f085ff984dc
|
[] |
no_license
|
saraif/plataforma
|
1b486fd1dd41c136d3be011957ad254e2ecc2dfd
|
4e27260bcce19dc337c5cc804d26400cefdcf86f
|
refs/heads/master
| 2020-04-29T13:02:09.787275
| 2013-11-22T13:29:28
| 2013-11-22T13:29:28
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,692
|
py
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: adds the ``telefono`` integer column to
    ``sujetos_persona``. Auto-generated; the frozen ORM snapshot below
    must stay in sync with the models at the time of generation.
    """
    def forwards(self, orm):
        # Adding field 'Persona.telefono'
        db.add_column(u'sujetos_persona', 'telefono',
                      self.gf('django.db.models.fields.IntegerField')(default=0),
                      keep_default=False)
    def backwards(self, orm):
        # Deleting field 'Persona.telefono'
        db.delete_column(u'sujetos_persona', 'telefono')
    # Frozen ORM snapshot used by South while running this migration.
    models = {
        u'sujetos.persona': {
            'Meta': {'object_name': 'Persona'},
            'apellidos': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
            'cedula': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True'}),
            'edad': ('django.db.models.fields.IntegerField', [], {}),
            'fecha_nacimiento': ('django.db.models.fields.DateField', [], {}),
            'nombres': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
            'proyecto': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['sujetos.Proyecto']", 'symmetrical': 'False'}),
            'sexo': ('django.db.models.fields.CharField', [], {'default': "'F'", 'max_length': '1'}),
            'telefono': ('django.db.models.fields.IntegerField', [], {})
        },
        u'sujetos.proyecto': {
            'Meta': {'object_name': 'Proyecto'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'nombre': ('django.db.models.fields.CharField', [], {'max_length': '60'})
        }
    }
    complete_apps = ['sujetos']
|
[
"salacuatro@salacuatro.(none)"
] |
salacuatro@salacuatro.(none)
|
b3ed8beabe64300b4c689147e18f17f5b1453e18
|
7565e9fb464e12d950a8e0dc4195c6b60035559b
|
/hw5/save_original_info.py
|
11262f34acc448960290e9419938dd7f267c916a
|
[] |
no_license
|
proevgenii/EPAM-HW-2020-
|
0cb3d32d3399aada6e30d300da7b6036181171bc
|
3d884c8f10b1b183f1802f815e7b6dddbec79c9f
|
refs/heads/main
| 2023-07-06T20:04:28.069300
| 2021-08-23T20:50:00
| 2021-08-23T20:50:00
| 344,617,122
| 0
| 0
| null | 2021-08-23T20:50:17
| 2021-03-04T21:41:38
|
Python
|
UTF-8
|
Python
| false
| false
| 2,020
|
py
|
"""
Написать декоратор который позволит сохранять информацию из
исходной функции (__name__ and __doc__), а так же сохранит саму
исходную функцию в атрибуте __original_func
print_result изменять нельзя, за исключением добавления вашего
декоратора на строку отведенную под него - замените комментарий
До применения вашего декоратор будет вызываться AttributeError при custom_sum.__original_func
Это корректное поведение
После применения там должна быть исходная функция
Ожидаемый результат:
print(custom_sum.__doc__) # 'This function can sum any objects which have __add___'
print(custom_sum.__name__) # 'custom_sum'
print(custom_sum.__original_func) # <function custom_sum at <some_id>>
"""
import functools
def print_result(func):
    """Decorator that prints the result of each call to *func*.

    Preserves the wrapped function's metadata (``__name__``, ``__doc__``,
    etc. via :func:`functools.wraps`) and exposes the undecorated callable
    as ``__original_func``, as required by the module docstring.
    """
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        """Function-wrapper which print result of an original function"""
        result = func(*args, **kwargs)
        print(result)
        return result
    # Bug fix: the original stored ``func.__call__`` (a method-wrapper);
    # callers expect the plain function object here.
    wrapper.__original_func = func
    return wrapper
@print_result
def custom_sum(*args):
    """This function can sum any objects which have __add___"""
    # Fold the arguments left-to-right with ``+``.
    return functools.reduce(lambda left, right: left + right, args)
if __name__ == "__main__":
    # Decorated calls: each prints its result as a side effect.
    custom_sum([1, 2, 3], [4, 5])
    custom_sum(1, 2, 3, 4)
    # Metadata should reflect the wrapped function, not the wrapper.
    print(custom_sum.__doc__)
    print(custom_sum.__name__)
    without_print = custom_sum.__original_func
    # the result returns without printing
    without_print(1, 2, 3, 4)
    print(custom_sum.__original_func)
|
[
"proevgenii19@gmail.com"
] |
proevgenii19@gmail.com
|
19907a4d12c158d81ffc7e4e964f3e29250c770d
|
6b53ece215f6de1ce87b0c2a9aece8ede6b4a05d
|
/ah.py
|
a96b1e183a06a108adf7deca657f60f4d5f0cefe
|
[] |
no_license
|
micheldavalos/kattis_aaah-
|
fad19fd0596caab04154f1ad111f6e38bb74662b
|
bbedc7335b1c530cb3f869482ed0b2b54e4a0d51
|
refs/heads/master
| 2020-06-06T19:25:30.962955
| 2019-06-20T02:24:34
| 2019-06-20T02:24:34
| 192,833,995
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 155
|
py
|
# Kattis "Aaah!": print 'go' if our breath (line 1) is at least as long
# as the doctor's required breath (line 2), otherwise 'no'.
first = input()
second = input()
# NOTE(review): the explicit 'h'/'h' case is already covered by the
# length comparison below; it appears to be redundant.
if first == 'h' and second == 'h':
    print('go')
elif len(first) >= len(second):
    print('go')
else:
    print('no')
|
[
"michel.davalos@academicos.udg.mx"
] |
michel.davalos@academicos.udg.mx
|
a467cdec749495cd2cef258f4ff84b48fc93d69b
|
0686c615c9a78b4aae8b7be064d7ee6653f45fff
|
/GUI/Contain/lable_frame.py
|
f75d7fdf4d979a6e79fa4101c09e73e1d2dd4de2
|
[] |
no_license
|
QuangVuong85/Tkinter
|
647bf6b8873dd9698befcad131ebdead70f5f0ee
|
a87f31b9e805d4fad0ee16616ae8ef9b42371b4e
|
refs/heads/master
| 2020-06-04T10:03:52.941390
| 2019-06-16T09:55:56
| 2019-06-16T09:55:56
| 191,978,270
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 551
|
py
|
from tkinter import *
from tkinter import messagebox
import random as rd
def random_value():
    """Show a message box containing a freshly generated random float."""
    messagebox.showinfo("Message", "Value random=%f"%rd.random())
master = Tk()
master.title("Title")
# LabelFrame spanning the full width and the top 30% of the window.
lableframe = LabelFrame(master, fg="red", font=14, text="Random value")
lableframe.place(relwidth = '1.0', relheight = '0.3')
# Button that pops the random-value dialog.
btn = Button(lableframe, text = "Click here", fg = "blue", command=random_value)
btn.pack(side=BOTTOM)
lbl = Label(lableframe, text="Lable Test LableFrame", font=30)
lbl.pack(side=BOTTOM)
master.mainloop()
|
[
"quangvuong0805@gmail.com"
] |
quangvuong0805@gmail.com
|
db96e3cc2ff9ff0b7d64455bcbb747357bd27f7f
|
8a1351dd9a9f7ab8f58e133cab788aed29edbf04
|
/pyspark_model/online_pred.py
|
d4f9d75f07a2c90ea0002a1f2881f807c8c71001
|
[] |
no_license
|
stubird/onlinePred
|
3999c2444c6599d01564258e232b966e2858a326
|
9208118fa48de3484e00bed044cac1bf99e0be8f
|
refs/heads/master
| 2020-03-30T01:40:45.220258
| 2018-11-02T07:21:12
| 2018-11-02T07:21:12
| 150,588,975
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,886
|
py
|
import flask
from flask import Flask, request, url_for, Response
from sklearn.externals import joblib
from pyspark.mllib.tree import GradientBoostedTreesModel
from pyspark.mllib.linalg import SparseVector
from pyspark import SparkContext,SparkConf
import json
app = Flask(__name__)
# Load the model once at startup (local Spark context + saved GBT model).
conf = SparkConf().setMaster("local").setAppName("My App")
sc = SparkContext(conf = conf)
model =GradientBoostedTreesModel.load(sc,'./sellModel')
@app.route("/", methods=["GET"])
def index():
    """Describe the available prediction endpoints as JSON."""
    with app.test_request_context():
        # Build, per endpoint, the URL it listens on and its parameters.
        result = {"gbdt": {"url": url_for("gbdt"),
                           "params": ["vector"]}}
        result_body = flask.json.dumps(result)
    return Response(result_body, mimetype="application/json")
@app.route("/ml/gbdt", methods=["GET"])
def gbdt():
    """Predict with the GBT model on the JSON 'vector' query parameter."""
    request_args = request.args
    # With no parameters at all, return a usage hint.
    if not request_args:
        result = {
            "message": "请输入参数:vector"
        }
        result_body = flask.json.dumps(result, ensure_ascii=False)
        return Response(result_body, mimetype="application/json")
    # Fetch the request parameter.
    # NOTE(review): the fallback default "{vector=[[0,0,0,0,0,0,0]]}" is
    # not valid JSON ('=' instead of ':'), so json.loads would raise if
    # other args are present but 'vector' is missing — confirm intent.
    vector = request_args.get("vector", "{vector=[[0,0,0,0,0,0,0]]}")
    print(vector)
    jsob = json.loads(vector)
    vector = [[float(j) for j in i] for i in jsob["vector"]]
    ret = [model.predict(arr) for arr in vector]
    print("predict result :" + str(ret))
    # Build the response payload: echo the features plus predictions.
    result = {
        "features": {
            "vector":vector
        },
        "result": ret
    }
    result_body = flask.json.dumps(result, ensure_ascii=False)
    return Response(result_body, mimetype="application/json")
if __name__ == "__main__":
    app.run(port=8000)
#http://127.0.0.1:8000/ml/gbdt?sepal_length=10&sepal_width=1&petal_length=3&petal_width=2
|
[
"360789029@qq.com"
] |
360789029@qq.com
|
3fb6d2edd1c23332a727301f124c23df1d0b4df0
|
b8717e92625da4c754a84b5bb2bc56545b5bdccf
|
/azext_iot/operations/generic.py
|
95a70b63587e749c138c98b3056526671b87fc83
|
[
"MIT",
"LicenseRef-scancode-generic-cla"
] |
permissive
|
andrewhill00001/azure-iot-cli-extension
|
b158456a649eb60ea5dc85778b70c752710522ad
|
2b1bbedd91a4253548b5edb82fd3d8cf26f69dfd
|
refs/heads/master
| 2020-03-28T20:42:27.320102
| 2018-09-07T21:18:40
| 2018-09-07T21:18:40
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,463
|
py
|
# coding=utf-8
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from knack.util import CLIError
from azext_iot.assets.user_messages import ERROR_PARAM_TOP_OUT_OF_BOUNDS
def _execute_query(query, query_method, top=None):
payload = []
headers = {}
if top:
headers['x-ms-max-item-count'] = str(top)
result, token = query_method(query, headers)
payload.extend(result)
while token:
# In case requested count is > service max page size
if top:
pl = len(payload)
if pl < top:
page = top - pl
headers['x-ms-max-item-count'] = str(page)
else:
break
headers['x-ms-continuation'] = token
result, token = query_method(query, headers)
payload.extend(result)
return payload[:top] if top else payload
def _process_top(top, upper_limit=None):
    """Validate and normalize a --top style argument.

    Returns None when no limit applies (value omitted, or -1 with no
    service-imposed *upper_limit*); otherwise returns ``int(top)``.
    Raises CLIError for non-positive values (including an explicit 0)
    and for values above *upper_limit*.
    """
    # A missing value means "no limit", but an explicit 0 must fall
    # through to validation and be rejected below.
    if top != 0 and not top:
        return None
    if top == -1 and not upper_limit:
        return None  # -1 is the "return everything" sentinel
    out_of_bounds = top <= 0 or (upper_limit and top > upper_limit)
    if out_of_bounds:
        raise CLIError(ERROR_PARAM_TOP_OUT_OF_BOUNDS(upper_limit))
    return int(top)
|
[
"noreply@github.com"
] |
noreply@github.com
|
49b17dda58267f24453517afa4f2ea37b95f754d
|
961d8796f1e6c39f8d3f150a8cb25a351320bc38
|
/utilities/clubUserPurchases.py
|
0fc1a94e8fbd048fadc979df2690ad93447728c1
|
[] |
no_license
|
akshitar/TargetChallenge
|
98aec45b3264ce90c41f325c565fa0ad70eedf08
|
22415d5ddf3d921beb1b8ede7ec46c0309458dbb
|
refs/heads/master
| 2020-12-24T20:52:49.162479
| 2016-05-03T17:46:39
| 2016-05-03T17:46:39
| 57,988,964
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 897
|
py
|
import csv
import marshal as pickle

# Maps user id -> list of purchase sessions (each a list of SKU ids),
# built from consecutive rows of the transaction log.
# NOTE(review): grouping by consecutive rows assumes the log is ordered
# by user id — confirm against the data.
userPurchases = {}
prevUID = ''
purchaseLog = []
with open('data/transactionLog.csv', 'rb') as transactionLog:
    reader = csv.reader(transactionLog)
    for count, userData in enumerate(reader):
        if count == 0:
            continue  # skip the header row
        if count % 10000000 == 0:
            print(count)  # progress marker for very large logs
        uID = str(userData[0])
        skuID = str(userData[1])
        if prevUID != '' and uID != prevUID:
            # User changed: flush the previous user's session.
            userPurchases.setdefault(prevUID, []).append(purchaseLog)
            prevUID = uID
            purchaseLog = [skuID]
        else:
            prevUID = uID
            purchaseLog.append(skuID)
# Bug fix: flush the final user's session — the original loop dropped
# the last user's purchases entirely.
if prevUID != '':
    userPurchases.setdefault(prevUID, []).append(purchaseLog)
# Use a context manager so the output file is always closed (the
# original leaked the handle).
with open('assets/userPurchaseChronology', 'r+') as fileUserPurchase:
    pickle.dump(userPurchases, fileUserPurchase)
|
[
"akshitar@usc.edu"
] |
akshitar@usc.edu
|
c78ac5f125525e45499d74f4a07081a1f77f4fad
|
fc91ba7357f75c3dca5e85a74c58f49948585b94
|
/views.py
|
bd6cc9216f74755eb3d5a72a4a05508f9b117984
|
[] |
no_license
|
acrawford13/local-library
|
0e1f3f0c237d97e97cf0ba383cdda47b54e46797
|
69bec622b30f695099fdb61e8cf390f321c761b8
|
refs/heads/master
| 2021-07-13T08:36:35.912494
| 2017-10-06T01:00:30
| 2017-10-06T01:00:30
| 105,849,820
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 22,519
|
py
|
from flask import Flask, render_template, flash, url_for
from flask import redirect, request, jsonify, make_response
from flask import session as login_session
from sqlalchemy import create_engine, and_
from sqlalchemy.orm import sessionmaker
from sqlalchemy.exc import IntegrityError
from models import Base, Book, User
import json
from datetime import datetime
from math import sqrt
import httplib2
# for saving user images:
from werkzeug.utils import secure_filename
import os
# to generate state token:
import random
import string
# to access google maps api:
import googlemaps
# to 'slugify' username:
import re
# for google authentication:
import requests
from oauth2client.client import flow_from_clientsecrets, FlowExchangeError
UPLOAD_FOLDER = 'static/images/'
ALLOWED_EXTENSIONS = set(['png', 'jpg', 'jpeg', 'gif'])
app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
engine = create_engine('sqlite:///catalog.db')
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
session = DBSession()
GOOGLE_API_KEY = json.load(open('google_api_key.json'))['key']
gmaps = googlemaps.Client(key=GOOGLE_API_KEY)
FACEBOOK_APP_ID = (json.loads(open('fb_client_secret.json', 'r').read())
['web']['app_id'])
FACEBOOK_APP_SECRET = (json.loads(open('fb_client_secret.json', 'r').read())
['web']['app_secret'])
GOOGLE_CLIENT_SECRET_FILE = 'google_client_secret.json'
# csrf protection snippet from http://flask.pocoo.org/snippets/3/
# check state token matches on post requests
@app.before_request
def csrf_protect():
    # Reject any POST whose csrf_token (form field or query argument)
    # does not match the token stored in the session; non-POST requests
    # pass through untouched. Returning a response here short-circuits
    # the request before the view runs.
    if request.method == 'POST':
        token = login_session.get('csrf_token', None)
        if not token or (token != request.form.get('csrf_token') and
                         token != request.args.get('csrf_token')):
            response = make_response(json.dumps('Invalid state'), 401)
            response.headers['Content-Type'] = 'application/json'
            return response
# generate a random state token if none exists
def generate_csrf_token():
    # Lazily create a 32-character alphanumeric CSRF token and cache it
    # in the session, so every form in the same session shares one token.
    # NOTE(review): ``random`` is not a CSPRNG; ``random.SystemRandom``
    # (or ``secrets`` on Python 3) would be preferable for a security
    # token.
    if 'csrf_token' not in login_session:
        login_session['csrf_token'] = (''.join(random.choice(
            string.ascii_uppercase + string.digits) for x in xrange(32)))
    return login_session['csrf_token']
# make csrf token function available to template files
app.jinja_env.globals['csrf_token'] = generate_csrf_token
# check that uploaded files match the allowed extensions
def allowed_file(filename):
    """Return True when *filename* has an extension listed in
    ALLOWED_EXTENSIONS (case-insensitive)."""
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[1].lower()
    return extension in ALLOWED_EXTENSIONS
# remove spaces and replace non-letters/numbers with hyphens in a given string
def slugify(name):
    """Turn *name* into a lowercase URL-safe slug.

    Runs of 2+ whitespace characters are removed outright, punctuation is
    stripped, remaining separators become hyphens, and the result is
    lowercased.
    """
    collapsed = re.sub(r'\s{2,}', '', name)
    cleaned = re.sub(r'[^\w\d\s]', '', collapsed)
    hyphenated = re.sub(r'[^\w\d]', '-', cleaned)
    return hyphenated.lower()
# create/fetch user, and add username to login session
def loginUser(name, email, picture):
    """Fetch the user with *email*, or create one; record the username
    (and any saved location) in the login session and return the user.
    """
    user = session.query(User).filter_by(email=email).one_or_none()
    if user:
        # Returning user: restore their saved location into the session.
        if user.location_lat:
            login_session['location_lat'] = user.location_lat
            login_session['location_lng'] = user.location_lng
    else:
        user = User(
            username=slugify(name),
            name=name,
            email=email,
            picture=picture
        )
        session.add(user)
        # if the username is not unique add a number to the end
        # (rollback + retry order matters: each IntegrityError aborts the
        # transaction, so the user must be re-added before recommitting)
        for attempt in range(1, 20):
            try:
                session.commit()
                break
            except IntegrityError:
                session.rollback()
                user.username = slugify(name) + "-" + str(attempt)
                session.add(user)
    login_session['username'] = user.username
    return user
@app.route('/')
def home():
    """Home page.

    Logged-in users with a saved location see books within a fixed 20 km
    radius of it; everyone else sees five books ordered by date added.
    """
    if 'location_lat' in login_session:
        location = {'lat': login_session['location_lat'],
                    'lng': login_session['location_lng']}
        search_radius = 20
        # km -> degrees (approx. 111 km/degree), squared because the
        # User.distance comparison works on squared degrees (sqlite has
        # no native sqrt — see the comment in search()).
        search_radius_deg = (search_radius/111.0)**2
        # get all books within the search radius
        books = (session.query(Book).join(User)
                 .filter(and_(
                     User.distance(location) <= search_radius_deg,
                     User.username != login_session['username']))
                 .order_by(User.distance(location), Book.name).all())
        return render_template('home-logged-in.html',
                               books=books, search_radius=search_radius,
                               location=location, api_key=GOOGLE_API_KEY)
    else:
        books = session.query(Book).order_by('date_added').limit(5)
        return render_template('home-logged-out.html',
                               books=books, api_key=GOOGLE_API_KEY)
@app.route('/login/')
def showLogin():
    """Login page; users who are already authenticated are sent home."""
    if 'username' in login_session:
        return redirect(url_for('home'))
    return render_template('login.html')
@app.route('/fbconnect', methods=['POST'])
def fbconnect():
    """Facebook OAuth callback.

    Exchanges the client-side token for a long-lived server token, fetches
    the user's name/email/picture from the Graph API, and logs them in.
    Returns the home URL (the client redirects), or 400 when the Facebook
    account exposes no email address.
    """
    access_token = request.data
    # Exchange the short-lived client token for a long-lived one.
    url = ('https://graph.facebook.com/oauth/access_token?grant_type=fb_exchange_token&client_id=%s&client_secret=%s&fb_exchange_token=%s'
           % (FACEBOOK_APP_ID, FACEBOOK_APP_SECRET, access_token))
    h = httplib2.Http()
    response = h.request(url, 'GET')[1]
    login_session['app_access_token'] = json.loads(response)['access_token']
    # Fetch basic profile fields; email is mandatory for our user model.
    url = ('https://graph.facebook.com/me?fields=name,id,email&access_token=%s'
           % login_session['app_access_token'])
    h = httplib2.Http()
    result = h.request(url, 'GET')[1]
    user_data = json.loads(result)
    if 'email' not in user_data:
        response = make_response(
            json.dumps("Sorry, email address is required for login"),
            400)
        response.headers['Content-Type'] = 'application/json'
        return response
    login_session['facebook_id'] = user_data['id']
    login_session['provider'] = "facebook"
    # Profile picture is served from a separate Graph API endpoint.
    url = ('https://graph.facebook.com/me/picture?redirect=0&height=80&width=80&access_token=%s'
           % login_session['app_access_token'])
    h = httplib2.Http()
    result = h.request(url, 'GET')[1]
    user_picture = json.loads(result)['data']
    user = loginUser(name=user_data['name'],
                     email=user_data['email'],
                     picture=user_picture['url'])
    return url_for('home')
@app.route('/gconnect', methods=['POST'])
def gconnect():
    """Google OAuth callback.

    Exchanges the one-time authorisation code for credentials, validates
    the resulting token against Google's tokeninfo endpoint, fetches the
    user's profile and logs them in.  Returns a redirect to home on
    success, or a JSON error response (401/500) on any failed check.
    """
    code = request.data
    client_id = (json.loads(open(GOOGLE_CLIENT_SECRET_FILE, 'r').read())
                 ['web']['client_id'])
    # Exchange the authorisation code for a credentials object.
    try:
        oauth_flow = flow_from_clientsecrets(GOOGLE_CLIENT_SECRET_FILE,
                                             scope='')
        oauth_flow.redirect_uri = 'postmessage'
        credentials = oauth_flow.step2_exchange(code)
    except FlowExchangeError:
        response = make_response(json.dumps(
            'Failed to upgrade the authorisation code.'), 401)
        response.headers['Content-Type'] = 'application/json'
        return response
    # Ask Google whether the access token is valid.
    access_token = credentials.access_token
    url = ('https://www.googleapis.com/oauth2/v1/tokeninfo?access_token=%s'
           % access_token)
    h = httplib2.Http()
    result = json.loads(h.request(url, 'GET')[1])
    if result.get('error') is not None:
        response = make_response(json.dumps(result.get('error')), 500)
        response.headers['Content-Type'] = 'application/json'
        return response
    # The token must belong to this user and to this application.
    gplus_id = credentials.id_token['sub']
    if result['user_id'] != gplus_id:
        response = make_response(json.dumps(
            "Token's user ID doesn't match given user ID"), 401)
        response.headers['Content-Type'] = 'application/json'
        return response
    if result['issued_to'] != client_id:
        response = make_response(json.dumps(
            "Token's client ID doesn't match application's ID"), 401)
        response.headers['Content-Type'] = 'application/json'
        return response
    # Skip the rest if this user is already connected in this session.
    stored_credentials = login_session.get('credentials')
    stored_gplus_id = login_session.get('gplus_id')
    if stored_credentials is not None and gplus_id == stored_gplus_id:
        response = make_response(json.dumps("User is already connected"), 200)
        response.headers['Content-Type'] = 'application/json'
        return response
    login_session['access_token'] = credentials.access_token
    login_session['gplus_id'] = gplus_id
    login_session['provider'] = "google"
    # get user info
    userinfo_url = 'https://www.googleapis.com/oauth2/v2/userinfo'
    params = {'access_token': credentials.access_token, 'alt': 'json'}
    answer = requests.get(userinfo_url, params=params)
    data = json.loads(answer.text)
    user = loginUser(name=data['name'],
                     email=data['email'],
                     picture=data['picture'])
    return redirect(url_for('home'))
@app.route('/logout/')
def logout():
    """Disconnect from the OAuth provider and clear session state."""
    if login_session['provider'] == 'facebook':
        fbdisconnect()
    elif login_session['provider'] == 'google':
        gdisconnect()
    del login_session['username']
    # Location keys exist only for users who saved a location.
    if 'location_lat' in login_session:
        del login_session['location_lat']
        del login_session['location_lng']
    del login_session['provider']
    return redirect(url_for('home'))
@app.route('/fbdisconnect/')
def fbdisconnect():
    """Revoke this session's Facebook permissions and drop its tokens."""
    facebook_id = login_session['facebook_id']
    access_token = login_session['app_access_token']
    h = httplib2.Http()
    # DELETE on /permissions revokes the app's access for this user.
    url = ("https://graph.facebook.com/%s/permissions?access_token=%s"
           % (facebook_id, access_token))
    result = json.loads(h.request(url, 'DELETE')[1])
    del login_session['facebook_id']
    del login_session['app_access_token']
    if 'success' in result:
        return "you have been logged out"
    return "you were not able to be logged out"
@app.route('/gdisconnect/')
def gdisconnect():
    """Revoke this session's Google access token.

    Session keys are only removed when Google confirms the revocation
    (HTTP 200); otherwise a 400 JSON error is returned.
    """
    access_token = login_session.get('access_token')
    if access_token is None:
        response = make_response(json.dumps('User not connected'), 401)
        response.headers['Content-Type'] = 'application/json'
        return response
    url = 'http://accounts.google.com/o/oauth2/revoke?token=%s' % access_token
    h = httplib2.Http()
    result = h.request(url, 'GET')[0]
    if result['status'] == '200':
        del login_session['access_token']
        del login_session['gplus_id']
        response = make_response(json.dumps('Successfully disconnected.'), 200)
        response.headers['Content-Type'] = 'application/json'
        return response
    else:
        response = make_response(json.dumps('Failed to revoke token'), 400)
        response.headers['Content-Type'] = 'application/json'
        return response
@app.route('/search/')
def search():
    """Search for books near a location.

    ``?search=<place>`` is geocoded via the Google Maps API, with an
    optional ``?radius=<km>`` (default 10).  Without a search term, the
    logged-in user's saved location is used with the given radius.

    NOTE(review): if neither 'search' nor 'radius' is supplied (or the
    radius branch's session lookup fails), ``search_radius`` is never
    assigned and the computation below raises — confirm the search form
    always sends one of them.
    """
    location = {}
    if "search" in request.args:
        try:
            location = (gmaps.geocode(request.args.get('search'))
                        [0]['geometry']['location'])
        except googlemaps.exceptions.TransportError:
            response = make_response(
                json.dumps('Request to Google Maps API was unsuccessful'), 408)
            response.headers['Content-Type'] = 'application/json'
            return response
        search_radius = float(request.args.get('radius', 10))
    elif "radius" in request.args and login_session['location_lat']:
        search_radius = float(request.args.get('radius'))
        location['lat'] = login_session['location_lat']
        location['lng'] = login_session['location_lng']
    # convert search radius km to degrees (approx.)
    # then square the value to compare with the User.distance(location) result
    # because sqlite doesn't natively support sqrt
    search_radius_deg = (search_radius/111.0)**2
    if 'username' in login_session:
        # get all books within the search radius
        books = (session
                 .query(Book)
                 .join(User)
                 .filter(and_(User.distance(location) <= search_radius_deg,
                              User.username != login_session['username']))
                 .order_by(User.distance(location), Book.name).all())
    else:
        # get all books within the search radius
        books = (session.query(Book).join(User)
                 .filter(User.distance(location) <= search_radius_deg)
                 .order_by(User.distance(location), Book.name).all())
    if books:
        return make_response(render_template('_search-list.html',
                                             books=books, location=location), 200)
    else:
        return ('No results within %skm of %s' %
                (request.args.get('radius'),
                 request.args.get('search', 'your location')))
@app.route('/api/search/')
def showJson():
    """JSON API: books within ``?radius=`` km (default 10) of either
    ``?lat=``/``?lng=`` or a geocoded ``?search=`` place name.

    When both are given, the geocoded search term wins.
    """
    # search radius in km
    location = {}
    search_radius = float(request.args.get('radius', 10))
    location['lat'] = request.args.get('lat')
    location['lng'] = request.args.get('lng')
    search_term = request.args.get('search')
    if search_term:
        location = (gmaps.geocode(request.args.get('search'))
                    [0]['geometry']['location'])
    # km -> squared degrees, matching User.distance (see search()).
    search_radius_deg = (search_radius/111.0)**2
    books = (session.query(Book).join(User)
             .filter(User.distance(location) <= search_radius_deg)
             .order_by(User.distance(location), Book.name).all())
    return jsonify(books=[book.serialize_detailed(location=location)
                          for book in books])
@app.route('/api/user/<string:username>/')
def userJSON(username):
    """JSON API: a user's public profile plus their book list."""
    user = session.query(User).filter_by(username=username).one_or_none()
    if not user:
        return "No user by this name"
    books = session.query(Book).filter_by(owner_username=username).all()
    return jsonify(name=user.name,
                   city=user.city,
                   country=user.country,
                   picture=user.picture,
                   books=[book.serialize_simple for book in books])
@app.route('/api/book/<string:username>/<string:bookslug>')
def bookJSON(username, bookslug):
    """JSON API: detailed data for one book, looked up by owner + slug."""
    book = (session.query(Book).filter_by(owner_username=username)
            .filter_by(slug=bookslug).one_or_none())
    if not book:
        return "No book by this name"
    return jsonify(book.serialize_detailed())
@app.route('/<string:username>/editprofile/', methods=['GET', 'POST'])
def editProfile(username):
    """Edit the logged-in user's profile.

    GET renders the form; POST saves the submitted fields (and optional
    profile picture upload) and redirects to the profile page.  Only the
    owner may edit their own profile.
    """
    # check if a user is logged in
    if 'username' not in login_session:
        flash("Please log in to edit your profile")
        return redirect(url_for('showLogin'))
    # check if logged in user matches the username in the URL
    # if not, redirect to the correct username
    if login_session['username'] != username:
        return redirect(url_for('editProfile',
                                username=login_session['username']))
    user = session.query(User).filter_by(username=username).one()
    # get request: render form to edit user profile
    if request.method == 'GET':
        return render_template('edit-profile.html',
                               user=user, api_key=GOOGLE_API_KEY)
    # post request: update user info in database
    if request.method == 'POST':
        # upload profile picture to upload folder
        if 'profilepic' in request.files:
            file = request.files['profilepic']
            if file and allowed_file(file.filename):
                filename = secure_filename(file.filename)
                file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
                user.picture = filename
        # store the data from the form
        form = request.form
        user.name = form['name']
        user.city = form['city']
        user.country = form['country']
        user.location_lat = float(form['location_lat'])
        user.location_lng = float(form['location_lng'])
        # keep the session copy of the location in sync with the DB
        login_session['location_lat'] = float(form['location_lat'])
        login_session['location_lng'] = float(form['location_lng'])
        session.commit()
        return redirect(url_for('showUser', username=username))
@app.route('/<string:username>/')
def showUser(username):
    """Public profile page for *username* with their books."""
    location = {}
    # Pass the viewer's saved location through for distance display.
    if 'location_lat' in login_session:
        location = {'lat': login_session['location_lat'],
                    'lng': login_session['location_lng']}
    books = session.query(Book).filter_by(owner_username=username).all()
    user = session.query(User).filter_by(username=username).one_or_none()
    if not user:
        flash("The user <strong>%s</strong> does not exist" % username)
        return redirect(url_for('home'))
    return render_template('user-profile.html',
                           user=user, books=books, location=location)
@app.route('/<string:username>/<string:bookslug>')
def showBook(username, bookslug):
    """Detail page for one book, looked up by owner username + slug."""
    book = (session.query(Book).filter_by(owner_username=username)
            .filter_by(slug=bookslug).one_or_none())
    # check if book exists
    if book:
        return render_template('book.html', book=book)
    else:
        flash("This book does not exist")
        return redirect(url_for('home'))
@app.route('/<string:username>/new', methods=['GET', 'POST'])
def newBook(username):
    """Add a new book for the logged-in user.

    GET renders the form; POST creates the Book and redirects to its
    page.  The (owner, slug) pair is kept unique by appending a numeric
    suffix when the unique constraint fails.
    """
    # check if a user is logged in
    if 'username' not in login_session:
        flash("Please log in to add a book")
        return redirect(url_for('showLogin'))
    # check if logged in user matches the username in the URL
    # if not, redirect to the correct username
    if login_session['username'] != username:
        return redirect(url_for('newBook', username=login_session['username']))
    # get request: render a form to add a new book
    if request.method == 'GET':
        return render_template('new-book.html')
    # post request: add new book to the database
    elif request.method == 'POST':
        # store the data from the form
        form = request.form
        # create new book object
        book = Book(name=form['name'],
                    slug=slugify(form['name']),
                    author=form['author'],
                    description=form['description'],
                    date_added=datetime.now(),
                    owner_username=username)
        session.add(book)
        # if the book slug + username unique constraint fails,
        # add a number to the end of the book slug
        for attempt in range(1, 20):
            try:
                session.commit()
                break
            except IntegrityError:
                session.rollback()
                book.slug = slugify(form['name']) + "-" + str(attempt)
                session.add(book)
        return redirect(url_for('showBook',
                                username=username, bookslug=book.slug))
@app.route('/<string:username>/<string:bookslug>/edit',
           methods=['GET', 'POST'])
def editBook(username, bookslug):
    """Edit a book owned by the logged-in user.

    GET renders the edit form; POST saves the changes.  A name change
    regenerates the slug, with a numeric suffix appended on uniqueness
    collisions, mirroring newBook().
    """
    # check if a user is logged in
    if 'username' not in login_session:
        flash("Please <a href='%s'>log in</a> to edit this book"
              % url_for('showLogin'))
        return redirect(url_for('showBook',
                                username=username, bookslug=bookslug))
    # check if logged in user owns the book
    if login_session['username'] != username:
        flash("This is not your book")
        return redirect(url_for('showBook',
                                username=username, bookslug=bookslug))
    # get the book object that matches username & slug
    book = (session.query(Book).filter_by(owner_username=username)
            .filter_by(slug=bookslug).one_or_none())
    # check if the book exists
    if not book:
        flash("This book does not exist")
        return redirect(url_for('home'))
    # get request: render a form to edit the book's details
    if request.method == 'GET':
        return render_template('edit-book.html', book=book)
    # post request: update the book's details in the database
    elif request.method == 'POST':
        # store the data from the form
        form = request.form
        # if the name has changed, generate a new slug
        if book.name != form['name']:
            book.slug = slugify(form['name'])
        book.name = form['name']
        book.author = form['author']
        book.description = form['description']
        # if the book slug + username unique constraint fails,
        # add a number to the end of the book slug
        for attempt in range(1, 20):
            try:
                session.commit()
                break
            except IntegrityError:
                session.rollback()
                # NOTE(review): re-assigning name/author/description here
                # appears redundant (the rollback is on the same objects
                # the loop then re-commits) — confirm before simplifying.
                book.name = form['name']
                book.author = form['author']
                book.description = form['description']
                book.slug = slugify(form['name']) + "-" + str(attempt)
        return redirect(url_for('showBook',
                                username=username, bookslug=book.slug))
@app.route('/<string:username>/<string:bookslug>/delete',
           methods=['GET', 'POST'])
def deleteBook(username, bookslug):
    """Delete a book owned by the logged-in user.

    GET asks for confirmation; POST performs the deletion and redirects
    to the owner's profile.
    """
    # check if a user is logged in
    if 'username' not in login_session:
        flash("Please <a href='%s'>log in</a> to delete this book"
              % url_for('showLogin'))
        return redirect(url_for('showBook',
                                username=username, bookslug=bookslug))
    # check if logged in user owns the book
    if login_session['username'] != username:
        flash("This is not your book")
        return redirect(url_for('showBook',
                                username=username, bookslug=bookslug))
    # get the book object that matches username & slug
    book = (session.query(Book).filter_by(owner_username=username)
            .filter_by(slug=bookslug).one_or_none())
    # check if the book exists
    if not book:
        flash("This book does not exist")
        return redirect(url_for('home'))
    # get request: render a confirmation page
    if request.method == 'GET':
        return render_template('delete-book.html', book=book)
    # post request: delete the book and redirect to the user's profile
    elif request.method == 'POST':
        session.delete(book)
        session.commit()
        flash("<strong>%s</strong> has been deleted" % book.name)
        return redirect(url_for('showUser', username=username))
if __name__ == '__main__':
    # NOTE(review): debug mode and a hard-coded secret key are fine for
    # local development but must not ship to production.
    app.debug = True
    app.secret_key = 'make_this_better'
    app.run(host='0.0.0.0', port=5000)
|
[
"andrea.crawford13@gmail.com"
] |
andrea.crawford13@gmail.com
|
f29a1716fb77131024301e47e4439bc769de638a
|
ef32b87973a8dc08ba46bf03c5601548675de649
|
/pytglib/api/types/search_messages_filter_animation.py
|
e52ba5032981ef7f5289e9d53f9ec2a0230f7cab
|
[
"MIT"
] |
permissive
|
iTeam-co/pytglib
|
1a7580f0e0c9e317fbb0de1d3259c8c4cb90e721
|
d3b52d7c74ee5d82f4c3e15e4aa8c9caa007b4b5
|
refs/heads/master
| 2022-07-26T09:17:08.622398
| 2022-07-14T11:24:22
| 2022-07-14T11:24:22
| 178,060,880
| 10
| 9
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 559
|
py
|
from ..utils import Object
class SearchMessagesFilterAnimation(Object):
    """
    Returns only animation messages

    Attributes:
        ID (:obj:`str`): ``SearchMessagesFilterAnimation``

    No parameters required.

    Returns:
        SearchMessagesFilter

    Raises:
        :class:`telegram.Error`
    """
    # Telegram TDLib type identifier for this filter.
    ID = "searchMessagesFilterAnimation"

    def __init__(self, **kwargs):
        # The filter carries no data; kwargs are accepted and ignored for
        # interface uniformity with other Object subclasses.
        pass

    @staticmethod
    def read(q: dict, *args) -> "SearchMessagesFilterAnimation":
        # Deserialisation hook: the wire dict carries no fields to read.
        return SearchMessagesFilterAnimation()
|
[
"me@amirh.co"
] |
me@amirh.co
|
344a82674f33771880a5b0e100072e35f8462279
|
ed44dfecd44f7fee6cfaf9630b472e3c0968bccb
|
/gestionadmin/shops/urls.py
|
2c3eded9d0d3bf50b1697e9f38a69716285fb33b
|
[] |
no_license
|
paulemxx/Orgo
|
0119429acbefaa2f878e4588a2ebfa75759d8c7a
|
74ec1904fc61fdfd1d543d01e55f134c504b59f8
|
refs/heads/main
| 2023-01-23T19:26:14.344946
| 2020-11-24T23:38:05
| 2020-11-24T23:38:05
| 305,420,661
| 0
| 1
| null | 2020-10-21T22:48:46
| 2020-10-19T14:57:40
|
HTML
|
UTF-8
|
Python
| false
| false
| 496
|
py
|
from django.urls import path
from gestionadmin.shops import views
app_name = 'shops'
# URL routes for the shops section: add and list products, categories
# and tags.  Names are reversed as 'shops:<name>'.
urlpatterns = [
    path('ajoutproduit', views.ajoutproduit, name='ajoutproduit'),
    path('ajoutcategorie', views.ajoutcategorie, name='ajoutcategorie'),
    path('ajouttag', views.ajouttag, name='ajouttag'),
    path('listeproduit', views.listeproduit, name='listeproduit'),
    path('listecategorie', views.listecategorie, name='listecategorie'),
    path('listetag', views.listetag, name='listetag'),
]
|
[
"gnogan.paul@gmail.com"
] |
gnogan.paul@gmail.com
|
5865f3e2289173a978094911b88a1b6f0e1d7ebe
|
dd638b83d7f1b351affa17bee0300b825ec7f833
|
/serious_pysam/game/game.py
|
62dbaff43aa2f69857b679cfbd6b08adba76935a
|
[
"MIT"
] |
permissive
|
notrurs/Serious_PySam
|
1b3d3b9cc25875ef7cd9d4c47203bb9aba719010
|
37acbf3d2a1136f35b907f0528a4005e6870b4a1
|
refs/heads/master
| 2023-05-10T10:14:51.961755
| 2021-06-15T10:43:01
| 2021-06-15T10:43:01
| 255,668,435
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,503
|
py
|
import pygame
import sys
from collections import defaultdict
from pygame_menu.locals import ALIGN_CENTER
from serious_pysam import config as c
from serious_pysam.menu.menu import MainMenu
class Game:
    """Main super class for Game object."""
    def __init__(self,
                 caption,
                 width,
                 height,
                 back_image_filename,
                 frame_rate):
        """Game has own vars:
        background_image - picture for background on the screen
        frame_rate - pfs of the game
        game_over - current state of game over
        is_boss_spawn - current state boss spawn
        objects - contains all game objects
        enemies - contains only enemies objects
        bullets - contains only bullets objects
        enemy_bullets - contains only enemy_bullets objects
        hud_objects - contains only hud_objects objects
        hero_objects - contains only hero_objects objects
        channel_hero_fire - sound channel for hero shooting
        channel_hero_dialog - sound channel for hero dialog
        channel_enemy_sound - sound channel for enemy sound
        surface - screen object
        keydown_handlers - dict with all button down handlers for keyboard buttons
        keyup_handlers - dict with all button up handlers for keyboard buttons
        mouse_handlers - dict with all handlers for mouse buttons
        """
        self.background_image = pygame.image.load(back_image_filename)
        self.frame_rate = frame_rate
        self.game_over = False
        self.is_boss_spawn = False
        self.objects = []
        self.enemies = []
        self.bullets = []
        self.enemy_bullets = []
        self.hud_objects = []
        self.hero_objects = []
        # Mixer is configured before pygame.init (44100 Hz, 16-bit,
        # stereo, 4096 buffer).
        pygame.mixer.init(44100, 16, 2, 4096)
        pygame.init()
        # Dedicated channels: 0 = hero fire, 1 = hero dialog, 2 = enemies.
        self.channel_hero_fire = pygame.mixer.Channel(0)
        self.channel_hero_fire.set_volume(c.HERO_FIRE_VOLUME)
        self.channel_hero_dialog = pygame.mixer.Channel(1)
        self.channel_hero_dialog.set_volume(c.HERO_DIALOG_VOLUME)
        self.channel_enemy_sound = pygame.mixer.Channel(2)
        self.channel_enemy_sound.set_volume(c.ENEMY_SOUND_VOLUME)
        self.surface = pygame.display.set_mode((width, height))
        pygame.display.set_caption(caption)
        self.clock = pygame.time.Clock()
        self.keydown_handlers = defaultdict(list)
        self.keyup_handlers = defaultdict(list)
        self.mouse_handlers = defaultdict(list)

    def reinitializied_game(self):
        """Sets params below to start values."""
        # NOTE(review): duplicates much of __init__; the method name's
        # spelling is kept as-is because callers use it.
        self.is_boss_spawn = False
        self.objects = []
        self.enemies = []
        self.bullets = []
        self.enemy_bullets = []
        self.hud_objects = []
        self.hero_objects = []
        self.channel_hero_fire = pygame.mixer.Channel(0)
        self.channel_hero_dialog = pygame.mixer.Channel(1)
        self.channel_enemy_sound = pygame.mixer.Channel(2)
        self.channels_set_volume()
        self.keydown_handlers = defaultdict(list)
        self.keyup_handlers = defaultdict(list)
        self.mouse_handlers = defaultdict(list)

    def channels_set_volume(self, volume='default'):
        """Sets game volume.

        :param volume: volume level ('default' restores config volumes,
            'mute' silences all three channels)
        :type volume: str
        """
        if volume == 'default':
            self.channel_hero_fire.set_volume(c.HERO_FIRE_VOLUME)
            self.channel_hero_dialog.set_volume(c.HERO_DIALOG_VOLUME)
            self.channel_enemy_sound.set_volume(c.ENEMY_SOUND_VOLUME)
        elif volume == 'mute':
            self.channel_hero_fire.set_volume(0)
            self.channel_hero_dialog.set_volume(0)
            self.channel_enemy_sound.set_volume(0)

    def update(self):
        """Updates all objects on the screen."""
        for o in self.objects:
            o.update()

    def blit(self, surface):
        """Blits all objects on the screen."""
        for o in self.objects:
            o.blit(surface)

    def handle_events(self):
        """Handler for game events."""
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
            elif event.type == pygame.KEYDOWN:
                for handler in self.keydown_handlers[event.key]:
                    handler(event.key)
            elif event.type == pygame.KEYUP:
                for handler in self.keyup_handlers[event.key]:
                    handler(event.key)
            elif event.type in (pygame.MOUSEBUTTONDOWN,
                                pygame.MOUSEBUTTONUP,
                                pygame.MOUSEMOTION):
                for handler in self.mouse_handlers[event.type]:
                    handler(event.type, event.pos)

    def start_game(self):
        """Starts game. Turn ob music, blits and updates all objects, set fps and
        checks handlers."""
        pygame.mixer.music.load(c.MUSIC_FIGHT)
        pygame.mixer.music.set_volume(c.MUSIC_VOLUME)
        pygame.mixer.music.play(-1)
        hero_start_sound = pygame.mixer.Sound(c.hero_start_level_random_dialog())
        self.channel_hero_dialog.play(hero_start_sound)
        # Main loop: rebuild the draw list each frame from the per-kind
        # lists, then process events, update, draw, and cap the FPS.
        while not self.game_over:
            self.surface.blit(self.background_image, (0, 0))
            self.objects = [*self.enemies, *self.bullets, *self.enemy_bullets, *self.hero_objects, *self.hud_objects]
            self.handle_events()
            self.update()
            self.blit(self.surface)
            pygame.display.update()
            self.clock.tick(self.frame_rate)

    def create_main_menu(self):
        """Starts main menu and turn on menu music."""
        pygame.mixer.music.load(c.MENU_MUSIC)
        pygame.mixer.music.set_volume(c.MENU_MUSIC_VOLUME)
        pygame.mixer.music.play(-1)
        menu_control = MainMenu()
        menu_control.add_label(c.MENU_CONTROL_LABEL, align=ALIGN_CENTER, font_size=30, margin=(0, 100))
        menu_control.add_button('Назад', menu_control.event_back)
        menu = MainMenu()
        menu.add_button('Новая игра', self.start_game)
        menu.add_button('Управление', menu_control)
        menu.add_button('Выход', menu.event_quit)
        menu.mainloop(self.surface)

    def background_function(self):
        """Blits background for menu."""
        bg_img = pygame.image.load(c.MENU_BACKGROUND_IMAGE)
        self.surface.blit(bg_img, (0, 0))

    def run(self):
        """Runs game."""
        self.create_main_menu()
|
[
"fursik111@gmail.com"
] |
fursik111@gmail.com
|
042f26bfe56643c6652b56921c76c835ae78b86e
|
fcf99db2d9f58da7065369c70f81e3e7cb53356b
|
/extra/dynamic1.py
|
53d37a6922ed684b88e5d2cd97b18c2a630e82aa
|
[] |
no_license
|
manankshastri/self-d
|
b0f438e19d1eb6378093205c49eacd7ad3c53275
|
4266c27118354391cc9677e56c0f494506d390cd
|
refs/heads/master
| 2020-04-24T00:38:53.226656
| 2019-10-14T03:44:40
| 2019-10-14T03:44:40
| 171,572,278
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 253
|
py
|
import time
def fib(x):
    """Return the x-th Fibonacci number via naive double recursion."""
    if x == 0:
        return 0
    if x == 1:
        return 1
    return fib(x - 1) + fib(x - 2)
# Time the naive implementation; fib(32) is noticeably slow because the
# recursion tree grows exponentially with x.
startTime = time.time()
print("%-14s:%d" % ("Result:" , fib(32)))
print("%-14s:%.4f seconds" % ("Elapsed time: ", time.time() - startTime))
|
[
"manank.shastri@gmail.com"
] |
manank.shastri@gmail.com
|
a0839df16294a099c433e6529f1a9a3ba7994d38
|
b1004bfd794910911b4eb510a6e5a0460ce1cb5a
|
/python3course/finalcapstone/euler_path/graph.py
|
842336aca7d9c19e066032be50eebd69e4813a06
|
[] |
no_license
|
GeorgeKeith/udemy_py
|
4e0385bbf10441534d5c0ee22d022f7d441b0fae
|
b4e68240e6954e0f70b17265e8f4ed343243a6b9
|
refs/heads/master
| 2020-03-19T04:44:24.806075
| 2018-06-03T04:20:14
| 2018-06-03T04:20:14
| 135,860,896
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 795
|
py
|
from node import Node
from link import Link
class Graph:
    """A graph of named nodes connected by links.

    Nodes live in a dict keyed by name.  Links are assumed to register
    themselves on their endpoint nodes' ``links`` mapping when
    constructed (link_set collects them from there) — confirm against
    the Link class.
    """

    def __init__(self):
        self.nodes = {}

    def __len__(self):
        """Number of nodes in the graph."""
        return len(self.nodes)

    def add_node(self, name):
        """Create a Node called *name* and register it."""
        node = Node(name)
        self.nodes[name] = node

    def add_nodes(self, names):
        """Add several nodes from a comma-separated string of names."""
        for name in names.split(","):
            self.add_node(name)

    def add_link(self, name_a, name_b, cost=1):
        """Link two existing nodes (by name) with an optional cost.

        Raises KeyError when either name is unknown.
        """
        node_a = self.nodes[name_a]
        node_b = self.nodes[name_b]
        # The Link constructor attaches itself to both nodes, so the
        # instance need not be kept here (was an unused local).
        Link(node_a, node_b, cost)

    def node(self, name):
        """Return the node called *name* (KeyError if absent)."""
        return self.nodes[name]

    def link_set(self):
        """Return the set of all distinct links across every node."""
        result = set()
        for node in self.nodes.values():
            for link in node.links.values():
                result.add(link)
        return result
|
[
"george@techfeathers.com"
] |
george@techfeathers.com
|
ea29a9cc461cc772418606651a63a753c9adce36
|
eb9c3dac0dca0ecd184df14b1fda62e61cc8c7d7
|
/google/cloud/securitycenter/v1p1beta1/securitycenter-v1p1beta1-py/google/cloud/securitycenter_v1p1beta1/types/organization_settings.py
|
faec729075707f892513d3f7e9e1c999722a8557
|
[
"Apache-2.0"
] |
permissive
|
Tryweirder/googleapis-gen
|
2e5daf46574c3af3d448f1177eaebe809100c346
|
45d8e9377379f9d1d4e166e80415a8c1737f284d
|
refs/heads/master
| 2023-04-05T06:30:04.726589
| 2021-04-13T23:35:20
| 2021-04-13T23:35:20
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,410
|
py
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package='google.cloud.securitycenter.v1p1beta1',
manifest={
'OrganizationSettings',
},
)
# NOTE(review): this looks like auto-generated protobuf binding code;
# manual edits are likely to be overwritten by the generator.
class OrganizationSettings(proto.Message):
    r"""User specified settings that are attached to the Security
    Command Center organization.

    Attributes:
        name (str):
            The relative resource name of the settings. See:
            https://cloud.google.com/apis/design/resource_names#relative_resource_name
            Example:
            "organizations/{organization_id}/organizationSettings".
        enable_asset_discovery (bool):
            A flag that indicates if Asset Discovery should be enabled.
            If the flag is set to ``true``, then discovery of assets
            will occur. If it is set to \`false, all historical assets
            will remain, but discovery of future assets will not occur.
        asset_discovery_config (google.cloud.securitycenter_v1p1beta1.types.OrganizationSettings.AssetDiscoveryConfig):
            The configuration used for Asset Discovery
            runs.
    """

    class AssetDiscoveryConfig(proto.Message):
        r"""The configuration used for Asset Discovery runs.

        Attributes:
            project_ids (Sequence[str]):
                The project ids to use for filtering asset
                discovery.
            inclusion_mode (google.cloud.securitycenter_v1p1beta1.types.OrganizationSettings.AssetDiscoveryConfig.InclusionMode):
                The mode to use for filtering asset
                discovery.
        """

        class InclusionMode(proto.Enum):
            r"""The mode of inclusion when running Asset Discovery. Asset discovery
            can be limited by explicitly identifying projects to be included or
            excluded. If INCLUDE_ONLY is set, then only those projects within
            the organization and their children are discovered during asset
            discovery. If EXCLUDE is set, then projects that don't match those
            projects are discovered during asset discovery. If neither are set,
            then all projects within the organization are discovered during
            asset discovery.
            """
            INCLUSION_MODE_UNSPECIFIED = 0
            INCLUDE_ONLY = 1
            EXCLUDE = 2

        project_ids = proto.RepeatedField(proto.STRING, number=1)

        inclusion_mode = proto.Field(proto.ENUM, number=2,
            enum='OrganizationSettings.AssetDiscoveryConfig.InclusionMode',
        )

    name = proto.Field(proto.STRING, number=1)

    enable_asset_discovery = proto.Field(proto.BOOL, number=2)

    asset_discovery_config = proto.Field(proto.MESSAGE, number=3,
        message=AssetDiscoveryConfig,
    )
|
[
"bazel-bot-development[bot]@users.noreply.github.com"
] |
bazel-bot-development[bot]@users.noreply.github.com
|
40ebde4a66db2044009fa4345a931b67003e657b
|
c91d029b59f4e6090a523bf571b3094e09852258
|
/src/comercial/apps.py
|
8f54832caa2d497204eca6944b8e7c6dc3c3e09e
|
[
"MIT"
] |
permissive
|
anselmobd/fo2
|
d51b63ebae2541b00af79448ede76b02638c41f0
|
8e7f8f3d9a296c7da39d0faf38a266e9c6c162ab
|
refs/heads/master
| 2023-08-31T19:59:33.964813
| 2023-08-31T19:50:53
| 2023-08-31T19:50:53
| 92,856,677
| 1
| 0
|
MIT
| 2023-04-21T21:50:46
| 2017-05-30T17:04:27
|
Python
|
UTF-8
|
Python
| false
| false
| 93
|
py
|
from django.apps import AppConfig
class ComercialConfig(AppConfig):
    """Django application configuration for the 'comercial' app."""
    name = 'comercial'
|
[
"anselmo.blanco.dominguez+github@gmail.com"
] |
anselmo.blanco.dominguez+github@gmail.com
|
10e06c96d12545cd7e9bbb9a2321187fd1f606ea
|
6b937035c51cf5653e30478ffe7e677ba557258f
|
/clock.py
|
af29f6c4d9ad47a4695fcf2b9c4619058e7ddbf3
|
[] |
no_license
|
greeedyboy/HttpServer
|
920d4c4c3ba0ac9f225d9bcddbf9d4c4847b5bdd
|
bbd62a490a64bb975f23d93ef0ed729e28c91ccb
|
refs/heads/master
| 2020-05-17T21:51:20.957123
| 2019-04-28T16:56:51
| 2019-04-28T16:56:51
| 183,983,432
| 0
| 0
| null | 2019-04-29T02:04:43
| 2019-04-29T02:04:42
| null |
UTF-8
|
Python
| false
| false
| 745
|
py
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
"""info
"""
from gking.plugin.creatart import git_post
from apscheduler.schedulers.blocking import BlockingScheduler
import subprocess
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=120)
def timed_job():
    """Every 120 minutes: crawl the 'gking' spider, then post results.

    The subprocess call raises CalledProcessError if the spider exits
    non-zero, which aborts this run (the scheduler fires again later).
    """
    # Fixed misleading log text: the schedule above is 120 minutes, not
    # five minutes as the original message claimed.
    print('This job is run every 120 minutes.')
    print('begin to scrapy crawl gking')
    rs = subprocess.check_output(['python', 'runspider.py', 'gking'])
    print(rs)
    print('begin to gking post')
    res = git_post()
    data = {"postnum": str(res)}
    print(data)
# Weekdays at 17:00: currently only logs a heartbeat message.
@sched.scheduled_job('cron', day_of_week='mon-fri', hour=17)
def scheduled_job():
    print('This job is run every weekday at 5pm.')
sched.start()
|
[
"greeedyboy@163.com"
] |
greeedyboy@163.com
|
56dce1c7a2e622474cab828faaa1e75698d9e760
|
4ac11b3ac16e8ede6075b7da7008b6c63aab2788
|
/app.py
|
bc0e5c2f2298f73a2bca6dddd8593aeb75e34064
|
[
"MIT"
] |
permissive
|
DrFirestream/Tabaqui
|
59faa185db7fc030dcb9e8278441e3a9bf9413af
|
dad4fe2b3dcf01195bac330de509663ea2bd6e54
|
refs/heads/master
| 2022-12-24T15:20:27.472475
| 2020-09-19T21:22:22
| 2020-09-19T21:22:22
| 296,133,707
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,254
|
py
|
import flask
import boto3
import botocore
import json
import datetime
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy.sql.functions import func
import sys
import os
import random
MAX_REQUESTS_TO_AWS = 20
MAX_PROMPT_LENGTH = 200
class Config(object):
    """Flask configuration.

    Secrets are read from the environment at import time, so a missing
    DB_SECRET_KEY or DATABASE_URL raises KeyError immediately.
    """
    DEBUG = False
    TESTING = False
    CSRF_ENABLED = True
    SECRET_KEY = os.environ['DB_SECRET_KEY']
    SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
app = flask.Flask(__name__, template_folder='templates')
aws_key = os.environ['AWS_ACCESS_KEY_ID']
aws_secret_key = os.environ['AWS_SECRET_ACCESS_KEY']
app.config.from_object('app.Config')
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db = SQLAlchemy(app)
names = ['Саша', 'Игорек', 'Люда', 'Илья', 'Миша', 'Дима', 'Вова', 'Миклуха', 'Артем'];
from models import Journal
def get_count(q):
    """Return the row count of SQLAlchemy query *q* without fetching rows.

    Rewrites the query as ``SELECT count(*)`` (dropping any ORDER BY),
    which avoids materialising results the way ``len(q.all())`` would.
    """
    count_q = q.statement.with_only_columns([func.count()]).order_by(None)
    count = q.session.execute(count_q).scalar()
    return count
@app.route('/', methods=['GET', 'POST'])
def main():
    """Chat page. GET renders the form; POST either polls SQS for a finished
    generation (when a requestid is already pending) or enqueues a new
    request via an async Lambda invocation."""
    if flask.request.method == 'GET':
        return flask.render_template('main.html')
    try:
        # Keep only the tail of the prompt so requests stay bounded.
        prompt = flask.request.form['message'][-MAX_PROMPT_LENGTH:]
        #cfg = botocore.config.Config(retries={'max_attempts': 0}, read_timeout=360, connect_timeout=360, region_name="eu-central-1" )
        if flask.request.form['requestid']:
            # A request is already in flight: poll the SQS queue for its reply.
            submitted = flask.request.form['submitted']
            requestid = flask.request.form['requestid']
            sqs = boto3.resource('sqs', region_name='eu-central-1', aws_access_key_id=aws_key, aws_secret_access_key=aws_secret_key)
            queue = sqs.get_queue_by_name(QueueName = 'TabaquiQueue')
            for message in queue.receive_messages(MessageAttributeNames=['RequestId']):
                if message.message_attributes is not None:
                    mrequestid = message.message_attributes.get('RequestId').get('StringValue')
                    print(mrequestid + " " + requestid)
                    if mrequestid == requestid:
                        # Our answer arrived: consume the message and render it.
                        res = message.body
                        message.delete()
                        return flask.render_template('main.html', result = submitted + message.body)
            # Not ready yet — re-render with the same requestid so the client retries.
            return flask.render_template('main.html', result = prompt + '... ', submitted = submitted, requestid = requestid)
        # Purge journal entries older than a day, then rate-limit new requests.
        db.session.query(Journal).filter(func.date(Journal.request) <= datetime.date.today() - datetime.timedelta(days=1)).delete(synchronize_session='fetch')
        db.session.commit()
        q = db.session.query(Journal.id)#.filter(...).order_by(...)
        if get_count(q) > MAX_REQUESTS_TO_AWS:
            return flask.render_template('main.html', result = 'Please try tomorrow')
        # Record the request; its DB id doubles as the SQS correlation id.
        journal = Journal()
        db.session.add(journal)
        db.session.commit()
        db.session.refresh(journal)
        requestid = str(journal.id)
        print('Request id: ' + requestid)
        #boto3.setup_default_session(region_name='us-east-1')
        client = boto3.client('lambda', region_name='eu-central-1', aws_access_key_id=aws_key, aws_secret_access_key=aws_secret_key) #config = cfg
        # Normalise each non-empty line to the chat format: Name: "text"
        prompt = ''.join([(s.strip() if s.find(':') > 0 and s.find('"') > 0 else
                           names[random.randrange(0, len(names))] + ': "' + s.strip() + ' "') + '\n'
                          for s in prompt.split('\n') if s.strip()])
        # Leave the prompt ending with an open quote for the model to complete.
        if prompt.endswith(': "\n'):
            prompt = prompt[:-1]
        else:
            prompt = prompt + names[random.randrange(0, len(names))] + ': "'
        payload={"RequestId": requestid, "Prompt": prompt, "Temperature": 0.9}#, "NQuotes": 1}
        # InvocationType='Event' = fire-and-forget; the reply comes back via SQS.
        response = client.invoke(FunctionName = 'tabaqui_response', InvocationType = 'Event', LogType = 'Tail', Payload = json.dumps(payload))
        #dictj = json.loads(response['Payload'].read().decode())
        return flask.render_template('main.html', result = prompt + "\n" + str(response['StatusCode']) + "\nHave to wait for request " + requestid, submitted = prompt, requestid = requestid)
    except:
        # NOTE(review): bare except — renders the traceback info instead of failing.
        return flask.render_template('main.html', result = str(sys.exc_info()))#[0]
if __name__ == '__main__':
    # Local development entry point; presumably a WSGI server runs this in production.
    app.run()#(host='0.0.0.0', port = os.environ['PORT'])
|
[
"dr.firestream@gmail.com"
] |
dr.firestream@gmail.com
|
548aa10877c45afcb7de075e147edf9994827bd2
|
bdd8ea8cad219d0fbe7d1151ba566e852dd9b640
|
/analyze_job_output.py
|
bc9943b2c9972c1721c2a26ac8c7d9dd9bb86398
|
[] |
no_license
|
edquist/vm-test-runs
|
ff13928b907c32f07e0d4539e8e27188d88132c1
|
1847bf6e60d20f9662f52b84aa7dc0dddcbdc9ce
|
refs/heads/master
| 2023-02-09T11:39:28.723434
| 2016-06-22T20:31:20
| 2016-06-22T20:31:20
| 63,362,384
| 0
| 0
| null | 2016-07-14T19:04:58
| 2016-07-14T19:04:58
| null |
UTF-8
|
Python
| false
| false
| 12,607
|
py
|
#!/usr/bin/python
import glob
import os
import re
import socket
import subprocess
import sys
from datetime import datetime, date
import yaml
def run_command(command, shell=False):
    """Run *command* and capture its output.

    Returns (returncode, stdout, stderr); stderr is always None here
    because it is merged into stdout via subprocess.STDOUT.
    With shell=True a sequence is joined into one shell line; with
    shell=False the command must be a list or tuple of arguments.
    (Python 2 module — note the old raise syntax.)
    """
    # Preprocess command
    if shell:
        if not isinstance(command, str):
            command = ' '.join(command)
    elif not (isinstance(command, list) or isinstance(command, tuple)):
        raise TypeError, 'Need list or tuple, got %s' % (repr(command))
    # Run and return command
    p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=shell)
    (stdout, stderr) = p.communicate()
    return (p.returncode, stdout, stderr)
def read_file(path):
    """Return the entire contents of the text file at *path*."""
    with open(path, 'r') as handle:
        return handle.read()
def load_yaml(filename):
    """Parse the YAML file at *filename* and return the resulting object.

    Uses yaml.safe_load instead of yaml.load: the inputs here are plain
    data files (lists/dicts/dates, per how parse_log consumes them), and
    bare yaml.load can construct arbitrary Python objects from the input.
    """
    yaml_file = read_file(filename)
    return yaml.safe_load(yaml_file)
def re_extract(regexp, data, flags=0, default=None, group=None):
    """Search *data* for *regexp*.

    Returns *default* when there is no match; otherwise the tuple of all
    captured groups, or just group *group* when one is requested.
    """
    match = re.search(regexp, data, flags)
    if match is None:
        return default
    return match.groups() if group is None else match.group(group)
def extract_inet_address(run_job_log):
    """Pull the IPv4 inet address of eth0 (preferred) or ens3 from the
    `ip addr`-style listing embedded in *run_job_log*; None if neither
    interface block is present.
    """
    for iface in ('eth0', 'ens3'):
        block = re_extract(r'^\d:\s*%s:(.*?)(?:^\d)' % iface, run_job_log,
                           re.MULTILINE | re.DOTALL, group=1)
        if block:
            return re_extract(r'^\s+inet\s+(.*?)\/', block, re.MULTILINE, group=1)
    return None
def extract_last(run_job_log, regexp):
    """Return the last MULTILINE match of *regexp* in *run_job_log*, or None."""
    matches = re.findall(regexp, run_job_log, re.MULTILINE)
    return matches[-1] if matches else None
def write_yaml_value(value):
    """Render *value* as a YAML fragment.

    Scalars come back prefixed with a single space (' ~' for None, quoted
    when the string contains YAML-significant characters); lists come back
    as multi-line '\n -...' sequences rendered recursively.
    """
    if value is None:
        text = '~'
    elif isinstance(value, str):
        if re.search(r"['\"\n|>,#\[\]]", value):
            # Single-quote and escape strings that would confuse YAML.
            text = "'%s'" % value.replace("'", "''").replace('\n', '\\n')
        else:
            text = value
    elif isinstance(value, list):
        text = ''.join('\n -%s' % write_yaml_value(item) for item in value)
    else:
        text = str(value)
    # Multi-line renderings stand alone; scalars get the separating space.
    return text if '\n' in text else ' ' + text
def write_yaml_mapping(data, key):
    """Print one ' key: value' YAML line for *key*, if present in *data*."""
    if key in data:
        if key == 'job_serial':
            # Serial is always quoted — presumably so numeric-looking serials
            # stay strings in YAML; confirm against the consumers.
            print ' %s: \'%s\'' % (key, data[key])
        else:
            value = data[key]
            print ' %s:%s' % (key, write_yaml_value(value))
def write_yaml(data):
    """Emit *data* as one YAML list item ('-' header), keys sorted for
    deterministic output."""
    print '-'
    for key in sorted(data.keys()):
        write_yaml_mapping(data, key)
def write_failure_and_exit(data, status, message, extra=None):
    """Record a whole-run failure in *data*, dump it as YAML, and exit.

    NOTE: exits with code 0 even on failure — the YAML payload (run_status,
    run_summary, optional run_details) carries the outcome.
    """
    data['run_status'] = status
    data['run_summary'] = message
    if extra is not None:
        data['run_details'] = extra
    write_yaml(data)
    sys.exit(0)
def parse_log(osg_test_log, test_exceptions, components):
    """Classify an osg-test log.

    Returns (run_status, problems, okskips):
      run_status -- 'pass', 'ignore', 'cleanup', 'update', 'install' or
                    'fail', possibly suffixed with ' yum_timeout'
      problems   -- 'module|function|module_name|STATUS|comment' strings for
                    every ERROR/FAIL/bad-SKIP entry
      okskips    -- per-component-tag counts of 'okskip' test lines, using
                    the module -> tags mapping in *components*

    test_exceptions is a list of (function, module, start_date, finish_date)
    entries; FAILs matching an entry whose date window contains today are
    counted as ignorable.
    """
    # Extract problems
    today = date.today()
    run_status = ''
    problems = []
    ignored_failures = 0
    cleanup_failures = 0
    for m in re.finditer(r'^(ERROR|FAIL): (\w+) \(osgtest\.tests\.(\w+)\.(\w+)\)', osg_test_log, re.MULTILINE):
        status, function, module, module_name = m.groups()
        if module == 'special_cleanup':
            cleanup_failures += 1
        if test_exceptions:
            for exception in test_exceptions:
                ex_function, ex_module, ex_start, ex_finish = exception
                if status == 'FAIL' and ex_function == function and ex_module == module and \
                        today >= ex_start and today <= ex_finish:
                    ignored_failures += 1
        problems.append('|'.join((module, function, module_name, status, '-')))
    if ignored_failures and ignored_failures == len(problems) - cleanup_failures:
        # Runs consisting only of 'ignored' and 'cleanup' failures should be marked
        # as 'ignored' since 'cleanup' failures are mostly due to errors in the
        # test framework and are thus less interesting. Checking that ignored_
        # failures is non-zero ensures that we don't capture cleanup-only failures.
        run_status = 'ignore'
    elif all('special_cleanup' in problem for problem in problems) and problems:  # all() returns true for empty lists!
        run_status = 'cleanup'
    elif any('_update_' in problem for problem in problems):
        run_status = 'update'
    elif any('install_packages' in problem for problem in problems):
        run_status = 'install'
    # Bad skips get appended to problems as SKIP entries with their comment.
    m = re.search(r'^=+\nBAD SKIPS:\n-+\n(.*?)\n\n', osg_test_log, re.MULTILINE | re.DOTALL)
    if m is not None:
        for n in re.finditer(r'^(\w+) \(osgtest\.tests\.(\w+)\.(\w+)\) (.*)$', m.group(1), re.MULTILINE):
            function, module, module_name, comment = n.groups()
            problems.append('|'.join((module, function, module_name, 'SKIP', comment)))
    if not problems:
        run_status = 'pass'
    elif not run_status:  # catch missed failures
        run_status = 'fail'
    # Tally okskips per component tag.
    okskips = {}
    for module in re.finditer(r'\S+ \(osgtest\.tests\.([^\.]+).*okskip$', osg_test_log, re.MULTILINE):
        try:
            tags = components[module.group(1)]
        except KeyError:
            continue
        # BUG FIX: the old code wrapped this loop in try/except KeyError and
        # set only the failing tag to 1, so the first unseen tag aborted
        # counting of the remaining tags for that module.
        for tag in tags:
            okskips[tag] = okskips.get(tag, 0) + 1
    if re.search('AssertionError: Retries terminated after timeout period', osg_test_log, re.MULTILINE):
        run_status += ' yum_timeout'
    return run_status, problems, okskips
# ======================================================================================================================
if __name__ == '__main__':
    # Analyze one VM test-run job: gather condor/job metadata, scrape the
    # run-job and osg-test logs, and emit everything as one YAML list item.
    # Process command-line arguments
    if len(sys.argv) != 3:
        print 'usage: %s SERIAL JOBID' % os.path.basename(sys.argv[0])
        sys.exit(1)
    job_serial = sys.argv[1]
    job_id = sys.argv[2]
    # Start hash
    data = {
        'job_serial': job_serial,
        'job_id': job_id,
        'run_directory': os.getcwd()
    }
    # Construct expected directory name
    test_run_dir = 'output-' + job_serial
    # Read condor_history output for transfer-in time (in seconds)
    (rc, stdout, _) = run_command(('condor_history', '-format', '%d\\n ', 'JobCurrentStartExecutingDate - JobCurrentStartDate',
                                   job_id, '-match', '1'))
    if rc == 0:
        data['transfer_in'] = re_extract(r'^(\S+)$', stdout, group=1)
    # Separate query for hostname in case the JobAd's missing attributes
    (rc, stdout, _) = run_command(('condor_history', '-format', '%s\\n', 'LastRemoteHost', job_id, '-match', '1'))
    if rc == 0:
        data['host_name'] = re_extract(r'^\S+@(\S+)$', stdout, group=1)
    try:
        data['host_address'] = socket.gethostbyname(data['host_name'])
    except TypeError:
        # Missing hostname from condor_history
        data['host_name'] = 'unavailable'
        data['host_address'] = 'unavailable'
    except socket.gaierror:
        # When gethostbyname can't find address by hostname1
        data['host_address'] = 'unavailable'
    # Read osg-test.conf
    conf_file_name = os.path.join(test_run_dir, 'input', 'osg-test.conf')
    conf_file = read_file(conf_file_name)
    data['param_sources'] = re_extract(r'^sources\s*=\s*(.*)$', conf_file, re.MULTILINE, group=1)
    data['param_packages'] = re_extract(r'^packages\s*=\s*(.*)$', conf_file, re.MULTILINE, group=1)
    # Read free size left in the IO image
    io_free_size_file = os.path.join(test_run_dir, 'io_free_size')
    data['io_free_size'] = read_file(io_free_size_file)
    # Read run-job.log
    run_job_logfile = os.path.join(test_run_dir, 'run-job.log')
    run_job_log = read_file(run_job_logfile)
    # Get VM creation date
    data['vm_creation_date'] = re_extract(r'cat /etc/creation_date\n(.*?)\n==> OK', run_job_log, group=1)
    # Get and simplify OS release string
    os_long_string = re_extract(r'cat /etc/redhat-release\n(.*?)\n==> OK', run_job_log, group=1)
    os_string = re.sub(r'release\s+', '', os_long_string)
    os_string = re.sub(r'\s*\(.*\)$', '', os_string)
    data['os_release'] = os_string
    # Look for whole-run failures (each write_failure_and_exit call ends the script)
    inet_address = extract_inet_address(run_job_log)
    if inet_address is None:
        write_failure_and_exit(data, 1, 'No apparent IP address')
    data['guest_address'] = inet_address
    # See if the rpm install of epel-release failed
    failed_epel_install = re.search(r'Could not install EPEL repository', run_job_log)
    if failed_epel_install:
        write_failure_and_exit(data, 1, 'rpm install of epel-release failed')
    # See if the final yum install of osg-test failed
    final_osgtest_install = extract_last(run_job_log, r'^.*install osg-test(?:.*\n)*?^==>.*$')
    if final_osgtest_install is not None:
        install_result = re_extract(r'^==> (\w+)', final_osgtest_install, re.MULTILINE, group=1)
        if install_result != 'OK':
            write_failure_and_exit(data, 1, 'yum install of osg-test failed', final_osgtest_install)
    # Extract osg-test source string
    for package in ['osg-test', 'osg-ca-generator']:
        source_re = r'^%s source: (.*)$' % package
        data[package.replace('-', '_') + '_version'] = re_extract(source_re, run_job_log, re.MULTILINE,
                                                                  default='(unknown)', group=1)
    # Read osg-test output
    osg_test_logfile_list = glob.glob(os.path.join(test_run_dir, 'output', 'osg-test-*.log'))
    if len(osg_test_logfile_list) == 0:
        write_failure_and_exit(data, 1, 'No osg-test-DATE.log file found')
    osg_test_logfile = osg_test_logfile_list[0]
    osg_test_log = read_file(osg_test_logfile)
    data['run_status'] = 0
    data['osg_test_logfile'] = osg_test_logfile
    data['osg_test_status'], data['tests_messages'], data['ok_skips'] = parse_log(osg_test_log,
                                                                                 load_yaml('test-exceptions.yaml'),
                                                                                 load_yaml('component-tags.yaml'))
    # Extract start time
    data['start_time'] = re_extract(r'^Start time: (.*)$', osg_test_log, re.MULTILINE, group=1)
    # Determine if the run timed out which can be found at the end of the log
    timeout_re = re.compile('Caught alarm:\n.*message: (.*):')
    # Search the log bottom-up by reversing its line order.
    reversed_osg_test_log = '\n'.join(reversed(osg_test_log.split('\n')))
    timeout_match = timeout_re.search(reversed_osg_test_log, re.MULTILINE)
    if timeout_match:
        data['osg_test_status'] = 'timeout'
        end_time = datetime.strptime(timeout_match.group(1), '%Y-%m-%d %H:%M:%S')
        start_time = datetime.strptime(data['start_time'], '%Y-%m-%d %H:%M:%S')
        data['run_time'] = end_time - start_time
        last_test_re = re.compile(r'File .*osgtest\/tests\/(.*)\".*in (.*)')
        last_test = last_test_re.search(reversed_osg_test_log)
        failed_module, failed_test = last_test.groups()
        data['timeout_test'] = failed_test + ' (' + failed_module + ')'
    # Extract summary statistics
    summary_re = re.compile(r'(Ran \d+ tests in .*)\s+((?:OK|FAILED)\s*\([^)]+\))\n(?!STDOUT)')
    summary_lines = re_extract(summary_re, osg_test_log)
    data['tests_total'] = data['tests_failed'] = data['tests_error'] = data['tests_bad_skip'] = data['tests_ok_skip'] = 0
    if summary_lines is None:
        data['run_time'] = 0.0
    else:
        tests_total, run_time = re_extract(r'Ran (\d+) tests in ([\d.]+)s', summary_lines[0])
        data['tests_total'] = int(tests_total)
        data['run_time'] = float(run_time)
        summary = summary_lines[1]
        overall, details = re_extract(r'(OK|FAILED)\s*\(([^)]+)\)', summary_lines[1])
        # details looks like 'failures=1, errors=2, ...'
        for detailed_count in re.split(r'\s*,\s*', details):
            label, value = detailed_count.split('=')
            if label == 'failures': data['tests_failed'] = int(value)
            elif label == 'errors': data['tests_error'] = int(value)
            elif label == 'badSkips': data['tests_bad_skip'] = int(value)
            elif label == 'okSkips': data['tests_ok_skip'] = int(value)
            else: raise ValueError()
    data['tests_ok'] = data['tests_total'] - \
        (data['tests_failed'] + data['tests_error'] + data['tests_bad_skip'] + data['tests_ok_skip'])
    write_yaml(data)
|
[
"blin@cs.wisc.edu"
] |
blin@cs.wisc.edu
|
b7f1b707aff8227e5a6adbf2cc67481d0bb0a5a8
|
82008bd4464276674c0f87bc5920563877d2a9dc
|
/validphone.py
|
5d24834a8fcfff8e9197404b5e603e807e80c08b
|
[] |
no_license
|
smudugal/ValidPhoneNumber
|
6d9e0fcc1b0687f1ec86b38c6260b7d9da32f06c
|
998f8719426b42a5687f59a84e936df42ee4b7a7
|
refs/heads/master
| 2021-01-19T13:43:57.037748
| 2017-02-18T21:48:45
| 2017-02-18T21:48:45
| 82,414,227
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 314
|
py
|
import re
def telephone_check(num):
    """Return True when *num* matches the accepted US phone number shapes
    (optional leading 1, optional separators/parentheses around the area
    code, 3-3-4 digit groups)."""
    pattern = '^1?\s?[(]?[0-9]{3}[-,)]?\s?[0-9]{3}[-,\s]?[0-9]{4}$'
    return re.match(pattern, num) is not None
if __name__ == "__main__":
    # Python 2 CLI: prompt for a number and validate it (result kept in
    # `valid` but never displayed).
    print 'Enter a telephone number to check: '
    num = raw_input()
    valid = telephone_check(num)
    pass
|
[
"samyukta.11@gmail.com"
] |
samyukta.11@gmail.com
|
c981f1e95abb4190bf05190cd91b6a38d5e84056
|
507947847e9b72c2b427b74c15f325f36b269e02
|
/apps/belt/models.py
|
e8510049f3924077500d033054504837f4d43185
|
[] |
no_license
|
echeverria-oscar/oscar_belts
|
57916cf9875db2d004427fb8b382c1df8c296e32
|
446ccaa6cdf3776844284dca64ce45f3d9f7aa65
|
refs/heads/master
| 2020-12-24T12:34:57.968726
| 2016-11-06T06:33:33
| 2016-11-06T06:33:33
| 72,974,155
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,873
|
py
|
from __future__ import unicode_literals
from datetime import datetime, date
from django.db import models
import bcrypt
from django.db import models
# Create your models here.
class UserManager(models.Manager):
    """Custom manager: login, registration and form validation for User."""

    def login(self, post):
        # Return the matching User when the username exists and the bcrypt
        # hash of the submitted password equals the stored hash; else None.
        user_list = User.objects.filter(username = post['username'])
        if user_list:
            user = user_list[0]
            if bcrypt.hashpw(post['password'].encode(), user.password.encode()) == user.password:
                return user
        return None

    def register(self, post):
        # Hash the password with a fresh salt before storing the new user.
        encrypted_password = bcrypt.hashpw(post['password'].encode(), bcrypt.gensalt())
        User.objects.create(name = post['name'], username = post['username'],date_hired = post['date_hired'], password = encrypted_password )

    def validate(self, post):
        # Collect human-readable validation errors for the registration form;
        # an empty list means the submission is valid.
        errors = []
        today = date.today()
        print today
        if len(post['name']) == 0:
            errors.append("Name is required")
        elif len(post['name']) < 3:
            errors.append("Name is too short")
        if len(post['username']) == 0:
            errors.append("username is required")
        elif len(post['username']) < 3:
            errors.append("Username is too short")
        if len(post['password']) == 0:
            errors.append("must enter a password")
        elif len(post['password']) < 8:
            errors.append("password must have at least 8 characters")
        elif post['password'] != post['confirm_pass']:
            errors.append("password and confirmation must match")
        if not post['date_hired']:
            errors.append("Date Hired Field cannot be empty")
        try:
            # Hire date must be a valid YYYY-MM-DD date not in the future.
            date_hired = datetime.strptime(post['date_hired'], '%Y-%m-%d')
            if date_hired.date() > today:
                errors.append("Date cannot from the future")
        except:
            # NOTE(review): bare except — any parse failure becomes this message.
            errors.append("Please enter a valid date for the From Date")
        return errors
class WishManager(models.Manager):
    """Custom manager providing validation for Wish submissions."""

    def v_wish(self, post):
        # Return human-readable validation errors; empty list means valid.
        wish_text = post['wish']
        errors = []
        if len(wish_text) == 0:
            errors.append("Wish is required")
        elif len(wish_text) < 3:
            errors.append("Wish is too short")
        return errors
class User(models.Model):
    """Registered user; `password` stores a bcrypt hash (written by
    UserManager.register), so max_length is 100 rather than 45."""
    name = models.CharField(max_length = 45)
    username = models.CharField(max_length = 45)
    date_hired = models.DateField()
    password = models.CharField(max_length = 100)  # bcrypt hash, not plain text
    created_at = models.DateTimeField(auto_now_add= True)
    updated_at = models.DateTimeField(auto_now=True)
    objects = UserManager()  # custom manager with login/register/validate
class Wish(models.Model):
    """A wish owned by one user; `wisher` is a many-to-many link to other
    users (presumably those who joined the wish — confirm against views)."""
    user = models.ForeignKey(User)  # owner of the wish
    wish = models.TextField(max_length = 1000)
    wisher = models.ManyToManyField(User, related_name= "other_wish")
    created_at = models.DateTimeField(auto_now_add= True)
    updated_at = models.DateTimeField(auto_now=True)
    objects = WishManager()  # custom manager with v_wish validation
|
[
"oscar@oscars-MacBook-Pro.local"
] |
oscar@oscars-MacBook-Pro.local
|
4efa1687dadd46892464c946083720005116424d
|
888f65551bb3fe1b8e84c205796b24678669a649
|
/venv/bin/mako-render
|
e6e8f3b2ebd988dca4cd46c0956c7a2d59f20d2a
|
[] |
no_license
|
chunharrison/NBA-Predictor
|
e6514c70f2cf26d6db4c14aee225cfbd9d5984a7
|
967951ba34debee012385af63f2bf8031dee51ca
|
refs/heads/master
| 2022-05-04T22:02:03.374496
| 2019-05-15T05:55:34
| 2019-05-15T05:55:34
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 258
|
#!/Users/harrison/Documents/NBA-Predictor/venv/bin/python3.7
# -*- coding: utf-8 -*-
# Console-script shim (pip/setuptools template) for the `mako-render` entry point.
import re
import sys

from mako.cmd import cmdline

if __name__ == '__main__':
    # Strip a '-script.pyw'/'.exe' suffix from argv[0] (added by Windows
    # launchers; harmless elsewhere), then delegate to mako's CLI.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(cmdline())
|
[
"wjsdntjr@hotmail.com"
] |
wjsdntjr@hotmail.com
|
|
9f531ead96710329b7c89d17a9c2abd4b35294a1
|
41704c5c80283fd87afdbf41949ad69c1c240303
|
/Lectures/s9/exercise.py
|
1946ba302591e70f6631fad240b47cd6655b146f
|
[] |
no_license
|
geonsoo/HCDE310
|
3263dbc7135351eb7996e5ba7386562e70562138
|
6605df0c65310b3c412df5d4e15e31d20625c825
|
refs/heads/master
| 2020-06-14T18:31:13.701033
| 2016-12-02T01:00:18
| 2016-12-02T01:00:18
| 75,346,840
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,654
|
py
|
import urllib2
from BeautifulSoup import BeautifulSoup
def try_it(someURL):
    """Fetch *someURL* with urllib2 and print the final URL, the response
    length in characters, and the response headers (Python 2)."""
    # Get the web page at that URL
    response = urllib2.urlopen(someURL)
    # Read the page data as one big string
    page_data = response.read()
    # Print out the URL
    print "URL is: " + response.geturl()
    # Print out the number of characters
    print "# of characters: " + str(len(page_data)) + "\n"
    # Print out some extra information
    print "--- info ---"
    print response.info()
def soupIt(someURL):
    """Download *someURL* and parse it with BeautifulSoup.

    Exercise stub: the interesting processing of `soup` is left for the
    student to implement, so this currently returns nothing.
    """
    # note, the next line won't work until you have installed
    # BeautifulSoup. See s11.py or the slides for instructions
    htmlstring = urllib2.urlopen(someURL).read()
    soup = BeautifulSoup(htmlstring)
    # now do something interesting here
    # to figure out what to do, maybe view the source
    # of the URL to learn the structure of the page
### Main block
if __name__ == '__main__':
    print "\n----- PART 1 -----"
    # The URL I want to use
    myURL = "http://hcde.washington.edu"
    # Call try_it with myURL
    try_it(myURL)
    print "\n----- PART 2 -----"
    # Uncomment the next two lines and set the URL to "http://www.google.com"
    #myURL = "WHAT GOES HERE"
    #try_it(myURL)
    print "\n----- PART 3 -----"
    # This time, replace PICKSOMETHING with an address of your choice.
    # Remember to include the "http://"
    #myURL = "PICKSOMETHING"
    #try_it(myURL)
    print "\n----- Let's make this more interesting? -----"
    # (1) Uncomment the next code, pick a URL
    # (2) fill in the function soupIt(myURL)to do something interesting
    #myURL = "PICKSOMETHING"
    soupIt(myURL)  # NOTE(review): still uses the PART 1 URL — the line above is commented out
|
[
"smunson@gmail.com"
] |
smunson@gmail.com
|
42af71a8602e0cdea4da087e51fed92e10e0e222
|
e2311b68b22d9bfeae2de1e159b9d36ae677fff9
|
/Day 10/days_in_month.py
|
1236731bc894a4d8ef4e1506b2160e3a70e82ecf
|
[] |
no_license
|
Omisw/Python_100_days
|
80be2886fae8e67cdcc35956fbceab91d9cd5b4d
|
8699e6a34938f4a370fc03e1fded53923625aa1d
|
refs/heads/main
| 2023-03-10T06:38:45.858562
| 2021-02-24T22:23:11
| 2021-02-24T22:23:11
| 330,755,226
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,382
|
py
|
# Day 10 - First exercise.
# Days in Month
# Instructions
# In the starting code, you'll find the solution from the Leap Year challenge. First, convert this function is_leap() so that instead of printing "Leap year." or "Not leap year."
# it should return True if it is a leap year and return False if it is not a leap year.
# You are then going to create a function called days_in_month() which will take a year and a month as inputs, e.g.
# days_in_month(year=2022, month=2)
# And it will use this information to work out the number of days in the month, then return that as the output, e.g.:
# 28
# The List month_days contains the number of days in a month from January to December for a non-leap year. A leap year has 29 days in February.
def is_leap(year):
    """Return True for Gregorian leap years: divisible by 4, except
    century years, which must be divisible by 400."""
    if year % 4 != 0:
        return False
    if year % 100 != 0:
        return True
    return year % 400 == 0
def days_in_month(year, month):
    """Return the number of days in *month* (1-12) of *year*.

    February returns 29 in leap years (via is_leap). For an out-of-range
    month an error-message string is returned instead of an int, matching
    the original contract. (Fixed the grammar of that message:
    "a invalid" -> "an invalid".)
    """
    if month > 12 or month < 1:
        # Guard against out-of-range months before indexing the table.
        return f"The month {month}, is an invalid month."
    month_days = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
    # February gains a day in leap years.
    if is_leap(year) and month == 2:
        return 29
    return month_days[month - 1]
#🚨 Do NOT change any of the code below
# Exercise driver: read year and month, then print the day count.
year = int(input("Enter a year: "))
month = int(input("Enter a month: "))
days = days_in_month(year, month)
print(days)
|
[
"o_mis_6@hotmail.com"
] |
o_mis_6@hotmail.com
|
8fa26a7fc731cf1959afc22db8fe82bb1fe9ed7b
|
f37a5d7d252e014a0664cccac5a567fd94ba76eb
|
/api/src/entities/recommendation.py
|
cd1800336b7ed64a62b3ddb3117dc82300bd47b5
|
[] |
no_license
|
GabrielVanLoon/Projeto_Teach.me
|
3e795b04815716d9644bc06145e3d3c9caf8032f
|
43188ebb27142904b1b2c8a620898971733ce298
|
refs/heads/master
| 2022-12-25T19:00:09.536937
| 2020-10-03T21:50:26
| 2020-10-03T21:50:26
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 328
|
py
|
class Recommendation:
    """A student's recommendation text for an instructor."""

    def __init__(self, student=None, instructor=None, text=None):
        # Keep the keyword signature unchanged for existing callers.
        self.student = student
        self.instructor = instructor
        self.text = text

    def __iter__(self):
        # Yield (key, value) pairs in a fixed order so dict(rec) works.
        for key in ('student', 'instructor', 'text'):
            yield key, getattr(self, key)
|
[
"gabriel.vl.rojas@gmail.com"
] |
gabriel.vl.rojas@gmail.com
|
a28899990a52251b03fac204e9b1ffb85d3044c3
|
04ffe1395bec0943f1f9fc26cffdd9eb8777cf9a
|
/CONSTANTS.py
|
d495aacf47c7cb3ec7a517349b702461c99584bb
|
[
"MIT"
] |
permissive
|
elben10/corona-dashboard
|
8693954bee0f58c2f01509db08e33403754b4f4a
|
ce3be765ee560b9cfec364f3dca32cc804776b8a
|
refs/heads/master
| 2022-12-15T08:59:16.116354
| 2020-03-29T18:34:55
| 2020-03-29T18:34:55
| 248,964,130
| 0
| 0
|
MIT
| 2022-12-10T23:18:59
| 2020-03-21T11:41:48
|
CSS
|
UTF-8
|
Python
| false
| false
| 106
|
py
|
# Column names used by the dashboard (presumably the ECDC COVID-19 feed,
# given the dateRep/geoId naming — confirm against the data source).
DATE = "dateRep"          # reporting date
CASES = "cases"           # daily case count
DEATHS = "deaths"         # daily death count
ISO2 = "geoId"            # two-letter territory code
COUNTRY_TEXT = "countriesAndTerritories"  # human-readable territory name
|
[
"jakob@datamaga.com"
] |
jakob@datamaga.com
|
90eeb932286f2b70f61e93c7696e6754db0c0289
|
82cdfdc0cc3a3de9d2b3f3ef9e51f25f0f3a11ad
|
/SAMP/people/migrations/0005_auto_20190711_2032.py
|
ef10a25d1fcf4accfb4b3d364670cc2a114f4337
|
[] |
no_license
|
akitsuryoko/StudentAssosiationManagementPlatform
|
8a6a6ad41ee8b1a187aa7a3b6dcfb29247aab58e
|
02db11de01275606b3d05119284362cb55e1bc63
|
refs/heads/master
| 2020-06-19T05:07:30.159642
| 2019-07-12T12:12:30
| 2019-07-12T12:12:30
| 195,827,677
| 0
| 0
| null | 2019-07-08T14:20:50
| 2019-07-08T14:20:50
| null |
UTF-8
|
Python
| false
| false
| 400
|
py
|
# Generated by Django 2.2.1 on 2019-07-11 12:32
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: make person.cookie_id nullable
    (CharField, max_length 256, null=True)."""

    dependencies = [
        ('people', '0004_auto_20190710_1516'),
    ]

    operations = [
        migrations.AlterField(
            model_name='person',
            name='cookie_id',
            field=models.CharField(max_length=256, null=True),
        ),
    ]
|
[
"1138164561@qq.com"
] |
1138164561@qq.com
|
0d9dec6db8744147c3f50fe1865d02563d2fcd29
|
9db53eb9e79f120084461b1ddf4d8df1d16277f4
|
/lab1.py
|
112d1659cb0fd6c62d79c3d9548925f0cb2458ae
|
[] |
no_license
|
petersterling1/cmput404lab1
|
c8c409ca3f98c7aecaba4e0d734f5234f7a35f62
|
54ba07aed422c3a551e76188f500697ee151bde4
|
refs/heads/master
| 2020-12-07T19:27:53.045658
| 2016-09-07T21:16:09
| 2016-09-07T21:16:09
| 67,643,210
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 130
|
py
|
import requests

# Lab exercise: fetch this very script from GitHub and echo its contents
# (Python 2 print statement).
response = requests.get('https://github.com/petersterling1/cmput404lab1/raw/master/lab1.py')
print response.text
|
[
"psterlin@ualberta.ca"
] |
psterlin@ualberta.ca
|
2f61e1a2a9ae30f8aa6baa190d2c72e14392c2d1
|
99958f3786f4ec372f4f5f5752ed5040dbda4ac3
|
/GAE_install/RPG/werkzeug/testsuite/wrappers.py
|
6c1d39d1092ce583d371594f8b2f70e2339558ff
|
[] |
no_license
|
armadillu/LocalProjectsRPG
|
01f03eadcfd2f8c7e6bfdc20e149047d3c7f24a4
|
4b974d9de06f85b7d4f807e44c7f836d0b195220
|
refs/heads/master
| 2021-01-22T12:02:10.023666
| 2013-05-24T20:48:03
| 2013-05-24T20:48:03
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 27,912
|
py
|
# -*- coding: utf-8 -*-
"""
werkzeug.testsuite.wrappers
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Tests for the response and request objects.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import unittest
import pickle
from StringIO import StringIO
from datetime import datetime
from werkzeug.testsuite import WerkzeugTestCase
from werkzeug import wrappers
from werkzeug.exceptions import SecurityError
from werkzeug.datastructures import MultiDict, ImmutableOrderedMultiDict, \
ImmutableList, ImmutableTypeConversionDict, CharsetAccept, \
CombinedMultiDict
from werkzeug.test import Client, create_environ, run_wsgi_app
class RequestTestResponse(wrappers.BaseResponse):
    """Subclass of the normal response class we use to test response
    and base classes. Has some methods to test if things in the
    response match.
    """

    def __init__(self, response, status, headers):
        wrappers.BaseResponse.__init__(self, response, status, headers)
        # The demo app pickles its request summary into the body; unpickle
        # it so tests can index into it directly.
        self.body_data = pickle.loads(self.data)

    def __getitem__(self, key):
        # Allow response['form']-style lookups into the unpickled payload.
        return self.body_data[key]
def request_demo_app(environ, start_response):
    """WSGI app that responds with a pickled summary of the parsed request
    (args, form, picklable environ entries and raw body data)."""
    request = wrappers.BaseRequest(environ)
    assert 'werkzeug.request' in environ
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [pickle.dumps({
        'args': request.args,
        'args_as_list': request.args.lists(),
        'form': request.form,
        'form_as_list': request.form.lists(),
        'environ': prepare_environ_pickle(request.environ),
        'data': request.data
    })]
def prepare_environ_pickle(environ):
    """Return a copy of *environ* containing only picklable key/value pairs
    (Python 2: uses dict.iteritems)."""
    result = {}
    for key, value in environ.iteritems():
        try:
            pickle.dumps((key, value))
        except Exception:
            # Drop entries (e.g. stream objects) that cannot be pickled.
            continue
        result[key] = value
    return result
class WrappersTestCase(WerkzeugTestCase):
def assert_environ(self, environ, method):
assert environ['REQUEST_METHOD'] == method
assert environ['PATH_INFO'] == '/'
assert environ['SCRIPT_NAME'] == ''
assert environ['SERVER_NAME'] == 'localhost'
assert environ['wsgi.version'] == (1, 0)
assert environ['wsgi.url_scheme'] == 'http'
def test_base_request(self):
client = Client(request_demo_app, RequestTestResponse)
# get requests
response = client.get('/?foo=bar&foo=hehe')
assert response['args'] == MultiDict([('foo', 'bar'), ('foo', 'hehe')])
assert response['args_as_list'] == [('foo', ['bar', 'hehe'])]
assert response['form'] == MultiDict()
assert response['form_as_list'] == []
assert response['data'] == ''
self.assert_environ(response['environ'], 'GET')
# post requests with form data
response = client.post('/?blub=blah', data='foo=blub+hehe&blah=42',
content_type='application/x-www-form-urlencoded')
assert response['args'] == MultiDict([('blub', 'blah')])
assert response['args_as_list'] == [('blub', ['blah'])]
assert response['form'] == MultiDict([('foo', 'blub hehe'), ('blah', '42')])
assert response['data'] == ''
# currently we do not guarantee that the values are ordered correctly
# for post data.
## assert response['form_as_list'] == [('foo', ['blub hehe']), ('blah', ['42'])]
self.assert_environ(response['environ'], 'POST')
# patch requests with form data
response = client.patch('/?blub=blah', data='foo=blub+hehe&blah=42',
content_type='application/x-www-form-urlencoded')
assert response['args'] == MultiDict([('blub', 'blah')])
assert response['args_as_list'] == [('blub', ['blah'])]
assert response['form'] == MultiDict([('foo', 'blub hehe'), ('blah', '42')])
assert response['data'] == ''
self.assert_environ(response['environ'], 'PATCH')
# post requests with json data
json = '{"foo": "bar", "blub": "blah"}'
response = client.post('/?a=b', data=json, content_type='application/json')
assert response['data'] == json
assert response['args'] == MultiDict([('a', 'b')])
assert response['form'] == MultiDict()
def test_access_route(self):
req = wrappers.Request.from_values(headers={
'X-Forwarded-For': '192.168.1.2, 192.168.1.1'
})
req.environ['REMOTE_ADDR'] = '192.168.1.3'
assert req.access_route == ['192.168.1.2', '192.168.1.1']
assert req.remote_addr == '192.168.1.3'
req = wrappers.Request.from_values()
req.environ['REMOTE_ADDR'] = '192.168.1.3'
assert req.access_route == ['192.168.1.3']
def test_url_request_descriptors(self):
req = wrappers.Request.from_values('/bar?foo=baz', 'http://example.com/test')
assert req.path == u'/bar'
assert req.full_path == u'/bar?foo=baz'
assert req.script_root == u'/test'
assert req.url == 'http://example.com/test/bar?foo=baz'
assert req.base_url == 'http://example.com/test/bar'
assert req.url_root == 'http://example.com/test/'
assert req.host_url == 'http://example.com/'
assert req.host == 'example.com'
assert req.scheme == 'http'
req = wrappers.Request.from_values('/bar?foo=baz', 'https://example.com/test')
assert req.scheme == 'https'
def test_url_request_descriptors_hosts(self):
req = wrappers.Request.from_values('/bar?foo=baz', 'http://example.com/test')
req.trusted_hosts = ['example.com']
assert req.path == u'/bar'
assert req.full_path == u'/bar?foo=baz'
assert req.script_root == u'/test'
assert req.url == 'http://example.com/test/bar?foo=baz'
assert req.base_url == 'http://example.com/test/bar'
assert req.url_root == 'http://example.com/test/'
assert req.host_url == 'http://example.com/'
assert req.host == 'example.com'
assert req.scheme == 'http'
req = wrappers.Request.from_values('/bar?foo=baz', 'https://example.com/test')
assert req.scheme == 'https'
req = wrappers.Request.from_values('/bar?foo=baz', 'http://example.com/test')
req.trusted_hosts = ['example.org']
self.assert_raises(SecurityError, lambda: req.url)
self.assert_raises(SecurityError, lambda: req.base_url)
self.assert_raises(SecurityError, lambda: req.url_root)
self.assert_raises(SecurityError, lambda: req.host_url)
self.assert_raises(SecurityError, lambda: req.host)
def test_authorization_mixin(self):
request = wrappers.Request.from_values(headers={
'Authorization': 'Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ=='
})
a = request.authorization
assert a.type == 'basic'
assert a.username == 'Aladdin'
assert a.password == 'open sesame'
def test_base_response(self):
    """BaseResponse basics: unicode bodies, stream writes, cookie
    set/delete headers, and close-callback forwarding."""
    # unicode
    response = wrappers.BaseResponse(u'öäü')
    assert response.data == 'öäü'
    # writing
    response = wrappers.Response('foo')
    response.stream.write('bar')
    assert response.data == 'foobar'
    # set cookie
    response = wrappers.BaseResponse()
    response.set_cookie('foo', 'bar', 60, 0, '/blub', 'example.org', False)
    assert response.headers.to_list() == [
        ('Content-Type', 'text/plain; charset=utf-8'),
        ('Set-Cookie', 'foo=bar; Domain=example.org; expires=Thu, '
         '01-Jan-1970 00:00:00 GMT; Max-Age=60; Path=/blub')
    ]
    # delete cookie
    response = wrappers.BaseResponse()
    response.delete_cookie('foo')
    assert response.headers.to_list() == [
        ('Content-Type', 'text/plain; charset=utf-8'),
        ('Set-Cookie', 'foo=; expires=Thu, 01-Jan-1970 00:00:00 GMT; Max-Age=0; Path=/')
    ]
    # close call forwarding: both the response iterable's close() and the
    # registered call_on_close callback must fire, hence len(closed) == 2.
    closed = []
    class Iterable(object):
        def next(self):
            raise StopIteration()
        def __iter__(self):
            return self
        def close(self):
            closed.append(True)
    response = wrappers.BaseResponse(Iterable())
    response.call_on_close(lambda: closed.append(True))
    app_iter, status, headers = run_wsgi_app(response,
                                             create_environ(),
                                             buffered=True)
    assert status == '200 OK'
    assert ''.join(app_iter) == ''
    assert len(closed) == 2
def test_response_status_codes(self):
    """``status`` and ``status_code`` stay in sync, including codes with
    no known reason phrase and unparseable status strings."""
    response = wrappers.BaseResponse()
    response.status_code = 404
    assert response.status == '404 NOT FOUND'
    response.status = '200 OK'
    assert response.status_code == 200
    response.status = '999 WTF'
    assert response.status_code == 999
    response.status_code = 588
    assert response.status_code == 588
    assert response.status == '588 UNKNOWN'
    # a status string without a leading integer yields status_code 0
    response.status = 'wtf'
    assert response.status_code == 0
def test_type_forcing(self):
    """``force_type`` converts both WSGI callables and plain response
    objects into the requested Response subclass; converting a WSGI
    app requires an environ."""
    def wsgi_application(environ, start_response):
        start_response('200 OK', [('Content-Type', 'text/html')])
        return ['Hello World!']
    base_response = wrappers.BaseResponse('Hello World!', content_type='text/html')

    class SpecialResponse(wrappers.Response):
        def foo(self):
            return 42

    # good enough for this simple application, but don't ever use that in
    # real world examples!
    fake_env = {}
    for orig_resp in wsgi_application, base_response:
        response = SpecialResponse.force_type(orig_resp, fake_env)
        assert response.__class__ is SpecialResponse
        assert response.foo() == 42
        assert response.data == 'Hello World!'
        assert response.content_type == 'text/html'

    # without env, no arbitrary conversion
    self.assert_raises(TypeError, SpecialResponse.force_type, wsgi_application)
def test_accept_mixin(self):
    """Accept* request headers parse into quality-sorted Accept objects."""
    request = wrappers.Request({
        'HTTP_ACCEPT': 'text/xml,application/xml,application/xhtml+xml,'
                       'text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5',
        'HTTP_ACCEPT_CHARSET': 'ISO-8859-1,utf-8;q=0.7,*;q=0.7',
        'HTTP_ACCEPT_ENCODING': 'gzip,deflate',
        'HTTP_ACCEPT_LANGUAGE': 'en-us,en;q=0.5'
    })
    assert request.accept_mimetypes == CharsetAccept([
        ('text/xml', 1), ('image/png', 1), ('application/xml', 1),
        ('application/xhtml+xml', 1), ('text/html', 0.9),
        ('text/plain', 0.8), ('*/*', 0.5)
    ])
    assert request.accept_charsets == CharsetAccept([
        ('ISO-8859-1', 1), ('utf-8', 0.7), ('*', 0.7)
    ])
    assert request.accept_encodings == CharsetAccept([('gzip', 1), ('deflate', 1)])
    assert request.accept_languages == CharsetAccept([('en-us', 1), ('en', 0.5)])

    # an empty Accept header yields an empty accept object
    request = wrappers.Request({'HTTP_ACCEPT': ''})
    assert request.accept_mimetypes == CharsetAccept()
def test_etag_request_mixin(self):
    """Conditional request headers (cache control, etags, date headers)
    parse into their structured representations."""
    request = wrappers.Request({
        'HTTP_CACHE_CONTROL': 'no-store, no-cache',
        'HTTP_IF_MATCH': 'w/"foo", bar, "baz"',
        'HTTP_IF_NONE_MATCH': 'w/"foo", bar, "baz"',
        'HTTP_IF_MODIFIED_SINCE': 'Tue, 22 Jan 2008 11:18:44 GMT',
        'HTTP_IF_UNMODIFIED_SINCE': 'Tue, 22 Jan 2008 11:18:44 GMT'
    })
    assert request.cache_control.no_store
    assert request.cache_control.no_cache

    # weak etags match weakly but not strongly
    for etags in request.if_match, request.if_none_match:
        assert etags('bar')
        assert etags.contains_raw('w/"foo"')
        assert etags.contains_weak('foo')
        assert not etags.contains('foo')

    assert request.if_modified_since == datetime(2008, 1, 22, 11, 18, 44)
    assert request.if_unmodified_since == datetime(2008, 1, 22, 11, 18, 44)
def test_user_agent_mixin(self):
    """User-Agent strings are parsed into browser/platform/version/language;
    unparseable agents are falsy."""
    user_agents = [
        ('Mozilla/5.0 (Macintosh; U; Intel Mac OS X; en-US; rv:1.8.1.11) '
         'Gecko/20071127 Firefox/2.0.0.11', 'firefox', 'macos', '2.0.0.11',
         'en-US'),
        ('Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; de-DE) Opera 8.54',
         'opera', 'windows', '8.54', 'de-DE'),
        ('Mozilla/5.0 (iPhone; U; CPU like Mac OS X; en) AppleWebKit/420 '
         '(KHTML, like Gecko) Version/3.0 Mobile/1A543a Safari/419.3',
         'safari', 'iphone', '419.3', 'en'),
        ('Bot Googlebot/2.1 ( http://www.googlebot.com/bot.html)',
         'google', None, '2.1', None)
    ]
    for ua, browser, platform, version, lang in user_agents:
        request = wrappers.Request({'HTTP_USER_AGENT': ua})
        assert request.user_agent.browser == browser
        assert request.user_agent.platform == platform
        assert request.user_agent.version == version
        assert request.user_agent.language == lang
        assert bool(request.user_agent)
        # round-trips back to the raw header string
        assert request.user_agent.to_header() == ua
        assert str(request.user_agent) == ua

    request = wrappers.Request({'HTTP_USER_AGENT': 'foo'})
    assert not request.user_agent
def test_etag_response_mixin(self):
    """Etag generation, cache-control headers and ``make_conditional``
    producing a 304 without entity headers."""
    response = wrappers.Response('Hello World')
    assert response.get_etag() == (None, None)
    response.add_etag()
    assert response.get_etag() == ('b10a8db164e0754105b7a99be72e3fe5', False)
    assert not response.cache_control
    response.cache_control.must_revalidate = True
    response.cache_control.max_age = 60
    response.headers['Content-Length'] = len(response.data)
    # header ordering of the serialized cache-control is not guaranteed
    assert response.headers['Cache-Control'] in ('must-revalidate, max-age=60',
                                                 'max-age=60, must-revalidate')

    assert 'date' not in response.headers
    env = create_environ()
    env.update({
        'REQUEST_METHOD': 'GET',
        'HTTP_IF_NONE_MATCH': response.get_etag()[0]
    })
    response.make_conditional(env)
    assert 'date' in response.headers

    # after the thing is invoked by the server as wsgi application
    # (we're emulating this here), there must not be any entity
    # headers left and the status code would have to be 304
    resp = wrappers.Response.from_app(response, env)
    assert resp.status_code == 304
    assert not 'content-length' in resp.headers

    # make sure date is not overridden
    response = wrappers.Response('Hello World')
    response.date = 1337
    d = response.date
    response.make_conditional(env)
    assert response.date == d

    # make sure content length is only set if missing
    response = wrappers.Response('Hello World')
    response.content_length = 999
    response.make_conditional(env)
    self.assert_equal(response.content_length, 999)
def test_etag_response_mixin_freezing(self):
    """``freeze()`` generates an etag only when ETagResponseMixin comes
    before BaseResponse in the MRO; otherwise the etag stays unset."""
    class WithFreeze(wrappers.ETagResponseMixin, wrappers.BaseResponse):
        pass
    class WithoutFreeze(wrappers.BaseResponse, wrappers.ETagResponseMixin):
        pass
    response = WithFreeze('Hello World')
    response.freeze()
    assert response.get_etag() == (wrappers.generate_etag('Hello World'), False)
    response = WithoutFreeze('Hello World')
    response.freeze()
    assert response.get_etag() == (None, None)
    # the stock Response class behaves like WithoutFreeze here
    response = wrappers.Response('Hello World')
    response.freeze()
    assert response.get_etag() == (None, None)
def test_authenticate_mixin(self):
    """The WWW-Authenticate header tracks the ``www_authenticate`` object."""
    resp = wrappers.Response()
    auth = resp.www_authenticate
    auth.type = 'basic'
    auth.realm = 'Testing'
    assert resp.headers['WWW-Authenticate'] == 'Basic realm="Testing"'
    auth.realm = None
    auth.type = None
    assert 'WWW-Authenticate' not in resp.headers
def test_response_stream_mixin(self):
    """Writes to ``response.stream`` append chunks to the response body."""
    response = wrappers.Response()
    for chunk in ('Hello ', 'World!'):
        response.stream.write(chunk)
    assert response.response == ['Hello ', 'World!']
    assert response.data == 'Hello World!'
def test_common_response_descriptors_mixin(self):
    """Descriptor properties (mimetype, content_length, date headers,
    Vary, Allow, Content-Language) map to and from response headers."""
    response = wrappers.Response()
    response.mimetype = 'text/html'
    assert response.mimetype == 'text/html'
    assert response.content_type == 'text/html; charset=utf-8'
    assert response.mimetype_params == {'charset': 'utf-8'}
    # mimetype_params is live: mutating it rewrites Content-Type
    response.mimetype_params['x-foo'] = 'yep'
    del response.mimetype_params['charset']
    assert response.content_type == 'text/html; x-foo=yep'

    now = datetime.utcnow().replace(microsecond=0)

    assert response.content_length is None
    response.content_length = '42'
    assert response.content_length == 42

    for attr in 'date', 'age', 'expires':
        assert getattr(response, attr) is None
        setattr(response, attr, now)
        assert getattr(response, attr) == now

    assert response.retry_after is None
    response.retry_after = now
    assert response.retry_after == now

    assert not response.vary
    response.vary.add('Cookie')
    response.vary.add('Content-Language')
    assert 'cookie' in response.vary
    assert response.vary.to_header() == 'Cookie, Content-Language'
    response.headers['Vary'] = 'Content-Encoding'
    assert response.vary.as_set() == set(['content-encoding'])

    response.allow.update(['GET', 'POST'])
    assert response.headers['Allow'] == 'GET, POST'

    response.content_language.add('en-US')
    response.content_language.add('fr')
    assert response.headers['Content-Language'] == 'en-US, fr'
def test_common_request_descriptors_mixin(self):
    """Request descriptor properties parse typed values out of headers."""
    request = wrappers.Request.from_values(content_type='text/html; charset=utf-8',
                                           content_length='23',
                                           headers={
        'Referer': 'http://www.example.com/',
        'Date': 'Sat, 28 Feb 2009 19:04:35 GMT',
        'Max-Forwards': '10',
        'Pragma': 'no-cache'
    })

    assert request.content_type == 'text/html; charset=utf-8'
    assert request.mimetype == 'text/html'
    assert request.mimetype_params == {'charset': 'utf-8'}
    assert request.content_length == 23
    assert request.referrer == 'http://www.example.com/'
    assert request.date == datetime(2009, 2, 28, 19, 4, 35)
    assert request.max_forwards == 10
    assert 'no-cache' in request.pragma
def test_shallow_mode(self):
    """A shallow request exposes query args but refuses form access,
    since form parsing would consume the input stream."""
    request = wrappers.Request({'QUERY_STRING': 'foo=bar'}, shallow=True)
    assert request.args['foo'] == 'bar'
    self.assert_raises(RuntimeError, lambda: request.form['foo'])
def test_form_parsing_failed(self):
    """A broken multipart body yields empty ``files`` and ``form``
    instead of raising."""
    payload = '--blah\r\n'
    req = wrappers.Request.from_values(
        input_stream=StringIO(payload),
        content_length=len(payload),
        content_type='multipart/form-data; boundary=foo',
        method='POST')
    assert not req.files
    assert not req.form
def test_url_charset_reflection(self):
    """``url_charset`` mirrors the request's ``charset`` attribute."""
    request = wrappers.Request.from_values()
    request.charset = 'utf-7'
    assert request.url_charset == 'utf-7'
def test_response_streamed(self):
    """``is_streamed`` is False for strings/sequences and True for
    generator bodies."""
    r = wrappers.Response()
    assert not r.is_streamed
    r = wrappers.Response("Hello World")
    assert not r.is_streamed
    r = wrappers.Response(["foo", "bar"])
    assert not r.is_streamed
    def gen():
        if 0:
            yield None
    r = wrappers.Response(gen())
    assert r.is_streamed
def test_response_freeze(self):
    """``freeze()`` materializes a generator body into a list and sets
    Content-Length."""
    def generate():
        yield "foo"
        yield "bar"
    resp = wrappers.Response(generate())
    resp.freeze()
    assert resp.response == ['foo', 'bar']
    assert resp.headers['content-length'] == '6'
def test_other_method_payload(self):
    """A request body is readable even for non-standard HTTP methods."""
    data = 'Hello World'
    req = wrappers.Request.from_values(input_stream=StringIO(data),
                                       content_length=len(data),
                                       content_type='text/plain',
                                       method='WHAT_THE_FUCK')
    assert req.data == data
    assert isinstance(req.stream, wrappers.LimitedStream)
def test_urlfication(self):
    """IRI header values are coerced to URIs when building WSGI headers:
    hosts become punycode, paths/userinfo become percent-encoded."""
    resp = wrappers.Response()
    resp.headers['Location'] = u'http://üser:pässword@☃.net/påth'
    resp.headers['Content-Location'] = u'http://☃.net/'
    headers = resp.get_wsgi_headers(create_environ())
    assert headers['location'] == \
        'http://%C3%BCser:p%C3%A4ssword@xn--n3h.net/p%C3%A5th'
    assert headers['content-location'] == 'http://xn--n3h.net/'
def test_new_response_iterator_behavior(self):
    """Interplay of ``response``/``data``/``stream``: string bodies are
    encoded eagerly, generator bodies are converted to sequences on
    ``data`` access unless implicit conversion is disabled."""
    req = wrappers.Request.from_values()
    resp = wrappers.Response(u'Hello Wörld!')

    def get_content_length(resp):
        headers = wrappers.Headers.linked(resp.get_wsgi_headers(req.environ))
        return headers.get('content-length', type=int)

    def generate_items():
        yield "Hello "
        yield u"Wörld!"

    # werkzeug encodes when set to `data` now, which happens
    # if a string is passed to the response object.
    assert resp.response == [u'Hello Wörld!'.encode('utf-8')]
    assert resp.data == u'Hello Wörld!'.encode('utf-8')
    assert get_content_length(resp) == 13
    assert not resp.is_streamed
    assert resp.is_sequence

    # try the same for manual assignment
    resp.data = u'Wörd'
    assert resp.response == [u'Wörd'.encode('utf-8')]
    assert resp.data == u'Wörd'.encode('utf-8')
    assert get_content_length(resp) == 5
    assert not resp.is_streamed
    assert resp.is_sequence

    # automatic generator sequence conversion
    resp.response = generate_items()
    assert resp.is_streamed
    assert not resp.is_sequence
    assert resp.data == u'Hello Wörld!'.encode('utf-8')
    assert resp.response == ['Hello ', u'Wörld!'.encode('utf-8')]
    assert not resp.is_streamed
    assert resp.is_sequence

    # automatic generator sequence conversion
    resp.response = generate_items()
    resp.implicit_sequence_conversion = False
    assert resp.is_streamed
    assert not resp.is_sequence
    # with implicit conversion off, .data on a stream raises
    self.assert_raises(RuntimeError, lambda: resp.data)
    resp.make_sequence()
    assert resp.data == u'Hello Wörld!'.encode('utf-8')
    assert resp.response == ['Hello ', u'Wörld!'.encode('utf-8')]
    assert not resp.is_streamed
    assert resp.is_sequence

    # stream makes it a list no matter how the conversion is set
    for val in True, False:
        resp.implicit_sequence_conversion = val
        resp.response = ("foo", "bar")
        assert resp.is_sequence
        resp.stream.write('baz')
        assert resp.response == ['foo', 'bar', 'baz']
def test_form_data_ordering(self):
    """A custom ``parameter_storage_class`` preserves query-string order
    and duplicate keys."""
    class MyRequest(wrappers.Request):
        parameter_storage_class = ImmutableOrderedMultiDict

    req = MyRequest.from_values('/?foo=1&bar=0&foo=3')
    assert list(req.args) == ['foo', 'bar']
    assert req.args.items(multi=True) == [
        ('foo', '1'),
        ('bar', '0'),
        ('foo', '3')
    ]
    assert isinstance(req.args, ImmutableOrderedMultiDict)
    assert isinstance(req.values, CombinedMultiDict)
    assert req.values['foo'] == '1'
    assert req.values.getlist('foo') == ['1', '3']
def test_storage_classes(self):
    """The ``*_storage_class`` attributes control the container types
    used for cookies, access_route and parsed parameters."""
    class MyRequest(wrappers.Request):
        dict_storage_class = dict
        list_storage_class = list
        parameter_storage_class = dict
    req = MyRequest.from_values('/?foo=baz', headers={
        'Cookie': 'foo=bar'
    })
    assert type(req.cookies) is dict
    assert req.cookies == {'foo': 'bar'}
    assert type(req.access_route) is list

    assert type(req.args) is dict
    assert type(req.values) is CombinedMultiDict
    assert req.values['foo'] == 'baz'

    # defaults are immutable containers
    req = wrappers.Request.from_values(headers={
        'Cookie': 'foo=bar'
    })
    assert type(req.cookies) is ImmutableTypeConversionDict
    assert req.cookies == {'foo': 'bar'}
    assert type(req.access_route) is ImmutableList

    MyRequest.list_storage_class = tuple
    req = MyRequest.from_values()
    assert type(req.access_route) is tuple
def test_response_headers_passthrough(self):
    """A Headers object passed to the constructor is used as-is, not copied."""
    hdrs = wrappers.Headers()
    assert wrappers.Response(headers=hdrs).headers is hdrs
def test_response_304_no_content_length(self):
    """A 304 response must not emit a Content-Length header."""
    resp = wrappers.Response('Test', status=304)
    wsgi_headers = resp.get_wsgi_headers(create_environ())
    assert 'content-length' not in wsgi_headers
def test_ranges(self):
    """Range request parsing and Content-Range response round-trips."""
    # basic range stuff
    req = wrappers.Request.from_values()
    assert req.range is None
    req = wrappers.Request.from_values(headers={'Range': 'bytes=0-499'})
    # parsed ranges use half-open (start, stop) pairs
    assert req.range.ranges == [(0, 500)]

    resp = wrappers.Response()
    resp.content_range = req.range.make_content_range(1000)
    assert resp.content_range.units == 'bytes'
    assert resp.content_range.start == 0
    assert resp.content_range.stop == 500
    assert resp.content_range.length == 1000
    assert resp.headers['Content-Range'] == 'bytes 0-499/1000'

    resp.content_range.unset()
    assert 'Content-Range' not in resp.headers

    resp.headers['Content-Range'] = 'bytes 0-499/1000'
    assert resp.content_range.units == 'bytes'
    assert resp.content_range.start == 0
    assert resp.content_range.stop == 500
    assert resp.content_range.length == 1000
def test_auto_content_length(self):
    """Content-Length is known eagerly for string bodies and computed
    when WSGI headers are built for list bodies."""
    resp = wrappers.Response('Hello World!')
    assert resp.content_length == 12
    resp = wrappers.Response(['Hello World!'])
    assert resp.content_length is None
    assert resp.get_wsgi_headers({})['Content-Length'] == '12'
def test_disabled_auto_content_length(self):
    """``automatically_set_content_length = False`` suppresses the
    automatic Content-Length header entirely."""
    class MyResponse(wrappers.Response):
        automatically_set_content_length = False
    resp = MyResponse('Hello World!')
    self.assert_(resp.content_length is None)
    resp = MyResponse(['Hello World!'])
    self.assert_(resp.content_length is None)
    self.assert_('Content-Length' not in resp.get_wsgi_headers({}))
def test_location_header_autocorrect(self):
    """Relative Location headers are made absolute unless
    ``autocorrect_location_header`` is disabled."""
    env = create_environ()
    class MyResponse(wrappers.Response):
        autocorrect_location_header = False
    resp = MyResponse('Hello World!')
    resp.headers['Location'] = '/test'
    self.assert_equal(resp.get_wsgi_headers(env)['Location'], '/test')
    resp = wrappers.Response('Hello World!')
    resp.headers['Location'] = '/test'
    self.assert_equal(resp.get_wsgi_headers(env)['Location'], 'http://localhost/test')
def suite():
    """Build the unittest suite for the wrapper tests.

    Returns:
        unittest.TestSuite containing all tests of ``WrappersTestCase``.
    """
    # renamed the local so it no longer shadows this function's own name
    wrapper_suite = unittest.TestSuite()
    wrapper_suite.addTest(unittest.makeSuite(WrappersTestCase))
    return wrapper_suite
|
[
"oriol@uri.cat"
] |
oriol@uri.cat
|
0e61b44d530b816a0aa5513e650f5a85bfbc5a73
|
1c37f33a0031c4caee8a6bb5e7f748ff147f1d23
|
/product/migrations/0005_variationmanager.py
|
cdf85bd944d895afdbe4b679da2c219ddc2bdccf
|
[] |
no_license
|
fikocavit/e-commerce
|
34003821de7e58b8554acffe98f22958b072aea5
|
0ae07653042677e107cbe2da43ae247848a281da
|
refs/heads/master
| 2023-05-02T18:53:45.995674
| 2021-05-26T09:56:34
| 2021-05-26T09:56:34
| 368,474,647
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 462
|
py
|
# Generated by Django 3.1 on 2021-05-21 09:02
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django (see header); creates the VariationManager
    # model with only the implicit auto primary key.

    dependencies = [
        ('product', '0004_auto_20210521_0843'),
    ]

    operations = [
        migrations.CreateModel(
            name='VariationManager',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
            ],
        ),
    ]
|
[
"firat.cavit@hotmail.com"
] |
firat.cavit@hotmail.com
|
f1e92c9e31fb60a62c8c597e1bf458d31f4fce2b
|
08954e1a6405612aa7ee432f55210ad053127ccc
|
/test/test_parser.py
|
d4476a33e04f4ead9bd31c0c0068b84c6cbb0e8b
|
[
"MIT"
] |
permissive
|
burritojustice/xyz-qgis-plugin
|
d4260fe0faa853761387aae84475ef3b737dcbc2
|
37b7d84992155fe35d9578b58c9d74a198eccb40
|
refs/heads/master
| 2022-10-14T18:41:50.251108
| 2020-06-09T15:48:18
| 2020-06-10T14:51:38
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 18,230
|
py
|
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright (c) 2019 HERE Europe B.V.
#
# SPDX-License-Identifier: MIT
#
###############################################################################
import json
import random
import numpy as np
from test.utils import (BaseTestAsync, TestFolder, format_long_args,
len_of_struct, len_of_struct_sorted, flatten,
format_map_fields)
from qgis.core import QgsFields, QgsVectorLayer
from qgis.testing import unittest
from XYZHubConnector.xyz_qgis.layer import parser
# import unittest
# class TestParser(BaseTestAsync, unittest.TestCase):
# import unittest
# class TestParser(BaseTestAsync, unittest.TestCase):
class TestParser(BaseTestAsync):
    """Tests for the XYZHubConnector geojson parser.

    Covers three directions: XYZ geojson -> QgsFeature, XYZ geojson ->
    per-geometry map of fields/features, and QgsFeature -> XYZ geojson.
    NOTE(review): original indentation was lost in extraction; structure
    below is reconstructed to match the visible statement order.
    """
    def __init__(self,*a,**kw):
        super().__init__(*a,**kw)
        # percent threshold used when merging similar field sets
        self.similarity_threshold=80

    ######## Parse xyz geojson -> QgsFeature
    def test_parse_xyzjson(self):
        folder = "xyzjson-small"
        fnames = [
            "airport-xyz.geojson",
            "water-xyz.geojson"
        ]
        for fname in fnames:
            self.subtest_parse_xyzjson(folder,fname)

    def subtest_parse_xyzjson(self,folder,fname):
        """Parse one geojson fixture and check fields and geometry."""
        with self.subTest(folder=folder,fname=fname):
            resource = TestFolder(folder)
            txt = resource.load(fname)
            obj = json.loads(txt)
            obj_feat = obj["features"]
            fields = QgsFields()
            feat = [parser.xyz_json_to_feat(ft, fields) for ft in obj_feat]
            self._assert_parsed_fields(obj_feat, feat, fields)
            self._assert_parsed_geom(obj_feat, feat, fields)

    def _assert_parsed_fields_unorder(self, obj_feat, feat, fields):
        # self._log_debug(fields.names())
        # self._log_debug("debug id, json vs. QgsFeature")
        # self._log_debug([o["id"] for o in obj_feat])
        # self._log_debug([ft.attribute(parser.QGS_XYZ_ID) for ft in feat])
        names = fields.names()
        self.assertTrue(parser.QGS_XYZ_ID in names,
            "%s %s" % (len(names), names))
        self.assertEqual( len(obj_feat), len(feat))

    def _assert_parsed_fields(self, obj_feat, feat, fields):
        self._assert_parsed_fields_unorder(obj_feat, feat, fields)
        # builds a verbose diff message between json props and parsed fields
        def msg_fields(obj):
            return (
                "{sep}{0}{sep}{1}"
                "{sep}fields-props {2}"
                "{sep}props-fields {3}"
                "{sep}json {4}"
                .format(*tuple(map(
                    lambda x: "%s %s" % (len(x), x), [
                        obj_props,
                        fields.names(),
                        set(fields.names()).difference(obj_props),
                        set(obj_props).difference(fields.names())
                    ])),
                    format_long_args(json.dumps(obj)),
                    sep="\n>> ")
            )
        for o in obj_feat:
            obj_props = list(o["properties"].keys())
            self.assertLessEqual( len(obj_props), fields.size(), msg_fields(o))
            self.assertTrue( set(obj_props) < set(fields.names()), msg_fields(o))
            # self.assertEqual( obj_props, fields.names(), msg_fields(o)) # strict assert

    def _assert_parsed_geom_unorder(self, obj_feat, feat, fields, geom_str):
        for ft in feat:
            geom = json.loads(ft.geometry().asJson()) # limited to 13 or 14 precison (ogr.CreateGeometryFromJson)
            self.assertEqual(geom["type"], geom_str)

    def _assert_parsed_geom(self, obj_feat, feat, fields):
        # both crs is WGS84
        for o, ft in zip(obj_feat, feat):
            geom = json.loads(ft.geometry().asJson()) # limited to 13 or 14 precison (ogr.CreateGeometryFromJson)
            obj_geom = o["geometry"]
            self.assertEqual(geom["type"], obj_geom["type"])
            id_ = ft.attribute(parser.QGS_XYZ_ID)
            obj_id_ = o["id"]
            self.assertEqual(id_, obj_id_)
            # self._log_debug(geom)
            # self._log_debug(obj_geom)
            # coords = obj_geom["coordinates"]
            # obj_geom["coordinates"] = [round(c, 13) for c in coords]
            # obj_geom["coordinates"] = [float("%.13f"%c) for c in coords]
            # self.assertDictEqual(geom, obj_geom) # precision
            # for c1, c2 in zip(geom["coordinates"], obj_geom["coordinates"]):
            #     self.assertAlmostEqual(c1, c2, places=13)
            c1 = np.array(obj_geom["coordinates"])
            c2 = np.array(geom["coordinates"])
            if c1.shape != c2.shape:
                self._log_debug(
                    "\nWARNING: Geometry has mismatch shape",
                    c1.shape, c2.shape,
                    "\nOriginal geom has problem. Testing parsed geom..")
                self.assertEqual(c2.shape[-1], 2,
                    "parsed geom has wrong shape of coord")
                continue
            else:
                self.assertLess( np.max(np.abs(c1 - c2)), 1e-13,
                    "parsed geometry error > 1e-13")

    # @unittest.skip("large")
    def test_parse_xyzjson_large(self):
        folder = "xyzjson-large"
        fnames = [
            "cmcs-osm-dev-building-xyz.geojson",
            "cmcs-osm-dev-building-xyz-30000.geojson",
        ]
        for fname in fnames:
            self.subtest_parse_xyzjson(folder,fname)

    ######## Parse xyz geojson -> struct of geom: [fields], [[QgsFeature]]
    def test_parse_xyzjson_map(self):
        folder = "xyzjson-small"
        fnames = [
            "mixed-xyz.geojson",
        ]
        for fname in fnames:
            self.subtest_parse_xyzjson_map(folder,fname)
        mix_fnames = [
            "airport-xyz.geojson",
            "water-xyz.geojson",
        ]
        self.subtest_parse_xyzjson_mix(folder,mix_fnames)

    def test_parse_xyzjson_map_similarity_0(self):
        # temporarily drop the threshold to 0, restore it afterwards
        s = self.similarity_threshold
        self.similarity_threshold = 0
        try:
            folder = "xyzjson-small"
            fnames = [
                "mixed-xyz.geojson",
            ]
            for fname in fnames:
                with self.subTest(folder=folder,fname=fname,
                        similarity_threshold=self.similarity_threshold):
                    map_fields = self._parse_xyzjson_map_simple(folder,fname)
                    self._assert_map_fields_similarity_0(map_fields)
        finally:
            self.similarity_threshold = s

    def test_parse_xyzjson_map_dupe_case(self):
        folder = "xyzjson-small"
        fnames = [
            "airport-xyz.geojson",
            "water-xyz.geojson",
        ]
        for fname in fnames:
            self.subtest_parse_xyzjson_map_dupe_case(folder,fname)

    def _parse_xyzjson_map_simple(self,folder,fname):
        resource = TestFolder(folder)
        txt = resource.load(fname)
        obj = json.loads(txt)
        return self.subtest_parse_xyzjson_map_chunk(obj)

    def subtest_parse_xyzjson_map_dupe_case(self,folder,fname):
        """Inject case-mangled duplicates of property names and check the
        parser keeps them in separate field sets."""
        with self.subTest(folder=folder,fname=fname):
            import random
            mix_case = lambda txt, idx: "".join([
                (s.lower() if s.isupper() else s.upper())
                if i == idx else s
                for i, s in enumerate(txt)])
            new_feat = lambda ft, props: dict(ft, properties=dict(props))
            n_new_ft = 2
            with self.subTest(folder=folder,fname=fname):
                resource = TestFolder(folder)
                txt = resource.load(fname)
                obj = json.loads(txt)
                features = obj["features"]
                features[0]["properties"].update(fid=1) # test fid
                lst_k = list()
                lst_new_k = list()
                props_ = dict(obj["features"][0]["properties"])
                props_ = sorted(props_.keys())
                debug_msg = ""
                for k in props_:
                    lst_k.append(k)
                    for i in range(n_new_ft):
                        ft = dict(features[0])
                        props = dict(ft["properties"])
                        new_k = k
                        while new_k == k:
                            idx = random.randint(0,len(k)-1)
                            if k == "fid": idx = i
                            new_k = mix_case(k, idx)
                        if new_k not in lst_new_k: lst_new_k.append(new_k)
                        debug_msg += format_long_args("\n", "mix_case", k, new_k, props[k], idx)
                        props[new_k] = props.pop(k) or ""
                        new_ft = new_feat(ft, props)
                        features.append(new_ft)
                map_fields = self.subtest_parse_xyzjson_map_chunk(obj,chunk_size=1)

                # assert that parser handle dupe of case insensitive prop name, e.g. name vs Name
                self.assertEqual(len(map_fields),1, "not single geom")
                lst_fields = list(map_fields.values())[0]
                for k in lst_k:
                    self.assertIn(k, lst_fields[0].names())

                # debug
                debug_msg += format_long_args("\n", lst_fields[0].names())
                for k, fields in zip(lst_new_k, lst_fields[1:]):
                    if k.lower() in {parser.QGS_ID, parser.QGS_XYZ_ID}:
                        k = "{}_{}".format(k,
                            "".join(str(i) for i, s in enumerate(k) if s.isupper()))
                    debug_msg += format_long_args("\n", k in fields.names(), k, fields.names())

                # self.assertEqual(len(lst_fields), len(lst_new_k) + 1)
                for k, fields in zip(lst_new_k, lst_fields[1:]):
                    if k.lower() in {parser.QGS_ID, parser.QGS_XYZ_ID}:
                        k = "{}_{}".format(k,
                            "".join(str(i) for i, s in enumerate(k) if s.isupper()))
                    self.assertIn(k, fields.names(),
                        "len lst_fields vs. len keys: %s != %s" %
                        (len(lst_fields), len(lst_new_k) + 1) +
                        debug_msg
                    )

    def subtest_parse_xyzjson_map(self,folder,fname):
        with self.subTest(folder=folder,fname=fname):
            resource = TestFolder(folder)
            txt = resource.load(fname)
            obj = json.loads(txt)
            self.subtest_parse_xyzjson_map_shuffle(obj)
            self.subtest_parse_xyzjson_map_multi_chunk(obj)

    def subtest_parse_xyzjson_mix(self,folder,fnames):
        """Merge several fixtures, shuffle, then run the map subtests."""
        if len(fnames) < 2: return
        with self.subTest(folder=folder, fname="mix:"+",".join(fnames)):
            resource = TestFolder(folder)
            lst_obj = [
                json.loads(resource.load(fname))
                for fname in fnames
            ]
            obj = lst_obj[0]
            for o in lst_obj[1:]:
                obj["features"].extend(o["features"])
            random.seed(0.1)
            random.shuffle(obj["features"])
            self.subtest_parse_xyzjson_map_shuffle(obj)
            self.subtest_parse_xyzjson_map_multi_chunk(obj)

    def subtest_parse_xyzjson_map_multi_chunk(self, obj, lst_chunk_size=None):
        """Chunked parsing must produce the same field structure as
        parsing everything at once, for several chunk sizes."""
        if not lst_chunk_size:
            # powers of ten up to one above the feature count's magnitude
            p10 = 1+len(str(len(obj["features"])))
            lst_chunk_size = [10**i for i in range(p10)]
        with self.subTest(lst_chunk_size=lst_chunk_size):
            ref_map_feat, ref_map_fields = self.do_test_parse_xyzjson_map(obj)
            lst_map_fields = list()
            for chunk_size in lst_chunk_size:
                map_fields = self.subtest_parse_xyzjson_map_chunk(obj, chunk_size)
                if map_fields is None: continue
                lst_map_fields.append(map_fields)
            for map_fields, chunk_size in zip(lst_map_fields, lst_chunk_size):
                with self.subTest(chunk_size=chunk_size):
                    self._assert_len_map_fields(
                        map_fields, ref_map_fields)

    def subtest_parse_xyzjson_map_shuffle(self, obj, n_shuffle=5, chunk_size=10):
        """Field structure must be stable under feature-order shuffles."""
        with self.subTest(n_shuffle=n_shuffle):
            o = dict(obj)
            ref_map_feat, ref_map_fields = self.do_test_parse_xyzjson_map(o)
            lst_map_fields = list()
            random.seed(0.5)
            for i in range(n_shuffle):
                random.shuffle(o["features"])
                map_fields = self.subtest_parse_xyzjson_map_chunk(o, chunk_size)
                if map_fields is None: continue
                lst_map_fields.append(map_fields)
                # self._log_debug("parsed fields shuffle", len_of_struct(map_fields))
            for i, map_fields in enumerate(lst_map_fields):
                with self.subTest(shuffle=i):
                    self._assert_len_map_fields(
                        map_fields, ref_map_fields)

    def subtest_parse_xyzjson_map_chunk(self, obj, chunk_size=100):
        """Feed features to the parser in chunks, accumulating map_fields."""
        similarity_threshold = self.similarity_threshold
        with self.subTest(chunk_size=chunk_size, similarity_threshold=similarity_threshold):
            o = dict(obj)
            obj_feat = obj["features"]
            lst_map_feat = list()
            map_fields = dict()
            for i0 in range(0,len(obj_feat), chunk_size):
                chunk = obj_feat[i0:i0+chunk_size]
                o["features"] = chunk
                map_feat, _ = parser.xyz_json_to_feature_map(o, map_fields, similarity_threshold)
                self._assert_parsed_map(chunk, map_feat, map_fields)
                lst_map_feat.append(map_feat)
                # self._log_debug("len feat", len(chunk))
                # self._log_debug("parsed feat", len_of_struct(map_feat))
                # self._log_debug("parsed fields", len_of_struct(map_fields))
            lst_feat = flatten([x.values() for x in lst_map_feat])
            self.assertEqual(len(lst_feat), len(obj["features"]))
            return map_fields

    def do_test_parse_xyzjson_map(self, obj, similarity_threshold=None):
        obj_feat = obj["features"]
        # map_fields=dict()
        if similarity_threshold is None:
            similarity_threshold = self.similarity_threshold
        map_feat, map_fields = parser.xyz_json_to_feature_map(obj, similarity_threshold=similarity_threshold)
        self._log_debug("len feat", len(obj_feat))
        self._log_debug("parsed feat", len_of_struct(map_feat))
        self._log_debug("parsed fields", len_of_struct(map_fields))
        self._assert_parsed_map(obj_feat, map_feat, map_fields)
        return map_feat, map_fields

    def _assert_len_map_fields(self, map_fields, ref, strict=False):
        # strict compares exact per-geom counts; otherwise sorted counts
        len_ = len_of_struct if strict else len_of_struct_sorted
        self.assertEqual(
            len_(map_fields), len_(ref), "\n".join([
                "map_fields, ref_map_fields",
                format_map_fields(map_fields),
                format_map_fields(ref),
            ])
        )

    def _assert_parsed_map(self, obj_feat, map_feat, map_fields):
        self._assert_len_map_feat_fields(map_feat, map_fields)
        self.assertEqual(len(obj_feat),
            sum(len(lst)
                for lst_lst in map_feat.values()
                for lst in lst_lst),
            "total len of parsed feat incorrect")
        # NOTE: obj_feat order does not corresponds to that of map_feat
        # -> use unorder assert
        for geom_str in map_feat:
            for feat, fields in zip(map_feat[geom_str], map_fields[geom_str]):
                o = obj_feat[:len(feat)]
                self._assert_parsed_fields_unorder(o, feat, fields)
                self._assert_parsed_geom_unorder(o, feat, fields, geom_str)
                obj_feat = obj_feat[len(feat):]

    def _assert_len_map_feat_fields(self, map_feat, map_fields):
        self.assertEqual(map_feat.keys(), map_fields.keys())
        for geom_str in map_feat:
            self.assertEqual(len(map_feat[geom_str]), len(map_fields[geom_str]),
                "len mismatch: map_feat, map_fields" +
                "\n %s \n %s" % (len_of_struct(map_feat), len_of_struct(map_fields))
            )

    def _assert_map_fields_similarity_0(self, map_fields):
        fields_cnt = {k:len(lst_fields) for k, lst_fields in map_fields.items()}
        ref = {k:1 for k in map_fields}
        self.assertEqual(fields_cnt, ref,
            "given similarity_threshold=0, " +
            "map_fields should have exact 1 layer/fields per geom")

    def test_parse_xyzjson_map_large(self):
        folder = "xyzjson-large"
        fnames = [
            "cmcs-osm-dev-building-xyz.geojson",
            "cmcs-osm-dev-road-xyz.geojson",
        ]
        for fname in fnames:
            self.subtest_parse_xyzjson_map(folder,fname)

    ######## Parse QgsFeature -> json
    def test_parse_qgsfeature(self):
        self.subtest_parse_qgsfeature("geojson-small","airport-qgis.geojson") # no xyz_id

    def subtest_parse_qgsfeature(self,folder,fname):
        # qgs layer load geojson -> qgs feature
        # parse feature to xyz geojson
        # compare geojson and xyzgeojson
        with self.subTest(folder=folder,fname=fname):
            resource = TestFolder(folder)
            path = resource.fullpath(fname)
            txt = resource.load(fname)
            obj = json.loads(txt)
            vlayer = QgsVectorLayer(path, "test", "ogr")
            feat = parser.feature_to_xyz_json(list(vlayer.getFeatures()),is_new=True) # remove QGS_XYZ_ID if exist
            self._log_debug(feat)
            self.assertListEqual(obj["features"],feat)
            self.assertEqual(len(obj["features"]),len(feat))

    def test_parse_qgsfeature_large(self):
        pass
if __name__ == "__main__":
    # unittest.main()
    # Commented entries are kept for quickly toggling which tests run.
    tests = [
        # "TestParser.test_parse_xyzjson",
        "TestParser.test_parse_xyzjson_map_similarity_0",
        # "TestParser.test_parse_xyzjson_map",
        # "TestParser.test_parse_xyzjson_map_dupe_case",
        # "TestParser.test_parse_xyzjson_large",
        # "TestParser.test_parse_xyzjson_map_large",
    ]
    # unittest.main(defaultTest = tests, failfast=True) # will not run all subtest
    unittest.main(defaultTest = tests)
|
[
"16268924+minff@users.noreply.github.com"
] |
16268924+minff@users.noreply.github.com
|
f763e33920d9e9ed6b6427fa7aa3b468468add6b
|
ea8860ef1bde80ea7a9f087f67709918626c3220
|
/scrapyprojects/motube/motube/spiders/recursivetube.py
|
e0133cbe315cfa42d9247588324288c3a36bfb34
|
[] |
no_license
|
yorin/scripts
|
29db2d6d0737c508f9c16e0a04b820fc278e3733
|
100b18ec11861e42fcf8de2762d2f7ebe4d74e50
|
refs/heads/master
| 2020-06-04T00:36:18.592100
| 2019-05-15T13:16:22
| 2019-05-15T13:16:22
| 32,155,714
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 892
|
py
|
# -*- coding: utf-8 -*-
from scrapy.http import Request
from motube.items import MotubeItem
import scrapy
class RecursivetubeSpider(scrapy.Spider):
    """Scrapy spider that requests several YouTube search-result pages
    and collects video titles and URLs into MotubeItem objects.

    NOTE(review): Python 2 code (``print i`` statement); indentation was
    lost in extraction and is reconstructed here.
    """
    name = "recursivetube"
    allowed_domains = ["youtube.com"]
    start_urls = (
        # 'http://www.youtube.com/',
        'https://www.youtube.com/results?search_query=rush',
    )

    def start_requests(self):
        # request result pages 1..4 explicitly instead of following links
        for i in range(1,5):
            yield Request('https://www.youtube.com/results?search_query=rush&page=' + str(i), self.parse_items)
            print i

    def parse_items(self, response):
        # one item per result entry; title/url extracted from the lockup title
        sels = []
        for sel in response.xpath('//div[@class="yt-lockup-content"]/h3[@class="yt-lockup-title"]'):
            datafiles = MotubeItem()
            datafiles["title"] = sel.xpath('a/@title').extract()
            datafiles["url"] = sel.xpath('a/@href').extract()
            sels.append(datafiles)
        return sels
|
[
"werbyvillarino@gmail.com"
] |
werbyvillarino@gmail.com
|
300fb83211f3f1329e2de6d6bc5c4976fda3927d
|
82e02cee23da4c12a6e39bc6fc53760bfdd1d7a6
|
/bottle-test.py
|
3e488f3ad390feaada6ee09b21123c6f54c3a230
|
[
"MIT"
] |
permissive
|
hanky312/Gcoin-api
|
a40d78e0e7ed4ca1da8417741fa06bfdc56d9935
|
81572de95de45f4e46c9e50da5584855c41d6f54
|
refs/heads/master
| 2021-01-18T17:24:55.612132
| 2017-04-25T19:00:43
| 2017-04-25T19:00:43
| 86,796,596
| 6
| 1
| null | 2017-04-25T19:00:43
| 2017-03-31T08:29:24
|
Python
|
UTF-8
|
Python
| false
| false
| 11,179
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from bottle import route, request, run, response
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
from datetime import datetime
import urllib,json
import pprint
import decimal, simplejson
#fix json type
class DecimalJSONEncoder(simplejson.JSONEncoder):
def default(self, o):
if isinstance(o, decimal.Decimal):
return str(o)
return super(DecimalJSONEncoder, self).default(o)
#./gcoin/gcoin.conf type rpcuser; rpcpassword; rpcport
RPC_USER = 'bitcoinrpc'
RPC_PASSWORD = '6SWniYid45ph9VFhVPepSzin2oJSsyepWiZKnJitZELD'
rpc_connection = AuthServiceProxy("http://%s:%s@localhost:58345"%(RPC_USER, RPC_PASSWORD))
LENGTH_CHECK = 64
#gcoin-cli listwalletaddress
@route('/listwalletaddress', method='GET')
def listwalletaddress():
listwalletaddress = rpc_connection.listwalletaddress()
response.add_header("Access-Control-Allow-Origin", "*")
return json.dumps(listwalletaddress)
#gcoin-cli getrawtransaction
@route('/tx/<tx_id>', method='GET')
def tx(tx_id=''):
if len(tx_id) != LENGTH_CHECK:
return "no tx_id "
raw_tx = rpc_connection.getrawtransaction(tx_id,1)
response.add_header("Access-Control-Allow-Origin", "*")
return raw_tx
#gcoin-cli sendrawtransaction
@route('/sendrawtransaction/<sendrawtransaction_data>', methon='GET')
def sendrawtransaction(sendrawtransaction_data=''):
sendrawtransaction = rpc_connection.sendrawtransaction(sendrawtransaction_data)
response.add_header("Access-Control-Allow-Origin", "*")
return { "tx_id": sendrawtransaction }
#gcoin-cli signrawtransaction
@route('/signrawtransaction/<signrawtransaction_data>', methon='GET')
def signrawtransaction(signrawtransaction_data=''):
signrawtransaction = rpc_connection.signrawtransaction(signrawtransaction_data)
response.add_header("Access-Control-Allow-Origin", "*")
return signrawtransaction
#gcoin-cli getblock
@route('/block/<block_hash>', method='GET')
def block(block_hash=''):
if len(block_hash) != LENGTH_CHECK:
return "no block_hash "
block_hash = rpc_connection.getblock(block_hash)
block_hash = simplejson.dumps(block_hash,cls=DecimalJSONEncoder)
response.add_header("Access-Control-Allow-Origin", "*")
return block_hash
#gcoin-cli getblockhash
@route('/blockindex/<block_hash_index>', method='GET')
def blockindex(block_hash_index=''):
block_hash = rpc_connection.getblockhash(int(block_hash_index))
block_hash = {'blockhash ' + str(block_hash_index) : block_hash}
response.add_header("Access-Control-Allow-Origin", "*")
return json.dumps(block_hash)
#gcoin-cli getfixedaddress
@route('/getfixedaddress', method='GET')
def getfixedaddressr():
getfixedaddress = rpc_connection.getfixedaddress()
response.add_header("Access-Control-Allow-Origin", "*")
return { "gcoin-cli getfixedaddress": getfixedaddress }
# return json.dumps(getfixedaddress)
#gcoin-cli getmemberlist
@route('/getmemberlist', method='GET')
def getmemberlist():
getmemberlist = rpc_connection.getmemberlist()
response.add_header("Access-Control-Allow-Origin", "*")
return getmemberlist
#gcoin-cli getinfo
@route('/getinfo', method='GET')
def getinfo():
getinfo = rpc_connection.getinfo()
response.add_header("Access-Control-Allow-Origin", "*")
return getinfo
#gcoin-cli getpeerinfo
@route('/getpeerinfo', method='GET')
def getpeerinfo():
getpeerinfo = rpc_connection.getpeerinfo()
response.add_header("Access-Control-Allow-Origin", "*")
return { "gcoin-cli getpeerinfo": getpeerinfo }
#gcoin-cli getnetworkinfo
@route('/getnetworkinfo', method='GET')
def getnetworkinfo():
getnetworkinfo = rpc_connection.getnetworkinfo()
response.add_header("Access-Control-Allow-Origin", "*")
return getnetworkinfo
#gcoin-cli getgenerate
@route('/getgenerate', method='GET')
def getgenerate():
getgenerate = rpc_connection.getgenerate()
response.add_header("Access-Control-Allow-Origin", "*")
return { "getgenerate": getgenerate}
#gcoin-cli setgenerate
#bug, unimplement
# @route('/setgenerate/<setgenerate_id>', method='GET')
# def setgenerate(setgenerate_id=''):
# setgenerate_id = setgenerate_id.lower()
# setgenerate = rpc_connection.setgenerate(setgenerate_id)
# response.add_header("Access-Control-Allow-Origin", "*")
# return { "setgenerate": setgenerate }
#gcoin-cli getminerlist
@route('/getminerlist', methos='GET')
def getminerlist():
getminerlist = rpc_connection.getminerlist()
response.add_header("Access-Control-Allow-Origin", "*")
return getminerlist
#gcoin-cli getmininginfo
@route('/getmininginfo', method='GET')
def getmininginfo():
getmininginfo = rpc_connection.getmininginfo()
response.add_header("Access-Control-Allow-Origin", "*")
return getmininginfo
#gcoin-cli getblockcount
@route('/getblockcount', method='GET')
def getblockcount():
getblockcount = rpc_connection.getblockcount()
getblockcount = {'blockcount':getblockcount}
response.add_header("Access-Control-Allow-Origin", "*")
return json.dumps(getblockcount)
#gcoin-cli mint
@route('/mint/<mint_amount>/<mint_color>', method='GET')
def mint(mint_amount='',mint_color=''):
mint = rpc_connection.mint(int(mint_amount),int(mint_color))
response.add_header("Access-Control-Allow-Origin", "*")
return { "tx_id": mint }
#gcoin-cli mintforminer
@route('/mintforminer', method='GET')
def mintforminer():
mintforminer = rpc_connection.mintforminer()
response.add_header("Access-Control-Allow-Origin", "*")
return { "tx_id": mintforminer }
#gcoin-cli mintforlicense
@route('/mintforlicense', method='GET')
def mintforlicense():
mintforlicense = rpc_connection.mintforlicense()
response.add_header("Access-Control-Allow-Origin", "*")
return { "tx_id": mintforlicense }
#gcoin-cli getbalance
@route('/getbalance', method='GET')
def getbalance():
getbalance = rpc_connection.getbalance()
response.add_header("Access-Control-Allow-Origin", "*")
return getbalance
#gcoin-cli getlicenseinfo
@route('/getlicenseinfo/<getlicenseinfo_index>', method='GET')
def getlicenseinfo(getlicenseinfo_index=''):
getlicenseinfo = rpc_connection.getlicenseinfo(int(getlicenseinfo_index))
# getlicenseinfo = {'getlicenseinfo ' + str(getlicenseinfo_index) : getlicenseinfo}
response.add_header("Access-Control-Allow-Origin", "*")
return getlicenseinfo
# return { "gcoin-cli getlicenseinfo": getlicenseinfo }
#gcoin-cli getlicenselist
@route('/getlicenselist', method='GET')
def getlicenselist():
getlicenselist = rpc_connection.getlicenselist()
response.add_header("Access-Control-Allow-Origin", "*")
return getlicenselist
#gcoin-cli sendlicensetoaddress
#license_comment decode tool https://hanky312.github.io/gcoin-encoder/
license_comment = "721101000547636f696e0547636f696e22314e39534650686f6d63794e466352526e5347746335654433354c445741515671560100000000000000000000000022314e39534650686f6d63794e466352526e5347746335654433354c44574151567156000000000000000000000000000a472d636f696e2e6f72670000000000000000000000000000000000000000000000000000000000000000"
@route('/sendlicensetoaddress/<sendlicensetoaddress_addr>/<sendlicensetoaddress_color>', method='GET')
def sendlicensetoaddress(sendlicensetoaddress_addr='',sendlicensetoaddress_color=''):
sendlicensetoaddress = rpc_connection.sendlicensetoaddress(str(sendlicensetoaddress_addr),int(sendlicensetoaddress_color),license_comment)
response.add_header("Access-Control-Allow-Origin", "*")
return { "tx_id": sendlicensetoaddress }
#gcoin-cli sendtoaddress
@route('/sendtoaddress/<address>/<amount>/<color>', method='GET')
def sendtoaddress(address='',amount='',color=''):
sendtoaddress = rpc_connection.sendtoaddress(str(address),int(amount),int(color))
response.add_header("Access-Control-Allow-Origin", "*")
return { "tx_id": sendtoaddress }
#gcoin-cli sendfrom
@route('/sendfrom/<from_address>/<to_adddress>/<amount>/<color>', methos='GET')
def sendfrom(from_address='',to_adddress='',amount='',color=''):
sendfrom = rpc_connection.sendfrom(str(from_address),str(to_adddress),int(amount),int(color))
response.add_header("Access-Control-Allow-Origin", "*")
return { "tx_id": sendfrom }
#gcoin-cli getaddressbalance
@route('/getaddressbalance/<address>', method='GET')
def getaddressbalance(address=''):
getaddressbalance = rpc_connection.getaddressbalance(str(address))
response.add_header("Access-Control-Allow-Origin", "*")
return getaddressbalance
#gcoin-cli importaddress
@route('/importaddress/<address>', method='GET')
def importaddress(address=''):
importaddress = rpc_connection.importaddress(str(address))
response.add_header("Access-Control-Allow-Origin", "*")
return { "importaddress": address }
#gcoin-cli importprivkey
@route('/importprivkey/<privkey>', methon='GET')
def importprivkey(privkey=''):
importprivkey = rpc_connection.importprivkey(str(privkey))
response.add_header("Access-Control-Allow-Origin", "*")
return { "importprivkey": privkey }
#gcoin-cli dumpprivkey
@route('/dumpprivkey/<address>', methon='GET')
def dumpprivkey(address=''):
dumpprivkey = rpc_connection.dumpprivkey(str(address))
response.add_header("Access-Control-Allow-Origin", "*")
return { "privkey": dumpprivkey }
#gcoin-cli gettxoutaddress
@route('/gettxoutaddress/<gettxoutaddress_address>', method='GET')
def gettxoutaddress(gettxoutaddress_address=''):
gettxoutaddress = rpc_connection.gettxoutaddress(str(gettxoutaddress_address))
response.add_header("Access-Control-Allow-Origin", "*")
return { "gettxoutaddress": gettxoutaddress }
#gcoin-cli validateaddress
@route('/validateaddress/<validateaddress_address>', method='GET')
def validateaddress(validateaddress_address=''):
validateaddress = rpc_connection.validateaddress(str(validateaddress_address))
response.add_header("Access-Control-Allow-Origin", "*")
return validateaddress
#callback_url & save to .txt
@route ('/testcallback', method='POST')
def testcallback(UpdatedData=None):
Data = request.body.read()
fileDT = datetime.now().strftime('%Y%m%d_%H%M%S')
with open(fileDT+'.txt', 'a') as f:
f.write(Data)
print Data
return Data
#
@route ('/notify/<tx_id>', method='POST')
def tagTweets(tx_id=None,UpdatedData=None):
response.content_type = 'application/json'
Data = json.load(request.body)
id = request.json['id']
tx_hash = request.json['tx_hash']
confirmation_count = request.json['confirmation_count']
callback_url = request.json['callback_url']
created_time = request.json['created_time']
return Data
run(host='0.0.0.0',port=8091,debug='true')
|
[
"hsnu109128@gmail.com"
] |
hsnu109128@gmail.com
|
f8ce478e09b28dbecf8e4a33bca3cd6def745e32
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_241/ch5_2020_03_04_20_03_29_973202.py
|
3d805dd5f639c6b24268fa29be5f1fa3398ea0f6
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628
| 2020-12-16T05:21:31
| 2020-12-16T05:21:31
| 306,735,108
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 113
|
py
|
libras_para_kg=float(input("Quantas Quilogramas: ")
print("libras:libras_para_kg*0,453592")
|
[
"you@example.com"
] |
you@example.com
|
0fdae3788745958c642dc3b1fd5772fdbe2b9965
|
4d1415832ff283f8bbb655f356af333a921d96cb
|
/jasmin/vendor/smpp/pdu/error.py
|
5f207ed06eb0aea08a81843b05b50b28ea23e6a2
|
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
DomAmato/jasmin
|
a7d437632127d67b33dd3c980b548b7639b871fa
|
a813b8be5722eb7b5d8ac3dfbd0749441ac9df01
|
refs/heads/master
| 2021-05-24T12:16:32.070490
| 2020-04-06T22:20:01
| 2020-04-06T22:20:09
| 253,556,310
| 2
| 0
|
NOASSERTION
| 2020-04-28T20:23:33
| 2020-04-06T16:34:50
|
Python
|
UTF-8
|
Python
| false
| false
| 2,742
|
py
|
"""
Copyright 2009-2010 Mozes, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either expressed or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from jasmin.vendor.smpp.pdu import constants
class SMPPError(Exception):
"""Base class for SMPP errors
"""
class SMPPClientError(SMPPError):
"""Raised for client-side errors
"""
class SMPPClientConnectionCorruptedError(SMPPClientError):
"""Raised when operations are attempted after the client has received corrupt data
"""
class SMPPClientSessionStateError(SMPPClientError):
"""Raised when illegal operations are attempted for the client's session state
"""
class SMPPTransactionError(SMPPError):
"""Raised for transaction errors
"""
def __init__(self, response, request=None):
self.response = response
self.request = request
SMPPError.__init__(self, self.getErrorStr())
def getErrorStr(self):
errCodeName = str(self.response.status)
errCodeVal = constants.command_status_name_map[errCodeName]
errCodeDesc = constants.command_status_value_map[errCodeVal]
return '%s (%s)' % (errCodeName, errCodeDesc)
class SMPPGenericNackTransactionError(SMPPTransactionError):
"""Raised for transaction errors that return generic_nack
"""
class SMPPRequestTimoutError(SMPPError):
"""Raised for timeout waiting waiting for response
"""
class SMPPSessionInitTimoutError(SMPPRequestTimoutError):
"""Raised for timeout waiting waiting for response
"""
class SMPPProtocolError(SMPPError):
"""Raised for SMPP protocol errors
"""
def __init__(self, errStr, commandStatus):
self.status = commandStatus
SMPPError.__init__(self, "%s: %s" % (self.getStatusDescription(), errStr))
def getStatusDescription(self):
intVal = constants.command_status_name_map[str(self.status)]
return constants.command_status_value_map[intVal]['description']
class SessionStateError(SMPPProtocolError):
"""Raise when illegal operations are received for the given session state
"""
class PDUParseError(SMPPProtocolError):
"""Parent class for PDU parsing errors
"""
class PDUCorruptError(PDUParseError):
"""Raised when a complete PDU cannot be read from the network
"""
|
[
"tritux.com"
] |
tritux.com
|
494c54dfc73428bb8c69d8883fe9c32c72090571
|
1d4f00932367743c48b26ebf000fab5fdec48495
|
/django_leve_five/settings.py
|
d0c8f66648eb1e3897732959b5ce6850a4b7d27b
|
[] |
no_license
|
sabrina0331/django-deployment-ex
|
d53c723349c8782aca241199a53dd1a41fbb0194
|
5bac965d1af8898994edb582e905f59fe5f7db12
|
refs/heads/master
| 2020-09-14T22:16:43.413517
| 2019-11-21T22:16:00
| 2019-11-21T22:16:00
| 223,275,128
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,769
|
py
|
"""
Django settings for django_leve_five project.
Generated by 'django-admin startproject' using Django 1.10.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
TEMPLATE_DIR = os.path.join(BASE_DIR,'templates')
STATIC_DIR = os.path.join(BASE_DIR,'static')
MEDIA_DIR = os.path.join(BASE_DIR,'media')
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '7jr*wvw*=jex8gd2cr16^-w#*#2+o(k8$rk36-!y(f1^i_*q_p'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'basic_app',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'django_leve_five.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [TEMPLATE_DIR,],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'django_leve_five.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
PASSWORD_HASHERS = [
'django.contrib.auth.hashers.PBKDF2PasswordHasher',
'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
'django.contrib.auth.hashers.Argon2PasswordHasher',
'django.contrib.auth.hashers.BCryptSHA256PasswordHasher',
'django.contrib.auth.hashers.BCryptPasswordHasher',
]
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
'OPTIONS':{'min_length':9}
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [STATIC_DIR,]
#media
MEDIA_ROOT = MEDIA_DIR
MEDIA_URL = '/media/'
#login
LOGIN_URL = '/basic_app/user_login'
|
[
"sabrina20520@gmail.com"
] |
sabrina20520@gmail.com
|
68b6abf0c1278b35d7a549089e37e1033e234db4
|
897cc05268f904f3b2faa2f08201087436ac72ab
|
/Messenger/Developers/venv/bin/django-admin
|
3a5a01fe363b45db3f74b7c2348ba83ad86f3aec
|
[] |
no_license
|
alexkim1218/CSE442-Developers
|
513a76ac794b5863bd26beb50f1bde495eae2b08
|
2e027003fcf6c7f5cb4a42fcbed0b3f9b85d2ab3
|
refs/heads/master
| 2020-05-17T10:35:04.993192
| 2018-12-03T16:57:59
| 2018-12-03T16:57:59
| 183,660,437
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 313
|
#!/Users/HuangweiDing/PycharmProjects/Developers/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from django.core.management import execute_from_command_line
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(execute_from_command_line())
|
[
"seyaduab@buffalo.edu"
] |
seyaduab@buffalo.edu
|
|
2e02246308602c745a87fe2f038ce541667b86f6
|
6739f850c08414905a95d804f85acf3c61fad956
|
/pysimt/metrics/cer.py
|
5a44a017b743bc32cf872dca9cfdafc276c0bcac
|
[
"MIT"
] |
permissive
|
welvin21/pysimt
|
940eaaec340e3942fdfdebace2a640506459c6c6
|
6250b33dc518b3195da4fc9cc8d32ba7ada958c0
|
refs/heads/master
| 2023-09-04T18:27:14.294636
| 2021-10-17T15:35:33
| 2021-10-17T15:35:33
| 418,076,867
| 0
| 0
|
MIT
| 2021-10-17T09:21:02
| 2021-10-17T09:21:01
| null |
UTF-8
|
Python
| false
| false
| 2,109
|
py
|
"""Character error rate (CER)."""
from typing import Iterable, Union, Optional
import editdistance
from .metric import Metric
class CERScorer:
"""Computes the character error rate (CER) metric and returns a `Metric`
object.
Args:
refs: List of reference text files. Only the first one will be used
hyps: Either a string denoting the hypotheses' filename, or
a list that contains the hypotheses strings themselves
language: unused
lowercase: unused
"""
def compute(self, refs: Iterable[str],
hyps: Union[str, Iterable[str]],
language: Optional[str] = None,
lowercase: bool = False) -> Metric:
if isinstance(hyps, str):
# hyps is a file
hyp_sents = open(hyps).read().strip().split('\n')
elif isinstance(hyps, list):
hyp_sents = hyps
# refs is a list, take its first item
with open(refs[0]) as f:
ref_sents = f.read().strip().split('\n')
assert len(hyp_sents) == len(ref_sents), "CER: # of sentences does not match."
n_ref_chars = 0
n_ref_tokens = 0
dist_chars = 0
dist_tokens = 0
for hyp, ref in zip(hyp_sents, ref_sents):
hyp_chars = hyp.split(' ')
ref_chars = ref.split(' ')
n_ref_chars += len(ref_chars)
dist_chars += editdistance.eval(hyp_chars, ref_chars)
# Convert char-based sentences to token-based ones
hyp_tokens = hyp.replace(' ', '').replace('<s>', ' ').strip().split(' ')
ref_tokens = ref.replace(' ', '').replace('<s>', ' ').strip().split(' ')
n_ref_tokens += len(ref_tokens)
dist_tokens += editdistance.eval(hyp_tokens, ref_tokens)
cer = (100 * dist_chars) / n_ref_chars
wer = (100 * dist_tokens) / n_ref_tokens
verbose_score = "{:.3f}% (n_errors = {}, n_ref_chars = {}, WER = {:.3f}%)".format(
cer, dist_chars, n_ref_chars, wer)
return Metric('CER', cer, verbose_score, higher_better=False)
|
[
"ozancag@gmail.com"
] |
ozancag@gmail.com
|
7a6ed2e5960052212101319e62ebc462b40124b7
|
006bf92faff4950b13b2830924f3a723972e6d45
|
/bot.py
|
227ede349eaf781f3f4d24b40b0bddfb665afd02
|
[] |
no_license
|
NotThareesh/Pybot
|
7910942a29279029a7365985f5da9fb5927e7231
|
12bdd159d8307ceb504f7f52d541542c010665e3
|
refs/heads/main
| 2023-04-24T07:29:25.121851
| 2021-05-17T16:03:49
| 2021-05-17T16:03:49
| 333,765,373
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 597
|
py
|
import discord
import os
from discord.ext import commands
intents = discord.Intents(messages=True, guilds=True, reactions=True, members=True, presences=True)
client = commands.Bot(command_prefix="!", intents=intents, help_command=None)
TOKEN = ''
@client.command()
async def load(ctx, extension):
client.load_extension(f"cogs.{extension}")
@client.command()
async def unload(ctx, extension):
client.unload_extension(f"cogs.{extension}")
for filename in os.listdir("./cogs"):
if filename.endswith(".py"):
client.load_extension(f"cogs.{filename[:-3]}")
client.run(TOKEN)
|
[
"thareesh.prabakaran@gmail.com"
] |
thareesh.prabakaran@gmail.com
|
ed46e132c7a54dfeeebb3287a3a61a345d35061c
|
3aab4e1db47f51ef99b94c7f26d3f3788b5cef3c
|
/mrtandam-ica-code/ensemble/src/eca_launch_mapreduce.py
|
6dae3f6fb8487f92e9a6fb4c482abc9c54f93fb2
|
[
"Apache-2.0"
] |
permissive
|
baimingze/project1
|
8f73bc5e3af151335c7ee97b583914423d927269
|
6c9cd57bf1d413645bd47785f2ca597d97f9df22
|
refs/heads/master
| 2021-01-18T14:33:48.211413
| 2015-07-15T20:56:58
| 2015-07-15T20:56:58
| 38,840,952
| 0
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 19,530
|
py
|
#!/opt/local/bin/python
# script for launching ensemble learning jobs in Amazon Elastic Map Reduce
# Copyright (C) 2010 Insilicos LLC All Rights Reserved
# Original Authors Jeff Howbert, Natalie Tasman, Brian Pratt
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
# general idea is to launch a framework R script which sources a configurable
# script that contains the various bits of mapreduce code
#
# expected file layout when this all runs:
# <your bucket>
# <your bucket>/<path_to_trainingDataFile>
# <your bucket>/<path_to_testDataFile>
# <your bucket>/<baseNameFromConfigFile>
# <your bucket>/<baseNameFromConfigFile>/<timestamp>/ (the "job directory")
# <your bucket>/<baseNameFromConfigFile>/<timestamp>/<configFile>
# <your bucket>/<baseNameFromConfigFile>/<timestamp>/<scriptFile>
# <your bucket>/<baseNameFromConfigFile>/<timestamp>/results/<mapReduce results file(s)>
import sys
import os.path
import boto.ec2
import boto.s3
import boto.emr
from boto.emr import BootstrapAction
from boto.ec2.regioninfo import RegionInfo
from boto.emr.step import StreamingStep
from boto.emr.connection import EmrConnection
from boto.s3.connection import S3Connection
from boto.s3.bucketlistresultset import BucketListResultSet
import eca_launch_helper as eca # functions commont to RMPI and MapReduce versions
from boto.s3.key import Key
import simplejson as json
from time import sleep
import datetime
eca.loadConfig("mapreduce") # get config as directed by commandline, mapreduce style
jobDir = eca.getCoreJobDir() # gets baseName, or umbrella name for multi-config batch job
jobDirS3 = eca.S3CompatibleString(jobDir)
syspath=os.path.dirname(sys.argv[0])
if (""==syspath) :
syspath = os.getcwd()
syspath = syspath.replace("\\","/") # tidy up any windowsy slashes
eca.setCoreConfig("mapReduceFrameworkScript", eca.getConfig( "mapReduceFrameworkScript",syspath+"/eca_mapreduce_framework.R"))
eca.setCoreConfig("frameworkSupportScript", eca.getConfig( "frameworkSupportScript",syspath+"/eca_common_framework.R"))
# are we running on AWS Elastic MapReduce? (could be a generic hadoop cluster, instead)
if ( eca.runAWS() ) :
aws_access_key_id = eca.getConfig( "aws_access_key_id" )
aws_secret_access_key = eca.getConfig( "aws_secret_access_key" )
aws_region = eca.getConfig( "aws_region" )
aws_placement = eca.getConfig( "aws_placement", required=False ) # sub-region
aws_region = RegionInfo(name=eca.getConfig( "aws_region" ),endpoint=eca.getConfig( "ec2_endpoint",'elasticmapreduce.amazonaws.com' ))
conn = boto.emr.EmrConnection(region=aws_region,aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key)
else :
conn = eca.HadoopConnection()
head_instance_type = eca.getConfig( "ec2_head_instance_type" )
client_instance_type = eca.getConfig( "ec2_client_instance_type" )
# optional: name of existing EC2 keypair for SSH to head node
ec2KeyPair = eca.getConfig( "RSAKeyName", required=False )
# prepare a list of files to be copied from S3 to where the clients can access them
if (eca.runLocal()) :
eca.setCoreConfig("sharedDir",jobDir + "/")
elif ( eca.runAWS() ) :
bucketName = eca.S3CompatibleString(eca.getConfig("s3bucketID" ),isBucketName=True) # enforce bucket naming rules
bucketURL = "s3n://"+bucketName
# directory for passing large key values in files
eca.setCoreConfig("sharedDir","/mnt/var/lib/hadoop/dfs/")
s3conn = S3Connection(aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key)
s3bucket = s3conn.create_bucket( bucketName )
k = Key(s3bucket)
else :
bucketName = 'hdfs://%s' % eca.getHadoopDir()
bucketURL = bucketName
k = eca.HadoopConnection()
# write the framework and implementation scripts to
# per-job directory as a matter of record
frameworkScriptPath = eca.getConfig( "mapReduceFrameworkScript")
frameworkSupportScriptPath = eca.getConfig( "frameworkSupportScript")
mapReduceScriptPath = eca.getConfig( "scriptFileName" )
baseName = eca.getConfig( "baseName" )
configName = '%s.cfg.r' % baseName
if ( not eca.runLocal() ) :
frameworkScriptName = eca.S3CompatibleString(os.path.basename(frameworkScriptPath))
k.key = '%s/%s' % ( jobDirS3 , frameworkScriptName )
k.set_contents_from_filename(frameworkScriptPath)
eca.makeFileExecutable(k.key)
frameworkSupportScriptName = eca.S3CompatibleString(os.path.basename(frameworkSupportScriptPath))
k.key = '%s/%s' % ( jobDirS3 , frameworkSupportScriptName )
eca.setCoreConfig( "frameworkSupportScript", frameworkSupportScriptName) # use the version without path info
k.set_contents_from_filename(frameworkSupportScriptPath)
scriptName = eca.S3CompatibleString(os.path.basename(mapReduceScriptPath))
k.key = '%s/%s' % ( jobDirS3 , scriptName )
k.set_contents_from_filename(mapReduceScriptPath)
# now we can refer to these without a path
eca.setCoreConfig( "mapReduceFrameworkScript", frameworkScriptName )
eca.setCoreConfig( "frameworkSupportScript", frameworkSupportScriptName)
eca.setCoreConfig( "scriptFileName", scriptName)
configName = os.path.basename(configName)
configCache = eca.constructCacheFileReference( bucketName , jobDirS3 , configName )
frameworkCache = eca.constructCacheFileReference( bucketName , jobDirS3 , frameworkScriptName)
scriptCache = eca.constructCacheFileReference( bucketName , jobDirS3 , scriptName )
scriptSupportCache = eca.constructCacheFileReference( bucketName , jobDirS3 , frameworkSupportScriptName )
cachefiles = [ configCache, frameworkCache, scriptCache, scriptSupportCache ]
# create a job step to copy data from S3 to HDFS
copierInputFile = '%s/copier-input-values' % jobDirS3
copierCommands = ""
# go through the config parameters, anything named "sharedFile_*" gets uploaded
# to S3 with a gzip preference
for n in range(-1,len(eca.cfgStack)) :
for cfgKey,val in eca.selectConfig(n).iteritems():
if (cfgKey.startswith("sharedFile_")):
fullLocalPath = eca.my_abspath( val ) # convert relative path to absolute
eca.setConfig( cfgKey, fullLocalPath)
# do the upload to S3
s3FileList = [(cfgKey, n, "", True)]
eca.uploadToS3(s3FileList) # side effect: after this call config speaks of data files in terms of S3
# and set up for copying S3 files out to HDFS
hdfsPath = "hdfs:///home/hadoop/"
hdfsname = hdfsPath+os.path.basename(eca.getConfig(cfgKey))
hadoopCopyCmd = "hadoop dfs -cp "
# prepare a list of copy commands to be passed out to mappers
cmd = '%s %s%s %s\n' % ( hadoopCopyCmd, bucketURL, eca.getConfig(cfgKey), hdfsname )
if not cmd in copierCommands :
copierCommands = copierCommands + cmd
eca.setConfig(cfgKey,hdfsname)
k.key = copierInputFile
k.set_contents_from_string(copierCommands)
# are we planning a spot bid instead of demand instances?
spotBid = eca.getConfig("spotBid","")
if ("" != spotBid) :
if ('%' in spotBid) : # a percentage, eg "25%" or "25%%"
spotBid = eca.calculateSpotBidAsPercentage( spotBid, client_instance_type, 0.20 ) # about 20% more for EMR instances
launchgroup = "ECA"+eca.getConfig( "baseName" ) +"_"+eca.getConfig("jobTimeStamp")
else :
launchgroup = ""
eca.setCoreConfig("launchgroup",launchgroup)
# mapper keys are random seeds
# there is only one reducer key
# mapper input file is just a list of integers 0 through (ensembleSize-1)
mapperInputFile = '%s/mapper-input-values' % jobDirS3
mapperInputs = ""
for count in range(int(eca.getConfig( "ensembleSize" ))) :
mapperInputs = mapperInputs + str(count) + "\n"
eca.saveStringToFile(mapperInputs,mapperInputFile)
# write parameters to for the record (after removing security info)
eca.scrubAndPreserveJobConfig( '%s/%s' % ( jobDirS3 , configName ) )
# and now execute
if (eca.runLocal()) :
# execute the package installer script
packageInstallerScriptText=eca.create_R_package_loader_script(eca.getConfig("scriptFileName"))
eca.setCoreConfig("packageInstaller", '%s/%s.installpackages.r' % ( jobDirS3, configName ))
eca.saveStringToFile(packageInstallerScriptText, eca.getConfig("packageInstaller"))
cmd = "Rscript " + eca.getConfig("packageInstaller")
eca.log( "run: " + cmd )
os.system(cmd)
configName = '%s/%s' % ( jobDirS3 , configName )
for n in range(0,len(eca.cfgStack)) :
eca.selectConfig(n)
resultsFilename=eca.getConfig("resultsFilename")
subCfgName = eca.getConfig("eca_uniqueName")
mapResults=resultsFilename+"."+subCfgName+".map"
redResults=resultsFilename+"."+subCfgName+".red"
mapper = "Rscript %s mapper %s %s %s" % (frameworkScriptPath,mapReduceScriptPath,configName,subCfgName)
if (resultsFilename != "") :
mapper=mapper+" 2>"+mapResults # capture logging on stderr
reducer = "Rscript %s reducer %s %s %s" % (frameworkScriptPath,mapReduceScriptPath,configName,subCfgName)
if (resultsFilename != "") :
reducer=reducer+" >"+redResults +" 2>&1" # capture logging on stderr as well as results on stdout
cmd = "cat " + mapperInputFile + " | " + mapper + " | sort | " + reducer
eca.log("run: "+ cmd)
os.system(cmd)
wait = 1
if (resultsFilename != "") :
os.system("cat "+mapResults +" >> "+resultsFilename) # combine mapper and reducer logs
os.system("cat "+redResults +" >> "+resultsFilename) # combine mapper and reducer logs
os.system("cat "+mapResults) # display mapper logs
os.system("cat "+redResults) # display reducer logs
os.system("rm "+mapResults) # delete mapper log
os.system("rm "+redResults) # delete reducer log
else :
# bootstrap actions to customize EMR image for our purposes - no need to run on master
bootstrapText = '#!/bin/bash\n'
if ("True" == eca.getConfig("update_EMR_R_install","False")) :
# get latest R (creaky old 2.7 is default on EMR)
bootstrapText = bootstrapText + '# select a random CRAN mirror\n'
bootstrapText = bootstrapText + 'mirror=$(sudo Rscript -e "m=getCRANmirrors(all = TRUE) ; m[sample(1:dim(m)[1],1),4]" | cut -d "\\"" -f 2)\n'
bootstrapText = bootstrapText + 'echo "deb ${mirror}bin/linux/debian lenny-cran/" | sudo tee -a /etc/apt/sources.list\n'
bootstrapText = bootstrapText + '# hose out old pre-2.10 R packages\n'
bootstrapText = bootstrapText + 'rpkgs="r-base r-base-dev r-recommended"\n'
bootstrapText = bootstrapText + 'sudo apt-get remove --yes --force-yes r-cran-* r-base* $rpkgs\n'
bootstrapText = bootstrapText + '# install fresh R packages\n'
bootstrapText = bootstrapText + 'sudo apt-get update\nsudo apt-get -t lenny-cran install --yes --force-yes $rpkgs\n'
# and make sure any packages mentioned in the user script are present
bootstrapText = bootstrapText + 'cat >/tmp/installPackages.R <<"EndBlock"\n'
bootstrapText = bootstrapText + eca.create_R_package_loader_script(eca.getConfig("scriptFileName"))
bootstrapText = bootstrapText + "EndBlock\nsudo Rscript /tmp/installPackages.R\nexit $?\n"
bootstrapFile = "bootstrap.sh"
eca.debug("writing AWS EMR bootstrap script to %s" % bootstrapFile)
k.key = '%s/%s' % ( jobDirS3, bootstrapFile )
k.set_contents_from_string(bootstrapText)
bootstrapActionInstallRPackages = BootstrapAction("install R packages",'s3://elasticmapreduce/bootstrap-actions/run-if', ['instance.isMaster!=true','s3://%s/%s' % (bucketName, k.key)])
copierScript = '%s copier' % ( frameworkScriptName )
mapperScript = '%s mapper %s %s' % ( frameworkScriptName , scriptName, configName )
reducerScript = '%s reducer %s %s' % ( frameworkScriptName , scriptName, configName )
# write results here
eca.log("scripts, config and logs will be written to %s/%s" % (bucketURL,jobDirS3))
# tell Hadoop to run just one reducer task, and set mapper task count in hopes of giving reducer equal resources
nodecount = int(eca.getConfig( "numberOfClientNodes", eca.getConfig( "numberOfNodes", 0 ) )) # read old style as well as new
if (nodecount < 1) :
nodecount = 1 # 0 client nodes means something in RMPI, but not Hadoop
mapTasksPerClient = int(eca.getConfig("numberOfRTasksPerClient"))
nmappers = (nodecount*mapTasksPerClient)-1 # -1 so reducer gets equal resources
stepArgs = ['-jobconf','mapred.task.timeout=1200000','-jobconf','mapred.reduce.tasks=1','-jobconf','mapred.map.tasks=%d' % nmappers]
workstepsStack = []
for n in range(0,len(eca.cfgStack)) :
worksteps = []
if ((0==n) and eca.runAWS()) :
# specify a streaming (stdio-oriented) step to copy data files from S3
copierStep = boto.emr.StreamingStep( name = '%s-copyDataFromS3toHDFS' % baseName,
mapper = '%s' % (copierScript),
reducer = 'NONE',
cache_files = cachefiles,
input = '%s/%s' % (bucketURL, copierInputFile),
output = '%s/%s/copierStepResults' % (bucketURL, jobDir),
step_args = ['-jobconf','mapred.task.timeout=1200000']) # double the std timeout for file transfer
worksteps.extend( [copierStep] )
eca.selectConfig(n)
eca.setConfig("completed",False,noPostSaveWarn=True)
subCfgName = eca.getConfig("eca_uniqueName")
eca.setConfig("resultsDir", '%s/%s' % (jobDir,subCfgName),noPostSaveWarn=True)
# specify a streaming (stdio-oriented) step
if (baseName == subCfgName) :
stepname = baseName
else :
stepname = '%s-%s' % (baseName, subCfgName)
workstep = boto.emr.StreamingStep( name = stepname,
mapper = '%s %s' % (mapperScript, subCfgName),
reducer = '%s %s' % (reducerScript, subCfgName),
cache_files = cachefiles,
input = '%s/%s' % (bucketURL, mapperInputFile),
output = '%s/%s' % (bucketURL, eca.getConfig("resultsDir")),
step_args = stepArgs)
worksteps.extend([workstep])
workstepsStack.extend([worksteps])
# and run the job
keepalive = ("True" == eca.getConfig("keepHead","False"))
if ( keepalive ) :
failure_action = 'CANCEL_AND_WAIT'
else :
failure_action = 'TERMINATE_JOB_FLOW'
if ("" != spotBid) :
from boto.emr.instance_group import InstanceGroup # spot EMR is post-2.0 stuff - 2.1rc2 is known to work
launchGroup = eca.getConfig("launchgroup")
instanceGroups = [
InstanceGroup(1, 'MASTER', head_instance_type, 'SPOT', 'master-%s' % launchGroup, spotBid),
InstanceGroup(nodecount, 'CORE', client_instance_type, 'SPOT', 'core-%s' % launchGroup, spotBid)
]
jf_id = conn.run_jobflow(name = baseName,
log_uri='s3://%s/%s' % (bucketName, jobDir),
ec2_keyname=ec2KeyPair,
action_on_failure=failure_action,
keep_alive=keepalive,
instance_groups=instanceGroups,
enable_debugging=("False"==eca.getConfig("noDebugEMR","False")),
steps=workstepsStack[0],
bootstrap_actions=[bootstrapActionInstallRPackages])
else :
jf_id = conn.run_jobflow(name = baseName,
log_uri='s3://%s/%s' % (bucketName, jobDir),
ec2_keyname=ec2KeyPair,
action_on_failure=failure_action,
keep_alive=keepalive,
master_instance_type=head_instance_type,
slave_instance_type=client_instance_type,
enable_debugging=("False"==eca.getConfig("noDebugEMR","False")),
num_instances=(nodecount+1), # +1 for master
steps=workstepsStack[0],
bootstrap_actions=[bootstrapActionInstallRPackages])
for n in range(1,len(workstepsStack)) : # adding all multi-config steps at once can overwhelm boto
conn.add_jobflow_steps(jf_id,workstepsStack[n])
wait = 10 # much less than this and AWS gets irritated and throttles you back
lastState = ""
while True:
jf = conn.describe_jobflow(jf_id)
if (lastState != jf.state) : # state change
eca.log_no_newline("cluster status: "+jf.state)
lastState = jf.state
else :
eca.log_progress() # just put a dot
for n in range(0,len(eca.cfgStack)) :
eca.selectConfig(n)
if (not eca.getConfig("completed")) :
# grab the results
concat = ""
mask = '%s/part-' % eca.getConfig("resultsDir")
eca.debug("checking %s"%mask)
if ( eca.runAWS() ) :
for part in BucketListResultSet(s3bucket, prefix=mask) :
# all results in one string
k.key = part
concat = concat + k.get_contents_as_string()
else : # hadoop
k.key = mask+"*"
concat = k.get_contents_as_string()
if (len(concat) > 0) :
eca.log("Done. Results:")
eca.log(concat)
# write to file?
resultsFilename=eca.getConfig("resultsFilename")
if (resultsFilename != "") :
f = open(resultsFilename,"w+")
f.write(concat)
f.close()
eca.log('results also written to %s' % resultsFilename)
eca.setConfig("completed",True,noPostSaveWarn=True)
lastState = '' # just to provoke reprint of state on console
if lastState == 'COMPLETED':
break
if lastState == 'FAILED':
break
if lastState == 'TERMINATED':
break
sleep(wait)
eca.log_close()
|
[
"mingze@localhost.localdomain"
] |
mingze@localhost.localdomain
|
d91951bb7e3d019589001109b06f2d1214f43ea3
|
fa4401ec3a3e6077566c5e142567172cd2232b71
|
/sg2/game1.py
|
a529c3ec3a0eaaf506b09bc868d2d0903acf9f80
|
[] |
no_license
|
wizardkeven/SeriousGameV2
|
c5eb2591a6a158aec52b641b7885d31cf53ae73c
|
deae6e8c6b70487ff66c9c1e4d61321ed33f450d
|
refs/heads/master
| 2021-01-19T11:05:22.818776
| 2015-04-07T23:05:18
| 2015-04-07T23:05:18
| 29,533,780
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,832
|
py
|
from kivy.app import App
from kivy.uix.widget import Widget
from kivy.clock import Clock
from kivy.core.audio import SoundLoader
import time
class Object(Widget):
    """A draggable on-screen widget for the mini-game.

    Dragging follows the finger via Kivy's touch-grab protocol; a double
    tap plays the sound file whose path is stored in ``self.text``
    (presumably set in the accompanying .kv file — TODO confirm).
    """

    def on_touch_move(self, touch):
        """Follow the finger while this widget has grabbed the touch.

        :param touch: finger on the screen
        """
        if touch.grab_current is self:
            self.center_x = touch.x
            self.center_y = touch.y

    def on_touch_down(self, touch):
        """Start a drag, or play the widget's sound on a double tap.

        :param touch: finger on the screen
        :return: True when the touch is consumed by this widget
        """
        if self.collide_point(*touch.pos):
            if touch.is_double_tap:
                sound = SoundLoader.load(self.text)
                # SoundLoader.load returns None when the file cannot be
                # loaded; guard so a bad path does not crash the app.
                if sound is not None:
                    sound.play()
                return
            # dim while dragging so the user sees the grab took effect
            self.opacity = 0.2
            touch.grab(self)
            return True

    def on_touch_up(self, touch):
        """Drop the widget at the release point and end the drag.

        :param touch: finger on the screen
        """
        if touch.grab_current is self:
            self.center_x = touch.x
            self.center_y = touch.y
            self.opacity = 1
            touch.ungrab(self)
            return True
class Game1(Widget):
    """Root widget driving the first mini-game.

    Both hooks are currently empty placeholders; game logic is expected
    to be filled in later (or supplied by the .kv layer).
    """
    def update(self, dt):
        """Per-frame game-loop hook, scheduled at 60 FPS by Game1App.

        :param dt: seconds elapsed since the previous call
        """
        pass
    def on_winning(self, touch):
        # Placeholder for the win-condition handler; never scheduled here.
        pass
class Game1App(App):
    """Kivy application wrapper that boots the first mini-game."""

    def build(self):
        """Create the root widget and start its 60 FPS update loop.

        :return: the Game1 root widget
        """
        root = Game1()
        Clock.schedule_interval(root.update, 1.0 / 60.0)
        return root
if __name__ == '__main__':
Game1App().run()
|
[
"wizardkeven@live.com"
] |
wizardkeven@live.com
|
f0058d3d6a1df1097582e384bb22a5d06725cbb7
|
de24f83a5e3768a2638ebcf13cbe717e75740168
|
/moodledata/vpl_data/467/usersdata/282/111931/submittedfiles/Av2_Parte2.py
|
f4d68cd5e055d09012f4f459c82a6e8816d004ca
|
[] |
no_license
|
rafaelperazzo/programacao-web
|
95643423a35c44613b0f64bed05bd34780fe2436
|
170dd5440afb9ee68a973f3de13a99aa4c735d79
|
refs/heads/master
| 2021-01-12T14:06:25.773146
| 2017-12-22T16:05:45
| 2017-12-22T16:05:45
| 69,566,344
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 141
|
py
|
# -*- coding: utf-8 -*-
# Incomplete coursework submission: reads the intended sizes of lists a
# and b from stdin (prompts are in Portuguese); the loops that would
# actually fill the lists are missing.
a=[]
qa=int(input('Digite o numero de elementos de a: '))
b=[]
qb=int(input('Digite o numero de elementos de b: '))
|
[
"rafael.mota@ufca.edu.br"
] |
rafael.mota@ufca.edu.br
|
8c82f3f4edeb1917ad66c06c4ad474a27f497bc4
|
58cb93acfdb489dc5b7b75773a4f554e3da554ee
|
/waifuforsale/settings.py
|
13205387591b2a50dd7128af1819038163f57c26
|
[] |
no_license
|
Rlexsu/waifusite
|
7c3949267e1be8c612abccffbbc4a7b136346581
|
1fe3f8a0df926b1fa7d3180205734e47a885f5ce
|
refs/heads/master
| 2021-01-20T14:28:35.661253
| 2017-05-09T04:36:22
| 2017-05-09T04:36:22
| 90,613,462
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,290
|
py
|
"""
Django settings for waifuforsale project.
Generated by 'django-admin startproject' using Django 1.11.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'ra7yo28k77uwzy8p^wfty=z^t2_jnvqx3t8$@8#azi&u=mqe@a'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'waifusite',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'waifuforsale.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR,'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'waifuforsale.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'mydb',
'USER': 'osboxes',
'PASSWORD': '911009',
'HOST': 'localhost',
'PORT': '69834',
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Asia/Kuala_Lumpur'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'static'),
]
|
[
"alex.teoh.91@gmail.com"
] |
alex.teoh.91@gmail.com
|
4648638b49ed86db25fc2b2d037d25916a5b6282
|
92f057d40a83332983279d1b3902593e0540e2f5
|
/competitions/migrations/0001_initial.py
|
714aba3b90137f528d73c98c1873d51b9b5b796b
|
[] |
no_license
|
frmblanco/my-project
|
1a8e3e47d047f5283ebe4268a359b2e1499f184d
|
2e658b2c7060ae0ea75e351033248c04b5b3cebd
|
refs/heads/master
| 2021-07-20T08:43:31.759548
| 2017-10-24T08:24:24
| 2017-10-24T08:24:24
| 107,520,831
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 678
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-10-19 08:38
# Auto-generated initial migration; do not edit applied migrations —
# schema changes belong in a new migration file.
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):
    """Initial schema: creates the Competition table (title, text, pub_date)."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Competition',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=200)),
                ('text', models.TextField()),
                # pub_date is optional: unpublished competitions leave it NULL
                ('pub_date', models.DateTimeField(blank=True, null=True)),
            ],
        ),
    ]
|
[
"francisco.blanco@opinno.com"
] |
francisco.blanco@opinno.com
|
a381417460e3b250868a66c482b42631651416ad
|
52e5cd27efb30cd3466e3937fe57ce560f5d56b6
|
/5/5-6/main.py
|
e76c228d88609be9d06a8a0debd4d00027f75c5d
|
[] |
no_license
|
justice3120/opencv_sample
|
25afc2b0877775793f2ae91199f7a642491f28d6
|
7720bd6331758a66d91e46c62cd2114cf194a9f8
|
refs/heads/master
| 2021-01-18T19:30:26.145170
| 2016-11-24T04:16:04
| 2016-11-24T04:16:04
| 72,085,288
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,791
|
py
|
from PIL import Image
import sys
from graph import build_graph, segment_graph
from smooth_filter import gaussian_grid, filter_image
from random import random
from numpy import sqrt
import numpy as np
import cv2
def diff_rgb(img, x1, y1, x2, y2):
    """Euclidean distance between two pixels of an RGB image.

    img is a (red, green, blue) triple of 2-D pixel accessors, each
    indexed as channel[x, y].
    """
    total = sum(
        (channel[x1, y1] - channel[x2, y2]) ** 2
        for channel in (img[0], img[1], img[2])
    )
    return sqrt(total)
def diff_grey(img, x1, y1, x2, y2):
    """Absolute intensity difference between two greyscale pixels."""
    delta = img[x1, y1] - img[x2, y2]
    return sqrt(delta ** 2)
def threshold(size, const):
    """Initial merge threshold for a component of the given size.

    Larger components demand a smaller threshold (const / size).
    """
    return const / size
def generate_image(forest, width, height):
    """Render the segmentation: one random RGB colour per component.

    NOTE: Python 2 code (xrange); depends on PIL's Image module.
    forest.find(i) maps flat pixel index i to its component id.
    """
    random_color = lambda: (int(random()*255), int(random()*255), int(random()*255))
    # one colour slot per pixel index; only component-root indices matter
    colors = [random_color() for i in xrange(width*height)]
    img = Image.new('RGB', (width, height))
    im = img.load()
    for y in xrange(height):
        for x in xrange(width):
            comp = forest.find(y * width + x)
            im[x, y] = colors[comp]
    # pixels were written transposed; rotate/flip back to the original orientation
    return img.transpose(Image.ROTATE_270).transpose(Image.FLIP_LEFT_RIGHT)
if __name__ == '__main__':
    # usage: python main.py sigma neighborhood K min_comp_size input_file
    if len(sys.argv) != 6:
        print 'Invalid number of arguments passed.'
        print 'Correct usage: python main.py sigma neighborhood K min_comp_size input_file'
    else:
        neighbor = int(sys.argv[2])
        # only 4- and 8-connectivity are supported; anything else falls back to 4
        if neighbor != 4 and neighbor!= 8:
            print 'Invalid neighborhood choosed. The acceptable values are 4 or 8.'
            print 'Segmenting with 4-neighborhood...'
        tmp_img = cv2.imread(sys.argv[5])
        h, w = tmp_img.shape[:2]
        # downscale by 4 to keep the pixel graph small
        tmp_img = cv2.resize(tmp_img, (int(w / 4), int(h / 4)))
        image_file = Image.fromarray(tmp_img, "RGB")
        sigma = float(sys.argv[1])
        K = float(sys.argv[3])
        min_size = int(sys.argv[4])
        size = image_file.size
        print 'Image info: ', image_file.format, size, image_file.mode
        # Gaussian smoothing before graph construction
        grid = gaussian_grid(sigma)
        if image_file.mode == 'RGB':
            image_file.load()
            r, g, b = image_file.split()
            r = filter_image(r, grid)
            g = filter_image(g, grid)
            b = filter_image(b, grid)
            smooth = (r, g, b)
            diff = diff_rgb
        else:
            # non-RGB images are treated as greyscale
            smooth = filter_image(image_file, grid)
            diff = diff_grey
        # Felzenszwalb-style graph segmentation over the smoothed image
        graph = build_graph(smooth, size[1], size[0], diff, neighbor == 8)
        forest = segment_graph(graph, size[0]*size[1], K, min_size, threshold)
        image = generate_image(forest, size[1], size[0])
        print 'Number of components: %d' % forest.num_sets
        cv2.namedWindow('Result')
        cv2.imshow('Result', np.array(image))
        cv2.namedWindow('Canny')
        cv2.imshow('Canny', cv2.Canny(np.array(image), 100, 10))
        cv2.waitKey(0)
        cv2.destroyAllWindows()
|
[
"masayoshi.sakamoto@dena.com"
] |
masayoshi.sakamoto@dena.com
|
85f779834d336ce10a4a871e05d86f674c49d738
|
6c51f665a21bde3e2c10f068e8a741a62c8ec9e2
|
/nexfil.py
|
6f916d6cf5b042a2764a67c68a19293e6c80f676
|
[
"MIT"
] |
permissive
|
ActorExpose/nexfil
|
0676bc9b7c719fa98280921359f21e63bb3fa9d5
|
7630b8ee9f7b626c2dbb0f58247afa4c70bbddd4
|
refs/heads/main
| 2023-05-27T06:53:29.801617
| 2021-05-22T21:59:43
| 2021-05-22T21:59:43
| 369,937,090
| 1
| 0
|
MIT
| 2021-05-23T01:23:10
| 2021-05-23T01:23:10
| null |
UTF-8
|
Python
| false
| false
| 13,136
|
py
|
#!/usr/bin/env python3
version = '1.0.0'
R = '\033[31m' # red
G = '\033[32m' # green
C = '\033[36m' # cyan
W = '\033[0m' # white
Y = '\033[33m' # yellow
import argparse
parser = argparse.ArgumentParser(description=f'nexfil - Find social media profiles on the web | v{version}')
parser.add_argument('-u', help='Specify username', type=str)
parser.add_argument('-d', help='Specify DNS Servers [Default : 1.1.1.1]', type=str, nargs='+')
parser.add_argument('-f', help='Specify a file containing username list', type=str)
parser.add_argument('-l', help='Specify multiple comma separated usernames', type=str)
parser.add_argument('-t', help='Specify timeout [Default : 20]', type=int)
parser.add_argument('-v', help='Prints version', action='store_true')
parser.set_defaults(
d=['1.1.1.1'],
t=20,
v=False
)
args = parser.parse_args()
uname = args.u
dns = args.d
ulist = args.l
fname = args.f
tout = args.t
vers = args.v
if vers == True:
print(dns, type(dns))
print(uname, type(uname))
print(version)
exit()
if uname == None and ulist == None and fname == None:
print(f'{R}[-] {C}Please provide {Y}one {C}of the following : \n\t{C}* {Y}username [-u]\n\t{C}* {Y}comma separated usernames [-l]\n\t{C}* {Y}file containing list of usernames [-f]{W}')
exit()
if uname != None:
mode = 'single'
if len(uname) > 0:
if uname.isspace():
print(f'{R}[-] {C}Username Missing!{W}')
exit()
else:
pass
else:
print(f'{R}[-] {C}Username Missing!{W}')
exit()
elif fname != None:
mode = 'file'
elif ulist != None:
mode = 'list'
tmp = ulist
if ',' not in tmp:
print(f'{R}[-] {C}Invalid Format!{W}')
exit()
else:
ulist = tmp.split(',')
else:
pass
print(f'{G}[+] {C}Importing Modules...{W}')
import socket
import asyncio
import aiohttp
import tldextract
from json import loads
from datetime import datetime
from requests import get, exceptions
from os import getenv, path, makedirs
gh_version = ''
twitter_url = ''
discord_url = ''
found = []
codes = [200, 301, 302, 403, 405, 410, 418, 500]
home = getenv('HOME')
loc_data = home + '/.local/share/nexfil/dumps/'
def fetch_meta():
global gh_version, twitter_url, discord_url
try:
rqst = get('https://raw.githubusercontent.com/thewhiteh4t/nexfil/master/metadata.json', timeout=5)
sc = rqst.status_code
if sc == 200:
metadata = rqst.text
json_data = loads(metadata)
gh_version = json_data['version']
twitter_url = json_data['twitter']
discord_url = json_data['discord']
else:
with open('metadata.json', 'r') as metadata:
json_data = loads(metadata.read())
gh_version = json_data['version']
twitter_url = json_data['twitter']
discord_url = json_data['discord']
except Exception as exc:
print(f'\n{R}[-] {C}Exception : {W}{str(exc)}')
with open('metadata.json', 'r') as metadata:
json_data = loads(metadata.read())
gh_version = json_data['version']
twitter_url = json_data['twitter']
discord_url = json_data['discord']
def banner():
    """Print the ASCII banner, author links and tool version.

    Reads the module globals twitter_url / discord_url, which are
    populated by fetch_meta() — call that first.
    """
    # local name intentionally shadows the function; left as-is
    banner = r'''
 __ _ _____ _ _ _____ _____ _
 | \ | |____ \___/ |____ | |
 | \_| |____ _/ \_ | __|__ |_____'''
    print(f'{G}{banner}{W}\n')
    print(f'{G}[>] {C}Created By : {W}thewhiteh4t')
    print(f'{G} |---> {C}Twitter : {W}{twitter_url}')
    print(f'{G} |---> {C}Discord : {W}{discord_url}')
    print(f'{G}[>] {C}Version : {W}{version}\n')
async def clout(url):
global found
found.append(url)
url = str(url)
ext = tldextract.extract(url)
dom = str(ext.domain)
suf = str(ext.suffix)
orig = f'{dom}.{suf}'
cl_dom = f'{Y}{dom}.{suf}{C}'
url = url.replace(orig, cl_dom)
print(f'{G}[+] {C}{url}{W}')
async def query(session, url, test, data, uname):
try:
if test == 'method':
await test_method(session, url)
elif test == 'string':
await test_string(session, url, data)
elif test == 'redirect':
await test_redirect(session, url)
elif test == 'api':
data = data.format(uname)
await test_api(session, url, data)
elif test == 'alt':
data = data.format(uname)
await test_alt(session, url, data)
else:
response = await session.head(url, allow_redirects=True)
if response.status in codes:
if test == None:
await clout(response.url)
elif test == 'url':
await test_url(response.url)
elif test == 'subdomain':
await test_sub(url, response.url)
else:
pass
elif response.status == 404 and test == 'method':
await test_method(session, url)
elif response.status != 404:
print(f'{R}[-] {Y}[{url}] {W}[{response.status}]')
else:
pass
except asyncio.exceptions.TimeoutError:
print(f'{Y}[!] Timeout :{C} {url}{W}')
except Exception as exc:
print(f'{Y}[!] Exception [query] [{url}] :{W} {str(exc)}')
async def test_method(session, url):
try:
response = await session.get(url, allow_redirects=True)
if response.status != 404:
await clout(response.url)
else:
pass
except asyncio.exceptions.TimeoutError:
print(f'{Y}[!] Timeout :{C} {url}{W}')
except Exception as exc:
print(f'{Y}[!] Exception [test_method] [{url}] :{W} {exc}')
return
async def test_url(url):
    """Report a hit only when the final URL differs from the site root.

    Heuristic: if the request for a profile page was redirected back to
    the site's landing page, the profile does not exist. The three
    branches below compare url and base_url while tolerating a
    trailing-slash mismatch between the two.
    """
    url = str(url)
    proto = url.split('://')[0]
    ext = tldextract.extract(url)
    subd = ext.subdomain
    # rebuild the site root, keeping any subdomain
    if subd != '':
        base_url = proto + '://' + subd + '.' + ext.registered_domain
    else:
        base_url = proto + '://' + ext.registered_domain
    # NOTE(review): base_url is built without a trailing slash, so the
    # first branch's base_url.endswith('/') looks unreachable — confirm.
    if url.endswith('/') == False and base_url.endswith('/') == True:
        if url + '/' != base_url:
            await clout(url)
        else:
            pass
    elif url.endswith('/') == True and base_url.endswith('/') == False:
        if url != base_url + '/':
            await clout(url)
        else:
            pass
    elif url != base_url:
        await clout(url)
    else:
        pass
async def test_sub(url, resp_url):
if url == str(resp_url):
await clout(url)
else:
pass
async def test_string(session, url, data):
try:
response = await session.get(url)
if response.status == 404:
pass
elif response.status not in codes:
print(f'{R}[-] {Y}[{url}] {W}[{response.status}]')
else:
resp_body = await response.text()
if data in resp_body:
pass
else:
await clout(response.url)
except asyncio.exceptions.TimeoutError:
print(f'{Y}[!] Timeout :{C} {url}{W}')
return
except Exception as exc:
print(f'{Y}[!] Exception [test_string] [{url}] :{W} {exc}')
return
async def test_api(session, url, endpoint):
try:
response = await session.get(endpoint)
if response.status != 404:
resp_body = loads(await response.text())
if len(resp_body) != 0:
tmp_vars = ['results', 'users', 'username']
for var in tmp_vars:
try:
if resp_body.get(var) != None:
if len(resp_body[var]) != 0:
await clout(url)
return
else:
pass
else:
pass
except:
pass
else:
pass
else:
pass
except Exception as exc:
print(f'{Y}[!] Exception [test_api] [{url}] :{W} {exc}')
return
async def test_alt(session, url, alt_url):
try:
response = await session.get(alt_url, allow_redirects=False)
if response.status != 200:
pass
else:
await clout(url)
except Exception as exc:
print(f'{Y}[!] Exception [test_alt] [{url}] :{W} {str(exc)}')
return
async def test_redirect(session, url):
try:
response = await session.head(url, allow_redirects=False)
except asyncio.exceptions.TimeoutError:
print(f'{Y}[!] Timeout :{C} {url}{W}')
return
except Exception as exc:
print(f'{Y}[!] Exception [test_redirect] [{url}] :{W} {str(exc)}')
return
try:
location = response.headers['Location']
if url != location:
pass
else:
await clout(url)
except KeyError:
await clout(url)
def autosave(uname, ulist, mode, found, start_time, end_time):
if not path.exists(loc_data):
makedirs(loc_data)
else:
pass
if mode == 'single':
filename = f'{uname}_{str(int(datetime.now().timestamp()))}.txt'
username = uname
elif mode == 'list' or mode == 'file':
filename = f'session_{str(int(datetime.now().timestamp()))}.txt'
username = ulist
else:
pass
with open(loc_data + filename, 'w') as outfile:
outfile.write(f'nexfil v{version}\n')
outfile.write(f'Username : {username}\n')
outfile.write(f'Start Time : {start_time.strftime("%c")}\n')
outfile.write(f'End Time : {end_time.strftime("%c")}\n')
outfile.write(f'Total Profiles Found : {len(found)}\n\n')
outfile.write(f'URLs : \n\n')
for url in found:
outfile.write(f'{url}\n')
outfile.write(f'{"-" * 40}\n')
print(f'{G}[+] {C}Saved : {W}{loc_data + filename}')
async def main(uname):
tasks = []
print(f'\n{G}[+] {C}Target :{W} {uname}\n')
headers = {
'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:88.0) Gecko/20100101 Firefox/88.0'
}
resolver = aiohttp.AsyncResolver(nameservers=dns)
timeout = aiohttp.ClientTimeout(total=tout)
conn = aiohttp.TCPConnector(
limit=0,
family=socket.AF_INET,
ssl=False,
resolver=resolver
)
print(f'{Y}[!] Finding Profiles...{W}\n')
async with aiohttp.ClientSession(connector=conn, headers=headers, timeout=timeout) as session:
for block in urls_json:
curr_url = block['url'].format(uname)
test = block['test']
data = block['data']
task = asyncio.create_task(query(session, curr_url, test, data, uname))
tasks.append(task)
await asyncio.gather(*tasks)
def netcheck():
    """Abort the program early when there is no internet connectivity.

    Probes github.com with a 5 s timeout. A connection error exits the
    process; an unexpected HTTP status is only reported, not fatal.
    """
    print(f'\n{G}[+] {C}Checking Connectivity...{W}')
    try:
        rqst = get('https://github.com/', timeout=5)
        if rqst.status_code == 200:
            pass
        else:
            print(f'{Y}[!] {C}Status : {W}{rqst.status_code}')
    except exceptions.ConnectionError:
        print(f'{R}[-] {C}Connection Error! Exiting.{W}')
        exit()
def launch(uname):
    """Run the async profile scan for one username on a fresh event loop.

    A new loop per username lets the list/file modes call this
    repeatedly without reusing an already-closed loop.
    """
    loop = asyncio.new_event_loop()
    loop.run_until_complete(main(uname))
    # zero-length sleep gives pending callbacks (connector cleanup) a
    # chance to run before the loop is closed
    loop.run_until_complete(asyncio.sleep(0))
    loop.close()
try:
netcheck()
fetch_meta()
banner()
print(f'{Y}[!] Loading URLs...{W}')
with open('url_store.json', 'r') as url_store:
raw_data = url_store.read()
urls_json = loads(raw_data)
print(f'{G}[+] {W}{len(urls_json)} {C}URLs Loaded!{W}')
print(f'{G}[+] {C}Timeout : {W}{tout} secs')
print(f'{G}[+] {C}DNS Servers : {W}{dns}')
start_time = datetime.now()
if mode == 'single':
launch(uname)
elif mode == 'list':
for uname in ulist:
ulist[ulist.index(uname)] = uname.strip()
launch(uname)
elif mode == 'file':
ulist = []
try:
with open(fname, 'r') as wdlist:
tmp = wdlist.readlines()
for user in tmp:
ulist.append(user.strip())
for uname in ulist:
uname = uname.strip()
launch(uname)
except Exception as exc:
print(f'{Y}[!] Exception [file] :{W} {str(exc)}')
exit()
else:
pass
end_time = datetime.now()
delta = end_time - start_time
if mode == 'single':
print(f'\n{G}[+] {C}Lookup for {Y}{uname} {C}completed in {W}{delta}')
print(f'\n{G}[+] {Y}{len(found)} {C}Possible Profiles Found for {Y}{uname}{W}')
elif mode == 'list' or mode == 'file':
print(f'\n{G}[+] {C}Lookup for {Y}{ulist} {C}completed in {W}{delta}')
print(f'\n{G}[+] {Y}{len(found)} {C}Possible Profiles Found for {Y}{ulist}{W}')
if len(found) != 0:
autosave(uname, ulist, mode, found, start_time, end_time)
else:
pass
except KeyboardInterrupt:
print(f'{R}[-] {C}Keyboard Interrupt.{W}')
exit()
|
[
"lohityapushkar@gmail.com"
] |
lohityapushkar@gmail.com
|
2dc3ec4af49c857ff67a051334b7be5cbb9dd6ba
|
927b50cdaf1c384c8bbf6f13816d0ba465852fd8
|
/main/migrations/0002_auto_20201128_0813.py
|
f86867def1d360053603e5adf8c185ee104522d0
|
[
"MIT"
] |
permissive
|
jhabarsingh/DOCMED
|
f37d336483cffd874b0a7db43677c08a47bd639c
|
8a831886d3dd415020699491687fb73893e674c5
|
refs/heads/main
| 2023-04-26T06:45:10.409633
| 2021-05-19T14:37:53
| 2021-05-19T14:37:53
| 316,683,855
| 3
| 5
|
MIT
| 2021-02-21T13:32:33
| 2020-11-28T07:51:22
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 1,061
|
py
|
# Generated by Django 2.0 on 2020-11-28 08:13
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='doctor',
name='blood_group',
field=models.CharField(blank=True, choices=[('A+', 'A+ Type'), ('A-', 'A- Type'), ('B+', 'B+ Type'), ('B-', 'B- Type'), ('AB+', 'AB+ Type'), ('AB+', 'AB- Type'), ('O+', 'O+ Type'), ('O-', 'O- Type')], max_length=3, null=True),
),
migrations.AlterField(
model_name='doctor',
name='is_working',
field=models.NullBooleanField(),
),
migrations.AlterField(
model_name='patient',
name='blood_group',
field=models.CharField(blank=True, choices=[('A+', 'A+ Type'), ('A-', 'A- Type'), ('B+', 'B+ Type'), ('B-', 'B- Type'), ('AB+', 'AB+ Type'), ('AB+', 'AB- Type'), ('O+', 'O+ Type'), ('O-', 'O- Type')], max_length=4, null=True),
),
]
|
[
"jhabarsinghbhati23@gmail.com"
] |
jhabarsinghbhati23@gmail.com
|
ae4cc4603c1a22255a40e5fe082d194ec34583d6
|
23c8baaec2c190c1e9c818485a720f7e2c77a79f
|
/Array/334.py
|
7afbcb861e5c5223e7d7b99ca1fda015325dfd55
|
[] |
no_license
|
ViVaHa/Leetcode
|
ca5bc3f157b7802b68d47fe001b0adaa605eb792
|
9e4d39576633a83a5e647cfa3ea62ce4e840e642
|
refs/heads/master
| 2020-03-22T20:25:31.755179
| 2018-11-22T17:52:35
| 2018-11-22T17:52:35
| 140,598,785
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 423
|
py
|
class Solution:
    def increasingTriplet(self, nums):
        """Return True if nums holds indices i < j < k with
        nums[i] < nums[j] < nums[k].

        Greedy O(n) time / O(1) space: track the smallest value seen so
        far (m1) and the smallest value that has something smaller
        before it (m2); any value exceeding both completes a triplet.

        :type nums: List[int]
        :rtype: bool
        """
        if not nums:
            return False
        # float('inf') instead of sys.maxsize: the original referenced
        # sys without importing it (NameError at runtime), and inf also
        # covers values larger than maxsize.
        m1 = float('inf')
        m2 = float('inf')
        for num in nums:
            if num <= m1:
                m1 = num
            elif num <= m2:
                m2 = num
            else:
                # num > m2 > m1, all seen in index order
                return True
        return False
|
[
"varshath@Vishnus-MBP.fios-router.home"
] |
varshath@Vishnus-MBP.fios-router.home
|
2c3a1f89467d4632951f38ea7f7fe18fe7d23094
|
286f95d4a0b2b0e082a92a4361ad3f6513705943
|
/functional-programming/map.py
|
dce4c4e09c2de4b1e4bfb2e6fa4afc63aef5ce02
|
[] |
no_license
|
flashjaysan/python-projects
|
5aafffba313d308def867b33e4ebb95152672780
|
a95b24a083d50f376a808d846f42751622ac2274
|
refs/heads/main
| 2023-06-11T17:42:49.205624
| 2021-06-22T16:04:40
| 2021-06-22T16:04:40
| 362,723,330
| 0
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,039
|
py
|
import collections

# Immutable record describing one scientist.
Scientist = collections.namedtuple('Scientist', ['name', 'field', 'born', 'nobel'])

scientists = (
    Scientist(name='Ada Lovelace', field='math', born=1815, nobel=False),
    Scientist(name='Emmy Noether', field='math', born=1882, nobel=False),
    Scientist(name='Marie Curie', field='physics', born=1867, nobel=True),
    Scientist(name='Tu Youyou', field='chemistry', born=1930, nobel=True),
    Scientist(name='Ada Yonath', field='chemistry', born=1939, nobel=True),
    Scientist(name='Vera Rubin', field='astronomy', born=1928, nobel=False),
    Scientist(name='Sally Ride', field='physics', born=1951, nobel=False),
)


def _name_and_age(scientist):
    # Project a Scientist record onto a (name, age-in-2021) pair.
    return (scientist.name, 2021 - scientist.born)


# map() builds a lazy iterator that applies the function to each element.
scientists_name_and_age = map(_name_and_age, scientists)

# Consume the iterator, printing each generated pair.
for scientist in scientists_name_and_age:
    print(scientist)

# The iterator is exhausted now: this next() deliberately raises StopIteration.
print(next(scientists_name_and_age))
|
[
"jaysan@wanadoo.fr"
] |
jaysan@wanadoo.fr
|
1c4458211f04b61d65360a91f24938a79f071603
|
ac5e52a3fc52dde58d208746cddabef2e378119e
|
/exps-gsn-edf/gsn-edf_ut=2.0_rd=0.5_rw=0.04_rn=4_u=0.075-0.35_p=harmonic-2/sched=RUN_trial=43/params.py
|
0c4e8626f8dbb25ae1a1eaa2f0fe59307cd289fe
|
[] |
no_license
|
ricardobtxr/experiment-scripts
|
1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1
|
7bcebff7ac2f2822423f211f1162cd017a18babb
|
refs/heads/master
| 2023-04-09T02:37:41.466794
| 2021-04-25T03:27:16
| 2021-04-25T03:27:16
| 358,926,457
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 254
|
py
|
# Parameters for one GSN-EDF schedulability trial (bare dict literal;
# presumably read back with eval/literal parsing by the experiment
# harness -- confirm the harness tolerates comments before relying on them).
{'cpus': 4,
 'duration': 30,
 'final_util': '2.017881',
 'max_util': '2.0',
 'periods': 'harmonic-2',
 'release_master': False,
 'res_distr': '0.5',
 'res_nmb': '4',
 'res_weight': '0.04',
 'scheduler': 'GSN-EDF',
 'trial': 43,
 'utils': 'uni-medium-3'}
|
[
"ricardo.btxr@gmail.com"
] |
ricardo.btxr@gmail.com
|
487f7f1a66033de858a171cdecf6bc646c75145e
|
35ec90ff2b7c22cd67ff5ef35381f8beb49c5ce4
|
/venv/lib/python2.7/abc.py
|
f85089e6de1967d796cf1235936822db08ee4e82
|
[] |
no_license
|
xiawei918/driplet
|
518ad11290b0d2a9b2f4c96bbab4a1ac38edb724
|
46329631cf82ccf404278ba7d73cf73497486234
|
refs/heads/master
| 2023-06-21T11:29:52.006897
| 2023-06-17T04:42:52
| 2023-06-17T04:42:52
| 178,310,764
| 0
| 1
| null | 2019-03-29T14:54:03
| 2019-03-29T01:38:59
|
Python
|
UTF-8
|
Python
| false
| false
| 43
|
py
|
/Users/weixia/anaconda/lib/python2.7/abc.py
|
[
"weixia@WEIs-MacBook-Pro.local"
] |
weixia@WEIs-MacBook-Pro.local
|
68038ebf4d1e402773f4dae40fcac9450c4c51bb
|
ce63cd114213a8debe7874d7904379cedc51ab65
|
/Utils/tflite_schema/CustomOptionsFormat.py
|
fbaad184b926a00a7bd4e4124a81aa07d8e0f60b
|
[
"Apache-2.0"
] |
permissive
|
sandeepkadiyam/HumanActivityRecognition-in-Bangle.js
|
4a901fa709e26f48813b17cc4ec31dfa13019e91
|
a174c080d9541d76f177b8655cd033be1d013ec1
|
refs/heads/main
| 2023-07-05T00:34:55.331509
| 2021-09-03T07:24:52
| 2021-09-03T07:24:52
| 389,747,734
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 154
|
py
|
# automatically generated by the FlatBuffers compiler, do not modify
# namespace: tflite


class CustomOptionsFormat(object):
    """Enum of encodings for an operator's custom_options payload."""

    # FlexBuffers is the only format defined by the schema.
    FLEXBUFFERS = 0
[
"noreply@github.com"
] |
noreply@github.com
|
f02dec514ce9a82d6c5ff24abbd45981bfda669f
|
cd7722c0c75513cc896320a73a2eac960f2f340d
|
/custom_components/mosenergosbyt/sensor.py
|
a90662d061be73c2d389713bea3d706f22ebf75e
|
[] |
no_license
|
kkuryshev/ha_mosenergosbyt
|
faeab92571b45f8c1a7c531375e8ebac592eacc6
|
dfd9e35520e956ae0651b1e922ff91e0702d01eb
|
refs/heads/master
| 2022-09-13T04:05:32.377587
| 2020-05-31T22:10:59
| 2020-05-31T22:10:59
| 268,356,051
| 3
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,896
|
py
|
"""Platform for sensor integration."""
from homeassistant.helpers.entity import Entity
from .const import DOMAIN
import logging
from homeassistant.const import CONF_NAME
from datetime import datetime
_LOGGER = logging.getLogger(__name__)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Create one MosenergoSensor per meter found in discovery_info."""
    if discovery_info is None:
        return
    client = hass.data[DOMAIN]
    meters = discovery_info.items()
    if not meters:
        return
    # One sensor per discovered meter; the dict key is the meter id.
    entities = [MosenergoSensor(client, meter_id) for meter_id, _ in meters]
    _LOGGER.debug(f'Счетчики мосэнергосбыт добавлены {entities}')
    async_add_entities(entities, update_before_add=True)
class MosenergoSensor(Entity):
    """Home Assistant sensor exposing one Mosenergosbyt meter/account."""
    def __init__(self, client, meter_id):
        """Initialize the sensor.

        client -- portal API client taken from hass.data[DOMAIN]; must
                  provide an async fetch_data() coroutine.
        meter_id -- account number (matched against nn_ls); also used as
                    the entity name.
        """
        self.client = client
        self._device_class = 'power'
        # NOTE(review): _unit is 'kw' but unit_of_measurement below returns
        # the Cyrillic 'кв' -- confirm which unit string is intended.
        self._unit = 'kw'
        self._icon = 'mdi:speedometer'
        self._available = True
        self._name = meter_id
        # Last fetched meter record; stays None until the first update.
        self._state = None
        self.meter_id = meter_id
        # Timestamp of the last successful refresh (set in async_fetch_state).
        self.update_time = None
    @property
    def name(self):
        """Return the name of the sensor (the meter/account id)."""
        return self._name
    @property
    def state(self):
        """Return the state of the sensor.

        Implicitly returns None until the first successful update.
        """
        if self._state:
            return self._state.last_measure.nm_status
    @property
    def unit_of_measurement(self):
        """Return the unit of measurement."""
        return 'кв'
    @property
    def unique_id(self) -> str:
        """Return a unique identifier for this entity."""
        return f"mosenergosbyt_{self.name}"
    @property
    def device_state_attributes(self):
        # Expose the raw account and last-measure fields as extra state
        # attributes; implicitly returns None before the first update.
        if self._state:
            measure = self._state.last_measure
            attributes = {
                'nn_ls': self._state.nn_ls,
                'nm_provider': self._state.nm_provider,
                'nm_ls_group_full': self._state.nm_ls_group_full,
                'dt_pay': measure.dt_pay,
                'nm_status': measure.nm_status,
                'sm_pay': measure.sm_pay,
                'dt_meter_installation': measure.dt_meter_installation,
                'dt_indication': measure.dt_indication,
                'nm_description_take': measure.nm_description_take,
                'nm_take': measure.nm_take,
                'nm_t1': measure.nm_t1,
                'nm_t2': measure.nm_t2,
                'nm_t3': measure.nm_t3,
                'pr_zone_t1': measure.pr_zone_t1,
                'pr_zone_t2': measure.pr_zone_t2,
                'pr_zone_t3': measure.pr_zone_t3,
                'vl_t1': measure.vl_t1,
                'vl_t2': measure.vl_t2,
                'vl_t3': measure.vl_t3,
                'refresh_date': self.update_time,
                'nn_days': self._state.nn_days,
                'vl_debt': self._state.vl_debt,
                'vl_balance': self._state.vl_balance
            }
            return attributes
    async def async_update(self):
        # NOTE(review): async_fetch_state returns None on "no data" or on
        # error, and unpacking None here raises TypeError -- confirm whether
        # that failure mode is intended.
        self._state, self.update_time = await self.async_fetch_state()
    @property
    def should_poll(self):
        """No need to poll. Coordinator notifies entity of updates."""
        return False
    async def async_fetch_state(self):
        # Fetch all meters from the portal and return (record, timestamp)
        # for this meter_id; falls through to None when the meter is absent,
        # the portal returns nothing, or any exception occurs (logged).
        try:
            _LOGGER.debug('получение данных с портала по счетчикам')
            meter_list = await self.client.fetch_data()
            if not meter_list:
                return
            for item in meter_list.values():
                if item.nn_ls == self.meter_id:
                    return item, datetime.now()
        except BaseException:
            _LOGGER.exception('ошибка получения состояния счетчиков с портала')
|
[
"kkurishev@gmail.com"
] |
kkurishev@gmail.com
|
f662d955f122cb26eb9042c12234d75832957de7
|
0007ba97130140d0b9d608ece9879323c6dc5f85
|
/53.py
|
bf6019caef1bcaedc7713c5b111bd521a6f0a775
|
[] |
no_license
|
Ashgomathi/ash
|
d0a4fb79fc8b15bb286d19afc121671a0ca8b79c
|
83c879c570e8abc261069574ee671ddee042664a
|
refs/heads/master
| 2020-06-10T10:32:58.875287
| 2019-07-27T09:07:40
| 2019-07-27T09:07:40
| 193,635,194
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 87
|
py
|
# Read a string of digits from stdin and print the sum of its digits.
gta8 = input()
# Idiomatic digit sum: iterate the characters directly instead of
# indexing with range(len(...)).
print(sum(int(digit) for digit in gta8))
|
[
"noreply@github.com"
] |
noreply@github.com
|
a83b8b621ccdb7c4edbf4359733bf2099d221fe7
|
b0eaa5f11441ae24432e57225cfb7906857ee172
|
/auth-service-2/venv/lib/python3.6/imp.py
|
82ba4f7f3d9967dff13a174e94b08e49c593e914
|
[] |
no_license
|
Anyungu/flask-msa
|
86bf64631b548a5407b36546a6e46ae415a44b76
|
1126fe6ea016ad5772695079b074f549aea3948d
|
refs/heads/master
| 2023-01-24T16:09:52.088877
| 2020-08-18T14:03:57
| 2020-08-18T14:03:57
| 285,810,522
| 0
| 1
| null | 2020-08-18T14:03:59
| 2020-08-07T11:15:03
|
Python
|
UTF-8
|
Python
| false
| false
| 44
|
py
|
/home/namrehs/anaconda3/lib/python3.6/imp.py
|
[
"anyungucw@gmail.com"
] |
anyungucw@gmail.com
|
fb7d9981d74ca20b240ec0f878f8bdfe495d0c7a
|
92fd68a8a6ac6e3946e9515cba58a626339552e8
|
/machineLearning_1/ML-Model_example1_ch.19.py
|
716f7b49d3a8ae97472ee42a00808649fb20c377
|
[] |
no_license
|
dlichtb/python_ml_deepLearning
|
9dac116e4de1278598a420a33bdf089a4706d28e
|
f9d6d03b30b9c0e721d6cd3a4833488ca8bc0cff
|
refs/heads/master
| 2020-04-25T11:06:34.326040
| 2019-02-26T20:48:13
| 2019-02-26T20:48:13
| 172,733,989
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 10,031
|
py
|
#!/usr/bin/env python
# Template for an end-to-end classification ML workflow (load, summarize,
# visualize, evaluate, predict).  Fill in `filename` and `names` before use.
import sys
import scipy
import numpy
import matplotlib
import pandas  # used for exploratory/descriptive statistics and visualization
import sklearn

# Report library versions so runs are reproducible.
print('Python: {}'.format(sys.version))
print('scipy: {}'.format(scipy.__version__))
print('numpy: {}'.format(numpy.__version__))
print('matplotlib: {}'.format(matplotlib.__version__))
print('pandas: {}'.format(pandas.__version__))
print('sklearn: {}'.format(sklearn.__version__))
print('##############################################################################')
print('')
### 1. LOAD DATA:
#########################
# 1.1: Import Library Modules/Functions/Objects
from pandas import read_csv
from pandas import set_option  # FIX: used by the summary sections but never imported
# FIX: pandas.tools.plotting was removed in pandas 0.20 -- use pandas.plotting.
from pandas.plotting import scatter_matrix
from matplotlib import pyplot
from sklearn.model_selection import train_test_split
from sklearn.model_selection import KFold
from sklearn.model_selection import cross_val_score
from sklearn.metrics import classification_report
from sklearn.metrics import confusion_matrix
from sklearn.metrics import accuracy_score
from sklearn.linear_model import LogisticRegression
from sklearn.tree import DecisionTreeClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from sklearn.naive_bayes import GaussianNB
from sklearn.svm import SVC
# 1.2: Load Dataset (placeholders -- set the real CSV path and column names).
filename = '____.csv'
names = ['', '', '', '']
dataset = read_csv(filename, names=names)
print('##############################################################################')
print('')
#################################################################################################
#################################################################################################
#################################################################################################
### 2. SUMMARIZE DATA:
##############################
# FIX: `set_option` was called below without ever being imported (NameError).
from pandas import set_option

# 2.1: Dimensions of the dataset
print('SHAPE(ROWS, COLUMNS):', dataset.shape)
# 2.2: Data-types of each attribute
set_option('display.max_rows', 500)
print('ATTRIBUTE DATA-TYPES:')
print(dataset.dtypes)
print('')
# 2.2: Peek at the data itself
set_option('display.width', 100)
print('HEAD(20):')
print(dataset.head(20))
print('')
# 2.3: Summarize attribute distributions -- display 3 decimal places.
set_option('precision', 3)
print(dataset.describe())
print('##############################################################################')
print('')
# 2.4: Breakdown of the data by the CLASS variable (class distribution).
# NOTE(review): groupby(60) addresses the label by *positional* column 60
# (sonar-style data); with the 4-element `names` header above this will fail --
# replace 60 with the real class column, e.g.:
# class_counts = dataset.groupby('class').size()
# print(class_counts)
print(dataset.groupby(60).size())
print('##############################################################################')
print('')
# 2.5: Statistical summary of all attributes: count, mean, std, min,
# 25th/50th/75th percentiles, max.
print('STATISTICAL SUMMARY FOR EACH COLUMN/ATTRIBUTE:')
print(dataset.describe())
print('')
# 2.6: Pairwise Pearson correlation between the numeric attributes, e.g.:
# set_option('precision', 2)
# print(dataset.corr(method='pearson'))
#################################################################################################
#################################################################################################
#################################################################################################
### 3. DATA VISUALIZATION:
##################################
# 3.1: Univariate/Unimodal Plots
# i) Histogram per attribute.
# FIX: 'xlabelsie' was a typo for the 'xlabelsize' keyword (TypeError).
dataset.hist(sharex=False, sharey=False, xlabelsize=1, ylabelsize=1)
pyplot.show()
print('##############################################################################')
print('')
# ii) Density plot per attribute, to inspect each attribute's distribution.
# FIX: the original passed `layout(8,8)` -- a positional call placed after
# keyword arguments, which is a SyntaxError; the intent is layout=(8, 8).
dataset.plot(kind='density', subplots=True, layout=(8, 8), sharex=False, legend=False, fontsize=1)
pyplot.show()
print('##############################################################################')
print('')
# iii) Box-and-whisker plot per attribute.
dataset.plot(kind='box', subplots=True, layout=(8, 8), sharex=False, sharey=False, fontsize=1)
pyplot.show()
print('##############################################################################')
print('')
# 3.4: Skew of each univariate distribution.
# NOTE(review): this section re-reads a second placeholder file ('___') and
# clobbers `filename`/`names`; it probably should just use `dataset.skew()` --
# confirm before running.
from pandas import read_csv
filename = '___'
names = ['', '', '', '']
data = read_csv(filename, names=names)
skew = data.skew()
print(skew)
print('##############################################################################')
print('')
# 3.2: Multivariate plots -- scatter-plot matrix of every attribute pair
# (each 2-D dot shows the relationship between two variables).
scatter_matrix(dataset)
pyplot.show()
#################################################################################################
#################################################################################################
#################################################################################################
### 4. EVALUATING ALGORITHMS:
#####################################
# 4.1: Split out a hold-out validation set (columns 0-3 are features,
# column 4 is the class label -- adjust for the real dataset).
array = dataset.values
X = array[:, 0:4]
Y = array[:, 4]
validation_size = 0.20
seed = 7
X_train, X_validation, Y_train, Y_validation = train_test_split(X, Y, test_size=validation_size, random_state=seed)
# 4.2: Spot-check a suite of models with 10-fold cross-validation.
models = []
models.append(( ' LR ' , LogisticRegression()))
models.append(( ' LDA ' , LinearDiscriminantAnalysis()))
models.append(( ' KNN ' , KNeighborsClassifier()))
models.append(( ' CART ' , DecisionTreeClassifier()))
models.append(( ' NB ' , GaussianNB()))
models.append(( ' SVM ' , SVC()))
# Evaluate each model in turn.
results = []
names = []
for name, model in models:
    # NOTE(review): sklearn >= 0.24 rejects random_state when shuffle is
    # False; add shuffle=True (changes fold assignment) on modern sklearn.
    kfold = KFold(n_splits=10, random_state=seed)
    # FIX: scoring=' accuracy ' (with surrounding spaces) is not a valid
    # scorer name and raises ValueError.
    cv_results = cross_val_score(model, X_train, Y_train, cv=kfold, scoring='accuracy')
    results.append(cv_results)
    names.append(name)
    msg = "%s: %f (%f)" % (name, cv_results.mean(), cv_results.std())
    # FIX: this print was dedented out of the loop, so only the last
    # model's score was ever shown.
    print(msg)
print('##############################################################################')
print('')
# 5: COMPARE ALGORITHMS -- box plot of each model's CV score distribution.
fig = pyplot.figure()
fig.suptitle( ' Algorithm Comparison ' )
ax = fig.add_subplot(111)
pyplot.boxplot(results)
ax.set_xticklabels(names)
pyplot.show()
print('##############################################################################')
print('')
# 5: MAKE PREDICTIONS on the validation dataset with the chosen model.
knn = KNeighborsClassifier()
knn.fit(X_train, Y_train)
predictions = knn.predict(X_validation)
print(accuracy_score(Y_validation, predictions))
print(confusion_matrix(Y_validation, predictions))
print(classification_report(Y_validation, predictions))
print('##############################################################################')
print('')
|
[
"noreply@github.com"
] |
noreply@github.com
|
6cf262c45682d5fbf7ebdee95f242ab1457a8f85
|
b90444530bd7999168c6ea5e3c17657ae2a0b925
|
/test/functional/wallet_disable.py
|
c3b0df3627e134dcba7685e137121384d0aa298d
|
[
"MIT"
] |
permissive
|
Bitcoin3-source/Bitcoin3.0
|
4723d3db6fa2edc5734ab28dac87cf131bbcfe9e
|
4c623be5200032fd49b54e01147da721e142f4f2
|
refs/heads/main
| 2023-03-25T20:34:26.331459
| 2021-03-25T02:31:47
| 2021-03-25T02:31:47
| 343,466,598
| 0
| 3
|
MIT
| 2021-03-25T02:31:48
| 2021-03-01T15:36:34
| null |
UTF-8
|
Python
| false
| false
| 1,542
|
py
|
#!/usr/bin/env python3
# Copyright (c) 2015-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test a node with the -disablewallet option.
- Test that validateaddress RPC works when running with -disablewallet
- Test that it is not possible to mine to an invalid address.
"""
from test_framework.test_framework import Bitcoin3TestFramework
from test_framework.util import assert_raises_rpc_error
class DisableWalletTest(Bitcoin3TestFramework):
    """Exercise a node started with the -disablewallet option."""

    def set_test_params(self):
        self.setup_clean_chain = True
        self.num_nodes = 1
        self.extra_args = [["-disablewallet"]]

    def run_test(self):
        node = self.nodes[0]
        # Wallet RPCs must be unavailable when the wallet is disabled.
        assert_raises_rpc_error(-32601, 'Method not found', node.getwalletinfo)
        # validateaddress still works without a wallet.
        for address, valid in (('3J98t1WpEZ73CNmQviecrnyiWrnqRhWNLy', False),
                               ('mneYUmWYsuk7kySiURxCi3AGxrAqZxLgPZ', True)):
            assert node.validateaddress(address)['isvalid'] == valid
        # Mining to a valid address succeeds even without a wallet...
        node.generatetoaddress(1, 'mneYUmWYsuk7kySiURxCi3AGxrAqZxLgPZ')
        # ...but mining to an invalid address must be rejected.
        assert_raises_rpc_error(-5, "Invalid address", node.generatetoaddress, 1, '3J98t1WpEZ73CNmQviecrnyiWrnqRhWNLy')
if __name__ == '__main__':
    # Run the functional test when invoked as a script.
    DisableWalletTest().main()
|
[
"superdaddynz@gmail.com"
] |
superdaddynz@gmail.com
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.