| blob_id (string, 40) | directory_id (string, 40) | path (string, 2-616) | content_id (string, 40) | detected_licenses (list, 0-69) | license_type (string, 2 classes) | repo_name (string, 5-118) | snapshot_id (string, 40) | revision_id (string, 40) | branch_name (string, 4-63) | visit_date (timestamp[us]) | revision_date (timestamp[us]) | committer_date (timestamp[us]) | github_id (int64, 2.91k-686M, nullable) | star_events_count (int64, 0-209k) | fork_events_count (int64, 0-110k) | gha_license_id (string, 23 classes) | gha_event_created_at (timestamp[us]) | gha_created_at (timestamp[us]) | gha_language (string, 213 classes) | src_encoding (string, 30 classes) | language (string, 1 class) | is_vendor (bool) | is_generated (bool) | length_bytes (int64, 2-10.3M) | extension (string, 246 classes) | content (string, 2-10.3M) | authors (list, 1) | author_id (string, 0-212) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
68a5556339d6c4ba6f854be0cda3f296574eaf67
|
5981fc46a2e033b1c8b3f49449ee55c3dbcc17c6
|
/allopathy/views.py
|
ec56988bb3024a45ff6d4c154ecd36f652af9285
|
[] |
no_license
|
shamitlal/Medical-Website
|
619ad0aa18dc69fe13cb5850d4de6a177d41d6ca
|
17d3f1387c65f5bda547894d002ef22143484158
|
refs/heads/master
| 2021-01-13T14:50:44.216726
| 2016-12-14T19:03:25
| 2016-12-14T19:03:25
| 76,488,492
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 148
|
py
|
from django.shortcuts import render
# Create your views here.
def allopathy(request):
return render(request, 'allopathy/allopathy.html', {})
|
[
"shamitlal@yahoo.com"
] |
shamitlal@yahoo.com
|
13f9fc971c3c8582a7f8e5715f7b253fbbd05b76
|
17ca5bae91148b5e155e18e6d758f77ab402046d
|
/analysis_ACS/CID3570/first_analysis/cut_PSFs_in_analysis.py
|
618268eb935438571ce91984e37bd80070f991f4
|
[] |
no_license
|
dartoon/QSO_decomposition
|
5b645c298825091c072778addfaab5d3fb0b5916
|
a514b9a0ad6ba45dc9c3f83abf569688b9cf3a15
|
refs/heads/master
| 2021-12-22T19:15:53.937019
| 2021-12-16T02:07:18
| 2021-12-16T02:07:18
| 123,425,150
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,011
|
py
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Wed Oct 31 13:54:02 2018
@author: Dartoon
"""
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import LogNorm
import sys
sys.path.insert(0,'../../py_tools')
from cut_image import cut_image, cut_center_bright, save_loc_png, grab_pos
import copy
import astropy.io.fits as pyfits
import os
path = os.getcwd()
ID = path.split('/')[-1]
fitsFile = pyfits.open('../../Cycle25data/ACS_data/{0}_acs_I_mosaic_180mas_sci.fits'.format(ID))
img = fitsFile[0].data # check the background
#from astropy.visualization import SqrtStretch
#from astropy.stats import SigmaClip
#from photutils import Background2D, SExtractorBackground
#from astropy.visualization.mpl_normalize import ImageNormalize
#norm = ImageNormalize(stretch=SqrtStretch())
#sigma_clip = SigmaClip(sigma=3., iters=10)
#bkg_estimator = SExtractorBackground()
#from photutils import make_source_mask
#mask_0 = make_source_mask(img, snr=2, npixels=5, dilate_size=11)
#mask_1 = (np.isnan(img))
#mask = mask_0 + mask_1
#bkg = Background2D(img, (50, 50), filter_size=(3, 3),
# sigma_clip=sigma_clip, bkg_estimator=bkg_estimator,
# mask=mask)
#fig=plt.figure(figsize=(15,15))
#ax=fig.add_subplot(1,1,1)
#ax.imshow(img, norm=LogNorm(), origin='lower')
##bkg.plot_meshes(outlines=True, color='#1f77b4')
#ax.xaxis.set_visible(False)
#ax.yaxis.set_visible(False)
#plt.show()
#fig=plt.figure(figsize=(15,15))
#ax=fig.add_subplot(1,1,1)
#ax.imshow(mask, origin='lower')
##bkg.plot_meshes(outlines=True, color='#1f77b4')
#ax.xaxis.set_visible(False)
#ax.yaxis.set_visible(False)
#plt.show()
#
#back = bkg.background* ~mask_1
#fig=plt.figure(figsize=(15,15))
#ax=fig.add_subplot(1,1,1)
#ax.imshow(back, origin='lower', cmap='Greys_r')
#ax.xaxis.set_visible(False)
#ax.yaxis.set_visible(False)
#plt.show()
#
#img -= back
#pyfits.PrimaryHDU(img).writeto('sub_coadd.fits',overwrite=True)
#img = pyfits.getdata('sub_coadd.fits')
filename= '{0}.reg'.format(ID)
c_psf_list, QSO_loc = grab_pos(filename,reg_ty = 'acs', QSO_reg_return=True)
center_QSO = c_psf_list[QSO_loc]
QSO, cut_center = cut_center_bright(image=img, center=center_QSO, radius=60, return_center=True, plot=False)
QSO_outer = cut_image(image=img, center=cut_center, radius=200)
pyfits.PrimaryHDU(QSO).writeto('{0}_cutout.fits'.format(ID),overwrite=True)
pyfits.PrimaryHDU(QSO_outer).writeto('{0}_cutout_outer.fits'.format(ID),overwrite=True)
PSFs = []
PSF_gauss_centers = []
PSF_bright_centers = []
count=0
#psf_list = None
psf_list = np.delete(c_psf_list, (QSO_loc), axis=0)
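# sort the remaining PSF candidates by their squared distance from the QSO, nearest first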
dist = (psf_list-center_QSO)[:,0]**2+(psf_list-center_QSO)[:,1]**2
psf_list = psf_list[dist.argsort()]
for i in range(len(psf_list)):
print 'PSF',i
PSF, PSF_center = cut_center_bright(image=img, center=psf_list[i], radius=60, return_center=True, plot=False)
PSFs.append([PSF, 1, PSF_center])
PSF_gauss_centers.append(PSF_center)
_, PSF_br_center = cut_center_bright(image=img, center=psf_list[i], radius=60, kernel = 'center_bright', return_center=True, plot=False)
PSF_bright_centers.append(PSF_br_center)
count += 1
#extra_psfs = None
extra_psfs = np.array([[1479.9762,3554.7075], [5409.6929,4718.4676], [2870.2585,4735.0797], [1065.9795,1476.4033]])
dist_extra = (extra_psfs-center_QSO)[:,0]**2+(extra_psfs-center_QSO)[:,1]**2
extra_psfs = extra_psfs[dist_extra.argsort()]
for i in range(len(extra_psfs)):
print 'PSF',count
PSF, PSF_center = cut_center_bright(image=img, center=extra_psfs[i], radius=60, return_center=True, plot=False)
PSFs.append([PSF,0, PSF_center])
PSF_gauss_centers.append(PSF_center)
_, PSF_br_center = cut_center_bright(image=img, center=extra_psfs[i], radius=60, kernel = 'center_bright', return_center=True, plot=False)
PSF_bright_centers.append(PSF_br_center)
count += 1
from mask_objects import mask_obj
print "QSO:"
a, QSO_mask = mask_obj(img=QSO, exp_sz=1.4)
if len(QSO_mask) > 1:
QSO_mask = np.sum(np.asarray(QSO_mask),axis=0)
elif len(QSO_mask) == 1:
QSO_mask = QSO_mask[0]
#print "QSO image:"
#plt.imshow((QSO_mask), origin='lower')
#plt.show()
QSO_mask = (1 - (QSO_mask != 0)*1.)
PSF_msk_list = []
for i in range(len(PSFs)):
print "PSF{0}:".format(i)
_, PSF_mask = mask_obj(img=PSFs[i][0], snr=3., exp_sz=2.4)
if len(PSF_mask) > 1:
PSF_mask = np.sum(np.asarray(PSF_mask),axis=0)
elif len(PSF_mask) == 1:
PSF_mask = PSF_mask[0]
# print "PSF{0} image:".format(i)
# plt.imshow(PSF_mask, origin='lower')
# plt.show()
PSF_mask = (1 - (PSF_mask != 0)*1.)
if i in PSF_msk_list:
PSF_mask = PSF_mask*0 + 1
print "PSF", i, "not use this mask"
PSFs[i].append(PSF_mask)
center_match = (np.sum(abs(np.asarray(PSF_gauss_centers)-np.asarray(PSF_bright_centers)),axis = 1) == 0)
PSFs_all = copy.deepcopy(PSFs)
PSFs=[]
for i in range(len(PSFs_all)):
if center_match[i] == True:
print i
PSFs.append(PSFs_all[i])
#==============================================================================
# Compare the FWHM
#==============================================================================
from measure_FWHM import measure_FWHM
FWHM = []
for i in range(len(PSFs)):
FWHM_i = measure_FWHM(PSFs[i][0])[0]
print "The measued FWHM for PSF", i, ":", FWHM_i
FWHM.append(FWHM_i)
FWHM = np.asarray(FWHM)
#==============================================================================
# Compare the profile and derive the Average image
#==============================================================================
flux_list = []
for i in range(len(PSFs)):
flux = np.sum(PSFs[i][0]*PSFs[i][3])
print "tot_flux for PSF{0}".format(i), flux
flux_list.append(flux)
del_list = [0,3]
PSFs = [PSFs[i] for i in range(len(PSFs)) if i not in del_list]
#plot the first selection
if extra_psfs is None:
save_loc_png(img,center_QSO,psf_list, ID=ID, label='ini' ,reg_ty = 'acs')
else:
save_loc_png(img,center_QSO,psf_list,extra_psfs, ID=ID, label='ini', reg_ty = 'acs')
PSFs_familiy = [PSFs[i][1] for i in range(len(PSFs))]
if extra_psfs is None:
loc_PSFs = psf_list
elif psf_list is None:
loc_PSFs = extra_psfs
else:
loc_PSFs = np.append(psf_list, extra_psfs, axis=0)
loc_ind_star = [PSFs[i][2] for i in range(len(PSFs)) if PSFs[i][1]==1] #and flux_list[i]>100]
loc_like_star = [PSFs[i][2] for i in range(len(PSFs)) if PSFs[i][1]==0] # and flux_list[i]>100]
if PSFs_familiy[-1] ==1:
save_loc_png(img,center_QSO,loc_ind_star, ID=ID,reg_ty = 'acs')
else:
save_loc_png(img,center_QSO,loc_ind_star,loc_like_star, ID=ID,reg_ty = 'acs')
PSF_list = [PSFs[i][0] for i in range(len(PSFs))]
PSF_masks = [PSFs[i][3] for i in range(len(PSFs))]
from flux_profile import QSO_psfs_compare
gridsp_l = ['log', None]
if_annuli_l = [False, True]
for i in range(2):
for j in range(2):
plt_which_PSF = None
plt_QSO = False
# if i+j == 0:
# plt_which_PSF = range(len(PSFs))
# plt_QSO = True
fig_psf_com = QSO_psfs_compare(QSO=QSO, QSO_msk=QSO_mask, psfs= PSF_list,
plt_which_PSF=plt_which_PSF,
PSF_mask_img=PSF_masks, grids=30,
include_QSO=True,
plt_QSO = plt_QSO, norm_pix = 5.0,
gridspace= gridsp_l[i], if_annuli=if_annuli_l[j])
# fig_psf_com.savefig('PSFvsQSO{0}_{1}_{2}.pdf'.format(i,['xlog','xlin'][i],['circ','annu'][j]))
if j==1:
plt.show()
else:
plt.close()
import pickle
filename='{0}_PSFs_QSO'.format(ID)
datafile = open(filename, 'wb')
QSOs = [QSO,cut_center]
pickle.dump([PSFs, QSOs], datafile)
datafile.close()
#import pickle
#datafile = open('{0}_PSFs_QSO'.format(ID),'rb')
#PSFs, QSO=pickle.load(open('XID2202_PSFs_QSO','rb'))
#datafile.close()
|
[
"dingxuheng@mail.bnu.edu.cn"
] |
dingxuheng@mail.bnu.edu.cn
|
1ac8ce3173f8967879bd1f5d721a6f3791638038
|
f75f1f755d464462503753622097cb4865bd6d8f
|
/main.py
|
c9e701ca1b6bddeccf8b1dd39e253b4e15ff3732
|
[] |
no_license
|
AderonHuang/deellearning
|
b4596b87f8f2bfb99851251e2bbea830a4a4cab2
|
9297710d73b4860f46c98131b0811db3705d0206
|
refs/heads/master
| 2020-05-18T16:49:22.150839
| 2019-05-02T09:52:04
| 2019-05-02T09:52:04
| 184,536,746
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 18,862
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Apr 21 20:10:19 2019
@author: aderon
"""
import argparse
import os
import shutil
import time
import math
import warnings
import models
from utils.modelMeasure import measure_model
parser = argparse.ArgumentParser(description='PyTorch Condensed Convolutional Networks')
parser.add_argument('data', metavar='DIR',default='/results/savedir',
help='path to dataset')
parser.add_argument('--model', default='condensenet', type=str, metavar='M',
help='model to train the dataset')
parser.add_argument('-j', '--workers', default=8, type=int, metavar='N',
                    help='number of data loading workers (default: 8)')
parser.add_argument('--epochs', default=120, type=int, metavar='N',
help='number of total epochs to run')
parser.add_argument('--start-epoch', default=0, type=int, metavar='N',
help='manual epoch number (useful on restarts)')
parser.add_argument('-b', '--batch-size', default=256, type=int,
metavar='N', help='mini-batch size (default: 256)')
parser.add_argument('--lr', '--learning-rate', default=0.1, type=float,
metavar='LR', help='initial learning rate (default: 0.1)')
parser.add_argument('--lr-type', default='cosine', type=str, metavar='T',
help='learning rate strategy (default: cosine)',
choices=['cosine', 'multistep'])
parser.add_argument('--momentum', default=0.9, type=float, metavar='M',
help='momentum (default: 0.9)')
parser.add_argument('--weight-decay', '--wd', default=1e-4, type=float,
metavar='W', help='weight decay (default: 1e-4)')
parser.add_argument('--print-freq', '-p', default=10, type=int,
metavar='N', help='print frequency (default: 10)')
parser.add_argument('--pretrained', dest='pretrained', action='store_true',
help='use pre-trained model (default: false)')
parser.add_argument('--no-save-model', dest='no_save_model', action='store_true',
help='only save best model (default: false)')
parser.add_argument('--manual-seed', default=0, type=int, metavar='N',
help='manual seed (default: 0)')
parser.add_argument('--gpu',
help='gpu available')
parser.add_argument('--savedir', type=str, metavar='PATH', default='/results/savedir',
help='path to save result and checkpoint (default: results/savedir)')
parser.add_argument('--resume', action='store_true',
help='use latest checkpoint if have any (default: none)')
parser.add_argument('--stages', type=str, metavar='STAGE DEPTH',
help='per layer depth')
parser.add_argument('--bottleneck', default=4, type=int, metavar='B',
help='bottleneck (default: 4)')
parser.add_argument('--group-1x1', type=int, metavar='G', default=4,
help='1x1 group convolution (default: 4)')
parser.add_argument('--group-3x3', type=int, metavar='G', default=4,
help='3x3 group convolution (default: 4)')
parser.add_argument('--condense-factor', type=int, metavar='C', default=4,
help='condense factor (default: 4)')
parser.add_argument('--growth', type=str, metavar='GROWTH RATE',
help='per layer growth')
parser.add_argument('--reduction', default=0.5, type=float, metavar='R',
help='transition reduction (default: 0.5)')
parser.add_argument('--dropout-rate', default=0, type=float,
help='drop out (default: 0)')
parser.add_argument('--group-lasso-lambda', default=0., type=float, metavar='LASSO',
help='group lasso loss weight (default: 0)')
parser.add_argument('--evaluate', action='store_true',
help='evaluate model on validation set (default: false)')
parser.add_argument('--convert-from', default='/weights/', type=str, metavar='PATH',
help='path to saved checkpoint')
parser.add_argument('--evaluate-from', default='/weights/', type=str, metavar='PATH',
help='path to saved checkpoint (default: none)')
args = parser.parse_args()
os.environ["CUDA_VISIBLE_DEVICES"] = args.gpu
args.stages = list(map(int, args.stages.split('-')))
args.growth = list(map(int, args.growth.split('-')))
if args.condense_factor is None:
args.condense_factor = args.group_1x1
if args.data == 'cifar10':
args.num_classes = 10
elif args.data == 'cifar100':
args.num_classes = 100
else:
args.num_classes = 1000
warnings.filterwarnings("ignore")
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim
import torch.utils.data
import torchvision.transforms as transforms
import torchvision.datasets as datasets
torch.manual_seed(args.manual_seed)
torch.cuda.manual_seed_all(args.manual_seed)
best_prec1 = 0
def main():
global args, best_prec1
### Calculate FLOPs & Param
model = getattr(models, args.model)(args)
print(model)
if args.data in ['cifar10', 'cifar100']:
IMAGE_SIZE = 32
else:
IMAGE_SIZE = 224
n_flops, n_params = measure_model(model, IMAGE_SIZE, IMAGE_SIZE)
print('FLOPs: %.2fM, Params: %.2fM' % (n_flops / 1e6, n_params / 1e6))
args.filename = "%s_%s_%s.txt" % \
(args.model, int(n_params), int(n_flops))
del(model)
print(args)
### Create model
model = getattr(models, args.model)(args)
if args.model.startswith('alexnet') or args.model.startswith('vgg'):
model.features = torch.nn.DataParallel(model.features)
model.cuda()
else:
model = torch.nn.DataParallel(model).cuda()
### Define loss function (criterion) and optimizer
criterion = nn.CrossEntropyLoss().cuda()
optimizer = torch.optim.SGD(model.parameters(), args.lr,
momentum=args.momentum,
weight_decay=args.weight_decay,
nesterov=True)
### Optionally resume from a checkpoint
if args.resume:
checkpoint = load_checkpoint(args)
if checkpoint is not None:
args.start_epoch = checkpoint['epoch'] + 1
best_prec1 = checkpoint['best_prec1']
model.load_state_dict(checkpoint['state_dict'])
optimizer.load_state_dict(checkpoint['optimizer'])
### Optionally convert from a model
if args.convert_from is not None:
args.evaluate = True
state_dict = torch.load(args.convert_from)['state_dict']
model.load_state_dict(state_dict)
model = model.cpu().module
convert_model(model, args)
model = nn.DataParallel(model).cuda()
head, tail = os.path.split(args.convert_from)
tail = "converted_" + tail
torch.save({'state_dict': model.state_dict()}, os.path.join(head, tail))
### Optionally evaluate from a model
if args.evaluate_from is not None:
args.evaluate = True
state_dict = torch.load(args.evaluate_from)['state_dict']
model.load_state_dict(state_dict)
cudnn.benchmark = True
### Data loading
if args.data == "cifar10":
normalize = transforms.Normalize(mean=[0.4914, 0.4824, 0.4467],
std=[0.2471, 0.2435, 0.2616])
train_set = datasets.CIFAR10('./data/cifar10', train=True, download=True,
transform=transforms.Compose([
transforms.RandomCrop(32, padding=4),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
normalize,
]))
val_set = datasets.CIFAR10('./data/cifar10', train=False,
transform=transforms.Compose([
transforms.ToTensor(),
normalize,
]))
elif args.data == "cifar100":
normalize = transforms.Normalize(mean=[0.5071, 0.4867, 0.4408],
std=[0.2675, 0.2565, 0.2761])
train_set = datasets.CIFAR100('./data/cifar100', train=True, download=True,
transform=transforms.Compose([
transforms.RandomCrop(32, padding=4),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
normalize,
]))
val_set = datasets.CIFAR100('./data/cifar100', train=False,
transform=transforms.Compose([
transforms.ToTensor(),
normalize,
]))
else:
traindir = os.path.join(args.data, 'train')
valdir = os.path.join(args.data, 'val')
normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225])
train_set = datasets.ImageFolder(traindir, transforms.Compose([
transforms.RandomSizedCrop(224),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
normalize,
]))
val_set = datasets.ImageFolder(valdir, transforms.Compose([
transforms.Scale(256),
transforms.CenterCrop(224),
transforms.ToTensor(),
normalize,
]))
train_loader = torch.utils.data.DataLoader(
train_set,
batch_size=args.batch_size, shuffle=True,
num_workers=args.workers, pin_memory=True)
val_loader = torch.utils.data.DataLoader(
val_set,
batch_size=args.batch_size, shuffle=False,
num_workers=args.workers, pin_memory=True)
if args.evaluate:
validate(val_loader, model, criterion)
return
for epoch in range(args.start_epoch, args.epochs):
### Train for one epoch
tr_prec1, tr_prec5, loss, lr = \
train(train_loader, model, criterion, optimizer, epoch)
### Evaluate on validation set
val_prec1, val_prec5 = validate(val_loader, model, criterion)
### Remember best prec@1 and save checkpoint
is_best = val_prec1 < best_prec1
best_prec1 = max(val_prec1, best_prec1)
model_filename = 'checkpoint_%03d.pth.tar' % epoch
save_checkpoint({
'epoch': epoch,
'model': args.model,
'state_dict': model.state_dict(),
'best_prec1': best_prec1,
'optimizer': optimizer.state_dict(),
}, args, is_best, model_filename, "%.4f %.4f %.4f %.4f %.4f %.4f\n" %
(val_prec1, val_prec5, tr_prec1, tr_prec5, loss, lr))
### Convert model and test
model = model.cpu().module
convert_model(model, args)
model = nn.DataParallel(model).cuda()
print(model)
validate(val_loader, model, criterion)
n_flops, n_params = measure_model(model, IMAGE_SIZE, IMAGE_SIZE)
print('FLOPs: %.2fM, Params: %.2fM' % (n_flops / 1e6, n_params / 1e6))
return
def train(train_loader, model, criterion, optimizer, epoch):
batch_time = AverageMeter()
data_time = AverageMeter()
losses = AverageMeter()
top1 = AverageMeter()
top5 = AverageMeter()
learned_module_list = []
### Switch to train mode
model.train()
### Find all learned convs to prepare for group lasso loss
for m in model.modules():
if m.__str__().startswith('LearnedGroupConv'):
learned_module_list.append(m)
running_lr = None
end = time.time()
for i, (input, target) in enumerate(train_loader):
progress = float(epoch * len(train_loader) + i) / \
(args.epochs * len(train_loader))
args.progress = progress
### Adjust learning rate
lr = adjust_learning_rate(optimizer, epoch, args, batch=i,
nBatch=len(train_loader), method=args.lr_type)
if running_lr is None:
running_lr = lr
### Measure data loading time
data_time.update(time.time() - end)
target = target.cuda(async=True)
input_var = torch.autograd.Variable(input)
target_var = torch.autograd.Variable(target)
### Compute output
output = model(input_var, progress)
loss = criterion(output, target_var)
### Add group lasso loss
if args.group_lasso_lambda > 0:
lasso_loss = 0
for m in learned_module_list:
lasso_loss = lasso_loss + m.lasso_loss
loss = loss + args.group_lasso_lambda * lasso_loss
### Measure accuracy and record loss
prec1, prec5 = accuracy(output.data, target, topk=(1, 5))
losses.update(loss.data[0], input.size(0))
top1.update(prec1[0], input.size(0))
top5.update(prec5[0], input.size(0))
### Compute gradient and do SGD step
optimizer.zero_grad()
loss.backward()
optimizer.step()
### Measure elapsed time
batch_time.update(time.time() - end)
end = time.time()
if i % args.print_freq == 0:
print('Epoch: [{0}][{1}/{2}]\t'
'Time {batch_time.val:.3f}\t' # ({batch_time.avg:.3f}) '
'Data {data_time.val:.3f}\t' # ({data_time.avg:.3f}) '
'Loss {loss.val:.4f}\t' # ({loss.avg:.4f}) '
'Prec@1 {top1.val:.3f}\t' # ({top1.avg:.3f}) '
'Prec@5 {top5.val:.3f}\t' # ({top5.avg:.3f})'
'lr {lr: .4f}'.format(
epoch, i, len(train_loader), batch_time=batch_time,
data_time=data_time, loss=losses, top1=top1, top5=top5, lr=lr))
return 100. - top1.avg, 100. - top5.avg, losses.avg, running_lr
def validate(val_loader, model, criterion):
batch_time = AverageMeter()
losses = AverageMeter()
top1 = AverageMeter()
top5 = AverageMeter()
### Switch to evaluate mode
model.eval()
end = time.time()
for i, (input, target) in enumerate(val_loader):
target = target.cuda(async=True)
input_var = torch.autograd.Variable(input, volatile=True)
target_var = torch.autograd.Variable(target, volatile=True)
### Compute output
output = model(input_var)
loss = criterion(output, target_var)
### Measure accuracy and record loss
prec1, prec5 = accuracy(output.data, target, topk=(1, 5))
losses.update(loss.data[0], input.size(0))
top1.update(prec1[0], input.size(0))
top5.update(prec5[0], input.size(0))
### Measure elapsed time
batch_time.update(time.time() - end)
end = time.time()
if i % args.print_freq == 0:
print('Test: [{0}/{1}]\t'
'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
'Loss {loss.val:.4f} ({loss.avg:.4f})\t'
'Prec@1 {top1.val:.3f} ({top1.avg:.3f})\t'
'Prec@5 {top5.val:.3f} ({top5.avg:.3f})'.format(
i, len(val_loader), batch_time=batch_time, loss=losses,
top1=top1, top5=top5))
print(' * Prec@1 {top1.avg:.3f} Prec@5 {top5.avg:.3f}'
.format(top1=top1, top5=top5))
return 100. - top1.avg, 100. - top5.avg
def load_checkpoint(args):
model_dir = os.path.join(args.savedir, 'save_models')
latest_filename = os.path.join(model_dir, 'latest.txt')
if os.path.exists(latest_filename):
with open(latest_filename, 'r') as fin:
model_filename = fin.readlines()[0]
else:
return None
print("=> loading checkpoint '{}'".format(model_filename))
state = torch.load(model_filename)
print("=> loaded checkpoint '{}'".format(model_filename))
return state
def save_checkpoint(state, args, is_best, filename, result):
print(args)
result_filename = os.path.join(args.savedir, args.filename)
model_dir = os.path.join(args.savedir, 'save_models')
model_filename = os.path.join(model_dir, filename)
latest_filename = os.path.join(model_dir, 'latest.txt')
best_filename = os.path.join(model_dir, 'model_best.pth.tar')
os.makedirs(args.savedir, exist_ok=True)
os.makedirs(model_dir, exist_ok=True)
print("=> saving checkpoint '{}'".format(model_filename))
with open(result_filename, 'a') as fout:
fout.write(result)
torch.save(state, model_filename)
with open(latest_filename, 'w') as fout:
fout.write(model_filename)
if args.no_save_model:
shutil.move(model_filename, best_filename)
elif is_best:
shutil.copyfile(model_filename, best_filename)
print("=> saved checkpoint '{}'".format(model_filename))
return
class AverageMeter(object):
"""Computes and stores the average and current value"""
def __init__(self):
self.reset()
def reset(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def update(self, val, n=1):
self.val = val
self.sum += val * n
self.count += n
self.avg = self.sum / self.count
def adjust_learning_rate(optimizer, epoch, args, batch=None,
nBatch=None, method='cosine'):
if method == 'cosine':
T_total = args.epochs * nBatch
T_cur = (epoch % args.epochs) * nBatch + batch
lr = 0.5 * args.lr * (1 + math.cos(math.pi * T_cur / T_total))
elif method == 'multistep':
if args.data in ['cifar10', 'cifar100']:
lr, decay_rate = args.lr, 0.1
if epoch >= args.epochs * 0.75:
lr *= decay_rate**2
elif epoch >= args.epochs * 0.5:
lr *= decay_rate
else:
"""Sets the learning rate to the initial LR decayed by 10 every 30 epochs"""
lr = args.lr * (0.1 ** (epoch // 30))
for param_group in optimizer.param_groups:
param_group['lr'] = lr
return lr
def accuracy(output, target, topk=(1,)):
"""Computes the precision@k for the specified values of k"""
maxk = max(topk)
batch_size = target.size(0)
_, pred = output.topk(maxk, 1, True, True)
pred = pred.t()
correct = pred.eq(target.view(1, -1).expand_as(pred))
res = []
for k in topk:
correct_k = correct[:k].view(-1).float().sum(0)
res.append(correct_k.mul_(100.0 / batch_size))
return res
if __name__ == '__main__':
main()
|
[
"790869148@qq.com"
] |
790869148@qq.com
|
b99ab818fca8289648830abc2a851b6e7323a5e5
|
2e60017779c5c286629ab5a3a7aeb27a6b19a60b
|
/python/2017day19part2.py
|
7f09c5b24ce6b8bf021a566185e157549778341b
|
[] |
no_license
|
jamesjiang52/10000-Lines-of-Code
|
f8c7cb4b8d5e441693f3e0f6919731ce4680f60d
|
3b6c20b288bad1de5390ad672c73272d98e93ae0
|
refs/heads/master
| 2020-03-15T03:50:38.104917
| 2018-05-07T04:41:52
| 2018-05-07T04:41:52
| 131,952,232
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,680
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Tue Dec 19 13:21:23 2017
@author: James Jiang
"""
all_lines = [line.rstrip('\n') for line in open('Data.txt')]
all_lines_chars = []
for i in range(len(all_lines)):
chars = [j for j in all_lines[i]]
all_lines_chars.append(chars)
index_list = 0
index_all = 0
for i in range(len(all_lines_chars[0])):
if all_lines_chars[0][i] == '|':
index_list = i
mode = 'down'
total = 0
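# walk the diagram one cell at a time, turning at '+' junctions, and count every move
# until the path runs into blank space or a dead end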
while True:
if all_lines_chars[index_all][index_list] == ' ':
break
if all_lines_chars[index_all][index_list] == '+':
k = 0
if (mode == 'down') or (mode == 'up'):
if index_list != 0:
if all_lines_chars[index_all][index_list - 1] != ' ':
mode = 'left'
k += 1
if index_list != len(all_lines_chars[index_all]) - 1:
if all_lines_chars[index_all][index_list + 1] != ' ':
mode = 'right'
k += 1
elif (mode == 'left') or (mode == 'right'):
if index_all != 0:
if all_lines_chars[index_all - 1][index_list] != ' ':
mode = 'up'
k += 1
if index_all != len(all_lines_chars) - 1:
if all_lines_chars[index_all + 1][index_list] != ' ':
mode = 'down'
k += 1
if k == 0:
break
if mode == 'down':
index_all += 1
elif mode == 'up':
index_all -= 1
elif mode == 'left':
index_list -= 1
elif mode == 'right':
index_list += 1
total += 1
print(total)
|
[
"jamesjiang52@gmail.com"
] |
jamesjiang52@gmail.com
|
54ec91db07ac3c7dee2a1c7227018e70b62f9043
|
0f9080ff70192e3fbbaa005f1f8c9cff6c12b9a9
|
/mysite/store/migrations/0013_auto_20181217_1311.py
|
896dd16ec4c0adb3c683197f147676d7a093f29f
|
[] |
no_license
|
Seisembayev/car-store
|
ae1a038a43769168f9a4f10347c7314d2e6100d3
|
d883c356be7415932afb2af1abb989b2e519024e
|
refs/heads/master
| 2020-04-15T21:27:07.345181
| 2019-01-10T09:47:49
| 2019-01-10T09:47:49
| 165,033,039
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 370
|
py
|
# Generated by Django 2.1.3 on 2018-12-17 07:11
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('store', '0012_auto_20181217_1310'),
]
operations = [
migrations.AlterField(
model_name='car',
name='date',
field=models.DateTimeField(),
),
]
|
[
"seisembayevv@github.com"
] |
seisembayevv@github.com
|
806594d6287d004b7f59fd97bde8ccda5942dc4a
|
17d531819123ea09fef201353efcbee4e8ff8097
|
/reduce/owner/permissions.py
|
7566e4734a5b33e2760e0341428f1d01cee25dce
|
[] |
no_license
|
showmethepeach/Re.duce
|
07a00463c02c572d6e96e177ea0ef5e6e615c2ad
|
d1ca88ef2256683e0ef51f12c0b6ec747fdda24c
|
refs/heads/master
| 2021-08-24T01:10:51.920406
| 2017-10-26T15:53:22
| 2017-10-26T15:53:22
| 104,641,211
| 0
| 0
| null | 2017-11-16T06:15:53
| 2017-09-24T12:11:28
|
Python
|
UTF-8
|
Python
| false
| false
| 290
|
py
|
from rest_framework import permissions
class IsOwner(permissions.BasePermission):
"""
    Allow read and write access for the OWNER only
"""
def has_permission(self, request, view):
if request.user.is_authenticated and request.user.owner is not None:
return True
|
[
"jucie15@nate.com"
] |
jucie15@nate.com
|
d99233db7113509e6dc6d2a2a14bafd94573af20
|
0f4335025c89d266acf9d8b0886d960ea7da1f4f
|
/decrypt/vigenere.py
|
e0f0a79dc1885292f0a1f9b7d289341af163b061
|
[] |
no_license
|
camsenec/classical-cipher-breaker
|
fcb42a631388fa0b50be4c5dcc36cfb7139b4422
|
2493d34ddee1cf70c63ca35a2f7c0725fd79e5df
|
refs/heads/master
| 2023-07-25T12:00:53.809877
| 2021-09-05T15:07:02
| 2021-09-05T15:07:02
| 302,431,628
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,885
|
py
|
from string import ascii_lowercase
from decrypt import shift
from decrypt import constants
class VigenereSolver(object):
def __init__(self, message):
self.ciphertxt = message
def __getFrequencyOfText(self, inputText):
text = ''.join(inputText.lower().split())
frequency = {}
for letter in text:
if letter in frequency:
frequency[letter] += 1
else:
frequency[letter] = 1
return frequency
def __calculateIC(self, partialText):
partialText = ''.join(partialText.lower().split())
# maps characters to their frequencies
frequency = self.__getFrequencyOfText(partialText)
ic = 0.0
for letter in ascii_lowercase:
if letter in frequency:
ic += frequency[letter] * (frequency[letter] - 1)
ic /= len(partialText) * (len(partialText) - 1)
return ic
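    # The value computed above is the index of coincidence (IC),
    # IC = sum_i f_i*(f_i - 1) / (N*(N - 1)): roughly 0.065-0.068 for English-like text
    # and about 1/26 ~ 0.038 for uniformly random letters, which is why the key-length
    # search below keeps column texts whose IC falls between 0.06 and 0.07.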
def __calculateMuturalIC(self, partialTextX, partialTextY):
textX = ''.join(partialTextX.lower().split())
textY = ''.join(partialTextY.lower().split())
# maps characters to their frequencies
frequencyX = self.__getFrequencyOfText(textX)
frequencyY = self.__getFrequencyOfText(textY)
ic = 0.0
for letter in ascii_lowercase:
if letter in frequencyX and letter in frequencyY:
ic += frequencyX[letter] * frequencyY[letter]
ic /= len(textX) * len(textY)
return ic
def __extractPartialTexts(self):
icList = []
for m in range(1, len(self.ciphertxt)):
lengthOfKey = m
# create dictionary of each sequence generated by a key of this length
averageIC = 0.0
sequenceDictionary = {}
for index in range(len(self.ciphertxt)):
sequenceNumber = index % lengthOfKey
if sequenceNumber in sequenceDictionary:
sequenceDictionary[sequenceNumber] += self.ciphertxt[index]
else:
sequenceDictionary[sequenceNumber] = self.ciphertxt[index]
hadZeroError = False
for stringSequence in sequenceDictionary.values():
try:
averageIC += self.__calculateIC(stringSequence)
except ZeroDivisionError:
hadZeroError = True
break
if hadZeroError == True:
averageIC = 0
else:
averageIC /= len(sequenceDictionary.keys())
icList.append(averageIC)
candidateMList = []
for i in range(len(icList)):
if icList[i] <= 0.07 and icList[i] >= 0.06:
candidateMList.append(i+1)
estimatedM = min(candidateMList)
return estimatedM
def __findDifferenceOfKeys(self, lengthOfKey):
pairOfShiftKeys = []
candidatePairList = []
sequenceDictionary = {}
for index in range(len(self.ciphertxt)):
sequenceNumber = index % lengthOfKey
if sequenceNumber in sequenceDictionary:
sequenceDictionary[sequenceNumber] += self.ciphertxt[index]
else:
sequenceDictionary[sequenceNumber] = self.ciphertxt[index]
#log
print("\n\n[Vigenere Cipher] Partial Texts")
for key in sequenceDictionary.keys():
print("Y", key+1, ":", sequenceDictionary[key], '\nI.C.:', round(self.__calculateIC(sequenceDictionary[key]),3), '\n')
#iterate through both sequence
for indexI in sequenceDictionary.keys():
for indexJ in sequenceDictionary.keys():
for g in range(len(constants.alphabet)):
mutualIC = self.__calculateMuturalIC(sequenceDictionary[indexI], shift.ShiftSolver(sequenceDictionary[indexJ], g).run())
if(indexI < indexJ):
candidatePairList.append([indexI+1, indexJ+1, g, mutualIC])
#log
print("\n\n[Vigenere Cipher] Mutual IC")
for pair in candidatePairList:
print("Y" + str(pair[0]) + ", Y" + str(pair[1]) + ", g =", str(pair[2]) + ", Mutual I.C. =", pair[3])
epsilon = 0.0005
pairOfShiftKeys = []
while True:
pairOfShiftKeys.clear()
for candidatePair in candidatePairList:
if candidatePair[3] <= 0.065+epsilon and candidatePair[3] >= 0.065-epsilon:
pairOfShiftKeys.append(candidatePair)
if len(pairOfShiftKeys) >= lengthOfKey:
break
epsilon += 0.0005
return pairOfShiftKeys
def __decrypt(self):
m = self.__extractPartialTexts()
pairOfShiftKeys = self.__findDifferenceOfKeys(m)
return m, pairOfShiftKeys
def run(self):
return self.__decrypt()
|
[
"deepsky2221@gmail.com"
] |
deepsky2221@gmail.com
|
87962fc9bba3e4a46ea7c5df4b6156a47c66f10c
|
b0dbeb89d8d38fc4b2106a1c8b2d8e8ff3613df2
|
/PythonWorkspace/gui_basic/13_scrollbar.py
|
5d943eae6182b94fe31e35c3754fdf8d80fb8874
|
[] |
no_license
|
kthoons/GAMEPROJECT_HP
|
72781283f1b7d3876897d1adffac26f00d6e86de
|
3be8ca49f3afa9acf1bd218d88a6527dcb7db24c
|
refs/heads/main
| 2023-08-08T06:13:49.779406
| 2021-09-23T06:15:27
| 2021-09-23T06:15:27
| 400,940,170
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 571
|
py
|
import tkinter.messagebox as msgbox
from tkinter import *
root = Tk()
root.title("Nado GUI")
root.geometry("640x480") # width x height
frame = Frame(root)
frame.pack()
scrollbar = Scrollbar(frame)
scrollbar.pack(side="right", fill="y")
# without hooking up scrollbar.set, the view snaps back to the top after scrolling
listbox = Listbox(frame, selectmode="extended", height=10, yscrollcommand= scrollbar.set)
for i in range(1, 32): # days 1 to 31
    listbox.insert(END, str(i) + "일") # "1일", "2일", ... (day 1, day 2, ...)
listbox.pack(side="left")
scrollbar.config(command=listbox.yview)
root.mainloop()
|
[
"kth9987992@gmail.com"
] |
kth9987992@gmail.com
|
7f2946f9d7b284aa351af1865732bd4fa135c038
|
533109348eb19de5a1a54a8f42d7f40ebb52c826
|
/scripts/GenerateFracturePoints.py
|
e33d7b1790f2c4fe364f891d3d9158460e4a185a
|
[] |
no_license
|
adamalsegard/ShatteringWood
|
514eca6cc53eea465610c658fe0da16247e65462
|
77f911412a6f7777082be30574eb6362263d2843
|
refs/heads/master
| 2021-08-30T11:11:18.398697
| 2017-12-17T16:50:51
| 2017-12-17T16:50:51
| 110,939,640
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,632
|
py
|
# GenerateFracturePoints
# My first attempt at generating fracture points in the table
import maya.cmds as cmds
import random
import math
# Get object to be shattered (our table)
cmds.select('roundTable')
selected = cmds.ls(sl=True, transforms=True)
obj = selected[0]
print(obj)
# Get center point of collision (in our case center of table board)
comTable = (0, 5, 0) #cmds.objectCenter('tableBoard', gl=True)
numPoints = 100
fractureRadius = 2
thickness = 0.5 #cmds.getAttr('tableBoard.height')
height = 5 #cmds.getAttr('tableBoard.translateY')
voroX = []
voroY = []
voroZ = []
# Help function
def surfaceMaterial(obj, R, G, B):
name = (obj + '_shardMaterial')
if ( cmds.objExists(name) == 0 ):
cmds.shadingNode( 'lambert', asShader = True, name = name )
cmds.sets( renderable = True, noSurfaceShader = True, empty = True, name = (name + 'SG'))
cmds.connectAttr( (name + '.outColor'), (name + 'SG.surfaceShader'), force = True)
cmds.setAttr((name + '.color'), R, G, B, type = "double3")
return name
# A Normal/Gaussian distribution centered in the middle of the table gives our sample points
for i in range(numPoints):
r = random.gauss(0, fractureRadius)
theta = random.random() * 2 * math.pi
voroX.append(r * math.cos(theta))
# Translate the generated point in z-axis to simulate wooden fibers
voroZ.append(r * math.sin(theta) * 2)
voroY.append((random.random() * thickness) + height)
voroPoints = zip(voroX, voroY, voroZ)
surfaceMat = surfaceMaterial(obj, 0.5, 0.5, 1)
#print(voroPoints)
# Set up progress bar
cmds.progressWindow(title = "Voronoi Calculating", progress = 0, isInterruptable = True, maxValue = numPoints)
cmds.undoInfo(state = False)
cmds.setAttr(obj + '.visibility', 0)
step = 0
chunksGrp = cmds.group( em=True, name = str(obj) + '_chunks_1' )
# Use voronoi diagrams to create cuts
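# Each duplicate of the object is sliced by the plane through the midpoint between its
# seed point and every other seed, oriented perpendicular to the line joining them
# (the perpendicular bisector). Since df=True discards one side of each cut, what
# survives approximates the seed's Voronoi cell, and the cells together shatter the object.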
for startPoint in voroPoints:
# Update progress bar
if cmds.progressWindow(q=True, isCancelled=True ): break
if cmds.progressWindow(q=True, progress=True ) >= numPoints: break
step = step + 1
cmds.progressWindow( edit=True, progress=step, status=("Shattering step %d of %d completed..." % (step, numPoints)) )
cmds.refresh()
# Duplicate object to create splinters
workingGeom = cmds.duplicate(obj)
cmds.setAttr(str(workingGeom[0])+'.visibility', 1)
cmds.parent(workingGeom, chunksGrp)
for endPoint in voroPoints:
if startPoint != endPoint:
# Construct line segments and calculate the mid point and its normal
aimVec = [(pt1-pt2) for (pt1, pt2) in zip(startPoint, endPoint)]
centerPoint = [(pt1 + pt2)/2 for (pt1, pt2) in zip(startPoint, endPoint)]
planeAngle = cmds.angleBetween( euler=True, v1=[0,0,1], v2=aimVec )
# Cut Geometry (Bullet shatter)
cmds.polyCut(workingGeom[0], df=True, cutPlaneCenter = centerPoint, cutPlaneRotate = planeAngle)
# Applying the material to the cut faces
oriFaces = cmds.polyEvaluate(workingGeom[0], face=True)
cmds.polyCloseBorder(workingGeom[0], ch=False)
aftFaces = cmds.polyEvaluate(workingGeom[0], face=True)
newFaces = aftFaces - oriFaces
cutFaces = ( '%s.f[ %d ]' % (workingGeom[0], (aftFaces + newFaces - 1)))
cmds.sets(cutFaces, forceElement = (surfaceMat + 'SG'), e=True)
cmds.xform(workingGeom, cp=True)
print str(workingGeom)
cmds.xform(chunksGrp, cp=True)
cmds.progressWindow(endProgress=1)
cmds.undoInfo(state = True)
|
[
"adam.alsegard@gmail.com"
] |
adam.alsegard@gmail.com
|
ff54bcbdb3d753c978d4824da04afe0a6e6e7819
|
0cd461bdbe59eb80bf17483b1d20a3e6fbd52627
|
/api/migrations/0002_ourteam_user.py
|
ad293bfa74ab57113b4eb84ac21ecac9d433691a
|
[] |
no_license
|
cawirawa/SolarCarWebsiteBackend
|
f63420e81a4a02abf81c3db53b7fc7554e7c0bd2
|
c77e9badf2ec2bff20c16b4a25eff3303d78d5f0
|
refs/heads/master
| 2021-05-17T17:37:31.588220
| 2020-02-24T22:39:26
| 2020-02-24T22:39:26
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 572
|
py
|
# Generated by Django 3.0.3 on 2020-02-23 04:40
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('api', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='ourteam',
name='user',
field=models.ForeignKey(default=4, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
]
|
[
"57200820+cawirawa@users.noreply.github.com"
] |
57200820+cawirawa@users.noreply.github.com
|
ef9982dbd00c0be1ec1e801aafd4023e2f2340d2
|
a78c217242d52bb5a49c256e3f89276b3ba99b5f
|
/scripts/select_reefs.py
|
cc28aa269ea620b636c176f0f189481ef90024c1
|
[] |
no_license
|
mnksmith/oceana_MPAfishing
|
d84de3f0ba6ed0ba55f44016f99d822082181b3e
|
7bb799d5c4164d657ab20841f9483ba6194d65c9
|
refs/heads/master
| 2021-06-15T23:53:28.723502
| 2021-02-19T01:55:21
| 2021-02-19T01:55:21
| 141,143,938
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,518
|
py
|
# -*- coding: utf-8 -*-
import shapefile
import fiona
from shapely.geometry import Point, shape
import pandas as pd
from unidecode import unidecode
#select all MPAs containing reef habitats (1170)
#read in shapefile
#polygons = [pol for pol in fiona.open("/Users/mnksmith/Documents/Oceana_MPA_data/HELCOM_MPAs/HELCOM_MPAs.shp")]
#polygons = [pol for pol in fiona.open("/Users/mnksmith/Documents/Oceana_MPA_data/Natura2000_end2017_Shapefile/Natura2000_end2017_epsg3035.shp")]
#polygons = [pol for pol in fiona.open("/Users/mnksmith/Documents/Oceana_MPA_data/MPA/MPA.shp")]
#read in habitat code list
hab_data = pd.read_csv("/Users/mnksmith/Documents/Oceana_MPA_data/PublicNatura2000End2017_csv/HABITATS.csv")
#copy shapefile format
#with fiona.open("/Users/mnksmith/Documents/Oceana_MPA_data/Natura2000_end2017_Shapefile/Natura2000_end2017_epsg3035.shp", 'r') as source:
with fiona.open("natura2000_utf8.shp") as source:
polygons = [pol for pol in source]
with fiona.open('reef_habitats_temp.shp', 'w', encoding='utf-8', **source.meta) as outfile:
for i, poly in enumerate(polygons):
sitecode = poly['properties']['SITECODE']
sitetype = poly['properties']['SITETYPE']
habitatcodes = hab_data.loc[hab_data.SITECODE==sitecode]['HABITATCODE']
# print sitecode
# print habitatcodes
if '1170' in habitatcodes.values and sitetype != 'A':
poly['properties']['SITENAME'] = 'PLACEHOLDER'
outfile.write(poly)
print(str(i) + ' Reef Site Code: ' + sitecode)
if i%100==0:
print(i)
|
[
"noreply@github.com"
] |
mnksmith.noreply@github.com
|
47680cba77916a8d702cd13fc068011a0a87a7ee
|
da7200ccf10961939c69b0fb853aedfc9bf55e39
|
/ChatOnline/venv/Scripts/pip3.6-script.py
|
27b65484531086c3ef62cdd075c9425566ee6f8a
|
[] |
no_license
|
wjcml/blog-python
|
98a061d3061d1f4780693ba2d432354527e4e792
|
9a5731d5feb88c39cb4d1affa765a3536cb521da
|
refs/heads/master
| 2022-12-07T11:50:26.035921
| 2019-07-18T02:18:08
| 2019-07-18T02:18:08
| 155,163,851
| 0
| 0
| null | 2022-11-22T03:33:02
| 2018-10-29T06:39:39
|
Python
|
UTF-8
|
Python
| false
| false
| 408
|
py
|
#!C:\Users\w\Desktop\ChatOnline\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip3.6'
__requires__ = 'pip==10.0.1'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==10.0.1', 'console_scripts', 'pip3.6')()
)
|
[
"37792055+wjcml@users.noreply.github.com"
] |
37792055+wjcml@users.noreply.github.com
|
2d3afd3ed978f3045b71870dc694e6614c3ae68a
|
efefea6831ce0a81d1d531d2f668fa3915e8af05
|
/virtualenv/Scripts/django-admin.py
|
1130bc292840785ac476f52e276ba14ac8eaff05
|
[] |
no_license
|
arturlazzarini/tdd-project
|
0d28ae50b028ca86cc89e272233cda2191c8c028
|
0c720c76594830f4ff7a4030798acb7392185bc6
|
refs/heads/master
| 2023-03-21T01:11:57.875203
| 2021-03-03T18:54:40
| 2021-03-03T18:54:40
| 342,042,609
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 165
|
py
|
#!c:\users\artur\tdd-project\virtualenv\scripts\python.exe
from django.core import management
if __name__ == "__main__":
management.execute_from_command_line()
|
[
"arturlazzarini@hotmail.com"
] |
arturlazzarini@hotmail.com
|
a1912ffe7b983cce6c3ec5119d89a01a0a747635
|
fd02e8924ba325f2a62bbf97e460740a65559c74
|
/PythonStart/0722Python/循环.py
|
6e97b5c0cfd955e8823bf5ef1a968b1dc63d9ef4
|
[] |
no_license
|
ShiJingChao/Python-
|
51ee62f7f39e0d570bdd853794c028020ca2dbc2
|
26bc75c1981a1ffe1b554068c3d78455392cc7b2
|
refs/heads/master
| 2020-07-08T00:05:16.532383
| 2019-10-14T15:19:49
| 2019-10-14T15:19:49
| 203,512,684
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 199
|
py
|
# for i in range(1, 1001):
# print("第%d" % i, "次hello word", end=',')
# i=1
# while i < 100:
# print(i, end=" ")
# i += 1
a = 1100
b = 2255
print(a & b)
c = 0b100011001111
print(a&c)
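# both prints output 76: 1100 is 0b10001001100 and 0b100011001111 equals 2255 (b == c),
# so the shared bits are 64 + 8 + 4 = 76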
|
[
"1015174363@qq.com"
] |
1015174363@qq.com
|
1a56ce32cf2752a7fe134d978447571ef9758c2e
|
6fa701cdaa0d83caa0d3cbffe39b40e54bf3d386
|
/google/cloud/dialogflow/v2/dialogflow-v2-py/google/cloud/dialogflow_v2/services/entity_types/async_client.py
|
21d695ce98ae5062c7c952f41a7a0a8c5056c830
|
[
"Apache-2.0"
] |
permissive
|
oltoco/googleapis-gen
|
bf40cfad61b4217aca07068bd4922a86e3bbd2d5
|
00ca50bdde80906d6f62314ef4f7630b8cdb6e15
|
refs/heads/master
| 2023-07-17T22:11:47.848185
| 2021-08-29T20:39:47
| 2021-08-29T20:39:47
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 52,710
|
py
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import functools
import re
from typing import Dict, Sequence, Tuple, Type, Union
import pkg_resources
import google.api_core.client_options as ClientOptions # type: ignore
from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
from google.api_core import operation # type: ignore
from google.api_core import operation_async # type: ignore
from google.cloud.dialogflow_v2.services.entity_types import pagers
from google.cloud.dialogflow_v2.types import entity_type
from google.cloud.dialogflow_v2.types import entity_type as gcd_entity_type
from google.protobuf import empty_pb2 # type: ignore
from google.protobuf import struct_pb2 # type: ignore
from .transports.base import EntityTypesTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import EntityTypesGrpcAsyncIOTransport
from .client import EntityTypesClient
class EntityTypesAsyncClient:
"""Service for managing
[EntityTypes][google.cloud.dialogflow.v2.EntityType].
"""
_client: EntityTypesClient
DEFAULT_ENDPOINT = EntityTypesClient.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = EntityTypesClient.DEFAULT_MTLS_ENDPOINT
entity_type_path = staticmethod(EntityTypesClient.entity_type_path)
parse_entity_type_path = staticmethod(EntityTypesClient.parse_entity_type_path)
common_billing_account_path = staticmethod(EntityTypesClient.common_billing_account_path)
parse_common_billing_account_path = staticmethod(EntityTypesClient.parse_common_billing_account_path)
common_folder_path = staticmethod(EntityTypesClient.common_folder_path)
parse_common_folder_path = staticmethod(EntityTypesClient.parse_common_folder_path)
common_organization_path = staticmethod(EntityTypesClient.common_organization_path)
parse_common_organization_path = staticmethod(EntityTypesClient.parse_common_organization_path)
common_project_path = staticmethod(EntityTypesClient.common_project_path)
parse_common_project_path = staticmethod(EntityTypesClient.parse_common_project_path)
common_location_path = staticmethod(EntityTypesClient.common_location_path)
parse_common_location_path = staticmethod(EntityTypesClient.parse_common_location_path)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
EntityTypesAsyncClient: The constructed client.
"""
return EntityTypesClient.from_service_account_info.__func__(EntityTypesAsyncClient, info, *args, **kwargs) # type: ignore
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
EntityTypesAsyncClient: The constructed client.
"""
return EntityTypesClient.from_service_account_file.__func__(EntityTypesAsyncClient, filename, *args, **kwargs) # type: ignore
from_service_account_json = from_service_account_file
@property
def transport(self) -> EntityTypesTransport:
"""Returns the transport used by the client instance.
Returns:
EntityTypesTransport: The transport used by the client instance.
"""
return self._client.transport
get_transport_class = functools.partial(type(EntityTypesClient).get_transport_class, type(EntityTypesClient))
def __init__(self, *,
credentials: ga_credentials.Credentials = None,
transport: Union[str, EntityTypesTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the entity types client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ~.EntityTypesTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (ClientOptions): Custom options for the client. It
won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
"""
self._client = EntityTypesClient(
credentials=credentials,
transport=transport,
client_options=client_options,
client_info=client_info,
)
async def list_entity_types(self,
request: entity_type.ListEntityTypesRequest = None,
*,
parent: str = None,
language_code: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListEntityTypesAsyncPager:
r"""Returns the list of all entity types in the specified
agent.
Args:
request (:class:`google.cloud.dialogflow_v2.types.ListEntityTypesRequest`):
The request object. The request message for
[EntityTypes.ListEntityTypes][google.cloud.dialogflow.v2.EntityTypes.ListEntityTypes].
parent (:class:`str`):
Required. The agent to list all entity types from.
Format: ``projects/<Project ID>/agent``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
language_code (:class:`str`):
Optional. The language used to access language-specific
data. If not specified, the agent's default language is
used. For more information, see `Multilingual intent and
entity
data <https://cloud.google.com/dialogflow/docs/agents-multilingual#intent-entity>`__.
This corresponds to the ``language_code`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.dialogflow_v2.services.entity_types.pagers.ListEntityTypesAsyncPager:
The response message for
[EntityTypes.ListEntityTypes][google.cloud.dialogflow.v2.EntityTypes.ListEntityTypes].
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, language_code])
if request is not None and has_flattened_params:
raise ValueError("If the `request` argument is set, then none of "
"the individual field arguments should be set.")
request = entity_type.ListEntityTypesRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if language_code is not None:
request.language_code = language_code
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_entity_types,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("parent", request.parent),
)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# This method is paged; wrap the response in a pager, which provides
# an `__aiter__` convenience method.
response = pagers.ListEntityTypesAsyncPager(
method=rpc,
request=request,
response=response,
metadata=metadata,
)
# Done; return the response.
return response
async def get_entity_type(self,
request: entity_type.GetEntityTypeRequest = None,
*,
name: str = None,
language_code: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> entity_type.EntityType:
r"""Retrieves the specified entity type.
Args:
request (:class:`google.cloud.dialogflow_v2.types.GetEntityTypeRequest`):
The request object. The request message for
[EntityTypes.GetEntityType][google.cloud.dialogflow.v2.EntityTypes.GetEntityType].
name (:class:`str`):
Required. The name of the entity type. Format:
``projects/<Project ID>/agent/entityTypes/<EntityType ID>``.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
language_code (:class:`str`):
Optional. The language used to access language-specific
data. If not specified, the agent's default language is
used. For more information, see `Multilingual intent and
entity
data <https://cloud.google.com/dialogflow/docs/agents-multilingual#intent-entity>`__.
This corresponds to the ``language_code`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.dialogflow_v2.types.EntityType:
Each intent parameter has a type, called the entity type, which dictates
exactly how data from an end-user expression is
extracted.
Dialogflow provides predefined system entities that
can match many common types of data. For example,
there are system entities for matching dates, times,
colors, email addresses, and so on. You can also
create your own custom entities for matching custom
data. For example, you could define a vegetable
entity that can match the types of vegetables
available for purchase with a grocery store agent.
For more information, see the [Entity
guide](\ https://cloud.google.com/dialogflow/docs/entities-overview).
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name, language_code])
if request is not None and has_flattened_params:
raise ValueError("If the `request` argument is set, then none of "
"the individual field arguments should be set.")
request = entity_type.GetEntityTypeRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
if language_code is not None:
request.language_code = language_code
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_entity_type,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("name", request.name),
)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def create_entity_type(self,
request: gcd_entity_type.CreateEntityTypeRequest = None,
*,
parent: str = None,
entity_type: gcd_entity_type.EntityType = None,
language_code: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> gcd_entity_type.EntityType:
r"""Creates an entity type in the specified agent.
Note: You should always train an agent prior to sending it
queries. See the `training
documentation <https://cloud.google.com/dialogflow/es/docs/training>`__.
Args:
request (:class:`google.cloud.dialogflow_v2.types.CreateEntityTypeRequest`):
The request object. The request message for
[EntityTypes.CreateEntityType][google.cloud.dialogflow.v2.EntityTypes.CreateEntityType].
parent (:class:`str`):
Required. The agent to create a entity type for. Format:
``projects/<Project ID>/agent``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
entity_type (:class:`google.cloud.dialogflow_v2.types.EntityType`):
Required. The entity type to create.
This corresponds to the ``entity_type`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
language_code (:class:`str`):
Optional. The language used to access language-specific
data. If not specified, the agent's default language is
used. For more information, see `Multilingual intent and
entity
data <https://cloud.google.com/dialogflow/docs/agents-multilingual#intent-entity>`__.
This corresponds to the ``language_code`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.dialogflow_v2.types.EntityType:
Each intent parameter has a type, called the entity type, which dictates
exactly how data from an end-user expression is
extracted.
Dialogflow provides predefined system entities that
can match many common types of data. For example,
there are system entities for matching dates, times,
colors, email addresses, and so on. You can also
create your own custom entities for matching custom
data. For example, you could define a vegetable
entity that can match the types of vegetables
available for purchase with a grocery store agent.
For more information, see the [Entity
guide](\ https://cloud.google.com/dialogflow/docs/entities-overview).
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, entity_type, language_code])
if request is not None and has_flattened_params:
raise ValueError("If the `request` argument is set, then none of "
"the individual field arguments should be set.")
request = gcd_entity_type.CreateEntityTypeRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if entity_type is not None:
request.entity_type = entity_type
if language_code is not None:
request.language_code = language_code
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_entity_type,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("parent", request.parent),
)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def update_entity_type(self,
request: gcd_entity_type.UpdateEntityTypeRequest = None,
*,
entity_type: gcd_entity_type.EntityType = None,
language_code: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> gcd_entity_type.EntityType:
r"""Updates the specified entity type.
Note: You should always train an agent prior to sending it
queries. See the `training
documentation <https://cloud.google.com/dialogflow/es/docs/training>`__.
Args:
request (:class:`google.cloud.dialogflow_v2.types.UpdateEntityTypeRequest`):
The request object. The request message for
[EntityTypes.UpdateEntityType][google.cloud.dialogflow.v2.EntityTypes.UpdateEntityType].
entity_type (:class:`google.cloud.dialogflow_v2.types.EntityType`):
Required. The entity type to update.
This corresponds to the ``entity_type`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
language_code (:class:`str`):
Optional. The language used to access language-specific
data. If not specified, the agent's default language is
used. For more information, see `Multilingual intent and
entity
data <https://cloud.google.com/dialogflow/docs/agents-multilingual#intent-entity>`__.
This corresponds to the ``language_code`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.dialogflow_v2.types.EntityType:
Each intent parameter has a type, called the entity type, which dictates
exactly how data from an end-user expression is
extracted.
Dialogflow provides predefined system entities that
can match many common types of data. For example,
there are system entities for matching dates, times,
colors, email addresses, and so on. You can also
create your own custom entities for matching custom
data. For example, you could define a vegetable
entity that can match the types of vegetables
available for purchase with a grocery store agent.
For more information, see the [Entity
guide](\ https://cloud.google.com/dialogflow/docs/entities-overview).
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([entity_type, language_code])
if request is not None and has_flattened_params:
raise ValueError("If the `request` argument is set, then none of "
"the individual field arguments should be set.")
request = gcd_entity_type.UpdateEntityTypeRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if entity_type is not None:
request.entity_type = entity_type
if language_code is not None:
request.language_code = language_code
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_entity_type,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("entity_type.name", request.entity_type.name),
)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
async def delete_entity_type(self,
request: entity_type.DeleteEntityTypeRequest = None,
*,
name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> None:
r"""Deletes the specified entity type.
Note: You should always train an agent prior to sending it
queries. See the `training
documentation <https://cloud.google.com/dialogflow/es/docs/training>`__.
Args:
request (:class:`google.cloud.dialogflow_v2.types.DeleteEntityTypeRequest`):
The request object. The request message for
[EntityTypes.DeleteEntityType][google.cloud.dialogflow.v2.EntityTypes.DeleteEntityType].
name (:class:`str`):
Required. The name of the entity type to delete. Format:
``projects/<Project ID>/agent/entityTypes/<EntityType ID>``.
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([name])
if request is not None and has_flattened_params:
raise ValueError("If the `request` argument is set, then none of "
"the individual field arguments should be set.")
request = entity_type.DeleteEntityTypeRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if name is not None:
request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.delete_entity_type,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("name", request.name),
)),
)
# Send the request.
await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
async def batch_update_entity_types(self,
request: entity_type.BatchUpdateEntityTypesRequest = None,
*,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Updates/Creates multiple entity types in the specified agent.
Note: You should always train an agent prior to sending it
queries. See the `training
documentation <https://cloud.google.com/dialogflow/es/docs/training>`__.
Args:
request (:class:`google.cloud.dialogflow_v2.types.BatchUpdateEntityTypesRequest`):
The request object. The request message for
[EntityTypes.BatchUpdateEntityTypes][google.cloud.dialogflow.v2.EntityTypes.BatchUpdateEntityTypes].
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be
:class:`google.cloud.dialogflow_v2.types.BatchUpdateEntityTypesResponse`
The response message for
[EntityTypes.BatchUpdateEntityTypes][google.cloud.dialogflow.v2.EntityTypes.BatchUpdateEntityTypes].
"""
# Create or coerce a protobuf request object.
request = entity_type.BatchUpdateEntityTypesRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.batch_update_entity_types,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("parent", request.parent),
)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
entity_type.BatchUpdateEntityTypesResponse,
metadata_type=struct_pb2.Struct,
)
# Done; return the response.
return response
async def batch_delete_entity_types(self,
request: entity_type.BatchDeleteEntityTypesRequest = None,
*,
parent: str = None,
entity_type_names: Sequence[str] = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Deletes entity types in the specified agent.
Note: You should always train an agent prior to sending it
queries. See the `training
documentation <https://cloud.google.com/dialogflow/es/docs/training>`__.
Args:
request (:class:`google.cloud.dialogflow_v2.types.BatchDeleteEntityTypesRequest`):
The request object. The request message for
[EntityTypes.BatchDeleteEntityTypes][google.cloud.dialogflow.v2.EntityTypes.BatchDeleteEntityTypes].
parent (:class:`str`):
Required. The name of the agent to delete all entities
types for. Format: ``projects/<Project ID>/agent``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
entity_type_names (:class:`Sequence[str]`):
Required. The names entity types to delete. All names
must point to the same agent as ``parent``.
This corresponds to the ``entity_type_names`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
empty messages in your APIs. A typical example is to
use it as the request or the response type of an API
method. For instance:
service Foo {
rpc Bar(google.protobuf.Empty) returns
(google.protobuf.Empty);
}
The JSON representation for Empty is empty JSON
object {}.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, entity_type_names])
if request is not None and has_flattened_params:
raise ValueError("If the `request` argument is set, then none of "
"the individual field arguments should be set.")
request = entity_type.BatchDeleteEntityTypesRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if entity_type_names:
request.entity_type_names.extend(entity_type_names)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.batch_delete_entity_types,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("parent", request.parent),
)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
empty_pb2.Empty,
metadata_type=struct_pb2.Struct,
)
# Done; return the response.
return response
async def batch_create_entities(self,
request: entity_type.BatchCreateEntitiesRequest = None,
*,
parent: str = None,
entities: Sequence[entity_type.EntityType.Entity] = None,
language_code: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Creates multiple new entities in the specified entity type.
Note: You should always train an agent prior to sending it
queries. See the `training
documentation <https://cloud.google.com/dialogflow/es/docs/training>`__.
Args:
request (:class:`google.cloud.dialogflow_v2.types.BatchCreateEntitiesRequest`):
The request object. The request message for
[EntityTypes.BatchCreateEntities][google.cloud.dialogflow.v2.EntityTypes.BatchCreateEntities].
parent (:class:`str`):
Required. The name of the entity type to create entities
in. Format:
``projects/<Project ID>/agent/entityTypes/<Entity Type ID>``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
entities (:class:`Sequence[google.cloud.dialogflow_v2.types.EntityType.Entity]`):
Required. The entities to create.
This corresponds to the ``entities`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
language_code (:class:`str`):
Optional. The language used to access language-specific
data. If not specified, the agent's default language is
used. For more information, see `Multilingual intent and
entity
data <https://cloud.google.com/dialogflow/docs/agents-multilingual#intent-entity>`__.
This corresponds to the ``language_code`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
empty messages in your APIs. A typical example is to
use it as the request or the response type of an API
method. For instance:
service Foo {
rpc Bar(google.protobuf.Empty) returns
(google.protobuf.Empty);
}
The JSON representation for Empty is empty JSON
object {}.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, entities, language_code])
if request is not None and has_flattened_params:
raise ValueError("If the `request` argument is set, then none of "
"the individual field arguments should be set.")
request = entity_type.BatchCreateEntitiesRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if language_code is not None:
request.language_code = language_code
if entities:
request.entities.extend(entities)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.batch_create_entities,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("parent", request.parent),
)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
empty_pb2.Empty,
metadata_type=struct_pb2.Struct,
)
# Done; return the response.
return response
async def batch_update_entities(self,
request: entity_type.BatchUpdateEntitiesRequest = None,
*,
parent: str = None,
entities: Sequence[entity_type.EntityType.Entity] = None,
language_code: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Updates or creates multiple entities in the specified entity
type. This method does not affect entities in the entity type
that aren't explicitly specified in the request.
Note: You should always train an agent prior to sending it
queries. See the `training
documentation <https://cloud.google.com/dialogflow/es/docs/training>`__.
Args:
request (:class:`google.cloud.dialogflow_v2.types.BatchUpdateEntitiesRequest`):
The request object. The request message for
[EntityTypes.BatchUpdateEntities][google.cloud.dialogflow.v2.EntityTypes.BatchUpdateEntities].
parent (:class:`str`):
Required. The name of the entity type to update or
create entities in. Format:
``projects/<Project ID>/agent/entityTypes/<Entity Type ID>``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
entities (:class:`Sequence[google.cloud.dialogflow_v2.types.EntityType.Entity]`):
Required. The entities to update or
create.
This corresponds to the ``entities`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
language_code (:class:`str`):
Optional. The language used to access language-specific
data. If not specified, the agent's default language is
used. For more information, see `Multilingual intent and
entity
data <https://cloud.google.com/dialogflow/docs/agents-multilingual#intent-entity>`__.
This corresponds to the ``language_code`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
empty messages in your APIs. A typical example is to
use it as the request or the response type of an API
method. For instance:
service Foo {
rpc Bar(google.protobuf.Empty) returns
(google.protobuf.Empty);
}
The JSON representation for Empty is empty JSON
object {}.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, entities, language_code])
if request is not None and has_flattened_params:
raise ValueError("If the `request` argument is set, then none of "
"the individual field arguments should be set.")
request = entity_type.BatchUpdateEntitiesRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if language_code is not None:
request.language_code = language_code
if entities:
request.entities.extend(entities)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.batch_update_entities,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("parent", request.parent),
)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
empty_pb2.Empty,
metadata_type=struct_pb2.Struct,
)
# Done; return the response.
return response
async def batch_delete_entities(self,
request: entity_type.BatchDeleteEntitiesRequest = None,
*,
parent: str = None,
entity_values: Sequence[str] = None,
language_code: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> operation_async.AsyncOperation:
r"""Deletes entities in the specified entity type.
Note: You should always train an agent prior to sending it
queries. See the `training
documentation <https://cloud.google.com/dialogflow/es/docs/training>`__.
Args:
request (:class:`google.cloud.dialogflow_v2.types.BatchDeleteEntitiesRequest`):
The request object. The request message for
[EntityTypes.BatchDeleteEntities][google.cloud.dialogflow.v2.EntityTypes.BatchDeleteEntities].
parent (:class:`str`):
Required. The name of the entity type to delete entries
for. Format:
``projects/<Project ID>/agent/entityTypes/<Entity Type ID>``.
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
entity_values (:class:`Sequence[str]`):
Required. The reference ``values`` of the entities to
delete. Note that these are not fully-qualified names,
i.e. they don't start with ``projects/<Project ID>``.
This corresponds to the ``entity_values`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
language_code (:class:`str`):
Optional. The language used to access language-specific
data. If not specified, the agent's default language is
used. For more information, see `Multilingual intent and
entity
data <https://cloud.google.com/dialogflow/docs/agents-multilingual#intent-entity>`__.
This corresponds to the ``language_code`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated
empty messages in your APIs. A typical example is to
use it as the request or the response type of an API
method. For instance:
service Foo {
rpc Bar(google.protobuf.Empty) returns
(google.protobuf.Empty);
}
The JSON representation for Empty is empty JSON
object {}.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([parent, entity_values, language_code])
if request is not None and has_flattened_params:
raise ValueError("If the `request` argument is set, then none of "
"the individual field arguments should be set.")
request = entity_type.BatchDeleteEntitiesRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if parent is not None:
request.parent = parent
if language_code is not None:
request.language_code = language_code
if entity_values:
request.entity_values.extend(entity_values)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.batch_delete_entities,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
("parent", request.parent),
)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Wrap the response in an operation future.
response = operation_async.from_gapic(
response,
self._client._transport.operations_client,
empty_pb2.Empty,
metadata_type=struct_pb2.Struct,
)
# Done; return the response.
return response
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-dialogflow",
).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = (
"EntityTypesAsyncClient",
)
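# Hedged usage sketch (editor's addition, not part of the generated client
# above): one plausible way to exercise the flattened-parameter methods this
# module defines. "my-project" and the display name are placeholder values,
# the snippet assumes application-default credentials are available, and it
# only runs when the module is executed directly, never on import.
if __name__ == "__main__":  # pragma: no cover - illustrative only
    import asyncio
    async def _demo_entity_types():
        client = EntityTypesAsyncClient()
        parent = "projects/my-project/agent"
        # Create a map-kind entity type with a single entity, then read it
        # back by its resource name.
        created = await client.create_entity_type(
            parent=parent,
            entity_type=gcd_entity_type.EntityType(
                display_name="vegetable",
                kind=gcd_entity_type.EntityType.Kind.KIND_MAP,
                entities=[gcd_entity_type.EntityType.Entity(
                    value="carrot", synonyms=["carrot"])],
            ),
        )
        fetched = await client.get_entity_type(name=created.name)
        print(fetched.display_name)
    asyncio.run(_demo_entity_types())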
|
[
"bazel-bot-development[bot]@users.noreply.github.com"
] |
bazel-bot-development[bot]@users.noreply.github.com
|
45f919c1e8cc79547c2bb908554234fb0c5e53df
|
18af4a04e10ebc60d87269b95a5260af53222e4b
|
/Rest-API-master/sparks_crud/urls.py
|
2270cf0187e2e978f7f4d1666dafc96ef41dc850
|
[
"MIT"
] |
permissive
|
shantanu9999/geo-student
|
4b522dc9c450e0d297dbe38c784cea5c720fa719
|
5328e23516a4fc3aa776a78956dcb89140da7a8e
|
refs/heads/master
| 2022-12-15T01:55:49.187949
| 2020-09-10T11:58:56
| 2020-09-10T11:58:56
| 294,393,179
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 802
|
py
|
"""sparks_crud URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
path('admin/', admin.site.urls),
path('api/', include('blog.urls')),
]
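# Hedged sketch (editor's addition): one plausible shape for the included
# 'blog.urls' module referenced above. The view names are hypothetical, since
# the actual blog/urls.py is not part of this listing, so the example is kept
# as a comment rather than runnable code.
#
#   # blog/urls.py
#   from django.urls import path
#   from . import views
#
#   urlpatterns = [
#       path('posts/', views.post_list, name='post-list'),
#       path('posts/<int:pk>/', views.post_detail, name='post-detail'),
#   ]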
|
[
"33569669+shantanu9999@users.noreply.github.com"
] |
33569669+shantanu9999@users.noreply.github.com
|
1e34f027687a6904c102edd621ee52069a4e3345
|
344fb29521745762704fc9d24731068ec8b5fdc6
|
/DatabaseRepo/DatabaseAssignment.py
|
e3cb781366dc284ee3451ff6652e12f9d6c866c5
|
[] |
no_license
|
AliceHincu/FP-Assignment06-10
|
bc39105f38c9e2f4a78cb31fae2a84b40a8a72ea
|
256664327de10fcade5b5ab917747e966c8457d8
|
refs/heads/master
| 2023-07-30T05:57:12.294946
| 2021-09-19T17:22:51
| 2021-09-19T17:22:51
| 408,190,792
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,430
|
py
|
import sqlite3
from sqlite3 import Error
from domain.Assignment import Assignment
from repository.assignment_repo import AssignmentRepo
class AssignmentSqlRepo(AssignmentRepo):
def __init__(self, database_name):
super().__init__()
self._database = database_name
self._connection = self.create_connection(database_name)
self.main()
self._load_database()
def create_connection(self, datab):
""" create a database connection to the SQLite database
specified by db_file
:return: Connection object or None
"""
conn = None
try:
conn = sqlite3.connect(datab)
except Error as e:
print(e)
return conn
def create_table(self, create_table_sql):
""" create a table from the create_table_sql statement
:param create_table_sql: a CREATE TABLE statement
:return:
"""
try:
c = self._connection.cursor()
c.execute(create_table_sql)
except Error as e:
print(e)
def main(self):
sql_create_projects_table = """ CREATE TABLE IF NOT EXISTS assignments (
id text PRIMARY KEY,
description text NOT NULL,
day_deadline text NOT NULL,
month_deadline text NOT NULL
); """
# create tables
if self._connection is not None:
# create projects table
self.create_table(sql_create_projects_table)
else:
print("Error! cannot create the database connection.")
def store(self, obj):
"""
        :param obj: type: class <Assignment>
:return:
"""
super().add(obj)
obj = (obj.id, obj.description, obj.deadline[0], obj.deadline[1])
sql = '''INSERT OR REPLACE INTO assignments
(id, description, day_deadline, month_deadline) VALUES (?, ?, ?, ?);'''
current = self._connection.cursor()
current.execute(sql, obj)
self._connection.commit()
def update_assignment(self, id_old, id_new, descr, day, month):
"""
update
:return:
"""
super().update(id_old, id_new, descr, day, month)
obj = (id_new, descr, day, month, id_old)
sql = ''' UPDATE assignments
SET id = ? ,
description = ? ,
day_deadline = ? ,
month_deadline = ?
WHERE id = ?'''
current = self._connection.cursor()
current.execute(sql, obj)
self._connection.commit()
def delete(self, obj_id):
super().remove(obj_id)
sql = 'DELETE FROM assignments WHERE id = ?'
current = self._connection.cursor()
current.execute(sql, (obj_id,))
self._connection.commit()
def _load_database(self):
current = self._connection.cursor()
current.execute("SELECT * FROM assignments")
rows = current.fetchall()
for row in rows:
self._assignments.append(Assignment(row[0], row[1], [row[2], row[3]]))
if __name__ == '__main__':
    database = r"C:\sqlite\db\assignment_store.db"
repo = AssignmentSqlRepo(database)
#create_connection(database)
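    # Hedged usage sketch (editor's addition): exercise the CRUD methods
    # defined above. The Assignment constructor signature (id, description,
    # [day, month]) is inferred from _load_database, the values below are
    # placeholders, and the in-memory base AssignmentRepo is assumed to
    # accept the matching add/update/remove calls.
    repo.store(Assignment("a1", "Read chapter 3", ["15", "10"]))
    repo.update_assignment("a1", "a1", "Read chapters 3 and 4", "16", "10")
    repo.delete("a1")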
|
[
"53339016+913AliceHincu@users.noreply.github.com"
] |
53339016+913AliceHincu@users.noreply.github.com
|
a1861a296ca7e18981d23a4ef93d1ca6da824112
|
b7a80424e870d79f3be5761b06ebe14c73d1b0c4
|
/arena.py
|
1054be472c65b3516770847b695927611fa46b27
|
[] |
no_license
|
MackRoe/Superhero-Team-Dueler_Term2
|
a1bd7d9fed25d1b4f86e9cfed250b3b74058e164
|
41fc0829ed919c244d5dbcf9b9d1c7c79c9cb2d9
|
refs/heads/master
| 2020-09-13T13:45:03.504159
| 2019-12-09T22:00:44
| 2019-12-09T22:00:44
| 222,803,528
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,039
|
py
|
from hero import Hero
from ability import Ability
from armor import Armor
from weapon import Weapon
from team import Team
class Arena:
def __init__(self):
'''Instantiate properties
team_one: None
team_two: None
'''
self.team_one = []
self.team_two = []
self.team_size = 0
self.team_one_size = 0
self.team_two_size = 0
def create_ability(self):
'''Prompt for Ability information.
return Ability with values from user Input
'''
name = input("What is the ability name? ")
        # cast to int so the damage value can be used in attack calculations
        max_damage = int(input(
            "What is the max damage of the ability? "))
        return Ability(name, max_damage)
def create_weapon(self):
'''Prompt user for Weapon information
return Weapon with values from user input.
'''
name = input("What is the weapon? ")
        # cast to int so the damage value can be used in attack calculations
        max_damage = int(input(
            "What is the max damage of the weapon? "))
        return Weapon(name, max_damage)
def create_armor(self):
'''Prompt user for Armor information
return Armor with values from user input.
'''
# TODO:This method will allow a user to create a piece of armor.
# Prompt the user for the necessary information to create a new
# armor object.
# return the new armor object with values set by user.
name = input("What is the name of the armor? ")
        # cast to int so the value can be used in block calculations
        max_damage = int(input(
            "What is the max damage of the armor? "))
        return Armor(name, max_damage)
def create_hero(self):
'''Prompt user for Hero information
return Hero with values from user input.
'''
hero_name = input("Hero's name: ")
hero = Hero(hero_name)
add_item = None
while add_item != "4":
print("[1] Add ability\n[2] Add weapon\n[3] Add armor")
add_item = input("[4] Done adding items\n\nYour choice: ")
if add_item == "1":
# add an ability to the hero
added_ability = self.create_ability()
hero.add_ability(added_ability)
# print(f"_ ability added")
elif add_item == "2":
# add a weapon to the hero
added_weapon = self.create_weapon()
hero.add_weapon(added_weapon)
elif add_item == "3":
# add an armor to the hero
added_armor = self.create_armor()
hero.add_armor(added_armor)
return hero
def build_teams(self):
'''helper function for build_team_one and build_team_two'''
# 1) Prompt the user for the name of the team (√)
team_name = input("Name your team: ")
# 2) Prompt the user for the number of Heroes on the team
self.team_size = int(input("How many members are in your team? "))
# 3) Instantiate a new Team object,
# using the team name obtained from user input
built_team = Team(team_name) # changed from built_team = []
# 4) use a loop to call self.create_hero() for the number
# of heroes the user specified the team should have,
# and then add the heroes to the team.
count = 0
while self.team_size > count:
team_member = self.create_hero()
count += 1
built_team.add_hero(team_member)
return built_team, self.team_size
def build_team_one(self):
self.team_one, self.team_one_size = self.build_teams()
return self.team_one, self.team_one_size
def build_team_two(self):
self.team_two, self.team_two_size = self.build_teams()
return self.team_two, self.team_two_size
def team_battle(self):
'''Battle team_one and team_two together.'''
# TODO: This method should battle the teams together.
# Call the attack method that exists in your team objects
# for that battle functionality.
self.team_one.attack(self.team_two)
def show_stats(self):
'''Prints team statistics to terminal.'''
# TODO: This method should print out battle statistics
# including each team's average kill/death ratio.
# Required Stats:
# Show surviving heroes.
# Declare winning team
# Show both teams average kill/death ratio.
# Some help on how to achieve these tasks:
# TODO: for each team, loop through all of their heroes,
# and use the is_alive() method to check for alive heroes,
# printing their names and increasing the count if they're alive.
#
# TODO: based off of your count of alive heroes,
# you can see which team has more alive heroes, and therefore,
# declare which team is the winning team
#
# TODO for each team, calculate the total kills and deaths for each
# hero, find the average kills and deaths by dividing the totals by the
# number of heroes.
# finally, divide the average number of kills by the average number of
# deaths for each team
# show winning team
team_one_points = 0
team_two_points = 0
for hero in self.team_one.heroes:
if hero.is_alive():
team_one_points += 1
print(hero.name + " survived")
else:
print(hero.name + " was vanquished")
for hero in self.team_two.heroes:
if hero.is_alive():
team_two_points += 1
print(hero.name + " survived")
else:
print(hero.name + " was vanquished ")
if team_one_points > team_two_points:
print(self.team_one.name + " Wins")
else:
print(self.team_two.name + " Wins")
        # calculate average kills and deaths for each team: totals are summed
        # over every team member first, then divided by the team size
        team_one_kills = 0
        team_one_deaths = 0
        for hero in self.team_one.heroes:
            team_one_kills += hero.kills
            team_one_deaths += hero.deaths
        avg_kills_one = team_one_kills / self.team_one_size
        avg_deaths_one = team_one_deaths / self.team_one_size
        print(self.team_one.name + " avg kills: " + str(avg_kills_one))
        print("Team 1 kill/death ratio: ")
        print(str(avg_kills_one) + "/" + str(avg_deaths_one))
        team_two_kills = 0
        team_two_deaths = 0
        for hero in self.team_two.heroes:
            team_two_kills += hero.kills
            team_two_deaths += hero.deaths
        avg_kills_two = team_two_kills / self.team_two_size
        avg_deaths_two = team_two_deaths / self.team_two_size
        print(self.team_two.name + " avg kills: " + str(avg_kills_two))
        print("Team 2 kill/death ratio: ")
        print(str(avg_kills_two) + "/" + str(avg_deaths_two))
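# Hedged usage sketch (editor's addition): a minimal driver for the Arena
# class defined above. It only calls methods that exist in this file; team
# composition comes from the interactive prompts at runtime.
if __name__ == "__main__":
    arena = Arena()
    arena.build_team_one()
    arena.build_team_two()
    arena.team_battle()
    arena.show_stats()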
|
[
"elaine.music@students.makeschool.com"
] |
elaine.music@students.makeschool.com
|
fd7f5b88078d711bac7bdf0f27f98fde59b8178f
|
0eddf1bd369bb2120607a0e8f0ca292a9f306005
|
/tanky/tank_survival.py
|
5cf9fcb19454406ddea1fc045c49eb5f6361c255
|
[] |
no_license
|
alkareth/gitreries
|
a0347c09590201fbe51f60611191e390a7a793f3
|
e3d6a9ac55d50113f0425f30cdaff590dc2c36cf
|
refs/heads/master
| 2020-04-28T04:59:19.683899
| 2014-09-22T21:03:42
| 2014-09-22T21:03:42
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,110
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2013 NOEL-BARON Léo
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Main game file, containing the main function (mainloop)
and its invocation. Run this file to start the game.
"""
import sys
import pygame
from pygame.locals import *
from fonctions import *
from classes_jeu import Tank, Obus, Explosion
from classes_deco import MenuPrincipal
def jeu():
    # Initialization
pygame.init()
fenetre = pygame.display.set_mode((LARGEUR, HAUTEUR))
icon = pygame.transform.scale(charger_img("tankb1.png"), (32, 32))
pygame.display.set_icon(icon)
pygame.display.set_caption("Tank Survival")
pygame.mouse.set_cursor(*pygame.cursors.tri_left)
bg = pygame.Surface(fenetre.get_size())
bg.fill((255, 255, 255))
fenetre.blit(bg, (0, 0))
pygame.display.flip()
horloge = pygame.time.Clock()
menu = None
    # menu = MenuPrincipal(fenetre)  # in-game entry menu
    # Create and configure the sprite groups
tout = pygame.sprite.RenderUpdates()
tanks = pygame.sprite.Group()
obus = pygame.sprite.Group()
Tank.containers = tout, tanks
Obus.containers = tout, obus
Explosion.containers = tout
    # Create the tanks
tank1 = Tank(charger_imgs("tankb1.png", "tankb2.png"))
tank1.rect.bottomleft = fenetre.get_rect().bottomleft
tank2 = Tank(charger_imgs("tankr1.png", "tankr2.png"))
tank2.rect.topright = fenetre.get_rect().topright
tank2.direction = 0
tank2.image = pygame.transform.rotate(tank2.image, 180)
continuer = True
while continuer:
if menu:
            continuer = menu.traiter()  # AAAAAAAAAARGH! or not?
continue
for e in pygame.event.get():
if e.type == QUIT:
continuer = False
if e.type == KEYDOWN:
if e.key == K_RCTRL and not tank1.recharge:
Obus(tank1)
tank1.recharge = 50
elif e.key == K_SPACE and not tank2.recharge:
Obus(tank2)
tank2.recharge = 50
        # Clear the window and update the game objects
tout.clear(fenetre, bg)
tout.update()
# Collisions
        # print(ratio_collision(tank1.direction))  # per-frame debug output, disabled
tank1_col = pygame.sprite.spritecollide(tank1, obus, False,
collided=pygame.sprite.collide_rect_ratio(
ratio_collision(tank1.direction)))
tank2_col = pygame.sprite.spritecollide(tank2, obus, False,
collided=pygame.sprite.collide_rect_ratio(
ratio_collision(tank2.direction)))
tank_col = [(tank1, tank1_col), (tank2, tank2_col)]
for tank, l_obus in tank_col:
tank.sante -= len([o for o in l_obus if o.tank is not tank])
for o in l_obus:
if o.tank is not tank:
Explosion(o)
o.kill()
if pygame.sprite.spritecollide(tank1, pygame.sprite.GroupSingle(tank2),
False, collided=pygame.sprite.collide_circle_ratio(0.7)):
            # Ideally we would compute the collision angle here and react
            # accordingly, but oh well...
            # More simply, the handling could be split into angle ranges
            # 0 - Pi/4 - 3Pi/4 - Pi.
tank1.direction -= 180
tank2.direction -= 180
        # End of the game
if tank1.sante == 0:
print("Joueur 2 remporte la victoire !")
tank1.kill()
continuer = False
if tank2.sante == 0:
print("Joueur 1 est le meilleur !")
tank2.kill()
continuer = False
        # Tank movement
clavier = pygame.key.get_pressed()
if clavier[K_UP]:
tank1.bouger()
if clavier[K_DOWN]:
tank1.bouger(arriere=True)
sens = clavier[K_LEFT] - clavier[K_RIGHT]
if sens != 0 and (clavier[K_UP] or clavier[K_DOWN]):
tank1.tourner(sens)
if clavier[K_w]:
tank2.bouger()
if clavier[K_s]:
tank2.bouger(arriere=True)
sens = clavier[K_a] - clavier[K_d]
if sens != 0 and (clavier[K_w] or clavier[K_s]):
tank2.tourner(sens)
tout.draw(fenetre)
pygame.display.flip()
horloge.tick(40)
if __name__ == "__main__":
jeu()
pygame.quit()
sys.exit(0)
|
[
"leo.noel-baron@ens-rennes.fr"
] |
leo.noel-baron@ens-rennes.fr
|
37cb554a74802779f0c4c92af616b37f2c942da0
|
7dfef2c31feb94075879ad93532706c214829618
|
/main.py
|
78b03b2de711bb7a159a25a0279ed3dfe7a73861
|
[] |
no_license
|
Badruzaman/HousePricePrediction
|
78ad72157dbe045416b25e2e078ec2ff0365e076
|
a250bb4e0ff5107380740a4f15da7060333dd1ef
|
refs/heads/master
| 2023-04-14T10:04:24.784211
| 2021-04-10T05:46:22
| 2021-04-10T05:46:22
| 354,268,810
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 861
|
py
|
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
from sklearn.datasets import load_boston
USAhousing = pd.read_csv('USA_Housing.csv')
X = USAhousing[['Avg. Area Income', 'Avg. Area House Age', 'Avg. Area Number of Rooms',
'Avg. Area Number of Bedrooms', 'Area Population']]
y = USAhousing['Price']
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.4, random_state=101)
lm = LinearRegression()
lm.fit(X_train,y_train)
print(lm.intercept_)
coeff_df = pd.DataFrame(lm.coef_,X.columns,columns=['Coefficient'])
print(coeff_df)
#boston = load_boston()
#print(boston.DESCR)
#boston_df = boston.data
predictions = lm.predict(X_test)
print(predictions)
plt.scatter(y_test,predictions)
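# Hedged addition (editor's note): quantify the fit shown in the scatter plot
# above with standard sklearn regression metrics, and call plt.show() so the
# figure is actually displayed when this file is run as a script.
from sklearn import metrics
print('MAE:', metrics.mean_absolute_error(y_test, predictions))
print('MSE:', metrics.mean_squared_error(y_test, predictions))
print('R2:', metrics.r2_score(y_test, predictions))
plt.show()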
|
[
"badru.cse@gmail.com"
] |
badru.cse@gmail.com
|
5886a4377415980cdb4a50842f1be1637e4b2dc2
|
0c94acc3b18504d43072b1331d638ee97b80b7be
|
/cite/objects/account.py
|
ec3392565bd653ae4d0be04abb9624018848ce84
|
[] |
no_license
|
fairay/CourseDataBase
|
b0b742e6a348b169216fed38efc1471c08c784ce
|
d005e681d981d09567dd8ee7774808bba3120d04
|
refs/heads/main
| 2023-05-27T11:45:30.809962
| 2021-06-10T14:55:00
| 2021-06-10T14:55:00
| 336,570,129
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,444
|
py
|
from .base_object import *
class Account(BaseObj):
_login = None
_salt = None
_hashed_password = None
_pers_type = None
def __init__(self, **init_dict):
super(Account, self).__init__()
        # with **kwargs the dict is never None, so test for emptiness instead
        if not init_dict:
            return
self._login = init_dict['login']
self._pers_type = init_dict['perstype']
self._salt = init_dict['salt']
self._hashed_password = init_dict['hashedpassword']
# def __str__(self):
# return 'Login:%30s\tType:%8s\tSalt + Password:%20s %20s' % \
# (self._login, self._pers_type, self._salt, self._hashed_password)
def to_dict(self) -> dict:
return {'login': self._login, 'perstype': self._pers_type,
'salt': self._salt, 'hashedpassword': self._hashed_password}
def get_login(self): return self._login
def get_salt(self): return self._salt
def get_hashed_password(self): return self._hashed_password
def get_pers_type(self): return self._pers_type
def set_login(self, val): self._login = val
def set_salt(self, val): self._salt = val
def set_hashed_password(self, val): self._hashed_password = val
def set_pers_type(self, val): self._pers_type = val
login = property(get_login, set_login)
salt = property(get_salt, set_salt)
hashed_password = property(get_hashed_password, set_hashed_password)
pers_type = property(get_pers_type, set_pers_type)
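# Hedged usage sketch (editor's addition): how this class is meant to be used,
# shown as a comment because the module relies on a relative import and is not
# intended to be run directly. The field values and the import path (taken
# from the file's location, cite/objects/account.py) are placeholders.
#
#   from cite.objects.account import Account
#   acc = Account(login='driver01', perstype='driver',
#                 salt='abc123', hashedpassword='deadbeef')
#   assert acc.to_dict()['login'] == 'driver01'
#   acc.pers_type = 'admin'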
|
[
"48217178+fairay@users.noreply.github.com"
] |
48217178+fairay@users.noreply.github.com
|
99a1d8cfb3c20886f05f8dc1081afa36a59830e4
|
7f82582c13369fcfc1920f21006dde9a96580147
|
/P2/src/p2_pathfinder.py
|
1b3ad63ff764d9cf4de24ef1ad450f88cf8dc0a4
|
[] |
no_license
|
nnakano55/cmps146
|
868769a3055b571fc52741b002edf00de77b94ef
|
23fc1075129e9199c037c63e1dfb0c294c23373b
|
refs/heads/master
| 2020-03-30T10:54:34.935750
| 2018-11-03T00:59:25
| 2018-11-03T00:59:25
| 151,142,762
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 10,106
|
py
|
"""
P2 Navmesh Pathfinding
p2_pathfinder.py
Programmers:
Xuya Gao
Noriaki Nakano
"""
from heapq import heappop, heappush
def find_path (source_point, destination_point, mesh):
"""
Searches for a path from source_point to destination_point through the mesh
Args:
source_point: starting point of the pathfinder
destination_point: the ultimate goal the pathfinder must reach
mesh: pathway constraints the path adheres to
Returns:
        A path (list of points) from source_point to destination_point if one exists
A list of boxes explored by the algorithm
"""
# the path and boxes to be returned
path = []
boxes = {}
    # check whether the source and destination points are valid
indicateD = 0
indicateS = 0
# The dictionary that will be returned with the costs
distances = {}
distances_BACK = {}
    # The dictionary that will store the boxes
backboxes = {}
backboxes_BACK = {}
# the queue for the bidirectional A* search
queue=[]
queue_BACK = []
# The dictionary that will store the backpointers
backpointers = {}
backpointers_BACK = {}
for i in mesh["boxes"]:
if i[0]<=source_point[0] and source_point[0]<=i[1]:
if i[2]<=source_point[1] and source_point[1]<=i[3]:
# The priority queue
heappush(queue,(0, destination_point, i))
distances[i] = 0
backboxes[i] = None
backpointers[i] = (source_point)
indicateS = 1
if i[0]<=destination_point[0] and destination_point[0]<=i[1]:
if i[2]<=destination_point[1] and destination_point[1]<=i[3]:
heappush(queue,(0, source_point, i))
distances_BACK[i] = 0
backboxes_BACK[i] = None
backpointers_BACK[i] = (destination_point)
indicateD = 1
if indicateD == 0 or indicateS == 0:
print("EXCEPTION! Source or Destination ERROR!")
return path,boxes.keys()
while queue:
current_dist_P, goal, current_boxes = heappop(queue)
# Check if current node is the destination_point
if current_boxes in backboxes_BACK.keys() and goal == destination_point:
tupMeet_S = (backpointers[current_boxes], backpointers_BACK[current_boxes])
path.append(tupMeet_S)
boxes[current_boxes] = 0
# Go backwards from destination_point until the source using backpointers
# and add all the nodes in the shortest path into a list
current_back_boxes = current_boxes
while backboxes[current_back_boxes] is not None:
boxes[backboxes[current_back_boxes]] = 0
tup1 = ((backpointers[current_back_boxes], backpointers[backboxes[current_back_boxes]]))
path.append(tup1)
current_back_boxes = backboxes[current_back_boxes]
current_back_boxes_Back = current_boxes
while backboxes_BACK[current_back_boxes_Back] is not None:
boxes[backboxes_BACK[current_back_boxes_Back]] = 0
tup2 = ((backpointers_BACK[current_back_boxes_Back], backpointers_BACK[backboxes_BACK[current_back_boxes_Back]] ))
path.append(tup2)
current_back_boxes_Back = backboxes_BACK[current_back_boxes_Back]
return path, boxes.keys()
if current_boxes in backboxes.keys()and goal == source_point:
tupMeet_D = (backpointers[current_boxes], backpointers_BACK[current_boxes])
path.append(tupMeet_D)
boxes[current_boxes] = 0
# Go backwards from destination_point until the source using backpointers
# and add all the nodes in the shortest path into a list
current_back_boxes = current_boxes
while backboxes[current_back_boxes] is not None:
boxes[backboxes[current_back_boxes]] = 0
tup1 = ((backpointers[current_back_boxes], backpointers[backboxes[current_back_boxes]]))
path.append(tup1)
current_back_boxes = backboxes[current_back_boxes]
current_back_boxes_Back = current_boxes
while backboxes_BACK[current_back_boxes_Back] is not None:
boxes[backboxes_BACK[current_back_boxes_Back]] = 0
tup2 = ((backpointers_BACK[current_back_boxes_Back], backpointers_BACK[backboxes_BACK[current_back_boxes_Back]] ))
path.append(tup2)
current_back_boxes_Back = backboxes_BACK[current_back_boxes_Back]
return path, boxes.keys()
# Calculate cost from current note to all the adjacent ones
if goal == destination_point:
for adj_node_boxes in mesh["adj"][current_boxes]:
x1 = adj_node_boxes[2]
x2 = adj_node_boxes[3]
y1 = adj_node_boxes[0]
y2 = adj_node_boxes[1]
if y1 == current_boxes[1]:
if(x1 < current_boxes[2]):
x1 = current_boxes[2]
if(x2 > current_boxes[3]):
x2 = current_boxes[3]
adj_node_x = max(x1,min(x2,backpointers[current_boxes][1]))
adj_node = (y1, adj_node_x)
elif y2 == current_boxes[0]:
if(x1 < current_boxes[2]):
x1 = current_boxes[2]
if(x2 > current_boxes[3]):
x2 = current_boxes[3]
adj_node_x = max(x1,min(x2,backpointers[current_boxes][1]))
adj_node = (y2, adj_node_x)
elif x1 == current_boxes[3]:
if (y1 < current_boxes[0]):
y1 = current_boxes[0]
if (y2 > current_boxes[1]):
y2 = current_boxes[1]
adj_node_y = max(y1,min(y2,backpointers[current_boxes][0]))
adj_node = (adj_node_y, x1)
elif x2 == current_boxes[2]:
if (y1 < current_boxes[0]):
y1 = current_boxes[0]
if (y2 > current_boxes[1]):
y2 = current_boxes[1]
adj_node_y = max(y1,min(y2,backpointers[current_boxes][0]))
adj_node = (adj_node_y, x2)
estCost = abs(adj_node[0]-destination_point[0])+abs(adj_node[1]-destination_point[1])
pathcost = (pow((adj_node[0]-backpointers[current_boxes][0]),2)+pow((adj_node[1]-backpointers[current_boxes][1]),2))**0.5+distances[current_boxes]
# If the cost is new
if adj_node_boxes not in distances.keys() or pathcost < distances[adj_node_boxes]:
distances[adj_node_boxes] = pathcost
backpointers[adj_node_boxes] = adj_node
backboxes[adj_node_boxes] = current_boxes
heappush(queue, (estCost+pathcost, goal, adj_node_boxes))
if goal == source_point:
for adj_node_boxes_BACK in mesh["adj"][current_boxes]:
x1_BACK = adj_node_boxes_BACK[2]
x2_BACK = adj_node_boxes_BACK[3]
y1_BACK = adj_node_boxes_BACK[0]
y2_BACK = adj_node_boxes_BACK[1]
if y1_BACK == current_boxes[1]:
if(x1_BACK < current_boxes[2]):
x1_BACK = current_boxes[2]
if(x2_BACK > current_boxes[3]):
x2_BACK = current_boxes[3]
adj_node_x_BACK = max(x1_BACK,min(x2_BACK,backpointers_BACK[current_boxes][1]))
adj_node_BACK = (y1_BACK, adj_node_x_BACK)
elif y2_BACK == current_boxes[0]:
if(x1_BACK < current_boxes[2]):
x1_BACK = current_boxes[2]
if(x2_BACK > current_boxes[3]):
x2_BACK = current_boxes[3]
adj_node_x_BACK = max(x1_BACK,min(x2_BACK,backpointers_BACK[current_boxes][1]))
adj_node_BACK = (y2_BACK, adj_node_x_BACK)
elif x1_BACK == current_boxes[3]:
if (y1_BACK < current_boxes[0]):
y1_BACK = current_boxes[0]
if (y2_BACK > current_boxes[1]):
y2_BACK = current_boxes[1]
adj_node_y_BACK = max(y1_BACK,min(y2_BACK,backpointers_BACK[current_boxes][0]))
adj_node_BACK = (adj_node_y_BACK, x1_BACK)
elif x2_BACK == current_boxes[2]:
if (y1_BACK < current_boxes[0]):
y1_BACK = current_boxes[0]
if (y2_BACK > current_boxes[1]):
y2_BACK = current_boxes[1]
adj_node_y_BACK = max(y1_BACK,min(y2_BACK,backpointers_BACK[current_boxes][0]))
adj_node_BACK = (adj_node_y_BACK, x2_BACK)
estCost_BACK = abs(adj_node_BACK[0]-source_point[0])+abs(adj_node_BACK[1]-source_point[1])
pathcost_BACK = (pow((adj_node_BACK[0]-backpointers_BACK[current_boxes][0]),2)+pow((adj_node_BACK[1]-backpointers_BACK[current_boxes][1]),2))**0.5+distances_BACK[current_boxes]
# If the cost is new
if adj_node_boxes_BACK not in distances_BACK.keys() or pathcost_BACK < distances_BACK[adj_node_boxes_BACK]:
distances_BACK[adj_node_boxes_BACK] = pathcost_BACK
backpointers_BACK[adj_node_boxes_BACK] = adj_node_BACK
backboxes_BACK[adj_node_boxes_BACK] = current_boxes
heappush(queue, (estCost_BACK+pathcost_BACK, goal, adj_node_boxes_BACK))
    print("Exception! No path exists!")
return path, boxes.keys()
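# Hedged usage sketch (editor's addition): the mesh layout is inferred from how
# find_path indexes it above, i.e. each box is (min0, max0, min1, max1) in the
# same coordinate order as the points, and mesh["adj"] maps each box to its
# neighbouring boxes. The two-box mesh below is a made-up toy example.
if __name__ == '__main__':
    box_a = (0, 10, 0, 10)
    box_b = (10, 20, 0, 10)
    toy_mesh = {
        "boxes": [box_a, box_b],
        "adj": {box_a: [box_b], box_b: [box_a]},
    }
    toy_path, explored = find_path((2, 5), (18, 5), toy_mesh)
    print(toy_path)
    print(list(explored))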
|
[
"nnakano@ucsc.edu"
] |
nnakano@ucsc.edu
|
a2cfd5e483d4082c18fb3e4fd15d7a66f8e7946c
|
292c8f912492e97ecb852437bba4e7294833f514
|
/Figures/fig_scatterplot_task_score_egoallo.py
|
e9a37c438e6457eedfb5b61243ab8b3920f48715
|
[] |
no_license
|
CornuA1/lntmodel_final
|
e47971ce3d36bd6ef8a0b1f125c706663752c307
|
1dd732ae86c1f8680fbf0f6d8e1c0ec3a7fd22cb
|
refs/heads/main
| 2023-07-11T13:23:09.607029
| 2021-08-13T16:15:00
| 2021-08-13T16:15:00
| 395,445,252
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 10,410
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Jan 21 13:36:14 2021
@author: lukasfischer
"""
import csv, os, yaml, warnings
import numpy as np
import scipy as sp
from scipy.io import loadmat
from scipy import stats
import matplotlib.pyplot as plt
plt.rcParams['svg.fonttype'] = 'none'
plt.rcParams['xtick.bottom'] = True
plt.rcParams['ytick.left'] = True
import seaborn as sns
sns.set_style("white")
warnings.filterwarnings('ignore')
# load yaml file with local filepaths
with open('..' + os.sep + 'loc_settings.yaml', 'r') as f:
    loc_info = yaml.load(f, Loader=yaml.FullLoader)  # explicit Loader required by newer PyYAML
def make_folder(out_folder):
if not os.path.exists(out_folder):
os.makedirs(out_folder)
fname = "total_analysis"
TRIAL_THRESHOLD = 0
file_path = loc_info["raw_dir"] + "figure_sample_data" + os.sep + fname + ".mat"
data = sp.io.loadmat(file_path)
naive = [('LF191022_1','20191115'),('LF191022_3','20191113'),('LF191023_blue','20191119'),('LF191022_2','20191116'),('LF191023_blank','20191114'),('LF191024_1','20191114')]
# expert = [('LF191022_1','20191209'),('LF191022_3','20191207'),('LF191023_blue','20191208'),('LF191022_2','20191210'),('LF191023_blank','20191210'),('LF191024_1','20191210')]
# expert = [('LF191022_1','20191204'),('LF191022_2','20191210'),('LF191022_3','20191207'),('LF191023_blank','20191206'),('LF191023_blue','20191204'),('LF191024_1','20191204')]
expert = [('LF191022_1','20191209'),('LF191022_2','20191210'),('LF191022_3','20191210'),('LF191023_blank','20191210'),('LF191023_blue','20191210'),('LF191024_1','20191210')]
all_sessions = [('LF191022_1','20191114'),
('LF191022_1','20191115'),
('LF191022_1','20191121'),
('LF191022_1','20191125'),
('LF191022_1','20191204'),
('LF191022_1','20191207'),
('LF191022_1','20191209'),
# ('LF191022_1','20191211'),
# ('LF191022_1','20191213'),
# ('LF191022_1','20191215'),
# ('LF191022_1','20191217'),
('LF191022_2','20191114'),
('LF191022_2','20191116'),
('LF191022_2','20191121'),
('LF191022_2','20191204'),
('LF191022_2','20191206'),
('LF191022_2','20191208'),
('LF191022_2','20191210'),
# ('LF191022_2','20191212'),
# ('LF191022_2','20191216'),
('LF191022_3','20191113'),
('LF191022_3','20191114'),
('LF191022_3','20191119'),
('LF191022_3','20191121'),
('LF191022_3','20191204'),
('LF191022_3','20191207'),
('LF191022_3','20191210'),
# ('LF191022_3','20191211'),
# ('LF191022_3','20191215'),
# ('LF191022_3','20191217'),
('LF191023_blank','20191114'),
('LF191023_blank','20191116'),
('LF191023_blank','20191121'),
('LF191023_blank','20191206'),
('LF191023_blank','20191208'),
('LF191023_blank','20191210'),
# ('LF191023_blank','20191212'),
# ('LF191023_blank','20191213'),
# ('LF191023_blank','20191216'),
# ('LF191023_blank','20191217'),
('LF191023_blue','20191113'),
('LF191023_blue','20191114'),
('LF191023_blue','20191119'),
('LF191023_blue','20191121'),
('LF191023_blue','20191125'),
('LF191023_blue','20191204'),
('LF191023_blue','20191206'),
('LF191023_blue','20191208'),
('LF191023_blue','20191210'),
# ('LF191023_blue','20191212'),
# ('LF191023_blue','20191215'),
# ('LF191023_blue','20191217'),
('LF191024_1','20191114'),
('LF191024_1','20191115'),
('LF191024_1','20191121'),
('LF191024_1','20191204'),
('LF191024_1','20191207'),
('LF191024_1','20191210')
]
print("------ NAIVE --------")
tscore_naive = []
egoallo_naive = []
ntrials_naive = []
for animal,session in naive:
print(animal,session,data[animal + '_' + session])
if data[animal + '_' + session][0][1] > TRIAL_THRESHOLD:
tscore_naive.append(data[animal + '_' + session][0][0])
egoallo_naive.append(data[animal + '_' + session][0][2])
ntrials_naive.append(data[animal + '_' + session][0][1])
print("------ EXPERT --------")
tscore_expert = []
egoallo_expert = []
ntrials_expert = []
for animal,session in expert:
print(animal,session, data[animal + '_' + session])
if data[animal + '_' + session][0][1] > TRIAL_THRESHOLD:
tscore_expert.append(data[animal + '_' + session][0][0])
egoallo_expert.append(data[animal + '_' + session][0][2])
ntrials_expert.append(data[animal + '_' + session][0][1])
print("------ ALL --------")
tscore_all = []
egoallo_all = []
n_trials = []
for animal,session in all_sessions:
print(animal,session, data[animal + '_' + session])
if data[animal + '_' + session][0][1] > TRIAL_THRESHOLD:
tscore_all.append(data[animal + '_' + session][0][0])
egoallo_all.append(data[animal + '_' + session][0][2])
n_trials.append(data[animal + '_' + session][0][1])
fig = plt.figure(figsize=(2.5,20))
(ax,ax2,ax3,ax4) = fig.subplots(4,1)
n_animals_naive = len(tscore_naive)
n_animals_expert = len(tscore_expert)
# ax.scatter(np.zeros((n_animals_naive,1)), egoallo_naive, c='0.5')
# ax.scatter(np.ones((n_animals_expert,1)), egoallo_expert, c='0.5')
_,p_ttest = sp.stats.ttest_rel(egoallo_naive,egoallo_expert)
ax.set_xlim([-0.2,2])
ax.set_ylim([0.4,1])
if n_animals_naive == n_animals_expert:
for i in range(n_animals_naive):
ax.plot([0,1], [egoallo_naive[i], egoallo_expert[i]], c='0.7', marker='o', lw=2)
# ax.plot([0,1], [np.mean(egoallo_naive), np.mean(egoallo_expert)], c='k')
ax.plot([0,1], [np.mean(egoallo_naive), np.mean(egoallo_expert)], marker='s', markersize=10, c='k', lw=3)
ax.set_xlim([-0.2,1.2])
ax.set_ylim([0,1])
ax2.scatter(tscore_naive, egoallo_naive, color='0.7')
ax2.scatter(tscore_expert, egoallo_expert, color='0.7')
ax2.set_ylim([0.4,1])
ax2.set_xticks([-30,0,30,60,90])
ax2.set_xticklabels(['-30','0','30','60','90'])
corr_ne,p_ne = sp.stats.spearmanr(np.hstack((tscore_naive,tscore_expert)), np.hstack((egoallo_naive,egoallo_expert)))
ax2.set_ylim([0,1])
ax2.set_xlim([-45,75])
# fit a linear regression
res = stats.linregress(x=np.concatenate((tscore_naive, tscore_expert)), y=np.concatenate((egoallo_naive, egoallo_expert)))
ax2.plot(np.concatenate((tscore_naive, tscore_expert)), res.intercept + res.slope*np.concatenate((tscore_naive, tscore_expert)), 'r', label='fitted line', lw=2)
ax3.scatter(tscore_all, egoallo_all, c='0.7')
ax3.set_ylim([0.3,1])
corr,p = sp.stats.spearmanr(tscore_all, egoallo_all)
ax3.set_ylim([0.3,1])
ax3.set_xticks([-30,0,30,60,90])
ax3.set_xticklabels(['-30','0','30','60','90'])
ax3.set_ylim([0,1])
ax3.set_xlim([-45,105])
res = stats.linregress(x=tscore_all, y=egoallo_all)
ax3.plot(tscore_all, res.intercept + np.multiply(res.slope,tscore_all), 'r', label='fitted line', lw=2)
ax4.scatter(n_trials, egoallo_all)
corr_nt,p_nt = sp.stats.spearmanr(n_trials, egoallo_all)
sns.despine(top=True, right=True, left=False, bottom=False)
ax.tick_params(left='on',bottom='on',direction='out')
ax2.tick_params(left='on',bottom='on',direction='out')
ax3.tick_params(left='on',bottom='on',direction='out')
print('------ Tscores ------')
print(np.mean(tscore_naive), sp.stats.sem(tscore_naive))
print(np.mean(tscore_expert), sp.stats.sem(tscore_expert))
print("---------------------")
print("------ STATS --------")
print("naive vs. expert: " + str(sp.stats.ttest_ind(egoallo_naive, egoallo_expert)))
print("naive vs. expert Spearman: " + str(sp.stats.spearmanr(egoallo_naive, egoallo_expert)))
print("All Spearman: " + str(sp.stats.spearmanr(tscore_all, egoallo_all)))
print("---------------------")
fig.savefig("C:\\Users\\lfisc\\Work\\Projects\\Lntmodel\\manuscript\\Figure 1\\egoallo_scatterplot.svg", format='svg')
print("saved" + "C:\\Users\\lfisc\\Work\\Projects\\Lntmodel\\manuscript\\Figure 1\\egoallo_scatterplot.svg" + "_fig.svg")
all_r_naive = np.empty((0,))
mean_r_naive = np.empty((0,))
for na in naive:
r2_data = loadmat("C:\\Users\\lfisc\\Work\\Projects\\Lntmodel\\data_2p\\dataset" + os.sep + na[0] + '_' + na[1] + '_r2_data.mat')
all_r_naive = np.hstack((all_r_naive,r2_data['data'][0]))
mean_r_naive = np.hstack((mean_r_naive,np.mean(r2_data['data'])))
all_r_expert = np.empty((0,))
mean_r_expert = np.empty((0,))
for na in expert:
r2_data = loadmat("C:\\Users\\lfisc\\Work\\Projects\\Lntmodel\\data_2p\\dataset" + os.sep + na[0] + '_' + na[1] + '_r2_data.mat')
all_r_expert = np.hstack((all_r_expert,r2_data['data'][0]))
mean_r_expert = np.hstack((mean_r_expert,np.mean(r2_data['data'])))
all_r_allsess = np.empty((0,))
mean_r_allsess = np.empty((0,))
for na in all_sessions:
r2_data = loadmat("C:\\Users\\lfisc\\Work\\Projects\\Lntmodel\\data_2p\\dataset" + os.sep + na[0] + '_' + na[1] + '_r2_data.mat')
all_r_allsess = np.hstack((all_r_allsess,r2_data['data'][0]))
mean_r_allsess = np.hstack((mean_r_allsess,np.mean(r2_data['data'])))
fig = plt.figure(figsize=(15,10))
ax1 = fig.add_subplot(2,3,1)
ax2 = fig.add_subplot(2,3,4)
ax3 = fig.add_subplot(2,3,2)
ax4 = fig.add_subplot(2,3,5)
ax5 = fig.add_subplot(2,3,3)
ax6 = fig.add_subplot(2,3,6)
ax1.hist(all_r_naive)
ax2.hist(mean_r_naive)
ax1.set_xlabel('naive r2 distribution')
ax3.hist(all_r_expert)
ax4.hist(mean_r_expert)
ax3.set_xlabel('expert r2 distribution')
ax5.hist(all_r_allsess, bins=100)
ax6.hist(mean_r_allsess, color='r')
ax5.set_xlabel('allsess r2 distribution')
ax6.set_ylim([0,14])
sns.despine(ax=ax1, right=True, top=True)
ax1.tick_params(left='on',bottom='on',direction='out')
sns.despine(ax=ax2, right=True, top=True)
ax2.tick_params(left='on',bottom='on',direction='out')
sns.despine(ax=ax3, right=True, top=True)
ax3.tick_params(left='on',bottom='on',direction='out')
sns.despine(ax=ax4, right=True, top=True)
ax4.tick_params(left='on',bottom='on',direction='out')
sns.despine(ax=ax5, right=True, top=True)
ax5.tick_params(left='on',bottom='on',direction='out')
sns.despine(ax=ax6, right=True, top=True)
ax6.tick_params(left='on',bottom='on',direction='out')
plt.tight_layout()
ax6.set_xlabel('Deviance explained')
ax6.set_ylabel('Sesssion count')
make_folder("C:\\Users\\lfisc\\Work\\Projects\\Lntmodel\\manuscript\\Figure 1\\")
fname = "C:\\Users\\lfisc\\Work\\Projects\\Lntmodel\\manuscript\\Figure 1\\r2_distributions.svg"
fig.savefig(fname, format='svg')
print("saved " + fname)
|
[
"l.fischer@protonmail.com"
] |
l.fischer@protonmail.com
|
c7e32b7956006589585393f647556ed9c81dfb10
|
7f25740b1ef47edc24db1a3618b399959b073fe1
|
/1105_08_closer.py
|
60b915e8ee1fc7296456e8dbffab48f45dbbce39
|
[] |
no_license
|
pjh9362/PyProject
|
b2d0aa5f8cfbf2abbd16232f2b55859be50446dc
|
076d31e0055999c1f60767a9d60e122fb1fc913e
|
refs/heads/main
| 2023-01-09T12:12:06.913295
| 2020-11-07T15:32:03
| 2020-11-07T15:32:03
| 306,814,117
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 335
|
py
|
'''
x = 10  # global variable
def foo():
    print(x)  # prints the global variable
foo()
print(x)  # prints the global variable
'''
def foo():
    x = 10  # foo's local variable
    print(x)  # prints foo's local variable
foo()
print(x)  # error: foo's local variable cannot be printed outside foo
|
[
"pjh9362@gmail.com"
] |
pjh9362@gmail.com
|
c4a90a7640739540ed89ba40691e097d9a60fdc8
|
c16ac731e1f9a940873dd6e5ee397ffec184a07e
|
/DataPull_WorldBank.py
|
5aaa13e410782e11f356ecdfd6ddcbacb7e44e2c
|
[] |
no_license
|
athornbe/UCDPA_AlanThornberry
|
430caf5e2f3a0e6e059f9e510e1be790d997c294
|
f9454ce2df87f712b49fd65a9ca26da3d5646954
|
refs/heads/master
| 2023-09-03T07:59:46.330988
| 2021-11-21T08:25:56
| 2021-11-21T08:25:56
| 428,390,169
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,249
|
py
|
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import quandl
# Change matplotlib parameters to ensure column names don't get truncated in the box plot
from matplotlib import rcParams
rcParams.update({'figure.autolayout': True})
#Read data from data.nasdaq.com via an API
# set quandl API key as provided by the web site
quandl.ApiConfig.api_key = "1wrL_b1e1tDUP57ekYP8"
# get the world bank data for Ireland
q_data = quandl.get_table('WB/DATA?country_code=IRL', paginate=True)
# get the meta data for the world bank data above. Provides more infomatin on each parameter
q_meta_data = quandl.get_table('WB/METADATA', paginate=True)
#Just use the required columns from the meta data table
meta_data = q_meta_data[["series_id", "name"]]
#Merge the data with the meta data
merged_data = q_data.merge(meta_data, on="series_id").round(3)
merged_data.to_csv("merged_data.csv")
#filter the data to the years 1990 to 2015 inclusive
#Ireland_1990 = merged_data[np.logical_and(merged_data["year"]>= 1990, merged_data["year"]<= 2015)]
Ireland_1990 = merged_data.query('year >= 1990 & year <= 2015')
#Dropping these columns as they don't add any useful information
Ireland_1990_clean = Ireland_1990.drop(labels=["country_code","country_name"], axis=1, inplace=False)
#The data set contains a large number of different metrics. This creates a list of the metrics to keep from the data set
rows_to_keep = ["ST.INT.DPRT", "SP.POP.TOTL", "SP.POP.65UP.TO", "SP.DYN.LE00.IN",
"SL.UEM.TOTL.NE.ZS","SL.TLF.TOTL.IN","SL.SRV.EMPL.ZS",
"SL.GDP.PCAP.EM.KD", "SH.MED.BEDS.ZS", "NY.GNP.PCAP.PP.CD",
"IT.CEL.SETS.P2","GC.TAX.TOTL.CN","EN.ATM.GHGT.KT.CE",
"AG.CON.FERT.ZS"
]
Ireland_1990_final= Ireland_1990_clean[Ireland_1990_clean["series_id"].isin(rows_to_keep)]
#Adding a column with more informative names for each metric
rename_values = {"ST.INT.DPRT":"Number Tourist Departures",
"SP.POP.TOTL":"Total Population",
"SP.POP.65UP.TO":"Total Population Over 65",
"SP.DYN.LE00.IN":"Life Expectancy at Birth",
"SL.UEM.TOTL.NE.ZS":"Unemployment as % of Work Force",
"SL.TLF.TOTL.IN":"Total Workforce",
"SL.SRV.EMPL.ZS":"% Workforce in Services Ind",
"SL.GDP.PCAP.EM.KD":"GDP per person employed",
"SH.MED.BEDS.ZS":"Hospital Beds per 1,000 people",
"NY.GNP.PCAP.PP.CD":"GNI per capita",
"IT.CEL.SETS.P2":"Mobile Subscriptions per 100 people",
"GC.TAX.TOTL.CN":"Total Tax revenue",
"EN.ATM.GHGT.KT.CE":"Greenhouse Gas (kt of CO2 equiv)",
"AG.CON.FERT.ZS" :"Fertilizer consumption (kg/hectare)"
}
Ireland_1990_final["Parameteric"] = Ireland_1990_final["series_id"].map(rename_values)
#Ireland_1990_final.to_csv("Ireland_1990_final.csv")
group_Ireland_Parametric= Ireland_1990_final.groupby("Parameteric")["value"].agg(['min', 'max', 'mean', 'std']).round(3)
Parametric_count= Ireland_1990_final.groupby("Parameteric")["value"].count()
#Pivot the data to make it easier to read and save the table as .csv
pivot_Ireland_EachParametricOverTime= Ireland_1990_final.pivot_table(values="value", index="year", columns="Parameteric", fill_value=0)
#Replace 0 values with NaN so that they don't appear in the graphs
pivot_Ireland_EachParametricOverTime.replace(0,np.nan, inplace=True)
pivot_Ireland_EachParametricOverTime =pd.DataFrame(pivot_Ireland_EachParametricOverTime)
Missing_values_by_parameter=pivot_Ireland_EachParametricOverTime.isna().sum()
print(Missing_values_by_parameter)
#print(pivot_Ireland_EachParametricOverTime.head())
pivot_Ireland_EachParametricOverTime.to_csv("EachParametricOverTime.csv")
Missing_values_by_parameter.to_csv('Missing_values_by_parameter.csv')
#Get the list of unique Parametric names that will be used in the for loop
parameters=Ireland_1990_final["Parameteric"].unique()
#set initial values for row and column index
r=0
c=0
#Plot parameters over time (years)
fig1, ax1 = plt.subplots(4, 4,figsize=(15, 10), sharex='col')
for param in parameters:
row_df = Ireland_1990_final[Ireland_1990_final["Parameteric"]== param]
ax1[r,c].plot(row_df['year'],row_df['value'], marker ='o', linestyle = '--')
ax1[r,c].tick_params(axis='x', labelrotation = 45)
ax1[3,c].set_xlabel('Time (years)')
ax1[r,c].set_title(param)
c += 1
if c == 4:
c = 0
r += 1
#set initial values for row and column index
r=0
c=0
#Plot parameters against GNI per capita
fig2, ax2 = plt.subplots(4, 4,figsize=(15, 10), sharex='col')
for param in parameters:
ax2[r, c].scatter(pivot_Ireland_EachParametricOverTime['GNI per capita'],pivot_Ireland_EachParametricOverTime[param], marker ='o', linestyle = '--')
ax2[r, c].tick_params(axis='x', labelrotation = 45)
ax2[3, c].set_xlabel('GNI per Capita')
ax2[r, c].set_title(param)
c += 1
if c == 4:
c = 0
r += 1
#UDF puts two parameters onto the same time series plot, each getting its own Y axis.
def twin_plot(x, xlabel, y1,label_y1, y2, label_y2, color_y1, color_y2, linewidth):
fig, ax = plt.subplots()
ax.plot(x,y1, color=color_y1,linewidth=linewidth, marker ='o', markersize=4)
ax.set_xlabel(xlabel)
ax.set_facecolor('#eafff5')
plt.xticks(np.arange(min(x), max(x) + 1, 1.0),rotation=90)
ax.set_ylabel(label_y1, color=color_y1)
ax.tick_params('y', colors=color_y1 )
ax.set_axisbelow(True)
ax.yaxis.grid(color='gray', linestyle='dashed')
ax.vlines(2008, 0, 1, transform=ax.get_xaxis_transform(), colors='black',linewidth = linewidth, linestyles='dashdot', label='2008')
ax2 = ax.twinx()
ax2.plot(x, y2, color=color_y2, linewidth=linewidth,marker ='s', markersize=4)
ax2.set_ylabel(label_y2, color=color_y2)
ax2.tick_params('y', colors=color_y2)
twin_plot(pivot_Ireland_EachParametricOverTime.index,'Time(years)', pivot_Ireland_EachParametricOverTime['GNI per capita'],'GNI per Cap',
pivot_Ireland_EachParametricOverTime['Hospital Beds per 1,000 people'],"Hospital Beds per 1,000 people",'red','blue',1)
#plt.show()
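# Hedged illustration of the pivot built above: rows are years, columns are the
# renamed parameters, so a single (hypothetical) lookup reads as
# pivot_Ireland_EachParametricOverTime.loc[2010, 'GNI per capita'];
# twin_plot then takes two such columns and plots them on separate y-axes
# against the shared year index.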
|
[
"alan.thornberry@gmail.com"
] |
alan.thornberry@gmail.com
|
effcba7200670626e61fa5818b4e690320046b98
|
8458686e40bc64d904cc2b399f294493fddc515b
|
/kerasenv/lib/python3.8/site-packages/pip/_internal/build_env.py
|
8a492395b9e7728f1a1200e5ae40661d54fe7165
|
[] |
no_license
|
gtatiana/news-sift-bot
|
e976013a56357a0b7a4e7a16291aa19dd79bb6a5
|
79fb81ddc8a60df4539af0f67b6c838d91c5ea94
|
refs/heads/master
| 2023-04-23T11:29:31.530703
| 2021-03-26T16:26:04
| 2021-05-03T11:09:28
| 360,166,242
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,215
|
py
|
"""Build Environment used for isolation during sdist building
"""
import logging
import os
import sys
import textwrap
from collections import OrderedDict
from distutils.sysconfig import get_python_lib
from sysconfig import get_paths
from pip._vendor.pkg_resources import Requirement, VersionConflict, WorkingSet
from pip import __file__ as pip_location
from pip._internal.cli.spinners import open_spinner
from pip._internal.utils.subprocess import call_subprocess
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from types import TracebackType
from typing import Tuple, Set, Iterable, Optional, List, Type
from pip._internal.index.package_finder import PackageFinder
logger = logging.getLogger(__name__)
class _Prefix:
def __init__(self, path):
# type: (str) -> None
self.path = path
self.setup = False
self.bin_dir = get_paths(
'nt' if os.name == 'nt' else 'posix_prefix',
vars={'base': path, 'platbase': path}
)['scripts']
# Note: prefer distutils' sysconfig to get the
# library paths so PyPy is correctly supported.
purelib = get_python_lib(plat_specific=False, prefix=path)
platlib = get_python_lib(plat_specific=True, prefix=path)
if purelib == platlib:
self.lib_dirs = [purelib]
else:
self.lib_dirs = [purelib, platlib]
class BuildEnvironment(object):
"""Creates and manages an isolated environment to install build deps
"""
def __init__(self):
# type: () -> None
temp_dir = TempDirectory(
kind=tempdir_kinds.BUILD_ENV, globally_managed=True
)
self._prefixes = OrderedDict((
(name, _Prefix(os.path.join(temp_dir.path, name)))
for name in ('normal', 'overlay')
))
self._bin_dirs = [] # type: List[str]
self._lib_dirs = [] # type: List[str]
for prefix in reversed(list(self._prefixes.values())):
self._bin_dirs.append(prefix.bin_dir)
self._lib_dirs.extend(prefix.lib_dirs)
# Customize site to:
# - ensure .pth files are honored
# - prevent access to system site packages
system_sites = {
os.path.normcase(site) for site in (
get_python_lib(plat_specific=False),
get_python_lib(plat_specific=True),
)
}
self._site_dir = os.path.join(temp_dir.path, 'site')
if not os.path.exists(self._site_dir):
os.mkdir(self._site_dir)
with open(os.path.join(self._site_dir, 'sitecustomize.py'), 'w') as fp:
fp.write(textwrap.dedent(
'''
import os, site, sys
# First, drop system-sites related paths.
original_sys_path = sys.path[:]
known_paths = set()
for path in {system_sites!r}:
site.addsitedir(path, known_paths=known_paths)
system_paths = set(
os.path.normcase(path)
for path in sys.path[len(original_sys_path):]
)
original_sys_path = [
path for path in original_sys_path
if os.path.normcase(path) not in system_paths
]
sys.path = original_sys_path
# Second, add lib directories.
# ensuring .pth file are processed.
for path in {lib_dirs!r}:
assert not path in sys.path
site.addsitedir(path)
'''
).format(system_sites=system_sites, lib_dirs=self._lib_dirs))
def __enter__(self):
# type: () -> None
self._save_env = {
name: os.environ.get(name, None)
for name in ('PATH', 'PYTHONNOUSERSITE', 'PYTHONPATH')
}
path = self._bin_dirs[:]
old_path = self._save_env['PATH']
if old_path:
path.extend(old_path.split(os.pathsep))
pythonpath = [self._site_dir]
os.environ.update({
'PATH': os.pathsep.join(path),
'PYTHONNOUSERSITE': '1',
'PYTHONPATH': os.pathsep.join(pythonpath),
})
def __exit__(
self,
exc_type, # type: Optional[Type[BaseException]]
exc_val, # type: Optional[BaseException]
exc_tb # type: Optional[TracebackType]
):
# type: (...) -> None
for varname, old_value in self._save_env.items():
if old_value is None:
os.environ.pop(varname, None)
else:
os.environ[varname] = old_value
def check_requirements(self, reqs):
# type: (Iterable[str]) -> Tuple[Set[Tuple[str, str]], Set[str]]
"""Return 2 sets:
- conflicting requirements: set of (installed, wanted) reqs tuples
- missing requirements: set of reqs
"""
missing = set()
conflicting = set()
if reqs:
ws = WorkingSet(self._lib_dirs)
for req in reqs:
try:
                    if ws.find(Requirement.parse(req)) is None:
missing.add(req)
except VersionConflict as e:
conflicting.add((str(e.args[0].as_requirement()),
str(e.args[1])))
return conflicting, missing
def install_requirements(
self,
finder, # type: PackageFinder
requirements, # type: Iterable[str]
prefix_as_string, # type: str
message # type: str
):
# type: (...) -> None
prefix = self._prefixes[prefix_as_string]
assert not prefix.setup
prefix.setup = True
if not requirements:
return
args = [
sys.executable, os.path.dirname(pip_location), 'install',
'--ignore-installed', '--no-user', '--prefix', prefix.path,
'--no-warn-script-location',
] # type: List[str]
if logger.getEffectiveLevel() <= logging.DEBUG:
args.append('-v')
for format_control in ('no_binary', 'only_binary'):
formats = getattr(finder.format_control, format_control)
args.extend(('--' + format_control.replace('_', '-'),
','.join(sorted(formats or {':none:'}))))
index_urls = finder.index_urls
if index_urls:
args.extend(['-i', index_urls[0]])
for extra_index in index_urls[1:]:
args.extend(['--extra-index-url', extra_index])
else:
args.append('--no-index')
for link in finder.find_links:
args.extend(['--find-links', link])
for host in finder.trusted_hosts:
args.extend(['--trusted-host', host])
if finder.allow_all_prereleases:
args.append('--pre')
if finder.prefer_binary:
args.append('--prefer-binary')
args.append('--')
args.extend(requirements)
with open_spinner(message) as spinner:
call_subprocess(args, spinner=spinner)
class NoOpBuildEnvironment(BuildEnvironment):
"""A no-op drop-in replacement for BuildEnvironment
"""
def __init__(self):
# type: () -> None
pass
def __enter__(self):
# type: () -> None
pass
def __exit__(
self,
exc_type, # type: Optional[Type[BaseException]]
exc_val, # type: Optional[BaseException]
exc_tb # type: Optional[TracebackType]
):
# type: (...) -> None
pass
def cleanup(self):
# type: () -> None
pass
def install_requirements(
self,
finder, # type: PackageFinder
requirements, # type: Iterable[str]
prefix_as_string, # type: str
message # type: str
):
# type: (...) -> None
raise NotImplementedError()
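# Hedged usage sketch for the classes above (the call pattern is inferred from the
# methods in this module, not from pip documentation):
#
#   env = BuildEnvironment()
#   with env:                      # temporarily isolates PATH / PYTHONPATH
#       conflicting, missing = env.check_requirements(["setuptools>=40.8.0", "wheel"])
#       # `missing`: requirement strings not importable from the isolated prefixes
#       # `conflicting`: (installed, wanted) pairs that raised VersionConflict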
|
[
"tanyaluzyk@gmail.com"
] |
tanyaluzyk@gmail.com
|
de9981ffdcab19bc3f4efa3301cfdeb69aef2784
|
04b11043d35b42c19be33ec9a9e2d590ecafea93
|
/convert.py
|
17e20ba2b632b2c92ac140335439f09769e3f1bb
|
[] |
no_license
|
5ymmetric/SURGE-SPI
|
41c535f8d82c308465123adcdf0e3813dd3ad70e
|
54d847dc295ded22d4af33b49d0c44bb5837b36b
|
refs/heads/master
| 2022-11-14T22:20:38.194370
| 2020-07-01T19:52:06
| 2020-07-01T19:52:06
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,752
|
py
|
import os
from glob import glob
import gdal
import numpy as np
import netCDF4 as nc4
from netCDF4 import Dataset
import datetime
import gc
wdir = 'C:\\Users\\Karthik.P\\OneDrive\\Desktop\\Research_SPI_Calculations\\Clipped\\India_Clipped'
odir = 'C:\\Users\\Karthik.P\\OneDrive\\Desktop\\Research_SPI_Calculations\\NetCDF\\'
beg = datetime.datetime.now()
dtstamp = datetime.datetime.now().strftime("%Y%m%d")
# Create arrays for lat and long, netCDF file, add dimensions
ds = gdal.Open(wdir+"\\2006\\India_prcp_20060417.tif")
b = ds.GetGeoTransform()
lon = np.arange(585)*b[1]+b[0]
lat = np.arange(606)*b[5]+b[3]
ncfile = nc4.Dataset(odir + "India_prcp_monthly.nc", "w", format="NETCDF4")
ncfile.createDimension('lat',606)
ncfile.createDimension('lon',585)
ncfile.createDimension('time',372)
lato = ncfile.createVariable('lat', 'f4', ('lat'))
lato.units = 'degrees_north'
lato.long_name = 'latitude'
lono = ncfile.createVariable('lon', 'f4', ('lon'))
lono.units = 'degrees_east'
lono.long_name = 'longitude'
time = ncfile.createVariable('time','f4', ('time'))
today = datetime.datetime.today()
ncfile.history = "Created " + today.strftime("%d/%m/%y")
time.units = 'days since 1900-01-01 00:00:00.0 UTC'
time.calendar = 'gregorian'
time.long_name = 'time'
zeroce = datetime.datetime.strptime("1900001","%Y%j")
thevar = ncfile.createVariable('prcp', 'f4', ('lat', 'lon','time'))
thevar.standard_name = 'Precipitation'
thevar.units = 'mm'
thevar.missing_value = -9999
flist = glob(wdir+"/**/*.tif")
flist.sort()
print(len(flist))
#alist=[]
count = 0
j=0
dd = 1
lato[:] = lat
lono[:] = lon
for yyyy in range(1989,2020):
if yyyy%4 != 0:
Feb = 28
elif yyyy%100 == 0 and yyyy%400!=0:
Feb = 28
else:
Feb = 29
for mm in range(1,13):
if mm in [1,3,5,7,8,10,12]:
days = 31
elif mm == 2:
days = Feb
elif mm in [4,6,9,11]:
days = 30
month_ag = []
start = count
end = count+days
print(start,days,end)
print(flist[start:end])
for prcp in flist[start:end]:
#print(prcp)
ds = gdal.Open(prcp)
ds_array = ds.ReadAsArray()
ds_array[ds_array==-9999]= np.nan
month_ag.append(ds_array)
count = end
#print(count)
date = datetime.datetime(yyyy,mm,dd)
print(date)
gregorian_day = (date-zeroce).total_seconds()/86400
time[j] = gregorian_day
#month_ag[month_ag%9999==0] = np.nan
#alist.append(month_ag)
fpcp = np.sum(month_ag, axis = 0)
thevar[:,:,j] = fpcp
j = j + 1
del month_ag
gc.collect()
ncfile.close()
end = datetime.datetime.now()
dur = end - beg
print(dur)
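# Hedged read-back sketch using the names defined above; the expected shape follows
# from the dimensions declared for 'prcp' (lat=606, lon=585, time=372).
# check = nc4.Dataset(odir + "India_prcp_monthly.nc", "r")
# print(check.variables['prcp'].shape)   # (606, 585, 372)
# check.close()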
|
[
"noreply@github.com"
] |
5ymmetric.noreply@github.com
|
56fe690f573e1bcd1c237c4fc714e06af528d8d6
|
ceb3d82494813cd21e38231964e098bb3efe093b
|
/Transform/matrix_transform.py
|
b31c53519cc07e99f7fae2eaae98fa5108272797
|
[
"Apache-2.0"
] |
permissive
|
Joevaen/Scikit-image_On_CT
|
0c0a306a9ca18668bd9bb4105e577766b1d5578b
|
e3bf0eeadc50691041b4b7c44a19d07546a85001
|
refs/heads/main
| 2023-03-16T01:28:04.871513
| 2021-03-16T07:53:57
| 2021-03-16T07:53:57
| 344,071,656
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 22
|
py
|
# Apply a 2D matrix transform
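# A minimal, hedged sketch of one way to apply a 2D matrix (affine) transform with
# scikit-image; `ct_slice` is a placeholder for a 2D CT array, not a file in this repo.
# from skimage import transform
# tform = transform.AffineTransform(scale=(1.1, 1.1), rotation=0.1, translation=(5, -3))
# warped = transform.warp(ct_slice, tform.inverse)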
|
[
"joevaen@126.com"
] |
joevaen@126.com
|
650bd45431b3d89a63d350aea8d1ed1d932bb642
|
bec3798f114b660efb6242d1e9bade880f812688
|
/lab8/wordsImplement2.py
|
5d6a360fcd204df46a75c95090e08999d249d2b9
|
[] |
no_license
|
schnapple/csci2961-master
|
9885174763ee928cc4729f1ceb3f50d94bcb54d6
|
19a62b56c2a185ce74eb9e76261c3f8e220fa8da
|
refs/heads/master
| 2016-09-05T14:19:21.097147
| 2015-11-13T18:39:02
| 2015-11-13T18:39:02
| 42,084,198
| 0
| 0
| null | 2015-09-30T04:09:08
| 2015-09-08T02:03:05
|
Python
|
UTF-8
|
Python
| false
| false
| 4,618
|
py
|
"""
SGBWords() returns an undirected graph over the 5757 5-letter
words in the datafile words_dat.txt. Two words are connected by an edge
if they differ in one letter, resulting in 14,135 edges. This example
is described in Section 1.1 in Knuth's book [1,2].
References.
----------
[1] Donald E. Knuth,
"The Stanford GraphBase: A Platform for Combinatorial Computing",
ACM Press, New York, 1993.
[2] http://www-cs-faculty.stanford.edu/~knuth/sgb.html
"""
__author__ = """Brendt Wohlberg\nAric Hagberg (hagberg@lanl.gov)"""
__date__ = "$Date: 2005-04-01 07:56:04 -0700 (Fri, 01 Apr 2005) $"
__credits__ = """"""
__revision__ = ""
# Copyright (C) 2004 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
from networkx import *
import re
import sys
__author__ = """"""
__date__ = ""
__credits__ = """"""
__version__ = ""
# $Header$
#-------------------------------------------------------------------
# The Words/Ladder graph of Section 1.1
#-------------------------------------------------------------------
def _notComment(line):
return not(line.startswith('*'))
def _wdist(a,b):
""" Return simple edit distance between two words a and b. """
d=abs(len(a)-len(b))
aArr = []
bArr = []
for l in a:
aArr.append(l)
for l in b:
bArr.append(l)
aArr.sort()
bArr.sort()
for k in range(0,min(len(a),len(b))):
if aArr[k] != bArr[k]:
d = d + 1
return d
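# Illustrative note (hedged): because both words are sorted before the position-wise
# comparison, _wdist counts differences between the *sorted* letters plus the length
# gap, e.g. _wdist("bear", "beat") == 1 while anagrams give _wdist("stop", "pots") == 0.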
def words_graph():
""" Return the words example graph from the Stanford GraphBase"""
import sys
# open file words_dat.txt.gz (or words_dat.txt)
try:
try:
import gzip
fh=gzip.open('words_dat.txt.gz','r')
except:
fh=open("words4.dat","r")
except IOError:
raise "File words_dat.txt not found."
G = Graph(name="words")
sys.stderr.write("Loading words_dat.txt: ")
for line in fh.readlines():
if line.startswith("*"):
continue
w=line[0:5]
G.add_node(w)
nwords=number_of_nodes(G)
words=G.nodes()
for k in xrange(0,nwords):
if (k%100==0):
sys.stderr.softspace=0
sys.stderr.write(".")
for l in xrange(k+1,nwords):
if _wdist(words[k],words[l]) == 1:
G.add_edge(words[k],words[l])
return G
def words_graph4():
""" Return the words example graph from the Stanford GraphBase"""
import sys
# open file words_dat.txt.gz (or words_dat.txt)
try:
try:
import gzip
fh=gzip.open('words_dat.txt.gz','r')
except:
fh=open("words_dat","r")
except IOError:
raise "File words_dat.txt not found."
G = Graph(name="words")
sys.stderr.write("Loading words4.dat: ")
for line in fh.readlines():
if line.startswith("*"):
continue
w=line[0:4]
G.add_node(w)
nwords=number_of_nodes(G)
words=G.nodes()
for k in xrange(0,nwords):
if (k%100==0):
sys.stderr.softspace=0
sys.stderr.write(".")
for l in xrange(k+1,nwords):
if _wdist(words[k],words[l]) == 1:
G.add_edge(words[k],words[l])
return G
if __name__ == '__main__':
from networkx import *
G=words_graph()
print "Loaded words_dat.txt containing 5757 five-letter English words."
print "Two words are connected if they differ in one letter."
print "graph has %d nodes with %d edges"\
%(number_of_nodes(G),number_of_edges(G))
sp=shortest_path(G, 'chaos', 'order')
print "shortest path between 'chaos' and 'order' is:\n", sp
sp=shortest_path(G, 'nodes', 'graph')
print "shortest path between 'nodes' and 'graph' is:\n", sp
sp=shortest_path(G, 'moron', 'smart')
print "shortest path between 'pound' and 'marks' is:\n", sp
print number_connected_components(G),"connected components"
print
print
print
G=words_graph4()
print "Loaded words_dat.txt containing 5757 five-letter English words."
print "Two words are connected if they differ in one letter."
print "graph has %d nodes with %d edges"\
%(number_of_nodes(G),number_of_edges(G))
    sp=shortest_path(G, 'cold', 'love')
    print "shortest path between 'cold' and 'love' is:\n", sp
    sp=shortest_path(G, 'love', 'hate')
    print "shortest path between 'love' and 'hate' is:\n", sp
print number_connected_components(G),"connected components"
|
[
"plagam95@gmail.com"
] |
plagam95@gmail.com
|
ccece3b514376b0b04b602fc979c2d7d8f934f5b
|
7383fda959b1a2600c0fae350b5b4bc341c155bb
|
/pygithub3/resources/base.py
|
4ca2aa334de44aac13c88418b0c0467529a836c9
|
[
"ISC"
] |
permissive
|
agargiulo/python-github3
|
1e1eeea6e59ebc1ea1cd222ab31d065407110867
|
78b74c3afe37f4172d6b933a90757256efb8ff9b
|
refs/heads/master
| 2021-01-16T21:00:05.551844
| 2012-06-15T19:52:39
| 2012-06-15T19:52:39
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,891
|
py
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
try:
import simplejson as json
except ImportError:
import json
class Resource(object):
_dates = ()
_maps = {}
_collection_maps = {}
def __init__(self, attrs):
""" """
self._attrs = attrs
self.__set_attrs()
def __set_attrs(self):
for attr in self._attrs:
setattr(self, attr, self._attrs[attr])
def __str__(self):
return "<%s>" % self.__class__.__name__
def __repr__(self):
return self.__str__()
@classmethod
def loads(self, json_content):
resource_chunk = json.loads(json_content)
if not hasattr(resource_chunk, 'items'):
return [self.__load(raw_resource)
for raw_resource in resource_chunk]
else:
return self.__load(resource_chunk)
@classmethod
def __load(self, raw_resource):
def self_resource(func):
def wrapper(resource, raw_resource):
if resource == 'self':
resource = self
return func(resource, raw_resource)
return wrapper
def parse_date(string_date):
from datetime import datetime
try:
date = datetime.strptime(string_date, '%Y-%m-%dT%H:%M:%SZ')
except TypeError:
date = None
return date
@self_resource
def parse_map(resource, raw_resource):
if hasattr(raw_resource, 'items'):
return resource.__load(raw_resource)
@self_resource
def parse_collection_map(resource, raw_resources):
# Dict of resources (Ex: Gist file)
if hasattr(raw_resources, 'items'):
dict_map = {}
for key, raw_resource in raw_resources.items():
dict_map[key] = resource.__load(raw_resource)
return dict_map
# list of resources
elif hasattr(raw_resources, '__iter__'):
return [resource.__load(raw_resource)
for raw_resource in raw_resources]
new_resource = raw_resource.copy()
new_resource.update(dict([
(attr, parse_date(raw_resource[attr]))
for attr in self._dates if attr in raw_resource]))
new_resource.update(dict([
(attr, parse_map(resource, raw_resource[attr]))
for attr, resource in self._maps.items()
if attr in raw_resource]))
new_resource.update(dict([
(attr, parse_collection_map(resource, raw_resource[attr]))
for attr, resource in self._collection_maps.items()
if attr in raw_resource]))
return self(new_resource)
class Raw(Resource):
@classmethod
def loads(self, json_content):
return json.loads(json_content)
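# Hedged usage sketch; the subclass below is illustrative and not part of this package.
#
#   class User(Resource):
#       _dates = ('created_at',)
#
#   user = User.loads('{"login": "octocat", "created_at": "2011-01-25T18:44:36Z"}')
#   user.login        # -> 'octocat'
#   user.created_at   # -> datetime.datetime(2011, 1, 25, 18, 44, 36)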
|
[
"davidmedina9@gmail.com"
] |
davidmedina9@gmail.com
|
86760ff1101f035e356a326d9765f3a2aed65dfa
|
6ee0b885c76d2432d17b5f26590062018a6e3416
|
/configs.py
|
4629ae35bbf526995d799cecb0d7e30693c2a28d
|
[] |
no_license
|
cannyanalytics/Expired-Domain-Names-Scrapper
|
d711455ba80c17b74933d9dc0437a6bd107a0f6a
|
3c63e794dd7263843ae1b101d8cad99d47db6313
|
refs/heads/main
| 2023-07-13T12:56:26.169061
| 2021-08-28T23:15:44
| 2021-08-28T23:15:44
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 74
|
py
|
username = 'seouser'
password = 'fakepass'
redirect_to_url = '/startpage'
|
[
"tomoloju45@gmail.com"
] |
tomoloju45@gmail.com
|
cb02822c052b15bed81ce10b37ee944716a7fd60
|
9624d15edc242e324666a65bf05bc073ff112bc9
|
/TokenAuth/taskpro/taskapp/admin.py
|
6ff593f1395ae012e8b712d1ace020cb567b0162
|
[] |
no_license
|
nivyainventateq/DjangoToken
|
91bf907cb73cd601afe0c2af4eacba6297ec0e4b
|
872e6d1cf9d5daf1a30b4c682ca42e293518196e
|
refs/heads/master
| 2023-05-30T20:56:11.866695
| 2021-05-29T18:57:49
| 2021-05-29T18:57:49
| 372,049,339
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 96
|
py
|
from django.contrib import admin
from taskapp.models import User
admin.site.register(User)
|
[
"“nivyainventateq@gmail.com”"
] |
“nivyainventateq@gmail.com”
|
889ad5dc334108a9058a2b96c11c9e8dcef700fe
|
2574a48e1cd547c002198ae0939ee86366feb399
|
/dianshi_gaofenbei_1901/BandSelection_v3.2.1/render_point_mp.py
|
7b3a719a5e85b3fe474fdb4de4738accbfedf5df
|
[] |
no_license
|
BasicPower/tianchiorgame
|
32db8a2340dba22313f95464fcaa7d90ec2b1a12
|
765206f15eb6b0a6514d1b6cc6e1291cf41c0ac5
|
refs/heads/master
| 2020-06-25T15:53:58.055117
| 2019-07-20T03:39:56
| 2019-07-20T03:39:56
| 199,358,573
| 1
| 0
| null | 2019-07-29T01:45:48
| 2019-07-29T01:45:48
| null |
UTF-8
|
Python
| false
| false
| 2,598
|
py
|
import numpy as np
from osgeo import gdal
import keras
from multiprocessing import Pool
import multiprocessing
import os
import datetime
import cv2
np.set_printoptions(threshold=np.inf)  # print full arrays instead of truncating them with "..."
def get_cell(pos_x, pos_y, size):
up = int(pos_x - size / 2)
left = int(pos_y - size / 2)
    up = int(np.clip(up, 0, dataset.RasterXSize - size))
left = int(np.clip(left, 0, dataset.RasterYSize - size))
try:
output = []
for i in bands:
band = dataset.GetRasterBand(i)
t = band.ReadAsArray(up, left, size, size)
output.append(t)
img = np.moveaxis(np.array(output), 0, 2)
except BaseException:
return None
return img
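# Hedged worked example of the windowing above: with size=25 the window origin is
# placed roughly half a window (12 px) up and left of (pos_x, pos_y), np.clip keeps
# it fully inside the raster, and the stacked bands return a (25, 25, len(bands)) array.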
size = 25
labels_key = [20, 40, 60, 0]
bands = [1, 2, 3]
model = keras.models.load_model("output/model_save.h5")
dataset = gdal.Open(
r"E:\机器学习竞赛\baidu_dianshi\rgb_data.tif")
def render(num):
res = np.zeros(shape=(10073, 8905), dtype=np.uint8)
col = 5
left = (num % col) * 10073
top = (num // col) * 8905
print(num, top, left)
# step = 7
# for i in range(0, 10073 + step, step):
# for j in range(0, 8905 + step, step):
# img = get_cell(i + left, j + top, size)
# if img is None:
# continue
# imgs = np.array([img])
# result = model.predict(imgs)
# x1 = max(i - 3, 0)
# x2 = min(i + 4, 10073)
# y1 = max(j - 3, 0)
# y2 = min(j + 4, 8905)
# res[x1:x2, y1:y2] = labels_key[np.argmax(result, 1)[0]]
# print("processing pid:{} {}/{}".format(os.getpid(), i, 10073))
step = 3
for i in range(0, 10073 + step, step):
for j in range(0, 8905 + step, step):
img = get_cell(i + left, j + top, size)
if img is None:
continue
imgs = np.array([img])
result = model.predict(imgs)
x1 = max(i - 1, 0)
x2 = min(i + 2, 10073)
y1 = max(j - 1, 0)
y2 = min(j + 2, 8905)
res[x1:x2, y1:y2] = labels_key[np.argmax(result, 1)[0]]
print("processing pid:{} {}/{}".format(os.getpid(), i, 10073))
cv2.imwrite("imgres/test_result_{}.tif".format(num), res.T)
if __name__ == '__main__':
start = datetime.datetime.now()
pool = Pool(multiprocessing.cpu_count())
print(multiprocessing.cpu_count())
img_list = range(10)
pool.map(render, img_list)
print(datetime.datetime.now() - start)
|
[
"lishihang@live.cn"
] |
lishihang@live.cn
|
bdfbd75b775053496065a6c56c132ee2c8f8aaa7
|
e636b487379e6428e3d453fe1bb500d8d6b9fb40
|
/0x03-python-data_structures/8-multiple_returns.py
|
8f3f0ffa3826dce72e18831873a2f50df10a4618
|
[] |
no_license
|
Ethan-23/holbertonschool-higher_level_programming
|
49ce96d5f771b8158f8f9495eb608ac251b06811
|
18075dcaa0e599e27a364b4161f1721e4b8d7b27
|
refs/heads/main
| 2023-05-02T02:30:06.855863
| 2021-05-13T03:06:27
| 2021-05-13T03:06:27
| 319,458,771
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 199
|
py
|
#!/usr/bin/python3
def multiple_returns(sentence):
if len(sentence) == 0:
tuple_empty = (0, None)
return tuple_empty
tuple_1 = (len(sentence), sentence[0])
return tuple_1
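# Hedged usage sketch:
# multiple_returns("")           -> (0, None)
# multiple_returns("Holberton")  -> (9, 'H')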
|
[
"emanroberts@gmail.com"
] |
emanroberts@gmail.com
|
406225491fb5bbdad17cb62a720eb610806512f7
|
9940c07fa8f8c8e7675a5f197f25b2185fe7fa4d
|
/request_test.py
|
34bfbf358aca0e3b06c933b1afcf96635f508074
|
[] |
no_license
|
froggermtp/key-detection-test
|
85700aeffde93a2944fa79f1e2bb4933ef9a4f60
|
117adee7bc4cfbe6b6a7587119a9c798bf47aedc
|
refs/heads/master
| 2020-12-21T09:56:52.917833
| 2020-04-22T05:11:21
| 2020-04-22T05:11:21
| 236,393,256
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 241
|
py
|
import requests
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("image")
args = parser.parse_args()
file = open(args.image, "rb")
r = requests.post("http://127.0.0.1:5000/predict", files={'file': file})
print(r.text)
|
[
"mfrogparris@gmail.com"
] |
mfrogparris@gmail.com
|
8dd569a4cc2395fcad9de9cb3f157101eddf1301
|
31575b9e0f5c494afe665f79d1f44540c0a98922
|
/meiduo_mall/meiduo_mall/utils/response_code.py
|
2c5221eaf885a24e33c8b53f6b2359be7097a3d9
|
[] |
no_license
|
wisdomsys/meiduo_project
|
490e73fcd96cf52c759f359a4c72f2aadcd9e806
|
e12de180c3d2cb401c560728c3542a20a8dec7b8
|
refs/heads/master
| 2023-01-10T21:15:46.752367
| 2020-11-10T02:35:11
| 2020-11-10T02:35:11
| 310,580,758
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 90
|
py
|
class RETCODE:
OK = '0'
USERERR = '4004'
err_msg = {
RETCODE.OK: '成功'
}
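# Hedged usage sketch (the JsonResponse payload shape is an assumption, not taken
# from a view in this project):
# JsonResponse({'code': RETCODE.OK, 'errmsg': err_msg[RETCODE.OK]})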
|
[
"1396871335@qq.com"
] |
1396871335@qq.com
|
e33911f4ff39e954282be6c971e468995f91606c
|
0d32e3819606c3fb6820d0cd5f5097db3b0d3dd4
|
/HW3/sarsa_mountain_car.py
|
0d4789ce9c45fd1092146fe290050525440869d0
|
[] |
no_license
|
IanCBrown/COMP5600
|
e8e06b2a8e3bde0acc6897adb2396a57a2811f0a
|
ef454c009d6fd5eec50ceec5a8283a7c6d81d097
|
refs/heads/master
| 2020-08-02T13:20:41.024681
| 2019-12-09T03:53:37
| 2019-12-09T03:53:37
| 211,366,293
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,189
|
py
|
import math
import numpy as np
import matplotlib
matplotlib.use("TkAgg")
from matplotlib import pyplot as plt
import gym
from gym import spaces
from gym.utils import seeding
# Resources:
# https://en.wikipedia.org/wiki/Mountain_car_problem
# https://towardsdatascience.com/getting-started-with-reinforcement-learning-and-open-ai-gym-c289aca874f
# https://towardsdatascience.com/reinforcement-learning-temporal-difference-sarsa-q-learning-expected-sarsa-on-python-9fecfda7467e
def epsilon_greedy(Q, state, action_space, epsilon):
# if in epsilon range use it
if np.random.rand() < 1 - epsilon:
action = np.argmax(Q[state[0], state[1]])
# else take random action
else:
action = np.random.randint(0, action_space)
return action
def sarsa(learning_rate, discount, epsilon, min_epsilon, episodes):
# initialize environment
env = gym.make("MountainCar-v0")
env.reset()
states = (env.observation_space.high - env.observation_space.low)*np.array([10,100])
states = np.round(states, 0).astype(int) + 1
# Q(s,a)
Q_table = np.random.uniform(low = -1, high = 1, size = (states[0], states[1], env.action_space.n))
reward_list = []
var_list = []
avg_reward_list = []
# reduce epsilon linearly as time increases
decay = (epsilon - min_epsilon)/episodes
# Q learning main loop
for i in range(episodes):
finished = False
total_reward = 0
reward = 0
state = env.reset()
state_adj = (state - env.observation_space.low)*np.array([10,100])
state_adj = np.round(state_adj, 0).astype(int)
while not finished:
# render last N episodes
# comment out to see plots
# if i >= episodes - 1:
# env.render()
            # pick an action epsilon-greedily
action = epsilon_greedy(Q_table, state_adj, env.action_space.n, epsilon)
next_state, reward, finished, info = env.step(action)
# Discretize
next_state_adj = (next_state - env.observation_space.low)*np.array([10,100])
next_state_adj = np.round(next_state_adj, 0).astype(int)
            if finished and next_state[0] >= 0.5:  # episode ended at the goal position (>= 0.5)
Q_table[state_adj[0], state_adj[1], action] = reward
else:
update = learning_rate * (reward + discount * np.max(Q_table[next_state_adj[0],next_state_adj[1]])
- Q_table[state_adj[0], state_adj[1], action])
# update Q table
Q_table[state_adj[0], state_adj[1], action] += update
total_reward += reward
state_adj = next_state_adj
# decay epsilon if still greater than min_epsilon
if epsilon > min_epsilon:
epsilon -= decay
reward_list.append(total_reward)
# choose how often to record data
# recording every data point will make the plots crowded
# 10 and 100 work well.
recording_interval = 100
if i % recording_interval == 0:
avg_reward = np.mean(reward_list)
var = np.var(reward_list)
var_list.append(var)
avg_reward_list.append(avg_reward)
reward_list = []
env.close()
return (avg_reward_list, var_list)
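# Hedged worked example of the discretisation above: MountainCar-v0 observations span
# roughly [-1.2, 0.6] in position and [-0.07, 0.07] in velocity, so
# (high - low) * [10, 100] ~= [18, 14]; rounding and adding 1 gives a 19 x 15 grid,
# each cell holding env.action_space.n = 3 Q-values.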
# Adjust these parameters as needed
number_of_episodes = 2500
learning_rate = 0.1
gamma = 0.9
epsilon = 0.8
min_epsilon = 0
def single_run():
"""
Run the algorithm once
"""
rewards_and_var = sarsa(learning_rate, gamma, epsilon, min_epsilon, number_of_episodes)
avg_reward = rewards_and_var[0]
var = rewards_and_var[1]
episodes1 = 100*(np.arange(len(avg_reward)) + 1)
episodes2 = 100*(np.arange(len(var)) + 1)
plt.figure("Average Reward vs. Episodes")
plt.title("Average Reward vs. Episodes")
plt.xlabel("Episodes")
plt.ylabel("Average Reward")
plt.plot(episodes1, avg_reward, color='blue')
plt.figure("Variance vs. Episodes")
plt.title("Variance vs. Episodes")
plt.xlabel("Episodes")
plt.ylabel("Variance")
plt.plot(episodes2, var, color='orange')
plt.figure("Average Reward w/ Variance vs. Episodes")
plt.title("Average Reward w/ Variance vs. Episodes")
plt.xlabel("Episodes")
plt.ylabel("Average Reward w/ Variance")
plt.errorbar(episodes1, avg_reward, var, linestyle='None', marker='^', ecolor="orange")
plt.show()
def multi_run(N):
"""
Run the algorithm N times
@param N - number of times to test (e.g. 20)
"""
rewards = []
vars = []
for _ in range(N):
rewards_and_var = sarsa(learning_rate, gamma, epsilon, min_epsilon, number_of_episodes)
avg_reward = rewards_and_var[0]
var = rewards_and_var[1]
rewards.append(avg_reward)
vars.append(var)
rewards = list(zip(*rewards))
vars = list(zip(*vars))
reward_to_plot = []
for sublist in rewards:
reward_to_plot.append(np.mean(sublist))
var_to_plot = []
for sublist in vars:
var_to_plot.append(np.mean(sublist))
episodes1 = 100*(np.arange(len(avg_reward)) + 1)
episodes2 = 100*(np.arange(len(var)) + 1)
plt.figure("Average Reward vs. Episodes")
plt.title("Average Reward vs. Episodes")
plt.xlabel("Episodes")
plt.ylabel("Average Reward")
plt.plot(episodes1, reward_to_plot, color='blue')
plt.savefig("sarsa_results/Average_Reward_vs_Episodes.png")
plt.figure("Variance vs. Episodes")
plt.title("Variance vs. Episodes")
plt.xlabel("Episodes")
plt.ylabel("Variance")
plt.plot(episodes2, var_to_plot, color='orange')
plt.savefig("sarsa_results/Variance_vs_Episodes.png")
plt.figure("Average Reward w/ Variance vs. Episodes")
plt.title("Average Reward w/ Variance vs. Episodes")
plt.xlabel("Episodes")
plt.ylabel("Average Reward w/ Variance")
plt.errorbar(episodes1, reward_to_plot, var_to_plot, linestyle='None', marker='^', ecolor="orange")
plt.savefig("sarsa_results/Average_Reward_and_Variance_vs_Episodes.png")
# choose multi or single run
# single_run()
multi_run(20)
|
[
"icb0004@auburn.edu"
] |
icb0004@auburn.edu
|
43a263b58905712fcc792c7528cd6f6ff6cfa9ed
|
bb759eba1926d55171f82a0a83bd94460b8ce007
|
/Likit5k.py
|
c6b0c95c784cbdec03b9b37136b2e308cde53e59
|
[] |
no_license
|
tankeng/edit
|
1afb188ff629738aef214684973b7e7c93377a4f
|
74f2165e7a0784706cf9d1393ffa6f21d1fea684
|
refs/heads/master
| 2021-04-25T17:16:49.939085
| 2018-02-14T07:52:58
| 2018-02-14T07:52:58
| 121,477,608
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 283,976
|
py
|
# -*- coding: utf-8 -*-
import LINETCR
from LINETCR.lib.curve.ttypes import *
from datetime import datetime
import time, random, sys, ast, re, os, io, json, subprocess, threading, string, codecs, requests, ctypes, urllib, urllib2, urllib3, wikipedia, tempfile, shutil
from bs4 import BeautifulSoup
from urllib import urlopen
from io import StringIO
from threading import Thread
from gtts import gTTS
from googletrans import Translator
cl = LINETCR.LINE()
cl.login(token="EpeIlyovyvcXGtQ5KcZ9.5bfZvGcvHUTvnM0rMEvLIq.+7FoLVxdmTuGgFXMXzIhJHkCRoMA3A0qmO8F5sWSe+4=")
cl.loginResult()
ki1 = LINETCR.LINE()
ki1.login(token="EpjiQByscckaztWMLHT8.owHU0xjbVBa1wQEyYtMIoa.I2eh5sVXblpXzl3WiINdZqQQR84SGQOBCJISKS5EIzI=")
ki1.loginResult()
ki2 = LINETCR.LINE()
ki2.login(token="EpDZBzISMXE7jwVQPrU2.1TwJQ1ThIAdycIbKWJpJmG.b4A67+XcpeH6guqF44l1toV+tWLPQeYWJCti2ti79+w=")
ki2.loginResult()
ki3 = LINETCR.LINE()
ki3.login(token="EpBRpEATqus5XIQ9111e.VaiImiEjGvzw9+FPEujUpG.0KQvbSju34krf2finMY+cT3bmzuBnuO6J9owhlGhdYk=")
ki3.loginResult()
ki4 = LINETCR.LINE()
ki4.login(token="Epwz3AFPpZnyuUVom4f4.aQwlG1Yb79ZynULT8qCRja.f3EMu941x7SFQ1uCfUFIDgJSH+aPQ65MWqvTpv41aeo=")
ki4.loginResult()
ki5 = LINETCR.LINE()
ki5.login(token="EpTVbwp7EB9uTVuMfyB7.7xZurXNCqndHG6OfwUrtzW.Fb3BjHOLC5h/Wlihx7dm1k7sUUV+Pg8lP4Ms6tra4PQ=")
ki5.loginResult()
print "login success"
reload(sys)
sys.setdefaultencoding('utf-8')
helpMessage =""" ─┅═✥हईTOMEBOTLINEईह✥═┅─
─┅═✥s̵ᴇʟғʙᴏᴛ ᴛʜᴀɪʟᴀɴᴅ✥═┅─
❂͜͡☆➣ 『Me』
❂͜͡☆➣ 『Id』
❂͜͡☆➣ 『Wc』
❂͜͡☆➣ 『Mc:』
❂͜͡☆➣ 『Mid』
❂͜͡☆➣ 『BBc:』
❂͜͡☆➣ 『Gift』
❂͜͡☆➣ 『Mid @』
❂͜͡☆➣ 『Cn: Display Name』
❂͜͡☆➣ 『Cc: Clock Name』
❂͜͡☆➣ 『Hack @』
❂͜͡☆➣ 『Tl: text』
❂͜͡☆➣ 『Auto join: on/off』
❂͜͡☆➣ 『Auto add: on/off』
❂͜͡☆➣ 『Auto leave: on/off』
❂͜͡☆➣ 『Clock: on/off』
❂͜͡☆➣ 『Share on』
❂͜͡☆➣ 『Add message: text』
❂͜͡☆➣ 『Message:』
❂͜͡☆➣ 『Add comment: text』
❂͜͡☆➣ 『Comment: 』
❂͜͡☆➣ 『Cbroadcast text』
❂͜͡☆➣ 『Gbroadcast text』
❂͜͡☆➣ 『Reject』
──┅═✥===========✥═┅──
SELFBOT
PHET HACK BOT
──┅═✥===========✥═┅──
❂͜͡☆➣ 『Creator 』
❂͜͡☆➣ 『Gn: text 』
❂͜͡☆➣ 『Invite:on 』
❂͜͡☆➣ 『Invite: mid』
❂͜͡☆➣ 『Allgift 』
❂͜͡☆➣ 『All mid』
❂͜͡☆➣ 『Cancel』
❂͜͡☆➣ 『Link on/off』
❂͜͡☆➣ 『Spam on/off』
❂͜͡☆➣ 『ginfo』
❂͜͡☆➣ 『Myginfo』
❂͜͡☆➣ 『Gurl』
❂͜͡☆➣ 『Glist』
❂͜͡☆➣ 『Set』
❂͜͡☆➣ 『Phet: Tag』
❂͜͡☆➣ 『Gcancel:』
❂͜͡☆➣ 『Masuk Join』
❂͜͡☆➣ 『Sa:yang』
❂͜͡☆➣ 『Beb』
❂͜͡☆➣ 『Cinta』
❂͜͡☆➣ 『Sayang: 』
❂͜͡☆➣ 『P:ulang』
❂͜͡☆➣ 『Ban @』
❂͜͡☆➣ 『Uban @』
❂͜͡☆➣ 『Ban 』
❂͜͡☆➣ 『Unban』
❂͜͡☆➣ 『Comment :』
❂͜͡☆➣ 『Banlist』
❂͜͡☆➣ 『Cekban』
❂͜͡☆➣ 『Clear ban』
❂͜͡☆➣ 『Kill @ Fuck @』
❂͜͡☆➣ 『Speed / Sp』
❂͜͡☆➣ 『Hack @2@3@4』
❂͜͡☆➣ 『Ambilin @』
❂͜͡☆➣ 『Sampul @』
❂͜͡☆➣ 『Copy @』
❂͜͡☆➣ 『Mycopy @』
❂͜͡☆➣ 『Keluar :@』
❂͜͡☆➣ 『music』
❂͜͡☆➣ 『.reboot』
❂͜͡☆➣ 『Wikipedia』
❂͜͡☆➣ 『Cleanse』
❂͜͡☆➣ 『Bot Speed』
❂͜͡☆➣ 『P1-P36 link on/off』
──┅═✥===========✥═┅──
❂͜͡☆➣ 『Key』
❂͜͡☆➣ 『Qr on/off』
❂͜͡☆➣ 『Backup on/off』
❂͜͡☆➣ 『Protect On/off』
❂͜͡☆➣ 『Namelock On/off』
─┅═✥ᵀᴴᴬᴵᴸᴬᴺᴰ✥═┅─
[By. TOME NOTLINE]
──┅═✥============✥═┅──"""
helpMessage2 ="""
╔═════════════════
║ ✟ New function ✟
╠═════════════════
╠➩〘Help protect〙
╠➩〘Help self〙
╠➩〘Help grup〙
╠➩〘Help set〙
╠➩〘Help media〙
╠➩〘Speed〙
╠➩〘Status〙
╚═════════════════
╔═════════════════
║ ✟ New function ✟
╠═════════════════
╠➩〘Protect on/off〙
╠➩〘Qr on/off〙
╠➩〘Invit on/off〙
╠➩〘Cancel on/off〙
╚═════════════════
╔═════════════════
║ ✟ New function ✟
╠═════════════════
╠➩〘Me〙
╠➩〘Myname: 〙
╠➩〘Mybio: 〙
╠➩〘Myname〙
╠➩〘Mybio〙
╠➩〘Mypict〙
╠➩〘Mycover〙
╠➩〘My,copy @〙
╠➩〘Mybackup〙
╠➩〘Getgrup image〙
╠➩〘Getmid @〙
╠➩〘Getprofile @〙
╠➩〘Getcontact @〙
╠➩〘Getinfo @〙
╠➩〘Getname @〙
╠➩〘Getbio @〙
╠➩〘Getpict @〙
╠➩〘Getcover @〙
╠➩〘Mention〙
╠➩〘Lurk on/off〙
╠➩〘Lurkers〙
╠➩〘Mimic on/off〙
╠➩〘Micadd @〙
╠➩〘Micdel @〙
╠═════════════════
║ ✟ New function ✟
╠═════════════════
╠➩〘Contact on/off〙
╠➩〘Autojoin on/off〙
╠➩〘Autoleave on/off〙
╠➩〘Autoadd on/off〙
╠➩〘Like me〙
╠➩〘Like friend〙
╠➩〘Like on〙
╠➩〘Respon on/off〙
╠➩〘Read on/off〙
╠➩〘Simisimi on/off〙
╠═════════════════
║ ✟ New function ✟
╠═════════════════
╠➩〘Link on/off〙
╠➩〘Url〙
╠➩〘Cancel〙
╠➩〘Gcreator〙
╠➩〘Ki'ck @〙
╠➩〘Ulti @〙
╠➩〘Cancel〙
╠➩〘Gname: 〙
╠➩〘Gbroadcast: 〙
╠➩〘Cbroadcast: 〙
╠➩〘Infogrup〙
╠➩〘Gruplist〙
╠➩〘Friendlist〙
╠➩〘Blocklist〙
╠➩〘Ba'n @〙
╠➩〘U'nban @〙
╠➩〘Clearban〙
╠➩〘Banlist〙
╠➩〘Contactban〙
╠➩〘Midban〙
╠═════════════════
║ ✟ New function ✟
╠═════════════════
╠➩〘Kalender〙
╠➩〘tr-id 〙
╠➩〘tr-en 〙
╠➩〘tr-jp 〙
╠➩〘tr-ko 〙
╠➩〘say-id 〙
╠➩〘say-en 〙
╠➩〘say-jp 〙
╠➩〘say-ko 〙
╠➩〘profileig 〙
╠➩〘checkdate 〙
╚═════════════════
"""
helpMessage3 ="""
╔══════════════════
║ ✦เปิด/ปิดข้อความต้อนรับ✦
╠══════════════════
║✰ Hhx1 on ➠เปิดข้อความต้อนรับ
║✰ Hhx1 off ➠ปิดข้อความต้อนรับ
║✰ Hhx2 on ➠เปิดข้อความออกกลุ่ม
║✰ Hhx2 off ➠เปิดข้อความออกกลุ่ม
║✰ Hhx3 on ➠เปิดข้อความคนลบ
║✰ Hhx3 off ➠เปิดข้อความคนลบ
║✰ Mbot on ➠เปิดเเจ้งเตือนบอท
║✰ Mbot off ➠ปิดเเจ้งเตือนบอท
║✰ M on ➠เปิดเเจ้งเตือนตนเอง
║✰ M off ➠ปิดเเจ้งเตือนตนเอง
║✰ Tag on ➠เปิดกล่าวถึงเเท็ค
║✰ Tag off ➠ปิดกล่าวถึงเเท็ค
║✰ Kicktag on ➠เปิดเตะคนเเท็ค
║✰ Kicktag off ➠ปิดเตะคนเเท็ค
╚═══════════════════
╔═══════════════════
║ ✦โหมดตั้งค่าข้อความ✦
╠═══════════════════
║✰ Hhx1˓: ➠ไส่ข้อความต้อนรับ
║✰ Hhx2˓: ➠ไส่ข้อความออกจากกลุ่ม
║✰ Hhx3˓: ➠ไส่ข้อความเมื่อมีคนลบ
╚═══════════════════
╔═══════════════════
║ ✦โหมดเช็คตั้งค่าข้อความ✦
╠═══════════════════
║✰ Hhx1 ➠เช็คข้อความต้อนรับ
║✰ Hhx2 ➠เช็คข้อความคนออก
║✰ Hhx3 ➠เช็คข้อความคนลบ
╚═══════════════════"""
KAC=[cl,ki1,ki2,ki3,ki4,ki5]
mid = cl.getProfile().mid
Amid1 = ki1.getProfile().mid
Amid2 = ki2.getProfile().mid
Amid3 = ki3.getProfile().mid
Amid4 = ki4.getProfile().mid
Amid5 = ki5.getProfile().mid
protectname = []
protecturl = []
protection = []
autocancel = {}
autoinvite = []
autoleaveroom = []
targets = []
mid = cl.getProfile().mid
Bots = ["",mid,Amid1,Amid2,Amid3,Amid4,Amid5]
self = ["",mid,Amid1,Amid2,Amid3,Amid4,Amid5]
admin = ""
admsa = ""
owner = ""
adminMID = ""
Creator=""
wait = {
"alwayRead":False,
"detectMention":True,
"kickMention":False,
"steal":False,
'pap':{},
'invite':{},
"spam":{},
'contact':False,
'autoJoin':True,
'autoCancel':{"on":True, "members":1},
'leaveRoom':True,
'timeline':True,
'autoAdd':False,
'message':"Thanks for add Me MY NAME IS PHET",
"lang":"JP",
"comment":"AutoLike by Phet",
"commentOn":False,
"acommentOn":False,
"bcommentOn":False,
"ccommentOn":False,
"Protectcancl":False,
"pautoJoin":False,
"commentBlack":{},
"wblack":False,
"dblack":False,
"clock":False,
"cName":"༺ ㏒ Ᵽɧëȶ ㏒ ༻",
"likeOn":False,
"pname":False,
"blacklist":{},
"whitelist":{},
"wblacklist":False,
"dblacklist":False,
"qr":False,
"Backup":False,
"protectionOn":False,
"winvite":False,
"ainvite":False,
"binvite":False,
"protect":False,
"cancelprotect":False,
"inviteprotect":False,
"linkprotect":False,
"Hhx1":False,
"Hhx2":False,
"Hhx3":False,
"Notifed":False,
"Notifedbot":False,
"atjointicket":False,
"pnharfbot":{},
"pname":{},
"pro_name":{},
"posts":False,
}
wait2 = {
"readPoint":{},
"readMember":{},
"setTime":{},
"ROM":{}
}
mimic = {
"copy":False,
"copy2":False,
"status":False,
"target":{}
}
settings = {
"simiSimi":{}
}
res = {
'num':{},
'us':{},
'au':{},
}
setTime = {}
setTime = wait2['setTime']
mulai = time.time()
blacklistFile='blacklist.txt'
pendinglistFile='pendinglist.txt'
contact = cl.getProfile()
mybackup = cl.getProfile()
mybackup.displayName = contact.displayName
mybackup.statusMessage = contact.statusMessage
mybackup.pictureStatus = contact.pictureStatus
contact = ki1.getProfile()
backup = ki1.getProfile()
backup.displayName = contact.displayName
backup.statusMessage = contact.statusMessage
backup.pictureStatus = contact.pictureStatus
contact = ki2.getProfile()
backup = ki2.getProfile()
backup.displayName = contact.displayName
backup.statusMessage = contact.statusMessage
backup.pictureStatus = contact.pictureStatus
contact = ki3.getProfile()
backup = ki3.getProfile()
backup.displayName = contact.displayName
backup.statusMessage = contact.statusMessage
backup.pictureStatus = contact.pictureStatus
contact = ki4.getProfile()
backup = ki4.getProfile()
backup.displayName = contact.displayName
backup.statusMessage = contact.statusMessage
backup.pictureStatus = contact.pictureStatus
contact = ki5.getProfile()
backup = ki5.getProfile()
backup.displayName = contact.displayName
backup.statusMessage = contact.statusMessage
backup.pictureStatus = contact.pictureStatus
def restart_program():
python = sys.executable
os.execl(python, python, * sys.argv)
def sendImageWithUrl(self, to_, url):
    path = '%s/pythonLine-%i.data' % (tempfile.gettempdir(), random.randint(0, 9))
r = requests.get(url, stream=True)
if r.status_code == 200:
with open(path, 'w') as f:
shutil.copyfileobj(r.raw, f)
else:
raise Exception('Download image failure.')
try:
self.sendImage(to_, path)
except Exception as e:
raise e
def yt(query):
with requests.session() as s:
isi = []
if query == "":
query = "S1B tanysyz"
s.headers['user-agent'] = 'Mozilla/5.0'
url = 'http://www.youtube.com/results'
params = {'search_query': query}
r = s.get(url, params=params)
soup = BeautifulSoup(r.content, 'html5lib')
for a in soup.select('.yt-lockup-title > a[title]'):
if '&list=' not in a['href']:
if 'watch?v' in a['href']:
b = a['href'].replace('watch?v=', '')
isi += ['youtu.be' + b]
return isi
def _images_get_next_item(s):
start_line = s.find('rg_di')
if start_line == -1: #If no links are found then give an error!
end_quote = 0
link = "no_links"
return link, end_quote
else:
start_line = s.find('"class="rg_meta"')
start_content = s.find('"ou"',start_line+90)
end_content = s.find(',"ow"',start_content-90)
content_raw = str(s[start_content+6:end_content-1])
return content_raw, end_content
#Getting all links with the help of '_images_get_next_image'
def _images_get_all_items(page):
items = []
while True:
item, end_content = _images_get_next_item(page)
if item == "no_links":
break
else:
items.append(item) #Append all the links in the list named 'Links'
time.sleep(0.1) #Timer could be used to slow down the request for image downloads
page = page[end_content:]
return items
def upload_tempimage(client):
'''
Upload a picture of a kitten. We don't ship one, so get creative!
'''
config = {
'album': album,
'name': 'bot auto upload',
'title': 'bot auto upload',
'description': 'bot auto upload'
}
print("Uploading image... ")
image = client.upload_from_path(image_path, config=config, anon=False)
print("Done")
print()
def summon(to, nama):
aa = ""
bb = ""
strt = int(14)
akh = int(14)
nm = nama
for mm in nm:
akh = akh + 2
aa += """{"S":"""+json.dumps(str(strt))+""","E":"""+json.dumps(str(akh))+""","M":"""+json.dumps(mm)+"},"""
strt = strt + 6
akh = akh + 4
bb += "\xe2\x95\xa0 @x \n"
aa = (aa[:int(len(aa)-1)])
msg = Message()
msg.to = to
msg.text = "\xe2\x95\x94\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\n"+bb+"\xe2\x95\x9a\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90\xe2\x95\x90"
msg.contentMetadata ={'MENTION':'{"MENTIONEES":['+aa+']}','EMTVER':'4'}
print "[Command] Tag All"
try:
cl.sendMessage(msg)
except Exception as error:
print error
def waktu(secs):
mins, secs = divmod(secs,60)
hours, mins = divmod(mins,60)
return '%02d Jam %02d Menit %02d Detik' % (hours, mins, secs)
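# Hedged example: waktu(3725) -> '01 Jam 02 Menit 05 Detik' (1 hour, 2 minutes, 5 seconds).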
def cms(string, commands): #/XXX, >XXX, ;XXX, ^XXX, %XXX, $XXX...
tex = ["+","@","/",">",";","^","%","$","^","サテラ:","サテラ:","サテラ:","サテラ:"]
for texX in tex:
for command in commands:
if string ==command:
return True
return False
def sendMessage(self, messageObject):
return self.Talk.client.sendMessage(0,messageObject)
def sendText(self, Tomid, text):
msg = Message()
msg.to = Tomid
msg.text = text
return self.Talk.client.sendMessage(0, msg)
def sendImage(self, to_, path):
M = Message(to=to_, text=None, contentType = 1)
M.contentMetadata = None
M.contentPreview = None
M2 = self._client.sendMessage(0,M)
M_id = M2.id
files = {
'file': open(path, 'rb'),
}
params = {
'name': 'media',
'oid': M_id,
'size': len(open(path, 'rb').read()),
'type': 'image',
'ver': '1.0',
}
data = {
'params': json.dumps(params)
}
r = self.post_content('https://obs-sg.line-apps.com/talk/m/upload.nhn', data=data, files=files)
if r.status_code != 201:
raise Exception('Upload image failure.')
return True
def sendImage2(self, to_, path):
M = Message(to=to_,contentType = 1)
M.contentMetadata = None
M.contentPreview = None
M_id = self._client.sendMessage(M).id
files = {
'file': open(path, 'rb'),
}
params = {
'name': 'media',
'oid': M_id,
'size': len(open(path, 'rb').read()),
'type': 'image',
'ver': '1.0',
}
data = {
'params': json.dumps(params)
}
r = cl.post_content('https://os.line.naver.jp/talk/m/upload.nhn', data=data, files=files)
if r.status_code != 201:
raise Exception('Upload image failure.')
return True
def sendMessage(to, text, contentMetadata={}, contentType=0):
mes = Message()
mes.to, mes.from_ = to, profile.mid
mes.text = text
mes.contentType, mes.contentMetadata = contentType, contentMetadata
if to not in messageReq:
messageReq[to] = -1
messageReq[to] += 1
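# NOTIFIED_READ_MESSAGE(op): track which members have read past the stored read point of a chat (used by the #set / #read commands).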
def NOTIFIED_READ_MESSAGE(op):
try:
if op.param1 in wait2['readPoint']:
Name = cl.getContact(op.param2).displayName
if Name in wait2['readMember'][op.param1]:
pass
else:
wait2['readMember'][op.param1] += "\n・" + Name
wait2['ROM'][op.param1][op.param2] = "・" + Name
else:
pass
except:
pass
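# bot(op): main operation dispatcher; handles joins, group protection, kicks, invites and all chat commands.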
def bot(op):
try:
if op.type == 0:
return
if op.type == 5:
if wait["autoAdd"] == True:
cl.findAndAddContactsByMid(op.param1)
if (wait["message"] in [""," ","\n",None]):
pass
else:
cl.sendText(op.param1,str(wait["message"]))
if op.type == 11:
if op.param3 == '1':
if op.param1 in wait['pname']:
try:
G = cl.getGroup(op.param1)
except:
try:
G = ki1.getGroup(op.param1)
except:
try:
G = ki2.getGroup(op.param1)
except:
try:
G = ki3.getGroup(op.param1)
except:
try:
G = ki4.getGroup(op.param1)
except:
try:
G = ki5.getGroup(op.param1)
except:
pass
G.name = wait['pro_name'][op.param1]
try:
cl.updateGroup(G)
except:
try:
ki1.updateGroup(G)
except:
try:
ki2.updateGroup(G)
except:
try:
ki2.updateGroup(G)
except:
try:
ki3.updateGroup(G)
except:
try:
ki4.updateGroup(G)
except:
pass
if op.param2 in ken:
pass
else:
try:
ki1.kickoutFromGroup(op.param1,[op.param2])
except:
try:
ki1.kickoutFromGroup(op.param1,[op.param2])
except:
try:
ki2.kickoutFromGroup(op.param1,[op.param2])
except:
try:
ki3.kickoutFromGroup(op.param1,[op.param2])
except:
try:
ki4.kickoutFromGroup(op.param1,[op.param2])
except:
pass
cl.sendText(op.param1,"Group Name Lock")
ki1.sendText(op.param1,"Haddeuh dikunci Pe'a")
ki2.sendText(op.param1,"Wekawekaweka (Har Har)")
c = Message(to=op.param1, from_=None, text=None, contentType=13)
c.contentMetadata={'mid':op.param2}
cl.sendMessage(c)
if op.type == 13:
if op.param3 in mid:
if op.param2 in mid:
G = cl.getGroup(op.param1)
G.preventJoinByTicket = False
cl.updateGroup(G)
Ticket = cl.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ticket)
G.preventJoinByTicket = True
cl.updateGroup(G)
Ticket = cl.reissueGroupTicket(op.param1)
if op.param3 in mid:
if op.param2 in Amid1:
G = ki1.getGroup(op.param1)
G.preventJoinByTicket = False
ki1.updateGroup(G)
Ti = ki1.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ti)
G.preventJoinByTicket = True
ki1.updateGroup(G)
Ti = ki1.reissueGroupTicket(op.param1)
if op.param3 in mid:
if op.param2 in Amid2:
X = ki2.getGroup(op.param1)
X.preventJoinByTicket = False
ki2.updateGroup(X)
Ti = ki2.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ti)
X.preventJoinByTicket = True
ki2.updateGroup(X)
Ti = ki2.reissueGroupTicket(op.param1)
if op.param3 in mid:
if op.param2 in Amid3:
X = ki3.getGroup(op.param1)
X.preventJoinByTicket = False
ki3.updateGroup(X)
Ti = ki3.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ti)
X.preventJoinByTicket = True
ki3.updateGroup(X)
Ti = ki3.reissueGroupTicket(op.param1)
if op.param3 in mid:
if op.param2 in Amid4:
G = ki4.getGroup(op.param1)
G.preventJoinByTicket = False
ki4.updateGroup(G)
Ti = ki4.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ti)
G.preventJoinByTicket = True
ki4.updateGroup(G)
Ti = ki4.reissueGroupTicket(op.param1)
if op.param3 in mid:
if op.param2 in Amid5:
G = ki5.getGroup(op.param1)
G.preventJoinByTicket = False
ki5.updateGroup(G)
Ti = ki5.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ti)
G.preventJoinByTicket = True
ki5.updateGroup(G)
Ti = ki5.reissueGroupTicket(op.param1)
if op.param3 in Amid1:
if op.param2 in Amid2:
X = ki2.getGroup(op.param1)
X.preventJoinByTicket = False
ki2.updateGroup(X)
Ti = ki1.reissueGroupTicket(op.param1)
ki1.acceptGroupInvitationByTicket(op.param1,Ti)
X.preventJoinByTicket = True
ki2.updateGroup(X)
Ti = ki2.reissueGroupTicket(op.param1)
if op.param3 in Amid2:
if op.param2 in Amid3:
X = ki3.getGroup(op.param1)
X.preventJoinByTicket = False
ki3.updateGroup(X)
Ti = ki2.reissueGroupTicket(op.param1)
ki2.acceptGroupInvitationByTicket(op.param1,Ti)
X.preventJoinByTicket = True
ki3.updateGroup(X)
Ti = ki3.reissueGroupTicket(op.param1)
if op.param3 in Amid3:
if op.param2 in Amid4:
X = ki4.getGroup(op.param1)
X.preventJoinByTicket = False
ki4.updateGroup(X)
Ti = ki4.reissueGroupTicket(op.param1)
ki3.acceptGroupInvitationByTicket(op.param1,Ti)
X.preventJoinByTicket = True
ki4.updateGroup(X)
Ti = ki4.reissueGroupTicket(op.param1)
if op.param3 in Amid4:
if op.param2 in Amid5:
X = ki5.getGroup(op.param1)
X.preventJoinByTicket = False
ki5.updateGroup(X)
Ti = ki5.reissueGroupTicket(op.param1)
ki4.acceptGroupInvitationByTicket(op.param1,Ti)
X.preventJoinByTicket = True
ki5.updateGroup(X)
Ti = ki5.reissueGroupTicket(op.param1)
if op.param3 in Amid5:
if op.param2 in Amid1:
X = ki1.getGroup(op.param1)
X.preventJoinByTicket = False
ki1.updateGroup(X)
Ti = ki5.reissueGroupTicket(op.param1)
ki5.acceptGroupInvitationByTicket(op.param1,Ti)
X.preventJoinByTicket = True
ki1.updateGroup(X)
Ti = ki1.reissueGroupTicket(op.param1)
#===========================================
if op.type == 32:
if not op.param2 in Bots:
if wait["protectionOn"] == True:
try:
klist=[ki1,ki2,ki3,ki4,ki5]
kicker = random.choice(klist)
G = kicker.getGroup(op.param1)
kicker.kickoutFromGroup(op.param1,[op.param2])
kicker.inviteIntoGroup(op.param1, [op.param3])
except Exception, e:
print e
if op.type == 13:
if mid in op.param3:
G = cl.getGroup(op.param1)
if wait["autoJoin"] == True:
if wait["autoCancel"]["on"] == True:
if len(G.members) <= wait["autoCancel"]["members"]:
cl.rejectGroupInvitation(op.param1)
else:
cl.acceptGroupInvitation(op.param1)
cl.sendText(op.param1, "Your invitation was declined\n\n[SELFBOT PHET HACK BOT]]\n\nhttp://line.me/ti/p/09T2waRE7l")
else:
cl.acceptGroupInvitation(op.param1)
cl.sendText(op.param1, "Your invitation was declined\n\n[SELFBOT PHET HACK BOT]]\n\nhttp://line.me/ti/p/09T2waRE7l")
elif wait["autoCancel"]["on"] == True:
if len(G.members) <= wait["autoCancel"]["members"]:
cl.rejectGroupInvitation(op.param1)
else:
Inviter = op.param3.replace("",',')
InviterX = Inviter.split(",")
matched_list = []
for tag in wait["blacklist"]:
matched_list+=filter(lambda str: str == tag, InviterX)
if matched_list == []:
pass
else:
cl.cancelGroupInvitation(op.param1, matched_list)
if Amid1 in op.param3:
G = cl.getGroup(op.param1)
if wait["autoJoin"] == True:
if wait["autoCancel"]["on"] == True:
if len(G.members) <= wait["autoCancel"]["members"]:
ki1.rejectGroupInvitation(op.param1)
else:
ki1.acceptGroupInvitation(op.param1)
else:
ki1.acceptGroupInvitation(op.param1)
elif wait["autoCancel"]["on"] == True:
if len(G.members) <= wait["autoCancel"]["members"]:
ki1.rejectGroupInvitation(op.param1)
else:
Inviter = op.param3.replace("",',')
InviterX = Inviter.split(",")
matched_list = []
for tag in wait["blacklist"]:
matched_list+=filter(lambda str: str == tag, InviterX)
if matched_list == []:
pass
else:
ki1.cancelGroupInvitation(op.param1, matched_list)
if Amid2 in op.param3:
G = cl.getGroup(op.param1)
if wait["autoJoin"] == True:
if wait["autoCancel"]["on"] == True:
if len(G.members) <= wait["autoCancel"]["members"]:
ki2.rejectGroupInvitation(op.param1)
else:
ki2.acceptGroupInvitation(op.param1)
else:
ki2.acceptGroupInvitation(op.param1)
elif wait["autoCancel"]["on"] == True:
if len(G.members) <= wait["autoCancel"]["members"]:
ki2.rejectGroupInvitation(op.param1)
else:
Inviter = op.param3.replace("",',')
InviterX = Inviter.split(",")
matched_list = []
for tag in wait["blacklist"]:
matched_list+=filter(lambda str: str == tag, InviterX)
if matched_list == []:
pass
else:
ki2.cancelGroupInvitation(op.param1, matched_list)
if op.type == 11:
if not op.param2 in Bots:
if wait["qr"] == True:
try:
klist=[ki1,ki2,ki3,ki4,ki5]
kicker = random.choice(klist)
G = kicker.getGroup(op.param1)
G.preventJoinByTicket = True
kicker.updateGroup(G)
except Exception, e:
print e
if op.type == 11:
if not op.param2 in Bots:
if wait["protectionOn"] == True:
try:
klist=[ki1,ki2,ki3,ki4,ki5]
kicker = random.choice(klist)
G = kicker.getGroup(op.param1)
G.preventJoinByTicket = True
kicker.updateGroup(G)
kicker.kickoutFromGroup(op.param1,[op.param2])
G.preventJoinByTicket = True
kicker.updateGroup(G)
except Exception, e:
print e
if op.type == 13:
G = cl.getGroup(op.param1)
I = G.creator
if not op.param2 in Bots:
if wait["protectionOn"] == True:
klist=[ki1,ki2,ki3,ki4,ki5]
kicker = random.choice(klist)
G = kicker.getGroup(op.param1)
if G is not None:
gInviMids = [contact.mid for contact in G.invitee]
kicker.cancelGroupInvitation(op.param1, gInviMids)
if op.type == 19:
if not op.param2 in Bots:
try:
gs = ki1.getGroup(op.param1)
gs = ki2.getGroup(op.param1)
targets = [op.param2]
for target in targets:
try:
wait["blacklist"][target] = True
f=codecs.open('st2__b.json','w','utf-8')
json.dump(wait["blacklist"], f, sort_keys=True, indent=4,ensure_ascii=False)
except:
pass
except Exception, e:
print e
if not op.param2 in Bots:
if wait["Backup"] == True:
try:
random.choice(KAC).inviteIntoGroup(op.param1, [op.param3])
except Exception, e:
print e
if not op.param2 in Bots:
if wait["protectionOn"] == True:
try:
klist=[ki1,ki2,ki3,ki4,ki5]
kicker = random.choice(klist)
G = kicker.getGroup(op.param1)
G.preventJoinByTicket = False
kicker.updateGroup(G)
invsend = 0
Ticket = kicker.reissueGroupTicket(op.param1)
ki1.acceptGroupInvitationByTicket(op.param1,Ticket)
time.sleep(0.1)
X = kicker.getGroup(op.param1)
X.preventJoinByTicket = True
ki1.kickoutFromGroup(op.param1,[op.param2])
kicker.kickoutFromGroup(op.param1,[op.param2])
ki1.leaveGroup(op.param1)
kicker.updateGroup(X)
except Exception, e:
print e
if not op.param2 in Bots:
try:
gs = ki1.getGroup(op.param1)
gs = ki2.getGroup(op.param1)
targets = [op.param2]
for target in targets:
try:
wait["blacklist"][target] = True
f=codecs.open('st2__b.json','w','utf-8')
json.dump(wait["blacklist"], f, sort_keys=True, indent=4,ensure_ascii=False)
except:
pass
except Exception, e:
print e
if not op.param2 in Bots:
if wait["Backup"] == True:
try:
random.choice(KAC).inviteIntoGroup(op.param1, [op.param3])
except Exception, e:
print e
if op.type == 19:
if mid in op.param3:
if op.param2 in Bots:
pass
try:
ki1.kickoutFromGroup(op.param1,[op.param2])
except:
try:
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
except:
print ("client Kick regulation or Because it does not exist in the group、\n["+op.param1+"]\nの\n["+op.param2+"]\nを蹴る事ができませんでした。\nブラックリストに追加します。")
if op.param2 in wait["blacklist"]:
pass
if op.param2 in wait["whitelist"]:
pass
else:
wait["blacklist"][op.param2] = True
G = ki1.getGroup(op.param1)
G.preventJoinByTicket = False
ki1.updateGroup(G)
Ti = ki1.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki1.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki1.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki2.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki3.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki4.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki5.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
X = cl.getGroup(op.param1)
X.preventJoinByTicket = True
cl.updateGroup(X)
Ti = cl.reissueGroupTicket(op.param1)
if op.param2 in wait["blacklist"]:
pass
if op.param2 in wait["whitelist"]:
pass
else:
wait["blacklist"][op.param2] = True
if Amid1 in op.param3:
if op.param2 in Bots:
pass
try:
ki2.kickoutFromGroup(op.param1,[op.param2])
except:
try:
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
except:
print ("client が蹴り規制orグループに存在しない為、\n["+op.param1+"]\nの\n["+op.param2+"]\nBecause the client does not exist in the kick regulation or group.\nAdd it to the blacklist.")
if op.param2 in wait["blacklist"]:
pass
if op.param2 in wait["whitelist"]:
pass
else:
wait["blacklist"][op.param2] = True
X = ki2.getGroup(op.param1)
X.preventJoinByTicket = False
ki2.updateGroup(X)
Ti = ki2.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki1.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki1.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki2.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki3.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki4.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki5.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
X = ki1.getGroup(op.param1)
X.preventJoinByTicket = True
ki1.updateGroup(X)
Ticket = ki1.reissueGroupTicket(op.param1)
if op.param2 in wait["blacklist"]:
pass
if op.param2 in wait["whitelist"]:
pass
else:
wait["blacklist"][op.param2] = True
if Amid2 in op.param3:
if op.param2 in Bots:
pass
try:
ki3.kickoutFromGroup(op.param1,[op.param2])
except:
try:
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
except:
print ("client が蹴り規制orグループに存在しない為、\n["+op.param1+"]\nの\n["+op.param2+"]\nBecause the client does not exist in the kick regulation or group.\nAdd it to the blacklist.")
if op.param2 in wait["blacklist"]:
pass
if op.param2 in wait["whitelist"]:
pass
else:
wait["blacklist"][op.param2] = True
X = ki3.getGroup(op.param1)
X.preventJoinByTicket = False
ki3.updateGroup(X)
Ti = ki3.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki1.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki1.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki2.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki3.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki4.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki5.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
X = ki2.getGroup(op.param1)
X.preventJoinByTicket = True
ki2.updateGroup(X)
Ticket = ki2.reissueGroupTicket(op.param1)
if op.param2 in wait["blacklist"]:
pass
if op.param2 in wait["whitelist"]:
pass
else:
wait["blacklist"][op.param2] = True
if Amid3 in op.param3:
if op.param2 in Bots:
pass
try:
ki4.kickoutFromGroup(op.param1,[op.param2])
except:
try:
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
except:
print ("client が蹴り規制orグループに存在しない為、\n["+op.param1+"]\nの\n["+op.param2+"]\nBecause the client does not exist in the kick regulation or group.\nAdd it to the blacklist.")
if op.param2 in wait["blacklist"]:
pass
if op.param2 in wait["whitelist"]:
pass
else:
wait["blacklist"][op.param2] = True
X = ki4.getGroup(op.param1)
X.preventJoinByTicket = False
ki4.updateGroup(X)
Ti = ki4.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki1.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki1.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki2.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki3.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki4.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki5.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
X = ki3.getGroup(op.param1)
X.preventJoinByTicket = True
ki3.updateGroup(X)
Ticket = ki3.reissueGroupTicket(op.param1)
if op.param2 in wait["blacklist"]:
pass
if op.param2 in wait["whitelist"]:
pass
else:
wait["blacklist"][op.param2] = True
if Amid4 in op.param3:
if op.param2 in Bots:
pass
try:
ki5.kickoutFromGroup(op.param1,[op.param2])
except:
try:
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
except:
print ("client が蹴り規制orグルー�����に存在しない為、\n["+op.param1+"]\nの\n["+op.param2+"]\nBecause the client does not exist in the kick regulation or group.\nAdd it to the blacklist.")
if op.param2 in wait["blacklist"]:
pass
if op.param2 in wait["whitelist"]:
pass
else:
wait["blacklist"][op.param2] = True
X = ki5.getGroup(op.param1)
X.preventJoinByTicket = False
ki5.updateGroup(X)
Ti = ki5.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki1.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki1.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki2.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki3.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki4.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki5.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
X = ki4.getGroup(op.param1)
X.preventJoinByTicket = True
ki4.updateGroup(X)
Ticket = ki4.reissueGroupTicket(op.param1)
if op.param2 in wait["blacklist"]:
pass
if op.param2 in wait["whitelist"]:
pass
else:
wait["blacklist"][op.param2] = True
if Amid5 in op.param3:
if op.param2 in Bots:
pass
try:
ki1.kickoutFromGroup(op.param1,[op.param2])
except:
try:
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
except:
print ("client が蹴り規制orグループに存在しない為、\n["+op.param1+"]\nの\n["+op.param2+"]\nBecause the client does not exist in the kick regulation or group.\nAdd it to the blacklist.")
if op.param2 in wait["blacklist"]:
pass
if op.param2 in wait["whitelist"]:
pass
else:
wait["blacklist"][op.param2] = True
X = ki1.getGroup(op.param1)
X.preventJoinByTicket = False
ki1.updateGroup(X)
Ti = ki1.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki1.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki1.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki2.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki3.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki4.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki5.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
X = ki5.getGroup(op.param1)
X.preventJoinByTicket = True
ki5.updateGroup(X)
Ticket = ki5.reissueGroupTicket(op.param1)
if op.param2 in wait["blacklist"]:
pass
if op.param2 in wait["whitelist"]:
pass
else:
wait["blacklist"][op.param2] = True
if Amid5 in op.param3:
if op.param2 in Bots:
pass
try:
ki1.kickoutFromGroup(op.param1,[op.param2])
except:
try:
random.choice(KAC).kickoutFromGroup(op.param1,[op.param2])
except:
print ("client が蹴り規制orグループに存在しない為、\n["+op.param1+"]\nの\n["+op.param2+"]\nBecause the client does not exist in the kick regulation or group.\nAdd it to the blacklist.")
if op.param2 in wait["blacklist"]:
pass
if op.param2 in wait["whitelist"]:
pass
else:
wait["blacklist"][op.param2] = True
X = ki1.getGroup(op.param1)
X.preventJoinByTicket = False
ki1.updateGroup(X)
Ti = ki1.reissueGroupTicket(op.param1)
cl.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki1.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki1.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki2.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki3.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki4.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
ki5.acceptGroupInvitationByTicket(op.param1,Ti)
time.sleep(0.01)
X = ki5.getGroup(op.param1)
X.preventJoinByTicket = True
ki5.updateGroup(X)
Ticket = ki5.reissueGroupTicket(op.param1)
if op.param2 in wait["blacklist"]:
pass
if op.param2 in wait["whitelist"]:
pass
else:
wait["blacklist"][op.param2] = True
if op.type == 13:
if mid in op.param3:
if wait["pautoJoin"] == True:
cl.acceptGroupInvitation(op.param1)
else:
cl.rejectGroupInvitation(op.param1)
if op.type == 22:
if wait["leaveRoom"] == True:
cl.leaveRoom(op.param1)
if op.type == 24:
if wait["leaveRoom"] == True:
cl.leaveRoom(op.param1)
if op.type == 26:
msg = op.message
if msg.toType == 0:
msg.to = msg.from_
if msg.from_ == mid:
if "join:" in msg.text:
list_ = msg.text.split(":")
try:
cl.acceptGroupInvitationByTicket(list_[1],list_[2])
G = cl.getGroup(list_[1])
G.preventJoinByTicket = True
cl.updateGroup(G)
except:
cl.sendText(msg.to, "error")
if msg.toType == 1:
if wait["leaveRoom"] == True:
cl.leaveRoom(msg.to)
if msg.contentType == 16:
url = msg.contentMetadata["postEndUrl"]
cl.like(url[25:58], url[66:], likeType=1001)
if op.type == 26:
msg = op.message
if msg.to in settings["simiSimi"]:
if settings["simiSimi"][msg.to] == True:
if msg.text is not None:
text = msg.text
r = requests.get("http://api.ntcorp.us/chatbot/v1/?text=" + text.replace(" ","+") + "&key=beta1.nt")
data = r.text
data = json.loads(data)
if data['status'] == 200:
if data['result']['result'] == 100:
cl.sendText(msg.to, "[ChatBOT] " + data['result']['response'].encode('utf-8'))
if 'MENTION' in msg.contentMetadata.keys():
if wait["detectMention"] == True:
contact = cl.getContact(msg.from_)
cName = contact.displayName
balas = ["Dont Tag Me!! Im Busy",cName + " Ngapain Ngetag?",cName + " Nggak Usah Tag-Tag! Kalo Penting Langsung Pc Aja","-_-","Alin lagi off", cName + " Kenapa Tag saya?","SPAM PC aja " + cName, "Jangan Suka Tag gua " + cName, "Kamu siapa " + cName + "?", "Ada Perlu apa " + cName + "?","Tenggelamkan tuh yang suka tag pake BOT","Tersummon -_-"]
ret_ = "[Auto Respond] " + random.choice(balas)
name = re.findall(r'@(\w+)', msg.text)
mention = ast.literal_eval(msg.contentMetadata['MENTION'])
mentionees = mention['MENTIONEES']
for mention in mentionees:
if mention['M'] in Bots:
cl.sendText(msg.to,ret_)
break
if 'MENTION' in msg.contentMetadata.keys():
if wait["detectMention"] == True:
contact = cl.getContact(msg.from_)
cName = contact.displayName
balas = ["Dont Tag Me!! Im Busy",cName + ""]
ret_ = "[Auto] " + random.choice(balas)
name = re.findall(r'@(\w+)', msg.text)
mention = ast.literal_eval(msg.contentMetadata['MENTION'])
mentionees = mention['MENTIONEES']
for mention in mentionees:
if mention['M'] in Bots:
cl.sendText(msg.to,ret_)
msg.contentType = 7
msg.text = ''
msg.contentMetadata = {
'STKPKGID': '608',
'STKTXT': '[]',
'STKVER': '16',
'STKID':'5507'
}
cl.sendMessage(msg)
break
if 'MENTION' in msg.contentMetadata.keys():
if wait["kickMention"] == True:
contact = cl.getContact(msg.from_)
cName = contact.displayName
balas = ["Dont Tag Me!! Im Busy",cName + " Ngapain Ngetag?",cName + " Nggak Usah Tag-Tag! Kalo Penting Langsung Pc Aja","-_-","Alin lagi off", cName + " Kenapa Tag saya?","SPAM PC aja " + cName, "Jangan Suka Tag gua " + cName, "Kamu siapa " + cName + "?", "Ada Perlu apa " + cName + "?","Tenggelamkan tuh yang suka tag pake BOT","Tersummon -_-"]
ret_ = "[Auto Respond] " + random.choice(balas)
name = re.findall(r'@(\w+)', msg.text)
mention = ast.literal_eval(msg.contentMetadata['MENTION'])
mentionees = mention['MENTIONEES']
for mention in mentionees:
if mention['M'] in Bots:
cl.sendText(msg.to,ret_)
cl.kickoutFromGroup(msg.to,[msg.from_])
break
if msg.contentType == 13:
if wait["steal"] == True:
_name = msg.contentMetadata["displayName"]
copy = msg.contentMetadata["mid"]
groups = cl.getGroup(msg.to)
pending = groups.invitee
targets = []
for s in groups.members:
if _name in s.displayName:
print "[Target] Stealed"
break
else:
targets.append(copy)
if targets == []:
pass
else:
for target in targets:
try:
cl.findAndAddContactsByMid(target)
contact = cl.getContact(target)
cu = cl.channel.getCover(target)
path = str(cu)
image = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
cl.sendText(msg.to,"Nama :\n" + contact.displayName + "\n\nMid :\n" + msg.contentMetadata["mid"] + "\n\nBio :\n" + contact.statusMessage)
cl.sendText(msg.to,"Profile Picture " + contact.displayName)
cl.sendImageWithUrl(msg.to,image)
cl.sendText(msg.to,"Cover " + contact.displayName)
cl.sendImageWithUrl(msg.to,path)
wait["steal"] = False
break
except:
pass
if wait["alwayRead"] == True:
if msg.toType == 0:
cl.sendChatChecked(msg.from_,msg.id)
else:
cl.sendChatChecked(msg.to,msg.id)
if op.type == 25:
msg = op.message
if msg.contentType == 13:
if wait["wblack"] == True:
if msg.contentMetadata["mid"] in wait["commentBlack"]:
cl.sendText(msg.to,"already")
wait["wblack"] = False
else:
wait["commentBlack"][msg.contentMetadata["mid"]] = True
wait["wblack"] = False
cl.sendText(msg.to,"decided not to comment")
elif wait["dblack"] == True:
if msg.contentMetadata["mid"] in wait["commentBlack"]:
del wait["commentBlack"][msg.contentMetadata["mid"]]
cl.sendText(msg.to,"Done deleted")
wait["dblack"] = False
else:
wait["dblack"] = False
cl.sendText(msg.to,"It is not in the black list")
elif wait["wblacklist"] == True:
if msg.contentMetadata["mid"] in wait["blacklist"]:
cl.sendText(msg.to,"Done already")
wait["wblacklist"] = False
else:
wait["blacklist"][msg.contentMetadata["mid"]] = True
wait["wblacklist"] = False
cl.sendText(msg.to,"Done done aded")
elif wait["dblacklist"] == True:
if msg.contentMetadata["mid"] in wait["blacklist"]:
del wait["blacklist"][msg.contentMetadata["mid"]]
cl.sendText(msg.to,"Done deleted")
wait["dblacklist"] = False
else:
wait["dblacklist"] = False
cl.sendText(msg.to,"It is not in the black list")
elif wait["contact"] == True:
msg.contentType = 0
cl.sendText(msg.to,msg.contentMetadata["mid"])
if 'displayName' in msg.contentMetadata:
contact = cl.getContact(msg.contentMetadata["mid"])
try:
cu = cl.channel.getCover(msg.contentMetadata["mid"])
except:
cu = ""
cl.sendText(msg.to,"[displayName]:\n" + msg.contentMetadata["displayName"] + "\n[mid]:\n" + msg.contentMetadata["mid"] + "\n[statusMessage]:\n" + contact.statusMessage + "\n[pictureStatus]:\nhttp://dl.profile.line-cdn.net/" + contact.pictureStatus + "\n[coverURL]:\n" + str(cu))
else:
contact = cl.getContact(msg.contentMetadata["mid"])
try:
cu = cl.channel.getCover(msg.contentMetadata["mid"])
except:
cu = ""
cl.sendText(msg.to,"[displayName]:\n" + contact.displayName + "\n[mid]:\n" + msg.contentMetadata["mid"] + "\n[statusMessage]:\n" + contact.statusMessage + "\n[pictureStatus]:\nhttp://dl.profile.line-cdn.net/" + contact.pictureStatus + "\n[coverURL]:\n" + str(cu))
elif msg.contentType == 16:
if wait["timeline"] == True:
msg.contentType = 0
if wait["lang"] == "JP":
msg.text = "post URL\n" + msg.contentMetadata["postEndUrl"]
else:
msg.text = "URL→\n" + msg.contentMetadata["postEndUrl"]
cl.sendText(msg.to,msg.text)
elif msg.text is None:
return
elif msg.text in ["Help","คำสั่ง"]:
print "\nHelp pick up..."
if wait["lang"] == "JP":
cl.sendText(msg.to, helpMessage + "")
else:
cl.sendText(msg.to,helpt)
elif msg.text in ["Help2"]:
print "\nHelp pick up..."
if wait["lang"] == "JP":
cl.sendText(msg.to, helpMessage2 + "")
else:
cl.sendText(msg.to,helpt)
elif msg.text in ["Help3","Sett"]:
print "\nHelp pick up..."
if wait["lang"] == "JP":
cl.sendText(msg.to, helpMessage3 + "")
else:
cl.sendText(msg.to,helpt)
elif ("Gn:" in msg.text):
if msg.toType == 2:
X = cl.getGroup(msg.to)
X.name = msg.text.replace("Gn:","")
cl.updateGroup(X)
else:
cl.sendText(msg.to,"It can't be used besides the group.")
elif "Kick:" in msg.text:
midd = msg.text.replace("Kick:"," ")
klist=[ki2,ki3,ki4,ki5,ki1,cl]
kicker = random.choice(klist)
kicker.kickoutFromGroup(msg.to,[midd])
if op.type == 25:
msg = op.message
if msg.contentType == 13:
if wait["winvite"] == True:
if msg.from_ == admin:
_name = msg.contentMetadata["displayName"]
invite = msg.contentMetadata["mid"]
groups = cl.getGroup(msg.to)
pending = groups.invitee
targets = []
for s in groups.members:
if _name in s.displayName:
cl.sendText(msg.to,"-> " + _name + " was here")
break
elif invite in wait["blacklist"]:
cl.sendText(msg.to,"Sorry, " + _name + " On Blacklist")
cl.sendText(msg.to,"Call my daddy to use command !, \n➡Unban: " + invite)
break
else:
targets.append(invite)
if targets == []:
pass
else:
for target in targets:
try:
cl.findAndAddContactsByMid(target)
cl.inviteIntoGroup(msg.to,[target])
cl.sendText(msg.to,"Done Invite : \n➡" + _name)
wait["winvite"] = False
break
except:
try:
ki1.findAndAddContactsByMid(invite)
ki1.inviteIntoGroup(op.param1,[invite])
wait["winvite"] = False
except:
cl.sendText(msg.to,"Negative, Error detected")
wait["winvite"] = False
break
if msg.contentType == 13:
if wait['ainvite'] == True:
_name = msg.contentMetadata["displayName"]
invite = msg.contentMetadata["mid"]
groups = cl.getGroup(msg.to)
pending = groups.invitee
targets = []
for s in groups.members:
if _name in s.displayName:
ki1.sendText(msg.to, _name + " สมาชิกอยู่ในกลุ่มเเล้ว")
else:
targets.append(invite)
if targets == []:
pass
else:
for target in targets:
try:
ki1.findAndAddContactsByMid(target)
ki1.inviteIntoGroup(msg.to,[target])
ki1.sendText(msg.to,"Invite " + _name)
wait['ainvite'] = False
break
except:
ki1.sendText(msg.to,"Error")
wait['ainvite'] = False
break
if msg.contentType == 13:
if wait['binvite'] == True:
_name = msg.contentMetadata["displayName"]
invite = msg.contentMetadata["mid"]
groups = cl.getGroup(msg.to)
pending = groups.invitee
targets = []
for s in groups.members:
if _name in s.displayName:
ki2.sendText(msg.to, _name + " สมาชิกอยู่ในกลุ่มเเล้ว")
else:
targets.append(invite)
if targets == []:
pass
else:
for target in targets:
try:
ki2.findAndAddContactsByMid(target)
ki2.inviteIntoGroup(msg.to,[target])
ki2.sendText(msg.to,"Invite " + _name)
wait['binvite'] = False
break
except:
ki2.sendText(msg.to,"Error")
wait['binvite'] = False
break
#====================================================
elif "Contact" == msg.text:
msg.contentType = 13
msg.contentMetadata = {'mid': msg.to}
cl.sendMessage(msg)
#====================================================
elif msg.text.lower() == 'mybot':
msg.contentType = 13
msg.contentMetadata = {'mid': Amid1}
cl.sendMessage(msg)
msg.contentType = 13
msg.contentMetadata = {'mid': Amid2}
cl.sendMessage(msg)
msg.contentType = 13
msg.contentMetadata = {'mid': Amid3}
cl.sendMessage(msg)
msg.contentType = 13
msg.contentMetadata = {'mid': Amid4}
cl.sendMessage(msg)
msg.contentType = 13
msg.contentMetadata = {'mid': Amid5}
cl.sendMessage(msg)
#========================================
elif "Me" == msg.text:
msg.contentType = 13
msg.contentMetadata = {'mid': mid}
cl.sendMessage(msg)
#=========================================
elif "vdo:" in msg.text.lower():
if msg.toType == 2:
query = msg.text.split(":")
try:
if len(query) == 3:
isi = yt(query[2])
hasil = isi[int(query[1])-1]
cl.sendText(msg.to, hasil)
else:
isi = yt(query[1])
cl.sendText(msg.to, isi[0])
except Exception as e:
cl.sendText(msg.to, str(e))
elif 'ยูทูป ' in msg.text:
try:
textToSearch = (msg.text).replace('ยูทูป ', "").strip()
query = urllib.quote(textToSearch)
url = "https://www.youtube.com/results?search_query=" + query
response = urllib2.urlopen(url)
html = response.read()
soup = BeautifulSoup(html, "html.parser")
results = soup.find(attrs={'class':'yt-uix-tile-link'})
cl.sendText(msg.to,'https://www.youtube.com' + results['href'])
except:
cl.sendText(msg.to,"Could not find it")
elif msg.text in ["55"]:
msg.contentType = 7
msg.text = None
msg.contentMetadata = {
"STKID": "100",
"STKPKGID": "1",
"STKVER": "100" }
ki1.sendMessage(msg)
ki2.sendMessage(msg)
elif msg.text in ["เห้อ"]:
msg.contentType = 7
msg.text = None
msg.contentMetadata = {
"STKID": "10",
"STKPKGID": "1",
"STKVER": "100" }
ki1.sendMessage(msg)
ki2.sendMessage(msg)
elif "youname " in msg.text.lower():
txt = msg.text.replace("youname ", "")
cl.kedapkedip(msg.to,txt)
print "[Command] Kedapkedip"
elif "Bl " in msg.text:
if msg.from_ in admin:
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
targets = []
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
wait["blacklist"][target] = True
f=codecs.open('st2__b.json','w','utf-8')
json.dump(wait["blacklist"], f, sort_keys=True, indent=4,ensure_ascii=False)
cl.sendText(msg.to,"Done Banned")
print "[Command] Bannad"
except:
pass
#----------------------------------------------------------------------------
#------------------------------- UNBAN BY TAG -------------------------------
elif "Wl " in msg.text:
if msg.from_ in admin:
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
targets = []
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
del wait["blacklist"][target]
f=codecs.open('st2__b.json','w','utf-8')
json.dump(wait["blacklist"], f, sort_keys=True, indent=4,ensure_ascii=False)
cl.sendText(msg.to,"Done Unbanned")
print "[Command] Unbannad"
except:
pass
# elif msg.from_ in mimic["target"] and mimic["status"] == True and mimic["target"][msg.from_] == True:
# text = msg.text
# if text is not None:
# cl.sendText(msg.to,text)
# else:
# if msg.contentType == 7:
# msg.contentType = 7
# msg.text = None
# msg.contentMetadata = {
# "STKID": "6",
# "STKPKGID": "1",
# "STKVER": "100" }
# cl.sendMessage(msg)
# elif msg.contentType == 13:
# msg.contentType = 13
# msg.contentMetadata = {'mid': msg.contentMetadata["mid"]}
# cl.sendMessage(msg)
elif "Mimic:" in msg.text:
if msg.from_ in admin:
cmd = msg.text.replace("Mimic:","")
if cmd == "on":
if mimic["status"] == False:
mimic["status"] = True
cl.sendText(msg.to,"Mimic on\n\nเปิดการเลียนเเบบ")
else:
cl.sendText(msg.to,"Mimic already on\n\nเปิดการเลียนเเบบ")
elif cmd == "off":
if mimic["status"] == True:
mimic["status"] = False
cl.sendText(msg.to,"Mimic off\n\nปิดการเ���ียน���เบบ")
else:
cl.sendText(msg.to,"Mimic already off\n\nปิดการเลียนเเบบ")
elif "add:" in cmd:
target0 = msg.text.replace("Mimic:add:","")
target1 = target0.lstrip()
target2 = target1.replace("@","")
target3 = target2.rstrip()
_name = target3
gInfo = cl.getGroup(msg.to)
targets = []
for a in gInfo.members:
if _name == a.displayName:
targets.append(a.mid)
if targets == []:
cl.sendText(msg.to,"No targets\n\nเกิดผิดพลาด")
else:
for target in targets:
try:
mimic["target"][target] = True
cl.sendText(msg.to,"Success added target")
cl.sendMessageWithMention(msg.to,target)
break
except:
cl.sendText(msg.to,"โปรเเกรมเลียนเเบบทำงาน")
break
elif "del:" in cmd:
target0 = msg.text.replace("Mimic:del:","")
target1 = target0.lstrip()
target2 = target1.replace("@","")
target3 = target2.rstrip()
_name = target3
gInfo = cl.getGroup(msg.to)
targets = []
for a in gInfo.members:
if _name == a.displayName:
targets.append(a.mid)
if targets == []:
cl.sendText(msg.to,"No targets\n\nเกิดข้อผิดพลาด")
else:
for target in targets:
try:
del mimic["target"][target]
cl.sendText(msg.to,"Success deleted target")
cl.sendMessageWithMention(msg.to,target)
break
except:
cl.sendText(msg.to,"คุณลบการเลียนเเบบผู้ใช้นี้")
break
elif cmd == "list":
if mimic["target"] == {}:
cl.sendText(msg.to,"No target")
else:
lst = "<<List Target>>"
total = len(mimic["target"])
for a in mimic["target"]:
if mimic["target"][a] == True:
stat = "On"
else:
stat = "Off"
lst += "\n-> " + cl.getContact(a).displayName + " | " + stat
cl.sendText(msg.to,lst + "\nTotal: " + total)
#----------------------------------------------------------------------------
elif msg.text.lower() in ["botkill"]:
if msg.toType == 2:
group = cl.getGroup(msg.to)
gMembMids = [contact.mid for contact in group.members]
matched_list = []
for tag in wait["blacklist"]:
matched_list+=filter(lambda str: str == tag, gMembMids)
if matched_list == []:
cl.sendText(msg.to,"There was no blacklist user")
return
for jj in matched_list:
ki1.kickoutFromGroup(msg.to,[jj])
pass
elif msg.text.lower() in ["admins","mee"]:
msg.contentType = 13
adm = 'u00f827ce6641038d7c9b6704a9777dfa'
msg.contentMetadata = {'mid': adm}
cl.sendMessage(msg)
cl.sendText(msg.to,"Add Line http://line.me/ti/p/09T2waRE7l")
elif msg.text in ["Man gift","ของขวัญ"]:
msg.contentType = 9
msg.contentMetadata={'PRDID': 'a0768339-c2d3-4189-9653-2909e9bb6f58', 'PRDTYPE': 'THEME', 'MSGTPL': '1'}
msg.text = None
cl.sendMessage(msg)
msg.contentType = 9
msg.contentMetadata={'PRDID': 'a0768339-c2d3-4189-9653-2909e9bb6f58', 'PRDTYPE': 'THEME', 'MSGTPL': '2'}
msg.text = None
cl.sendMessage(msg)
msg.contentType = 9
msg.contentMetadata={'PRDID': 'a0768339-c2d3-4189-9653-2909e9bb6f58', 'PRDTYPE': 'THEME', 'MSGTPL': '3'}
msg.text = None
cl.sendMessage(msg)
msg.contentType = 9
msg.contentMetadata={'PRDID': 'a0768339-c2d3-4189-9653-2909e9bb6f58', 'PRDTYPE': 'THEME', 'MSGTPL': '4'}
msg.text = None
cl.sendMessage(msg)
msg.contentType = 9
msg.contentMetadata={'PRDID': 'a0768339-c2d3-4189-9653-2909e9bb6f58', 'PRDTYPE': 'THEME', 'MSGTPL': '5'}
msg.text = None
cl.sendMessage(msg)
msg.contentType = 9
msg.contentMetadata={'PRDID': 'a0768339-c2d3-4189-9653-2909e9bb6f58', 'PRDTYPE': 'THEME', 'MSGTPL': '7'}
msg.text = None
cl.sendMessage(msg)
msg.contentType = 9
msg.contentMetadata={'PRDID': 'a0768339-c2d3-4189-9653-2909e9bb6f58', 'PRDTYPE': 'THEME', 'MSGTPL': '8'}
msg.text = None
cl.sendMessage(msg)
msg.contentType = 9
msg.contentMetadata={'PRDID': 'a0768339-c2d3-4189-9653-2909e9bb6f58', 'PRDTYPE': 'THEME', 'MSGTPL': '6'}
msg.text = None
cl.sendMessage(msg)
#VPS STUFF - VPS NEEDED TO RUN THIS COMMAND :)
elif msg.text in ["vps","kernel","Vps"]:
if msg.from_ in admin:
botKernel = subprocess.Popen(["uname","-svmo"], stdout=subprocess.PIPE).communicate()[0]
cl.sendText(msg.to, botKernel)
print "[Command]Kernel executed"
else:
cl.sendText(msg.to,"Command denied.")
cl.sendText(msg.to,"Admin permission required.")
print "[Error]Command denied - Admin permission required"
elif "Gc" == msg.text:
try:
group = cl.getGroup(msg.to)
GS = group.creator.mid
M = Message()
M.to = msg.to
M.contentType = 13
M.contentMetadata = {'mid': GS}
cl.sendMessage(M)
except:
W = group.members[0].mid
M = Message()
M.to = msg.to
M.contentType = 13
M.contentMetadata = {'mid': W}
cl.sendMessage(M)
cl.sendText(msg.to,"old user")
elif 'ขอเพลง ' in msg.text:
try:
textToSearch = (msg.text).replace('ขอเพลง ', "").strip()
query = urllib.quote(textToSearch)
url = "https://www.youtube.com/results?search_query=" + query
response = urllib2.urlopen(url)
html = response.read()
soup = BeautifulSoup(html, "html.parser")
results = soup.find(attrs={'class':'yt-uix-tile-link'})
cl.sendText(msg.to,'https://www.youtube.com' + results['href'])
except:
cl.sendText(msg.to,"Could not find it")
elif "#set" in msg.text:
cl.sendText(msg.to, "Let's see who lazy to type")
try:
del wait2['readPoint'][msg.to]
del wait2['readMember'][msg.to]
except:
pass
wait2['readPoint'][msg.to] = msg.id
wait2['readMember'][msg.to] = ""
wait2['setTime'][msg.to] = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
wait2['ROM'][msg.to] = {}
print wait2
elif "#read" in msg.text:
if msg.to in wait2['readPoint']:
if wait2["ROM"][msg.to].items() == []:
chiya = ""
else:
chiya = ""
for rom in wait2["ROM"][msg.to].items():
print rom
chiya += rom[1] + "\n"
cl.sendText(msg.to, "people who reading%s\n is this\n\n\nDate and time I started it:\n[%s]" % (wait2['readMember'][msg.to],setTime[msg.to]))
else:
cl.sendText(msg.to, "read point not set\nReading point setting you send it it will send an esxisting one")
elif msg.text in ["Myginfoid"]:
gid = cl.getGroupIdsJoined()
g = ""
for i in gid:
g += "[%s]:%s\n" % (cl.getGroup(i).name,i)
cl.sendText(msg.to,g)
elif msg.text in ["P1 invite","P1 Invite"]:
wait["ainvite"] = True
ki.sendText(msg.to,"Send Contact")
elif msg.text in ["P2 invite","P2 Invite"]:
wait["binvite"] = True
kk.sendText(msg.to,"Send Contact")
#==================================================
elif "#ประกาศ:" in msg.text:
bctxt = msg.text.replace("#ประกาศ:", "")
a = cl.getGroupIdsJoined()
for manusia in a:
cl.sendText(manusia, (bctxt))
elif msg.text.lower() == 'bann':
blockedlist = cl.getBlockedContactIds()
cl.sendText(msg.to, "Please wait...")
kontak = cl.getContacts(blockedlist)
num=1
msgs="User Blocked List\n"
for ids in kontak:
msgs+="\n%i. %s" % (num, ids.displayName)
num=(num+1)
msgs+="\n\nTotal %i blocked user(s)" % len(kontak)
cl.sendText(msg.to, msgs)
elif "#หำ1:" in msg.text:
string = msg.text.replace("#หำ1:","")
if len(string.decode('utf-8')) <= 20:
profile = ki1.getProfile()
profile.displayName = string
ki1.updateProfile(profile)
elif msg.text in ["บอทเข้า","#Kicker","#kicker","Kicker","kicker","•••"]:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki1.acceptGroupInvitationByTicket(msg.to,Ticket)
ki2.acceptGroupInvitationByTicket(msg.to,Ticket)
ki3.acceptGroupInvitationByTicket(msg.to,Ticket)
ki4.acceptGroupInvitationByTicket(msg.to,Ticket)
ki5.acceptGroupInvitationByTicket(msg.to,Ticket)
ki1.sendText(msg.to,"[TAEM SELFBOT THAILAND]")
ki2.sendText(msg.to,"[Do not think will try.]")
ki3.sendText(msg.to,"[☠ตาต้อมบอทไลน์☠")
ki1.sendText(msg.to,"Hello " + str(ginfo.name) + "\n\n[By.TOME BOTLINE]")
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki1.updateGroup(G)
print "kicker ok"
G.preventJoinByTicket = True
ki1.updateGroup(G)
elif msg.text in ["Join all"]:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki1.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.0)
ki2.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.0)
ki3.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.0)
ki4.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.0)
ki5.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.0)
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki1.updateGroup(G)
print "kicker ok"
G.preventJoinByTicket = True
ki1.updateGroup(G)
elif msg.text in ["ออก","บอทออก","Bye","#bye"]:
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
ki1.sendText(msg.to,"Bye~Bye " + str(ginfo.name) + "\n\n[By.ตาต้อม บอทไลน์]")
ki1.leaveGroup(msg.to)
ki2.sendText(msg.to,"Bye~Bye " + str(ginfo.name) + "\n\n[By.ตาต้อม บอทไลน์]")
ki2.leaveGroup(msg.to)
ki3.sendText(msg.to,"Bye~Bye " + str(ginfo.name) + "\n\n[By.ตาต้อม บอทไลน์]")
ki3.leaveGroup(msg.to)
ki4.sendText(msg.to,"Bye~Bye " + str(ginfo.name) + "\n\n[By.ตาต้อม บอทไลน์]")
ki4.leaveGroup(msg.to)
ki5.sendText(msg.to,"Bye~Bye " + str(ginfo.name) + "\n\n[By.ตาต้อม บอทไลน์]")
ki5.leaveGroup(msg.to)
except:
pass
elif msg.text.lower() == '#byeall':
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
ki1.leaveGroup(msg.to)
ki2.leaveGroup(msg.to)
ki3.leaveGroup(msg.to)
ki4.leaveGroup(msg.to)
ki5.leaveGroup(msg.to)
except:
pass
elif "#v10" in msg.text:
cl.sendText(msg.to,"""[SELFBOT TOME BOTline]\n\n""")
#==================================================
elif msg.text in ["Invite"]:
if msg.from_ in admin:
wait["winvite"] = True
cl.sendText(msg.to,"send contact")
elif msg.text in ["เชิญ"]:
if msg.from_ in admin:
wait["winvite"] = True
cl.sendText(msg.to,"send contact")
elif msg.text in ["Invite off"]:
if msg.from_ in admin:
wait["winvite"] = False
cl.sendText(msg.to,"Done..")
elif msg.text in ["Bot1 invite contact","1เชิญ"]:
if msg.from_ in admin:
wait["ainvite"] = True
ki1.sendText(msg.to,"send contact")
elif msg.text in ["Bot2 invite contact","2เชิญ"]:
if msg.from_ in admin:
wait["binvite"] = True
ki2.sendText(msg.to,"send contact")
elif ("!Kick " in msg.text):
targets = []
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"] [0] ["M"]
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
cl.kickoutFromGroup(msg.to,[target])
cl.inviteIntoGroup(msg.to,[target])
cl.cancelGroupInvitation(msg.to,[target])
except:
cl.sendText(msg.to,"Error")
elif '123zzz' in msg.text.lower():
key = msg.text[-33:]
cl.findAndAddContactsByMid(key)
cl.inviteIntoGroup(msg.to, [key])
contact = cl.getContact(key)
elif msg.text in ["ยกเลิก"]:
if msg.toType == 2:
X = cl.getGroup(msg.to)
if X.invitee is not None:
gInviMids = [contact.mid for contact in X.invitee]
cl.cancelGroupInvitation(msg.to, gInviMids)
else:
if wait["lang"] == "JP":
cl.sendText(msg.to,"No one is inviting。")
else:
cl.sendText(msg.to,"Sorry, nobody absent")
else:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Can not be used outside the group")
else:
cl.sendText(msg.to,"Not for use less than group")
elif msg.text in ["บอทยกเลิก"]:
if msg.toType == 2:
klist=[ki1,ki2,ki3,ki4,ki5]
kicker = random.choice(klist)
G = kicker.getGroup(msg.to)
if G.invitee is not None:
gInviMids = [contact.mid for contact in G.invitee]
kicker.cancelGroupInvitation(msg.to, gInviMids)
else:
if wait["lang"] == "JP":
kicker.sendText(msg.to,"No one is inviting")
else:
kicker.sendText(msg.to,"Sorry, nobody absent")
else:
if wait["lang"] == "JP":
kicker.sendText(msg.to,"Can not be used outside the group")
else:
kicker.sendText(msg.to,"Not for use less than group")
elif msg.text in ["#Link on"]:
uye = random.choice(KAC)
if msg.toType == 2:
X = cl.getGroup(msg.to)
X.preventJoinByTicket = False
uye.updateGroup(X)
if wait["lang"] == "JP":
uye.sendText(msg.to,"done")
else:
uye.sendText(msg.to,"already open")
else:
if wait["lang"] == "JP":
uye.sendText(msg.to,"Can not be used outside the group")
else:
uye.sendText(msg.to,"Not for use less than group")
elif msg.text in ["Link on"]:
if msg.toType == 2:
X = cl.getGroup(msg.to)
X.preventJoinByTicket = False
cl.updateGroup(X)
if wait["lang"] == "JP":
cl.sendText(msg.to,"done")
else:
cl.sendText(msg.to,"already open")
else:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Can not be used outside the group")
else:
cl.sendText(msg.to,"Not for use less than group")
elif msg.text in ["Link off"]:
if msg.toType == 2:
X = cl.getGroup(msg.to)
X.preventJoinByTicket = True
cl.updateGroup(X)
if wait["lang"] == "JP":
cl.sendText(msg.to,"done")
else:
cl.sendText(msg.to,"already close")
else:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Can not be used outside the group")
else:
cl.sendText(msg.to,"Not for use less than group")
elif msg.text.lower() == 'ginfo':
ginfo = cl.getGroup(msg.to)
try:
gCreator = ginfo.creator.displayName
except:
gCreator = "Error"
if wait["lang"] == "JP":
if ginfo.invitee is None:
sinvitee = "0"
else:
sinvitee = str(len(ginfo.invitee))
msg.contentType = 13
msg.contentMetadata = {'mid': ginfo.creator.mid}
cl.sendText(msg.to,"[Nama]\n" + str(ginfo.name) + "\n[Group Id]\n" + msg.to + "\n\n[Group Creator]\n" + gCreator + "\n\nAnggota:" + str(len(ginfo.members)) + "\nInvitation:" + sinvitee + "")
cl.sendMessage(msg)
elif msg.text in ["!Glist","Myginfo"]:
gs = cl.getGroupIdsJoined()
L = "☫『 Groups List 』☫\n"
for i in gs:
L += "[⭐] %s \n" % (cl.getGroup(i).name + " | [ " + str(len (cl.getGroup(i).members)) + " ]")
cl.sendText(msg.to, L + "\nTotal Group : [ " + str(len(gs)) +" ]")
elif msg.text in ["Selfbot"]:
msg.contentType = 13
msg.contentMetadata = {'mid': mid}
cl.sendMessage(msg)
cl.sendText(msg.to,"[SELFBOT PHET HACK BOT]")
elif "Id" == msg.text:
key = msg.to
cl.sendText(msg.to, key)
elif ("Hack " in msg.text):
key = eval(msg.contentMetadata["MENTION"])
key1 = key["MENTIONEES"][0]["M"]
mi = cl.getContact(key1)
cl.sendText(msg.to,"Mid:" + key1)
elif "Mid:" in msg.text:
mmid = msg.text.replace("Mid:","")
msg.contentType = 13
msg.contentMetadata = {"mid":mmid}
cl.sendMessage(msg)
elif "Key" in msg.text:
cl.sendText(msg.to,""" [{PHET HACK BOT}] \n\n key Only Kicker \n\n[Kb1 in]\n[1Aditname:]\n[B Cancel]\n[kick @]\n[Ban @]\n[kill]\n[BotChat]\n[Respons]\n[Pb1 Gift]\n[Pb1 bye]""")
elif msg.text.lower() == 'ยกเลิก1':
if msg.toType == 2:
group = cl.getGroup(msg.to)
gMembMids = [contact.mid for contact in group.invitee]
for _mid in gMembMids:
cl.cancelGroupInvitation(msg.to,[_mid])
cl.sendText(msg.to,"I pretended to cancel and canceled(๑و•̀ω•́)و")
elif msg.text.lower() == 'บอทยกเลิก1':
if msg.toType == 2:
group = cl.getGroup(msg.to)
gMembMids = [contact.mid for contact in group.invitee]
for _mid in gMembMids:
ki1.cancelGroupInvitation(msg.to,[_mid])
ki1.sendText(msg.to,"I pretended to cancel and canceled(๑و•̀ω•́)و")
cl.sendText(msg.to,"I pretended to cancel and canceled(๑و•̀ω•́)و")
elif "Me @" in msg.text:
msg.contentType = 13
_name = msg.text.replace("Me @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
for g in gs.members:
if _nametarget == g.displayName:
msg.contentMetadata = {'mid': g.mid}
cl.sendMessage(msg)
else:
pass
elif "#cb" in msg.text:
nk0 = msg.text.replace("#cb","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"😏")
pass
else:
for target in targets:
try:
wait["blacklist"][target] = True
f=codecs.open('st2__b.json','w','utf-8')
json.dump(wait["blacklist"], f, sort_keys=True, indent=4,ensure_ascii=False)
cl.sendText(msg.to,"😏")
except:
cl.sendText(msg.to,"😏")
elif "#Banall" in msg.text:
nk0 = msg.text.replace("#Banall","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
wait["blacklist"][target] = True
f=codecs.open('st2__b.json','w','utf-8')
json.dump(wait["blacklist"], f, sort_keys=True, indent=4,ensure_ascii=False)
cl.sendText(msg.to,"Target Locked")
except:
cl.sendText(msg.to,"Error")
elif "#Unbanall" in msg.text:
nk0 = msg.text.replace("#Unbanall","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
del wait["blacklist"][target]
f=codecs.open('st2__b.json','w','utf-8')
json.dump(wait["blacklist"], f, sort_keys=True, indent=4,ensure_ascii=False)
cl.sendText(msg.to,"Target Unlocked")
except:
cl.sendText(msg.to,"Error")
elif "Mid" == msg.text:
cl.sendText(msg.to,mid)
elif msg.text == "กลุ่ม":
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
gCreator = ginfo.creator.displayName
except:
gCreator = "ไม่พบผู้สร้างกลุ่ม"
if wait["lang"] == "JP":
if ginfo.invitee is None:
sinvitee = "0"
else:
sinvitee = str(len(ginfo.invitee))
if ginfo.preventJoinByTicket == True:
u = "[ปิด]"
else:
u = "[เปิด]"
cl.sendText(msg.to,"[ชื่อของกลุ่ม]:\n" + str(ginfo.name) + "\n[Gid]:\n" + msg.to + "\n[ผู้สร้างกลุ่ม:]\n" + gCreator + "\n[ลิ้งค์รูปกลุ่ม]:\nhttp://dl.profile.line.naver.jp/" + ginfo.pictureStatus + "\n[จำนวนสมาชิก]:" + str(len(ginfo.members)) + "คน\n[จำนวนค้างเชิญ]:" + sinvitee + "คน\n[สถานะลิ้งค์]:" + u + "URL [By. ตาต้อม บอทไลน์]")
else:
cl.sendText(msg.to,"Nama Gourp:\n" + str(ginfo.name) + "\nGid:\n" + msg.to + "\nCreator:\n" + gCreator + "\nProfile:\nhttp://dl.profile.line.naver.jp/" + ginfo.pictureStatus)
else:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Can not be used outside the group")
else:
cl.sendText(msg.to,"Not for use less than group")
elif "Bot1@@" in msg.text:
group = cl.getGroup(msg.to)
k = len(group.members)//100
for j in xrange(k+1):
msg = Message(to=msg.to)
txt = u''
s=0
d=[]
for i in group.members[j*200 : (j+1)*200]:
d.append({"S":str(s), "E" :str(s+8), "M":i.mid})
s += 9
txt += u'@Krampus\n'
msg.text = txt
msg.contentMetadata = {u'MENTION':json.dumps({"MENTIONEES":d})}
ki1.sendMessage(msg)
elif "Bot?" in msg.text:
ki1.sendText(msg.to,"Bot 💀1💀 \n•─ ͜͡✫ѕєʟғвот[ᴍ̶̲̅ᴀ̶̲̅ɴ̶̲̅]κɪcκєʀ ͜͡✫─•")
ki2.sendText(msg.to,"Bot 💀2💀 \n•─ ͜͡✫ѕєʟғвот[ᴍ̶̲̅ᴀ̶̲̅ɴ̶̲̅]κɪcκєʀ ͜͡✫─•")
ki3.sendText(msg.to,"Bot 💀3💀 \n•─ ͜͡✫ѕєʟғвот[ᴍ̶̲̅ᴀ̶̲̅ɴ̶̲̅]κɪcκєʀ ͜͡✫─•")
ki4.sendText(msg.to,"Bot 💀4💀 \n•─ ͜͡✫ѕєʟғвот[ᴍ̶̲̅ᴀ̶̲̅ɴ̶̲̅]κɪcκєʀ ͜͡✫─•")
ki5.sendText(msg.to,"Bot 💀5💀 \n•─ ͜͡✫ѕєʟғвот[ᴍ̶̲̅ᴀ̶̲̅ɴ̶̲̅]κɪcκєʀ ͜͡✫─•")
ki1.sendText(msg.to,"Bot 💀6💀 \n•─ ͜͡✫ѕєʟғвот[ᴍ̶̲̅ᴀ̶̲̅ɴ̶̲̅]κɪcκєʀ ͜͡✫─•")
ki2.sendText(msg.to,"Bot 💀7💀 \n•─ ͜͡✫ѕєʟғвот[ᴍ̶̲̅ᴀ̶̲̅ɴ̶̲̅]κɪcκєʀ ͜͡✫─•")
ki3.sendText(msg.to,"Bot 💀8💀 \n•─ ͜͡✫ѕєʟғвот[ᴍ̶̲̅ᴀ̶̲̅ɴ̶̲̅]κɪcκєʀ ͜͡✫─•")
ki4.sendText(msg.to,"Bot 💀9💀 \n•─ ͜͡✫ѕєʟғвот[ᴍ̶̲̅ᴀ̶̲̅ɴ̶̲̅]κɪcκєʀ ͜͡✫─•")
ki5.sendText(msg.to,"Bot 💀10💀 \n•─ ͜͡✫ѕєʟғвот[ᴍ̶̲̅ᴀ̶̲̅ɴ̶̲̅]κɪcκєʀ ͜͡✫─•")
ki1.sendText(msg.to,"Bot 💀11💀 \n•─ ͜͡✫ѕєʟғвот[ᴍ̶̲̅ᴀ̶̲̅ɴ̶̲̅]κɪcκєʀ ͜͡✫─•")
ki2.sendText(msg.to,"Bot 💀12💀 \n•─ ͜͡✫ѕєʟғвот[ᴍ̶̲̅ᴀ̶̲̅ɴ̶̲̅]κɪcκєʀ ͜͡✫─•")
ki3.sendText(msg.to,"Bot 💀13💀 \n•─ ͜͡✫ѕєʟғвот[ᴍ̶̲̅ᴀ̶̲̅ɴ̶̲̅]κɪcκєʀ ͜͡✫─•")
ki4.sendText(msg.to,"Bot 💀14💀 \n•─ ͜͡✫ѕєʟғвот[ᴍ̶̲̅ᴀ̶̲̅ɴ̶̲̅]κɪcκєʀ ͜͡✫─•")
ki5.sendText(msg.to,"Bot 💀15💀 \n•─ ͜͡✫ѕєʟғвот[ᴍ̶̲̅ᴀ��̶̅ɴ̶̲̅]κɪcκєʀ ͜͡✫─•")
ki1.sendText(msg.to,"Bot 💀16💀 \n•─ ͜͡✫ѕєʟғвот[ᴍ̶̲̅ᴀ̶̲̅ɴ̶̲̅]κɪcκєʀ ͜͡✫─•")
ki2.sendText(msg.to,"Bot 💀17💀 \n•─ ͜͡✫ѕєʟғвот[ᴍ̶̲̅ᴀ̶̲̅ɴ̶̲̅]κɪcκєʀ ͜͡✫─•")
ki3.sendText(msg.to,"Bot 💀18💀 \n•─ ͜͡✫ѕєʟғвот[ᴍ̶̲̅ᴀ̶̲̅ɴ̶̲̅]κɪcκєʀ ͜͡✫─•")
elif "Say " in msg.text:
bctxt = msg.text.replace("Say ","")
ki1.sendText(msg.to,(bctxt))
ki2.sendText(msg.to,(bctxt))
ki3.sendText(msg.to,(bctxt))
ki4.sendText(msg.to,(bctxt))
ki5.sendText(msg.to,(bctxt))
elif "All mid" == msg.text:
ki1.sendText(msg.to,Amid1)
ki2.sendText(msg.to,Amid2)
ki3.sendText(msg.to,Amid3)
ki4.sendText(msg.to,Amid4)
ki5.sendText(msg.to,Amid5)
elif msg.text in ["Protect:on","Protect on","เปิดป้องกัน"]:
if wait["protectionOn"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Already on\n\n"+ datetime.today().strftime('%H:%M:%S'))
else:
cl.sendText(msg.to,"Protection On\n\n"+ datetime.today().strftime('%H:%M:%S'))
else:
wait["protectionOn"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"Protection On\n\n"+ datetime.today().strftime('%H:%M:%S'))
else:
cl.sendText(msg.to,"Already on\n\n"+ datetime.today().strftime('%H:%M:%S'))
elif msg.text in ["Qr:off","Qr off"]:
if wait["qr"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Already off\n\n"+ datetime.today().strftime('%H:%M:%S'))
else:
cl.sendText(msg.to,"Protection QR PRO Off\n\n"+ datetime.today().strftime('%H:%M:%S'))
else:
wait["qr"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"Protection QR PRO Off\n\n"+ datetime.today().strftime('%H:%M:%S'))
else:
cl.sendText(msg.to,"Already off\n\n"+ datetime.today().strftime('%H:%M:%S'))
elif msg.text in ["Qr:on","Qr on"]:
if wait["qr"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Already on\n\n"+ datetime.today().strftime('%H:%M:%S'))
else:
cl.sendText(msg.to,"Protection QR PRO On\n\n"+ datetime.today().strftime('%H:%M:%S'))
else:
wait["qr"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"Protection QR PRO On\n\n"+ datetime.today().strftime('%H:%M:%S'))
else:
cl.sendText(msg.to,"Already on")
elif msg.text in ["Protect:off","Protect off","ปิดป้องกัน"]:
if wait["protectionOn"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Already off\n\n"+ datetime.today().strftime('%H:%M:%S'))
else:
cl.sendText(msg.to,"Protection Off\n\n"+ datetime.today().strftime('%H:%M:%S'))
else:
wait["protectionOn"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"Protection Off\n\n"+ datetime.today().strftime('%H:%M:%S'))
else:
cl.sendText(msg.to,"Already off\n\n"+ datetime.today().strftime('%H:%M:%S'))
elif "Namelock:on" in msg.text:
if msg.to in wait['pname']:
cl.sendText(msg.to,"Done..")
else:
cl.sendText(msg.to,"bone..")
wait['pname'][msg.to] = True
wait['pro_name'][msg.to] = cl.getGroup(msg.to).name
elif "Namelock:off" in msg.text:
if msg.to in wait['pname']:
cl.sendText(msg.to,"Done..")
del wait['pname'][msg.to]
else:
cl.sendText(msg.to,"bone..")
elif "Blockinvite:on" == msg.text:
gid = msg.to
autocancel[gid] = "poni"
cl.sendText(msg.to,"Done..")
elif "Blockinvite:off" == msg.text:
try:
del autocancel[msg.to]
cl.sendText(msg.to,"Done..")
except:
pass
elif "Cn: " in msg.text:
string = msg.text.replace("Cn: ","")
if len(string.decode('utf-8')) <= 20:
profile = cl.getProfile()
profile.displayName = string
cl.updateProfile(profile)
cl.sendText(msg.to,"Name " + string + " Done Bosqu")
elif msg.text in ["invite:on"]:
if msg.from_ in admin:
wait["winvite"] = True
cl.sendText(msg.to,"send contact")
elif "Mc " in msg.text:
key = eval(msg.contentMetadata["MENTION"])
key1 = key["MENTIONEES"][0]["M"]
cl.sendText(msg.to,"Mc: " + key1)
elif "Mc: " in msg.text:
mmid = msg.text.replace("Mc: ","")
msg.contentType = 13
msg.contentMetadata = {"mid":mmid}
ki1.sendMessage(msg)
ki2.sendMessage(msg)
ki3.sendMessage(msg)
ki4.sendMessage(msg)
ki5.sendMessage(msg)
elif msg.text in ["K on","Contact:on","Contact on","K:on"]:
if wait["contact"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Sudah on Bosqu")
else:
cl.sendText(msg.to,"Ok Bosqu")
else:
wait["contact"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"Sudah on Bosqu")
else:
cl.sendText(msg.to,"Ok Bosqu")
elif msg.text in ["contact v"]:
if msg.from_ in admin:
wait["winvite"] = True
random.choice(KAC).sendText(msg.to,"send contact")
elif msg.text in ["K:off","Contact:off","Contact off","K off"]:
if wait["contact"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Sudah off Bosqu")
else:
cl.sendText(msg.to,"Ok Bosqu ")
else:
wait["contact"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"Sudah on Bosqu")
else:
cl.sendText(msg.to,"Ok Bosqu")
elif msg.text in ["Auto join on","Join on","Join:on","Auto join:on","Poin on"]:
if wait["autoJoin"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Sudah on Bosqu")
else:
cl.sendText(msg.to,"Ok Bosqu")
else:
wait["autoJoin"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"Sudah on Bosqu")
else:
cl.sendText(msg.to,"Ok Bosqu")
elif msg.text in ["Join off","Auto join off","Auto join:off","Join:off","Poin off"]:
if wait["autoJoin"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Sudah off Bosqu")
else:
cl.sendText(msg.to,"Ok Bosqu")
else:
wait["autoJoin"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"Sudah off Bosqu")
else:
cl.sendText(msg.to,"Ok Bosqu")
elif "Gcancel:" in msg.text:
try:
strnum = msg.text.replace("Gcancel:","")
if strnum == "off":
wait["autoCancel"]["on"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"Invitation refused turned off\nTo turn on please specify the number of people and send")
else:
cl.sendText(msg.to,"关了邀请拒绝。要���开请指定人数发送")
else:
num = int(strnum)
wait["autoCancel"]["on"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,strnum + " The group of people and below decided to automatically refuse invitation")
else:
cl.sendText(msg.to,strnum + "使人以下的小组用自动邀请拒绝")
except:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Value is wrong")
else:
cl.sendText(msg.to,"Bizarre ratings")
elif msg.text in ["Leave:on","Auto leave on","Auto leave:on","Leave on"]:
if wait["leaveRoom"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"already on")
else:
cl.sendText(msg.to,"done")
else:
wait["leaveRoom"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"done")
else:
cl.sendText(msg.to,"要了开。")
elif msg.text in ["Leave:off","Auto leave off","Auto leave:off","Leave off"]:
if wait["leaveRoom"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"already off")
else:
cl.sendText(msg.to,"done")
else:
wait["leaveRoom"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"done")
else:
cl.sendText(msg.to,"already")
elif msg.text in ["共有:オン","Share on","Share:on"]:
if wait["timeline"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"already on")
else:
cl.sendText(msg.to,"done")
else:
wait["timeline"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"done")
else:
cl.sendText(msg.to,"要了开。")
elif msg.text in ["共有:オフ","Share off","Share:off"]:
if wait["timeline"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"already off")
else:
cl.sendText(msg.to,"done")
else:
wait["timeline"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"done")
else:
cl.sendText(msg.to,"要了关断。")
elif msg.text in ["Auto like:on","Like on"]:
if wait["likeOn"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Done。")
else:
wait["likeOn"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"Already。")
elif msg.text in ["Like off","Auto like:off"]:
if wait["likeOn"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Done。")
else:
wait["likeOn"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"Already。")
#========================================
#========================================
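# "Set" builds a status report of every wait[] feature flag and sends it
# together with the admin's contact card (admsa).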
elif msg.text in ["Set"]:
print "Setting pick up..."
md = "─┅══ईह ㏒ ตาต้อม ㏒ ईह══┅─\n\n"
if wait["likeOn"] == True: md+=" Auto like : on \n"
else:md+=" Auto like : off \n"
if wait["alwayRead"] == True: md+=" Read : on \n"
else:md+=" Read : off \n"
if wait["detectMention"] == True: md+=" Autorespon : on \n"
else:md+=" Autorespon : off \n"
if wait["kickMention"] == True: md+=" Autokick: on \n"
else:md+=" Autokick : off \n"
if wait["Notifed"] == True: md+=" Notifed : on \n"
else:md+=" Notifed : off \n"
if wait["Notifedbot"] == True: md+=" Notifedbot : on \n"
else:md+=" Notifedbot : off \n"
if wait["acommentOn"] == True: md+=" Hhx1 : on \n"
else:md+=" Hhx1 : off \n"
if wait["bcommentOn"] == True: md+=" Hhx2 : on \n"
else:md+=" Hhx2 : off \n"
if wait["ccommentOn"] == True: md+=" Hhx3 : on \n"
else:md+=" Hhx3 : off \n"
if wait["Protectcancl"] == True: md+=" Cancel : on \n"
else:md+=" Cancel : off \n"
if wait["winvite"] == True: md+=" Invite : on ����\n"
else:md+=" Invite : off \n"
if wait["pname"] == True: md+=" Namelock : on \n"
else:md+=" Namelock : off \n"
if wait["contact"] == True: md+=" Contact : on \n"
else: md+=" Contact : off \n"
if wait["autoJoin"] == True: md+=" Auto join : on \n"
else: md +=" Auto join : off \n"
if wait["autoCancel"]["on"] == True:md+=" Group cancel :" + str(wait["autoCancel"]["members"]) + " \n"
else: md+= " Group cancel : off \n"
if wait["leaveRoom"] == True: md+="���� Auto leave : on \n"
else: md+=" Auto leave : off \n"
if wait["timeline"] == True: md+=" Share : on \n"
else:md+=" Share : off \n"
if wait["clock"] == True: md+=" Clock Name : on ��\n"
else:md+=" Clock Name : off \n"
if wait["autoAdd"] == True: md+=" Auto add : on \n"
else:md+=" Auto add : off \n"
if wait["commentOn"] == True: md+=" Comment : on \n"
else:md+=" Comment : off \n"
if wait["Backup"] == True: md+=" Backup : on \n"
else:md+=" Backup : off \n"
if wait["qr"] == True: md+=" Protect QR : on \n"
else:md+=" Protect QR : off \n"
cl.sendText(msg.to,md)
msg.contentType = 13
msg.contentMetadata = {'mid': admsa}
cl.sendMessage(msg)
#========================================
#------------------------------------------------
elif msg.text in ["Gcreator:inv","เชิญเเอทมิน"]:
if msg.from_ in admin:
ginfo = cl.getGroup(msg.to)
gCreator = ginfo.creator.mid
try:
cl.findAndAddContactsByMid(gCreator)
cl.inviteIntoGroup(msg.to,[gCreator])
print "success inv gCreator"
except:
pass
#-----------------------------------------------
elif msg.text in ["Backup:on","Backup on","เปิดการเชิญกลับ"]:
if wait["Backup"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Sudah on Bos\n\n"+ datetime.today().strftime('%H:%M:%S'))
else:
cl.sendText(msg.to,"Backup On\n\n"+ datetime.today().strftime('%H:%M:%S'))
else:
wait["Backup"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"Backup On\n\n"+ datetime.today().strftime('%H:%M:%S'))
else:
cl.sendText(msg.to,"Sudah on Bos\n\n"+ datetime.today().strftime('%H:%M:%S'))
elif msg.text in ["Backup:off","Backup off","ปิดการเชิญกลับ"]:
if wait["Backup"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Sudah off Bos\n\n"+ datetime.today().strftime('%H:%M:%S'))
else:
cl.sendText(msg.to,"Backup Off\n\n"+ datetime.today().strftime('%H:%M:%S'))
else:
wait["Backup"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"Backup Off\n\n"+ datetime.today().strftime('%H:%M:%S'))
else:
cl.sendText(msg.to,"Sudah off Bos\n\n"+ datetime.today().strftime('%H:%M:%S'))
elif msg.text in ["Reject","ลบรัน"]:
gid = cl.getGroupIdsInvited()
for i in gid:
cl.rejectGroupInvitation(i)
if wait["lang"] == "JP":
cl.sendText(msg.to,"Semua Spam Undangan Telah Di Tolak")
else:
cl.sendText(msg.to,"拒绝了全部的邀请。")
elif msg.text in ["Reject1","ลบรันบอท1"]:
gid = ki1.getGroupIdsInvited()
for i in gid:
ki1.rejectGroupInvitation(i)
if wait["lang"] == "JP":
ki1.sendText(msg.to,"Bot All invitations is clean")
else:
ki1.sendText(msg.to,"拒绝了全部的邀请。")
elif msg.text in ["Reject2","ลบรันบอท2"]:
gid = ki2.getGroupIdsInvited()
for i in gid:
ki2.rejectGroupInvitation(i)
if wait["lang"] == "JP":
ki2.sendText(msg.to,"Bot All invitations is clean")
else:
ki2.sendText(msg.to,"拒绝了全部的邀请。")
elif msg.text in ["Reject3","ลบรันบอท3"]:
gid = ki3.getGroupIdsInvited()
for i in gid:
ki3.rejectGroupInvitation(i)
if wait["lang"] == "JP":
ki3.sendText(msg.to,"Bot All invitations is clean")
else:
ki3.sendText(msg.to,"拒绝了全部的邀请。")
elif msg.text in ["Reject4","ลบรันบอท4"]:
gid = ki4.getGroupIdsInvited()
for i in gid:
ki4.rejectGroupInvitation(i)
if wait["lang"] == "JP":
ki4.sendText(msg.to,"Bot All invitations is clean")
else:
ki4.sendText(msg.to,"拒绝了全部的邀请。")
elif msg.text in ["Reject5","ลบรันบอท5"]:
gid = ki5.getGroupIdsInvited()
for i in gid:
ki5.rejectGroupInvitation(i)
if wait["lang"] == "JP":
ki5.sendText(msg.to,"Bot All invitations is clean")
else:
ki5.sendText(msg.to,"拒绝了全部的邀请。")
#-----------------------------------------------------------
elif msg.text in ["ลบแชท","ล้างแชท"]:
cl.removeAllMessages(op.param2)
cl.sendText(msg.to,"❇️Delete Chat Bot❇️")
#-----------------------------------------------------------
elif msg.text in ["ลบแชทบอท","ล้างแชทบอท"]:
ki1.removeAllMessages(op.param2)
ki2.removeAllMessages(op.param2)
ki3.removeAllMessages(op.param2)
ki4.removeAllMessages(op.param2)
ki5.removeAllMessages(op.param2)
cl.sendText(msg.to,"❇️Delete Chat Bot❇️")
cl.sendText(msg.to,"──────┅═ইई═┅──────\n•─ ͜͡✫ѕєʟғвот[ᴍ̶̲̅ᴀ̶̲̅ɴ̶̲̅]κɪcκєʀ ͜͡✫─•\nได้เคลียร์แชทบอท 5Kicker เรียบร้อย\n──────┅═ইई═┅──────")
#-----------------------------------------------------------
elif msg.text in ["Add:on","Auto add on","Auto add:on","Add on"]:
if wait["autoAdd"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Sudah on Bosqu")
else:
cl.sendText(msg.to,"Ok Bosqu")
else:
wait["autoAdd"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"Ok Bosqu")
else:
cl.sendText(msg.to,"Sudah on Bosqu")
elif msg.text in ["Add:off","Auto add off","Auto add:off","Add off"]:
if wait["autoAdd"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Sudah off Bosqu")
else:
cl.sendText(msg.to,"Ok Bosqu")
else:
wait["autoAdd"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"Ok Bosqu")
else:
cl.sendText(msg.to,"Sudah off Bosqu")
#========================================
#========================================
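# Auto-add greeting and comment configuration: "Message set:" / "Add message:"
# update wait["message"]; "Coms set:" / "Add comment:" update wait["comment"].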
elif "Message set:" in msg.text:
wait["message"] = msg.text.replace("Message set:","")
cl.sendText(msg.to,"message changed\n\n"+ datetime.today().strftime('%H:%M:%S'))
elif "Add message: " in msg.text:
wait["message"] = msg.text.replace("Add message: ","")
if wait["lang"] == "JP":
cl.sendText(msg.to,"message changed\n\n"+ datetime.today().strftime('%H:%M:%S'))
else:
cl.sendText(msg.to,"done。\n\n"+ datetime.today().strftime('%H:%M:%S'))
elif msg.text in ["Message","Com"]:
if wait["lang"] == "JP":
cl.sendText(msg.to,"message change to\n\n" + wait["message"])
else:
cl.sendText(msg.to,"The automatic appending information is set as follows。\n\n" + wait["message"])
elif "Coms set:" in msg.text:
c = msg.text.replace("Coms set:","")
if c in [""," ","\n",None]:
cl.sendText(msg.to,"String that can not be changed")
else:
wait["comment"] = c
cl.sendText(msg.to,"changed\n\n" + c)
elif "Add comment: " in msg.text:
c = msg.text.replace("Add comment: ","")
if c in [""," ","\n",None]:
cl.sendText(msg.to,"String that can not be changed")
else:
wait["comment"] = c
cl.sendText(msg.to,"changed\n\n" + c)
elif msg.text in ["Com on","Comment:on"]:
if wait["commentOn"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Done")
else:
cl.sendText(msg.to,"Already on")
else:
wait["commentOn"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"Done")
else:
cl.sendText(msg.to,"Already on")
elif msg.text in ["Com off","Comment:off"]:
if wait["commentOn"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Done")
else:
cl.sendText(msg.to,"Already off")
else:
wait["commentOn"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"Done")
else:
cl.sendText(msg.to,"Already off")
elif msg.text in ["Comment","Coms"]:
cl.sendText(msg.to,"message changed to\n\n" + str(wait["comment"]))
elif msg.text in ["HHX1","Hhx1"]:
cl.sendText(msg.to,"[เช็คข้อความต้อนรับของคุณ]\n\n" + str(wait["acomment"]))
elif msg.text in ["HHX2","Hhx2"]:
cl.sendText(msg.to,"[เช็คข้อความกล่าวถึงคนออกจากกลุ่ม]\n\n" + str(wait["bcomment"]))
elif msg.text in ["HHX3","Hhx3"]:
cl.sendText(msg.to,"[เช็คข้อความกล่าวถึงคนลบสมาชิก]\n\n" + str(wait["ccomment"]))
elif "Hhx1:" in msg.text:
c = msg.text.replace("Hhx1:","")
if c in [""," ","\n",None]:
cl.sendText(msg.to,"เกิดข้อผิดพลาด..!!")
else:
wait["acomment"] = c
cl.sendText(msg.to,"➠ ตั้งค่าข้อความต้อนรับ👌\n\n" + c)
elif "Hhx2:" in msg.text:
c = msg.text.replace("Hhx2:","")
if c in [""," ","\n",None]:
cl.sendText(msg.to,"เกิดข้อผิดพลาด..!!")
else:
wait["bcomment"] = c
cl.sendText(msg.to,"➠ ตั้งค่าข้อความกล่าวถึงคนออกจากกลุ่ม👌\n\n" + c)
elif "Hhx3:" in msg.text:
c = msg.text.replace("Hhx3:","")
if c in [""," ","\n",None]:
cl.sendText(msg.to,"เกิดข้อผิดพลาด..!!")
else:
wait["ccomment"] = c
cl.sendText(msg.to,"➠ ตั้งค่าข้อความกล่าวถึงคนลบสมาชิก👌\n\n" + c)
elif msg.text in ["Hhx1 on"]:
if wait["acommentOn"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"➠ เปิดข้อความต้อนรับเเล้ว👌")
else:
cl.sendText(msg.to,"Already on")
else:
wait["acommentOn"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"➠ เปิดข้อความต้อนรับเเล้ว👌")
else:
cl.sendText(msg.to,"Already on")
elif msg.text in ["Hhx2 on"]:
if wait["bcommentOn"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"➠ เปิดข้อความกล่าวถึงคนออกจากกลุ่ม👌")
else:
cl.sendText(msg.to,"Already on")
else:
wait["bcommentOn"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"➠ เปิดข้อความกล่าวถึงคนออกจากกลุ่ม👌")
else:
cl.sendText(msg.to,"Already on")
elif msg.text in ["Hhx3 on"]:
if wait["ccommentOn"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"➠ เปิดข้อความกล่าวถึงคนลบสมาชิก👌")
else:
cl.sendText(msg.to,"Already on")
else:
wait["ccommentOn"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"➠ เปิดข้อความกล่าวถึงคนลบสมาชิก👌")
else:
cl.sendText(msg.to,"Already on")
elif msg.text in ["Hhx1 off"]:
if wait["acommentOn"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"➠ ปิดข้อความต้อนรับเเล้ว👌")
else:
cl.sendText(msg.to,"Already off")
else:
wait["acommentOn"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"➠ ปิดข้อความต้อนรับเเล้ว👌")
else:
cl.sendText(msg.to,"Already off")
elif msg.text in ["Hhx2 off"]:
if wait["bcommentOn"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"➠ ปิดข้อความกล่าวถึงคนออกจากกลุ่ม👌")
else:
cl.sendText(msg.to,"Already off")
else:
wait["bcommentOn"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"➠ ปิดข้อความกล่าวถึงคนออกจากกลุ่ม👌")
else:
cl.sendText(msg.to,"Already off")
elif msg.text in ["Hhx3 off"]:
if wait["ccommentOn"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"➠ ปิดข้อความกล่าวถึงคนลบสมาชิก👌")
else:
cl.sendText(msg.to,"Already off")
else:
wait["ccommentOn"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"➠ ปิดข้อความกล่าวถึงคนลบสมาชิก👌")
else:
cl.sendText(msg.to,"Already off")
elif msg.text in ["Gurl"]:
if msg.toType == 2:
uye = random.choice(KAC)
x = cl.getGroup(msg.to)
if x.preventJoinByTicket == True:
x.preventJoinByTicket = False
uye.updateGroup(x)
gurl = uye.reissueGroupTicket(msg.to)
uye.sendText(msg.to,"line://ti/g/" + gurl)
else:
if wait["lang"] == "JP":
uye.sendText(msg.to,"Can not be used outside the group")
else:
uye.sendText(msg.to,"Not for use less than group")
elif "Ambil QR: " in msg.text:
if msg.toType == 2:
gid = msg.text.replace("Ambil QR: ","")
gurl = cl.reissueGroupTicket(gid)
cl.sendText(msg.to,"line://ti/g/" + gurl)
else:
cl.sendText(msg.to,"Not for use less than group")
elif "Y1 gurl: " in msg.text:
if msg.toType == 2:
gid = msg.text.replace("Y1 gurl: ","")
x = ki.getGroup(gid)
if x.preventJoinByTicket == True:
x.preventJoinByTicket = False
ki.updateGroup(x)
gurl = ki.reissueGroupTicket(gid)
ki.sendText(msg.to,"line://ti/g/" + gurl)
else:
ki.sendText(msg.to,"Not for use less than group")
elif "Y2 gurl: " in msg.text:
if msg.toType == 2:
gid = msg.text.replace("Y2 gurl: ","")
x = kk.getGroup(gid)
if x.preventJoinByTicket == True:
x.preventJoinByTicket = False
kk.updateGroup(x)
gurl = kk.reissueGroupTicket(gid)
kk.sendText(msg.to,"line://ti/g/" + gurl)
else:
kk.sendText(msg.to,"Not for use less than group")
#========================================
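# Comment blacklist helpers ("Comment bl", "Comment wl", "Comment bl confirm")
# and the clock-in-display-name commands ("Clock on/off", "Cc:", "Up").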
elif msg.text in ["Comment bl "]:
wait["wblack"] = True
cl.sendText(msg.to,"add to comment bl")
elif msg.text in ["Comment wl "]:
wait["dblack"] = True
cl.sendText(msg.to,"wl to comment bl")
elif msg.text in ["Comment bl confirm"]:
if wait["commentBlack"] == {}:
cl.sendText(msg.to,"confirmed")
else:
cl.sendText(msg.to,"Blacklist s")
mc = ""
for mi_d in wait["commentBlack"]:
mc += "・" +cl.getContact(mi_d).displayName + "\n"
cl.sendText(msg.to,mc)
elif msg.text in ["Clock:on","Clock on","Jam on","Jam:on"]:
if wait["clock"] == True:
cl.sendText(msg.to,"already on")
else:
wait["clock"] = True
now2 = datetime.now()
nowT = datetime.strftime(now2,"༺%H:%M༻")
profile = cl.getProfile()
profile.displayName = wait["cName"] + nowT
cl.updateProfile(profile)
cl.sendText(msg.to,"done")
elif msg.text in ["Clock:off","Clock off","Jam off","Jam:off"]:
if wait["clock"] == False:
cl.sendText(msg.to,"already off")
else:
wait["clock"] = False
cl.sendText(msg.to,"done")
elif "Cc: " in msg.text:
n = msg.text.replace("Cc: ","")
if len(n.decode("utf-8")) > 13:
cl.sendText(msg.to,"changed")
else:
wait["cName"] = n
cl.sendText(msg.to,"Changed to:\n\n" + n)
elif msg.text in ["Up"]:
if wait["clock"] == True:
now2 = datetime.now()
nowT = datetime.strftime(now2,"༺%H:%M༻")
profile = cl.getProfile()
profile.displayName = wait["cName"] + nowT
cl.updateProfile(profile)
cl.sendText(msg.to,"Refresh to update")
else:
cl.sendText(msg.to,"Please turn on the name clock")
#========================================
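# Profile grabbers: "Hack3 @name" sends a member's cover image,
# "Hack2mid:<mid>" and "Hack2 @name" send profile pictures.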
elif "Hack3 @" in msg.text:
print "[Command]dp executing"
_name = msg.text.replace("Hack3 @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Contact not found")
else:
for target in targets:
try:
contact = cl.getContact(target)
cu = cl.channel.getCover(target)
path = str(cu)
cl.sendImageWithUrl(msg.to, path)
except:
pass
print "[Command]dp executed"
elif "Hack2mid:" in msg.text:
umid = msg.text.replace("Hack2mid:","")
contact = cl.getContact(umid)
try:
image = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
except:
image = "https://www.1and1.co.uk/digitalguide/fileadmin/DigitalGuide/Teaser/not-found-t.jpg"
try:
cl.sendImageWithUrl(msg.to,image)
except Exception as error:
cl.sendText(msg.to,(error))
pass
elif "Hack2 " in msg.text:
if msg.toType == 2:
msg.contentType = 0
steal0 = msg.text.replace("Hack2 ","")
steal1 = steal0.lstrip()
steal2 = steal1.replace("@","")
steal3 = steal2.rstrip()
_name = steal3
group = cl.getGroup(msg.to)
targets = []
for g in group.members:
if _name == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Gak da orange")
else:
for target in targets:
try:
contact = cl.getContact(target)
try:
image = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
except:
image = "https://www.1and1.co.uk/digitalguide/fileadmin/DigitalGuide/Teaser/not-found-t.jpg"
try:
cl.sendImageWithUrl(msg.to,image)
except Exception as error:
cl.sendText(msg.to,(error))
pass
except:
cl.sendText(msg.to,"Error!")
break
else:
cl.sendText(msg.to,"Tidak bisa dilakukan di luar grup")
#===============================================
elif msg.text in ["Sp","sp","Speed"]:
cl.sendText(msg.to, "Progress.......")
start = time.time()
time.sleep(0.001)
elapsed_time = time.time() - start
cl.sendText(msg.to, "%sseconds" % (elapsed_time))
print "[Command]Speed palsu executed"
elif msg.text in ["Bot Speed"]:
ki1.sendText(msg.to, "Progress.......")
start = time.time()
time.sleep(0.001)
elapsed_time = time.time() - start
ki1.sendText(msg.to, "%sseconds" % (elapsed_time))
ki2.sendText(msg.to, "%sseconds" % (elapsed_time))
ki3.sendText(msg.to, "%sseconds" % (elapsed_time))
ki4.sendText(msg.to, "%sseconds" % (elapsed_time))
ki5.sendText(msg.to, "%sseconds" % (elapsed_time))
print "[Command]Speed palsu executed"
elif msg.text in ["Keybot"]:
ki.sendText(msg.to, "[SELFBOT PHET HACK BOT]\n\n❂͜͡☆➣ Namelock on\n❂͜͡☆➣ Namelock off\n❂͜͡☆➣ Blockinvite on\n❂͜͡☆➣ Blockinvite off\n❂͜͡☆➣ Backup on\n❂͜͡☆➣ Backup off\n\n[By.เพชร ทีมทดลองบอท]")
#========================================
elif msg.text in ["Botbb"]:
try:
ki1.updateDisplayPicture(backup.pictureStatus)
ki1.updateProfile(backup)
ki2.updateDisplayPicture(backup.pictureStatus)
ki2.updateProfile(backup)
ki3.updateDisplayPicture(backup.pictureStatus)
ki3.updateProfile(backup)
ki4.updateDisplayPicture(backup.pictureStatus)
ki4.updateProfile(backup)
ki5.updateDisplayPicture(backup.pictureStatus)
ki5.updateProfile(backup)
cl.sendText(msg.to, "Backup Sukses Bosqu")
except Exception as e:
cl.sendText(msg.to, str (e))
elif msg.text in ["Mebb"]:
try:
cl.updateDisplayPicture(mybackup.pictureStatus)
cl.updateProfile(mybackup)
cl.sendText(msg.to, "Backup Sukses Bosqu")
except Exception as e:
cl.sendText(msg.to, str (e))
#=================================================
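# "#mid on" / "#mid off": set or report a reading point (wait2) for this chat.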
elif msg.text == "#mid on":
cl.sendText(msg.to, "Done..")
try:
del wait2['readPoint'][msg.to]
del wait2['readMember'][msg.to]
except:
pass
now2 = datetime.now()
wait2['readPoint'][msg.to] = msg.id
wait2['readMember'][msg.to] = ""
wait2['setTime'][msg.to] = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
wait2['ROM'][msg.to] = {}
print wait2
elif msg.text == "#mid off":
if msg.to in wait2['readPoint']:
if wait2["ROM"][msg.to].items() == []:
chiya = ""
else:
chiya = ""
for rom in wait2["ROM"][msg.to].items():
print rom
chiya += rom[1] + "\n"
cl.sendText(msg.to, "%s\n\n%s\nReadig point creation:\n [%s]\n" % (wait2['readMember'][msg.to],chiya,setTime[msg.to]))
else:
cl.sendText(msg.to, "Ketik Lurking dulu dudul Baru bilang result Point.")
#========================================
#-------------------spam functions finish----------------------------
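# Group picture grabber, bot shutdown ("#Turn off bots"), group invite URL,
# and the Notifed / Notifedbot notification toggles.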
elif "Hackginfo" in msg.text:
if msg.from_ in admin:
group = cl.getGroup(msg.to)
path = "http://dl.profile.line-cdn.net/" + group.pictureStatus
cl.sendImageWithUrl(msg.to,path)
elif "#Turn off bots" in msg.text:
if msg.from_ in admsa:
try:
import sys
sys.exit()
except:
pass
#-----------------------------------------------
elif msg.text in ["Url","url"]:
if msg.toType == 2:
x = cl.getGroup(msg.to)
if x.preventJoinByTicket == True:
x.preventJoinByTicket = False
cl.updateGroup(x)
gurl = cl.reissueGroupTicket(msg.to)
cl.sendText(msg.to,"[SELFBO PHET HACK BOT]\n\nline://ti/g/" + gurl)
else:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Can not be used outside the group")
else:
cl.sendText(msg.to,"Not for use less than group")
elif msg.text in ["Notifed on","เปิดแจ้งเตือน","M on"]:
if msg.from_ in admin:
if wait["Notifed"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"All Notifed On\n\nเปิดเเจ้งเเตือนของคุณเเล้ว")
else:
cl.sendText(msg.to,"Done\n\nเปิดเเจ้งเเตื���นของคุณเเล้ว")
else:
wait["Notifed"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"All Notifed On\n\nเปิดเเจ้งเเตือนของคุณเเล้ว")
else:
cl.sendText(msg.to,"Done\n\nเปิดเเจ้งเเตือนของคุณเเล้ว")
elif msg.text in ["Notifed off","ปิดแจ้งเตือน","M off"]:
if msg.from_ in admin:
if wait["Notifed"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"All Notifed Off\n\nปิดเเจ้งเเตือนของคุณเเล้ว")
else:
cl.sendText(msg.to,"Done\n\nปิดเเจ้งเเตือนของคุณเเล้ว")
else:
wait["Notifed"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"All Notifed Off\n\nปิดเเจ้งเเตือนของคุณเเล้ว")
else:
cl.sendText(msg.to,"Done\n\nปิดเเจ้งเเตือนของคุณเเล้ว")
elif msg.text in ["Notifedbot on","เปิดเเจ้งเตือนบอท","Mbot on"]:
if msg.from_ in admin:
if wait["Notifedbot"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"All bot Notifed On\n\nเปิดเเจ้งเเตือนบอทเเล้ว")
else:
cl.sendText(msg.to,"Done\n\nเปิดเเจ้งเเตือนบอทเเล้ว")
else:
wait["Notifedbot"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"All bot Notifed On\n\nเปิดเเจ้งเเตือนบอทเเล้ว")
else:
cl.sendText(msg.to,"Done\n\nเปิดเเจ้งเเตือนบอทเเล้ว")
elif msg.text in ["Notifedbot off","ปิดแจ้งเตือนบอท","Mbot off"]:
if msg.from_ in admin:
if wait["Notifedbot"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"All bot Notifed Off\n\nปิดเเจ้งเเตือนบอทเเล้ว")
else:
cl.sendText(msg.to,"Done\n\nปิดเเจ้งเเตือนบอทเเล้ว")
else:
wait["Notifedbot"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"All bot Notifed Off\n\nปิดเเจ้งเเตือนบอทเเล้ว")
else:
cl.sendText(msg.to,"Done\n\nปิดเเจ้งเเตือนบอทเเล้ว")
#=================================================
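# "Spam on <count> <text>" sends <text> <count> times as separate messages;
# "Spam off <count> <text>" sends them joined into one message.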
elif "Spam " in msg.text:
if msg.from_ in admin:
txt = msg.text.split(" ")
jmlh = int(txt[2])
teks = msg.text.replace("Spam "+str(txt[1])+" "+str(jmlh)+ " ","")
tulisan = jmlh * (teks+"\n")
#Keke cantik <3
if txt[1] == "on":
if jmlh <= 10000:
for x in range(jmlh):
cl.sendText(msg.to, teks)
else:
cl.sendText(msg.to, "Out of range! ")
elif txt[1] == "off":
if jmlh <= 10000:
cl.sendText(msg.to, tulisan)
else:
cl.sendText(msg.to, "Out of range! ")
#-----------------------------------------------
elif "Mid @" in msg.text:
_name = msg.text.replace("Mid @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
for g in gs.members:
if _nametarget == g.displayName:
cl.sendText(msg.to, g.mid)
else:
pass
#-------------------------------------------------
elif msg.text in ["All on","all on"]:
cl.sendText(msg.to,"─┅══ईह ㏒ ตาต้อม ㏒ ईह══┅─\n\n[By. TOME BOTLINE]")
cl.sendText(msg.to,"Please wait......")
cl.sendText(msg.to,"Turn on all protection")
cl.sendText(msg.to,"Qr:on")
cl.sendText(msg.to,"Backup:on")
cl.sendText(msg.to,"Read:on")
cl.sendText(msg.to,"Respon:on")
cl.sendText(msg.to,"Responkick:on")
cl.sendText(msg.to,"Protect:on")
cl.sendText(msg.to,"Namelock:on")
cl.sendText(msg.to,"Blockinvite:on")
elif msg.text in ["All off","all off"]:
cl.sendText(msg.to,"─┅══ईह ㏒ TOME ㏒ ईह══┅─\n\n[SELF BOT By TOME BOTLINE]")
cl.sendText(msg.to,"Please wait......")
cl.sendText(msg.to,"Turn off all protection")
cl.sendText(msg.to,"Qr:off")
cl.sendText(msg.to,"Backup:off")
cl.sendText(msg.to,"Read:off")
cl.sendText(msg.to,"Respon:off")
cl.sendText(msg.to,"Responkick:off")
cl.sendText(msg.to,"Protect:off")
cl.sendText(msg.to,"Namelock:off")
cl.sendText(msg.to,"Blockinvite:off")
cl.sendText(msg.to,"Link off")
elif msg.text in ["ทีมงาน","ทีมทดลองบอท"]:
msg.contentType = 13
cl.sendText(msg.to, "[SELFBOT PHET HACK BOT]\n\n[☢Ŧ€₳M≈ನန้ণএ≈฿❂Ŧ☢]\n[By.ทีมงานทีมทดลองบอท]")
cl.sendText(msg.to, "ผู้จัดการทีมงาน:🐯हईທຮຮๅજईह🐯")
msg.contentMetadata = {'mid': 'u820d01252fdcf2a539fa194bcfc3400e'}
cl.sendMessage(msg)
cl.sendText(msg.to, "รองผู้จัดการทีมงาน:β•`BF.บั้ม•`")
msg.contentMetadata = {'mid': 'u49974a7c78af9f3a8fec3e1dc7c646a9'}
cl.sendMessage(msg)
cl.sendText(msg.to, "ประธานใหญ่:เพชร ทีมทดลองบอท")
msg.contentMetadata = {'mid': 'u00f827ce6641038d7c9b6704a9777dfa'}
cl.sendMessage(msg)
cl.sendText(msg.to, "ประธาน:ᴳᴜ ᵀᴇᵃᴍ ᴴa̴ᶜᴋ ᴮᴏᵀ")
msg.contentMetadata = {'mid': 'u6eb517fae5d8de8d1845325e995196a7'}
cl.sendMessage(msg)
cl.sendText(msg.to, "รองประธาน:💫ীန้ສقัπั௭❁💫")
msg.contentMetadata = {'mid': 'u765bec541d4f21cf0afdceb69b4b2ebd'}
cl.sendMessage(msg)
cl.sendText(msg.to, "รปภ.:✍Ŧ€₳M☬ж☬Ħ₳ʗҜ฿❂Ŧ✈๛")
msg.contentMetadata = {'mid': 'u409892727431e6e682114336a3be2784'}
cl.sendMessage(msg)
cl.sendText(msg.to, "ตัวเเทนสมาชิก:🍃🍁NothingEid🍁🍃")
msg.contentMetadata = {'mid': 'ue9e8dbdbfa31491ddc82ed73950b45f0'}
cl.sendMessage(msg)
cl.sendText(msg.to, "ตัวเเทนสมาชิก:Ĵöɱ💎Sтɪcκєʀᴸᶤᶰᵉ")
msg.contentMetadata = {'mid': 'u76be42d134b394580644e1eed2bed029'}
cl.sendMessage(msg)
#========================================
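# Mass moderation block: "Kickall" kicks members whose name matches, "#rebotall"
# and "#boot#" cycle the kicker bots out of and back into the group via a
# reissued ticket, "Kill" kicks blacklisted members, "PK*"/"KK*" kick targets.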
elif "{}" in msg.text:
msg.contentType = 13
msg.contentMetadata = {'mid': msg.to+"',"}
cl.sendMessage(msg)
elif 'Kickall' in msg.text:
if msg.toType == 2:
print "Kickall ok"
_name = msg.text.replace("Kickall","")
# only the last group lookup was used, so fetch the group once via ki5
gs = ki5.getGroup(msg.to)
ki1.sendText(msg.to, "Hello all...😁😁 {}")
targets = []
for g in gs.members:
if _name in g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Not found.")
# ki.sendText(msg.to,"Not found.")
else:
for target in targets:
if not target in Bots:
try:
klist=[ki1,ki2,ki3,ki4,ki5]
kicker=random.choice(klist)
kicker.kickoutFromGroup(msg.to,[target])
print (msg.to,[g.mid])
except:
pass
# ki3.sendText(msg,to,"Nuke Finish")
# ki2.sendText(msg,to,"
elif msg.text.lower() == '#rebotall':
if msg.toType == 2:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
cl.sendText(msg.to,"waitting...")
ki1.leaveGroup(msg.to)
ki2.leaveGroup(msg.to)
ki3.leaveGroup(msg.to)
ki4.leaveGroup(msg.to)
ki5.leaveGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki1.acceptGroupInvitationByTicket(msg.to,Ticket)
ki2.acceptGroupInvitationByTicket(msg.to,Ticket)
ki3.acceptGroupInvitationByTicket(msg.to,Ticket)
ki4.acceptGroupInvitationByTicket(msg.to,Ticket)
ki5.acceptGroupInvitationByTicket(msg.to,Ticket)
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki1.updateGroup(G)
print "kicker ok"
# the invite ticket was already re-locked by the assignment and update above
elif msg.text.lower() == '#boot#':
if msg.toType == 2:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
cl.sendText(msg.to,"waitting...")
ki1.leaveGroup(msg.to)
ki2.leaveGroup(msg.to)
ki3.leaveGroup(msg.to)
ki4.leaveGroup(msg.to)
ki5.leaveGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki1.acceptGroupInvitationByTicket(msg.to,Ticket)
ki2.acceptGroupInvitationByTicket(msg.to,Ticket)
ki3.acceptGroupInvitationByTicket(msg.to,Ticket)
ki4.acceptGroupInvitationByTicket(msg.to,Ticket)
ki5.acceptGroupInvitationByTicket(msg.to,Ticket)
ki1.leaveGroup(msg.to)
ki2.leaveGroup(msg.to)
ki3.leaveGroup(msg.to)
ki4.leaveGroup(msg.to)
ki5.leaveGroup(msg.to)
ki1.acceptGroupInvitationByTicket(msg.to,Ticket)
ki2.acceptGroupInvitationByTicket(msg.to,Ticket)
ki3.acceptGroupInvitationByTicket(msg.to,Ticket)
ki4.acceptGroupInvitationByTicket(msg.to,Ticket)
ki5.acceptGroupInvitationByTicket(msg.to,Ticket)
ki1.leaveGroup(msg.to)
ki2.leaveGroup(msg.to)
ki3.leaveGroup(msg.to)
ki4.leaveGroup(msg.to)
ki5.leaveGroup(msg.to)
ki1.acceptGroupInvitationByTicket(msg.to,Ticket)
ki2.acceptGroupInvitationByTicket(msg.to,Ticket)
ki3.acceptGroupInvitationByTicket(msg.to,Ticket)
ki4.acceptGroupInvitationByTicket(msg.to,Ticket)
ki5.acceptGroupInvitationByTicket(msg.to,Ticket)
ki1.leaveGroup(msg.to)
ki2.leaveGroup(msg.to)
ki3.leaveGroup(msg.to)
ki4.leaveGroup(msg.to)
ki1.acceptGroupInvitationByTicket(msg.to,Ticket)
ki2.acceptGroupInvitationByTicket(msg.to,Ticket)
ki3.acceptGroupInvitationByTicket(msg.to,Ticket)
ki4.acceptGroupInvitationByTicket(msg.to,Ticket)
ki5.acceptGroupInvitationByTicket(msg.to,Ticket)
ki1.leaveGroup(msg.to)
ki2.leaveGroup(msg.to)
ki3.leaveGroup(msg.to)
ki4.leaveGroup(msg.to)
ki5.leaveGroup(msg.to)
ki1.acceptGroupInvitationByTicket(msg.to,Ticket)
ki2.acceptGroupInvitationByTicket(msg.to,Ticket)
ki3.acceptGroupInvitationByTicket(msg.to,Ticket)
ki4.acceptGroupInvitationByTicket(msg.to,Ticket)
ki5.acceptGroupInvitationByTicket(msg.to,Ticket)
ki1.leaveGroup(msg.to)
ki2.leaveGroup(msg.to)
ki3.leaveGroup(msg.to)
ki4.leaveGroup(msg.to)
ki5.leaveGroup(msg.to)
ki1.acceptGroupInvitationByTicket(msg.to,Ticket)
ki2.acceptGroupInvitationByTicket(msg.to,Ticket)
ki3.acceptGroupInvitationByTicket(msg.to,Ticket)
ki4.acceptGroupInvitationByTicket(msg.to,Ticket)
ki5.acceptGroupInvitationByTicket(msg.to,Ticket)
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki1.updateGroup(G)
print "kicker ok"
# the invite ticket was already re-locked by the assignment and update above
elif msg.text in ["Kill"]:
if msg.toType == 2:
group = cl.getGroup(msg.to)
gMembMids = [contact.mid for contact in group.members]
matched_list = []
for tag in wait["blacklist"]:
matched_list+=filter(lambda str: str == tag, gMembMids)
if matched_list == []:
random.choice(KAC).sendText(msg.to,"Fuck You")
return
for jj in matched_list:
try:
klist=[ki1,ki2,ki3,ki4,ki5]
kicker = random.choice(klist)
kicker.kickoutFromGroup(msg.to,[jj])
print (msg.to,[jj])
except:
pass
elif ("PK4 " in msg.text):
if msg.from_ in admin:
targets = []
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
ki4.kickoutFromGroup(msg.to,[target])
except:
ki4.sendText(msg.to,"Error")
elif "KK2 " in msg.text:
nk0 = msg.text.replace("KK2 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
gs.preventJoinByTicket = False
cl.updateGroup(gs)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki2.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.01)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
ki2.kickoutFromGroup(msg.to,[target])
print (msg.to,[g.mid])
except:
ki2.leaveGroup(msg.to)
gs = cl.getGroup(msg.to)
gs.preventJoinByTicket = True
cl.updateGroup(gs)
# the invite ticket was already re-locked by the assignment and update above
elif "KK1 " in msg.text:
nk0 = msg.text.replace("KK1 ","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("@","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
gs.preventJoinByTicket = False
cl.updateGroup(gs)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki1.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.01)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"user does not exist")
pass
else:
for target in targets:
try:
ki1.kickoutFromGroup(msg.to,[target])
print (msg.to,[g.mid])
except:
ki1.leaveGroup(msg.to)
gs = cl.getGroup(msg.to)
gs.preventJoinByTicket = True
cl.updateGroup(gs)
# the invite ticket was already re-locked by the assignment and update above
#-----------------------------------------------------------
elif "contactjoin:" in msg.text:
try:
source_str = 'abcdefghijklmnopqrstuvwxyz1234567890@:;./_][!&%$#)(=~^|'
name = "".join([random.choice(source_str) for x in xrange(10)])
amid = msg.text.replace("contactjoin:","")
cl.sendText(msg.to,str(cl.channel.createAlbumF(msg.to,name,amid)))
except Exception as e:
try:
cl.sendText(msg.to,str(e))
except:
pass
elif ("PK2 " in msg.text):
targets = []
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
ki2.kickoutFromGroup(msg.to,[target])
except:
ki2.sendText(msg.to,"Error")
elif ("PK3 " in msg.text):
targets = []
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
ki3.kickoutFromGroup(msg.to,[target])
except:
ki3.sendText(msg.to,"Error")
elif "tome@@" in msg.text:
group = cl.getGroup(msg.to)
k = len(group.members)//100
for j in xrange(k+1):
msg = Message(to=msg.to)
txt = u''
s=0
d=[]
for i in group.members[j*100 : (j+1)*100]:
d.append({"S":str(s), "E" :str(s+8), "M":i.mid})
s += 9
txt += u'@Krampus\n'
msg.text = txt
msg.contentMetadata = {u'MENTION':json.dumps({"MENTIONEES":d})}
cl.sendMessage(msg)
elif "แทค" in msg.text:
group = cl.getGroup(msg.to)
k = len(group.members)//100
for j in xrange(k+1):
msg = Message(to=msg.to)
txt = u''
s=0
d=[]
for i in group.members[j*100 : (j+1)*100]:
d.append({"S":str(s), "E" :str(s+8), "M":i.mid})
s += 9
txt += u'@Krampus\n'
msg.text = txt
msg.contentMetadata = {u'MENTION':json.dumps({"MENTIONEES":d})}
cl.sendMessage(msg)
elif ("PK " in msg.text):
targets = []
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
cl.kickoutFromGroup(msg.to,[target])
except:
cl.sendText(msg.to,"Error")
elif "Blacklist @" in msg.text:
_name = msg.text.replace("Blacklist @","")
_kicktarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _kicktarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Not found")
else:
for target in targets:
try:
wait["blacklist"][target] = True
f=codecs.open('st2__b.json','w','utf-8')
json.dump(wait["blacklist"], f, sort_keys=True, indent=4,ensure_ascii=False)
cl.sendText(msg.to,"Success Boss")
except:
cl.sendText(msg.to,"error")
elif "Ban @" in msg.text:
if msg.toType == 2:
print "[BL]ok"
_name = msg.text.replace("Ban @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Not found.")
else:
for target in targets:
try:
wait["blacklist"][target] = True
f=codecs.open('st2__b.json','w','utf-8')
json.dump(wait["blacklist"], f, sort_keys=True, indent=4,ensure_ascii=False)
cl.sendText(msg.to,"Success Masuk daftar orang bejat Boss")
except:
cl.sendText(msg.to,"Error")
elif "Unban @" in msg.text:
if msg.toType == 2:
print "[WL]ok"
_name = msg.text.replace("Unban @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Not found.")
else:
for target in targets:
try:
del wait["blacklist"][target]
f=codecs.open('st2__b.json','w','utf-8')
json.dump(wait["blacklist"], f, sort_keys=True, indent=4,ensure_ascii=False)
cl.sendText(msg.to,"Sudah di keluarkan dari daftar bejat Boss")
except:
cl.sendText(msg.to,"There was no blacklist user")
elif msg.text in ["Clear ban","ล้างดำ"]:
wait["blacklist"] = {}
cl.sendText(msg.to,"clear")
elif msg.text in ["Ban"]:
wait["wblacklist"] = True
cl.sendText(msg.to,"send contact to ban")
elif msg.text in ["Unban"]:
wait["dblacklist"] = True
cl.sendText(msg.to,"send contact to ban")
elif msg.text in ["Banlist","Mcheck"]:
if wait["blacklist"] == {}:
cl.sendText(msg.to,"Nothing double thumbs up")
else:
cl.sendText(msg.to,"Daftar Banlist")
mc = "[⎈]Blacklist [⎈]\n"
for mi_d in wait["blacklist"]:
mc += "[✗] " + cl.getContact(mi_d).displayName + " \n"
cl.sendText(msg.to, mc + "")
elif msg.text in ["Me ban","Cekban","Mcheck mid"]:
if msg.toType == 2:
group = cl.getGroup(msg.to)
gMembMids = [contact.mid for contact in group.members]
matched_list = []
for tag in wait["blacklist"]:
matched_list+=filter(lambda str: str == tag, gMembMids)
cocoa = "[⎈]Mid Blacklist [⎈]"
for mm in matched_list:
cocoa += "\n" + mm + "\n"
cl.sendText(msg.to,cocoa + "")
#=============================================
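# Per-chat feature toggles: simisimi, auto-read, auto-respond to mentions,
# auto-kick on mention, and invite-cancel protection.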
elif msg.text in ["Simisimi on","Simisimi:on"]:
settings["simiSimi"][msg.to] = True
cl.sendText(msg.to,"Success activated simisimi")
elif msg.text in ["Simisimi off","Simisimi:off"]:
settings["simiSimi"][msg.to] = False
cl.sendText(msg.to,"Success deactive simisimi")
elif msg.text in ["Read on","Read:on"]:
wait['alwayRead'] = True
cl.sendText(msg.to,"Auto Sider ON")
elif msg.text in ["Read off","Read:off"]:
wait['alwayRead'] = False
cl.sendText(msg.to,"Auto Sider OFF")
elif msg.text in ["Tag on","Autorespon:on","Respon on","Respon:on"]:
wait["detectMention"] = True
cl.sendText(msg.to,"Auto Respon ON")
elif msg.text in ["Tag off","Autorespon:off","Respon off","Respon:off"]:
wait["detectMention"] = False
cl.sendText(msg.to,"Auto Respon OFF")
elif msg.text in ["Kicktag on","Autokick:on","Responkick on","Responkick:on"]:
wait["kickMention"] = True
cl.sendText(msg.to,"Auto Kick ON")
elif msg.text in ["Kicktag off","Autokick:off","Responkick off","Responkick:off"]:
wait["kickMention"] = False
cl.sendText(msg.to,"Auto Kick OFF")
elif msg.text in ["Cancel on","cancel on"]:
if msg.from_ in admin:
if wait["Protectcancl"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Cancel Semua Undangan On")
else:
cl.sendText(msg.to,"done")
else:
wait["Protectcancl"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"Cancel Semua Undangan On")
else:
cl.sendText(msg.to,"done")
elif msg.text in ["Cancel off","cancel off"]:
if msg.from_ in admin:
if wait["Protectcancl"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"Cancel Semua Undangan Off")
else:
cl.sendText(msg.to,"done")
else:
wait["Protectcancl"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"Cancel Semua Undangan Off")
else:
cl.sendText(msg.to,"done")
#==============================================================================#
#==============================================================================#
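# Lookup commands: contact/group info by mid or gid, friend/member/blocked
# lists, and the lurk (read-point) commands further below.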
elif "hackmid:" in msg.text:
saya = msg.text.replace("hackmid:","")
msg.contentType = 13
msg.contentMetadata = {"mid":saya}
cl.sendMessage(msg)
contact = cl.getContact(saya)
cu = cl.channel.getCover(saya)
path = str(cu)
image = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
try:
cl.sendText(msg.to,"Nama :\n" + contact.displayName + "\n\nBio :\n" + contact.statusMessage)
cl.sendText(msg.to,"Profile Picture " + contact.displayName)
cl.sendImageWithURL(msg.to,image)
cl.sendText(msg.to,"Cover " + contact.displayName)
cl.sendImageWithUrl(msg.to,path)
except:
pass
elif "hackgid:" in msg.text:
saya = msg.text.replace("hackgid:","")
gid = cl.getGroupIdsJoined()
for i in gid:
h = cl.getGroup(i).id
group = cl.getGroup(i)
if h == saya:
try:
creator = group.creator.mid
msg.contentType = 13
msg.contentMetadata = {'mid': creator}
md = "Nama Grup :\n" + group.name + "\n\nID Grup :\n" + group.id
if group.preventJoinByTicket is False: md += "\n\nKode Url : Diizinkan"
else: md += "\n\nKode Url : Diblokir"
if group.invitee is None: md += "\nJumlah Member : " + str(len(group.members)) + " Orang" + "\nUndangan Yang Belum Diterima : 0 Orang"
else: md += "\nJumlah Member : " + str(len(group.members)) + " Orang" + "\nUndangan Yang Belum Diterima : " + str(len(group.invitee)) + " Orang"
cl.sendText(msg.to,md)
cl.sendMessage(msg)
cl.sendImageWithUrl(msg.to,"http://dl.profile.line.naver.jp/"+ group.pictureStatus)
except:
creator = "Error"
elif msg.text in ["Friendlist","เช็คเพื่อนทั้งหมด","เพื่อนทั้งหมด","Fyall"]:
contactlist = cl.getAllContactIds()
kontak = cl.getContacts(contactlist)
num=1
msgs="═════════List Friend═════════"
for ids in kontak:
msgs+="\n[%i] %s" % (num, ids.displayName)
num=(num+1)
msgs+="\n═════════List Friend═════════\n\nTotal Friend : %i" % len(kontak)
cl.sendText(msg.to, msgs)
elif msg.text in ["Memlist","Nameall"]:
kontak = cl.getGroup(msg.to)
group = kontak.members
num=1
msgs="═════════List Member═════════-"
for ids in group:
msgs+="\n[%i] %s" % (num, ids.displayName)
num=(num+1)
msgs+="\n═════════List Member═════════\n\nTotal Members : %i" % len(group)
cl.sendText(msg.to, msgs)
elif "Friendinfo: " in msg.text:
saya = msg.text.replace('Friendinfo: ','')
gid = cl.getAllContactIds()
for i in gid:
h = cl.getContact(i).displayName
contact = cl.getContact(i)
cu = cl.channel.getCover(i)
path = str(cu)
image = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
if h == saya:
cl.sendText(msg.to,"Nama :\n" + contact.displayName + "\n\nBio :\n" + contact.statusMessage)
cl.sendText(msg.to,"Profile Picture " + contact.displayName)
cl.sendImageWithURL(msg.to,image)
cl.sendText(msg.to,"Cover " + contact.displayName)
cl.sendImageWithUrl(msg.to,path)
elif "#Friendpict:" in msg.text:
saya = msg.text.replace('#Friendpict:','')
gid = cl.getAllContactIds()
for i in gid:
h = cl.getContact(i).displayName
gna = cl.getContact(i)
if h == saya:
cl.sendImageWithUrl(msg.to,"http://dl.profile.line.naver.jp/"+ gna.pictureStatus)
elif msg.text in ["Blocklist","บร๊อก","Pbann"]:
blockedlist = cl.getBlockedContactIds()
kontak = cl.getContacts(blockedlist)
num=1
msgs="═════════List Blocked═════════"
for ids in kontak:
msgs+="\n[%i] %s" % (num, ids.displayName)
num=(num+1)
msgs+="\n═════════List Blocked═════════\n\nTotal Blocked : %i" % len(kontak)
cl.sendText(msg.to, msgs)
elif msg.text in ["#Myginfoall"]:
gruplist = cl.getGroupIdsJoined()
kontak = cl.getGroups(gruplist)
num=1
msgs="═════════List Grup═════════"
for ids in kontak:
msgs+="\n[%i] %s" % (num, ids.name)
num=(num+1)
msgs+="\n═════════List Grup═════════\n\nTotal Grup : %i" % len(kontak)
cl.sendText(msg.to, msgs)
elif msg.text in ["#Myginfogidall"]:
gruplist = cl.getGroupIdsJoined()
kontak = cl.getGroups(gruplist)
num=1
msgs="═════════List GrupMid═════════"
for ids in kontak:
msgs+="\n[%i] %s" % (num, ids.id)
num=(num+1)
msgs+="\n═════════List GrupMid═════════\n\nTotal Grup : %i" % len(kontak)
cl.sendText(msg.to, msgs)
elif "ชื่อกลุ่ม" in msg.text:
saya = msg.text.replace('ชื่อกลุ่ม','')
gid = cl.getGroup(msg.to)
cl.sendText(msg.to, "[Nama Grup : ]\n" + gid.name)
elif "Gid" in msg.text:
saya = msg.text.replace('Gid','')
gid = cl.getGroup(msg.to)
cl.sendText(msg.to, "[ID Grup : ]\n" + gid.id)
elif msg.text in ["#Meginfoall"]:
gid = cl.getGroupIdsJoined()
h = ""
for i in gid:
h += "%s\n" % (cl.getGroup(i).name +" ? ["+str(len(cl.getGroup(i).members))+"]")
cl.sendText(msg.to,"-- List Groups --\n\n"+ h +"\nTotal groups =" +" ["+str(len(gid))+"]")
elif "tome tag all" == msg.text.lower():
group = cl.getGroup(msg.to)
nama = [contact.mid for contact in group.members]
nm1, nm2, nm3, nm4, nm5, jml = [], [], [], [], [], len(nama)
if jml <= 100:
summon(msg.to, nama)
if jml > 100 and jml < 200:
for i in range(0, 99):
nm1 += [nama[i]]
summon(msg.to, nm1)
for j in range(100, len(nama)-1):
nm2 += [nama[j]]
summon(msg.to, nm2)
if jml > 200 and jml < 500:
for i in range(0, 99):
nm1 += [nama[i]]
summon(msg.to, nm1)
for j in range(100, 199):
nm2 += [nama[j]]
summon(msg.to, nm2)
for k in range(200, 299):
nm3 += [nama[k]]
summon(msg.to, nm3)
for l in range(300, 399):
nm4 += [nama[l]]
summon(msg.to, nm4)
for m in range(400, len(nama)-1):
nm5 += [nama[m]]
summon(msg.to, nm5)
if jml > 500:
print "Terlalu Banyak Men 500+"
cnt = Message()
cnt.text = "TOME TAG DONE :\n" + str(jml) + " Members"
cnt.to = msg.to
cl.sendMessage(cnt)
elif "lurk on" == msg.text.lower():
if msg.to in wait2['readPoint']:
try:
del wait2['readPoint'][msg.to]
del wait2['readMember'][msg.to]
del wait2['setTime'][msg.to]
except:
pass
wait2['readPoint'][msg.to] = msg.id
wait2['readMember'][msg.to] = ""
wait2['setTime'][msg.to] = datetime.now().strftime('%H:%M:%S')
wait2['ROM'][msg.to] = {}
with open('sider.json', 'w') as fp:
json.dump(wait2, fp, sort_keys=True, indent=4)
cl.sendText(msg.to,"Lurking already on\nเปิดการอ่านอัตโนมัต")
else:
try:
del wait2['readPoint'][msg.to]
del wait2['readMember'][msg.to]
del wait2['setTime'][msg.to]
except:
pass
wait2['readPoint'][msg.to] = msg.id
wait2['readMember'][msg.to] = ""
wait2['setTime'][msg.to] = datetime.now().strftime('%H:%M:%S')
wait2['ROM'][msg.to] = {}
with open('sider.json', 'w') as fp:
json.dump(wait2, fp, sort_keys=True, indent=4)
cl.sendText(msg.to, "เปิดการอ่านอัตโนมัต\nSet reading point:\n" + datetime.now().strftime('%H:%M:%S'))
print wait2
elif "lurk off" == msg.text.lower():
if msg.to not in wait2['readPoint']:
cl.sendText(msg.to,"Lurking already off\nปิดการอ่านอัตโนมัต")
else:
try:
del wait2['readPoint'][msg.to]
del wait2['readMember'][msg.to]
del wait2['setTime'][msg.to]
except:
pass
cl.sendText(msg.to, "ปิดการอ่านอัตโนมัต\nDelete reading point:\n" + datetime.now().strftime('%H:%M:%S'))
elif "lurkers" == msg.text.lower():
if msg.to in wait2['readPoint']:
if wait2["ROM"][msg.to].items() == []:
cl.sendText(msg.to, "Lurkers:\nNone")
else:
chiya = []
for rom in wait2["ROM"][msg.to].items():
chiya.append(rom[1])
cmem = cl.getContacts(chiya)
zx = ""
zxc = ""
zx2 = []
xpesan = 'Lurkers:\n'
for x in range(len(cmem)):
xname = str(cmem[x].displayName)
pesan = ''
pesan2 = pesan+"@a\n"
xlen = str(len(zxc)+len(xpesan))
xlen2 = str(len(zxc)+len(pesan2)+len(xpesan)-1)
zx = {'S':xlen, 'E':xlen2, 'M':cmem[x].mid}
zx2.append(zx)
zxc += pesan2
msg.contentType = 0
print zxc
msg.text = xpesan+ zxc + "\nLurking time: %s\nCurrent time: %s"%(wait2['setTime'][msg.to],datetime.now().strftime('%H:%M:%S'))
lol ={'MENTION':str('{"MENTIONEES":'+json.dumps(zx2).replace(' ','')+'}')}
print lol
msg.contentMetadata = lol
try:
cl.sendMessage(msg)
except Exception as error:
print error
pass
else:
cl.sendText(msg.to, "Lurking has not been set.")
elif msg.text in ["เปิดอ่าน","R on","ตั้งเวลา"]:
cl.sendText(msg.to,"lurk on")
elif msg.text in ["ปิดอ่าน","R off"]:
cl.sendText(msg.to,"lurk off")
elif msg.text in ["อ่าน","Ry"]:
cl.sendText(msg.to,"lurkers")
elif msg.text in ["Ry20"]:
cl.sendText(msg.to,"lurkers")
cl.sendText(msg.to,"lurkers")
cl.sendText(msg.to,"lurkers")
cl.sendText(msg.to,"lurkers")
cl.sendText(msg.to,"lurkers")
cl.sendText(msg.to,"lurkers")
cl.sendText(msg.to,"lurkers")
cl.sendText(msg.to,"lurkers")
cl.sendText(msg.to,"llurkers")
cl.sendText(msg.to,"lurkers")
cl.sendText(msg.to,"lurkers")
cl.sendText(msg.to,"lurkers")
cl.sendText(msg.to,"lurkers")
cl.sendText(msg.to,"lurkers")
cl.sendText(msg.to,"lurkers")
cl.sendText(msg.to,"lurkers")
cl.sendText(msg.to,"lurkers")
cl.sendText(msg.to,"lurkers")
cl.sendText(msg.to,"lurkers")
cl.sendText(msg.to,"lurkers")
elif ("Micadd " in msg.text):
targets = []
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
mimic["target"][target] = True
cl.sendText(msg.to,"Target ditambahkan!")
break
except:
cl.sendText(msg.to,"Fail !")
break
elif ("Micdel " in msg.text):
targets = []
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
del mimic["target"][target]
cl.sendText(msg.to,"Target dihapuskan!")
break
except:
cl.sendText(msg.to,"Fail !")
break
elif msg.text in ["Miclist","Heckmic"]:
if mimic["target"] == {}:
cl.sendText(msg.to,"nothing")
else:
mc = "Target mimic user\n"
for mi_d in mimic["target"]:
mc += "• "+cl.getContact(mi_d).displayName + "\n"
cl.sendText(msg.to,mc)
elif "Mimic target " in msg.text:
if mimic["copy"] == True:
siapa = msg.text.replace("Mimic target ","")
if siapa.rstrip(' ') == "me":
mimic["copy2"] = "me"
cl.sendText(msg.to,"Mimic change to me")
elif siapa.rstrip(' ') == "target":
mimic["copy2"] = "target"
cl.sendText(msg.to,"Mimic change to target")
else:
cl.sendText(msg.to,"I dont know")
elif "Phetmic " in msg.text:
cmd = msg.text.replace("Phetmic ","")
if cmd == "on":
if mimic["status"] == False:
mimic["status"] = True
cl.sendText(msg.to,"Reply Message on")
else:
cl.sendText(msg.to,"Sudah on")
elif cmd == "off":
if mimic["status"] == True:
mimic["status"] = False
cl.sendText(msg.to,"Reply Message off")
else:
cl.sendText(msg.to,"Sudah off")
elif "Setimage: " in msg.text:
wait["pap"] = msg.text.replace("Setimage: ","")
cl.sendText(msg.to, "Pap telah di Set")
elif msg.text in ["Papimage","Papim","Pap"]:
cl.sendImageWithUrl(msg.to,wait["pap"])
elif "Setvideo: " in msg.text:
wait["pap"] = msg.text.replace("Setvideo: ","")
cl.sendText(msg.to,"Video Has Ben Set To")
elif msg.text in ["Papvideo","Papvid"]:
cl.sendVideoWithUrl(msg.to,wait["pap"])
#==============================================================================#
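# "Sk" sends a burst of basic stickers through ki1; the commands that follow
# manage the self account's own name, bio, picture and cover.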
elif msg.text in ["Sk"]:
msg.contentType = 7
msg.text = None
msg.contentMetadata = {
"STKID": "100",
"STKPKGID": "1",
"STKVER": "100" }
ki1.sendMessage(msg)
msg.contentType = 7
msg.text = None
msg.contentMetadata = {
"STKID": "10",
"STKPKGID": "1",
"STKVER": "100" }
ki1.sendMessage(msg)
msg.contentType = 7
msg.text = None
msg.contentMetadata = {
"STKID": "9",
"STKPKGID": "1",
"STKVER": "100" }
ki1.sendMessage(msg)
msg.contentType = 7
msg.text = None
msg.contentMetadata = {
"STKID": "7",
"STKPKGID": "1",
"STKVER": "100" }
ki1.sendMessage(msg)
msg.contentType = 7
msg.text = None
msg.contentMetadata = {
"STKID": "6",
"STKPKGID": "1",
"STKVER": "100" }
ki1.sendMessage(msg)
msg.contentType = 7
msg.text = None
msg.contentMetadata = {
"STKID": "4",
"STKPKGID": "1",
"STKVER": "100" }
ki1.sendMessage(msg)
msg.contentType = 7
msg.text = None
msg.contentMetadata = {
"STKID": "3",
"STKPKGID": "1",
"STKVER": "100" }
ki1.sendMessage(msg)
msg.contentType = 7
msg.text = None
msg.contentMetadata = {
"STKID": "110",
"STKPKGID": "1",
"STKVER": "100" }
ki1.sendMessage(msg)
msg.contentType = 7
msg.text = None
msg.contentMetadata = {
"STKID": "101",
"STKPKGID": "1",
"STKVER": "100" }
ki1.sendMessage(msg)
msg.contentType = 7
msg.text = None
msg.contentMetadata = {
"STKID": "247",
"STKPKGID": "3",
"STKVER": "100" }
ki1.sendMessage(msg)
elif msg.text.lower() == 'mymid':
cl.sendText(msg.to,mid)
elif "Timeline: " in msg.text:
tl_text = msg.text.replace("Timeline: ","")
cl.sendText(msg.to,"line://home/post?userMid="+mid+"&postId="+cl.new_post(tl_text)["result"]["post"]["postInfo"]["postId"])
elif "Myname: " in msg.text:
string = msg.text.replace("Myname: ","")
if len(string.decode('utf-8')) <= 10000000000:
profile = cl.getProfile()
profile.displayName = string
cl.updateProfile(profile)
cl.sendText(msg.to,"Changed " + string + "")
elif "Mybio: " in msg.text:
string = msg.text.replace("Mybio: ","")
if len(string.decode('utf-8')) <= 10000000000:
profile = cl.getProfile()
profile.statusMessage = string
cl.updateProfile(profile)
cl.sendText(msg.to,"Changed " + string)
elif msg.text in ["Myname","Mename"]:
h = cl.getContact(mid)
cl.sendText(msg.to,"===[DisplayName]===\n" + h.displayName)
elif msg.text in ["Mybio","Mey1"]:
h = cl.getContact(mid)
cl.sendText(msg.to,"===[StatusMessage]===\n" + h.statusMessage)
elif msg.text in ["Mypict","Mey2"]:
h = cl.getContact(mid)
cl.sendImageWithUrl(msg.to,"http://dl.profile.line-cdn.net/" + h.pictureStatus)
elif msg.text in ["Myvid","Mey3"]:
h = cl.getContact(mid)
cl.sendVideoWithUrl(msg.to,"http://dl.profile.line-cdn.net/" + h.pictureStatus)
elif msg.text in ["Urlpict","Mey4"]:
h = cl.getContact(mid)
cl.sendText(msg.to,"http://dl.profile.line-cdn.net/" + h.pictureStatus)
elif msg.text in ["Mycover","Mey5"]:
h = cl.getContact(mid)
cu = cl.channel.getCover(mid)
path = str(cu)
cl.sendImageWithUrl(msg.to, path)
elif msg.text in ["Urlcover","Mey6"]:
h = cl.getContact(mid)
cu = cl.channel.getCover(mid)
path = str(cu)
cl.sendText(msg.to, path)
elif "Getmid @" in msg.text:
_name = msg.text.replace("Getmid @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
for g in gs.members:
if _nametarget == g.displayName:
cl.sendText(msg.to, g.mid)
else:
pass
elif "#22Getinfo" in msg.text:
key = eval(msg.contentMetadata["MENTION"])
key1 = key["MENTIONEES"][0]["M"]
contact = cl.getContact(key1)
cu = cl.channel.getCover(key1)
try:
cl.sendText(msg.to,"Nama :\n" + contact.displayName + "\n\nMid :\n" + contact.mid + "\n\nBio :\n" + contact.statusMessage + "\n\nProfile Picture :\nhttp://dl.profile.line-cdn.net/" + contact.pictureStatus + "\n\nHeader :\n" + str(cu))
except:
cl.sendText(msg.to,"Nama :\n" + contact.displayName + "\n\nMid :\n" + contact.mid + "\n\nBio :\n" + contact.statusMessage + "\n\nProfile Picture :\n" + str(cu))
elif "Ph4" in msg.text:
key = eval(msg.contentMetadata["MENTION"])
key1 = key["MENTIONEES"][0]["M"]
contact = cl.getContact(key1)
cu = cl.channel.getCover(key1)
try:
cl.sendText(msg.to, "===[StatusMessage]===\n" + contact.statusMessage)
except:
cl.sendText(msg.to, "===[StatusMessage]===\n" + contact.statusMessage)
elif "Ph2" in msg.text:
key = eval(msg.contentMetadata["MENTION"])
key1 = key["MENTIONEES"][0]["M"]
contact = cl.getContact(key1)
cu = cl.channel.getCover(key1)
try:
cl.sendText(msg.to, "===[DisplayName]===\n" + contact.displayName)
except:
cl.sendText(msg.to, "===[DisplayName]===\n" + contact.displayName)
elif "mh2" in msg.text:
key = eval(msg.contentMetadata["MENTION"])
key1 = key["MENTIONEES"][0]["M"]
contact = cl.getContact(key1)
cu = cl.channel.getCover(key1)
path = str(cu)
image = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
try:
cl.sendText(msg.to,"Nama :\n" + contact.displayName + "\n\nBio :\n" + contact.statusMessage)
cl.sendText(msg.to,"Profile Picture " + contact.displayName)
cl.sendImageWithUrl(msg.to,image)
cl.sendText(msg.to,"Cover " + contact.displayName)
cl.sendImageWithUrl(msg.to,path)
except:
pass
elif "#picall" in msg.text:
nk0 = msg.text.replace("#picall","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"!!..ผิดพลาด")
pass
else:
for target in targets:
try:
contact = cl.getContact(target)
path = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
cl.sendImageWithUrl(msg.to, path)
except Exception as e:
raise e
elif "#pictall" in msg.text:
nk0 = msg.text.replace("#pictall","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"!!..ผิดพลาด")
pass
else:
for target in targets:
try:
contact = cl.getContact(target)
cu = cl.channel.getCover(target)
path = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
path = str(cu)
cl.sendImageWithUrl(msg.to, path)
except Exception as e:
raise e
elif "#phethackall" in msg.text:
nk0 = msg.text.replace("#phethackall","")
nk1 = nk0.lstrip()
nk2 = nk1.replace("","")
nk3 = nk2.rstrip()
_name = nk3
gs = cl.getGroup(msg.to)
targets = []
for s in gs.members:
if _name in s.displayName:
targets.append(s.mid)
if targets == []:
sendMessage(msg.to,"!!..ผิดพลาด")
pass
else:
for target in targets:
try:
contact = cl.getContact(target)
cu = cl.channel.getCover(target)
path = str(cu)
image = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
cl.sendText(msg.to,"Nama :\n" + contact.displayName + "\n\nBio :\n" + contact.statusMessage)
cl.sendText(msg.to,"Profile Picture " + contact.displayName)
cl.sendImageWithUrl(msg.to,image)
cl.sendText(msg.to,"Cover " + contact.displayName)
cl.sendImageWithUrl(msg.to, path)
except Exception as e:
raise e
elif "Ph3vdo @" in msg.text:
print "[Command]dp executing"
_name = msg.text.replace("Ph3vdo @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Contact not found")
else:
for target in targets:
try:
contact = cl.getContact(target)
path = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
cl.sendVideoWithUrl(msg.to, path)
except Exception as e:
raise e
print "[Command]dp executed"
elif "Ph3url @" in msg.text:
print "[Command]dp executing"
_name = msg.text.replace("Ph3url @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Contact not found")
else:
for target in targets:
try:
contact = cl.getContact(target)
path = "http://dl.profile.line-cdn.net/" + contact.pictureStatus
cl.sendText(msg.to, path)
except Exception as e:
raise e
print "[Command]dp executed"
elif "2url @" in msg.text:
print "[Command]cover executing"
_name = msg.text.replace("2url @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Contact not found")
else:
for target in targets:
try:
contact = cl.getContact(target)
cu = cl.channel.getCover(target)
path = str(cu)
cl.sendImageWithUrl(msg.to, path)
except Exception as e:
raise e
print "[Command]cover executed"
elif "Ph2url @" in msg.text:
print "[Command]cover executing"
_name = msg.text.replace("Ph2url @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to,"Contact not found")
else:
for target in targets:
try:
contact = cl.getContact(target)
cu = cl.channel.getCover(target)
path = str(cu)
cl.sendText(msg.to, path)
except Exception as e:
raise e
print "[Command]cover executed"
elif "เจ้งเตือน" in msg.text:
group = cl.getGroup(msg.to)
path = "http://dl.profile.line-cdn.net/" + group.pictureStatus
cl.sendImageWithUrl(msg.to,path)
elif "Mycopy @" in msg.text:
print "[COPY] Ok"
_name = msg.text.replace("Mycopy @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to, "Not Found...")
else:
for target in targets:
try:
cl.CloneContactProfile(target)
cl.sendText(msg.to, "Copied.")
except Exception as e:
print e
elif msg.text in ["Mybb"]:
try:
cl.updateDisplayPicture(backup.pictureStatus)
cl.updateProfile(backup)
cl.sendText(msg.to, "Refreshed.")
except Exception as e:
cl.sendText(msg.to, str(e))
elif "Botcopy @" in msg.text:
print "[COPY] Ok"
_name = msg.text.replace("Botcopy @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
targets = []
for g in gs.members:
if _nametarget == g.displayName:
targets.append(g.mid)
if targets == []:
cl.sendText(msg.to, "Not Found...")
else:
for target in targets:
try:
ki1.CloneContactProfile(target)
ki1.sendText(msg.to, "Copied.")
ki2.CloneContactProfile(target)
ki2.sendText(msg.to, "Copied.")
ki3.CloneContactProfile(target)
ki3.sendText(msg.to, "Copied.")
ki4.CloneContactProfile(target)
ki4.sendText(msg.to, "Copied.")
ki5.CloneContactProfile(target)
ki5.sendText(msg.to, "Copied.")
except Exception as e:
print e
#==============================================================================#
elif "[Auto Respond]" in msg.text:
cl.sendImageWithUrl(msg.to, "http://dl.profile.line.naver.jp/0hlGvN3GXvM2hLNx8goPtMP3dyPQU8GSIgJVUpCTpiPVtiA3M2clJ-C2hia11mUn04cAJ-DWljOVBj")
elif "Fancytext: " in msg.text:
txt = msg.text.replace("Fancytext: ", "")
cl.kedapkedip(msg.to,txt)
print "[Command] Kedapkedip"
elif "Tx: " in msg.text:
txt = msg.text.replace("Tx: ", "")
cl.kedapkedip(msg.to,txt)
print "[Command] Kedapkedip"
elif "Bx: " in msg.text:
txt = msg.text.replace("Bx: ", "")
# Send the flashing text 38 times in a row (the Bx command spams it).
for _ in range(38):
    ki1.kedapkedip(msg.to, txt)
print "[Command] Kedapkedip"
elif "Tx10: " in msg.text:
txt = msg.text.replace("Tx10: ", "")
# Send the flashing text 11 times in a row.
for _ in range(11):
    cl.kedapkedip(msg.to, txt)
print "[Command] Kedapkedip"
elif "Tr-id " in msg.text:
isi = msg.text.replace("Tr-id ","")
translator = Translator()
hasil = translator.translate(isi, dest='id')
A = hasil.text
A = A.encode('utf-8')
cl.sendText(msg.to, A)
elif "Tr-en " in msg.text:
isi = msg.text.replace("Tr-en ","")
translator = Translator()
hasil = translator.translate(isi, dest='en')
A = hasil.text
A = A.encode('utf-8')
cl.sendText(msg.to, A)
elif "Tr-ar" in msg.text:
isi = msg.text.replace("Tr-ar ","")
translator = Translator()
hasil = translator.translate(isi, dest='ar')
A = hasil.text
A = A.encode('utf-8')
cl.sendText(msg.to, A)
elif "Tr-jp" in msg.text:
isi = msg.text.replace("Tr-jp ","")
translator = Translator()
hasil = translator.translate(isi, dest='ja')
A = hasil.text
A = A.encode('utf-8')
cl.sendText(msg.to, A)
elif "Tr-ko" in msg.text:
isi = msg.text.replace("Tr-ko ","")
translator = Translator()
hasil = translator.translate(isi, dest='ko')
A = hasil.text
A = A.encode('utf-8')
cl.sendText(msg.to, A)
elif "Id@en" in msg.text:
bahasa_awal = 'id'
bahasa_tujuan = 'en'
kata = msg.text.replace("Id@en ","")
url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
agent = {'User-Agent':'Mozilla/5.0'}
cari_hasil = 'class="t0">'
request = urllib2.Request(url, headers=agent)
page = urllib2.urlopen(request).read()
result = page[page.find(cari_hasil)+len(cari_hasil):]
result = result.split("<")[0]
cl.sendText(msg.to,"----FROM ID----\n" + "" + kata + "\n----TO ENGLISH----\n" + "" + result + "\n------SUKSES-----")
elif "En@id" in msg.text:
bahasa_awal = 'en'
bahasa_tujuan = 'id'
kata = msg.text.replace("En@id ","")
url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
agent = {'User-Agent':'Mozilla/5.0'}
cari_hasil = 'class="t0">'
request = urllib2.Request(url, headers=agent)
page = urllib2.urlopen(request).read()
result = page[page.find(cari_hasil)+len(cari_hasil):]
result = result.split("<")[0]
cl.sendText(msg.to,"----FROM EN----\n" + "" + kata + "\n----TO ID----\n" + "" + result + "\n------SUKSES-----")
elif "Id@jp" in msg.text:
bahasa_awal = 'id'
bahasa_tujuan = 'ja'
kata = msg.text.replace("Id@jp ","")
url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
agent = {'User-Agent':'Mozilla/5.0'}
cari_hasil = 'class="t0">'
request = urllib2.Request(url, headers=agent)
page = urllib2.urlopen(request).read()
result = page[page.find(cari_hasil)+len(cari_hasil):]
result = result.split("<")[0]
cl.sendText(msg.to,"----FROM ID----\n" + "" + kata + "\n----TO JP----\n" + "" + result + "\n------SUKSES-----")
elif "Jp@id" in msg.text:
bahasa_awal = 'ja'
bahasa_tujuan = 'id'
kata = msg.text.replace("Jp@id ","")
url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
agent = {'User-Agent':'Mozilla/5.0'}
cari_hasil = 'class="t0">'
request = urllib2.Request(url, headers=agent)
page = urllib2.urlopen(request).read()
result = page[page.find(cari_hasil)+len(cari_hasil):]
result = result.split("<")[0]
cl.sendText(msg.to,"----FROM JP----\n" + "" + kata + "\n----TO ID----\n" + "" + result + "\n------SUKSES-----")
elif "Id@th" in msg.text:
bahasa_awal = 'id'
bahasa_tujuan = 'th'
kata = msg.text.replace("Id@th ","")
url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
agent = {'User-Agent':'Mozilla/5.0'}
cari_hasil = 'class="t0">'
request = urllib2.Request(url, headers=agent)
page = urllib2.urlopen(request).read()
result = page[page.find(cari_hasil)+len(cari_hasil):]
result = result.split("<")[0]
cl.sendText(msg.to,"----FROM ID----\n" + "" + kata + "\n----TO TH----\n" + "" + result + "\n------SUKSES-----")
elif "Th@id" in msg.text:
bahasa_awal = 'th'
bahasa_tujuan = 'id'
kata = msg.text.replace("Th@id ","")
url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
agent = {'User-Agent':'Mozilla/5.0'}
cari_hasil = 'class="t0">'
request = urllib2.Request(url, headers=agent)
page = urllib2.urlopen(request).read()
result = page[page.find(cari_hasil)+len(cari_hasil):]
result = result.split("<")[0]
cl.sendText(msg.to,"----FROM TH----\n" + "" + kata + "\n----TO ID----\n" + "" + result + "\n------SUKSES-----")
elif "Id@jp" in msg.text:
bahasa_awal = 'id'
bahasa_tujuan = 'ja'
kata = msg.text.replace("Id@jp ","")
url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
agent = {'User-Agent':'Mozilla/5.0'}
cari_hasil = 'class="t0">'
request = urllib2.Request(url, headers=agent)
page = urllib2.urlopen(request).read()
result = page[page.find(cari_hasil)+len(cari_hasil):]
result = result.split("<")[0]
cl.sendText(msg.to,"----FROM ID----\n" + "" + kata + "\n----TO JP----\n" + "" + result + "\n------SUKSES-----")
elif "Id@ar" in msg.text:
bahasa_awal = 'id'
bahasa_tujuan = 'ar'
kata = msg.text.replace("Id@ar ","")
url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
agent = {'User-Agent':'Mozilla/5.0'}
cari_hasil = 'class="t0">'
request = urllib2.Request(url, headers=agent)
page = urllib2.urlopen(request).read()
result = page[page.find(cari_hasil)+len(cari_hasil):]
result = result.split("<")[0]
cl.sendText(msg.to,"----FROM ID----\n" + "" + kata + "\n----TO AR----\n" + "" + result + "\n------SUKSES-----")
elif "Ar@id" in msg.text:
bahasa_awal = 'ar'
bahasa_tujuan = 'id'
kata = msg.text.replace("Ar@id ","")
url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
agent = {'User-Agent':'Mozilla/5.0'}
cari_hasil = 'class="t0">'
request = urllib2.Request(url, headers=agent)
page = urllib2.urlopen(request).read()
result = page[page.find(cari_hasil)+len(cari_hasil):]
result = result.split("<")[0]
cl.sendText(msg.to,"----FROM AR----\n" + "" + kata + "\n----TO ID----\n" + "" + result + "\n------SUKSES-----")
elif "Id@ko" in msg.text:
bahasa_awal = 'id'
bahasa_tujuan = 'ko'
kata = msg.text.replace("Id@ko ","")
url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
agent = {'User-Agent':'Mozilla/5.0'}
cari_hasil = 'class="t0">'
request = urllib2.Request(url, headers=agent)
page = urllib2.urlopen(request).read()
result = page[page.find(cari_hasil)+len(cari_hasil):]
result = result.split("<")[0]
cl.sendText(msg.to,"----FROM ID----\n" + "" + kata + "\n----TO KO----\n" + "" + result + "\n------SUKSES-----")
elif "Ko@id" in msg.text:
bahasa_awal = 'ko'
bahasa_tujuan = 'id'
kata = msg.text.replace("Ko@id ","")
url = 'https://translate.google.com/m?sl=%s&tl=%s&ie=UTF-8&prev=_m&q=%s' % (bahasa_awal, bahasa_tujuan, kata.replace(" ", "+"))
agent = {'User-Agent':'Mozilla/5.0'}
cari_hasil = 'class="t0">'
request = urllib2.Request(url, headers=agent)
page = urllib2.urlopen(request).read()
result = page[page.find(cari_hasil)+len(cari_hasil):]
result = result.split("<")[0]
cl.sendText(msg.to,"----FROM KO----\n" + "" + kata + "\n----TO ID----\n" + "" + result + "\n------SUKSES-----")
elif msg.text.lower() == 'welcome':
ginfo = cl.getGroup(msg.to)
cl.sendText(msg.to,"Selamat Datang Di Grup " + str(ginfo.name))
jawaban1 = ("ยินดีต้อนรับเข้าสู่กลุ่ม " + str(ginfo.name))
cl.sendText(msg.to,"Owner Grup " + str(ginfo.name) + " :\n" + ginfo.creator.displayName )
tts = gTTS(text=jawaban1, lang='th')
tts.save('hasil.mp3')
cl.sendAudioWithUrl(msg.to,'hasil.mp3')
elif "Say-id " in msg.text:
say = msg.text.replace("Say-id ","")
lang = 'id'
tts = gTTS(text=say, lang=lang)
tts.save("hasil.mp3")
cl.sendAudioWithUrl(msg.to,"hasil.mp3")
elif "Say-en " in msg.text:
say = msg.text.replace("Say-en ","")
lang = 'en'
tts = gTTS(text=say, lang=lang)
tts.save("hasil.mp3")
cl.sendAudioWithUrl(msg.to,"hasil.mp3")
elif "Say-jp " in msg.text:
say = msg.text.replace("Say-jp ","")
lang = 'ja'
tts = gTTS(text=say, lang=lang)
tts.save("hasil.mp3")
cl.sendAudioWithUrl(msg.to,"hasil.mp3")
elif "Say-ar " in msg.text:
say = msg.text.replace("Say-ar ","")
lang = 'ar'
tts = gTTS(text=say, lang=lang)
tts.save("hasil.mp3")
cl.sendAudioWithUrl(msg.to,"hasil.mp3")
elif "Say-ko " in msg.text:
say = msg.text.replace("Say-ko ","")
lang = 'ko'
tts = gTTS(text=say, lang=lang)
tts.save("hasil.mp3")
cl.sendAudioWithUrl(msg.to,"hasil.mp3")
elif "Kapan " in msg.text:
tanya = msg.text.replace("Kapan ","")
jawab = ("kapan kapan","besok","satu abad lagi","Hari ini","Tahun depan","Minggu depan","Bulan depan","Sebentar lagi")
jawaban = random.choice(jawab)
tts = gTTS(text=jawaban, lang='id')
tts.save('tts.mp3')
cl.sendAudioWithUrl(msg.to,'tts.mp3')
elif "Apakah " in msg.text:
tanya = msg.text.replace("Apakah ","")
jawab = ("Ya","Tidak","Mungkin","Bisa jadi")
jawaban = random.choice(jawab)
tts = gTTS(text=jawaban, lang='id')
tts.save('tts.mp3')
cl.sendAudioWithUrl(msg.to,'tts.mp3')
elif '#dy ' in msg.text:
try:
textToSearch = (msg.text).replace('#dy ', "").strip()
query = urllib.quote(textToSearch)
url = "https://www.youtube.com/results?search_query=" + query
response = urllib2.urlopen(url)
html = response.read()
soup = BeautifulSoup(html, "html.parser")
results = soup.find(attrs={'class':'yt-uix-tile-link'})
ght = ('https://www.youtube.com' + results['href'])
cl.sendVideoWithUrl(msg.to, ght)
except:
cl.sendText(msg.to,"Could not find it")
elif 'mp4 ' in msg.text:
try:
textToSearch = (msg.text).replace('mp4 ',"").strip()
query = urllib.quote(textToSearch)
url = "https://www.youtube.com/results?search_query=" + query
response = urllib2.urlopen(url)
html = response.read()
soup = BeautifulSoup(html, "html.parser")
results = soup.find(attrs={'class':'yt-uix-tile-link'})
ght = ('https://www.youtube.com' + results['href'])
cl.sendVideoWithUrl(msg.to, ght)
except:
cl.sendText(msg.to, "Could not find it")
elif "Lirik " in msg.text:
try:
songname = msg.text.lower().replace("Lirik ","")
params = {'songname': songname}
r = requests.get('http://ide.fdlrcn.com/workspace/yumi-apis/joox?' + urllib.urlencode(params))
data = r.text
data = json.loads(data)
for song in data:
hasil = 'Lyric Lagu ('
hasil += song[0]
hasil += ')\n\n'
hasil += song[5]
cl.sendText(msg.to, hasil)
except Exception as wak:
cl.sendText(msg.to, str(wak))
elif "/vk " in msg.text:
try:
wiki = msg.text.lower().replace("/vk ","")
wikipedia.set_lang("th")
pesan="Title ("
pesan+=wikipedia.page(wiki).title
pesan+=")\n\n"
pesan+=wikipedia.summary(wiki, sentences=1)
pesan+="\n"
pesan+=wikipedia.page(wiki).url
cl.sendText(msg.to, pesan)
except:
try:
pesan="Over Text Limit! Please Click link\n"
pesan+=wikipedia.page(wiki).url
cl.sendText(msg.to, pesan)
except Exception as e:
cl.sendText(msg.to, str(e))
elif "Music " in msg.text:
try:
songname = msg.text.lower().replace("Music ","")
params = {'songname': songname}
r = requests.get('http://ide.fdlrcn.com/workspace/yumi-apis/joox?' + urllib.urlencode(params))
data = r.text
data = json.loads(data)
for song in data:
hasil = 'This is Your Music\n'
hasil += 'Judul : ' + song[0]
hasil += '\nDurasi : ' + song[1]
hasil += '\nLink Download : ' + song[4]
cl.sendText(msg.to, hasil)
cl.sendText(msg.to, "Please Wait for audio...")
cl.sendAudioWithUrl(msg.to, song[4])
except Exception as njer:
cl.sendText(msg.to, str(njer))
elif "#Image " in msg.text:
search = msg.text.replace("Image ","")
url = 'https://www.google.com/search?espv=2&biw=1366&bih=667&tbm=isch&oq=kuc&aqs=mobile-gws-lite.0.0l5&q=' + search
raw_html = (download_page(url))
items = []
items = items + (_images_get_all_items(raw_html))
path = random.choice(items)
print path
try:
cl.sendImageWithUrl(msg.to,path)
except:
pass
elif "#ค้นหารูปภาพ:" in msg.text:
search = msg.text.replace("ค้นหารูปภาพ:","")
url = 'https://www.google.com/search?espv=2&biw=1366&bih=667&tbm=isch&oq=kuc&aqs=mobile-gws-lite.0.0l5&q=' + search
raw_html = (download_page(url))
items = []
items = items + (_images_get_all_items(raw_html))
path = random.choice(items)
print path
try:
cl.sendImageWithUrl(msg.to,path)
except:
pass
elif "#Profileig " in msg.text:
try:
instagram = msg.text.replace("#Profileig ","")
response = requests.get("https://www.instagram.com/"+instagram+"?__a=1")
data = response.json()
namaIG = str(data['user']['full_name'])
bioIG = str(data['user']['biography'])
mediaIG = str(data['user']['media']['count'])
verifIG = str(data['user']['is_verified'])
usernameIG = str(data['user']['username'])
followerIG = str(data['user']['followed_by']['count'])
profileIG = data['user']['profile_pic_url_hd']
privateIG = str(data['user']['is_private'])
followIG = str(data['user']['follows']['count'])
link = "Link: " + "https://www.instagram.com/" + instagram
text = "Name : "+namaIG+"\nUsername : "+usernameIG+"\nBiography : "+bioIG+"\nFollower : "+followerIG+"\nFollowing : "+followIG+"\nPost : "+mediaIG+"\nVerified : "+verifIG+"\nPrivate : "+privateIG+"" "\n" + link
cl.sendImageWithUrl(msg.to, profileIG)
cl.sendText(msg.to, str(text))
except Exception as e:
cl.sendText(msg.to, str(e))
elif "Checkdate " in msg.text:
tanggal = msg.text.replace("Checkdate ","")
r=requests.get('https://script.google.com/macros/exec?service=AKfycbw7gKzP-WYV2F5mc9RaR7yE3Ve1yN91Tjs91hp_jHSE02dSv9w&nama=ervan&tanggal='+tanggal)
data=r.text
data=json.loads(data)
lahir = data["data"]["lahir"]
usia = data["data"]["usia"]
ultah = data["data"]["ultah"]
zodiak = data["data"]["zodiak"]
cl.sendText(msg.to,"============ I N F O R M A S I ============\n"+"Date Of Birth : "+lahir+"\nAge : "+usia+"\nUltah : "+ultah+"\nZodiak : "+zodiak+"\n============ I N F O R M A S I ============")
elif msg.text in ["Kalender","Time","Waktu"]:
timeNow = datetime.now()
timeHours = datetime.strftime(timeNow,"(%H:%M)")
day = ["Sunday", "Monday", "Tuesday", "Wednesday", "Thursday","Friday", "Saturday"]
hari = ["Minggu", "Senin", "Selasa", "Rabu", "Kamis", "Jumat", "Sabtu"]
bulan = ["Januari", "Februari", "Maret", "April", "Mei", "Juni", "Juli", "Agustus", "September", "Oktober", "November", "Desember"]
inihari = datetime.today()
hr = inihari.strftime('%A')
bln = inihari.strftime('%m')
for i in range(len(day)):
if hr == day[i]: hasil = hari[i]
for k in range(0, len(bulan)):
if int(bln) == k+1: blan = bulan[k]
rst = hasil + ", " + inihari.strftime('%d') + " - " + blan + " - " + inihari.strftime('%Y') + "\nJam : [ " + inihari.strftime('%H:%M:%S') + " ]"
cl.sendText(msg.to, rst)
#==============================================================================#
elif msg.text.lower() == 'ifconfig':
botKernel = subprocess.Popen(["ifconfig"], stdout=subprocess.PIPE).communicate()[0]
cl.sendText(msg.to, botKernel + "\n\n===SERVER INFO NetStat===")
elif msg.text.lower() == 'system':
botKernel = subprocess.Popen(["df","-h"], stdout=subprocess.PIPE).communicate()[0]
cl.sendText(msg.to, botKernel + "\n\n===SERVER INFO SYSTEM===")
elif msg.text.lower() == 'kernel':
botKernel = subprocess.Popen(["uname","-srvmpio"], stdout=subprocess.PIPE).communicate()[0]
cl.sendText(msg.to, botKernel + "\n\n===SERVER INFO KERNEL===")
elif msg.text.lower() == 'cpu':
botKernel = subprocess.Popen(["cat","/proc/cpuinfo"], stdout=subprocess.PIPE).communicate()[0]
cl.sendText(msg.to, botKernel + "\n\n===SERVER INFO CPU===")
elif msg.text in ["Pmcheck","เชคดำ","เช็คดำ"]:
if wait["blacklist"] == {}:
cl.sendText(msg.to,"Tidak Ada Blacklist")
else:
cl.sendText(msg.to,"Daftar Banlist")
num=1
msgs="══════════List Blacklist═════════"
for mi_d in wait["blacklist"]:
msgs+="\n[%i] %s" % (num, cl.getContact(mi_d).displayName)
num=(num+1)
msgs+="\n══════════List Blacklist═════════\n\nTotal Blacklist : %i" % len(wait["blacklist"])
cl.sendText(msg.to, msgs)
elif msg.text in ["Mcheckcontact","Cb"]:
if wait["blacklist"] == {}:
cl.sendText(msg.to,"Tidak Ada Blacklist")
else:
cl.sendText(msg.to,"Daftar Blacklist")
h = ""
for i in wait["blacklist"]:
h = cl.getContact(i)
M = Message()
M.to = msg.to
M.contentType = 13
M.contentMetadata = {'mid': i}
cl.sendMessage(M)
elif msg.text in ["Midban","Mid ban"]:
if msg.toType == 2:
group = cl.getGroup(msg.to)
gMembMids = [contact.mid for contact in group.members]
matched_list = []
for tag in wait["blacklist"]:
matched_list+=filter(lambda str: str == tag, gMembMids)
num=1
cocoa = "══════════List Blacklist═════════"
for mm in matched_list:
cocoa+="\n[%i] %s" % (num, mm)
num=(num+1)
cocoa+="\n═════════List Blacklist═════════\n\nTotal Blacklist : %i" % len(matched_list)
cl.sendText(msg.to,cocoa)
elif msg.text.lower() == '1kill':
if msg.toType == 2:
group = cl.getGroup(msg.to)
gMembMids = [contact.mid for contact in group.members]
matched_list = []
for tag in wait["blacklist"]:
matched_list+=filter(lambda str: str == tag, gMembMids)
if matched_list == []:
ki1.sendText(msg.to,"Tidak ada Daftar Blacklist")
return
for jj in matched_list:
try:
ki1.kickoutFromGroup(msg.to,[jj])
print (msg.to,[jj])
except:
pass
#==============================================#
elif msg.text in ["in on"]:
if msg.from_ in admin:
if wait["pautoJoin"] == True:
if wait["lang"] == "JP":
cl.sendText(msg.to,"already on")
else:
cl.sendText(msg.to,"done")
else:
wait["pautoJoin"] = True
if wait["lang"] == "JP":
cl.sendText(msg.to,"already on")
else:
cl.sendText(msg.to,"done")
elif msg.text in ["in off"]:
if msg.from_ in admin:
if wait["pautoJoin"] == False:
if wait["lang"] == "JP":
cl.sendText(msg.to,"already off")
else:
cl.sendText(msg.to,"done")
else:
wait["pautoJoin"] = False
if wait["lang"] == "JP":
cl.sendText(msg.to,"already off")
else:
cl.sendText(msg.to,"done")
elif "Hack4" in msg.text:
key = eval(msg.contentMetadata["MENTION"])
key1 = key["MENTIONEES"][0]["M"]
contact = cl.getContact(key1)
cu = cl.channel.getCover(key1)
try:
cl.sendText(msg.to,"[name]\n" + contact.displayName + "\n[mid]\n" + contact.mid + "\n[statusmessage]\n" + contact.statusMessage + "\n[profilePicture]\nhttp://dl.profile.line-cdn.net/" + contact.pictureStatus + "\n[homePicture]\n" + str(cu))
except:
cl.sendText(msg.to,"[name]\n" + contact.displayName + "\n[mid]\n" + contact.mid + "\n[statusmessage]\n" + contact.statusMessage + "\n[homePicture]\n" + str(cu))
#=============================================
elif msg.text in ["!Sp"]:
start = time.time()
cl.sendText(msg.to, "Waiting...")
elapsed_time = time.time() - start
cl.sendText(msg.to, "%sTamii Server" % (elapsed_time))
# ----------------- BAN MEMBER BY TAG 2TAG ATAU 10TAG MEMBER
elif ("Bl " in msg.text):
if msg.from_ in admin:
key = eval(msg.contentMetadata["MENTION"])
key["MENTIONEES"][0]["M"]
targets = []
for x in key["MENTIONEES"]:
targets.append(x["M"])
for target in targets:
try:
wait["blacklist"][target] = True
f=codecs.open('st2__b.json','w','utf-8')
json.dump(wait["blacklist"], f, sort_keys=True, indent=4,ensure_ascii=False)
cl.sendText(msg.to,"Succes Banned Bos")
except:
pass
#-------------Fungsi Respon Start---------------------#
elif msg.text in ["#Cinvite"]:
if msg.from_ in admin:
wait["winvite"] = True
cl.sendText(msg.to,"send contact 😉")
elif "Gift @" in msg.text:
_name = msg.text.replace("Gift @","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
for g in gs.members:
if _nametarget == g.displayName:
msg.contentType = 2
msg.contentMetadata={'PRDID': '89131c1a-e549-4bd5-9e60-e24de0d2e252',
'PRDTYPE': 'THEME',
'MSGTPL': '10'}
msg.text = None
cl.sendMessage(msg,g)
cl.sendText(msg.to, "Done...")
elif msg.text in ["Mchecky"]:
if wait["blacklist"] == {}:
cl.sendText(msg.to,"nothing")
else:
cl.sendText(msg.to,"Blacklist user\nมีบัญชีดำของคุณอยู่กลุ่มนี้")
xname = ""
for mi_d in wait["blacklist"]:
xname = cl.getContact(mi_d).displayName + ""
xlen = str(len(xname)+1)
msg.contentType = 0
msg.text = "@"+xname+" "
msg.contentMetadata ={'MENTION':'{"MENTIONEES":[{"S":"0","E":'+json.dumps(xlen)+',"M":'+json.dumps(mi_d)+'}]}','EMTVER':'4'}
try:
cl.sendMessage(msg)
except Exception as error:
print error
elif "มอง" in msg.text:
group = cl.getGroup(msg.to)
k = len(group.members)//100
for j in xrange(k+1):
msg = Message(to=msg.to)
txt = u''
s=0
d=[]
for i in group.members[j*100 : (j+1)*100]:
d.append({"S":str(s), "E" :str(s+8), "M":i.mid})
s += 9
txt += "@Krampus\n"
msg.text = txt
msg.contentMetadata = {u'MENTION':json.dumps({"MENTIONEES":d})}
cl.sendMessage(msg)
elif msg.text in ["Name me","Men"]:
G = cl.getProfile()
X = G.displayName
cl.sendText(msg.to,X)
elif "siri " in msg.text.lower():
query = msg.text.lower().replace("siri ","")
with requests.session() as s:
s.headers['user-agent'] = 'Mozilla/5.0'
url = 'https://google-translate-proxy.herokuapp.com/api/tts'
params = {'language': 'th', 'speed': '1', 'query': query}
r = s.get(url, params=params)
mp3 = r.url
cl.sendAudioWithUrl(msg.to, mp3)
elif "siri:" in msg.text.lower():
query = msg.text.lower().replace("siri:","")
with requests.session() as s:
s.headers['user-agent'] = 'Mozilla/5.0'
url = 'https://google-translate-proxy.herokuapp.com/api/tts'
params = {'language': 'th', 'speed': '1', 'query': query}
r = s.get(url, params=params)
mp3 = r.url
cl.sendAudioWithUrl(msg.to, mp3)
elif "siri-en " in msg.text.lower():
query = msg.text.lower().replace("siri-en ","")
with requests.session() as s:
s.headers['user-agent'] = 'Mozilla/5.0'
url = 'https://google-translate-proxy.herokuapp.com/api/tts'
params = {'language': 'en', 'speed': '1', 'query': query}
r = s.get(url, params=params)
mp3 = r.url
cl.sendAudioWithUrl(msg.to, mp3)
elif "พูด " in msg.text.lower():
query = msg.text.lower().replace("พูด ","")
with requests.session() as s:
s.headers['user-agent'] = 'Mozilla/5.0'
url = 'https://google-translate-proxy.herokuapp.com/api/tts'
params = {'language': 'th', 'speed': '1', 'query': query}
r = s.get(url, params=params)
mp3 = r.url
cl.sendAudioWithUrl(msg.to, mp3)
elif msg.text in ["1in","Bot1 in"]:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki1.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.01)
G = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki1.updateGroup(G)
print "kickers_Ok"
elif msg.text in ["2in","Bot2 in"]:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki2.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.01)
G = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki2.updateGroup(G)
print "kickers_Ok"
elif msg.text in ["3in","Bot3 in"]:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki3.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.01)
G = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki3.updateGroup(G)
print "kickers_Ok"
elif msg.text in ["4in","Bot4 in"]:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki4.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.01)
G = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki4.updateGroup(G)
print "kickers_Ok"
elif msg.text in ["5in","Bot5 in"]:
G = cl.getGroup(msg.to)
ginfo = cl.getGroup(msg.to)
G.preventJoinByTicket = False
cl.updateGroup(G)
invsend = 0
Ticket = cl.reissueGroupTicket(msg.to)
ki5.acceptGroupInvitationByTicket(msg.to,Ticket)
time.sleep(0.01)
G = cl.getGroup(msg.to)
G.preventJoinByTicket = True
ki5.updateGroup(G)
print "kickers_Ok"
elif '/w ' in msg.text.lower():
try:
wiki = msg.text.lower().replace("/w ","")
wikipedia.set_lang("th")
pesan="Wikipedia : "
pesan+=wikipedia.page(wiki).title
pesan+="\n\n"
pesan+=wikipedia.summary(wiki, sentences=1)
pesan+="\n"
pesan+=wikipedia.page(wiki).url
cl.sendText(msg.to, pesan)
except:
try:
pesan="Text Terlalu Panjang Silahkan Click link di bawah ini\n"
pesan+=wikipedia.page(wiki).url
cl.sendText(msg.to, pesan)
except Exception as e:
cl.sendText(msg.to, str(e))
elif "/go " in msg.text:
tanggal = msg.text.replace("/go ","")
r=requests.get('https://script.google.com/macros/exec?service=AKfycbw7gKzP-WYV2F5mc9RaR7yE3Ve1yN91Tjs91hp_jHSE02dSv9w&nama=ervan&tanggal='+tanggal)
data=r.text
data=json.loads(data)
lahir = data["data"]["lahir"]
usia = data["data"]["usia"]
ultah = data["data"]["ultah"]
zodiak = data["data"]["zodiak"]
cl.sendText(msg.to,"Tanggal Lahir : "+lahir+"\n\nUmur : "+usia+"\n\nUltah : "+ultah+"\n\nZodiak : "+zodiak)
elif "declined" in msg.text:
if msg.toType == 2:
ginfo = cl.getGroup(msg.to)
try:
cl.leaveGroup(msg.to)
except:
pass
elif "[Auto] " in msg.text:
msg.contentType = 13
_name = msg.text.replace("[Auto] ","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
for g in gs.members:
if _nametarget == g.displayName:
msg.contentMetadata = {'mid': g.mid}
cl.sendMessage(msg)
else:
pass
elif "☜ʕ•ﻌ•ʔ " in msg.text:
msg.contentType = 13
_name = msg.text.replace("☜ʕ•ﻌ•ʔ ","")
_nametarget = _name.rstrip(' ')
gs = cl.getGroup(msg.to)
for g in gs.members:
if _nametarget == g.displayName:
msg.contentMetadata = {'mid': g.mid}
cl.sendMessage(msg)
else:
pass
if op.type == 25:
msg = op.message
if msg.text.lower() in ["pheytcg fgtagg all"]:
group = cl.getGroup(msg.to)
nama = [contact.mid for contact in group.members]
nm1, nm2, nm3, nm4, nm5, jml = [], [], [], [], [], len(nama)
if jml <= 100:
mention(msg.to, nama)
if jml > 100 and jml < 200:
for i in range(0, 100):
nm1 += [nama[i]]
mention(msg.to, nm1)
for j in range(101, len(nama)):
nm2 += [nama[j]]
mention(msg.to, nm2)
if jml > 200 and jml < 300:
for i in range(0, 100):
nm1 += [nama[i]]
mention(msg.to, nm1)
for j in range(101, 200):
nm2 += [nama[j]]
mention(msg.to, nm2)
for k in range(201, len(nama)):
nm3 += [nama[k]]
mention(msg.to, nm3)
if jml > 300 and jml < 400:
for i in range(0, 100):
nm1 += [nama[i]]
mention(msg.to, nm1)
for j in range(101, 200):
nm2 += [nama[j]]
mention(msg.to, nm2)
for k in range(201, 300):
nm3 += [nama[k]]
mention(msg.to, nm3)
for l in range(301, len(nama)):
nm4 += [nama[l]]
mention(msg.to, nm4)
if jml > 400 and jml < 500:
for i in range(0, 100):
nm1 += [nama[i]]
mention(msg.to, nm1)
for j in range(101, 200):
nm2 += [nama[j]]
mention(msg.to, nm2)
for k in range(201, 300):
nm3 += [nama[k]]
mention(msg.to, nm3)
for l in range(301, 400):
nm4 += [nama[l]]
mention(msg.to, nm4)
for h in range(401, len(nama)):
nm5 += [nama[h]]
mention(msg.to, nm5)
if jml > 500:
cl.sendText(msg.to,'Member melebihi batas.')
cnt = Message()
cnt.text = "PHET TAG DONE : " + str(jml) + " Members"
cnt.to = msg.to
cl.sendMessage(cnt)
if op.type == 26:
msg = op.message
if msg.text.lower() in ["1123"]:
group = cl.getGroup(msg.to)
nama = [contact.mid for contact in group.members]
nm1, nm2, nm3, nm4, nm5, jml = [], [], [], [], [], len(nama)
if jml <= 100:
mention(msg.to, nama)
if jml > 100 and jml < 200:
for i in range(0, 100):
nm1 += [nama[i]]
mention(msg.to, nm1)
for j in range(101, len(nama)):
nm2 += [nama[j]]
mention(msg.to, nm2)
if jml > 200 and jml < 300:
for i in range(0, 100):
nm1 += [nama[i]]
mention(msg.to, nm1)
for j in range(101, 200):
nm2 += [nama[j]]
mention(msg.to, nm2)
for k in range(201, len(nama)):
nm3 += [nama[k]]
mention(msg.to, nm3)
if jml > 300 and jml < 400:
for i in range(0, 100):
nm1 += [nama[i]]
mention(msg.to, nm1)
for j in range(101, 200):
nm2 += [nama[j]]
mention(msg.to, nm2)
for k in range(201, 300):
nm3 += [nama[k]]
mention(msg.to, nm3)
for l in range(301, len(nama)):
nm4 += [nama[l]]
mention(msg.to, nm4)
if jml > 400 and jml < 500:
for i in range(0, 100):
nm1 += [nama[i]]
mention(msg.to, nm1)
for j in range(101, 200):
nm2 += [nama[j]]
mention(msg.to, nm2)
for k in range(201, 300):
nm3 += [nama[k]]
mention(msg.to, nm3)
for l in range(301, 400):
nm4 += [nama[l]]
mention(msg.to, nm4)
for h in range(401, len(nama)):
nm5 += [nama[h]]
mention(msg.to, nm5)
if jml > 500:
cl.sendText(msg.to,'Member melebihi batas.')
cnt = Message()
cnt.text = "PHET TAG DONE : " + str(jml) + " Members"
cnt.to = msg.to
cl.sendMessage(cnt)
elif msg.text in [".me","me","Me"]:
cl.sendText(msg.to,"You.....")
elif "/ตั้งเวลา" == msg.text.lower():
if msg.to in wait2['readPoint']:
try:
del wait2['readPoint'][msg.to]
del wait2['readMember'][msg.to]
del wait2['setTime'][msg.to]
except:
pass
wait2['readPoint'][msg.to] = msg.id
wait2['readMember'][msg.to] = ""
wait2['setTime'][msg.to] = datetime.now().strftime('%H:%M:%S')
wait2['ROM'][msg.to] = {}
with open('sider.json', 'w') as fp:
json.dump(wait2, fp, sort_keys=True, indent=4)
cl.sendText(msg.to,"Lurking already on\nเปิดการอ่านอัตโนมัตกรุณาพิมพ์ ➠ /อ่าน")
else:
try:
del wait2['readPoint'][msg.to]
del wait2['readMember'][msg.to]
del wait2['setTime'][msg.to]
except:
pass
wait2['readPoint'][msg.to] = msg.id
wait2['readMember'][msg.to] = ""
wait2['setTime'][msg.to] = datetime.now().strftime('%H:%M:%S')
wait2['ROM'][msg.to] = {}
with open('sider.json', 'w') as fp:
json.dump(wait2, fp, sort_keys=True, indent=4)
cl.sendText(msg.to, "โปรเเกรมเปิดการอ่านอัตโนมัต\nSet reading point:\n" + datetime.now().strftime('%H:%M:%S'))
print wait2
elif "/ปิดการอ่าน" == msg.text.lower():
if msg.to not in wait2['readPoint']:
cl.sendText(msg.to,"Lurking already off\nปิดการอ่านอัตโนมัต")
else:
try:
del wait2['readPoint'][msg.to]
del wait2['readMember'][msg.to]
del wait2['setTime'][msg.to]
except:
pass
cl.sendText(msg.to, "ปิดการอ่านอัตโนมัต\nDelete reading point:\n" + datetime.now().strftime('%H:%M:%S'))
elif "/อ่าน" == msg.text.lower():
if msg.to in wait2['readPoint']:
if wait2["ROM"][msg.to].items() == []:
cl.sendText(msg.to, "SELFBOT PHET HACK BOT\n\nLurkers:\nNone")
else:
chiya = []
for rom in wait2["ROM"][msg.to].items():
chiya.append(rom[1])
cmem = cl.getContacts(chiya)
zx = ""
zxc = ""
zx2 = []
xpesan = 'Lurkers:\n'
for x in range(len(cmem)):
xname = str(cmem[x].displayName)
pesan = ''
pesan2 = pesan+"@a\n"
xlen = str(len(zxc)+len(xpesan))
xlen2 = str(len(zxc)+len(pesan2)+len(xpesan)-1)
zx = {'S':xlen, 'E':xlen2, 'M':cmem[x].mid}
zx2.append(zx)
zxc += pesan2
msg.contentType = 0
print zxc
msg.text = xpesan+ zxc + "\nLurking time: %s\nCurrent time: %s"%(wait2['setTime'][msg.to],datetime.now().strftime('%H:%M:%S'))
lol ={'MENTION':str('{"MENTIONEES":'+json.dumps(zx2).replace(' ','')+'}')}
print lol
msg.contentMetadata = lol
try:
cl.sendMessage(msg)
except Exception as error:
print error
pass
else:
cl.sendText(msg.to, "กรุณาตั้งเวลาการอ่านใหม่อีกครั้งโปรดพิมพ์ ➠ /ตั้งเวลา")
elif msg.from_ in mimic["target"] and mimic["status"] == True and mimic["target"][msg.from_] == True:
text = msg.text
if text is not None:
cl.sendText(msg.to, "[อัตโนมัติ]: " + text)
else:
if msg.contentType == 7:
msg.contentType = 7
msg.text = None
msg.contentMetadata = {
"STKID": "6",
"STKPKGID": "1", "STKVER": "100" }
cl.sendMessage(msg)
if op.type == 15:
if wait["Notifed"] == True:
if op.param2 in Bots:
return
cl.sendText(op.param1,cl.getContact(op.param2).displayName + "\n เเล้วพบใหม่นะ ")
print "MEMBER OUT GROUP"
if op.type == 17:
if wait["Notifed"] == True:
if op.param2 in Bots:
return
cl.sendText(op.param1,cl.getContact(op.param2).displayName + " ☜ʕ•ﻌ•ʔ ")
cl.sendText(op.param1, " ยินดีต้อนรับครับ \n สวัสดีครับผม \n อย่าลืมปิดเสียงการเเจ้งเตือนด้วยนะ \n\n[By.TOME BOTLINE]")
print "MEMBER HAS JOIN THE GROUP"
if op.type == 19:
if wait["Notifed"] == True:
if op.param2 in Bots:
return
cl.sendText(op.param1,cl.getContact(op.param2).displayName + "\n ไม่น่าจะจุกเท่าไหร่หรอก ")
print "MEMBER HAS KICKOUT FROM THE GROUP"
if op.type == 15:
if wait["Notifedbot"] == True:
if op.param2 in Bots:
return
ki1.sendText(op.param1,cl.getContact(op.param2).displayName + "\n\n Bye~bye ")
ki2.sendText(op.param1,cl.getContact(op.param2).displayName + "\n\n Bye~bye ")
print "MEMBER OUT GROUP"
if op.type == 17:
if wait["Notifedbot"] == True:
if op.param2 in Bots:
return
ki1.sendText(op.param1,cl.getContact(op.param2).displayName + "\n\n\n\n[By. TOME BOTLINE]")
print "MEMBER HAS JOIN THE GROUP"
if op.type == 19:
if wait["Notifedbot"] == True:
if op.param2 in Bots:
return
ki1.sendText(op.param1,cl.getContact(op.param2).displayName + "\n ไม่น่าจะจุกเท่าไหร่หรอก ")
ki2.sendText(op.param1,cl.getContact(op.param2).displayName + "\n ไม่น่าจะจุกเท่าไหร่หรอก ")
print "MEMBER HAS KICKOUT FROM THE GROUP"
if op.type == 15:
if wait["bcommentOn"] == True:
if op.param2 in Bots:
return
cl.sendText(op.param1,cl.getContact(op.param2).displayName + "\n" + str(wait["bcomment"]))
print "MEMBER OUT GROUP"
if op.type == 17:
if wait["acommentOn"] == True:
if op.param2 in Bots:
return
cl.sendText(op.param1,cl.getContact(op.param2).displayName + "\n" + str(wait["acomment"]))
print "MEMBER HAS JOIN THE GROUP"
if op.type == 19:
if wait["ccommentOn"] == True:
if op.param2 in Bots:
return
cl.sendText(op.param1,cl.getContact(op.param2).displayName + "\n" + str(wait["ccomment"]))
print "MEMBER HAS KICKOUT FROM THE GROUP"
if op.type == 13:
if wait["Protectcancl"] == True:
if op.param2 not in Bots:
group = cl.getGroup(op.param1)
gMembMids = [contact.mid for contact in group.invitee]
random.choice(KAC).cancelGroupInvitation(op.param1, gMembMids)
if op.param3 == "1":
if op.param1 in protectname:
group = cl.getGroup(op.param1)
try:
group.name = wait["pro_name"][op.param1]
cl.updateGroup(group)
cl.sendText(op.param1, "Groupname protect now")
wait["blacklist"][op.param2] = True
f=codecs.open('st2__b.json','w','utf-8')
json.dump(wait["blacklist"], f, sort_keys=True, indent=4,ensure_ascii=False)
except Exception as e:
print e
pass
if op.type == 55:
try:
if op.param1 in wait2['readPoint']:
if op.param2 in wait2['readMember'][op.param1]:
pass
else:
wait2['readMember'][op.param1] += op.param2
wait2['ROM'][op.param1][op.param2] = op.param2
with open('sider.json', 'w') as fp:
json.dump(wait2, fp, sort_keys=True, indent=4)
else:
pass
except:
pass
if op.type == 59:
print op
except Exception as error:
print error
def autolike():
count = 1
while True:
try:
for posts in cl.activity(1)["result"]["posts"]:
if posts["postInfo"]["liked"] is False:
if wait["likeOn"] == True:
cl.like(posts["userInfo"]["writerMid"], posts["postInfo"]["postId"], 1001)
if wait["commentOn"] == True:
if posts["userInfo"]["writerMid"] in wait["commentBlack"]:
pass
else:
cl.comment(posts["userInfo"]["writerMid"],posts["postInfo"]["postId"],wait["comment"])
except:
count += 1
if(count == 50):
sys.exit(0)
else:
pass
thread1 = threading.Thread(target=autolike)
thread1.daemon = True
thread1.start()
def a2():
now2 = datetime.now()
nowT = datetime.strftime(now2,"%M")
if nowT in ["10","20","30","40","50","00"]:
return False
else:
return True
def nameUpdate():
while True:
try:
#while a2():
#pass
if wait["clock"] == True:
now2 = datetime.now()
nowT = datetime.strftime(now2,"༺%H:%M༻")
profile = cl.getProfile()
profile.displayName = wait["cName"] + nowT
cl.updateProfile(profile)
time.sleep(600)
except:
pass
thread2 = threading.Thread(target=nameUpdate)
thread2.daemon = True
thread2.start()
while True:
try:
Ops = cl.fetchOps(cl.Poll.rev, 5)
except EOFError:
raise Exception("It might be wrong revision\n" + str(cl.Poll.rev))
for Op in Ops:
if (Op.type != OpType.END_OF_OPERATION):
cl.Poll.rev = max(cl.Poll.rev, Op.revision)
bot(Op)
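# --- Illustrative sketch, not part of the original script ---
# The tag-all handlers above (op.type 25/26) slice the member list by hand into
# ranges of 100 (0-100, 101-200, ...), which skips one member at each boundary.
# A generic batching helper, assuming the same `cl` client and the `mention(to, mids)`
# helper already called above, could look like this:
def mention_all_sketch(to, member_mids, batch_size=100, limit=500):
    # Refuse oversized groups, mirroring the 'Member melebihi batas.' guard above.
    if len(member_mids) > limit:
        cl.sendText(to, 'Member melebihi batas.')
        return
    # Mention members in consecutive batches without skipping anyone.
    for start in range(0, len(member_mids), batch_size):
        mention(to, member_mids[start:start + batch_size])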
|
[
"noreply@github.com"
] |
tankeng.noreply@github.com
|
65ac87e32913f82bd9f12be4f6f81cffa19a4a1c
|
8c8a481485cd297180454a5495b68dd28867fd51
|
/Models/States.py
|
02af0f9d78f1d8495951c0d3bb45266180d88afb
|
[] |
no_license
|
M-R-FARHADI/Thermobrig
|
66cdf47a87b7353e9306bd69af2037c6b5ff6132
|
9ed107373f232cc0a007e5b6992d0ad4ad59102d
|
refs/heads/master
| 2023-02-07T19:17:27.547134
| 2020-12-29T07:05:39
| 2020-12-29T07:05:39
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,897
|
py
|
from pandas import DataFrame
from collections import OrderedDict
from dataclasses import dataclass, field
from typing import Union, Dict, List
from Utilities.Numeric import isNumeric, isWithin
@dataclass(unsafe_hash=True)
class StatePure:
P: float = float('nan')
T: float = float('nan')
mu: float = float('nan')
h: float = float('nan')
u: float = float('nan')
s: float = float('nan')
x: float = float('nan')
_properties_regular = ['P', 'T', 'mu', 'h', 'u', 's'] # ordered in preference to use in interpolation
_properties_mixture = ['x']
_properties_all = _properties_regular + _properties_mixture
def hasDefined(self, propertyName: str) -> bool:
"""Returns true if a value for the given property is defined."""
return isNumeric(getattr(self, propertyName))
def isFullyDefined(self, consider_mixProperties: bool = True) -> bool:
propertyList = self._properties_all
if not consider_mixProperties:
propertyList = self._properties_regular
if all(self.hasDefined(propertyName) for propertyName in propertyList):
return True
return False
def isFullyDefinable(self):
definable = False
saturated = (0 <= self.x <= 1)
if saturated:
if sum(1 for property_regular in self._properties_regular if isNumeric(getattr(self, property_regular))) >= 1:
# if saturated mixture, values of quality & 1 other intensive property are enough to fully define state
definable = True
else:
if sum(1 for property_regular in self._properties_regular if isNumeric(getattr(self, property_regular))) >= 2:
# if not a saturated mixture, values of 2 intensive properties (other than quality) are necessary to define state
definable = True
return definable
def get_asDict_definedProperties(self) -> OrderedDict:
"""Returns OrderedDict mapping property names to values for properties which have numeric values defined. Models are ordered according to preference in interpolation."""
return OrderedDict([(propertyName, getattr(self, propertyName)) for propertyName in self._properties_all if self.hasDefined(propertyName)])
def get_asList_definedPropertiesNames(self) -> List:
"""Returns list of property names whose values are numeric, i.e. whose values are defined."""
return [propertyName for propertyName in self._properties_all if self.hasDefined(propertyName)]
def get_asDict_allProperties(self, ordered: bool = False) -> Union[Dict, OrderedDict]:
dictClass = dict
if ordered:
dictClass = OrderedDict
return dictClass([(propertyName, getattr(self, propertyName)) for propertyName in self._properties_all])
def init_fromDFRow(self, dfRow: DataFrame):
"""Sets property attributes of the StatePure using columns of the provided DataFrame row. Assumes only a single row is provided."""
assert len(dfRow.index) == 1
dfRow = dfRow.squeeze()
missingProperties_inDFRow = []
for propertyName in self._properties_all:
if propertyName in dfRow.index:
setattr(self, propertyName, float(dfRow[propertyName]))
else:
missingProperties_inDFRow.append(propertyName)
if missingProperties_inDFRow != []:
print('Properties ' + str(missingProperties_inDFRow) + ' not provided in DataFrame row.')
return self
def init_fromDict(self, dictionary: Dict):
"""Sets property attributes of the StatePure using values provided in the dictionary."""
for propertyName in dictionary.keys():
if propertyName in self._properties_all:
setattr(self, propertyName, dictionary[propertyName])
return self
def init_fromState(self, state: 'StatePure'):
for propertyName in self._properties_all:
setattr(self, propertyName, getattr(state, propertyName))
def copy_fromState(self, referenceState: 'StatePure'):
for propertyName in self._properties_all:
if isNumeric(referenceValue := getattr(referenceState, propertyName)):
setattr(self, propertyName, referenceValue)
def copy_or_verify_fromState(self, referenceState: 'StatePure', pTolerance: float = 3):
"""Copies property values from the provided reference state. If property already has a value defined, compares it to the one desired to be assigned, raises error if values do not match.
If the values match, still copies the value from the referenceState - decimals might change."""
for propertyName in self._properties_all:
if isNumeric(referenceValue := getattr(referenceState, propertyName)):
if not isNumeric(getattr(self, propertyName)):
setattr(self, propertyName, referenceValue)
else:
# property has a value defined
if not isWithin(getattr(self, propertyName), pTolerance, '%', referenceValue):
raise AssertionError
def set(self, setDict: Dict):
"""Sets values of the properties to the values provided in the dictionary."""
for parameterName in setDict:
if parameterName in self._properties_all:
setattr(self, parameterName, setDict[parameterName])
def set_or_verify(self, setDict: Dict, percentDifference: float = 3):
for parameterName in setDict:
if parameterName in self._properties_all:
if not self.hasDefined(parameterName):
setattr(self, parameterName, setDict[parameterName])
else:
assert isWithin(getattr(self, parameterName), percentDifference, '%', setDict[parameterName])
class StateIGas(StatePure):
# In addition to properties of StatePure
s0: float = float('nan')
x: int = 2 # superheated vapor / gas
_properties_regular = ['P', 'T', 'mu', 'h', 'u', 's0'] # ordered in preference to use in interpolation
_properties_mixture = ['x']
_properties_all = _properties_regular + _properties_mixture
class FlowPoint(StatePure):
def __init__(self, baseState: StatePure, flow: 'Flow'):
"""Class for flow-aware states. Normally, states are unaware / independent of flows and are simply data containers for thermodynamic information. Flow points represent **points in flows**,
and hence allow access to flow data through the reference to the flow, and contain the state information inherently, in the same way as a state object."""
self.baseState = baseState
self.flow = flow
# Custom __eq__ method - the default version does not take flow of state into account. Overriding __eq__ could have been avoided by simpler use of dataclass but due to property (getter/setter) based
# nature of this dataclass, I used this method.
def __members(self):
return (self.baseState, self.flow)
def __eq__(self, other):
if isinstance(other, StatePure):
return all(getattr(self, property) == getattr(other, property) for property in self._properties_all) and (self.flow is other.flow)
else:
return False
def __hash__(self): # if __eq__ is overridden, __hash__ also needs to be overridden.
return hash(self.__members())
#
def get_P(self):
return getattr(self.baseState, 'P')
def set_P(self, value):
setattr(self.baseState, 'P', value)
P = property(fget=get_P, fset=set_P)
def get_T(self):
return getattr(self.baseState, 'T')
def set_T(self, value):
setattr(self.baseState, 'T', value)
T = property(fget=get_T, fset=set_T)
def get_h(self):
return getattr(self.baseState, 'h')
def set_h(self, value):
setattr(self.baseState, 'h', value)
h = property(fget=get_h, fset=set_h)
def get_u(self):
return getattr(self.baseState, 'u')
def set_u(self, value):
setattr(self.baseState, 'u', value)
u = property(fget=get_u, fset=set_u)
def get_mu(self):
return getattr(self.baseState, 'mu')
def set_mu(self, value):
setattr(self.baseState, 'mu', value)
mu = property(fget=get_mu, fset=set_mu)
def get_s(self):
return getattr(self.baseState, 's')
def set_s(self, value):
setattr(self.baseState, 's', value)
s = property(fget=get_s, fset=set_s)
def get_x(self):
return getattr(self.baseState, 'x')
def set_x(self, value):
setattr(self.baseState, 'x', value)
x = property(fget=get_x, fset=set_x)
def get_s0(self):
return getattr(self.baseState, 's0')
def set_s0(self, value):
setattr(self.baseState, 's0', value)
s0 = property(fget=get_s0, fset=set_s0)
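# --- Illustrative usage sketch, not part of the original module ---
# Shows how StatePure tracks defined properties and how a FlowPoint writes
# through to its base state. The flow argument is a placeholder here (None),
# since a real Flow object is defined elsewhere in the project.
if __name__ == '__main__':
    state = StatePure(P=101.325, T=25)                 # two regular properties set
    print(state.get_asList_definedPropertiesNames())   # ['P', 'T']
    print(state.isFullyDefinable())                    # True: 2 non-quality properties known
    point = FlowPoint(baseState=state, flow=None)      # placeholder flow, illustration only
    point.h = 2500.0                                   # setter forwards to the base state
    print(state.h)                                     # 2500.0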
|
[
"unlusoycan@gmail.com"
] |
unlusoycan@gmail.com
|
96d43d8fa24fe2bf0141da26ab1de903a5a6164a
|
6d3c865ce6d9c416d8d11e91d6571a5154b036cf
|
/js_vacancies/apps.py
|
c28e61b96619b705fa4509492f9bf1a51fea5e6d
|
[] |
no_license
|
compoundpartners/js-vacancies
|
2cc94c842df980be177c6fa64b3879b5dcc50bbc
|
175d9f3673c7b002db5c0ea550bb0f29638b7cbb
|
refs/heads/master
| 2021-07-17T05:41:29.800636
| 2020-07-07T14:25:28
| 2020-07-07T14:25:28
| 178,962,329
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 152
|
py
|
# -*- coding: utf-8 -*-
from django.apps import AppConfig
class Vacancies(AppConfig):
name = 'js_vacancies'
verbose_name = 'Vacancies'
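# Hedged note (not part of this file): the AppConfig above takes effect once the
# project's settings.py lists it, e.g.
# INSTALLED_APPS = [
#     # ...
#     'js_vacancies.apps.Vacancies',
# ]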
|
[
"evgeny.dmi3ev@gmail.com"
] |
evgeny.dmi3ev@gmail.com
|
581cec1c23e01efee6f3cfc052c5291558001790
|
66f6b8dfbbaff85ba57bf9c2baec4450ca37bfef
|
/allennlp/data/dataset_readers/penn_tree_bank.py
|
0cd9135f8002e015b053d73db9fbefa73432d0fd
|
[
"Apache-2.0"
] |
permissive
|
siddsach/allennlp
|
cccae3063a20bd2093cd0543278ea2fc9e4a2cf8
|
17eaf449020d7307fca7aec901043dc43a16ff59
|
refs/heads/master
| 2021-04-06T07:30:49.620074
| 2018-03-17T08:51:03
| 2018-03-17T08:51:03
| 124,619,246
| 0
| 0
|
Apache-2.0
| 2018-03-17T08:51:04
| 2018-03-10T03:38:59
|
Python
|
UTF-8
|
Python
| false
| false
| 10,752
|
py
|
from typing import Dict, List, Tuple
import logging
import os
from overrides import overrides
# NLTK is so performance orientated (ha ha) that they have lazy imports. Why? Who knows.
from nltk.corpus.reader.bracket_parse import BracketParseCorpusReader # pylint: disable=no-name-in-module
from nltk.tree import Tree
from allennlp.common import Params
from allennlp.common.file_utils import cached_path
from allennlp.data.dataset_readers.dataset_reader import DatasetReader
from allennlp.data.fields import TextField, SpanField, SequenceLabelField, ListField, MetadataField, Field
from allennlp.data.instance import Instance
from allennlp.data.token_indexers import TokenIndexer, SingleIdTokenIndexer
from allennlp.data.tokenizers import Token
from allennlp.data.dataset_readers.dataset_utils.span_utils import enumerate_spans
from allennlp.common.checks import ConfigurationError
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
@DatasetReader.register("ptb_trees")
class PennTreeBankConstituencySpanDatasetReader(DatasetReader):
"""
Reads constituency parses from the WSJ part of the Penn Tree Bank from the LDC.
This ``DatasetReader`` is designed for use with a span labelling model, so
it enumerates all possible spans in the sentence and returns them, along with gold
labels for the relevant spans present in a gold tree, if provided.
Parameters
----------
token_indexers : ``Dict[str, TokenIndexer]``, optional (default=``{"tokens": SingleIdTokenIndexer()}``)
We use this to define the input representation for the text. See :class:`TokenIndexer`.
Note that the `output` tags will always correspond to single token IDs based on how they
are pre-tokenised in the data file.
use_pos_tags : ``bool``, optional, (default = ``True``)
Whether or not the instance should contain gold POS tags
as a field.
lazy : ``bool``, optional, (default = ``False``)
Whether or not instances can be consumed lazily.
"""
def __init__(self,
token_indexers: Dict[str, TokenIndexer] = None,
use_pos_tags: bool = True,
lazy: bool = False) -> None:
super().__init__(lazy=lazy)
self._token_indexers = token_indexers or {'tokens': SingleIdTokenIndexer()}
self._use_pos_tags = use_pos_tags
@overrides
def _read(self, file_path):
# if `file_path` is a URL, redirect to the cache
file_path = cached_path(file_path)
directory, filename = os.path.split(file_path)
logger.info("Reading instances from lines in file at: %s", file_path)
for parse in BracketParseCorpusReader(root=directory, fileids=[filename]).parsed_sents():
self._strip_functional_tags(parse)
# This is un-needed and clutters the label space.
# All the trees also contain a root S node.
if parse.label() == "VROOT":
parse = parse[0]
pos_tags = [x[1] for x in parse.pos()] if self._use_pos_tags else None
yield self.text_to_instance(parse.leaves(), pos_tags, parse)
@overrides
def text_to_instance(self, # type: ignore
tokens: List[str],
pos_tags: List[str] = None,
gold_tree: Tree = None) -> Instance:
"""
We take `pre-tokenized` input here, because we don't have a tokenizer in this class.
Parameters
----------
tokens : ``List[str]``, required.
The tokens in a given sentence.
pos_tags : ``List[str]``, optional, (default = None).
The POS tags for the words in the sentence.
gold_tree : ``Tree``, optional (default = None).
The gold parse tree to create span labels from.
Returns
-------
An ``Instance`` containing the following fields:
tokens : ``TextField``
The tokens in the sentence.
pos_tags : ``SequenceLabelField``
The POS tags of the words in the sentence.
Only returned if ``use_pos_tags`` is ``True``
spans : ``ListField[SpanField]``
A ListField containing all possible subspans of the
sentence.
span_labels : ``SequenceLabelField``, optional.
The constituency tags for each of the possible spans, with
respect to a gold parse tree. If a span is not contained
within the tree, a span will have a ``NO-LABEL`` label.
gold_tree : ``MetadataField(Tree)``
The gold NLTK parse tree for use in evaluation.
"""
# pylint: disable=arguments-differ
text_field = TextField([Token(x) for x in tokens], token_indexers=self._token_indexers)
fields: Dict[str, Field] = {"tokens": text_field}
if self._use_pos_tags and pos_tags is not None:
pos_tag_field = SequenceLabelField(pos_tags, text_field, label_namespace="pos")
fields["pos_tags"] = pos_tag_field
elif self._use_pos_tags:
raise ConfigurationError("use_pos_tags was set to True but no gold pos"
" tags were passed to the dataset reader.")
spans: List[Field] = []
gold_labels = []
if gold_tree is not None:
gold_spans_with_pos_tags: Dict[Tuple[int, int], str] = {}
self._get_gold_spans(gold_tree, 0, gold_spans_with_pos_tags)
gold_spans = {span: label for (span, label)
in gold_spans_with_pos_tags.items() if "-POS" not in label}
else:
gold_spans = None
for start, end in enumerate_spans(tokens):
spans.append(SpanField(start, end, text_field))
if gold_spans is not None:
if (start, end) in gold_spans.keys():
gold_labels.append(gold_spans[(start, end)])
else:
gold_labels.append("NO-LABEL")
metadata = {"tokens": tokens}
if gold_tree:
metadata["gold_tree"] = gold_tree
if self._use_pos_tags:
metadata["pos_tags"] = pos_tags
fields["metadata"] = MetadataField(metadata)
span_list_field: ListField = ListField(spans)
fields["spans"] = span_list_field
if gold_tree is not None:
fields["span_labels"] = SequenceLabelField(gold_labels,
span_list_field)
return Instance(fields)
def _strip_functional_tags(self, tree: Tree) -> None:
"""
Removes all functional tags from constituency labels in an NLTK tree.
We also strip off anything after a =, - or | character, because these
are functional tags which we don't want to use.
This modification is done in-place.
"""
clean_label = tree.label().split("=")[0].split("-")[0].split("|")[0]
tree.set_label(clean_label)
for child in tree:
if not isinstance(child[0], str):
self._strip_functional_tags(child)
def _get_gold_spans(self, # pylint: disable=arguments-differ
tree: Tree,
index: int,
typed_spans: Dict[Tuple[int, int], str]) -> int:
"""
Recursively construct the gold spans from an nltk ``Tree``.
Labels are the constituents, and in the case of nested constituents
with the same spans, labels are concatenated in parent-child order.
For example, ``(S (NP (D the) (N man)))`` would have an ``S-NP`` label
for the outer span, as it has both ``S`` and ``NP`` labels.
Spans are inclusive.
TODO(Mark): If we encounter a gold nested labelling at test time
which we haven't encountered, we won't be able to run the model
at all.
Parameters
----------
tree : ``Tree``, required.
An NLTK parse tree to extract spans from.
index : ``int``, required.
The index of the current span in the sentence being considered.
typed_spans : ``Dict[Tuple[int, int], str]``, required.
A dictionary mapping spans to span labels.
Returns
-------
typed_spans : ``Dict[Tuple[int, int], str]``.
A dictionary mapping all subtree spans in the parse tree
to their constituency labels. Leaf nodes have POS tag spans, which
are denoted by a label of "LABEL-POS".
"""
# NLTK leaves are strings.
if isinstance(tree[0], str):
# The "length" of a tree is defined by
# NLTK as the number of children.
# We don't actually want the spans for leaves, because
# their labels are POS tags. However, it makes the
# indexing more straightforward, so we'll collect them
# and filter them out below. We subtract 1 from the end
# index so the spans are inclusive.
end = index + len(tree)
typed_spans[(index, end - 1)] = tree.label() + "-POS"
else:
# otherwise, the tree has children.
child_start = index
for child in tree:
# typed_spans is being updated inplace.
end = self._get_gold_spans(child, child_start, typed_spans)
child_start = end
# Set the end index of the current span to
# the last appended index - 1, as the span is inclusive.
span = (index, end - 1)
current_span_label = typed_spans.get(span)
if current_span_label is None:
# This span doesn't have nested labels, just
# use the current node's label.
typed_spans[span] = tree.label()
else:
# This span has already been added, so prepend
# this label (as we are traversing the tree from
# the bottom up).
typed_spans[span] = tree.label() + "-" + current_span_label
return end
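    # Worked example (illustrative): calling _get_gold_spans on the tree
    #   (S (NP (D the) (N man)) (VP (V runs)))
    # with index 0 fills typed_spans with
    #   {(0, 0): 'D-POS', (1, 1): 'N-POS', (0, 1): 'NP',
    #    (2, 2): 'VP-V-POS', (0, 2): 'S'}
    # and entries whose label contains "-POS" are filtered out again in
    # text_to_instance.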
@classmethod
def from_params(cls, params: Params) -> 'PennTreeBankConstituencySpanDatasetReader':
token_indexers = TokenIndexer.dict_from_params(params.pop('token_indexers', {}))
use_pos_tags = params.pop('use_pos_tags', True)
lazy = params.pop('lazy', False)
params.assert_empty(cls.__name__)
return PennTreeBankConstituencySpanDatasetReader(token_indexers=token_indexers,
use_pos_tags=use_pos_tags,
lazy=lazy)
|
[
"noreply@github.com"
] |
siddsach.noreply@github.com
|
44821bdff3b8556116c400a1b2f16fed3d49ee25
|
9de2ba9978d133ea5ad78510de041487f1877816
|
/service/inter/__init__.py
|
fa0054bcb1ef57f621ac53796bf314e55c215296
|
[] |
no_license
|
yaochenkun/aialg-python
|
bc25e18d7bb76dcecaa18f10dff2444e3f0cc8dd
|
cce090eeda491f5d2eef08b3ba0b54e303042aad
|
refs/heads/master
| 2021-09-13T09:02:41.298523
| 2018-04-27T12:36:51
| 2018-04-27T12:36:51
| 108,155,303
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 117
|
py
|
__all__ = ['ttypes', 'constants', 'nlp_alg_service', 'image_alg_service', 'speech_alg_service', 'video_alg_service']
|
[
"liwangadd@gmail.com"
] |
liwangadd@gmail.com
|
5b377eddf555f4f895e5d9a03673fd35a5b796b4
|
801084adaaa40d9a0045ccf827be29b44426b4d1
|
/assignment 5 bonus.py
|
8ed8581c2894819415933a432815db1de8aa5026
|
[] |
no_license
|
RomanBallinFaxJah/Assignment-5
|
11023011b069ec6bd5d9da158cf6d7a39adacf3f
|
9d8b15b55bbffe5c3109b7f2115f14a83eae2068
|
refs/heads/master
| 2023-05-22T17:12:04.427059
| 2021-06-12T20:06:33
| 2021-06-12T20:06:33
| 376,368,408
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 297
|
py
|
from math import pi
#calculates circumference and area
radius = int(input('Enter radius: '))
area = pi * radius ** 2
circumference = 2 * pi * radius
#outputs area and circumference rounded to 3 decimal points
print(f'\nArea: {round(area, 3)}\nCircumference: {round(circumference, 3)}')
|
[
"noreply@github.com"
] |
RomanBallinFaxJah.noreply@github.com
|
4f596c420101e3d0cb7db56aec280d763311ef13
|
6f04a6ef99c581ed2f0519c897f254a7b63fb61d
|
/rastervision/data/vector_source/default.py
|
3946d67b4fa693f28e9a6590c44f1eadb29e48b8
|
[
"LicenseRef-scancode-generic-cla",
"Apache-2.0"
] |
permissive
|
dgketchum/raster-vision
|
18030c9a8bfe99386aa95adbf8e3ec51d204947f
|
fe74bef30daa5821023946576b00c584ddc56de8
|
refs/heads/master
| 2020-08-30T13:56:08.598240
| 2019-11-03T17:38:33
| 2019-11-03T17:38:33
| 218,400,435
| 3
| 1
|
NOASSERTION
| 2019-10-29T23:09:57
| 2019-10-29T23:09:57
| null |
UTF-8
|
Python
| false
| false
| 1,230
|
py
|
from abc import (ABC, abstractmethod)
import os
import rastervision as rv
class VectorSourceDefaultProvider(ABC):
@staticmethod
@abstractmethod
def handles(s):
"""Returns True of this provider is a default for this string"""
pass
@abstractmethod
def construct(s):
"""Constructs a default VectorSource based on the
string.
"""
pass
class GeoJSONVectorSourceDefaultProvider(VectorSourceDefaultProvider):
@staticmethod
def handles(uri):
ext = os.path.splitext(uri)[1]
return ext.lower() in ['.json', '.geojson']
@staticmethod
def construct(uri):
return rv.VectorSourceConfig.builder(rv.GEOJSON_SOURCE) \
.with_uri(uri) \
.build()
class VectorTileVectorSourceDefaultProvider(VectorSourceDefaultProvider):
@staticmethod
def handles(uri):
ext = os.path.splitext(uri)[1]
return ext.lower() in ['.pbf', '.mvt']
@staticmethod
def construct(uri):
return rv.VectorSourceConfig.builder(rv.VECTOR_TILE_SOURCE) \
.with_uri(uri) \
.build()
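# Illustrative behaviour of the providers above, following directly from the
# extension checks they implement:
#
#   GeoJSONVectorSourceDefaultProvider.handles('labels.geojson')  # True
#   GeoJSONVectorSourceDefaultProvider.handles('tiles.mvt')       # False
#   VectorTileVectorSourceDefaultProvider.handles('tiles.mvt')    # True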
|
[
"lewfish@gmail.com"
] |
lewfish@gmail.com
|
9aa448374059d0676a08d21260a20ddf73c763fb
|
05463cb39965208024aa691f501e112086f45715
|
/testldap/settings.py
|
de29792f581a310fa01ae387f4db1921ce09b06b
|
[] |
no_license
|
Melyxi/testldap
|
ea8bcaa078ad3601c92a2476dbe97523a2291696
|
8c052cc8403200ee8847d76cdbe98f109af9a968
|
refs/heads/master
| 2023-05-02T12:58:16.219280
| 2021-05-02T19:44:11
| 2021-05-02T19:44:11
| 347,760,742
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,654
|
py
|
"""
Django settings for testldap project.
Generated by 'django-admin startproject' using Django 2.1.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'e5@6el#2o$38ad*w&igoa9@^^4@=7lu2spg1g!#pvl!!sxs#00'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'bootstrap_modal_forms',
'widget_tweaks',
'authapp',
'playbooks',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'testldap.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'testldap.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATICFILES_FINDERS = [
# searches in STATICFILES_DIRS
'django.contrib.staticfiles.finders.FileSystemFinder',
# searches in STATIC subfolder of each app
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
]
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, "static"),
)
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
########################################################################################################################
# LDAP Authentication Settings
########################################################################################################################
import ldap
import os
from django_auth_ldap.config import LDAPSearch, GroupOfNamesType
#LDAP_SERVER = 'ipa.demo1.freeipa.org'
LDAP_SERVER = 'ipa.demo1.freeipa.org'
AUTH_LDAP_SERVER_URI = 'ldap://' + LDAP_SERVER
#conn = Connection(server, 'uid=admin,cn=users,cn=accounts,dc=demo1,dc=freeipa, dc=org', 'Secret123', auto_bind=True)
AUTH_LDAP_BIND_DN = 'uid=admin, cn=users, cn=accounts, dc=demo1, dc=freeipa, dc=org'
#AUTH_LDAP_BIND_DN = 'uid=admin, cn=users, cn=accounts, dc=demo1, dc=freeipa, dc=org'
os.environ['MY_PASS'] = 'Secret123'
AUTH_LDAP_BIND_PASSWORD = os.environ.get('MY_PASS')
AUTH_LDAP_USER_DN_TEMPLATE = 'uid=%(user)s, cn=users, cn=accounts, dc=demo1, dc=freeipa, dc=org'
#AUTH_LDAP_USER_DN_TEMPLATE = 'uid=%(user)s, cn=users, cn=accounts, dc=demo1, dc=freeipa, dc=org'
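# For example, a login as "employee" is bound with the DN
# 'uid=employee, cn=users, cn=accounts, dc=demo1, dc=freeipa, dc=org',
# since django-auth-ldap substitutes the submitted username for %(user)s.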
AUTH_LDAP_USER_ATTR_MAP = {
'first_name': 'givenName',
'last_name': 'sn',
'email': 'mail'
}
AUTH_LDAP_GROUP_BASE = "cn=groups,cn=accounts,dc=demo1, dc=freeipa,dc=org"
AUTH_LDAP_GROUP_FILTER = "(objectClass=groupOfNames)"
AUTH_LDAP_GROUP_SEARCH = LDAPSearch(AUTH_LDAP_GROUP_BASE,
ldap.SCOPE_SUBTREE, AUTH_LDAP_GROUP_FILTER)
AUTH_LDAP_GROUP_TYPE = GroupOfNamesType(name_attr="cn")
AUTH_LDAP_USER_FLAGS_BY_GROUP = {
'is_staff': 'cn=ipausers,' + AUTH_LDAP_GROUP_BASE,
'is_support': 'cn=ipausers,' + AUTH_LDAP_GROUP_BASE,
'is_superuser': 'cn=ipausers,' + AUTH_LDAP_GROUP_BASE,
}
AUTHENTICATION_BACKENDS = (
'django_auth_ldap.backend.LDAPBackend',
'django.contrib.auth.backends.ModelBackend',
)
# This is the default, but I like to be explicit.
AUTH_LDAP_ALWAYS_UPDATE_USER = True
# Use LDAP group membership to calculate group permissions.
AUTH_LDAP_FIND_GROUP_PERMS = True
# Cache group memberships for an hour to minimize LDAP traffic
AUTH_LDAP_CACHE_GROUPS = True
AUTH_LDAP_GROUP_CACHE_TIMEOUT = 3600
|
[
"igorkuz2018@yandex.ru"
] |
igorkuz2018@yandex.ru
|
9049cd5710e2708895a539776b48cf2983cf166f
|
26354ac2ef71f53be0078116a46f917bf00059df
|
/venv/TP1/Exo4.py
|
04b4552a3f79af073befbe3c57245cc2a0018d40
|
[] |
no_license
|
rraattnnii/MyPythonProjects
|
151d8c4442a564fad0c405033413f303f7be6ac5
|
da609d2c56ce798f760f4abb0a14d1523f9d2ae3
|
refs/heads/master
| 2020-09-08T14:25:56.274227
| 2019-11-12T07:58:39
| 2019-11-12T07:58:39
| 221,156,761
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 255
|
py
|
print("\n^^^^^ Exercice 4 : ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^")
# Exercise 4:
nomComplet = input("Saisir votre nom complet : ")
#nom_et_prenom = nom +" "+ prenom
age = input("Saisir votre age : ")
print(nomComplet + ", vous avez : " + age + " ans")
|
[
"mohmetal@live.fr"
] |
mohmetal@live.fr
|
30cddb6c1b1f8d348c3d729945e222719e0af836
|
7928200959c0b369239917d1993ba5f9e0400109
|
/其他/消除小碎斑.py
|
bb664b3d8bd7458d96bc2cea1d26a161bc0c24ae
|
[
"MIT"
] |
permissive
|
chenjl0710/arcpyTools
|
30b14e580125e115dd62d49b24af0f81a8dbb419
|
4f31e79f402cc2a0827450ab3aaba8f8d9a5f502
|
refs/heads/master
| 2022-11-24T00:49:41.802612
| 2020-08-26T03:28:29
| 2020-08-26T03:28:29
| 141,035,441
| 2
| 1
|
MIT
| 2022-11-22T01:10:07
| 2018-07-15T14:47:02
|
Python
|
GB18030
|
Python
| false
| false
| 3,023
|
py
|
# -*- coding: cp936 -*-
import arcpy
import time
import os,shutil
# Merge polygons whose area is below the threshold into the adjacent polygon with the largest area
arcpy.env.overwriteOutput = True
def EliminateSamllArea(shp):
# try:
# arcpy.AddField_management(shp,"Area","LONG",9,2)
# except:
# print "Area field already exists!"
#
# print "Calculating Area field..."
# arcpy.CalculateField_management(shp,"Area", '!SHAPE.AREA!',"PYTHON_9.3")
# print "finish Calculate Area field!"
temp = os.path.join(os.path.dirname(shp),"temp")
arcpy.MakeFeatureLayer_management(shp,"shp")
# sql = '"Area" < 400'
arcpy.SelectLayerByAttribute_management("shp","NEW_SELECTION", '"Area" = 0')
print "Eliminating..."
Eliminate = os.path.join(temp,"Eliminate.shp" )
print Eliminate
arcpy.Eliminate_management("shp", Eliminate, "AREA")
flag = True
i = 1
while flag:
print i
arcpy.CalculateField_management(Eliminate,"Area", '!SHAPE.AREA!',"PYTHON_9.3")
arcpy.MakeFeatureLayer_management(Eliminate,"Eliminate")
arcpy.SelectLayerByAttribute_management("Eliminate","NEW_SELECTION",'"Area" = 0')
FirstSelectionCount = arcpy.GetCount_management("Eliminate")
print "FirstSelectionCount:",int(FirstSelectionCount.getOutput(0))
arcpy.SelectLayerByLocation_management("Eliminate", "SHARE_A_LINE_SEGMENT_WITH", "Eliminate", "", "NEW_SELECTION")
SecondSelectionCount = arcpy.GetCount_management("Eliminate")
print "SecondSelectionCount:",int(SecondSelectionCount.getOutput(0))
Chacount = int(SecondSelectionCount.getOutput(0)) - int(FirstSelectionCount.getOutput(0))
print "Chacount:",Chacount
i = i + 1
if Chacount > 0:
flag = True
Eliminate = os.path.join(os.path.join(os.path.dirname(shp),"temp"),"Eliminate" + str(i) + ".shp" )
arcpy.Eliminate_management("Eliminate", Eliminate, "AREA")
else:
flag = False
arcpy.Copy_management(Eliminate, os.path.join(os.path.dirname(shp),"Result"))
print Eliminate
if __name__ == "__main__":
# xunhuanNUM = 1
    # arcpy.env.workspace = r"F:\fenkuai.gdb"  # changed
inFC = r"C:\Users\sherry\Desktop\泰和县\JiangXi_TaiHe_Landuse_20160818.shp"
# outFC = r"XinFeng1_Diss"
print "~~~~~~~~~~~~~Start~~~~~~~~~~~~"
print time.strftime('%Y-%m-%d %H:%M:%S',time.gmtime())
Fielfolder = os.path.dirname(inFC)
temp = os.path.join(Fielfolder,"temp")
if os.path.exists(temp):
shutil.rmtree(temp)
os.mkdir(temp)
else:
os.mkdir(temp)
print temp
Result = os.path.join(Fielfolder,"Result")
if os.path.exists(Result):
shutil.rmtree(Result)
os.mkdir(Result)
else:
os.mkdir(Result)
print Result
EliminateSamllArea(inFC)
print time.strftime('%Y-%m-%d %H:%M:%S',time.gmtime())
print "~~~~~~~~~~~~Ending~~~~~~~~~~~"
|
[
"32512198+chenjl0710@users.noreply.github.com"
] |
32512198+chenjl0710@users.noreply.github.com
|
b31f396350cbbd37359e1a9e342d4727d49e9c68
|
fd11966649c70d561ef66358e6fe34594e6afa93
|
/Lab 4/planning.py
|
ffeb050e872dc029b2985dec1081f19221b8bb14
|
[] |
no_license
|
parasj/CS3630
|
8836fa70e8a12a32f7c555d8836d33d545a031d2
|
baec4db82b9855c0f4b8abce833755f523022bf4
|
refs/heads/master
| 2021-03-16T08:28:55.779414
| 2017-04-25T20:58:45
| 2017-04-25T20:58:45
| 80,070,243
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 10,744
|
py
|
#author1: Paras Jain
#author2: Connor Lindquist
import asyncio
from grid import *
from visualizer import *
import threading
from queue import PriorityQueue
import math
import cozmo
from cozmo.util import degrees, distance_mm, speed_mmps
# State globals
ix = 0 # initial x position
iy = 0 # initial y position
grid_scale = 25
# Utility functions
def grid_dist(a, b):
ax, ay = a
bx, by = b
return math.sqrt((ax - bx) ** 2 + (ay - by) ** 2)
def c(start):
return (int(round(start[0])), int(round(start[1])))
def poseToGrid(pose: cozmo.util.Pose):
return c(poseToGridRaw(pose))
def poseToGridRaw(pose: cozmo.util.Pose):
pos = pose.position
x = ((pos.x - ix) / grid_scale) + 3
y = ((pos.y - iy) / grid_scale) + 2
return (x, y)
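# Example (illustrative): with grid_scale = 25, a pose offset by (+50 mm, +25 mm)
# from the calibration origin (ix, iy) maps to grid coordinates (5.0, 3.0),
# i.e. two cells right and one cell up from the (3, 2) start cell.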
# A*
def astar(grid: CozGrid, heuristic):
"""Perform the A* search algorithm on a defined grid
Arguments:
grid -- CozGrid instance to perform search on
heuristic -- supplied heuristic function
"""
start = grid.getStart()
goal = grid.getGoals()[0]
obstacles = grid._obstacles
    path, visited = astarImpl(heuristic, start, goal, grid)
grid.setPath(path)
for v in visited:
grid.addVisited(v)
def astarImpl(heuristic, start, goal, grid: CozGrid):
start = (int(round(start[0])), int(round(start[1])))
goal = (int(round(goal[0])), int(round(goal[1])))
visited = [start]
costs = {}
costs[start] = 0
front = PriorityQueue()
front.put((0 + heuristic(start, goal), 0, (start, [start])))
while not front.empty():
node = front.get()
cost, counter, data = node
cell, path = data
visited.append(cell)
if grid_dist(cell, goal) < 1:
return path, visited
else:
for neighborWithCost in grid.getNeighbors(cell):
neighbor, cost = neighborWithCost
if neighbor in grid._obstacles:
print(neighbor)
newCost = costs[cell] + cost
if neighbor not in costs or newCost < costs[neighbor]:
costs[neighbor] = newCost
priority = newCost + heuristic(neighbor, goal)
newpath = path[:]
newpath.append(neighbor)
front.put((priority, counter + 1, (neighbor, newpath)))
return [start], visited
def heuristic(current, goal):
"""Heuristic function for A* algorithm
Arguments:
current -- current cell
goal -- desired goal cell
"""
currx, curry = current
goalx, goaly = goal
return pow(pow(goaly - curry, 2) + pow(goalx - currx, 2), 0.5)
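# Example: heuristic((0, 0), (3, 4)) == 5.0, the straight-line Euclidean
# distance between the two cells.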
# add an obstacle to the map
def add_obstacle_to_grid(grid: CozGrid, obstacle):
ox, oy = c(obstacle)
for dx in [ -3, -2, -1, 0, 1, 2, 3 ]:
for dy in [-3, -2, -1, 0, 1, 2, 3]:
grid.addObstacle((ox + dx, oy + dy))
def add_goal_obstacle_to_grid(grid: CozGrid, obstacle, cube):
print("Goal cube 1 added")
ox, oy = c(obstacle)
oz = 180 + cube.pose.rotation.angle_z.degrees
for dx in [-4, -3, -2, -1, 0, 1, 2, 3, 4]:
for dy in [-4, -3, -2, -1, 0, 1, 2, 3, 4]:
grid.addObstacle((ox + dx, oy + dy))
dx = 5 * math.cos(math.radians(oz))
dy = 5 * math.sin(math.radians(oz))
g = c((ox + dx, oy + dy))
grid.addGoal(g)
return g
# find any new cubes and place them into the map if necessary. returns true if any cubes were added
def find_and_update_cubes(robot: cozmo.robot.Robot, seen_cubes: dict, grid: CozGrid, oldGoalPosition):
gP = oldGoalPosition
try:
cubes = list(robot.world.visible_objects)
except asyncio.TimeoutError:
print("find_and_update_cubes: timeout error")
return False, seen_cubes
else:
changed = False
for cube in cubes:
is_cube_1 = cube.object_id == robot.world.light_cubes[cozmo.objects.LightCube1Id].object_id
is_cube_2 = cube.object_id == robot.world.light_cubes[cozmo.objects.LightCube2Id].object_id
is_cube_3 = cube.object_id == robot.world.light_cubes[cozmo.objects.LightCube3Id].object_id
if 1 not in seen_cubes and is_cube_1:
print("I found cube 1 at " + str(poseToGridRaw(cube.pose)))
seen_cubes[1] = cube
changed = True
gP = add_goal_obstacle_to_grid(grid, poseToGridRaw(cube.pose), seen_cubes[1])
elif 2 not in seen_cubes and is_cube_2:
print("I found cube 2 at " + str(poseToGridRaw(cube.pose)))
seen_cubes[2] = cube
changed = True
add_obstacle_to_grid(grid, poseToGridRaw(cube.pose))
elif 3 not in seen_cubes and is_cube_3:
print("I found cube 3 at " + str(poseToGridRaw(cube.pose)))
seen_cubes[3] = cube
changed = True
add_obstacle_to_grid(grid, poseToGridRaw(cube.pose))
return changed, seen_cubes, gP
# straight line drive to
def drive_to(robot: cozmo.robot.Robot, pos):
nx, ny = pos
rx, ry = poseToGridRaw(robot.pose)
dx = nx - rx
dy = ny - ry
rz = robot.pose.rotation.angle_z
rotz = math.degrees(math.atan2(dy + .6, dx + .6))
robot.turn_in_place(degrees(rotz) - rz).wait_for_completed()
rx, ry = poseToGridRaw(robot.pose)
dx = nx - rx
dy = ny - ry
rotd = math.sqrt(math.pow(dx, 2) + math.pow(dy, 2))
robot.drive_straight(distance_mm(rotd * 25), speed_mmps(50)).wait_for_completed()
def init(robot: cozmo.robot.Robot):
global ix, iy
robot.move_lift(-3)
robot.set_head_angle(degrees(0)).wait_for_completed()
ix = robot.pose.position.x
iy = robot.pose.position.y
def cozmoBehavior(robot: cozmo.robot.Robot):
"""Cozmo search behavior. See assignment document for details
Has global access to grid, a CozGrid instance created by the main thread, and
stopevent, a threading.Event instance used to signal when the main thread has stopped.
You can use stopevent.is_set() to check its status or stopevent.wait() to wait for the
main thread to finish.
Arguments:
robot -- cozmo.robot.Robot instance, supplied by cozmo.run_program
"""
global grid, stopevent
state = "stopped"
init(robot)
print("Initial calibration ", str((ix, iy)))
cubes = {}
goalPosition = None
path = []
path_pos = -1
grid.setStart((3, 2))
for i in range(26):
grid.addObstacle((i, 0))
grid.addObstacle((i, 17))
for i in range(18):
grid.addObstacle((0, i))
grid.addObstacle((25, i))
while not stopevent.is_set():
cubes_changed, cubes, goalPosition = find_and_update_cubes(robot, cubes, grid, goalPosition)
if cubes_changed:
state = "stopped"
grid.setStart(poseToGrid(robot.pose))
if state != "done":
print("-- ", state, " --")
if state == "stopped":
if 1 not in cubes:
path, visited = astarImpl(heuristic, grid.getStart(), (13, 9), grid)
grid.clearVisited()
for v in visited:
grid.addVisited(v)
path_pos = 0
grid.clearGoals()
grid.addGoal((13, 9))
grid.setPath(path)
print("Plotted path for center")
state = "go_to_center"
else:
dest = goalPosition
path, visited = astarImpl(heuristic, grid.getStart(), dest, grid) # todo choose right side of cube
grid.clearVisited()
for v in visited:
grid.addVisited(v)
path_pos = 0
grid.clearGoals()
grid.addGoal(dest)
grid.setPath(path)
print("Plotted path for cube 1")
state = "drive"
elif state == "go_to_center":
if 1 not in cubes:
if grid_dist(poseToGridRaw(robot.pose), (13, 9)) < 1.41:
state = "search"
elif path_pos == len(path):
print("Huh... out of path options but I still am not in the center!")
state = "stopped"
else:
print("go_to_center driving from " + str(poseToGridRaw(robot.pose)) + " to " + str(path[path_pos]))
drive_to(robot, path[path_pos])
path_pos += 1
else:
state = "stopped"
elif state == "search":
if 1 not in cubes:
robot.turn_in_place(degrees(30)).wait_for_completed()
else:
state = "drive"
elif state == "drive":
print("In drive, goal is: ", goalPosition)
if 1 not in cubes:
state = "stopped"
else:
if grid_dist(poseToGridRaw(robot.pose), goalPosition) < 1:
state = "rotate"
elif path_pos == len(path):
print("Huh... out of path options but I still am not in the center!")
state = "stopped"
else:
print("drive driving from " + str(poseToGridRaw(robot.pose)) + " to " + str(path[path_pos]))
drive_to(robot, path[path_pos])
path_pos += 1
elif state == "rotate":
cozmoZ = robot.pose.rotation.angle_z.degrees
cubeZ = cubes[1].pose.rotation.angle_z.degrees + 180
angleZ = (cubeZ + 180)%360 - cozmoZ
print("CUBE: ", cubes[1].pose.rotation.angle_z.degrees + 180)
print("COZMO: ", robot.pose.rotation.angle_z.degrees)
print("Rotation value: ", angleZ)
robot.turn_in_place(degrees(angleZ)).wait_for_completed()
robot.play_anim(name="ID_pokedB").wait_for_completed()
state = "done"
######################## DO NOT MODIFY CODE BELOW THIS LINE ####################################
class RobotThread(threading.Thread):
"""Thread to run cozmo code separate from main thread
"""
def __init__(self):
threading.Thread.__init__(self, daemon=True)
def run(self):
cozmo.run_program(cozmoBehavior)
# If run as executable, start RobotThread and launch visualizer with empty grid file
if __name__ == "__main__":
global grid, stopevent
stopevent = threading.Event()
grid = CozGrid("emptygrid.json")
visualizer = Visualizer(grid)
updater = UpdateThread(visualizer)
updater.start()
robot = RobotThread()
robot.start()
visualizer.start()
stopevent.set()
|
[
"parasjain@parasjain.com"
] |
parasjain@parasjain.com
|
a5b9e77e1622370ce346f4060e2d06b8a3ed2ece
|
8f6e1af18963f9a49ff1e8406191106f2ff4db99
|
/login.py
|
ec3480d942532be129b9e6f5e937a084450a27d5
|
[
"MIT"
] |
permissive
|
HappyBoy007007/git_test
|
a68a685bb5403a9f6f6e638f51ceafe1fb6cab90
|
5b90390bbc404c494c4be5e90430a013fa587aad
|
refs/heads/master
| 2021-08-22T06:20:28.306265
| 2017-11-29T13:52:08
| 2017-11-29T13:52:08
| 112,475,032
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 38
|
py
|
num = 10
num2 = 20
num3 = 3
num4 = 4
|
[
"manager@163.com"
] |
manager@163.com
|
0b4c09b49d4e4bade511307fd9dd8f06b83a2362
|
f39dfb5b634410851f276b065c926b4cf42ae078
|
/figuras/PycharmKayStatisticalReport/kalman_filter_steady_state_example.py
|
02c933d3ccd530bee2c37762ee13893e5d6740f5
|
[
"MIT"
] |
permissive
|
santoshmore85/estudiando_el_kay
|
9fc57d2bbd884a4188a93a15a00436947d60338f
|
787f9b2599efeea93c1cf67408edcf1fa199c9b7
|
refs/heads/main
| 2023-08-26T23:05:00.701166
| 2021-10-29T21:49:08
| 2021-10-29T21:49:08
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,663
|
py
|
import matplotlib.pyplot as plt
import numpy.polynomial.polynomial as poly
import numpy as np
from scipy import signal
from matplotlib import rc
from matplotlib import rcParams
__author__ = 'ernesto'
# if use latex or mathtext
rc('text', usetex=True)
rcParams['text.latex.preamble']=[r"\usepackage{amsmath}"]
# colors
lgray = "#dddddd" # ligth gray
### Parameters ###
a = 0.9
var_u = 1
var_w = 1
# number of frequencies between 0 and 0.5
nf = 512
##################
# solve the steady-state Riccati equation
# to compute M[\infty]
p = [-var_u * var_w, var_u + var_w * (1 - a ** 2), a ** 2]
M_inf = np.max(poly.polyroots(p))
# compute Mp[\infty]
Mp_inf = (a ** 2) * M_inf + var_u
# compute K[\infty]
K_inf = Mp_inf / (var_w + Mp_inf)
print(M_inf)
print(Mp_inf)
print(K_inf)
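# Background for the polynomial above (a sketch, assuming the scalar
# Gauss-Markov model s[n] = a*s[n-1] + u[n], x[n] = s[n] + w[n] that is also
# implied by the AR(1) PSD used below): the steady-state MMSE satisfies
#   M = Mp*var_w / (Mp + var_w)   with   Mp = a^2*M + var_u,
# and eliminating Mp gives the quadratic
#   a^2*M^2 + (var_u + var_w*(1 - a^2))*M - var_u*var_w = 0,
# whose coefficients (constant, linear, quadratic) are exactly the list p;
# M_inf is its positive root.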
# vector of (normalized) frequencies
f = np.arange(nf)/(2 * nf)
# frequency response of the steady-state Kalman filter
_, H_inf = signal.freqz(K_inf, [1, -a * (1 - K_inf)], worN=nf, whole=False)
# PSD of the signal s[n] in steady state: AR(1) process
_, S = signal.freqz(var_u, [1, -a], worN=nf, whole=False)
Pss = np.square(np.abs(S))
H_inf_db = 20 * np.log10(np.abs(H_inf))
Pss_db = 10 * np.log10(Pss)
# the Kalman filter used as a whitening filter
_, H_w = signal.freqz([1, -a], [1, -a * (1 - K_inf)], worN=nf, whole=False)
# PSD of the data x[n]
Pxx = Pss + var_w
# PSD of the innovation
Pii = np.square(np.abs(H_w)) * Pxx
# for checking
Pii_2 = (var_u + var_w * (1 - 2 * a * np.cos(2 * np.pi * f) + a ** 2)) / \
(1 - 2 * a * (1 - K_inf) * np.cos(2 * np.pi * f) + (a * (1 - K_inf)) ** 2)
H_w_db = 20 * np.log10(np.abs(H_w))
Pxx_db = 10 * np.log10(Pxx)
Pii_db = 10 * np.log10(Pii)
# fontsize
fs = 12
fig = plt.figure(0, figsize=(9, 4), frameon=False)
ax = plt.subplot2grid((8, 4), (0, 0), rowspan=8, colspan=2)
plt.xlim(0, 0.5)
plt.ylim(-17, 21)
plt.plot(f, H_inf_db, 'k', label='$|H_\infty(f)|$', lw=2)
plt.plot(f, Pss_db, 'r', label='$P_{ss}(f)$', lw=2)
ax.set_xlabel('${\\rm Frecuencia\;normalizada}$', fontsize=fs)
ax.set_ylabel('${\\rm Magnitud\;(dB)}$', fontsize=fs)
leg = plt.legend(loc=1, frameon=False, fontsize=fs)
ax = plt.subplot2grid((8, 4), (0, 2), rowspan=8, colspan=2)
plt.xlim(0, 0.5)
plt.ylim(-17, 21)
plt.plot(f, H_w_db, 'k', label='$|H_w(f)|$', lw=2)
plt.plot(f, Pxx_db, 'r', label='$P_{xx}(f)$', lw=2)
plt.plot(f, Pii_db, 'b', label='$P_{\\tilde{x}\\tilde{x}}(f)$', lw=2)
ax.set_xlabel('${\\rm Frecuencia\;normalizada}$', fontsize=fs)
leg = plt.legend(loc=1, frameon=False, fontsize=fs)
ax.set_yticklabels([])
plt.savefig('kalman_filter_steady_state_example.pdf', bbox_inches='tight')
plt.show()
|
[
"bor9net@gmail.com"
] |
bor9net@gmail.com
|
586dd622119475aefed5f1cc9ae7c15634b326da
|
0f147c8e301b36dd33ff15a93557b8758d3aabf0
|
/book_examles/11_10 GRAPH.py
|
b0d71a1168bd794727366c057872919c71e5e18a
|
[] |
no_license
|
maxsagitov/hello-world
|
ec786344e89ba19a66ea14ad68fec5cebb1ff297
|
33c59968181bbca3ed96172b8fa8c3e8ea254e6b
|
refs/heads/master
| 2021-01-10T07:29:29.219304
| 2018-07-18T14:26:59
| 2018-07-18T14:26:59
| 51,152,290
| 0
| 0
| null | 2016-02-06T07:50:59
| 2016-02-05T14:47:28
| null |
UTF-8
|
Python
| false
| false
| 782
|
py
|
import tkinter
import random
import math
canvas_size = 500
npoints = 5
colors = ["red","orange","yellow","green","blue","cyan","magenta","light blue"]
root=tkinter.Tk()
canv = tkinter.Canvas(root, width=canvas_size, height=canvas_size, bg="white")
canv.pack()
def draw():
x = []
y = []
for i in range(npoints):
x.append(random.randint(0, canvas_size))
y.append(random.randint(0, canvas_size))
canv.delete("all")
for i in range(npoints):
for j in range(i + 1, npoints):
color = random.choice(colors)
canv.create_line(x[i], y[i], x[j], y[j], fill = color, width = 2)
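# Each call to draw() picks npoints random points and connects every pair of
# them, i.e. it draws the complete graph on npoints vertices:
# npoints * (npoints - 1) / 2 = 10 coloured segments for npoints = 5.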
button = tkinter.Button(root, text='draw', width=30, command=draw)
button.pack()
|
[
"maxsagitov@gmail.com"
] |
maxsagitov@gmail.com
|
0d26f422ee7d0c456dc605cbf5d8db0d626ba2a3
|
b5dfd4f11558d8a42b59701bbfd459d8162d21cc
|
/online_messanger/clientui.py
|
4e46f5764cb4d2ab90c0e6f6cef9eae825d5439b
|
[] |
no_license
|
Makillaa/Messanger_PY
|
232c5a6f4b2b8665684d49493ff14dd99c23ddf7
|
f277a1d7d8bffea2f9ddf6150288dc1b58691d37
|
refs/heads/main
| 2023-04-23T18:27:05.845695
| 2021-05-11T21:57:19
| 2021-05-11T21:57:19
| 366,520,034
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,797
|
py
|
# Form implementation generated from reading ui file 'messanger.ui'
#
# Created by: PyQt6 UI code generator 6.0.3
#
# WARNING: Any manual changes made to this file will be lost when pyuic6 is
# run again. Do not edit this file unless you know what you are doing.
# The graphical part of the messenger, created in Qt and translated through the PyQt6 library into Python code
from PyQt6 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
    def setupUi(self, MainWindow):  # Set up the user interface (what the user will see)
MainWindow.setObjectName("MainWindow")
MainWindow.setWindowModality(QtCore.Qt.WindowModality.NonModal)
MainWindow.resize(355, 592)
MainWindow.setMinimumSize(QtCore.QSize(355, 592))
MainWindow.setMaximumSize(QtCore.QSize(355, 592))
MainWindow.setLayoutDirection(QtCore.Qt.LayoutDirection.LeftToRight)
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.pushButton = QtWidgets.QPushButton(self.centralwidget)
self.pushButton.setGeometry(QtCore.QRect(298, 515, 51, 51))
self.pushButton.setObjectName("pushButton")
self.textEdit = QtWidgets.QTextEdit(self.centralwidget)
self.textEdit.setGeometry(QtCore.QRect(10, 499, 281, 81))
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Policy.Expanding,
QtWidgets.QSizePolicy.Policy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.textEdit.sizePolicy().hasHeightForWidth())
self.textEdit.setSizePolicy(sizePolicy)
self.textEdit.setMinimumSize(QtCore.QSize(1, 41))
self.textEdit.setMaximumSize(QtCore.QSize(391, 120))
font = QtGui.QFont()
font.setPointSize(10)
self.textEdit.setFont(font)
self.textEdit.setObjectName("textEdit")
self.textBrowser = QtWidgets.QTextBrowser(self.centralwidget)
self.textBrowser.setGeometry(QtCore.QRect(10, 130, 338, 351))
self.textBrowser.setObjectName("textBrowser")
self.lineEdit = QtWidgets.QLineEdit(self.centralwidget)
self.lineEdit.setGeometry(QtCore.QRect(207, 82, 141, 31))
font = QtGui.QFont()
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
self.lineEdit.setFont(font)
self.lineEdit.setText("")
self.lineEdit.setObjectName("lineEdit")
self.label = QtWidgets.QLabel(self.centralwidget)
self.label.setGeometry(QtCore.QRect(142, 85, 61, 21))
font = QtGui.QFont()
font.setPointSize(12)
self.label.setFont(font)
self.label.setObjectName("label")
self.label_2 = QtWidgets.QLabel(self.centralwidget)
self.label_2.setGeometry(QtCore.QRect(123, 16, 161, 51))
font = QtGui.QFont()
font.setPointSize(20)
font.setBold(True)
font.setUnderline(True)
font.setWeight(75)
self.label_2.setFont(font)
self.label_2.setObjectName("label_2")
MainWindow.setCentralWidget(self.centralwidget)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow): # Setting text
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "BASKAGE"))
self.pushButton.setText(_translate("MainWindow", ">"))
self.textEdit.setPlaceholderText(_translate("MainWindow", "Write a message..."))
self.label.setText(_translate("MainWindow", "Name:"))
self.label_2.setText(_translate("MainWindow", "BASKAGE"))
|
[
"1999gmo@gmail.com"
] |
1999gmo@gmail.com
|
9359e762b8b25c861b32337ae9f6b139862987da
|
55c250525bd7198ac905b1f2f86d16a44f73e03a
|
/Python/PyBox/pybox2d-android/examples/.svn/text-base/pyglet_framework.py.svn-base
|
94e88482c339765bee83db8a62e627517c221639
|
[] |
no_license
|
NateWeiler/Resources
|
213d18ba86f7cc9d845741b8571b9e2c2c6be916
|
bd4a8a82a3e83a381c97d19e5df42cbababfc66c
|
refs/heads/master
| 2023-09-03T17:50:31.937137
| 2023-08-28T23:50:57
| 2023-08-28T23:50:57
| 267,368,545
| 2
| 1
| null | 2022-09-08T15:20:18
| 2020-05-27T16:18:17
| null |
UTF-8
|
Python
| false
| false
| 21,827
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# C++ version Copyright (c) 2006-2007 Erin Catto http://www.box2d.org
# Python version Copyright (c) 2010 kne / sirkne at gmail dot com
#
# This software is provided 'as-is', without any express or implied
# warranty. In no event will the authors be held liable for any damages
# arising from the use of this software.
# Permission is granted to anyone to use this software for any purpose,
# including commercial applications, and to alter it and redistribute it
# freely, subject to the following restrictions:
# 1. The origin of this software must not be misrepresented; you must not
# claim that you wrote the original software. If you use this software
# in a product, an acknowledgment in the product documentation would be
# appreciated but is not required.
# 2. Altered source versions must be plainly marked as such, and must not be
# misrepresented as being the original software.
# 3. This notice may not be removed or altered from any source distribution.
"""
Global Keys:
Space - shoot projectile
Z/X - zoom
Escape - quit
Other keys can be set by the individual test.
Mouse:
Left click - select/drag body (creates mouse joint)
Right click - pan
Shift+Left - drag to create a directional projectile
Scroll - zoom
You can easily add your own tests based on test_empty.
"""
import pyglet
import framework
from framework import *
from pyglet import gl
import string
import math
class grBlended (pyglet.graphics.Group):
"""
This pyglet rendering group enables blending.
"""
def set_state(self):
gl.glEnable(gl.GL_BLEND)
gl.glBlendFunc(gl.GL_SRC_ALPHA, gl.GL_ONE_MINUS_SRC_ALPHA)
def unset_state(self):
gl.glDisable(gl.GL_BLEND)
class grPointSize (pyglet.graphics.Group):
"""
This pyglet rendering group sets a specific point size.
"""
def __init__(self, size=4.0):
super(grPointSize, self).__init__()
self.size = size
def set_state(self):
gl.glPointSize(self.size)
def unset_state(self):
gl.glPointSize(1.0)
class grText(pyglet.graphics.Group):
"""
This pyglet rendering group sets the proper projection for
displaying text when used.
"""
window = None
def __init__(self, window=None):
super(grText, self).__init__()
self.window = window
def set_state(self):
gl.glMatrixMode(gl.GL_PROJECTION)
gl.glPushMatrix()
gl.glLoadIdentity()
gl.gluOrtho2D(0, self.window.width, 0, self.window.height)
gl.glMatrixMode(gl.GL_MODELVIEW)
gl.glPushMatrix()
gl.glLoadIdentity()
def unset_state(self):
gl.glPopMatrix()
gl.glMatrixMode(gl.GL_PROJECTION)
gl.glPopMatrix()
gl.glMatrixMode(gl.GL_MODELVIEW)
class PygletDraw(b2Draw):
"""
This debug draw class accepts callbacks from Box2D (which specifies what to draw)
and handles all of the rendering.
If you are writing your own game, you likely will not want to use debug drawing.
Debug drawing, as its name implies, is for debugging.
"""
blended = grBlended()
circle_segments = 16
surface = None
circle_cache_tf = {} # triangle fan (inside)
circle_cache_ll = {} # line loop (border)
def __init__(self, test):
super(PygletDraw, self).__init__()
self.test=test
def StartDraw(self): pass
def EndDraw(self): pass
def triangle_fan(self, vertices):
"""
in: vertices arranged for gl_triangle_fan ((x,y),(x,y)...)
out: vertices arranged for gl_triangles (x,y,x,y,x,y...)
"""
out = []
for i in range(1, len(vertices)-1):
# 0,1,2 0,2,3 0,3,4 ..
out.extend( vertices[0 ] )
out.extend( vertices[i ] )
out.extend( vertices[i+1] )
return len(out) / 2, out
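    # Example (illustrative): for the quad [(0,0), (1,0), (1,1), (0,1)] this
    # produces the triangles (v0,v1,v2) and (v0,v2,v3), returned flattened as
    # (6, [0,0, 1,0, 1,1,  0,0, 1,1, 0,1]).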
def line_loop(self, vertices):
"""
in: vertices arranged for gl_line_loop ((x,y),(x,y)...)
out: vertices arranged for gl_lines (x,y,x,y,x,y...)
"""
out = []
for i in range(0, len(vertices)-1):
# 0,1 1,2 2,3 ... len-1,len len,0
out.extend( vertices[i ] )
out.extend( vertices[i+1] )
out.extend( vertices[len(vertices)-1] )
out.extend( vertices[0] )
return len(out)/2, out
def _getLLCircleVertices(self, radius, points):
"""
Get the line loop-style vertices for a given circle.
Drawn as lines.
"Line Loop" is used as that's how the C++ code draws the
vertices, with lines going around the circumference of the
circle (GL_LINE_LOOP).
This returns 'points' amount of lines approximating the
border of a circle.
(x1, y1, x2, y2, x3, y3, ...)
"""
ret = []
step = 2*math.pi/points
n = 0
for i in range(0, points):
ret.append( (math.cos(n) * radius, math.sin(n) * radius ) )
n += step
ret.append( (math.cos(n) * radius, math.sin(n) * radius ) )
return ret
def _getTFCircleVertices(self, radius, points):
"""
Get the triangle fan-style vertices for a given circle.
Drawn as triangles.
"Triangle Fan" is used as that's how the C++ code draws the
vertices, with triangles originating at the center of the
circle, extending around to approximate a filled circle
(GL_TRIANGLE_FAN).
This returns 'points' amount of lines approximating the
circle.
(a1, b1, c1, a2, b2, c2, ...)
"""
ret = []
step = 2*math.pi/points
n = 0
for i in range(0, points):
ret.append( (0.0, 0.0) )
ret.append( (math.cos(n) * radius, math.sin(n) * radius ) )
n += step
ret.append( (math.cos(n) * radius, math.sin(n) * radius ) )
return ret
def getCircleVertices(self, center, radius, points):
"""
Returns the triangles that approximate the circle and
the lines that border the circles edges, given
(center, radius, points).
Caches the calculated LL/TF vertices, but recalculates
based on the center passed in.
TODO: Currently, there's only one point amount,
so the circle cache ignores it when storing. Could cause
some confusion if you're using multiple point counts as
only the first stored point-count for that radius will
show up.
TODO: What does the previous TODO mean?
Returns: (tf_vertices, ll_vertices)
"""
if radius not in self.circle_cache_tf:
self.circle_cache_tf[radius]=self._getTFCircleVertices(radius,points)
self.circle_cache_ll[radius]=self._getLLCircleVertices(radius,points)
ret_tf, ret_ll = [], []
for x, y in self.circle_cache_tf[radius]:
ret_tf.extend( (x+center[0], y+center[1]) )
for x, y in self.circle_cache_ll[radius]:
ret_ll.extend( (x+center[0], y+center[1]) )
return ret_tf, ret_ll
def DrawCircle(self, center, radius, color):
"""
Draw an unfilled circle given center, radius and color.
"""
unused, ll_vertices = self.getCircleVertices( center, radius, self.circle_segments)
ll_count = len(ll_vertices)/2
self.batch.add(ll_count, gl.GL_LINES, None,
('v2f', ll_vertices),
('c4f', [color.r, color.g, color.b, 1.0] * (ll_count)))
def DrawSolidCircle(self, center, radius, axis, color):
"""
        Draw a filled circle given center, radius, axis (of orientation) and color.
"""
tf_vertices, ll_vertices = self.getCircleVertices( center, radius, self.circle_segments)
tf_count, ll_count = len(tf_vertices) / 2, len(ll_vertices) / 2
self.batch.add(tf_count, gl.GL_TRIANGLES, self.blended,
('v2f', tf_vertices),
('c4f', [0.5 * color.r, 0.5 * color.g, 0.5 * color.b, 0.5] * (tf_count)))
self.batch.add(ll_count, gl.GL_LINES, None,
('v2f', ll_vertices),
('c4f', [color.r, color.g, color.b, 1.0] * (ll_count)))
p = b2Vec2(center) + radius * b2Vec2(axis)
self.batch.add(2, gl.GL_LINES, None,
('v2f', (center[0], center[1], p[0], p[1])),
('c3f', [1.0, 0.0, 0.0] * 2))
def DrawPolygon(self, vertices, color):
"""
Draw a wireframe polygon given the world vertices (tuples) with the specified color.
"""
if len(vertices)==2:
p1, p2=vertices
self.batch.add(2, gl.GL_LINES, None,
('v2f', (p1[0], p1[1], p2[0], p2[1])),
('c3f', [color.r, color.g, color.b]*2))
else:
ll_count, ll_vertices = self.line_loop(vertices)
self.batch.add(ll_count, gl.GL_LINES, None,
('v2f', ll_vertices),
('c4f', [color.r, color.g, color.b, 1.0] * (ll_count)))
def DrawSolidPolygon(self, vertices, color):
"""
Draw a filled polygon given the world vertices (tuples) with the specified color.
"""
if len(vertices)==2:
p1, p2=vertices
self.batch.add(2, gl.GL_LINES, None,
('v2f', (p1[0], p1[1], p2[0], p2[1])),
('c3f', [color.r, color.g, color.b]*2))
else:
tf_count, tf_vertices = self.triangle_fan(vertices)
if tf_count==0:
return
self.batch.add(tf_count, gl.GL_TRIANGLES, self.blended,
('v2f', tf_vertices),
('c4f', [0.5 * color.r, 0.5 * color.g, 0.5 * color.b, 0.5] * (tf_count)))
ll_count, ll_vertices = self.line_loop(vertices)
self.batch.add(ll_count, gl.GL_LINES, None,
('v2f', ll_vertices),
('c4f', [color.r, color.g, color.b, 1.0] * (ll_count)))
def DrawSegment(self, p1, p2, color):
"""
Draw the line segment from p1-p2 with the specified color.
"""
self.batch.add(2, gl.GL_LINES, None,
('v2f', (p1[0], p1[1], p2[0], p2[1])),
('c3f', [color.r, color.g, color.b]*2))
def DrawXForm(self, xf):
"""
Draw the transform xf on the screen
"""
p1 = xf.position
k_axisScale = 0.4
p2 = p1 + k_axisScale * xf.R.col1
p3 = p1 + k_axisScale * xf.R.col2
self.batch.add(3, gl.GL_LINES, None,
('v2f', (p1[0], p1[1], p2[0], p2[1], p1[0], p1[1], p3[0], p3[1])),
('c3f', [1.0, 0.0, 0.0] * 2 + [0.0, 1.0, 0.0] * 2))
def DrawPoint(self, p, size, color):
"""
Draw a single point at point p given a point size and color.
"""
self.batch.add(1, gl.GL_POINTS, grPointSize(size),
('v2f', (p[0], p[1])),
('c3f', [color.r, color.g, color.b]))
def DrawAABB(self, aabb, color):
"""
Draw a wireframe around the AABB with the given color.
"""
        self.batch.add(8, gl.GL_LINES, None,
                ('v2f', (aabb.lowerBound.x, aabb.lowerBound.y, aabb.upperBound.x, aabb.lowerBound.y,
                    aabb.upperBound.x, aabb.lowerBound.y, aabb.upperBound.x, aabb.upperBound.y,
                    aabb.upperBound.x, aabb.upperBound.y, aabb.lowerBound.x, aabb.upperBound.y,
                    aabb.lowerBound.x, aabb.upperBound.y, aabb.lowerBound.x, aabb.lowerBound.y)),
                ('c3f', [color.r, color.g, color.b] * 8))
def to_screen(self, point):
"""
In here for compatibility with other frameworks.
"""
return tuple(point)
class PygletWindow(pyglet.window.Window):
def __init__(self, test):
super(PygletWindow, self).__init__()
self.test=test
def on_close(self):
"""
Callback: user tried to close the window
"""
pyglet.clock.unschedule(self.test.SimulationLoop)
super(PygletWindow, self).on_close()
def on_show(self):
"""
Callback: the window was shown.
"""
self.test.updateProjection()
def on_key_press(self, key, modifiers):
self.test._Keyboard_Event(key, down=True)
def on_key_release(self, key, modifiers):
self.test._Keyboard_Event(key, down=False)
def on_mouse_press(self, x, y, button, modifiers):
p = self.test.ConvertScreenToWorld(x, y)
self.test.mouseWorld = p
if button == pyglet.window.mouse.LEFT:
if modifiers & pyglet.window.key.MOD_SHIFT:
self.test.ShiftMouseDown( p )
else:
self.test.MouseDown( p )
elif button == pyglet.window.mouse.MIDDLE:
pass
def on_mouse_release(self, x, y, button, modifiers):
"""
Mouse up
"""
p = self.test.ConvertScreenToWorld(x, y)
self.test.mouseWorld = p
if button == pyglet.window.mouse.LEFT:
self.test.MouseUp(p)
def on_mouse_scroll(self, x, y, scroll_x, scroll_y):
"""
Mouse scrollwheel used
"""
if scroll_y < 0:
self.test.viewZoom *= 1.1
elif scroll_y > 0:
self.test.viewZoom /= 1.1
def on_mouse_drag(self, x, y, dx, dy, buttons, modifiers):
"""
Mouse moved while clicking
"""
p = self.test.ConvertScreenToWorld(x, y)
self.test.mouseWorld = p
self.test.MouseMove(p)
if buttons & pyglet.window.mouse.RIGHT:
self.test.viewCenter -= (float(dx)/5, float(dy)/5)
class PygletFramework(FrameworkBase):
def setup_keys(self):
key=pyglet.window.key
self.keys=key.KeyStateHandler()
# Only basic keys are mapped for now: K_[a-z0-9], K_F[1-12] and K_COMMA.
for letter in string.uppercase:
setattr(Keys, 'K_'+letter.lower(), getattr(key, letter))
for i in range(0,10):
setattr(Keys, 'K_%d'%i, getattr(key, '_%d' % i))
for i in range(1,13):
setattr(Keys, 'K_F%d'%i, getattr(key, 'F%d' % i))
Keys.K_LEFT=key.LEFT
Keys.K_RIGHT=key.RIGHT
Keys.K_UP=key.UP
Keys.K_DOWN=key.DOWN
Keys.K_HOME=key.HOME
Keys.K_PAGEUP=key.PAGEUP
Keys.K_PAGEDOWN=key.PAGEDOWN
Keys.K_COMMA=key.COMMA
def __reset(self):
# Screen/rendering-related
self._viewZoom = 10.0
self._viewCenter = None
self._viewOffset = None
self.screenSize = None
self.rMouseDown = False
self.textLine = 30
self.font = None
self.fps = 0
# Window-related
self.fontname = "Arial"
self.fontsize = 10
self.font = None
self.textGroup = None
# Screen-related
self._viewZoom = 1.0
self._viewCenter = None
self.screenSize = None
self.textLine = 30
self.font = None
self.fps = 0
self.setup_keys()
def __init__(self):
super(PygletFramework, self).__init__()
if fwSettings.onlyInit: # testing mode doesn't initialize Pyglet
return
print('Initializing Pyglet framework...')
self.__reset()
self.window=PygletWindow(self)
# Initialize the text display group
self.textGroup = grText(self.window)
# Load the font and record the screen dimensions
self.font = pyglet.font.load(self.fontname, self.fontsize)
self.screenSize = b2Vec2(self.window.width, self.window.height)
self.renderer = PygletDraw(self)
self.renderer.surface = self.window.screen
self.world.renderer=self.renderer
self._viewCenter = b2Vec2(0,10.0)
self.groundbody = self.world.CreateBody()
def setCenter(self, value):
"""
Updates the view offset based on the center of the screen.
Tells the debug draw to update its values also.
"""
self._viewCenter = b2Vec2( *value )
self.updateProjection()
def setZoom(self, zoom):
self._viewZoom = zoom
self.updateProjection()
viewZoom = property(lambda self: self._viewZoom, setZoom,
doc='Zoom factor for the display')
viewCenter = property(lambda self: self._viewCenter, setCenter,
doc='Screen center in camera coordinates')
def updateProjection(self):
"""
Recalculates the necessary projection.
"""
gl.glViewport(0, 0, self.window.width, self.window.height)
gl.glMatrixMode(gl.GL_PROJECTION)
gl.glLoadIdentity()
ratio = float(self.window.width) / self.window.height
extents = b2Vec2(ratio * 25.0, 25.0)
extents *= self._viewZoom
lower = self._viewCenter - extents
upper = self._viewCenter + extents
# L/R/B/T
gl.gluOrtho2D(lower.x, upper.x, lower.y, upper.y)
gl.glMatrixMode(gl.GL_MODELVIEW)
gl.glLoadIdentity()
def run(self):
"""
Main loop.
"""
if self.settings.hz > 0.0:
pyglet.clock.schedule_interval(self.SimulationLoop, 1.0 / self.settings.hz)
#self.window.push_handlers(pyglet.window.event.WindowEventLogger())
self.window._enable_event_queue=False # TODO: figure out why this is required
pyglet.app.run()
self.world.contactListener = None
self.world.destructionListener=None
self.world.renderer=None
def SimulationLoop(self, dt):
"""
The main simulation loop. Don't override this, override Step instead.
And be sure to call super(classname, self).Step(settings) at the end
of your Step function.
"""
# Check the input and clear the screen
self.CheckKeys()
self.window.clear()
# Update the keyboard status
self.window.push_handlers(self.keys)
# Create a new batch for drawing
self.renderer.batch = pyglet.graphics.Batch()
# Reset the text position
self.textLine=15
# Draw the title of the test at the top
self.Print(self.name)
# Step the physics
self.Step(self.settings)
self.renderer.batch.draw()
self.window.invalid = True
self.fps = pyglet.clock.get_fps()
def _Keyboard_Event(self, key, down=True):
"""
Internal keyboard event, don't override this.
Checks for the initial keydown of the basic testbed keys. Passes the unused
ones onto the test via the Keyboard() function.
"""
if down:
if key==pyglet.window.key.ESCAPE:
exit(0)
elif key==pyglet.window.key.SPACE:
# Launch a bomb
self.LaunchRandomBomb()
elif key==Keys.K_z:
# Zoom in
self.viewZoom = min(1.1 * self.viewZoom, 20.0)
elif key==Keys.K_x:
# Zoom out
self.viewZoom = max(0.9 * self.viewZoom, 0.02)
else:
# Inform the test of the key press
self.Keyboard(key)
else:
self.KeyboardUp(key)
def CheckKeys(self):
"""
Check the keys that are evaluated on every main loop iteration.
I.e., they aren't just evaluated when first pressed down
"""
keys=self.keys
if keys[Keys.K_LEFT]:
self.viewCenter -= (0.5, 0)
elif keys[Keys.K_RIGHT]:
self.viewCenter += (0.5, 0)
if keys[Keys.K_UP]:
self.viewCenter += (0, 0.5)
elif keys[Keys.K_DOWN]:
self.viewCenter -= (0, 0.5)
if keys[Keys.K_HOME]:
self.viewZoom = 1.0
self.viewCenter = (0.0, 20.0)
#def Step(self, settings):
# super(PygletFramework, self).Step(settings)
def ConvertScreenToWorld(self, x, y):
"""
Takes screen (x, y) and returns
world coordinate b2Vec2(x,y).
"""
u = float(x) / self.window.width
v = float(y) / self.window.height
ratio = float(self.window.width) / self.window.height
extents = b2Vec2(ratio * 25.0, 25.0)
extents *= self._viewZoom
lower = self._viewCenter - extents
upper = self._viewCenter + extents
p = b2Vec2(
(1.0 - u) * lower.x + u * upper.x,
(1.0 - v) * lower.y + v * upper.y )
return p
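    # Note: u and v interpolate linearly between `lower` and `upper`, so the
    # centre pixel (width/2, height/2) always maps back to self._viewCenter,
    # independent of the current zoom factor.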
def DrawStringAt(self, x, y, str, color=(229,153,153,255)):
"""
Draw some text, str, at screen coordinates (x, y).
"""
text = pyglet.text.Label(str, font_name=self.fontname, font_size=self.fontsize,
x=x, y=self.window.height-y, color=color, batch=self.renderer.batch, group=self.textGroup)
def Print(self, str, color=(229,153,153,255)):
"""
        Draw some text, str, at the current text line near the top of the screen.
"""
text = pyglet.text.Label(str, font_name=self.fontname, font_size=self.fontsize,
x=5, y=self.window.height-self.textLine, color=color, batch=self.renderer.batch, group=self.textGroup)
self.textLine += 15
def Keyboard(self, key):
"""
Callback indicating 'key' has been pressed down.
"""
pass
def KeyboardUp(self, key):
"""
Callback indicating 'key' has been released.
See Keyboard() for key information
"""
pass
|
[
"nateweiler84@gmail.com"
] |
nateweiler84@gmail.com
|
|
b2e0391d750efe19f614deb8c2bd1631da82841d
|
5916383e8d3df886edd20ac00ce9706a78078f56
|
/飞机大战/v2/world.py
|
9e05cd9b131661fae9882e44e040079213137409
|
[] |
no_license
|
sczhan/wode
|
556154e8ccaa9192ea257bc88df3c5e4b268f88e
|
af4c721d0cedfdd2fe01dd681539724d1d64c378
|
refs/heads/master
| 2021-07-06T22:26:34.465708
| 2020-09-04T18:56:38
| 2020-09-04T18:56:38
| 181,295,279
| 1
| 0
| null | 2019-09-09T16:30:00
| 2019-04-14T10:53:57
|
Python
|
UTF-8
|
Python
| false
| false
| 1,656
|
py
|
import tkinter
"""
The bee moves from the top of the window toward the bottom
and can be steered left and right with the keyboard.
"""
step = 0  # step counter: how many animation steps have been taken
direction = (1, 1)
x = 0
y = 10
def set_right(e):
"""
:param e:
:return:
"""
global x
x += 20
def set_left(e):
"""
:param e:
:return:
"""
global x
x -= 20
root_window = tkinter.Tk()
root_window.title("world")
root_window.bind("<Key-Left>", set_left)
root_window.bind("<Key-Right>", set_right)
# Disable resizing of the window (width and height are fixed)
root_window.resizable(width=False, height=False)
window_canvas = tkinter.Canvas(root_window, width=450, height=600)
window_canvas.pack()
def main():
    # Create the start screen
bg_img_name = "../img/background.gif"
bg_img = tkinter.PhotoImage(file=bg_img_name)
    # The tags argument lets us refer to the created image later through its tag
window_canvas.create_image(480/2, 600/2, anchor=tkinter.CENTER, image=bg_img, tags="bg")
    # Draw a little bee
bee = "../img/bee.gif"
bee_img = tkinter.PhotoImage(file=bee)
window_canvas.create_image(150, 180/2, anchor=tkinter.CENTER, image=bee_img, tags="bee")
sp = "../img/smallplane.gif"
sp_img = tkinter.PhotoImage(file=sp)
window_canvas.create_image(50, 100/2, anchor=tkinter.CENTER, image=sp_img, tags="sp")
    # Start the small plane moving
ap_move()
tkinter.mainloop()
def ap_move():
"""
:return:
"""
global step
global x
global y
y += 20
print(x, y)
window_canvas.move("sp", x, y)
window_canvas.move("bee", x, y)
step += 1
window_canvas.after(1000, ap_move)
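# ap_move() reschedules itself via after(1000, ...), giving a one-second
# animation tick; note that canvas.move() applies a *relative* offset, so the
# steadily growing (x, y) values make the sprites cover more ground each tick.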
if __name__ == "__main__":
main()
|
[
"5481493@qq.com"
] |
5481493@qq.com
|
f5e26bb9443dd48bac9db4319fda8080bc02b761
|
1e4f1aa147a02dd47b8865d04ed46171cf312a26
|
/historical/historical-toy2/netIO/ServerSimpleLoop/net_common.py
|
99866561de7c21e6f41c07e8207d4e85e9853967
|
[] |
no_license
|
ziliangpeng/naoshima
|
d71ae8abbcd36c96da93c1460d38fe9135b4430c
|
d5b2648d95022e3810bfc9a25dadf8fe7cba5a17
|
refs/heads/master
| 2023-09-06T06:17:03.569270
| 2023-09-06T05:29:06
| 2023-09-06T05:29:06
| 100,778,986
| 1
| 0
| null | 2022-11-22T06:28:46
| 2017-08-19T07:47:31
|
Python
|
UTF-8
|
Python
| false
| false
| 292
|
py
|
def handle_rev_str(conn):
l = ord(recv_n(conn, 1))
s = recv_n(conn, l)
s = s[::-1]
send_all(conn, s)
conn.close()
def send_all(s, content):
s.sendall(content)
def recv_n(s, n):
ret = ''
while len(ret) < n:
ret += s.recv(n - len(ret))
return ret
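# recv_n loops because a single socket.recv() call may return fewer bytes than
# requested; the loop keeps reading until exactly n bytes have been collected.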
|
[
"ziliangdotme@gmail.com"
] |
ziliangdotme@gmail.com
|
6f636b5072e7d57d722bcf8845eabfe6746a93a9
|
81f2cd08a11f6be0d11a2664001491329957b200
|
/pyscf/pbc/df/mdf.py
|
afdf3d5c06d62899305f92b0be06cb20c77f5436
|
[
"Apache-2.0"
] |
permissive
|
crisely09/pyscf
|
18b564556b249bafab24e1c7d08fdf0a57dfcf0a
|
cb92f7974bd9c87c0ef5b2b52abf5d3219b3d6b6
|
refs/heads/master
| 2021-07-10T01:54:45.698418
| 2019-11-27T22:49:43
| 2019-11-27T22:49:43
| 224,692,664
| 0
| 0
|
Apache-2.0
| 2019-11-28T16:32:10
| 2019-11-28T16:32:09
| null |
UTF-8
|
Python
| false
| false
| 17,494
|
py
|
#!/usr/bin/env python
# Copyright 2014-2018 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
'''
Gaussian and planewaves mixed density fitting
Ref:
J. Chem. Phys. 147, 164119 (2017)
'''
import os
import time
import tempfile
import numpy
import h5py
import scipy.linalg
from pyscf import lib
from pyscf.lib import logger
from pyscf.df.outcore import _guess_shell_ranges
from pyscf.pbc import tools
from pyscf.pbc import gto
from pyscf.pbc.df import outcore
from pyscf.pbc.df import ft_ao
from pyscf.pbc.df import df
from pyscf.pbc.df import aft
from pyscf.pbc.df.df import fuse_auxcell, _round_off_to_odd_mesh
from pyscf.pbc.df.df_jk import zdotNN, zdotCN, zdotNC
from pyscf.pbc.lib.kpts_helper import (is_zero, gamma_point, member, unique,
KPT_DIFF_TOL)
from pyscf.pbc.df import mdf_jk
from pyscf.pbc.df import mdf_ao2mo
from pyscf import __config__
# kpti == kptj: s2 symmetry
# kpti == kptj == 0 (gamma point): real
def _make_j3c(mydf, cell, auxcell, kptij_lst, cderi_file):
t1 = (time.clock(), time.time())
log = logger.Logger(mydf.stdout, mydf.verbose)
max_memory = max(2000, mydf.max_memory-lib.current_memory()[0])
fused_cell, fuse = fuse_auxcell(mydf, auxcell)
# Create swap file to avoid huge cderi_file. see also function
# pyscf.pbc.df.df._make_j3c
swapfile = tempfile.NamedTemporaryFile(dir=os.path.dirname(cderi_file))
fswap = lib.H5TmpFile(swapfile.name)
# Unlink swapfile to avoid trash
swapfile = None
outcore._aux_e2(cell, fused_cell, fswap, 'int3c2e', aosym='s2',
kptij_lst=kptij_lst, dataname='j3c-junk', max_memory=max_memory)
t1 = log.timer_debug1('3c2e', *t1)
nao = cell.nao_nr()
naux = auxcell.nao_nr()
mesh = mydf.mesh
Gv, Gvbase, kws = cell.get_Gv_weights(mesh)
b = cell.reciprocal_vectors()
gxyz = lib.cartesian_prod([numpy.arange(len(x)) for x in Gvbase])
ngrids = gxyz.shape[0]
kptis = kptij_lst[:,0]
kptjs = kptij_lst[:,1]
kpt_ji = kptjs - kptis
uniq_kpts, uniq_index, uniq_inverse = unique(kpt_ji)
log.debug('Num uniq kpts %d', len(uniq_kpts))
log.debug2('uniq_kpts %s', uniq_kpts)
# j2c ~ (-kpt_ji | kpt_ji)
j2c = fused_cell.pbc_intor('int2c2e', hermi=1, kpts=uniq_kpts)
for k, kpt in enumerate(uniq_kpts):
aoaux = ft_ao.ft_ao(fused_cell, Gv, None, b, gxyz, Gvbase, kpt).T
aoaux = fuse(aoaux)
coulG = mydf.weighted_coulG(kpt, False, mesh)
LkR = numpy.asarray(aoaux.real, order='C')
LkI = numpy.asarray(aoaux.imag, order='C')
j2c_k = fuse(fuse(j2c[k]).T).T.copy()
if is_zero(kpt): # kpti == kptj
j2c_k -= lib.dot(LkR*coulG, LkR.T)
j2c_k -= lib.dot(LkI*coulG, LkI.T)
else:
# aoaux ~ kpt_ij, aoaux.conj() ~ kpt_kl
j2cR, j2cI = zdotCN(LkR*coulG, LkI*coulG, LkR.T, LkI.T)
j2c_k -= j2cR + j2cI * 1j
fswap['j2c/%d'%k] = j2c_k
aoaux = LkR = LkI = j2cR = j2cI = coulG = None
j2c = None
def cholesky_decomposed_metric(uniq_kptji_id):
j2c = numpy.asarray(fswap['j2c/%d'%uniq_kptji_id])
j2c_negative = None
        # Note: large differences may be found in results between the CD/eig treatments.
        # In some systems, small integral errors can lead to different treatments of
        # linear dependency, which can be observed in the total energy/orbital energies
        # around the 4th decimal place.
# try:
# j2c = scipy.linalg.cholesky(j2c, lower=True)
# j2ctag = 'CD'
# except scipy.linalg.LinAlgError as e:
#
        # Abandon CD treatment for better numerical stability
w, v = scipy.linalg.eigh(j2c)
log.debug('MDF metric for kpt %s cond = %.4g, drop %d bfns',
uniq_kptji_id, w[-1]/w[0], numpy.count_nonzero(w<mydf.linear_dep_threshold))
v1 = v[:,w>mydf.linear_dep_threshold].T.conj()
v1 /= numpy.sqrt(w[w>mydf.linear_dep_threshold]).reshape(-1,1)
j2c = v1
if cell.dimension == 2 and cell.low_dim_ft_type != 'inf_vacuum':
idx = numpy.where(w < -mydf.linear_dep_threshold)[0]
if len(idx) > 0:
j2c_negative = (v[:,idx]/numpy.sqrt(-w[idx])).conj().T
j2ctag = 'eig'
return j2c, j2c_negative, j2ctag
feri = h5py.File(cderi_file, 'a')
feri['j3c-kptij'] = kptij_lst
nsegs = len(fswap['j3c-junk/0'])
def make_kpt(uniq_kptji_id, cholesky_j2c): # kpt = kptj - kpti
kpt = uniq_kpts[uniq_kptji_id]
log.debug1('kpt = %s', kpt)
adapted_ji_idx = numpy.where(uniq_inverse == uniq_kptji_id)[0]
adapted_kptjs = kptjs[adapted_ji_idx]
nkptj = len(adapted_kptjs)
log.debug1('adapted_ji_idx = %s', adapted_ji_idx)
j2c, j2c_negative, j2ctag = cholesky_j2c
Gaux = ft_ao.ft_ao(fused_cell, Gv, None, b, gxyz, Gvbase, kpt).T
Gaux = fuse(Gaux)
Gaux *= mydf.weighted_coulG(kpt, False, mesh)
kLR = Gaux.T.real.copy('C')
kLI = Gaux.T.imag.copy('C')
if is_zero(kpt): # kpti == kptj
aosym = 's2'
nao_pair = nao*(nao+1)//2
if cell.dimension == 3:
vbar = fuse(mydf.auxbar(fused_cell))
ovlp = cell.pbc_intor('int1e_ovlp', hermi=1, kpts=adapted_kptjs)
ovlp = [lib.pack_tril(s) for s in ovlp]
else:
aosym = 's1'
nao_pair = nao**2
mem_now = lib.current_memory()[0]
log.debug2('memory = %s', mem_now)
max_memory = max(2000, mydf.max_memory-mem_now)
# nkptj for 3c-coulomb arrays plus 1 Lpq array
buflen = min(max(int(max_memory*.38e6/16/naux/(nkptj+1)), 1), nao_pair)
shranges = _guess_shell_ranges(cell, buflen, aosym)
buflen = max([x[2] for x in shranges])
# +1 for a pqkbuf
if aosym == 's2':
Gblksize = max(16, int(max_memory*.1e6/16/buflen/(nkptj+1)))
else:
Gblksize = max(16, int(max_memory*.2e6/16/buflen/(nkptj+1)))
Gblksize = min(Gblksize, ngrids, 16384)
pqkRbuf = numpy.empty(buflen*Gblksize)
pqkIbuf = numpy.empty(buflen*Gblksize)
# buf for ft_aopair
buf = numpy.empty((nkptj,buflen*Gblksize), dtype=numpy.complex128)
def pw_contract(istep, sh_range, j3cR, j3cI):
bstart, bend, ncol = sh_range
if aosym == 's2':
shls_slice = (bstart, bend, 0, bend)
else:
shls_slice = (bstart, bend, 0, cell.nbas)
for p0, p1 in lib.prange(0, ngrids, Gblksize):
dat = ft_ao._ft_aopair_kpts(cell, Gv[p0:p1], shls_slice, aosym,
b, gxyz[p0:p1], Gvbase, kpt,
adapted_kptjs, out=buf)
nG = p1 - p0
for k, ji in enumerate(adapted_ji_idx):
aoao = dat[k].reshape(nG,ncol)
pqkR = numpy.ndarray((ncol,nG), buffer=pqkRbuf)
pqkI = numpy.ndarray((ncol,nG), buffer=pqkIbuf)
pqkR[:] = aoao.real.T
pqkI[:] = aoao.imag.T
lib.dot(kLR[p0:p1].T, pqkR.T, -1, j3cR[k], 1)
lib.dot(kLI[p0:p1].T, pqkI.T, -1, j3cR[k], 1)
if not (is_zero(kpt) and gamma_point(adapted_kptjs[k])):
lib.dot(kLR[p0:p1].T, pqkI.T, -1, j3cI[k], 1)
lib.dot(kLI[p0:p1].T, pqkR.T, 1, j3cI[k], 1)
for k, ji in enumerate(adapted_ji_idx):
if is_zero(kpt) and gamma_point(adapted_kptjs[k]):
v = j3cR[k]
else:
v = j3cR[k] + j3cI[k] * 1j
if j2ctag == 'CD':
v = scipy.linalg.solve_triangular(j2c, v, lower=True, overwrite_b=True)
feri['j3c/%d/%d'%(ji,istep)] = v
else:
feri['j3c/%d/%d'%(ji,istep)] = lib.dot(j2c, v)
# low-dimension systems
if j2c_negative is not None:
feri['j3c-/%d/%d'%(ji,istep)] = lib.dot(j2c_negative, v)
with lib.call_in_background(pw_contract) as compute:
col1 = 0
for istep, sh_range in enumerate(shranges):
log.debug1('int3c2e [%d/%d], AO [%d:%d], ncol = %d', \
istep+1, len(shranges), *sh_range)
bstart, bend, ncol = sh_range
col0, col1 = col1, col1+ncol
j3cR = []
j3cI = []
for k, idx in enumerate(adapted_ji_idx):
v = [fswap['j3c-junk/%d/%d'%(idx,i)][0,col0:col1].T for i in range(nsegs)]
v = fuse(numpy.vstack(v))
if is_zero(kpt) and cell.dimension == 3:
for i in numpy.where(vbar != 0)[0]:
v[i] -= vbar[i] * ovlp[k][col0:col1]
j3cR.append(numpy.asarray(v.real, order='C'))
if is_zero(kpt) and gamma_point(adapted_kptjs[k]):
j3cI.append(None)
else:
j3cI.append(numpy.asarray(v.imag, order='C'))
v = None
compute(istep, sh_range, j3cR, j3cI)
for ji in adapted_ji_idx:
del(fswap['j3c-junk/%d'%ji])
# Wrapped around boundary and symmetry between k and -k can be used
# explicitly for the metric integrals. We consider this symmetry
# because it is used in the df_ao2mo module when contracting two 3-index
# integral tensors to the 4-index 2e integral tensor. If the symmetry
# related k-points are treated separately, the resultant 3-index tensors
    # may have inconsistent dimension due to the numerical noise when handling
# linear dependency of j2c.
def conj_j2c(cholesky_j2c):
j2c, j2c_negative, j2ctag = cholesky_j2c
if j2c_negative is None:
return j2c.conj(), None, j2ctag
else:
return j2c.conj(), j2c_negative.conj(), j2ctag
a = cell.lattice_vectors() / (2*numpy.pi)
def kconserve_indices(kpt):
'''search which (kpts+kpt) satisfies momentum conservation'''
kdif = numpy.einsum('wx,ix->wi', a, uniq_kpts + kpt)
kdif_int = numpy.rint(kdif)
mask = numpy.einsum('wi->i', abs(kdif - kdif_int)) < KPT_DIFF_TOL
uniq_kptji_ids = numpy.where(mask)[0]
return uniq_kptji_ids
done = numpy.zeros(len(uniq_kpts), dtype=bool)
for k, kpt in enumerate(uniq_kpts):
if done[k]:
continue
log.debug1('Cholesky decomposition for j2c at kpt %s', k)
cholesky_j2c = cholesky_decomposed_metric(k)
# The k-point k' which has (k - k') * a = 2n pi. Metric integrals have the
# symmetry S = S
uniq_kptji_ids = kconserve_indices(-kpt)
log.debug1("Symmetry pattern (k - %s)*a= 2n pi", kpt)
log.debug1(" make_kpt for uniq_kptji_ids %s", uniq_kptji_ids)
for uniq_kptji_id in uniq_kptji_ids:
if not done[uniq_kptji_id]:
make_kpt(uniq_kptji_id, cholesky_j2c)
done[uniq_kptji_ids] = True
# The k-point k' which has (k + k') * a = 2n pi. Metric integrals have the
# symmetry S = S*
uniq_kptji_ids = kconserve_indices(kpt)
log.debug1("Symmetry pattern (k + %s)*a= 2n pi", kpt)
log.debug1(" make_kpt for %s", uniq_kptji_ids)
cholesky_j2c = conj_j2c(cholesky_j2c)
for uniq_kptji_id in uniq_kptji_ids:
if not done[uniq_kptji_id]:
make_kpt(uniq_kptji_id, cholesky_j2c)
done[uniq_kptji_ids] = True
feri.close()
# valence_exp = 1. are typically the Gaussians in the valence
VALENCE_EXP = getattr(__config__, 'pbc_df_mdf_valence_exp', 1.0)
def _mesh_for_valence(cell, valence_exp=VALENCE_EXP):
'''Energy cutoff estimation'''
precision = cell.precision * 10
Ecut_max = 0
for i in range(cell.nbas):
l = cell.bas_angular(i)
es = cell.bas_exp(i).copy()
es[es>valence_exp] = valence_exp
cs = abs(cell.bas_ctr_coeff(i)).max(axis=1)
ke_guess = gto.cell._estimate_ke_cutoff(es, l, cs, precision)
Ecut_max = max(Ecut_max, ke_guess.max())
mesh = tools.cutoff_to_mesh(cell.lattice_vectors(), Ecut_max)
mesh = numpy.min((mesh, cell.mesh), axis=0)
if cell.dimension < 2 or cell.low_dim_ft_type == 'inf_vacuum':
mesh[cell.dimension:] = cell.mesh[cell.dimension:]
return _round_off_to_odd_mesh(mesh)
del(VALENCE_EXP)
class MDF(df.DF):
'''Gaussian and planewaves mixed density fitting
'''
def __init__(self, cell, kpts=numpy.zeros((1,3))):
self.cell = cell
self.stdout = cell.stdout
self.verbose = cell.verbose
self.max_memory = cell.max_memory
self.kpts = kpts # default is gamma point
self.kpts_band = None
self._auxbasis = None
self.mesh = _mesh_for_valence(cell)
# In MDF, fitting PWs (self.mesh), and parameters eta and exp_to_discard
# are related to each other. The compensated function does not need to
# be very smooth. It just needs to be expanded by the specified PWs
# (self.mesh). self.eta is estimated on the fly based on the value of
# self.mesh.
self.eta = None
# Any functions which are more diffused than the compensated Gaussian
# are linearly dependent to the PWs. They can be removed from the
# auxiliary set without affecting the accuracy of MDF. exp_to_discard
# can be set to the value of self.eta
self.exp_to_discard = None
# The following attributes are not input options.
self.exxdiv = None # to mimic KRHF/KUHF object in function get_coulG
self.auxcell = None
self.blockdim = getattr(__config__, 'df_df_DF_blockdim', 240)
self.linear_dep_threshold = df.LINEAR_DEP_THR
self._j_only = False
# If _cderi_to_save is specified, the 3C-integral tensor will be saved in this file.
self._cderi_to_save = tempfile.NamedTemporaryFile(dir=lib.param.TMPDIR)
# If _cderi is specified, the 3C-integral tensor will be read from this file
self._cderi = None
self._keys = set(self.__dict__.keys())
@property
def eta(self):
if self._eta is not None:
return self._eta
else:
cell = self.cell
if cell.dimension == 0:
return 0.2
ke_cutoff = tools.mesh_to_cutoff(cell.lattice_vectors(), self.mesh)
ke_cutoff = ke_cutoff[:cell.dimension].min()
return aft.estimate_eta_for_ke_cutoff(cell, ke_cutoff, cell.precision)
@eta.setter
def eta(self, x):
self._eta = x
@property
def exp_to_discard(self):
if self._exp_to_discard is not None:
return self._exp_to_discard
else:
return self.eta
@exp_to_discard.setter
def exp_to_discard(self, x):
self._exp_to_discard = x
_make_j3c = _make_j3c
# Note: Special exxdiv by default should not be used for an arbitrary
# input density matrix. When the df object was used with the molecular
# post-HF code, get_jk was often called with an incomplete DM (e.g. the
# core DM in CASCI). An SCF level exxdiv treatment is inadequate for
# post-HF methods.
def get_jk(self, dm, hermi=1, kpts=None, kpts_band=None,
with_j=True, with_k=True, exxdiv=None):
if kpts is None:
if numpy.all(self.kpts == 0):
# Gamma-point calculation by default
kpts = numpy.zeros(3)
else:
kpts = self.kpts
kpts = numpy.asarray(kpts)
if kpts.shape == (3,):
return mdf_jk.get_jk(self, dm, hermi, kpts, kpts_band, with_j,
with_k, exxdiv)
vj = vk = None
if with_k:
vk = mdf_jk.get_k_kpts(self, dm, hermi, kpts, kpts_band, exxdiv)
if with_j:
vj = mdf_jk.get_j_kpts(self, dm, hermi, kpts, kpts_band)
return vj, vk
get_eri = get_ao_eri = mdf_ao2mo.get_eri
ao2mo = get_mo_eri = mdf_ao2mo.general
ao2mo_7d = mdf_ao2mo.ao2mo_7d
def update_mp(self):
pass
def update_cc(self):
pass
def update(self):
pass
################################################################################
# With this function to mimic the molecular DF.loop function, the pbc gamma
# point DF object can be used in the molecular code
def loop(self, blksize=None):
for dat in aft.AFTDF.loop(self, blksize):
yield dat
for dat in df.DF.loop(self, blksize):
yield dat
def get_naoaux(self):
return df.DF.get_naoaux(self) + aft.AFTDF.get_naoaux(self)
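# ---------------------------------------------------------------------------
# Minimal usage sketch added for illustration; it is not part of the original
# module. The helium cell, basis and k-mesh below are hypothetical example
# inputs, and attaching a prebuilt MDF object through `mf.with_df` is one
# common pyscf pattern rather than something prescribed by this file.
if __name__ == '__main__':
    from pyscf.pbc import gto as pbcgto
    from pyscf.pbc import scf as pbcscf
    cell = pbcgto.M(atom='He 0 0 1', a=numpy.eye(3) * 4.0, basis='sto3g')
    kpts = cell.make_kpts([2, 1, 1])
    mydf = MDF(cell, kpts=kpts)      # Gaussian + planewave mixed density fitting
    mf = pbcscf.KRHF(cell, kpts)
    mf.with_df = mydf                # use the MDF object for the J/K builds
    mf.kernel()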
|
[
"osirpt.sun@gmail.com"
] |
osirpt.sun@gmail.com
|
e45a01330d9e90fa76dea147d9fc060e42d10c77
|
9044b440bed2b8407ed9e04f7fb9d3d2a7593136
|
/vision/classification/slim/image_models/finetune/train.py
|
b15420b6bf71de14a447e1b40980949e6c95830b
|
[] |
no_license
|
xuzhezhaozhao/ai
|
d4264f5d15cc5fa514e81adb06eb83731a0ca818
|
925cbd31ad79f8827e2c3c706f4b51910f9f85d1
|
refs/heads/master
| 2022-01-22T07:04:29.082590
| 2022-01-17T06:49:39
| 2022-01-17T06:49:39
| 136,691,051
| 5
| 4
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,638
|
py
|
#! /usr/bin/env python
# -*- coding=utf8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
import input_data
import hook
import build_model_fn
def build_estimator(opts):
"""Build estimator."""
num_samples_per_epoch = len(input_data.read_txt_file(
opts.train_data_path, False))
save_checkpoints_secs = None
if opts.save_checkpoints_secs > 0:
save_checkpoints_secs = opts.save_checkpoints_secs
save_checkpoints_steps = None
if opts.save_checkpoints_steps > 0 and opts.save_checkpoints_epoches > 0:
raise ValueError("save_checkpoints_steps and save_checkpoints_epoches "
"should not be both set.")
if opts.save_checkpoints_steps > 0:
save_checkpoints_steps = opts.save_checkpoints_steps
if opts.save_checkpoints_epoches > 0:
save_checkpoints_steps = int(opts.save_checkpoints_epoches *
num_samples_per_epoch / opts.batch_size)
config_keys = {}
config_keys['model_dir'] = opts.model_dir
config_keys['tf_random_seed'] = None
config_keys['save_summary_steps'] = opts.save_summary_steps
config_keys['save_checkpoints_secs'] = save_checkpoints_secs
config_keys['save_checkpoints_steps'] = save_checkpoints_steps
config_keys['session_config'] = None
config_keys['keep_checkpoint_max'] = opts.keep_checkpoint_max
config_keys['keep_checkpoint_every_n_hours'] = 10000
config_keys['log_step_count_steps'] = opts.log_step_count_steps
estimator_keys = {}
estimator_keys['model_fn'] = build_model_fn.model_fn
estimator_keys['params'] = {
'opts': opts,
'num_samples_per_epoch': num_samples_per_epoch
}
config = tf.estimator.RunConfig(**config_keys)
estimator_keys['config'] = config
estimator = tf.estimator.Estimator(**estimator_keys)
return estimator
def create_hooks(opts):
"""Create profile hooks."""
save_steps = opts.profile_steps
meta_hook = hook.MetadataHook(save_steps=save_steps,
output_dir=opts.model_dir)
profile_hook = tf.train.ProfilerHook(save_steps=save_steps,
output_dir=opts.model_dir,
show_dataflow=True,
show_memory=True)
hooks = [meta_hook, profile_hook] if opts.use_profile_hook else []
return hooks
def train_and_eval_in_local_mode(opts, estimator, hooks):
"""Train and eval model in lcoal mode."""
build_train_input_fn = input_data.build_train_input_fn(
opts, opts.train_data_path)
build_eval_input_fn = input_data.build_eval_input_fn(
opts, opts.eval_data_path)
num_samples_per_epoch = len(
input_data.read_txt_file(opts.train_data_path, False))
num_steps_per_epoch = num_samples_per_epoch / opts.batch_size
if opts.max_train_steps > 0:
max_steps = opts.max_train_steps
else:
max_steps = opts.epoch*num_steps_per_epoch
tf.logging.info('max_steps = {}'.format(max_steps))
max_steps_without_decrease = int(
opts.max_epoches_without_decrease*num_steps_per_epoch)
early_stopping_min_steps = int(
opts.early_stopping_min_epoches*num_steps_per_epoch)
run_every_steps = int(
opts.early_stopping_run_every_epoches*num_steps_per_epoch)
early_stopping_hook = tf.contrib.estimator.stop_if_no_decrease_hook(
estimator, "loss",
max_steps_without_decrease=max_steps_without_decrease,
run_every_secs=None,
min_steps=early_stopping_min_steps,
run_every_steps=run_every_steps)
hooks.append(early_stopping_hook)
train_spec = tf.estimator.TrainSpec(
input_fn=build_train_input_fn,
max_steps=max_steps,
hooks=hooks)
eval_spec = tf.estimator.EvalSpec(
input_fn=build_eval_input_fn,
steps=None,
name='eval',
start_delay_secs=3,
throttle_secs=opts.throttle_secs)
result = tf.estimator.train_and_evaluate(estimator, train_spec, eval_spec)
return result
def export_model_in_local_mode(opts, estimator):
"""Export model in local mode."""
# export model
tf.logging.info("Beginning export model ...")
estimator.export_savedmodel(
opts.export_model_dir,
serving_input_receiver_fn=input_data.build_serving_input_fn(opts))
tf.logging.info("Export model OK")
def train(opts, export=False):
"""Train model."""
estimator = build_estimator(opts)
hooks = create_hooks(opts)
result = train_and_eval_in_local_mode(opts, estimator, hooks)
if export:
export_model_in_local_mode(opts, estimator)
return result
def predict(opts):
tf.logging.info("Begin predict ...")
estimator = build_estimator(opts)
build_predict_input_fn = input_data.build_predict_input_fn(
opts, opts.predict_data_path)
checkpoint_path = opts.predict_checkpoint_path
if tf.gfile.IsDirectory(opts.predict_checkpoint_path):
checkpoint_path = tf.train.latest_checkpoint(checkpoint_path)
results = estimator.predict(
input_fn=build_predict_input_fn,
checkpoint_path=checkpoint_path,
yield_single_examples=True)
with open(opts.predict_output, 'w') as fout, \
open(opts.predict_data_path, 'r') as fin:
for result in results:
src = fin.readline().strip()
fout.write(src + ' ')
fout.write(str(result['score'][1]) + '\n')
tf.logging.info("Predict done")
|
[
"zhezhaoxu@tencent.com"
] |
zhezhaoxu@tencent.com
|
422c8872c99d21c97cee55dbcd14f7f7444269bc
|
8e3faa9ffc472918a368baf08baab0fdda818aa7
|
/numpy/basic.py
|
3e02c6ae932f4dae0f25316f711d743d0042af0f
|
[] |
no_license
|
seasea4/test
|
beb74151046ba2da4204103eb47645fdcba42c65
|
0f3b1d6c5a6a5e8eefce24fc92ed825eb8d2e043
|
refs/heads/master
| 2021-08-28T10:52:38.441418
| 2017-12-12T01:54:56
| 2017-12-12T01:54:56
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 205
|
py
|
import numpy as np
a = np.array([[1,2,3],[4,5,6],[7,8,9]])
print(a)
print(a.flags)
print("dimension" , a.ndim)
print("要素数", a.size)
print("shape", a.shape)
print("要素のデータ型", a.dtype)
|
[
"platong@ocean.local"
] |
platong@ocean.local
|
ff0b74594ba1af28c3b2e0dc7d13cc69e7044588
|
247c7e157b08d14fb64f69fc1cde6c0da6dc3e24
|
/pytrace/reader/trace_walker.py
|
9ad35f38fb6e142e87da200e2fed539f49fc8eaf
|
[] |
no_license
|
cloax/pytrace
|
6212722231668e0caa31bf97167836f2c74afc68
|
fc9f2057c8a50e95cb80ece3ef28029639bdd82d
|
refs/heads/master
| 2021-01-15T22:52:46.629506
| 2012-08-16T08:07:16
| 2012-08-16T08:07:16
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,885
|
py
|
import time
import urwid
from .tables import DB
TIME_FORMAT = "%Y/%m/%d %H:%M:%S"
def prettify(trace):
time_str = time.strftime(TIME_FORMAT + ",{:.6f}".format(trace.time - int(trace.time)),
time.localtime(trace.time))
func_prefix = (trace.depth + 3) * ' ' + ('--> ' if trace.type == 'call' else ' <-- ')
args = sum([[('name', arg.name.value),
' = ',
('type', arg.type.value),
': ',
('value', arg.value.value),
(', ')] for arg in trace.args], [])
if args:
args.pop()
return urwid.Columns([('fixed', 28, urwid.Text(('time', time_str))),
('fixed', 10, urwid.Text(('tid', str(trace.tid)))),
('fixed', 24, urwid.Text(('module', trace.func.module.value), wrap='clip', align='right')),
urwid.Text([func_prefix, ('func', trace.func.name), '('] + args + [')'], wrap='clip')],
dividechars=1)
class TraceWalker(object):
CACHE_SIZE = 500 # records
def __init__(self, prepare_cb=lambda x: x):
self.prepare_cb = prepare_cb
self._filter = None
self.db = DB()
self.refresh_length()
self._fetch(0, self.CACHE_SIZE)
self.end_index = min(self.CACHE_SIZE, len(self))
def set_prepare_callback(self, prepare_cb):
self.prepare_cb = prepare_cb
self.cache = map(self.prepare_cb, self.cache)
def set_filter(self, filter=None):
self._filter = filter
self.refresh_length()
self._fetch(0, self.CACHE_SIZE)
def refresh_length(self):
self.length = self.db.count(self._filter)
def _prepare(self, trace):
if trace.type == 'overflow':
return urwid.Columns([urwid.Text('Traces lost. Consider excluding hot modules or functions.')])
return self.prepare_cb(prettify(trace))
def _fetch(self, start, end):
self.cache = map(self._prepare, self.db.find(start, end, self._filter))
self.start_index = start
self.end_index = end
def __len__(self):
return self.length
def __getitem__(self, i):
if not (self.start_index <= i < self.end_index):
if i == self.end_index:
end = min(self.end_index + self.CACHE_SIZE / 2, self.length)
start = max(end - self.CACHE_SIZE, 0)
self.cache = self.cache[-(self.CACHE_SIZE / 2):] + map(self._prepare, self.db.find(start + self.CACHE_SIZE / 2, end, self._filter))
self.start_index = start
self.end_index = end
else:
self._fetch(max(0, i - (self.CACHE_SIZE / 2)),
min(self.length, i + (self.CACHE_SIZE / 2)))
return self.cache[i - self.start_index]
|
[
"alonho@gmail.com"
] |
alonho@gmail.com
|
4dc4f06dcd7437e04f8a1ff2544e56c38996a789
|
43f5b8676f7666164a21be0a00aef6b93fb32c82
|
/Load Balancer/Balanceador/pack_analysis.py
|
b47e4bd8be6f53184d0b610f92c385f7aa0998f5
|
[] |
no_license
|
ericrm24/showcase
|
efe5d22ffc681286990f8e3303941470d454d633
|
f1c9682a944c2e2012bf6ca768b23f1d575dfa8c
|
refs/heads/master
| 2023-03-18T08:36:59.124840
| 2021-03-07T18:38:23
| 2021-03-07T18:38:23
| 345,256,029
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,776
|
py
|
# Taken from http://www.bitforestinfo.com/2017/01/how-to-write-simple-packet-sniffer.html
import socket
import struct
import binascii
class unpack:
    def __init__(self):
self.data=None
# Ethernet Header
def eth_header(self, data):
storeobj=data
storeobj=struct.unpack("!6s6sH",storeobj)
destination_mac=binascii.hexlify(storeobj[0])
source_mac=binascii.hexlify(storeobj[1])
eth_protocol=storeobj[2]
data={"Destination Mac":destination_mac,
"Source Mac":source_mac,
"Protocol":eth_protocol}
return data
# ICMP HEADER Extraction
def icmp_header(self, data):
icmph=struct.unpack('!BBH', data)
icmp_type = icmph[0]
code = icmph[1]
checksum = icmph[2]
data={'ICMP Type':icmp_type,
"Code":code,
"CheckSum":checksum}
return data
# UDP Header Extraction
def udp_header(self, data):
storeobj=struct.unpack('!HHHH', data)
source_port = storeobj[0]
dest_port = storeobj[1]
length = storeobj[2]
checksum = storeobj[3]
data={"Source Port":source_port,
"Destination Port":dest_port,
"Length":length,
"CheckSum":checksum}
return data
# IP Header Extraction
def ip_header(self, data):
storeobj=struct.unpack("!BBHHHBBH4s4s", data)
_version=storeobj[0]
_tos=storeobj[1]
_total_length =storeobj[2]
_identification =storeobj[3]
_fragment_Offset =storeobj[4]
_ttl =storeobj[5]
_protocol =storeobj[6]
_header_checksum =storeobj[7]
_source_address =socket.inet_ntoa(storeobj[8])
_destination_address =socket.inet_ntoa(storeobj[9])
data={'Version':_version,
"Tos":_tos,
"Total Length":_total_length,
"Identification":_identification,
"Fragment":_fragment_Offset,
"TTL":_ttl,
"Protocol":_protocol,
"Header CheckSum":_header_checksum,
"Source Address":_source_address,
"Destination Address":_destination_address}
return data
# Tcp Header Extraction
def tcp_header(self, data):
storeobj=struct.unpack('!HHLLBBHHH',data)
_source_port =storeobj[0]
_destination_port =storeobj[1]
_sequence_number =storeobj[2]
_acknowledge_number =storeobj[3]
_offset_reserved =storeobj[4]
_tcp_flag =storeobj[5]
_window =storeobj[6]
_checksum =storeobj[7]
_urgent_pointer =storeobj[8]
data={"Source Port":_source_port,
"Destination Port":_destination_port,
"Sequence Number":_sequence_number,
"Acknowledge Number":_acknowledge_number,
"Offset & Reserved":_offset_reserved,
"Tcp Flag":_tcp_flag,
"Window":_window,
"CheckSum":_checksum,
"Urgent Pointer":_urgent_pointer
}
return data
# Mac Address Formatting
def mac_formater(a):
b = "%.2x:%.2x:%.2x:%.2x:%.2x:%.2x" % (ord(a[0]), ord(a[1]), ord(a[2]), ord(a[3]), ord(a[4]) , ord(a[5]))
return b
# Get Host Address
def get_host(q):
try:
k=socket.gethostbyaddr(q)
except:
k='Unknown'
return k
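# Illustrative usage sketch added for context; it is not part of the original
# file. Raw packet sockets are Linux-only and need root privileges, so this is
# only a hedged example of how the unpack helpers above could be driven.
if __name__ == '__main__':
    sniffer = socket.socket(socket.AF_PACKET, socket.SOCK_RAW, socket.ntohs(0x0003))
    frame, _ = sniffer.recvfrom(65535)
    headers = unpack()
    print(headers.eth_header(frame[0:14]))    # first 14 bytes: Ethernet header
    print(headers.ip_header(frame[14:34]))    # next 20 bytes: IPv4 header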
|
[
"eric.rios@ucr.ac.cr"
] |
eric.rios@ucr.ac.cr
|
d398a866182a6242b7f52dfb076bc0db0fe74ef4
|
534aa8162cb988278eea3f41ce37c9bd36dfbb45
|
/mysite/urls.py
|
69ba143f5627b2bf90d1e2806c034d010e628131
|
[] |
no_license
|
Artemchik98/cook_book_vs_9_30
|
ac5de23a6f58276dbbcf707524e7bd23da6e6969
|
c9a5abe6f5b07439d72a4f943d1b920959ce2f74
|
refs/heads/master
| 2023-06-01T19:12:52.613553
| 2021-06-20T08:28:56
| 2021-06-20T08:28:56
| 356,616,025
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 951
|
py
|
"""mysite URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path,include
from django.conf.urls.static import static
from django.conf import settings
urlpatterns = [
path('admin/', admin.site.urls),
path('',include('blog.urls',namespace='blog')),
]+static( settings.MEDIA_URL, document_root = settings.MEDIA_ROOT )
|
[
"artem.svistelnik@gmail.com"
] |
artem.svistelnik@gmail.com
|
cf895f73715b3636d625dbe313c3f6691fe2b0e4
|
ef3d00b8665e75c57ac4c3c933835a8237b3b979
|
/1.py
|
547bd98fc48a6563832902da4fc314f324521263
|
[] |
no_license
|
andywei513/python_project_andy1
|
f12610f0da2913a09b4607659cbe558201f60d16
|
0ec1972affb13570ea9192fb3367c3d4c45cb78c
|
refs/heads/master
| 2020-03-08T03:42:08.229297
| 2018-04-04T10:26:04
| 2018-04-04T10:26:04
| 127,898,426
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 251
|
py
|
# -*- coding:utf-8 -*-
# python project for andy
# Author:'ahuang3'
# Time: 2018/4/3, 13:02
import numpy as np
import matplotlib.pyplot as plt
x = np.random.rand(1,10)
y = np.random.randn(1,10)
plt.plot(x)
plt.plot(y)
plt.show()
print (x)
#TODO
|
[
"andy.huang@morningstar.com"
] |
andy.huang@morningstar.com
|
27d7feffa6fc2ef4d11f58d87c17bf40d8a87a90
|
22b409e532df1ece6d509e9d7a3369f087038c0e
|
/service-3/tests/test_app.py
|
ef4b533b28d18823bb1863b46baaafd16870fd44
|
[] |
no_license
|
BMustafa97/Project2QAApp
|
d56096414a5734af0763a8209b82a0927f76b822
|
3714a1e9fb6606bbbb1a4d6680f42ff57c52930d
|
refs/heads/main
| 2023-03-14T04:16:02.267373
| 2021-03-07T20:00:04
| 2021-03-07T20:00:04
| 341,344,821
| 1
| 0
| null | 2021-03-02T11:29:06
| 2021-02-22T21:39:34
|
Python
|
UTF-8
|
Python
| false
| false
| 796
|
py
|
import sys
from app import app
import unittest
from flask import url_for
from flask_testing import TestCase
from unittest.mock import patch
class TestBase(TestCase):
def create_app(self):
app.config['SQLALCHEMY_DATABASE_URI'] ="sqlite:///testdb.sqlite"
return app
# Pass in configurations for test database
#class TestPages(TestBase):
# def test_home(self):
# response = self.client.get(url_for('slogans'))
# self.assertEqual(response.status_code, 200)
class TestCase(TestBase):
def test_get(self):
with patch("requests.get") as g:
g.return_value.text = "We're always on your mind."
response = self.client.get(url_for("sloganstest"))
self.assertIn(b"We're always on your mind.", response.data)
|
[
"bilalmustafa1797@gmail.com"
] |
bilalmustafa1797@gmail.com
|
8fdb4d9c7e1167bf5251de6dd6debabbebd1071f
|
09b42f77ffc0dfde5febc9d72d0df70cdebd2d56
|
/landlab/grid/tests/test_raster_funcs/test_is_on_grid.py
|
d0f2d7c9bed16095103cdc71c4adb1ad630e1730
|
[
"MIT"
] |
permissive
|
srblack72/landlab
|
44b26f85014c2a9993f2997386156978bf1fe7f6
|
55ccf7f899961b5bf495d1b077a699e2a97ebfb1
|
refs/heads/master
| 2021-01-16T21:31:00.699587
| 2014-05-22T20:34:47
| 2014-05-22T20:34:47
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,408
|
py
|
import numpy as np
from numpy.testing import assert_array_equal
from nose import with_setup
from nose.tools import (assert_is, assert_equal, assert_is_instance,
assert_raises)
from landlab.grid import raster_funcs as rfuncs
from landlab import RasterModelGrid
def test_with_arrays():
rmg = RasterModelGrid(4, 5, dx=2.0)
coords = (np.array([1., -1.]), np.array([1., -1.]))
assert_array_equal(rfuncs.is_coord_on_grid(rmg, coords),
np.array([True, False]))
def test_just_inside():
rmg = RasterModelGrid(4, 5, dx=2.0)
assert_equal(rfuncs.is_coord_on_grid(rmg, (0., 4.)), True)
assert_equal(rfuncs.is_coord_on_grid(rmg, (8. - 1e-12, 4.)), True)
assert_equal(rfuncs.is_coord_on_grid(rmg, (3., 0.)), True)
assert_equal(rfuncs.is_coord_on_grid(rmg, (3., 6. - 1e-12)), True)
def test_just_outside():
rmg = RasterModelGrid(4, 5, dx=2.0)
assert_equal(rfuncs.is_coord_on_grid(rmg, (0. - 1e-12, 4.)), False)
assert_equal(rfuncs.is_coord_on_grid(rmg, (8., 4.)), False)
assert_equal(rfuncs.is_coord_on_grid(rmg, (3., 0. - 1e-12)), False)
assert_equal(rfuncs.is_coord_on_grid(rmg, (3., 6.)), False)
def test_just_x():
rmg = RasterModelGrid(4, 5, dx=2.0)
assert_equal(rfuncs.is_coord_on_grid(rmg, (4., 1.e6), axes=(1, )), True)
assert_equal(rfuncs.is_coord_on_grid(rmg, (-1., 1.), axes=(1, )), False)
|
[
"huttone@093548ea-dc74-4ebb-8037-4be9f23db00a"
] |
huttone@093548ea-dc74-4ebb-8037-4be9f23db00a
|
b7710b0c72571ba7df963198d2e9e46d1fbcc2be
|
99aa48b929961a3e8ac238a1f51ce0b2499af2b7
|
/exercises/dominoes/example.py
|
031034f28ce3111e6846aae43160f6fd2ddd5c5b
|
[
"MIT",
"Python-2.0"
] |
permissive
|
JodieHaywood/python-6
|
b4dfbadcddd97d81e8bcb16b315395610f7fb8be
|
b71bfb6f82fb43d49752635d89eab843cb627e4c
|
refs/heads/master
| 2020-03-30T13:24:16.929668
| 2018-10-02T12:45:25
| 2018-10-02T12:45:25
| 151,271,387
| 1
| 0
|
MIT
| 2018-10-02T14:46:03
| 2018-10-02T14:46:02
| null |
UTF-8
|
Python
| false
| false
| 814
|
py
|
from itertools import permutations
from functools import reduce
def swap(a, b):
return (b, a)
def build_chain(chain, domino):
if chain is not None:
last = chain[-1]
if len(chain) == 1 and last[0] == domino[0]:
return [swap(*last), domino]
elif len(chain) == 1 and last[0] == domino[1]:
return [swap(*last), swap(*domino)]
elif last[1] == domino[0]:
return chain + [domino]
elif last[1] == domino[1]:
return chain + [swap(*domino)]
return None
def chain(dominoes):
if not any(dominoes):
return []
for perm in permutations(dominoes):
chain = reduce(build_chain, perm[1:], [perm[0]])
if chain is not None and chain[0][0] == chain[-1][1]:
return chain
return None
|
[
"nathan.parsons@warwick.ac.uk"
] |
nathan.parsons@warwick.ac.uk
|
944dd21d731631667b2b61b7df4bbb9c9272ea4d
|
f0d6efe035d4c2ed1ea6bb6d1d5a613b8630a025
|
/lib/jsonrpc/flexjsonrpc/__init__.py
|
53ece394443611d381a3d2a3a98aed5682669d8f
|
[
"BSD-2-Clause-Views",
"BSD-3-Clause"
] |
permissive
|
bemoss/BEMOSS3.5
|
d24c1c5587e5081092cc97250db45645363da4e4
|
75a09bc5d0a2ec0ae994ac900a93dc027b527860
|
refs/heads/master
| 2021-08-15T23:05:40.661118
| 2021-03-29T20:28:14
| 2021-03-29T20:28:14
| 91,000,462
| 81
| 38
|
NOASSERTION
| 2021-03-29T20:29:54
| 2017-05-11T16:25:43
|
Python
|
UTF-8
|
Python
| false
| false
| 2,921
|
py
|
# -*- coding: utf-8 -*- {{{
# vim: set fenc=utf-8 ft=python sw=4 ts=4 sts=4 et:
# Copyright (c) 2013, Battelle Memorial Institute
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation
# are those of the authors and should not be interpreted as representing
# official policies, either expressed or implied, of the FreeBSD
# Project.
#
# This material was prepared as an account of work sponsored by an
# agency of the United States Government. Neither the United States
# Government nor the United States Department of Energy, nor Battelle,
# nor any of their employees, nor any jurisdiction or organization that
# has cooperated in the development of these materials, makes any
# warranty, express or implied, or assumes any legal liability or
# responsibility for the accuracy, completeness, or usefulness or any
# information, apparatus, product, software, or process disclosed, or
# represents that its use would not infringe privately owned rights.
#
# Reference herein to any specific commercial product, process, or
# service by trade name, trademark, manufacturer, or otherwise does not
# necessarily constitute or imply its endorsement, recommendation, or
# favoring by the United States Government or any agency thereof, or
# Battelle Memorial Institute. The views and opinions of authors
# expressed herein do not necessarily state or reflect those of the
# United States Government or any agency thereof.
#
# PACIFIC NORTHWEST NATIONAL LABORATORY
# operated by BATTELLE for the UNITED STATES DEPARTMENT OF ENERGY
# under Contract DE-AC05-76RL01830
#}}}
from core import *
|
[
"aribemoss@gmail.com"
] |
aribemoss@gmail.com
|
b2595d9eccaf22427e7e16962a002d011843363f
|
c2df9e04adec78e789d1fbdb0711c45e5b9263a7
|
/venv/Lib/site-packages/matplotlib/tests/test_texmanager.py
|
d24f7dc27a562a23298a3978078f1dbbcabf9e93
|
[
"MIT",
"BSD-3-Clause"
] |
permissive
|
AdarshSai/Final_Project
|
433009a2f416e894ee3be85cd9317cb8e8df5516
|
f966834ca72dd232102ed500ef47ef2b3bdbed5b
|
refs/heads/main
| 2023-01-23T12:21:41.342074
| 2020-11-19T22:24:15
| 2020-11-19T22:24:15
| 308,898,012
| 0
| 1
|
MIT
| 2020-11-19T22:24:17
| 2020-10-31T14:19:58
|
Python
|
UTF-8
|
Python
| false
| false
| 475
|
py
|
import matplotlib.pyplot as plt
from matplotlib.texmanager import TexManager
def test_fontconfig_preamble():
"""
Test that the preamble is included in _fontconfig
"""
plt.rcParams['text.usetex'] = True
tm1 = TexManager()
font_config1 = tm1.get_font_config()
plt.rcParams['text.latex.preamble'] = '\\usepackage{txfonts}'
tm2 = TexManager()
font_config2 = tm2.get_font_config()
assert font_config1 != font_config2
|
[
"adarshsaig@gmail.com"
] |
adarshsaig@gmail.com
|
817789a1e9c9374b23fe9c891aab32d72d899bf4
|
6e846afa2c76c3430ed5d5cee3f331b704bc644a
|
/ex20.py
|
603f722df10fe90bd9f9dce33e08b22cc2120f9f
|
[] |
no_license
|
EmilyY123/learnPythonTheHardWay
|
486c0e55163c2666568bb6f721865e0f63174cc8
|
b8d88b0f28590a3194af9fa45fef0dbc796d70f5
|
refs/heads/master
| 2016-09-05T23:58:55.160329
| 2015-04-26T23:28:47
| 2015-04-26T23:28:47
| 33,806,702
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 889
|
py
|
## functions and files
from sys import argv
script, input_file = argv
def print_all(f):
print f.read()
def rewind(f):
f.seek(0)
def print_a_line(line_count,f):
print line_count, f.readline()
current_file = open(input_file)
print "First let's print the whole file:\n"
print_all(current_file)
print "Now let's rewind, kind of like a tape."
rewind(current_file)
## Without rewinding the file (seek(0) moves the pointer back to the beginning),
## the pointer stays at the end of the file, so the three print_a_line calls
## below would print nothing.
print "Let's print three lines:"
current_line = 1
print_a_line(current_line, current_file)
current_line = current_line + 1
print_a_line(current_line, current_file)
current_line = current_line + 1
print_a_line(current_line, current_file)
|
[
"leonfliu@gmail.com"
] |
leonfliu@gmail.com
|
6130db32bea26cc4fd7d4373d721ab934c68b46b
|
75eefd794c83d56f113a615d68d11c03f05375d2
|
/0x0F-python-object_relational_mapping/13-model_state_delete_a.py
|
82a2613f4284591ba9231fe4ad163087930d38dc
|
[] |
no_license
|
andresjjn/holbertonschool-higher_level_programming
|
681229f48742a58198682d84124afd0cb46d739d
|
623b1e016629c0d3e7770535792ef545343a5013
|
refs/heads/master
| 2022-12-19T02:19:33.921961
| 2020-09-25T00:18:22
| 2020-09-25T00:18:22
| 259,374,582
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 684
|
py
|
#!/usr/bin/python3
"""Script that lists all State objects from the database hbtn_0e_6_usa"""
import sys
from model_state import Base, State
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
if __name__ == "__main__":
engine = create_engine('mysql+mysqldb://{}:{}@localhost/{}'.format
(sys.argv[1], sys.argv[2], sys.argv[3]),
pool_pre_ping=True)
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine)
session = Session()
states = session.query(State).filter(State.name.contains('a'))\
.delete(synchronize_session=False)
session.commit()
session.close()
|
[
"andresjt93@gmail.com"
] |
andresjt93@gmail.com
|
de387f75b9153d81353f74324c32842675a55b8c
|
888e79392cb660be5799cc5bd25d76bcfa9e2e2c
|
/doctorus/doctorus/doctype/actividad/test_actividad.py
|
64e868e23ae78de54542b56cedaaeb515a1bd9a4
|
[
"MIT"
] |
permissive
|
Nirchains/doctorus
|
269eadee5754612c521d1c6193d5fe7bbfdb3b8a
|
38d39270742dfdae6597a06713952df01a2c3e9d
|
refs/heads/master
| 2020-03-17T07:09:30.046005
| 2019-05-08T06:51:50
| 2019-05-08T06:51:50
| 133,386,354
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 216
|
py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2018, HISPALIS DIGITAL and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
class TestActividad(unittest.TestCase):
pass
|
[
"nirchains@gmail.com"
] |
nirchains@gmail.com
|
866e74750ea247f7bbe24203f017bd1d800b882d
|
f4fe0733d433cb650869cc502b58a9a81d657306
|
/section03/02-reference.py
|
91a0e465d3b41ff3490436ceb127203c2ab2dcc8
|
[] |
no_license
|
jj-a/basicPython
|
c14bd1331bba1e0b3f2eee40053cbf15aab83dd2
|
50758d6faa95409d6a1d3f3cc183a142cc1a6312
|
refs/heads/master
| 2020-04-26T16:30:59.322970
| 2019-03-13T09:14:10
| 2019-03-13T09:14:10
| 173,681,661
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,006
|
py
|
# section03 / 02-reference.py
# Call by Value: integers, floats, booleans, strings
# Call by Object Reference: lists, tuples, dictionaries
# Call by Object Reference = Call by Sharing
# Call by Object Reference is NOT the same as Java's Call by Reference ***
a = 100
b = 200
print(a, b)
# Object references (list, dictionary, tuple)
foo = [1, 2, 3]
bar = foo
print(foo, bar)
foo = [4, 5, 6]
print(foo, bar)
bar = [7, 8, 9]
print(foo, bar)
# Mutating an object through one reference is visible through every other
# reference to the same object (lists, dictionaries)
# List copy, method 1: copy element by element
bar = [1, 2, 3]
cp1 = [0, 0, 0]
cp1[0] = bar[0]
cp1[1] = bar[1]
cp1[2] = bar[2]
print(bar, cp1)
cp1[2] = 1000
print(bar, cp1)
# To copy a list or dictionary object without affecting the original:
# slicing
cp2 = bar[:]
print(bar, cp2)
cp2[1] = 12345
print(bar, cp2)
# the copy() method
cp3 = bar.copy()
cp3[0] = 789
print(bar, cp3)  # the original is unaffected
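# Extra sketch added for illustration (not part of the original lesson):
# slicing and copy() make shallow copies, so nested lists still share their
# inner lists. copy.deepcopy() copies the nested structure as well.
import copy
nested = [[1, 2], [3, 4]]
shallow = nested[:]
deep = copy.deepcopy(nested)
nested[0][0] = 999
print(nested, shallow, deep)  # the shallow copy shows 999, the deep copy does not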
|
[
"jxxbom@gmail.com"
] |
jxxbom@gmail.com
|
c61e108e71fa3ecad09dfae82bc33ff67ac2e895
|
555d5a4ab56fcb5c7d1b87697266b33bf019f69b
|
/functions.py
|
66ba7fbe64944f5db6b9fb9d28601a25b26ac9e1
|
[] |
no_license
|
bencyn/python_level_one
|
beaa3db4353430ed2f96023f65f99265004e1175
|
bddcc9018eb56bda57500dcf7fe097663b4097db
|
refs/heads/master
| 2020-04-03T04:31:16.243095
| 2018-10-27T22:49:33
| 2018-10-27T22:49:33
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 370
|
py
|
def my_func(param1='default'):
"""
THIS IS THE DOCSTRING
asdf
asdf
asdf
"""
print("my first python function! {}".format(param1))
my_func(param1='bencyn')
# types
def addNum(num1,num2):
if type(num1)==type(num2)==type(10):
return num1+num2
else:
return "Sorry, I need integers!"
result = addNum("2",'3')
print(result)
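# Extra sketch added for illustration (not part of the course file): the same
# check written with isinstance(), which is the more idiomatic way to test types.
# (Note: isinstance(True, int) is True, so the check is illustrative only.)
def add_num_checked(num1, num2):
    if isinstance(num1, int) and isinstance(num2, int):
        return num1 + num2
    return "Sorry, I need integers!"
print(add_num_checked(2, 3))
print(add_num_checked("2", '3'))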
|
[
"bensonnjung39@gmail.com"
] |
bensonnjung39@gmail.com
|
3af1b9068b5b218404c88d47ba2e3c366b0626ec
|
fb333187da3b193f5f6d4920291b30049155ba90
|
/firebreak/game.py
|
6cdc24e8d86b89af87b42eeaa22fe2da3444b5ee
|
[] |
no_license
|
rseldon/firebreak
|
5549649b37187937d0f95d683027acf11a26113f
|
7fe0e011b28bd75e9b8343dc5a704766f5cf1ce1
|
refs/heads/master
| 2020-03-10T23:43:57.202754
| 2019-02-21T13:09:16
| 2019-03-02T02:47:29
| 129,647,002
| 0
| 0
| null | 2019-03-02T02:52:43
| 2018-04-15T20:16:27
|
Python
|
UTF-8
|
Python
| false
| false
| 2,181
|
py
|
#!/usr/bin/env python3
from collections import namedtuple
from enum import Enum
from random import shuffle
class Color(Enum):
RED = 91
GREEN = 92
BLUE = 93
YELLOW = 94
WHITE = 95
RAINBOW = 96
Card = namedtuple('Card', ['color', 'rank'])
DEFAULT_COLORS = {Color.RED, Color.BLUE, Color.GREEN, Color.YELLOW, Color.WHITE}
DEFAULT_COLOR_COMPOSITION = {
1: 3,
2: 2,
3: 2,
4: 2,
5: 1,
}
class Board:
def __init__(self, deck=None, max_clues=8, bombs=3):
self.deck = deck or Deck()
self.max_clues = max_clues
self.num_clues = max_clues
self.num_bombs = bombs
self.discards = {}
self.zones = {color: 0 for color in self.deck.colors}
def discard(self, card):
self.discards[card] = self.discards.get(card, 0) + 1
self.num_clues = min(self.max_clues, self.num_clues + 1)
def play_card(self, card):
if self.zones[card.color] == card.rank - 1:
self.zones[card.color] += 1
if card.rank == 5:
self.num_clues = min(self.max_clues, self.num_clues + 1)
else:
self.num_bombs -= 1
self.discards[card] = self.discards.get(card, 0) + 1
def give_clue(self):
assert self.num_clues > 0, "Tried to give a clue when there were no available clue tokens!"
self.num_clues -= 1
class Deck:
def __init__(self, colors=DEFAULT_COLORS, color_composition=DEFAULT_COLOR_COMPOSITION):
self.colors = colors
self.cards_remaining = 0
self.card_counts = {}
for color in self.colors:
for rank, count in color_composition.items():
self.card_counts[Card(color, rank)] = count
self.cards_remaining += count
self.__shuffled = [card_id for card_id, count in self.card_counts.items() for i in range(count)]
shuffle(self.__shuffled)
def draw_card(self):
assert self.cards_remaining > 0, "Tried to draw from an empty deck."
card_to_deal = self.__shuffled[self.cards_remaining - 1]
self.card_counts[card_to_deal] -= 1
self.cards_remaining -= 1
return card_to_deal
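# Illustrative usage sketch added for context; it is not part of the original
# module. It simply deals a hand from a fresh Deck and exercises the Board API.
if __name__ == '__main__':
    board = Board()
    hand = [board.deck.draw_card() for _ in range(5)]
    print('hand:', hand)
    board.play_card(hand[0])   # plays if it is a valid next rank, otherwise burns a bomb
    board.give_clue()          # spends one clue token
    print('clues:', board.num_clues, 'bombs:', board.num_bombs)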
|
[
"ryan.seldon@gmail.com"
] |
ryan.seldon@gmail.com
|
f6486ce6ff06211908b6be8003c1460abe1d9e0d
|
5055f9df409767783de33a020e3eb1a5c378e429
|
/looping/loop2.py
|
f74add30f4846df3bbc64bd5d65b6c52a142f851
|
[] |
no_license
|
andrewsokolovski/Python-learning
|
5c981886898039eb57e61f38cf892bc23644a586
|
b6e530a5bafe03e79222ebf0e41a7fe18fca4881
|
refs/heads/master
| 2023-07-08T20:19:06.073362
| 2021-08-19T09:17:03
| 2021-08-19T09:17:03
| 300,217,166
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 107
|
py
|
import turtle
turtle.shape('turtle')
for i in range(0, 5):
turtle.forward(100)
turtle.right(72)
|
[
"72194996+andrewsokolovski@users.noreply.github.com"
] |
72194996+andrewsokolovski@users.noreply.github.com
|
86e6438502f84c5aafa15800b1dd4a1b2feba977
|
9c4f4b4b7dd63d1d8c2eb4f2587e2a5bbae4bcdc
|
/tests/test_secuencia_valida.py
|
ff4b8932e6b63d79e3a64c6c727c7c0c4605b474
|
[] |
no_license
|
luciomondelli/CuatroEnLinea
|
806e72f00654269cc7e6b15a01bed6561c18cb16
|
a24b5fe40c5a583563d9c71d95e9cb1fb913e28c
|
refs/heads/main
| 2023-06-14T07:45:18.246115
| 2021-07-01T16:37:02
| 2021-07-01T16:37:02
| 358,373,072
| 0
| 0
| null | 2021-05-24T22:32:21
| 2021-04-15T19:39:41
|
Python
|
UTF-8
|
Python
| false
| false
| 148
|
py
|
from src.CuatroEnLinea import columnaValida
def test_columna_Valida():
secuencia = [1, 2, 6, 2, 5, 4, 2]
assert columnaValida(secuencia) == True
|
[
"luciomondelli123@gmail.com"
] |
luciomondelli123@gmail.com
|
c9eba986ef40c22a55a583905c95b5f619a319a1
|
c9558831ad86603f0efc27b25b05706984a3338e
|
/calculate(distance,no_of_passengers).py
|
9b9bf9f48af9851ad39efb1ff290d9bba457ab40
|
[] |
no_license
|
ygohil2350/infytq-Python_Fandamentals-
|
b7c3bcf3cfbfb9041fafe824454329d5ed3ecd96
|
82d69aecf479ea62139095f74cea56ed6f07989a
|
refs/heads/main
| 2023-03-26T04:19:26.405065
| 2021-03-19T08:01:27
| 2021-03-19T08:01:27
| 346,596,252
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 431
|
py
|
#lex_auth_012693816779112448160
def calculate(distance,no_of_passengers):
#Remove pass and write your logic here
cost=0
cost=70*distance/10
rest=0
rest=no_of_passengers*80
if cost<=rest:
return abs(cost - rest)
else:
return -1
#Provide different values for distance, no_of_passenger and test your program
distance=20
no_of_passengers=50
print(calculate(distance,no_of_passengers))
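# Extra illustrative check (an addition, not part of the original exercise):
# a long trip where the fare exceeds the pooled passenger budget of
# no_of_passengers*80, so calculate() returns -1.
print(calculate(600, 2))   # cost = 4200.0, rest = 160 -> -1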
|
[
"noreply@github.com"
] |
ygohil2350.noreply@github.com
|
5bf9421e0c8fc17eaca5fbf2da651488df0f890b
|
441244ceae643308d3ec61ebeef84367bc8e7007
|
/3_1.py
|
7e6cb032552c630e44c61a3016a86f1b3adc6a24
|
[] |
no_license
|
SealSiil/AdventOfCode2018
|
f0fed698a35027b4ba1294e09b8cf913e8d0428f
|
1e4e3306f1d1f8a1f35f120fa430bdd1efb5e1f6
|
refs/heads/master
| 2020-04-09T11:12:31.099286
| 2018-12-11T20:13:38
| 2018-12-11T20:13:38
| 160,299,812
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 912
|
py
|
import numpy as np
import re
def cleaning(data):
match = pattern.match(data)
Claims[int(match.group(1))] = ([int(match.group(2)),int(match.group(3))], [int(match.group(4)),int(match.group(5))])
def stake(marker, edgePos, edgeSize):
for w in range((edgePos[0] + 1),(edgePos[0] + 1 + edgeSize[0])) :
for h in range((edgePos[1] + 1),(edgePos[1] + 1 + edgeSize[1])):
if fabric[w,h] == 0:
fabric[w,h] = marker
else:
fabric[w,h] = -1
with open("Input3_1.txt") as f:
lines = f.readlines()
fabric = np.zeros((2000,2000))
Claims = {}
pattern = r"[#](\d*)\s[@]\s(\d*)[,](\d*)[:]\s(\d*)[x](\d*)\s*"
pattern = re.compile(pattern)
for line in lines:
cleaning(line)
for key in Claims.keys():
marker = key
edgePos, edgeSize = Claims[key]
stake(marker, edgePos, edgeSize)
print(np.count_nonzero(fabric == -1))
|
[
"jason.a.siil@gmail.com"
] |
jason.a.siil@gmail.com
|
138aa5edcea9579e2c34ec3974387a021d223d93
|
4103dc87a5309946920e964a37cce2dd637bcdfe
|
/mindelay/association/__init__.py
|
9261e8cb30e2b91d168d9d61c4cd7e2174e5e8cc
|
[] |
no_license
|
donglao/mindelay
|
8415e465ad04a43cf272f5a6eb2581ca710c61ac
|
0c97edf872f470943f90c9a4dede19632f86fee0
|
refs/heads/master
| 2020-07-01T09:16:33.378687
| 2019-10-03T17:34:55
| 2019-10-03T17:34:55
| 201,123,622
| 31
| 7
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 40
|
py
|
# association
from .update import update
|
[
"noreply@github.com"
] |
donglao.noreply@github.com
|
d06c40ecaf072a5bad0a3bfcdf2cff9f0960317d
|
ccb4cb8358fb896a88bbf0c6771462d898d7a492
|
/examples/goce_reentry_chart.py
|
decf8f0416fb3a95317f8d7eb65579f41c578074
|
[
"MIT"
] |
permissive
|
skyfielders/python-skyfield
|
a30d34a680dcd285bc8cd39cedc2629f792d5821
|
61fb6324e312715e20aa75ec24dc87286442be1a
|
refs/heads/master
| 2023-08-31T13:10:32.863587
| 2023-08-10T14:25:56
| 2023-08-10T14:25:56
| 7,924,113
| 1,040
| 204
|
MIT
| 2023-08-28T19:44:50
| 2013-01-30T21:19:21
|
Python
|
UTF-8
|
Python
| false
| false
| 2,026
|
py
|
import numpy as np
from matplotlib import pyplot as plt
from matplotlib.dates import HourLocator, DateFormatter
from skyfield.api import load, EarthSatellite
# Labels for both date and hour on the x axis, and km on y.
def label_dates_and_hours(axes):
axes.xaxis.set_major_locator(HourLocator([0]))
axes.xaxis.set_minor_locator(HourLocator([0, 6, 12, 18]))
axes.xaxis.set_major_formatter(DateFormatter('0h\n%Y %b %d\n%A'))
axes.xaxis.set_minor_formatter(DateFormatter('%Hh'))
for label in ax.xaxis.get_ticklabels(which='both'):
label.set_horizontalalignment('left')
axes.yaxis.set_major_formatter('{x:.0f} km')
axes.tick_params(which='both', length=0)
# Load the satellite's final TLE entry.
sat = EarthSatellite(
'1 34602U 09013A 13314.96046236 .14220718 20669-5 50412-4 0 930',
'2 34602 096.5717 344.5256 0009826 296.2811 064.0942 16.58673376272979',
'GOCE',
)
# Build the time range `t` over which to plot, plus other values.
ts = load.timescale()
t = ts.tt_jd(np.arange(sat.epoch.tt - 2.0, sat.epoch.tt + 2.0, 0.005))
reentry = ts.utc(2013, 11, 11, 0, 16)
earth_radius_km = 6371.0
# Compute geocentric positions for the satellite.
g = sat.at(t)
valid = [m is None for m in g.message]
# Start a new figure.
fig, ax = plt.subplots()
# Draw the blue curve.
x = t.utc_datetime()
y = np.where(valid, g.distance().km - earth_radius_km, np.nan)
ax.plot(x, y)
# Label the TLE epoch.
x = sat.epoch.utc_datetime()
y = sat.at(sat.epoch).distance().km - earth_radius_km
ax.plot(x, y, 'k.')
ax.text(x, y - 9, 'Epoch of TLE data ', ha='right')
# Label the official moment of reentry.
x = reentry.utc_datetime()
y = sat.at(reentry).distance().km - earth_radius_km
ax.plot(x, y, 'r.')
ax.text(x, y + 6, ' Moment of re-entry', c='r')
# Grid lines and labels.
ax.grid(which='both')
ax.set(title='GOCE satellite: altitude above sea level', xlabel='UTC')
label_dates_and_hours(ax)
# Render the plot to a PNG file.
fig.savefig('goce-reentry.png', bbox_inches='tight')
|
[
"brandon@rhodesmill.org"
] |
brandon@rhodesmill.org
|
10d1a3ab2353a80a6de7080d63ff12ec30391eee
|
6ad1fa724d3875ac841eb01418ca653f7937c460
|
/Python1807AXF-master/app/migrations/0003_mustbuy.py
|
7c9452f0a4fc8e4daf11e4e96abe828a7ac5398b
|
[] |
no_license
|
hlyhly1998/hly
|
eb422369d508952d6c4f822ca89796f2925192d1
|
fc6b564f465e8a76bde5083628e4b650b44d13a2
|
refs/heads/master
| 2022-12-06T15:05:17.951507
| 2019-06-26T02:27:57
| 2019-06-26T02:27:57
| 145,086,404
| 1
| 0
| null | 2022-11-22T02:39:10
| 2018-08-17T07:20:17
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 762
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2018-09-25 01:53
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('app', '0002_nav'),
]
operations = [
migrations.CreateModel(
name='Mustbuy',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('img', models.CharField(max_length=100)),
('name', models.CharField(max_length=100)),
('trackid', models.CharField(max_length=20)),
],
options={
'db_table': 'axf_mustbuy',
},
),
]
|
[
"huanglingyun111@163.com"
] |
huanglingyun111@163.com
|
eef01ca5d4c017c8baef7d64dc217c6b276ef324
|
e6edf0f1631d59a3f5e4b826a51d16f14ddb17c9
|
/runcheck.py
|
6b07578fdb766957ba8de9137439c39808d26f70
|
[] |
no_license
|
ParkKiHoon/Pyhton-for-ai
|
db7d60472c8b8380dbfb35942d09e8ce4f5f081d
|
ffbe44aeefc413000fcb8d57836e47b8b15f2980
|
refs/heads/main
| 2023-07-17T01:18:38.155566
| 2021-09-02T10:37:55
| 2021-09-02T10:37:55
| 402,381,865
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,059
|
py
|
#!/root/anaconda3/bin python3
import os
import subprocess
print("0")
process_read = os.popen("ps -ef | grep aimain.py | grep -v 'grep'").readlines()
print("1")
# List the current processes with ps -ef and collect only the lines that contain the string aimain.py.
# The grep command itself also shows up as a process, so it is excluded with grep -v.
check_process = str(process_read)
print("2")
# Convert the result to a string.
text_location=check_process.find("aimain.py")
print("3")
# Find where the aimain.py string occurs. If the string is absent, i.e. the process does not exist, find() returns -1.
if ( text_location == -1 ):
    print("4")
    print("Process not found!")
    os.system("nohup /root/anaconda3/bin/python3 /home/rsa-key-20210828/aimain.py &")
    # Restart the program. The trailing & runs it in the background.
    print("Program restarted!")
else:
    print("5")
    print("Process exists. Location is",text_location)
|
[
"vpfmtlsl@naver.com"
] |
vpfmtlsl@naver.com
|
12ee2af586a5389598d17d3a8e2eb199ba981004
|
bd2696ebd08022b8fa126d963661fdf0792e2a0c
|
/L10_packet_dir/test_Job_02_from_L10_unittest.py
|
ae6f62abb582aa18cb4a2eb44c1d333c30227a43
|
[
"MIT"
] |
permissive
|
github-Ilfat/All_Lesson_of_Python
|
653d737085c7f1bd381b2d704f280374edeab957
|
fbab364fe91e05e08658662b16470a1809b6b2b0
|
refs/heads/master
| 2021-05-17T15:54:17.353396
| 2020-05-07T14:40:17
| 2020-05-07T14:40:17
| 250,856,289
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,720
|
py
|
import unittest
from For_Job_01_L10_Game_fool_card_desc import Game_fool_card_desc
class Test_Game_fool_card_desc_unittest(unittest.TestCase):
    def setUp(self):
        # Assign values for the parameters of the '-=TABLE OF THE CARD GAME "DURAK" (FOOL)=-' class
        name1='Комп1' #──── name of player 1
        name2='Комп2' #──── name of player 2
        gmr1c = [0,0,0,6,0,0,0,0,0] #──── card rank, player 1
        gmr1s = [0,0,0,3,0,0,0,0,0] #──── card suit, player 1
        dsc_c = [0,1,0,9,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,0] #──── card rank, for positions 1 to 4 and 18
        dsc_s = [0,1,0,2,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0] #──── card suit, for positions 1 to 4 and 18
        gmr2c = [0,0,0,8,0,0,0,0,0] #──── card rank, player 2
        gmr2s = [0,0,0,3,0,0,0,0,0] #──── card suit, player 2
        cld_n=36
        # Start the '-=TABLE OF THE CARD GAME "DURAK" (FOOL)=-' class
        self.x=Game_fool_card_desc(name1, gmr1c, gmr1s, name2, gmr2c, gmr2s, dsc_c, dsc_s, cld_n)
        self.x.blk_chk()
        self.x.Dsc_img()
        # Assign attributes of this test class, used to test the target class
        self.card=[' ','§',' 6',' 7',' 8',' 9','10',' V',' D',' K',' T']
        self.suit=[' ','§','♣','\033[31m♦\033[0m','\033[31m♥\033[0m','♠']
        self.name1=name1
        self.name2=name2
        self.gmr1c=gmr1c
        self.gmr1s=gmr1s
        self.dsc_c=dsc_c
        self.dsc_s=dsc_s
        self.gmr2c=gmr2c
        self.gmr2s=gmr2s
        self.cld_n=cld_n
        print('Sets for start test completed!')
    def test_blk_chk(self):
        if len(self.gmr1c)==len(self.gmr1s) and len(self.gmr2c)==len(self.gmr2s) and len(self.dsc_c)==len(self.dsc_s):
            self.assertTrue(self.x.chk_error == 0)
        else: self.assertFalse(self.x.chk_error == 1)
        # value of this test class's attribute
        if self.x.dc[5]!=0:
            # check the values of the target class's attributes
            self.assertEqual(self.x.action1, 'ходит!')
            self.assertEqual(self.x.action2, 'отбивает!')
        # value of this test class's attribute
        if self.x.ds[5]!=0:
            # check the values of the target class's attributes
            self.assertEqual(self.x.action1, 'отбивает!')
            self.assertEqual(self.x.action2, 'ходит!')
    def test_init(self):
        # check the values of the target class's attributes
        self.assertEqual(self.x.dc[1], 1) # "face-down card" (the deck)
        self.assertEqual(self.x.ds[1], 1) # "§" (card-back picture)
        self.assertEqual(self.x.dc[3], 9) # "King"
        self.assertTrue(self.x.ds[3] == 2) # "Clubs"
    def test_Dsc_img(self):
        # flag that enables the "overlapping cards" display mode
        # values of this test class's attributes
        if (self.dsc_c[5]!=0 and self.dsc_s[6]!=0) or (self.dsc_s[5]!=0 and self.dsc_c[6]!=0):
            # check the value of the target class's attribute
            self.assertEqual(self.x.ds[0], 1)
    def tearDown(self):
        print('All tests have completed!')
|
[
"s-ilfat-h@mail.ru"
] |
s-ilfat-h@mail.ru
|
b0b3c1aaa329c0e7f591d7df292153004d55f266
|
66e3b5dfd66b677a88c59e181e7b0edd563fd738
|
/ie/dit/soc/bridgingmodule2013/boolean/soln6.1.py
|
b29781add6e56e989c6c30c476330f375547599d
|
[] |
no_license
|
bjg/bridgingmodule2013
|
0b23dbe5170aa577c046418599cea69b32161213
|
6233e5c7b5f553c2d1179ad71d2c5f30f932c321
|
refs/heads/master
| 2020-09-13T07:50:40.512937
| 2013-09-06T11:12:22
| 2013-09-06T11:12:22
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 133
|
py
|
def halfadder(a, b):
return [a ^ b, a & b]
for pairs in [[0, 0], [0, 1], [1, 0], [1, 1]]:
s, c = halfadder(*pairs)
print c, s
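# Illustrative extension added for context (not part of the original solution):
# a full adder built from two half adders, with the two carries OR-ed together.
def fulladder(a, b, cin):
    s1, c1 = halfadder(a, b)
    s2, c2 = halfadder(s1, cin)
    return [s2, c1 | c2]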
|
[
"brian.j.gillespie@gmail.com"
] |
brian.j.gillespie@gmail.com
|
3b600461905bbc4961263bfe2745dd295cc11579
|
d9296d3b420d8f5c1aeca094d00dd6bc38a3d57d
|
/read_statistics/migrations/0001_initial.py
|
ea8634dbdab68cbb44d0ce86241b1fce182ee74d
|
[] |
no_license
|
Anthony88888/mysite
|
57f5f40530886b12cf1364c10c6206983b022c6c
|
7130715ef3acac054b96fa22dcf19fec1f31e019
|
refs/heads/master
| 2023-01-09T12:15:11.720225
| 2020-10-25T14:48:35
| 2020-10-25T14:48:35
| 305,168,092
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 776
|
py
|
# Generated by Django 2.0.13 on 2020-10-06 16:27
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
]
operations = [
migrations.CreateModel(
name='ReadNum',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('read_num', models.IntegerField(default=0)),
('object_id', models.PositiveIntegerField()),
('content_type', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='contenttypes.ContentType')),
],
),
]
|
[
"admin@example.com"
] |
admin@example.com
|
3b9031d40184e01f4ecd36f3a2235fa27257fe43
|
8c4bbe6b7551100f18504d50280c12f58e88ee40
|
/Homework1/Homework1/OLDsolutions/testing.py
|
4a568cf7c317c6668ce1c0f7e5aa6e4e729ef5c5
|
[] |
no_license
|
AvisekNaug/AI6360
|
814724948e604c855470baf3c2763c47b878da12
|
7c81dc709ebac59203ac93f7aed629ad5b2d89e5
|
refs/heads/master
| 2021-06-27T11:14:03.789356
| 2019-07-30T17:51:28
| 2019-07-30T17:51:28
| 97,511,561
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,053
|
py
|
import pprint
pp = pprint.PrettyPrinter(indent=4)
def puzz_astar(start,end):
"""
A* algorithm
"""
#weight = 1.0
Open = [[0,heuristic_2(start),start]] #optional: heuristic_1
Closed = []
Nodes_Expanded=0
#incumbent = [99999999]
while Open:
i = 0
for j in range(1, len(Open)):
if Open[i][0] +Open[i][1] > Open[j][0] + Open[j][1]:
i = j
path = Open[i]
Open = Open[:i] + Open[i+1:]
endnode = path[-1]
Closed.append(endnode)
Nodes_Expanded += 1
if endnode == end:
break
for k in moves(endnode):
if k in Closed:
continue
gk = path[0]+1
hk = heuristic_2(k)
newpath = [gk]+[hk]+path[2:]+[k]
Open.append(newpath)
print "#Expanded nodes:", Nodes_Expanded-1
print "Solution:"
pp.pprint(path)
def moves(mat):
"""
Returns a list of all possible moves
"""
output = []
m = eval(mat)
i = 0
while 0 not in m[i]: i += 1
j = m[i].index(0); #blank space (zero)
if i > 0:
m[i][j], m[i-1][j] = m[i-1][j], m[i][j]; #move up
output.append(str(m))
m[i][j], m[i-1][j] = m[i-1][j], m[i][j];
if i < 3:
m[i][j], m[i+1][j] = m[i+1][j], m[i][j] #move down
output.append(str(m))
m[i][j], m[i+1][j] = m[i+1][j], m[i][j]
if j > 0:
m[i][j], m[i][j-1] = m[i][j-1], m[i][j] #move left
output.append(str(m))
m[i][j], m[i][j-1] = m[i][j-1], m[i][j]
if j < 3:
m[i][j], m[i][j+1] = m[i][j+1], m[i][j] #move right
output.append(str(m))
m[i][j], m[i][j+1] = m[i][j+1], m[i][j]
return output
def heuristic_1(puzz):
"""
Counts the number of misplaced tiles
"""
misplaced = 0
compare = 1
m = eval(puzz)
for i in range(4):
for j in range(4):
if m[i][j] != compare and m[i][j] != 0:
misplaced += 1
compare += 1
return misplaced
def heuristic_2(puzz):
"""
Manhattan distance
"""
distance = 0
m = eval(puzz)
for i in range(4):
for j in range(4):
if m[i][j] == 0: continue
distance += abs(i - ((m[i][j]-1)/4)) + abs(j - ((m[i][j]-1)%4));
#print distance
return distance
if __name__ == '__main__':
#puzzle = str([[9, 5, 7, 4],[1, 0, 3, 8], [13, 10, 2, 12],[14, 6, 11, 15]])
puzzle = str([[3, 6, 9, 4],[5, 2,8, 11], [10, 0, 15, 7],[13, 1, 14, 12]])
end = str([[1, 2, 3, 4],[5, 6, 7, 8], [9, 10, 11, 12],[13, 14, 15, 0]])
puzz_astar(puzzle,end)
|
[
"noreply@github.com"
] |
AvisekNaug.noreply@github.com
|
ac743aaa34f496401df9479367ce482c7d2ac7a9
|
13048946876a285f2fe7bdfbf168e8ab28abcf6e
|
/Searching2.py
|
5f29bd9569eac2af0d5fb803f1a378ea222956d4
|
[] |
no_license
|
G0LDF0X/Python-Searching
|
3f823677038454555a8c4dad72bd17a8af745879
|
c8003ebf73e6b15cbf2ed030db74e032134f051c
|
refs/heads/main
| 2023-08-02T04:03:27.931838
| 2021-10-08T12:13:08
| 2021-10-08T12:13:08
| 414,869,849
| 0
| 0
| null | null | null | null |
UHC
|
Python
| false
| false
| 3,264
|
py
|
# Import the functions and libraries we need.
from bs4 import BeautifulSoup
from requests import get
from re import compile, findall, sub
# Just a function that turns lists into strings because the re.findall() really annoys me
def lststr(lst):
string = ''
return string.join(lst)
# The main method that runs when this .py file is executed.
def main():
    # Link to the results page you get when searching a hate-speech term with "post search".
    link = input("DCINSIDE hate-speech search page link: ")
    #link = "https://search.dcinside.com/post/q/.ED.96.89.EB.B3.B5"
    """Ppomppu site example (search keyword: "puppy"):
    the address you get when searching that keyword straight from the main page
    https://www.ppomppu.co.kr/search_bbs.php?keyword=%B0%AD%BE%C6%C1%F6
    """
    i = 1
    """Returns the links of the search results from every page, but Ppomppu has thousands of posts...
    If that is too much, change while(1) to something like while(i < 500),
    which prints the links up to page 500.
    If that is still too much, 100 works too."""
    new_list = []  # initialise so the file-writing step below cannot hit a NameError
    while(1):
        # Build the per-page URL without mutating the base link
        # (the original code re-appended the query string to `link` on every pass).
        page_link = link + "&bbs_cate=2&search_type=sub_memo&order_typedate&page_no="+str(i)
        # Fetch that page's html as text.
        html_text = get(page_link).text
        # Wrap the html in BeautifulSoup so it is easier to work with.
        soup = BeautifulSoup(html_text, 'lxml')
        span_list = soup.findAll('span', class_ = 'title')
        if not span_list:
            break
        # We only want to pull the links out of it, so use a regular expression.
        r = compile('href="(.*?)"')
        # Empty list for the for loop below.
        url_list = []
        for link2 in span_list:
            # The re library needs a string as input, so use str().
            link2 = str(link2)
            # Use re to extract just the link part; findall() returns a list, though.
            url = r.findall(link2)
            # So use the helper function defined earlier.
            url_str = lststr(url)
            # Append it to the list we prepared.
            url_list.append(url_str)
        # Not sure whether other sites do this too, but "&" came back HTML-escaped as "&amp;".
        # Fix that as well, otherwise the links are dead.
        new_list = []
        # Repair the broken parts of each link.
        for url in url_list:
            url = "https://www.ppomppu.co.kr"+url
            new_list.append(sub('&amp;', "&", url))
        # A for loop to check everything came out right.
        for url in new_list:
            print(url)
        i += 1
    # Save the urls to a txt file.
    with open("url_list.txt", "w") as f:
        for url in new_list:
            f.write(url + "\n")
# Run the main method when this file is executed.
if __name__ == "__main__":
main()
|
[
"noreply@github.com"
] |
G0LDF0X.noreply@github.com
|
307f0c8f91fd180198a905ec44e1075657a0281a
|
819c070a222d2d19dca43a0bd28cca1e76e64da4
|
/devel/lib/python2.7/dist-packages/rbx1_nav/cfg/CalibrateLinearConfig.py
|
2cde3f07a9f379cd020cb890ec6a8be4edd35a6d
|
[] |
no_license
|
danielzhangcs/catkin_ws
|
069c21b1fea7177d41e31cf13994082818d6e38c
|
01ab11727b0928085a0b42d1cafb1ceac79748f2
|
refs/heads/main
| 2023-01-04T14:00:53.955168
| 2020-10-22T00:46:54
| 2020-10-22T00:46:54
| 303,494,038
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,947
|
py
|
## *********************************************************
##
## File autogenerated for the rbx1_nav package
## by the dynamic_reconfigure package.
## Please do not edit.
##
## ********************************************************/
from dynamic_reconfigure.encoding import extract_params
inf = float('inf')
config_description = {'upper': 'DEFAULT', 'lower': 'groups', 'srcline': 246, 'name': 'Default', 'parent': 0, 'srcfile': '/opt/ros/melodic/lib/python2.7/dist-packages/dynamic_reconfigure/parameter_generator_catkin.py', 'cstate': 'true', 'parentname': 'Default', 'class': 'DEFAULT', 'field': 'default', 'state': True, 'parentclass': '', 'groups': [], 'parameters': [{'srcline': 291, 'description': 'Test distance in meters', 'max': 2.0, 'cconsttype': 'const double', 'ctype': 'double', 'srcfile': '/opt/ros/melodic/lib/python2.7/dist-packages/dynamic_reconfigure/parameter_generator_catkin.py', 'name': 'test_distance', 'edit_method': '', 'default': 1.0, 'level': 0, 'min': 0.0, 'type': 'double'}, {'srcline': 291, 'description': 'Robot speed in meters per second', 'max': 0.3, 'cconsttype': 'const double', 'ctype': 'double', 'srcfile': '/opt/ros/melodic/lib/python2.7/dist-packages/dynamic_reconfigure/parameter_generator_catkin.py', 'name': 'speed', 'edit_method': '', 'default': 0.15, 'level': 0, 'min': 0.0, 'type': 'double'}, {'srcline': 291, 'description': 'Error tolerance to goal distance in meters', 'max': 0.1, 'cconsttype': 'const double', 'ctype': 'double', 'srcfile': '/opt/ros/melodic/lib/python2.7/dist-packages/dynamic_reconfigure/parameter_generator_catkin.py', 'name': 'tolerance', 'edit_method': '', 'default': 0.01, 'level': 0, 'min': 0.0, 'type': 'double'}, {'srcline': 291, 'description': 'Linear correction factor', 'max': 3.0, 'cconsttype': 'const double', 'ctype': 'double', 'srcfile': '/opt/ros/melodic/lib/python2.7/dist-packages/dynamic_reconfigure/parameter_generator_catkin.py', 'name': 'odom_linear_scale_correction', 'edit_method': '', 'default': 1.0, 'level': 0, 'min': 0.0, 'type': 'double'}, {'srcline': 291, 'description': 'Check to start the test', 'max': True, 'cconsttype': 'const bool', 'ctype': 'bool', 'srcfile': '/opt/ros/melodic/lib/python2.7/dist-packages/dynamic_reconfigure/parameter_generator_catkin.py', 'name': 'start_test', 'edit_method': '', 'default': False, 'level': 0, 'min': False, 'type': 'bool'}], 'type': '', 'id': 0}
min = {}
max = {}
defaults = {}
level = {}
type = {}
all_level = 0
#def extract_params(config):
# params = []
# params.extend(config['parameters'])
# for group in config['groups']:
# params.extend(extract_params(group))
# return params
for param in extract_params(config_description):
min[param['name']] = param['min']
max[param['name']] = param['max']
defaults[param['name']] = param['default']
level[param['name']] = param['level']
type[param['name']] = param['type']
all_level = all_level | param['level']
|
[
"danielzhang@brandeis.edu"
] |
danielzhang@brandeis.edu
|
503a11282b2b012d89e3014060423162487ba9a6
|
fec863b67ec1ae65da7111bd8c77d0ab2ef1f6ce
|
/movie recommendation system/.history/model3_20210430162616.py
|
ef78677ec57da3e3bcb5a7edf1bc1dcf42a79f03
|
[] |
no_license
|
kannan768/movie-recommendation-system
|
e6cf71620e25a0185fed3b37896137f1f39b0801
|
7460d440d44e77390e459ab10c535b6971c9c3ab
|
refs/heads/main
| 2023-05-14T02:21:50.930672
| 2021-06-09T05:02:30
| 2021-06-09T05:02:30
| 375,225,316
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,918
|
py
|
#item-item filtering
#collaborative filtering
from math import sqrt
import pandas as pd
import numpy as np
import seaborn as sns
from matplotlib import pyplot as plt
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import linear_kernel
from sklearn.metrics import pairwise_distances
from scipy.spatial.distance import cosine, correlation
ratings = pd.read_csv('m1-1m/ratings.dat', sep='::', names=['userId', 'movieId', 'rating', 'timestamp'],engine = 'python', encoding = 'latin-1')
users = pd.read_csv('m1-1m/users.dat', sep='::', names=['userId', 'gender', 'age', 'occupation', 'zipcode'],engine = 'python', encoding = 'latin-1')
movies = pd.read_csv('m1-1m/movies.dat', sep='::', names=['movieId', 'title', 'genres'],engine = 'python', encoding = 'latin-1')
df_movies=movies
df_ratings=ratings
df_movies_ratings=pd.merge(df_movies, df_ratings)
ratings_matrix_items = df_movies_ratings.pivot_table(index=['movieId'],columns=['userId'],values='rating').reset_index(drop=True)
ratings_matrix_items.fillna( 0, inplace = True )
movie_similarity = 1 - pairwise_distances( ratings_matrix_items.to_numpy(), metric="cosine" )
np.fill_diagonal( movie_similarity, 0 )
ratings_matrix_items = pd.DataFrame( movie_similarity )
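# Illustrative example of the cosine step above: for two item rating vectors
# u = [5, 0, 3] and v = [4, 0, 3], cos(u, v) = (u . v) / (|u| * |v|)
# = 29 / (sqrt(34) * 5) ~= 0.995, so the cosine *distance* is ~0.005 and the
# similarity stored in movie_similarity is 1 - 0.005 ~= 0.995.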
def item_similarity(movieName):
try:
user_inp=movieName
inp=df_movies[df_movies['title']==user_inp].index.tolist()
inp=inp[0]
df_movies['similarity'] = ratings_matrix_items.iloc[inp]
df_movies.columns = ['movie_id', 'title', 'release_date','similarity']
except:
print("Sorry, the movie is not in the database!")
def recommendedMoviesAsperItemSimilarity(user_id):
user_movie= df_movies_ratings[(df_movies_ratings.userId==user_id) & df_movies_ratings.rating.isin([5,4.5])][['title']]
user_movie=user_movie.iloc[0,0]
item_similarity(user_movie)
sorted_movies_as_per_userChoice=df_movies.sort_values( ["similarity"], ascending = False )
sorted_movies_as_per_userChoice=sorted_movies_as_per_userChoice[sorted_movies_as_per_userChoice['similarity'] >=0.45]['movie_id']
recommended_movies=list()
df_recommended_item=pd.DataFrame()
user2Movies= df_ratings[df_ratings['userId']== user_id]['movieId']
for movieId in sorted_movies_as_per_userChoice:
if movieId not in user2Movies:
df_new= df_ratings[(df_ratings.movieId==movieId)]
df_recommended_item=pd.concat([df_recommended_item,df_new])
best10=df_recommended_item.sort_values(['rating'], ascending = False )[1:10]
return best10['movieId']
def movieIdToTitle(listMovieIDs):
movie_titles= list()
for id in listMovieIDs:
movie_titles.append(df_movies[df_movies['movie_id']==id]['title'])
return movie_titles
user_id=50
print("Recommended movies,:\n",movieIdToTitle(recommendedMoviesAsperItemSimilarity(user_id)))
|
[
"kannanbsk1609080@gmail.com"
] |
kannanbsk1609080@gmail.com
|
7d1b5a7a4c9b2473875d05659a41073fc769e6de
|
825afdb29be726e5b9dec219d203579ca7d23f8a
|
/tests/web_bottle_ssl_test.py
|
3e5f5a780fcfd58bf3d017bd463cb0839262a4f8
|
[
"MIT"
] |
permissive
|
tmetsch/smart_home
|
d8c16241af8e48ab3b16c2812ec7d48866e56929
|
582c225f8659029529a9c6f1c1aa551d0b2ec906
|
refs/heads/master
| 2022-10-01T09:30:22.494708
| 2022-09-14T10:20:41
| 2022-09-14T10:20:41
| 103,826,614
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 868
|
py
|
"""
Unittest for bottle SSL wrapper.
"""
import threading
import unittest
import bottle
from web import bottle_ssl
class SecureServerAdapterTest(unittest.TestCase):
"""
Testcase for the SecureServerAdapter class.
"""
def setUp(self):
self.cut = bottle_ssl.SecureServerAdapter('tests/files/key.pem',
'tests/files/cert.pem')
self.app = bottle.app
def test_run_for_success(self):
"""
Test for success.
"""
thread = threading.Thread(target=self.cut.run, args=self.app)
thread.setDaemon(True)
thread.start()
def test_run_for_failure(self):
"""
Test for failure.
"""
pass # N/A
def test_run_for_sanity(self):
"""
Test for sanity.
"""
pass # TODO: implement
|
[
"tmetsch@gmail.com"
] |
tmetsch@gmail.com
|
b40dca379be236bcc74b9e56cefe1271d2baf212
|
132bfa4b6736b6c91a6be8d9116a65d581222868
|
/autoapp.py
|
7d83e42e53be5b762ed08570c9f90bab591bcf3a
|
[] |
no_license
|
banphlet/Flask-Tensorflow
|
3a735c88fcb0807e7704f0f3d62547e181463716
|
6b0feb8078eedf163c0d28d56a09c16d8d63ad90
|
refs/heads/master
| 2022-01-24T01:53:37.867200
| 2019-07-21T11:17:37
| 2019-07-21T11:17:37
| 197,902,306
| 5
| 4
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 63
|
py
|
from app import app
if __name__ == "__main__":
app.run()
|
[
"banphlet4@gmail.com"
] |
banphlet4@gmail.com
|
916d5024feee51c4aecbfdb8b0b81be68f04a159
|
7f9a21c118dba8f0aea05f46ecccbc0677127159
|
/perpustakaan/wsgi.py
|
257e84276066c731bbdb15d0fbe7503096b37ac9
|
[] |
no_license
|
Anggiyasti/perpustakaan
|
3af4c7e120b3a09d7f0670586cb04e0e785eb590
|
cbf327b3d1eeb363b0ea2dd132f83df9eb068bb5
|
refs/heads/master
| 2021-05-03T10:46:19.530274
| 2016-10-04T10:53:48
| 2016-10-04T10:53:48
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 401
|
py
|
"""
WSGI config for perpustakaan project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "perpustakaan.settings")
application = get_wsgi_application()
|
[
"putrianggiyasti@gmail.com"
] |
putrianggiyasti@gmail.com
|
e5b6a2d65e3ac4422c05b239dc066aa8ec10effa
|
1b442d616c3666c329d2ff85a08e63fb670d18d0
|
/PY01032.py
|
424ff6c9fb458d867254f044722bf4d870686b81
|
[] |
no_license
|
nhuantho/PythonCodePtit
|
5ce233e6ad756d3dbb79384b450478123256a80f
|
6e0b5fef13b727d568cfaa0afee1a1506761e644
|
refs/heads/main
| 2023-08-20T08:22:19.179586
| 2021-10-29T04:10:00
| 2021-10-29T04:10:00
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 21,288
|
py
|
s='''1 3 5 7 9 15 17 21 27 31 33 45 51 63 65 73 85 93 99 107 119 127 129 153 165 189 195 219 231 255 257 273 297 313 325 341 365 381 387 403 427 443 455 471 495 511 513 561 585 633 645 693 717 765 771 819 843 891 903 951 975 1023 1025 1057 1105 1137 1161 1193 1241 1273 1285 1317 1365 1397 1421 1453 1501 1533 1539 1571 1619 1651 1675 1707 1755 1787 1799 1831 1879 1911 1935 1967 2015 2047 2049 2145 2193 2289 2313 2409 2457 2553 2565 2661 2709 2805 2829 2925 2973 3069 3075 3171 3219 3315 3339 3435 3483 3579 3591 3687 3735 3831 3855 3951 3999 4095 4097 4161 4257 4321 4369 4433 4529 4593 4617 4681 4777 4841 4889 4953 5049 5113 5125 5189 5285 5349 5397 5461 5557 5621 5645 5709 5805 5869 5917 5981 6077 6141 6147 6211 6307 6371 6419 6483 6579 6643 6667 6731 6827 6891 6939 7003 7099 7163 7175 7239 7335 7399 7447 7511 7607 7671 7695 7759 7855 7919 7967 8031 8127 8191 8193 8385 8481 8673 8721 8913 9009 9201 9225 9417 9513 9705 9753 9945 10041 10233 10245 10437 10533 10725 10773 10965 11061 11253 11277 11469 11565 11757 11805 11997 12093 12285 12291 12483 12579 12771 12819 13011 13107 13299 13323 13515 13611 13803 13851 14043 14139 14331 14343 14535 14631 14823 14871 15063 15159 15351 15375 15567 15663 15855 15903 16095 16191 16383 16385 16513 16705 16833 16929 17057 17249 17377 17425 17553 17745 17873 17969 18097 18289 18417 18441 18569 18761 18889 18985 19113 19305 19433 19481 19609 19801 19929 20025 20153 20345 20473 20485 20613 20805 20933 21029 21157 21349 21477 21525 21653 21845 21973 22069 22197 22389 22517 22541 22669 22861 22989 23085 23213 23405 23533 23581 23709 23901 24029 24125 24253 24445 24573 24579 24707 24899 25027 25123 25251 25443 25571 25619 25747 25939 26067 26163 26291 26483 26611 26635 26763 26955 27083 27179 27307 27499 27627 27675 27803 27995 28123 28219 28347 28539 28667 28679 28807 28999 29127 29223 29351 29543 29671 29719 29847 30039 30167 30263 30391 30583 30711 30735 30863 31055 31183 31279 31407 31599 31727 31775 31903 32095 32223 32319 32447 32639 32767 32769 33153 33345 33729 33825 34209 34401 34785 34833 35217 35409 35793 35889 36273 36465 36849 36873 37257 37449 37833 37929 38313 38505 38889 38937 39321 39513 39897 39993 40377 40569 40953 40965 41349 41541 41925 42021 42405 42597 42981 43029 43413 43605 43989 44085 44469 44661 45045 45069 45453 45645 46029 46125 46509 46701 47085 47133 47517 47709 48093 48189 48573 48765 49149 49155 49539 49731 50115 50211 50595 50787 51171 51219 51603 51795 52179 52275 52659 52851 53235 53259 53643 53835 54219 54315 54699 54891 55275 55323 55707 55899 56283 56379 56763 56955 57339 57351 57735 57927 58311 58407 58791 58983 59367 59415 59799 59991 60375 60471 60855 61047 61431 61455 61839 62031 62415 62511 62895 63087 63471 63519 63903 64095 64479 64575 64959 65151 65535 65537 65793 66177 66433 66625 66881 67265 67521 67617 67873 68257 68513 68705 68961 69345 69601 69649 69905 70289 70545 70737 70993 71377 71633 71729 71985 72369 72625 72817 73073 73457 73713 73737 73993 74377 74633 74825 75081 75465 75721 75817 76073 76457 76713 76905 77161 77545 77801 77849 78105 78489 78745 78937 79193 79577 79833 79929 80185 80569 80825 81017 81273 81657 81913 81925 82181 82565 82821 83013 83269 83653 83909 84005 84261 84645 84901 85093 85349 85733 85989 86037 86293 86677 86933 87125 87381 87765 88021 88117 88373 88757 89013 89205 89461 89845 90101 90125 90381 90765 91021 91213 91469 91853 92109 92205 92461 92845 93101 93293 93549 93933 94189 94237 94493 94877 95133 95325 95581 95965 96221 96317 96573 96957 97213 97405 97661 98045 98301 98307 
98563 98947 99203 99395 99651 100035 100291 100387 100643 101027 101283 101475 101731 102115 102371 102419 102675 103059 103315 103507 103763 104147 104403 104499 104755 105139 105395 105587 105843 106227 106483 106507 106763 107147 107403 107595 107851 108235 108491 108587 108843 109227 109483 109675 109931 110315 110571 110619 110875 111259 111515 111707 111963 112347 112603 112699 112955 113339 113595 113787 114043 114427 114683 114695 114951 115335 115591 115783 116039 116423 116679 116775 117031 117415 117671 117863 118119 118503 118759 118807 119063 119447 119703 119895 120151 120535 120791 120887 121143 121527 121783 121975 122231 122615 122871 122895 123151 123535 123791 123983 124239 124623 124879 124975 125231 125615 125871 126063 126319 126703 126959 127007 127263 127647 127903 128095 128351 128735 128991 129087 129343 129727 129983 130175 130431 130815 131071 131073 131841 132225 132993 133185 133953 134337 135105 135201 135969 136353 137121 137313 138081 138465 139233 139281 140049 140433 141201 141393 142161 142545 143313 143409 144177 144561 145329 145521 146289 146673 147441 147465 148233 148617 149385 149577 150345 150729 151497 151593 152361 152745 153513 153705 154473 154857 155625 155673 156441 156825 157593 157785 158553 158937 159705 159801 160569 160953 161721 161913 162681 163065 163833 163845 164613 164997 165765 165957 166725 167109 167877 167973 168741 169125 169893 170085 170853 171237 172005 172053 172821 173205 173973 174165 174933 175317 176085 176181 176949 177333 178101 178293 179061 179445 180213 180237 181005 181389 182157 182349 183117 183501 184269 184365 185133 185517 186285 186477 187245 187629 188397 188445 189213 189597 190365 190557 191325 191709 192477 192573 193341 193725 194493 194685 195453 195837 196605 196611 197379 197763 198531 198723 199491 199875 200643 200739 201507 201891 202659 202851 203619 204003 204771 204819 205587 205971 206739 206931 207699 208083 208851 208947 209715 210099 210867 211059 211827 212211 212979 213003 213771 214155 214923 215115 215883 216267 217035 217131 217899 218283 219051 219243 220011 220395 221163 221211 221979 222363 223131 223323 224091 224475 225243 225339 226107 226491 227259 227451 228219 228603 229371 229383 230151 230535 231303 231495 232263 232647 233415 233511 234279 234663 235431 235623 236391 236775 237543 237591 238359 238743 239511 239703 240471 240855 241623 241719 242487 242871 243639 243831 244599 244983 245751 245775 246543 246927 247695 247887 248655 249039 249807 249903 250671 251055 251823 252015 252783 253167 253935 253983 254751 255135 255903 256095 256863 257247 258015 258111 258879 259263 260031 260223 260991 261375 262143 262145 262657 263425 263937 264321 264833 265601 266113 266305 266817 267585 268097 268481 268993 269761 270273 270369 270881 271649 272161 272545 273057 273825 274337 274529 275041 275809 276321 276705 277217 277985 278497 278545 279057 279825 280337 280721 281233 282001 282513 282705 283217 283985 284497 284881 285393 286161 286673 286769 287281 288049 288561 288945 289457 290225 290737 290929 291441 292209 292721 293105 293617 294385 294897 294921 295433 296201 296713 297097 297609 298377 298889 299081 299593 300361 300873 301257 301769 302537 303049 303145 303657 304425 304937 305321 305833 306601 307113 307305 307817 308585 309097 309481 309993 310761 311273 311321 311833 312601 313113 313497 314009 314777 315289 315481 315993 316761 317273 317657 318169 318937 319449 319545 320057 320825 321337 321721 322233 323001 323513 323705 324217 324985 325497 325881 
326393 327161 327673 327685 328197 328965 329477 329861 330373 331141 331653 331845 332357 333125 333637 334021 334533 335301 335813 335909 336421 337189 337701 338085 338597 339365 339877 340069 340581 341349 341861 342245 342757 343525 344037 344085 344597 345365 345877 346261 346773 347541 348053 348245 348757 349525 350037 350421 350933 351701 352213 352309 352821 353589 354101 354485 354997 355765 356277 356469 356981 357749 358261 358645 359157 359925 360437 360461 360973 361741 362253 362637 363149 363917 364429 364621 365133 365901 366413 366797 367309 368077 368589 368685 369197 369965 370477 370861 371373 372141 372653 372845 373357 374125 374637 375021 375533 376301 376813 376861 377373 378141 378653 379037 379549 380317 380829 381021 381533 382301 382813 383197 383709 384477 384989 385085 385597 386365 386877 387261 387773 388541 389053 389245 389757 390525 391037 391421 391933 392701 393213 393219 393731 394499 395011 395395 395907 396675 397187 397379 397891 398659 399171 399555 400067 400835 401347 401443 401955 402723 403235 403619 404131 404899 405411 405603 406115 406883 407395 407779 408291 409059 409571 409619 410131 410899 411411 411795 412307 413075 413587 413779 414291 415059 415571 415955 416467 417235 417747 417843 418355 419123 419635 420019 420531 421299 421811 422003 422515 423283 423795 424179 424691 425459 425971 425995 426507 427275 427787 428171 428683 429451 429963 430155 430667 431435 431947 432331 432843 433611 434123 434219 434731 435499 436011 436395 436907 437675 438187 438379 438891 439659 440171 440555 441067 441835 442347 442395 442907 443675 444187 444571 445083 445851 446363 446555 447067 447835 448347 448731 449243 450011 450523 450619 451131 451899 452411 452795 453307 454075 454587 454779 455291 456059 456571 456955 457467 458235 458747 458759 459271 460039 460551 460935 461447 462215 462727 462919 463431 464199 464711 465095 465607 466375 466887 466983 467495 468263 468775 469159 469671 470439 470951 471143 471655 472423 472935 473319 473831 474599 475111 475159 475671 476439 476951 477335 477847 478615 479127 479319 479831 480599 481111 481495 482007 482775 483287 483383 483895 484663 485175 485559 486071 486839 487351 487543 488055 488823 489335 489719 490231 490999 491511 491535 492047 492815 493327 493711 494223 494991 495503 495695 496207 496975 497487 497871 498383 499151 499663 499759 500271 501039 501551 501935 502447 503215 503727 503919 504431 505199 505711 506095 506607 507375 507887 507935 508447 509215 509727 510111 510623 511391 511903 512095 512607 513375 513887 514271 514783 515551 516063 516159 516671 517439 517951 518335 518847 519615 520127 520319 520831 521599 522111 522495 523007 523775 524287 524289 525825 526593 528129 528513 530049 530817 532353 532545 534081 534849 536385 536769 538305 539073 540609 540705 542241 543009 544545 544929 546465 547233 548769 548961 550497 551265 552801 553185 554721 555489 557025 557073 558609 559377 560913 561297 562833 563601 565137 565329 566865 567633 569169 569553 571089 571857 573393 573489 575025 575793 577329 577713 579249 580017 581553 581745 583281 584049 585585 585969 587505 588273 589809 589833 591369 592137 593673 594057 595593 596361 597897 598089 599625 600393 601929 602313 603849 604617 606153 606249 607785 608553 610089 610473 612009 612777 614313 614505 616041 616809 618345 618729 620265 621033 622569 622617 624153 624921 626457 626841 628377 629145 630681 630873 632409 633177 634713 635097 636633 637401 638937 639033 640569 641337 642873 643257 644793 645561 647097 
647289 648825 649593 651129 651513 653049 653817 655353 655365 656901 657669 659205 659589 661125 661893 663429 663621 665157 665925 667461 667845 669381 670149 671685 671781 673317 674085 675621 676005 677541 678309 679845 680037 681573 682341 683877 684261 685797 686565 688101 688149 689685 690453 691989 692373 693909 694677 696213 696405 697941 698709 700245 700629 702165 702933 704469 704565 706101 706869 708405 708789 710325 711093 712629 712821 714357 715125 716661 717045 718581 719349 720885 720909 722445 723213 724749 725133 726669 727437 728973 729165 730701 731469 733005 733389 734925 735693 737229 737325 738861 739629 741165 741549 743085 743853 745389 745581 747117 747885 749421 749805 751341 752109 753645 753693 755229 755997 757533 757917 759453 760221 761757 761949 763485 764253 765789 766173 767709 768477 770013 770109 771645 772413 773949 774333 775869 776637 778173 778365 779901 780669 782205 782589 784125 784893 786429 786435 787971 788739 790275 790659 792195 792963 794499 794691 796227 796995 798531 798915 800451 801219 802755 802851 804387 805155 806691 807075 808611 809379 810915 811107 812643 813411 814947 815331 816867 817635 819171 819219 820755 821523 823059 823443 824979 825747 827283 827475 829011 829779 831315 831699 833235 834003 835539 835635 837171 837939 839475 839859 841395 842163 843699 843891 845427 846195 847731 848115 849651 850419 851955 851979 853515 854283 855819 856203 857739 858507 860043 860235 861771 862539 864075 864459 865995 866763 868299 868395 869931 870699 872235 872619 874155 874923 876459 876651 878187 878955 880491 880875 882411 883179 884715 884763 886299 887067 888603 888987 890523 891291 892827 893019 894555 895323 896859 897243 898779 899547 901083 901179 902715 903483 905019 905403 906939 907707 909243 909435 910971 911739 913275 913659 915195 915963 917499 917511 919047 919815 921351 921735 923271 924039 925575 925767 927303 928071 929607 929991 931527 932295 933831 933927 935463 936231 937767 938151 939687 940455 941991 942183 943719 944487 946023 946407 947943 948711 950247 950295 951831 952599 954135 954519 956055 956823 958359 958551 960087 960855 962391 962775 964311 965079 966615 966711 968247 969015 970551 970935 972471 973239 974775 974967 976503 977271 978807 979191 980727 981495 983031 983055 984591 985359 986895 987279 988815 989583 991119 991311 992847 993615 995151 995535 997071 997839 999375 999471 1001007 1001775 1003311 1003695 1005231 1005999 1007535 1007727 1009263 1010031 1011567 1011951 1013487 1014255 1015791 1015839 1017375 1018143 1019679 1020063 1021599 1022367 1023903 1024095 1025631 1026399 1027935 1028319 1029855 1030623 1032159 1032255 1033791 1034559 1036095 1036479 1038015 1038783 1040319 1040511 1042047 1042815 1044351 1044735 1046271 1047039 1048575 1048577 1049601 1051137 1052161 1052929 1053953 1055489 1056513 1056897 1057921 1059457 1060481 1061249 1062273 1063809 1064833 1065025 1066049 1067585 1068609 1069377 1070401 1071937 1072961 1073345 1074369 1075905 1076929 1077697 1078721 1080257 1081281 1081377 1082401 1083937 1084961 1085729 1086753 1088289 1089313 1089697 1090721 1092257 1093281 1094049 1095073 1096609 1097633 1097825 1098849 1100385 1101409 1102177 1103201 1104737 1105761 1106145 1107169 1108705 1109729 1110497 1111521 1113057 1114081 1114129 1115153 1116689 1117713 1118481 1119505 1121041 1122065 1122449 1123473 1125009 1126033 1126801 1127825 1129361 1130385 1130577 1131601 1133137 1134161 1134929 1135953 1137489 1138513 1138897 1139921 1141457 1142481 1143249 1144273 1145809 
1146833 1146929 1147953 1149489 1150513 1151281 1152305 1153841 1154865 1155249 1156273 1157809 1158833 1159601 1160625 1162161 1163185 1163377 1164401 1165937 1166961 1167729 1168753 1170289 1171313 1171697 1172721 1174257 1175281 1176049 1177073 1178609 1179633 1179657 1180681 1182217 1183241 1184009 1185033 1186569 1187593 1187977 1189001 1190537 1191561 1192329 1193353 1194889 1195913 1196105 1197129 1198665 1199689 1200457 1201481 1203017 1204041 1204425 1205449 1206985 1208009 1208777 1209801 1211337 1212361 1212457 1213481 1215017 1216041 1216809 1217833 1219369 1220393 1220777 1221801 1223337 1224361 1225129 1226153 1227689 1228713 1228905 1229929 1231465 1232489 1233257 1234281 1235817 1236841 1237225 1238249 1239785 1240809 1241577 1242601 1244137 1245161 1245209 1246233 1247769 1248793 1249561 1250585 1252121 1253145 1253529 1254553 1256089 1257113 1257881 1258905 1260441 1261465 1261657 1262681 1264217 1265241 1266009 1267033 1268569 1269593 1269977 1271001 1272537 1273561 1274329 1275353 1276889 1277913 1278009 1279033 1280569 1281593 1282361 1283385 1284921 1285945 1286329 1287353 1288889 1289913 1290681 1291705 1293241 1294265 1294457 1295481 1297017 1298041 1298809 1299833 1301369 1302393 1302777 1303801 1305337 1306361 1307129 1308153 1309689 1310713 1310725 1311749 1313285 1314309 1315077 1316101 1317637 1318661 1319045 1320069 1321605 1322629 1323397 1324421 1325957 1326981 1327173 1328197 1329733 1330757 1331525 1332549 1334085 1335109 1335493 1336517 1338053 1339077 1339845 1340869 1342405 1343429 1343525 1344549 1346085 1347109 1347877 1348901 1350437 1351461 1351845 1352869 1354405 1355429 1356197 1357221 1358757 1359781 1359973 1360997 1362533 1363557 1364325 1365349 1366885 1367909 1368293 1369317 1370853 1371877 1372645 1373669 1375205 1376229 1376277 1377301 1378837 1379861 1380629 1381653 1383189 1384213 1384597 1385621 1387157 1388181 1388949 1389973 1391509 1392533 1392725 1393749 1395285 1396309 1397077 1398101 1399637 1400661 1401045 1402069 1403605 1404629 1405397 1406421 1407957 1408981 1409077 1410101 1411637 1412661 1413429 1414453 1415989 1417013 1417397 1418421 1419957 1420981 1421749 1422773 1424309 1425333 1425525 1426549 1428085 1429109 1429877 1430901 1432437 1433461 1433845 1434869 1436405 1437429 1438197 1439221 1440757 1441781 1441805 1442829 1444365 1445389 1446157 1447181 1448717 1449741 1450125 1451149 1452685 1453709 1454477 1455501 1457037 1458061 1458253 1459277 1460813 1461837 1462605 1463629 1465165 1466189 1466573 1467597 1469133 1470157 1470925 1471949 1473485 1474509 1474605 1475629 1477165 1478189 1478957 1479981 1481517 1482541 1482925 1483949 1485485 1486509 1487277 1488301 1489837 1490861 1491053 1492077 1493613 1494637 1495405 1496429 1497965 1498989 1499373 1500397 1501933 1502957 1503725 1504749 1506285 1507309 1507357 1508381 1509917 1510941 1511709 1512733 1514269 1515293 1515677 1516701 1518237 1519261 1520029 1521053 1522589 1523613 1523805 1524829 1526365 1527389 1528157 1529181 1530717 1531741 1532125 1533149 1534685 1535709 1536477 1537501 1539037 1540061 1540157 1541181 1542717 1543741 1544509 1545533 1547069 1548093 1548477 1549501 1551037 1552061 1552829 1553853 1555389 1556413 1556605 1557629 1559165 1560189 1560957 1561981 1563517 1564541 1564925 1565949 1567485 1568509 1569277 1570301 1571837 1572861 1572867 1573891 1575427 1576451 1577219 1578243 1579779 1580803 1581187 1582211 1583747 1584771 1585539 1586563 1588099 1589123 1589315 1590339 1591875 1592899 1593667 1594691 1596227 1597251 1597635 1598659 1600195 
1601219 1601987 1603011 1604547 1605571 1605667 1606691 1608227 1609251 1610019 1611043 1612579 1613603 1613987 1615011 1616547 1617571 1618339 1619363 1620899 1621923 1622115 1623139 1624675 1625699 1626467 1627491 1629027 1630051 1630435 1631459 1632995 1634019 1634787 1635811 1637347 1638371 1638419 1639443 1640979 1642003 1642771 1643795 1645331 1646355 1646739 1647763 1649299 1650323 1651091 1652115 1653651 1654675 1654867 1655891 1657427 1658451 1659219 1660243 1661779 1662803 1663187 1664211 1665747 1666771 1667539 1668563 1670099 1671123 1671219 1672243 1673779 1674803 1675571 1676595 1678131 1679155 1679539 1680563 1682099 1683123 1683891 1684915 1686451 1687475 1687667 1688691 1690227 1691251 1692019 1693043 1694579 1695603 1695987 1697011 1698547 1699571 1700339 1701363 1702899 1703923 1703947 1704971 1706507 1707531 1708299 1709323 1710859 1711883 1712267 1713291 1714827 1715851 1716619 1717643 1719179 1720203 1720395 1721419 1722955 1723979 1724747 1725771 1727307 1728331 1728715 1729739 1731275 1732299 1733067 1734091 1735627 1736651 1736747 1737771 1739307 1740331 1741099 1742123 1743659 1744683 1745067 1746091 1747627 1748651 1749419 1750443 1751979 1753003 1753195 1754219 1755755 1756779 1757547 1758571 1760107 1761131 1761515 1762539 1764075 1765099 1765867 1766891 1768427 1769451 1769499 1770523 1772059 1773083 1773851 1774875 1776411 1777435 1777819 1778843 1780379 1781403 1782171 1783195 1784731 1785755 1785947 1786971 1788507 1789531 1790299 1791323 1792859 1793883 1794267 1795291 1796827 1797851 1798619 1799643 1801179 1802203 1802299 1803323 1804859 1805883 1806651 1807675 1809211 1810235 1810619 1811643 1813179 1814203 1814971 1815995 1817531 1818555 1818747 1819771 1821307 1822331 1823099 1824123 1825659 1826683 1827067 1828091 1829627 1830651 1831419 1832443 1833979 1835003 1835015 1836039 1837575 1838599 1839367 1840391 1841927 1842951 1843335 1844359 1845895 1846919 1847687 1848711 1850247 1851271 1851463 1852487 1854023 1855047 1855815 1856839 1858375 1859399 1859783 1860807 1862343 1863367 1864135 1865159 1866695 1867719 1867815 1868839 1870375 1871399 1872167 1873191 1874727 1875751 1876135 1877159 1878695 1879719 1880487 1881511 1883047 1884071 1884263 1885287 1886823 1887847 1888615 1889639 1891175 1892199 1892583 1893607 1895143 1896167 1896935 1897959 1899495 1900519 1900567 1901591 1903127 1904151 1904919 1905943 1907479 1908503 1908887 1909911 1911447 1912471 1913239 1914263 1915799 1916823 1917015 1918039 1919575 1920599 1921367 1922391 1923927 1924951 1925335 1926359 1927895 1928919 1929687 1930711 1932247 1933271 1933367 1934391 1935927 1936951 1937719 1938743 1940279 1941303 1941687 1942711 1944247 1945271 1946039 1947063 1948599 1949623 1949815 1950839 1952375 1953399 1954167 1955191 1956727 1957751 1958135 1959159 1960695 1961719 1962487 1963511 1965047 1966071 1966095 1967119 1968655 1969679 1970447 1971471 1973007 1974031 1974415 1975439 1976975 1977999 1978767 1979791 1981327 1982351 1982543 1983567 1985103 1986127 1986895 1987919 1989455 1990479 1990863 1991887 1993423 1994447 1995215 1996239 1997775 1998799 1998895 1999919'''
arr=[int(x) for x in s.split()]
a, b, M=map(int, input().split())
if a==0: dem=1
else: dem=0
if M==2:
i=0
for i in arr:
if i>b: break
if i<=b and i>=a:dem+=1
elif M==3:
x=[1, 6643, 1422773]
i=0
for i in x:
if i>b: break
if i<=b and i>=a:dem+=1
else:
x=[1]
i=0
for i in x:
if i>b: break
if i<=b and i>=a:dem+=1
print(dem)
|
[
"nhuanbc.b19cn488@gmail.com"
] |
nhuanbc.b19cn488@gmail.com
|
010a6576a6d25c570dd63ee9354806d80522854d
|
a016c7a98f42adaff0c6df9eabccfc16ced31766
|
/Automatical report output weekly/X604.py
|
ab7354167fbffd934b434ed4ecb7e76d515ca6d6
|
[] |
no_license
|
zhoumengxiong/AutoOutputReport
|
b22bb094a28f498e20f9a07f8fe691b2029894bc
|
94159de7e6b80bcd724f63068446b7181524e789
|
refs/heads/master
| 2021-09-22T17:02:32.347520
| 2018-09-12T10:41:08
| 2018-09-12T10:41:08
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,157
|
py
|
# -*- coding: utf-8 -*-
import pymysql
from openpyxl import load_workbook
import datetime
from openpyxl.styles import Border, Side, Font, Alignment
def style_range(ws, cell_range, border, fill, font, alignment):
"""
Apply styles to a range of cells as if they were a single cell.
:param alignment: An openpyxl alignment object
:param ws: Excel worksheet instance
:param cell_range: An excel range to style (e.g. A1:F20)
:param border: An openpyxl Border
:param fill: An openpyxl PatternFill or GradientFill
:param font: An openpyxl Font object
"""
top = Border(top=border.top)
left = Border(left=border.left)
right = Border(right=border.right)
bottom = Border(bottom=border.bottom)
first_cell = ws[cell_range.split(":")[0]]
if alignment:
ws.merge_cells(cell_range)
first_cell.alignment = alignment
rows = ws[cell_range]
if font:
first_cell.font = font
for cell in rows[0]:
cell.border = cell.border + top
for cell in rows[-1]:
cell.border = cell.border + bottom
for row in rows:
l = row[0]
r = row[-1]
l.border = l.border + left
r.border = r.border + right
if fill:
for c in row:
c.fill = fill
def getNowYearWeek():
    # Work out which ISO week of the year the current time falls in.
time_now = datetime.datetime.now()
NowYearWeek = time_now.isocalendar()
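    # isocalendar() returns (ISO year, ISO week number, ISO weekday),
    # e.g. datetime.datetime(2018, 6, 7).isocalendar() == (2018, 23, 4).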
return NowYearWeek
def write_sale_act_fail(key1, value1):
args = (key1, last_wk, model)
args1 = (value1, last_wk1, model)
    # Query this week's pivot of failed units.
    cursor.execute(sql1, args)
    # Fetch everything.
    failure = cursor.fetchall()  # returns a 2-D tuple
    ws = wb[value1]
    # Match the failed units against their level-1 failure causes.
for i in range(len(failure)):
for r in range(40, 66):
if ws.cell(row=r, column=1).value == failure[i][0]:
ws.cell(row=r, column=col).value = failure[i][1]
    # Execute the SQL query.
    cursor.execute(sql2, args)
    act = cursor.fetchall()  # returns a 2-D tuple
    # Write this week's activation quantity.
ws.cell(row=37, column=col).value = act[0][0]
cursor.execute(sql3, args1)
    sale = cursor.fetchall()  # returns a 2-D tuple
    # Write the previous week's sales figure.
    ws.cell(row=23, column=col - 1).value = sale[0][0]
    # Clear the old pivot values (columns 28-29, rows 41-65).
for row in ws.iter_rows(min_row=41, min_col=28, max_col=29, max_row=65):
for cell in row:
cell.value = ''
    # Write this week's level-1 cause pivot table.
sum = 41
for i in range(len(failure)):
ws.cell(row=sum, column=28).value = failure[i][0]
ws.cell(row=sum, column=29).value = failure[i][1]
sum = sum + 1
def write_repair_rate(value1):
ws = wb[value1]
    # Compute and write the repair rate.
sale_sum = 0
fail_sum = 0
act_sum = 0
PCBA = 0
LCD = 0
TP = 0
BAT = 0
for m in range(4, col + 1):
        # Total units received for repair.
for r in range(40, 66):
if isinstance(ws.cell(row=r, column=m).value, int):
fail_sum = ws.cell(row=r, column=m).value + fail_sum
        if isinstance(ws.cell(row=23, column=m).value, int):  # total sales
            sale_sum = ws.cell(row=23, column=m).value + sale_sum
        if isinstance(ws.cell(row=37, column=m).value, int):  # total activations
            act_sum = ws.cell(row=37, column=m).value + act_sum
        if isinstance(ws.cell(row=41, column=m).value, int):  # total failed PCBA units
            PCBA = ws.cell(row=41, column=m).value + PCBA
        if isinstance(ws.cell(row=46, column=m).value, int):  # total failed LCD units
            LCD = ws.cell(row=46, column=m).value + LCD
        if isinstance(ws.cell(row=43, column=m).value, int):  # total failed TP units
            TP = ws.cell(row=43, column=m).value + TP
        if isinstance(ws.cell(row=45, column=m).value, int):  # total failed BAT units
            BAT = ws.cell(row=45, column=m).value + BAT
        # Compute the repair rate.
if sale_sum > 0:
ws.cell(row=22, column=m).value = ws.cell(row=66, column=m).value
if act_sum == 0:
ws.cell(row=21, column=m).value = fail_sum / sale_sum
else:
ws.cell(row=36, column=m).value = ws.cell(row=66, column=m).value
ws.cell(row=21, column=m).value = fail_sum / sale_sum
ws.cell(row=31, column=m).value = fail_sum / act_sum
ws.cell(row=32, column=m).value = PCBA / act_sum
ws.cell(row=33, column=m).value = LCD / act_sum
ws.cell(row=34, column=m).value = TP / act_sum
ws.cell(row=35, column=m).value = BAT / act_sum
    # Style settings for the merged cells.
cells = ['A20:A23', 'A25:A37', 'B20:C20', 'B21:C21', 'B22:C22', 'B23:C23', 'B25:C25', 'B36:B37',
'AB39:AD39', 'B26:B30', 'B31:B35']
thin = Side(border_style="thin", color="000000")
border = Border(top=thin, left=thin, right=thin, bottom=thin)
font = Font(b=False, color="000000", name="Segoe UI Semibold", size='9')
al = Alignment(horizontal="center", vertical="center")
for area in cells:
style_range(ws, area, border=border, fill=None, font=font, alignment=al)
# Week numbers used to query activation quantity and sales.
last_wk = str(18) + str(getNowYearWeek()[1] - 1)
last_wk1 = str(18) + str(getNowYearWeek()[1] - 2)
# Compute the column number for the current week.
# col=getNowYearWeek()[1]-1-16
model = "X604"
countries = {'Egypt': '埃及', 'Uganda': '乌干达', 'Cote dIvoire': '科特迪瓦', 'Kenya': '肯尼亚', 'Morocco': '摩洛哥',
'Pakistan': '巴基斯坦', 'Nigeria': '尼日利亚', 'Ghana': '加纳', 'Tanzania': '坦桑尼亚', 'Zambia': '赞比亚'}
config = {
    'host': "192.168.1.105",  # use this for a local setup
    'user': "Dream",  # your database account
    'password': "Dream123$",  # and the database password
    'db': "raw_data",  # database name
    'charset': 'utf8mb4'  # set this so Chinese text is not garbled when read
}
# Open the database connection.
db = pymysql.connect(**config)  # keyword-argument unpacking keeps this tidy
# Use the cursor() method to create a cursor object.
cursor = db.cursor()
# Build the SQL query statements.
sql1 = """select level1cause,count(*) as QTY from grandtotal where CountryName=%s and week=%s
and model=%s group by Level1Cause order by QTY DESC"""
sql2 = """select sum(qty) from activation_qty where country=%s and week=%s and model=%s"""
sql3 = """select sum(shipqty) from sale where SaleCoutry=%s and week=%s and model2=%s"""
path = r"F:\Transsion\Repair rate report\repair_rate_report\transition\week23 X604 repair rate(warranty)1.xlsx"
wb = load_workbook(filename=path)
# Compute the column number for the current week.
col = getNowYearWeek()[1] - 1 - (int(wb.active.cell(row=20, column=4).value[2:]) - 4)
# Write last week's failure matches and activations, and the sales of the week before last, into the workbook.
for key, value in countries.items():
write_sale_act_fail(key, value)
wb.save(filename=path)
wb = load_workbook(filename=path)
for value in countries.values():
write_repair_rate(value)
wb.save(filename=path)
|
[
"zhoumengxiong@outlook.com"
] |
zhoumengxiong@outlook.com
|
a54a1d9b4da5c754793e3a51b8280cbe710d82f8
|
af071b50e49cf1c89a7cdba43f1a92161600adab
|
/pingpong/admin.py
|
b4faacbe9445a9fe3e5c9aee56de3dd9c6d02c1e
|
[] |
no_license
|
astaric/pingpong
|
6674add96737e9589bd4e6cac4785d6e6f84c236
|
bd4909a0e9527caece38cd5b7870f3893797f45c
|
refs/heads/master
| 2021-01-21T19:35:55.798938
| 2018-11-16T14:59:58
| 2018-11-16T14:59:58
| 6,784,560
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,456
|
py
|
from django.contrib import admin
from .models import Player, Category, GroupMember, Group
class PlayerAdmin(admin.ModelAdmin):
list_display = ['full_name', 'category']
ordering = ['id']
list_filter = ['category']
class CategoryAdmin(admin.ModelAdmin):
ordering = ('gender', 'min_age')
prepopulated_fields = {"name": ("gender", "min_age", "max_age")}
class GroupMemberAdmin(admin.ModelAdmin):
list_display = ('player', 'group', 'place', 'leader')
list_editable = ['place']
list_filter = ['group']
ordering = ('group', 'place', '-leader', 'player__surname')
def queryset(self, request):
return self.model.objects.exclude(group=None)
def __init__(self, *args, **kwargs):
super(GroupMemberAdmin, self).__init__(*args, **kwargs)
self.list_display_links = (None, )
class GroupMemberInline(admin.TabularInline):
model = GroupMember
fields = ('player', 'leader', 'place')
#readonly_fields = ('player', 'leader')
extra = 0
def queryset(self, request):
return self.model.objects.order_by('group', 'place', '-leader', 'player__surname')
class GroupAdmin(admin.ModelAdmin):
ordering = ('category', 'id')
inlines = (
GroupMemberInline,
)
class Media:
css = {'all': ('css/hide_admin_original.css',)}
admin.site.register(Player, PlayerAdmin)
admin.site.register(Category, CategoryAdmin)
admin.site.register(Group, GroupAdmin)
|
[
"anze.staric@gmail.com"
] |
anze.staric@gmail.com
|
b660bfe2f136e27d1264ed5c5751e3353662e6eb
|
4513c7ab29ee4379ee22ba3c8d244ae6eb8207dc
|
/tastmate/settings.py
|
5afc4f22b5b1568d76f44ec39f2dc0d1f3253eca
|
[] |
no_license
|
duyanh249/taskmate
|
70e83a195af730cd15163ed4b4aaf2f7c67126a9
|
529d91a3368c2523c621705544cc366a2c63713b
|
refs/heads/master
| 2023-07-14T22:27:20.493137
| 2021-08-10T14:24:33
| 2021-08-10T14:24:33
| 393,057,788
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,686
|
py
|
"""
Django settings for tastmate project.
Generated by 'django-admin startproject' using Django 3.2.5.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
import os
import environ
import django_heroku
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
env=environ.Env(SECRET_KEY=str)
environ.Env.read_env(os.path.join(BASE_DIR, '.env'))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = env('DJANGO_SECRET_KEY')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env('DJANGO_DEBUG')
ALLOWED_HOSTS = [env('DJANGO_ALLOWED_HOSTS')]
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'todolist_app',
'users_app',
'crispy_forms',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'tastmate.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [(os.path.join(BASE_DIR, 'templates')),],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'tastmate.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_ROOT=os.path.join(BASE_DIR, 'staticfiles')
STATIC_URL = '/static/'
STATICFILES_DIRS=[os.path.join(BASE_DIR, 'static')]
CRISPY_TEMPLATE_PACK='bootstrap4'
LOGIN_REDIRECT_URL='todolist'
LOGIN_URL='login'
django_heroku.settings(locals())
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
|
[
"duyanhbmehust@gmail.com"
] |
duyanhbmehust@gmail.com
|
a48974d41c1667c0b092f366d4efcc8a8d480fcd
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_248/ch119_2020_03_30_20_53_23_088219.py
|
f49b4da0a032fcc39a7720976214d6d9206f89d1
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628
| 2020-12-16T05:21:31
| 2020-12-16T05:21:31
| 306,735,108
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 165
|
py
|
# Note: the original snippet was incomplete (lista, x and n were never defined and
# "lista[0]*n" had no effect); this reconstruction assumes the goal is approximating
# e**x with the first n terms of its Taylor series, where term[t+1] = term[t]*x/(t+1).
x, n = 1, 10  # hypothetical example inputs
lista = [0]*(n+1)
lista[0] = 1
t = 0
while t < n:
    lista[t+1] = lista[t]*x/(t+1)
    t += 1
def calcula_euler(lista, n):
    soma_das_notas = sum(lista)
    print(soma_das_notas)
calcula_euler(lista, n)  # call added so the snippet actually prints the result
|
[
"you@example.com"
] |
you@example.com
|
fc17c5d9f4350ec9d4472375aea8d04b216e0ed2
|
4eee308593cb45abdfedecb3c80438584504cfed
|
/trainerbid/trainer/views.py
|
5b3fc9ef17ac3b580db1810124e186237d388ea7
|
[] |
no_license
|
sikha-jayanth/Trainer-Bidding
|
46ffb94f1af1a83f322e2b7cf1ff167e6c7150ee
|
fe43e6e9781d0da51a2805b7fbfb7b1dbb9b1af5
|
refs/heads/main
| 2023-01-21T01:13:38.866317
| 2020-11-30T22:16:30
| 2020-11-30T22:16:30
| 317,160,150
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,793
|
py
|
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.models import User
from django.shortcuts import render, redirect
from trainer.forms import RegistrationForm, PersonProfileForm, ApplicationForm, FilterApplicationForm
from django.contrib import messages
from institute.models import Requirements
from trainer.models import Application
from django.contrib.auth.decorators import login_required
from django.forms import forms
# Create your views here.
from trainer.models import PersonProfile
def trainerRegistration(request):
form = RegistrationForm()
context = {}
context["form"] = form
if request.method == 'POST':
form = RegistrationForm(request.POST)
if form.is_valid():
form.save()
return redirect("login")
else:
context["form"] = form
return render(request, "trainer/registration.html", context)
return render(request, "trainer/registration.html", context)
def trainerLogin(request):
if request.method == 'POST':
uname = request.POST.get('uname')
pwd = request.POST.get('pwd')
user = authenticate(request, username=uname, password=pwd)
if user is not None:
login(request, user)
return redirect("trainerhome")
else:
messages.info(request, 'invalid credentials!')
return render(request, "trainer/login.html")
return render(request, "trainer/login.html")
@login_required(login_url='login')
def trainerHome(request):
return render(request, 'trainer/trainerhome.html')
@login_required(login_url='login')
def trainerLogout(request):
logout(request)
return redirect("login")
@login_required(login_url='login')
def trainerProfile(request):
context = {}
user = User.objects.get(username=request.user)
fname = user.first_name
lname = user.last_name
fullname = fname + " " + lname
email = user.email
form = PersonProfileForm(initial={'user': request.user, 'name': fullname, 'email': email})
context["form"] = form
if request.method == 'POST':
form = PersonProfileForm(request.POST)
if form.is_valid():
form.save()
return redirect("viewprofile")
else:
context["form"] = form
return render(request, "trainer/createprofile.html", context)
return render(request, "trainer/createprofile.html", context)
@login_required(login_url='login')
def viewProfile(request):
profile = PersonProfile.objects.get(user=request.user)
context = {}
context["profile"] = profile
return render(request, "trainer/viewprofile.html", context)
@login_required(login_url='login')
def updateProfile(request):
profile = PersonProfile.objects.get(user=request.user)
form = PersonProfileForm(instance=profile)
context = {}
context["form"] = form
if request.method == 'POST':
form = PersonProfileForm(instance=profile, data=request.POST)
if form.is_valid():
form.save()
return redirect("viewprofile")
else:
context["form"] = form
return render(request, "trainer/updateprofile.html", context)
return render(request, "trainer/updateprofile.html", context)
@login_required(login_url='login')
def matchingJobs(request):
context = {}
profile = PersonProfile.objects.get(user=request.user)
skill = profile.skill
requirements = Requirements.objects.filter(skill_needed=skill)
context["requirements"] = requirements
return render(request, "trainer/listjobs.html", context)
@login_required(login_url='login')
def applyJob(request, pk):
context = {}
profile = PersonProfile.objects.get(user=request.user)
job = Requirements.objects.get(id=pk)
form = ApplicationForm(
initial={'jobid': job.jobid, 'job_title': job.job_title, 'location': job.location, 'user': request.user,
'name': profile.name,
'skill': profile.skill, 'years_of_experience': profile.years_of_experience,
'qualification': profile.qualification, 'cgpa': profile.cgpa, 'email': profile.email,
'phone': profile.phone})
context["form"] = form
if request.method == 'POST':
form = ApplicationForm(request.POST)
if form.is_valid():
form.save()
return render(request, "trainer/msgapplied.html")
else:
context["form"] = form
return render(request, "trainer/applyjob.html", context)
return render(request, "trainer/applyjob.html", context)
@login_required(login_url='login')
def viewApplications(request):
context = {}
form = FilterApplicationForm()
context["form"] = form
queryset = Application.objects.filter(user=request.user)
count = queryset.count()
context["count"] = count
context["applications"] = queryset
return render(request, "trainer/viewapplications.html", context)
@login_required(login_url='login')
def filterApplications(request):
context = {}
form = FilterApplicationForm()
context["form"] = form
if request.method == 'POST':
form = FilterApplicationForm(request.POST)
if form.is_valid():
status = form.cleaned_data['status']
queryset = Application.objects.filter(status=status, user=request.user)
count = queryset.count()
context["applications"] = queryset
context["count"] = count
return render(request, "trainer/viewapplications.html", context)
else:
context["form"] = form
return render(request, "trainer/viewapplications.html", context)
return render(request, "trainer/viewapplications.html", context)
|
[
"sikha.c.jayanth@gmail.com"
] |
sikha.c.jayanth@gmail.com
|
9d93842fa5abd98b0fd609d5f9ab9f8f4e3ae9cc
|
1dde2ae19e8c50a6c49ac1929ef4f0d7948fc47b
|
/recursion and dynamic programming/39_combination_sum.py
|
87ddd881f9b13f2b5f26c8dbc83fe72f9cbab57a
|
[] |
no_license
|
ryanSoftwareEngineer/algorithms
|
694786e970e99120bdd94ca5722ad91aed846589
|
9dbff6f4a40ba99c7310966e63f66e14174bdb61
|
refs/heads/master
| 2023-04-21T10:09:30.683266
| 2021-05-13T02:55:30
| 2021-05-13T02:55:30
| 330,536,908
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,191
|
py
|
'''
Given an array of distinct integers candidates and a target integer target, return a list of all unique combinations of candidates where the chosen numbers sum to target. You may return the combinations in any order.
The same number may be chosen from candidates an unlimited number of times. Two combinations are unique if the frequency of at least one of the chosen numbers is different.
It is guaranteed that the number of unique combinations that sum up to target is less than 150 combinations for the given input.
'''
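# Worked example: candidates = [2, 5, 7], target = 12. Starting at index 0 the
# recursion keeps reusing 2 until the remaining target hits 0 ([2,2,2,2,2,2]) or
# goes negative, then backtracks to try 5 and 7, which yields [2,5,5] and [5,7].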
# recursive
class Solution(object):
def combinationSum(self, candidates, target):
return self.__sum_util(candidates, [], 0, target, [])
def __sum_util(self, nums, temp_arr, index, target, result):
if target <0:
return
if target == 0:
result.append(temp_arr[:])
return
for i in range(index, len(nums)):
temp_arr.append(nums[i])
self.__sum_util(nums, temp_arr, i, target-nums[i], result)
temp_arr.pop()
return result
cand = [2, 5, 7]
a = Solution()
b= a.combinationSum(cand, 12)
print(b)
'''
output: [[2, 2, 2, 2, 2, 2], [2, 5, 5], [5, 7]]
'''
|
[
"ryansoftwareengineer@gmail.com"
] |
ryansoftwareengineer@gmail.com
|
57b176a71b273a1c9636c541ba74fd7a62612b4b
|
ebd5c4632bb5f85c9e3311fd70f6f1bf92fae53f
|
/PORMain/panda/direct/extensions/NurbsCurveEvaluator-extensions.py
|
86eb757f5c5e803e4e77288108ddd26264177ebb
|
[
"BSD-3-Clause",
"BSD-2-Clause"
] |
permissive
|
BrandonAlex/Pirates-Online-Retribution
|
7f881a64ec74e595aaf62e78a39375d2d51f4d2e
|
980b7448f798e255eecfb6bd2ebb67b299b27dd7
|
refs/heads/master
| 2020-04-02T14:22:28.626453
| 2018-10-24T15:33:17
| 2018-10-24T15:33:17
| 154,521,816
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 799
|
py
|
"""
NurbsCurveEvaluator-extensions module: contains methods to extend
functionality of the NurbsCurveEvaluator class
"""
def getKnots(self):
"""Returns the knot vector as a Python list of floats"""
knots = []
for i in xrange(self.getNumKnots()):
knots.append(self.getKnot(i))
return knots
def getVertices(self, relTo = None):
"""Returns the vertices as a Python list of Vec4's, relative
to the indicated space if given."""
verts = []
if relTo:
for i in xrange(self.getNumVertices()):
verts.append(self.getVertex(i, relTo))
else:
for i in xrange(self.getNumVertices()):
verts.append(self.getVertex(i))
return verts
|
[
"brandoncarden12345@gmail.com"
] |
brandoncarden12345@gmail.com
|
7fa0f27926cbf57b538b600e2206d0b2a2b8f3ea
|
e4de6ed06a591368ab61f1c2f196e9ea6a67db8c
|
/server/programmes/tests.py
|
a08fc5df86ea0ab11c8d2441bbc9dda20723dabe
|
[] |
no_license
|
TimMcKenzieNZ/PRP
|
f638e8c22d79ea57bca0e4600b550c7123070cd5
|
0543d92ab45586c45f8954368236b138ce67dd80
|
refs/heads/master
| 2020-04-28T02:16:33.384289
| 2019-03-10T23:20:05
| 2019-03-10T23:20:05
| 174,892,362
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,301
|
py
|
from django.test import TestCase, Client
from django.core.exceptions import ValidationError
import datetime
from contextlib import contextmanager
from .modelAttributeValidators import validate_date_not_in_past, validate_progress, validate_date_not_in_future, validate_duration_is_positive
from .models import Project, Programme, Deliverable, Update, Initiative, TeamMember
from .utils import generate_update_log
class ValidationErrorTestMixin(object):
@contextmanager
def assertValidationErrors(self, fields):
"""
Asserts a Validation Error is raised containing all (and only) the specified fields
"""
try:
yield
raise AssertionError("ValidationError not raised") # We are expecting a Validation Error!
except ValidationError as e:
self.assertEqual(set(fields), set(e.message_dict.keys())) # check all given fields are in error
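# Hypothetical, standalone illustration (not part of the original suite) of the
# check the mixin performs: collect the field names attached to a ValidationError
# and compare them as sets, so that both missing and unexpected fields would fail.
def _fields_raising_validation_error(clean_callable):
    """Return the set of field names rejected by a full_clean()-style callable."""
    try:
        clean_callable()
    except ValidationError as e:
        return set(e.message_dict.keys())
    return set()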
class ProjectValidationTests(ValidationErrorTestMixin, TestCase):
# This test raises Integrity errors if you try to save the project
def test_project_essential_fields_are_not_none(self):
"""
Tests if validation errors are thrown for undefined fields
"""
p = Project(name="Witch-blair", description="Handcam shinanigans", cost=10)
with self.assertValidationErrors(['end_date', 'progress', 'image', 'priority', 'start_date']):
p.full_clean()
class ValidatorTests(TestCase):
    def test_validate_progress_edge_case_1(self):
"""
Should not error for values >= 0 and <= 1
"""
error_thrown = False
try:
validate_progress(-0.0)
except ValidationError:
error_thrown = True
self.assertFalse(error_thrown)
    def test_validate_progress_edge_case_2(self):
        """
        Should raise a ValidationError for values greater than 1
        """
with self.assertRaises(ValidationError) as e:
validate_progress(1.00000000000001)
self.assertEqual(e.exception.message, 'Progress 1.00000000000001 must be between 0 and 1 (inclusive)')
    def test_validate_progress_edge_case_3(self):
        """
        Should raise a ValidationError for values less than 0
        """
with self.assertRaises(ValidationError) as e:
validate_progress(-0.00000000000000000000000001)
self.assertEqual(e.exception.message, 'Progress -1e-26 must be between 0 and 1 (inclusive)')
def test_validate_date_not_in_past(self):
"""
Checking edge case when dates are the same
"""
date = datetime.datetime.today().date()
error_thrown = False
try:
validate_date_not_in_past(date)
except ValidationError:
error_thrown = True
self.assertFalse(error_thrown)
def test_validate_date_not_in_future(self):
"""
Checking edge case when dates are the same
"""
date = datetime.datetime.today().date()
error_thrown = False
try:
validate_date_not_in_future(date)
except ValidationError:
error_thrown = True
self.assertFalse(error_thrown)
def test_validate_duration_is_positive(self):
"""
        Checking the edge case where start and end dates are the same (zero duration)
"""
date = datetime.datetime.today().date()
p = Programme(name="TV", description="Television", vision="the future", image="nope", start_date=date, end_date=date, slug="TV")
error_thrown = False
try:
validate_duration_is_positive(Programme, p)
except ValidationError:
error_thrown = True
self.assertFalse(error_thrown)
def test_generate_update_log_blue_sky(self):
"""
Checking if an update object is created as expected when an existing deliverable is updated
"""
date = datetime.datetime.today().date()
t = TeamMember(email="this@this.com", position='tester obviously', image ="nope", contact_number=1)
t.save()
i = Initiative(name='Init', description="desc", start_date=date,
end_date=date, progress=0, status='Not Started', adapter_id=1, adapter_ref=1, order=1)
i.save()
initiative = Initiative.objects.get(pk=1)
d = Deliverable(name="del", description="desc", end_date=date, progress=0, team_impact='none', pm_impact='none',
sponsor_impact='none', order=1, status='Not Started', status_message='', initiative=initiative)
d.author=TeamMember.objects.get(pk=1)
d.log="testing"
d.save()
# An update is created at deliverable creation
update_num = Update.objects.all().count()
self.assertEqual(update_num, 1)
        # Every time the deliverable is saved, an update is created
generate_update_log(Deliverable, d)
update_num = Update.objects.all().count()
self.assertEqual(update_num, 2)
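# Hypothetical sketch of validate_progress, reconstructed only from the edge-case
# tests above; the real validator lives in modelAttributeValidators.py and may
# differ in detail. The name is altered here so it cannot shadow the import used
# by the tests.
def _validate_progress_sketch(value):
    if not (0 <= value <= 1):
        raise ValidationError(
            'Progress {0} must be between 0 and 1 (inclusive)'.format(value))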
class APITests(TestCase):
"""Tests the server API"""
def setUp(self):
self.c = Client()
def test_search_programmes(self):
response = self.c.get('/api/programmes')
self.assertEquals(response.status_code, 200)
def test_post_programme_should_fail(self):
data = {
"data": {
"type": "programme",
"attributes": {
"name": "name",
"description": "desc",
"vision": "vision",
"image": "nope",
"start_date": "2018-04-04",
"end_date": "2018-04-04",
"slug": "slug",
"members": []
}
}
}
response = self.c.post('/api/programmes', data)
self.assertEquals(response.status_code, 405) # 405: Method not supported by resource
# def test_deliverable_update(self):
# data = {
# "data": {
# "type": "deliverable",
# "attributes": {
# "sponsor_impact": "moderate",
# "team_impact": "moderate",
# "pm_impact": "moderate"
# }
# }
# }
# response = self.c.put('/api/deliverables/1', data)
# self.assertEquals(response.status_code, 201)
# def test_post_update(self):
# data = {
# "data": {
# "type": "updates",
# "attributes": {
# "description": "Testing.",
# "date": "2018-04-04",
# "log": "Sponser impact changed from 'None' to 'High'. PM impact changed from 'None' to 'Moderate'.",
# "author": "http://localhost:8000/api/team_members/1",
# "deliverable": "http://localhost:8000/api/deliverables/14"
# }
# }
# }
# response = self.c.post('/api/updates', data)
# print(response)
# self.assertEquals(response.status_code, 200)
|
[
"tam94@uclive.ac.nz"
] |
tam94@uclive.ac.nz
|