| blob_id (string, 40 chars) | directory_id (string, 40 chars) | path (string, 2-616 chars) | content_id (string, 40 chars) | detected_licenses (list, 0-69 items) | license_type (string, 2 classes) | repo_name (string, 5-118 chars) | snapshot_id (string, 40 chars) | revision_id (string, 40 chars) | branch_name (string, 4-63 chars) | visit_date (timestamp[us]) | revision_date (timestamp[us]) | committer_date (timestamp[us]) | github_id (int64, 2.91k-686M, nullable) | star_events_count (int64, 0-209k) | fork_events_count (int64, 0-110k) | gha_license_id (string, 23 classes) | gha_event_created_at (timestamp[us]) | gha_created_at (timestamp[us]) | gha_language (string, 213 classes) | src_encoding (string, 30 classes) | language (string, 1 class) | is_vendor (bool) | is_generated (bool) | length_bytes (int64, 2-10.3M) | extension (string, 246 classes) | content (string, 2-10.3M chars) | authors (list, 1 item) | author_id (string, 0-212 chars) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
49f0d8598d3691d682ad5d073f674ee884af6210
|
7d80898ba7fb3f1336e5344e251a62df46a5f3bb
|
/easy_thumbnails/tests/mockoptim.py
|
7e164e30c5eff1d58ab00daf219d88a118af94c8
|
[] |
permissive
|
flyingelephantlab/easy-thumbnails
|
dffa9b87138799a7796d3ff1e5e34670eaf32ed2
|
5f59b9f66797f6c54c241e5910748562f59d0336
|
refs/heads/master
| 2021-05-20T17:01:59.951758
| 2014-07-09T13:00:57
| 2014-07-09T13:00:57
| 17,428,489
| 0
| 0
|
BSD-3-Clause
| 2020-09-21T10:04:40
| 2014-03-05T04:59:57
|
Python
|
UTF-8
|
Python
| false
| false
| 475
|
py
|
#!/usr/bin/env python
import sys
# This file does nothing except to open the file as specified on the command line, reading it
# into a buffer and writing the same content back to the file.
# This script is used to simulate the optimization of an image file without actually doing it.
if len(sys.argv) < 2:
raise Exception('Missing filename')
with open(sys.argv[1], 'rb') as reader:
buf = reader.read()
with open(sys.argv[1], 'wb') as writer:
writer.write(buf)
|
[
"jacob.rief@gmail.com"
] |
jacob.rief@gmail.com
|
d79f5662a154f40d78134256625e3c848768ae52
|
32da5ff379796b0a97ee7885a5186277b434dd66
|
/app/repositories/product.py
|
4588675eacae10aa336defc107ef141f99079d0f
|
[
"MIT"
] |
permissive
|
VadymHutei/ukubuka-back
|
0f11fa061b271530ca357e7cf6b84fa374af97b5
|
acd56c545b50fb65ed764c19bdd03a42be969ce4
|
refs/heads/master
| 2021-06-24T19:00:57.212722
| 2020-03-04T07:20:08
| 2020-03-04T07:20:08
| 219,042,002
| 0
| 0
|
MIT
| 2021-03-20T02:07:44
| 2019-11-01T18:34:38
|
Python
|
UTF-8
|
Python
| false
| false
| 1,184
|
py
|
from core import Repository
from core.sql_query_builder import SelectQuery
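# Read-side repository for products: builds a SELECT via the query builder and, when a
# language is supplied, joins the translated name/description rows before executing.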
class ProductRepo(Repository):
def getProducts(self, params):
query = SelectQuery()
query.fields(
'p.id',
'p.category_id',
'p.model',
'p.added',
'p.is_active',
'pt.name',
'pt.description',
)
query.table(('products', 'p'))
if 'is_active' in params:
query.where(('p.is_active', '=', params['is_active']))
if 'category_id' in params:
query.whereIn('p.category_id', params['category_id'])
if 'language' in params:
query.leftJoin(
('products_text', 'pt'),
('pt.product_id', '=', 'p.id')
)
query.where(('pt.language', '=', params['language']))
query_string = query.render()
connection = self._getConnection()
try:
with connection.cursor() as cursor:
cursor.execute(query_string)
products_result = cursor.fetchall()
return products_result
finally:
connection.close()
|
[
"hutei@live.com"
] |
hutei@live.com
|
7f17a2d70426f2080efb8edbe9d33f06a4593db6
|
ce76b3ef70b885d7c354b6ddb8447d111548e0f1
|
/take_point_from_few_problem/eye/bad_child.py
|
33037f5511eb235421a6bebd11f83729b53ba0da
|
[] |
no_license
|
JingkaiTang/github-play
|
9bdca4115eee94a7b5e4ae9d3d6052514729ff21
|
51b550425a91a97480714fe9bc63cb5112f6f729
|
refs/heads/master
| 2021-01-20T20:18:21.249162
| 2016-08-19T07:20:12
| 2016-08-19T07:20:12
| 60,834,519
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 243
|
py
|
#! /usr/bin/env python
def find_good_man_on_important_life(str_arg):
important_work(str_arg)
print('year')
def important_work(str_arg):
print(str_arg)
if __name__ == '__main__':
find_good_man_on_important_life('same_time')
|
[
"jingkaitang@gmail.com"
] |
jingkaitang@gmail.com
|
4193669d59d702301c8a680af941436e487d83a2
|
d668209e9951d249020765c011a836f193004c01
|
/tools/pnnx/tests/test_F_silu.py
|
21a124a8e92bd41fafa2a6f67e6d6904f4780e51
|
[
"BSD-3-Clause",
"Zlib",
"BSD-2-Clause"
] |
permissive
|
Tencent/ncnn
|
d8371746c00439304c279041647362a723330a79
|
14b000d2b739bd0f169a9ccfeb042da06fa0a84a
|
refs/heads/master
| 2023-08-31T14:04:36.635201
| 2023-08-31T04:19:23
| 2023-08-31T04:19:23
| 95,879,426
| 18,818
| 4,491
|
NOASSERTION
| 2023-09-14T15:44:56
| 2017-06-30T10:55:37
|
C++
|
UTF-8
|
Python
| false
| false
| 1,826
|
py
|
# Tencent is pleased to support the open source community by making ncnn available.
#
# Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved.
#
# Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
import torch
import torch.nn as nn
import torch.nn.functional as F
def silu_forward_0(x):
return x * torch.sigmoid(x)
class Model(nn.Module):
def __init__(self):
super(Model, self).__init__()
def forward(self, x, y, z, w):
x = F.silu(x)
y = F.silu(y)
z = F.silu(z)
w = silu_forward_0(w)
return x, y, z, w
def test():
net = Model()
net.eval()
torch.manual_seed(0)
x = torch.rand(1, 16)
y = torch.rand(12, 2, 16)
z = torch.rand(1, 3, 12, 16)
w = torch.rand(1, 5, 7, 9, 11)
a = net(x, y, z, w)
# export torchscript
mod = torch.jit.trace(net, (x, y, z, w))
mod.save("test_F_silu.pt")
# torchscript to pnnx
import os
os.system("../src/pnnx test_F_silu.pt inputshape=[1,16],[12,2,16],[1,3,12,16],[1,5,7,9,11]")
# pnnx inference
import test_F_silu_pnnx
b = test_F_silu_pnnx.test_inference()
for a0, b0 in zip(a, b):
if not torch.allclose(a0, b0, 1e-4, 1e-4):
return False
return True
if __name__ == "__main__":
if test():
exit(0)
else:
exit(1)
|
[
"noreply@github.com"
] |
Tencent.noreply@github.com
|
51b14d817462d85e0370586302393e45bb08c73c
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03167/s988207480.py
|
4a82be30d78cd2283ea05f9de6e152cb9db37709
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 514
|
py
|
import queue
INF=10**8
R=10**9+7
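# Count the right/down paths from the top-left to the bottom-right cell of an h x w grid,
# skipping '#' cells; the result is printed modulo 10**9 + 7.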
h,w=map(int,input().split())
maze=[input() for _ in range(h)]
d=[[0 for i in range(h)] for _ in range(w)]
sx=0
sy=0
gx=w-1
gy=h-1
q=queue.Queue()
dx=[1,0]
dy=[0,1]
q.put((sx,sy))
d[sy][sx]=1
while not q.empty():
x,y=q.get()
if (x,y)==(gx,gy):break
for i in range(2):
nx=x+dx[i]
ny=y+dy[i]
if nx<w and ny<h:
if maze[ny][nx]=='.':
if d[nx][ny]==0:
q.put((nx,ny))
d[nx][ny]=d[x][y]
else:d[nx][ny]+=d[x][y]
print(d[gx][gy]%R)
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
b84d858f4aa17600b5469867581765b7f8caccc6
|
a4bc88fe6c64f1a50822c034b015f0e0cb20e475
|
/bin/pip2.7
|
ccf21c7810645f04550b8bd35f744a8377e78e73
|
[] |
no_license
|
ptascio/DORIS
|
aa7e23478fbb870dc14da2eb1a12232d9a1092ad
|
77fc73a43328dbdcd609ba9a217c2b79cc9025ce
|
refs/heads/master
| 2021-01-23T01:29:38.262934
| 2017-06-10T23:59:09
| 2017-06-10T23:59:09
| 92,875,571
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 233
|
7
|
#!/home/ubuntu/workspace/flask_todo/bin/python
# -*- coding: utf-8 -*-
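# Console-script shim generated for this virtualenv: normalize argv[0] and hand control to pip's main().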
import re
import sys
from pip import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
|
[
"Administrator@Administrators-MacBook-Pro-3.local"
] |
Administrator@Administrators-MacBook-Pro-3.local
|
92420392817d87ef383e175120625ac406cbd18b
|
d6971cc1772d972b4f52c1130ac0d6b3a62040f0
|
/test.py
|
bd1447e244264a73722958f4ac0f21e6164180b1
|
[] |
no_license
|
robertcv/PyRecommender
|
08f7bc3ae86da7ccd9f1c2526d0882eb2bea9aa0
|
615f33ed9cc1c6c5195d7027e789e7bd543bc857
|
refs/heads/master
| 2021-08-23T20:54:33.949934
| 2017-12-06T13:59:53
| 2017-12-06T13:59:53
| 76,566,346
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,114
|
py
|
from Recommender import Recommender
r = Recommender()
r.parseMovieDB("db/movies.dat", "db/user_ratedmovies-timestamps.dat")
# print('____________________________________________________________')
# r.averagePrediction(100)
# print('____________________________________________________________')
# r.averagePrediction(100, user='1')
# print('____________________________________________________________')
# r.ItemBasedPredictionFit(10, 0.3)
# print('____________________________________________________________')
# r.ItemBasedRecommendUser('75', n=10)
# print('____________________________________________________________')
# r.ItemBasedBest(n=5)
# print('____________________________________________________________')
# r.UserBasedPredictionFit(10, 0.3)
# print('____________________________________________________________')
# r.UserBasedBest(n=5)
# print('____________________________________________________________')
# r.UserBasedRecommendUser('6393', n=10) #6393, 54767
# print('____________________________________________________________')
# r.SlopeOnePredictionFit()
# print('____________________________________________________________')
# r.SlopeOneRecommendUser('6393', n=10)
# print('____________________________________________________________')
# r.Evaluet(10, 0.3)
# print('____________________________________________________________')
# r.HybridPredictionFit(10, 0.3)
# print('____________________________________________________________')
# r.HybridRecommendUser('6393', n=10)
# print('____________________________________________________________')
r.MatrixFactorizationFit()
# print('____________________________________________________________')
# r.MatrixFactorizationRecommendUser('6393')
# print('____________________________________________________________')
r.MatrikFactorizationGraph()
# print('____________________________________________________________')
# r.parseTageDB("db/movie_tags.dat", "db/tags.dat")
# print('____________________________________________________________')
# r.NaiveBayes('6393', 2, n=20)
# print('____________________________________________________________')
|
[
"robert.cvitkovic@gmail.com"
] |
robert.cvitkovic@gmail.com
|
e827dd67a53f765be086d6b0c2fc985cb7c26b89
|
09f54a4efddeadaad050c7834b7d0049ccd28442
|
/LSTM_trainer.py
|
07add9ef03b415b59a309e37a5e30f2bce96c575
|
[] |
no_license
|
pimklaassen/GeoLSTM
|
7b798a33a3576023bd743d8210c700bb6af28f80
|
bf0872c9f4e5cdfbbf2817971f2ae9c87a21089c
|
refs/heads/master
| 2020-06-11T08:03:44.384341
| 2019-10-22T12:43:10
| 2019-10-22T12:43:10
| 193,899,840
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,843
|
py
|
import matplotlib.pyplot as plt
import numpy as np
import sys
import os
import seaborn as sns
from keras.models import Model
from keras.layers import LSTM, Input, RepeatVector, Masking, Dropout, Dense
from keras.backend.tensorflow_backend import set_session
from keras.utils import to_categorical
from tensorflow import Session, ConfigProto
from random import shuffle
from sklearn.metrics import confusion_matrix
from keras import optimizers
set_session(Session(config=ConfigProto(device_count={'GPU': 0})))
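# Read one label per line from labels.txt ('<index>: <label>'); 'NULL' labels and samples
# with empty feature arrays are filtered out below before training.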
labels = []
with open('../Labeled/labels.txt', 'r') as fh:
for line in fh.readlines():
label = line.split(': ')[1].strip()
if label == 'NULL':
labels.append('NULL')
else:
label = int(label) - 1
labels.append(label)
samples = []
for i in range(len(labels) - 1):
sample = np.load('../Labeled/features_{}.npy'.format(i))
if labels[i] == 'NULL':
continue
if len(sample) == 0:
labels[i] = 'NULL'
continue
samples.append(sample)
labels = filter(lambda a: a != 'NULL', labels)
temp = list(zip(samples, labels))
shuffle(temp)
samples, labels = zip(*temp)
labels = np.array(labels)
labels = to_categorical(labels)
def pad_sequences(sequences):
# set dimensions
no_sequences = len(sequences)
timesteps = (max(len(_) for _ in sequences))
features = len(sequences[0][0])
# create template
template = np.zeros((no_sequences, timesteps, features))
# fill in template
for i, sequence in enumerate(sequences):
template[i, :len(sequence), :] = sequence
return template
samples = pad_sequences(samples)
no_samples, timesteps, features = samples.shape
validate_x = samples[:200]
validate_y = labels[:200]
train_x = samples[200:]
train_y = labels[200:]
inputs = Input(shape=(timesteps, features))
mask = Masking(mask_value=0.0)(inputs)
output = LSTM(3, activation='sigmoid')(mask)
LSTM_model = Model(inputs, output)
optimizer = optimizers.Adam()
LSTM_model.compile(optimizer=optimizer, loss='categorical_crossentropy', metrics=['accuracy'])
# fit model
history = LSTM_model.fit(train_x, train_y, epochs=100, batch_size=256, validation_data=(validate_x, validate_y), verbose=2)
prediction = LSTM_model.predict(validate_x)
prediction = np.argmax(prediction, axis=1)
validate_y = np.argmax(validate_y, axis=1)
# sns.heatmap(confusion_matrix(validate_y, prediction), annot=True, fmt='.5g')
# plt.show()
# summarize history for loss
plt.plot(history.history['loss'])
plt.plot(history.history['acc'])
plt.plot(history.history['val_loss'])
plt.plot(history.history['val_acc'])
plt.title('model loss & accuracy')
plt.ylabel('loss & accuracy')
plt.xlabel('epoch')
plt.legend(['training_loss', 'training_loss_accuracy', 'validation_loss', 'validation_accuracy'], loc='upper left')
plt.show()
|
[
"Pim.o.klaassen@gmail.com"
] |
Pim.o.klaassen@gmail.com
|
fde33bfeecbf3dbbe09c75ba20f22c7de38830ef
|
5aeb13e3185fd7eaae50ae4da72766dffe37ab60
|
/beautiful_soup/img_scrape.py
|
54f7ca1a92dee425de1341bb96f495d9026410c2
|
[] |
no_license
|
RedHenDev/python
|
f9787210e10f80eae84c7153ffc50845a92468a3
|
a13c75c66f031e59246f5b87cd53a33ad51f9a2f
|
refs/heads/master
| 2023-05-25T09:41:26.155720
| 2023-05-17T21:38:17
| 2023-05-17T21:38:17
| 212,212,352
| 3
| 5
| null | 2022-11-24T14:42:56
| 2019-10-01T22:38:38
|
Python
|
UTF-8
|
Python
| false
| false
| 415
|
py
|
import requests
from bs4 import BeautifulSoup
import os
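# Fetch the page, collect every <img> tag's src attribute, and save each downloaded
# response body as imgN.jpg in the working directory.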
url = "https://ccc.tela.org.uk/"
response = requests.get(url)
soup = BeautifulSoup(response.text, 'html.parser')
img_tags = soup.find_all('img')
urls = [img['src'] for img in img_tags]
for i, url in enumerate(urls):
print('found another...')
response = requests.get(url)
with open(f'img{i}.jpg', 'wb') as f:
f.write(response.content)
|
[
"neobn@hotmail.com"
] |
neobn@hotmail.com
|
8757120c76e640b2957970e2df698b93891257da
|
ce4005ce4e91cc59733a52e908cba935eb11582e
|
/tests/Node.py
|
55e3c6aaa0069b2f95d3e7959c3f0ef156b61c1f
|
[
"MIT",
"Apache-2.0",
"BSD-3-Clause"
] |
permissive
|
Arielce/dio
|
a4322e0e47d9aa2bea36ac29a290290fdac8d0d3
|
eb8035664f605783f86b41d34006aeb9ef861f13
|
refs/heads/master
| 2020-04-03T11:35:24.038809
| 2018-10-29T14:37:57
| 2018-10-29T14:37:57
| 155,226,338
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 55,731
|
py
|
import copy
import decimal
import subprocess
import time
import os
import re
import datetime
import json
from core_symbol import CORE_SYMBOL
from testUtils import Utils
from testUtils import Account
from testUtils import EnumType
from testUtils import addEnum
from testUtils import unhandledEnumType
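# Test-harness wrapper around a single chain node: drives it through the command-line
# client and, when enableMongo is set, mirrors queries against a MongoDB plugin.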
class ReturnType(EnumType):
pass
addEnum(ReturnType, "raw")
addEnum(ReturnType, "json")
class BlockType(EnumType):
pass
addEnum(BlockType, "head")
addEnum(BlockType, "lib")
# pylint: disable=too-many-public-methods
class Node(object):
# pylint: disable=too-many-instance-attributes
# pylint: disable=too-many-arguments
def __init__(self, host, port, pid=None, cmd=None, walletMgr=None, enableMongo=False, mongoHost="localhost", mongoPort=27017, mongoDb="dcctest"):
self.host=host
self.port=port
self.pid=pid
self.cmd=cmd
if Utils.Debug: Utils.Print("new Node host=%s, port=%s, pid=%s, cmd=%s" % (self.host, self.port, self.pid, self.cmd))
self.killed=False # marks node as killed
self.enableMongo=enableMongo
self.mongoHost=mongoHost
self.mongoPort=mongoPort
self.mongoDb=mongoDb
self.endpointHttp="http://%s:%d" % (self.host, self.port)
self.endpointArgs="--url %s" % (self.endpointHttp)
self.mongoEndpointArgs=""
self.infoValid=None
self.lastRetrievedHeadBlockNum=None
self.lastRetrievedLIB=None
self.transCache={}
self.walletMgr=walletMgr
self.missingTransaction=False
if self.enableMongo:
self.mongoEndpointArgs += "--host %s --port %d %s" % (mongoHost, mongoPort, mongoDb)
def dccClientArgs(self):
walletArgs=" " + self.walletMgr.getWalletEndpointArgs() if self.walletMgr is not None else ""
return self.endpointArgs + walletArgs + " " + Utils.MiscdccClientArgs
def __str__(self):
#return "Host: %s, Port:%d, Pid:%s, Cmd:\"%s\"" % (self.host, self.port, self.pid, self.cmd)
return "Host: %s, Port:%d" % (self.host, self.port)
@staticmethod
def validateTransaction(trans):
assert trans
assert isinstance(trans, dict), print("Input type is %s" % type(trans))
executed="executed"
def printTrans(trans, status):
Utils.Print("ERROR: Valid transaction should be \"%s\" but it was \"%s\"." % (executed, status))
Utils.Print("Transaction: %s" % (json.dumps(trans, indent=1)))
transStatus=Node.getTransStatus(trans)
assert transStatus == executed, printTrans(trans, transStatus)
@staticmethod
def __printTransStructureError(trans, context):
Utils.Print("ERROR: Failure in expected transaction structure. Missing trans%s." % (context))
Utils.Print("Transaction: %s" % (json.dumps(trans, indent=1)))
class Context:
def __init__(self, obj, desc):
self.obj=obj
self.sections=[obj]
self.keyContext=[]
self.desc=desc
def __json(self):
return "%s=\n%s" % (self.desc, json.dumps(self.obj, indent=1))
def __keyContext(self):
msg=""
for key in self.keyContext:
if msg=="":
msg="["
else:
msg+="]["
msg+=key
if msg!="":
msg+="]"
return msg
def __contextDesc(self):
return "%s%s" % (self.desc, self.__keyContext())
def add(self, newKey):
assert isinstance(newKey, str), print("ERROR: Trying to use %s as a key" % (newKey))
subSection=self.sections[-1]
assert isinstance(subSection, dict), print("ERROR: Calling \"add\" method when context is not a dictionary. %s in %s" % (self.__contextDesc(), self.__json()))
assert newKey in subSection, print("ERROR: %s does not contain key \"%s\". %s" % (self.__contextDesc(), newKey, self.__json()))
current=subSection[newKey]
self.sections.append(current)
self.keyContext.append(newKey)
return current
def index(self, i):
assert isinstance(i, int), print("ERROR: Trying to use \"%s\" as a list index" % (i))
cur=self.getCurrent()
assert isinstance(cur, list), print("ERROR: Calling \"index\" method when context is not a list. %s in %s" % (self.__contextDesc(), self.__json()))
listLen=len(cur)
assert i < listLen, print("ERROR: Index %s is beyond the size of the current list (%s). %s in %s" % (i, listLen, self.__contextDesc(), self.__json()))
return self.sections.append(cur[i])
def getCurrent(self):
return self.sections[-1]
@staticmethod
def getTransStatus(trans):
cntxt=Node.Context(trans, "trans")
cntxt.add("processed")
cntxt.add("receipt")
return cntxt.add("status")
@staticmethod
def getTransBlockNum(trans):
cntxt=Node.Context(trans, "trans")
cntxt.add("processed")
cntxt.add("action_traces")
cntxt.index(0)
return cntxt.add("block_num")
@staticmethod
def stdinAndCheckOutput(cmd, subcommand):
"""Passes input to stdin, executes cmd. Returns tuple with return code(int), stdout(byte stream) and stderr(byte stream)."""
assert(cmd)
assert(isinstance(cmd, list))
assert(subcommand)
assert(isinstance(subcommand, str))
outs=None
errs=None
ret=0
try:
popen=subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
outs,errs=popen.communicate(input=subcommand.encode("utf-8"))
ret=popen.wait()
except subprocess.CalledProcessError as ex:
msg=ex.output
return (ex.returncode, msg, None)
return (ret, outs, errs)
@staticmethod
def normalizeJsonObject(extJStr):
tmpStr=extJStr
tmpStr=re.sub(r'ObjectId\("(\w+)"\)', r'"ObjectId-\1"', tmpStr)
tmpStr=re.sub(r'ISODate\("([\w|\-|\:|\.]+)"\)', r'"ISODate-\1"', tmpStr)
tmpStr=re.sub(r'NumberLong\("(\w+)"\)', r'"NumberLong-\1"', tmpStr)
return tmpStr
@staticmethod
def runMongoCmdReturnJson(cmd, subcommand, trace=False, exitOnError=False):
"""Run mongodb subcommand and return response."""
assert(cmd)
assert(isinstance(cmd, list))
assert(subcommand)
assert(isinstance(subcommand, str))
retId,outs,errs=Node.stdinAndCheckOutput(cmd, subcommand)
if retId != 0:
errorMsg="mongodb call failed. cmd=[ %s ] subcommand=\"%s\" - %s" % (", ".join(cmd), subcommand, errs)
if exitOnError:
Utils.cmdError(errorMsg)
Utils.errorExit(errorMsg)
Utils.Print("ERROR: %s" % (errMsg))
return None
outStr=Node.byteArrToStr(outs)
if not outStr:
return None
extJStr=Utils.filterJsonObjectOrArray(outStr)
if not extJStr:
return None
jStr=Node.normalizeJsonObject(extJStr)
if not jStr:
return None
if trace: Utils.Print ("RAW > %s"% (outStr))
if trace: Utils.Print ("JSON> %s"% jStr)
try:
jsonData=json.loads(jStr)
except json.decoder.JSONDecodeError as _:
Utils.Print ("ERROR: JSONDecodeError")
Utils.Print ("Raw MongoDB response: > %s"% (outStr))
Utils.Print ("Normalized MongoDB response: > %s"% (jStr))
raise
return jsonData
@staticmethod
def getTransId(trans):
"""Retrieve transaction id from dictionary object."""
assert trans
assert isinstance(trans, dict), print("Input type is %s" % type(trans))
assert "transaction_id" in trans, print("trans does not contain key %s. trans={%s}" % ("transaction_id", json.dumps(trans, indent=2, sort_keys=True)))
transId=trans["transaction_id"]
return transId
@staticmethod
def isTrans(obj):
"""Identify if this is a transaction dictionary."""
if obj is None or not isinstance(obj, dict):
return False
return True if "transaction_id" in obj else False
@staticmethod
def byteArrToStr(arr):
return arr.decode("utf-8")
def validateAccounts(self, accounts):
assert(accounts)
assert(isinstance(accounts, list))
for account in accounts:
assert(account)
assert(isinstance(account, Account))
if Utils.Debug: Utils.Print("Validating account %s" % (account.name))
accountInfo=self.getdccAccount(account.name, exitOnError=True)
try:
if not self.enableMongo:
assert(accountInfo["account_name"] == account.name)
else:
assert(accountInfo["name"] == account.name)
except (AssertionError, TypeError, KeyError) as _:
Utils.Print("account validation failed. account: %s" % (account.name))
raise
# pylint: disable=too-many-branches
def getBlock(self, blockNum, silentErrors=False, exitOnError=False):
"""Given a blockId will return block details."""
assert(isinstance(blockNum, int))
if not self.enableMongo:
cmdDesc="get block"
cmd="%s %d" % (cmdDesc, blockNum)
msg="(block number=%s)" % (blockNum);
return self.processCldccCmd(cmd, cmdDesc, silentErrors=silentErrors, exitOnError=exitOnError, exitMsg=msg)
else:
cmd="%s %s" % (Utils.MongoPath, self.mongoEndpointArgs)
subcommand='db.blocks.findOne( { "block_num": %d } )' % (blockNum)
if Utils.Debug: Utils.Print("cmd: echo '%s' | %s" % (subcommand, cmd))
try:
block=Node.runMongoCmdReturnJson(cmd.split(), subcommand, exitOnError=exitOnError)
if block is not None:
return block
except subprocess.CalledProcessError as ex:
if not silentErrors:
msg=ex.output.decode("utf-8")
errorMsg="Exception during get db node get block. %s" % (msg)
if exitOnError:
Utils.cmdError(errorMsg)
Utils.errorExit(errorMsg)
else:
Utils.Print("ERROR: %s" % (errorMsg))
return None
return None
def getBlockByIdMdb(self, blockId, silentErrors=False):
cmd="%s %s" % (Utils.MongoPath, self.mongoEndpointArgs)
subcommand='db.blocks.findOne( { "block_id": "%s" } )' % (blockId)
if Utils.Debug: Utils.Print("cmd: echo '%s' | %s" % (subcommand, cmd))
try:
trans=Node.runMongoCmdReturnJson(cmd.split(), subcommand)
if trans is not None:
return trans
except subprocess.CalledProcessError as ex:
if not silentErrors:
msg=ex.output.decode("utf-8")
Utils.Print("ERROR: Exception during db get block by id. %s" % (msg))
return None
return None
def isBlockPresent(self, blockNum, blockType=BlockType.head):
"""Does node have head_block_num/last_irreversible_block_num >= blockNum"""
assert isinstance(blockNum, int)
assert isinstance(blockType, BlockType)
assert (blockNum > 0)
info=self.getInfo(silentErrors=True, exitOnError=True)
node_block_num=0
try:
if blockType==BlockType.head:
node_block_num=int(info["head_block_num"])
elif blockType==BlockType.lib:
node_block_num=int(info["last_irreversible_block_num"])
else:
unhandledEnumType(blockType)
except (TypeError, KeyError) as _:
Utils.Print("Failure in get info parsing %s block. %s" % (blockType.type, info))
raise
present = True if blockNum <= node_block_num else False
if Utils.Debug and blockType==BlockType.lib:
decorator=""
if not present:
decorator="not "
Utils.Print("Block %d is %sfinalized." % (blockNum, decorator))
return present
def isBlockFinalized(self, blockNum):
"""Is blockNum finalized"""
return self.isBlockPresent(blockNum, blockType=BlockType.lib)
# pylint: disable=too-many-branches
def getTransaction(self, transId, silentErrors=False, exitOnError=False, delayedRetry=True):
assert(isinstance(transId, str))
exitOnErrorForDelayed=not delayedRetry and exitOnError
timeout=3
if not self.enableMongo:
cmdDesc="get transaction"
cmd="%s %s" % (cmdDesc, transId)
msg="(transaction id=%s)" % (transId);
for i in range(0,(int(60/timeout) - 1)):
trans=self.processCldccCmd(cmd, cmdDesc, silentErrors=silentErrors, exitOnError=exitOnErrorForDelayed, exitMsg=msg)
if trans is not None or not delayedRetry:
return trans
if Utils.Debug: Utils.Print("Could not find transaction with id %s, delay and retry" % (transId))
time.sleep(timeout)
self.missingTransaction=True
# either it is there or the transaction has timed out
return self.processCldccCmd(cmd, cmdDesc, silentErrors=silentErrors, exitOnError=exitOnError, exitMsg=msg)
else:
for i in range(0,(int(60/timeout) - 1)):
trans=self.getTransactionMdb(transId, silentErrors=silentErrors, exitOnError=exitOnErrorForDelayed)
if trans is not None or not delayedRetry:
return trans
if Utils.Debug: Utils.Print("Could not find transaction with id %s in mongodb, delay and retry" % (transId))
time.sleep(timeout)
return self.getTransactionMdb(transId, silentErrors=silentErrors, exitOnError=exitOnError)
def getTransactionMdb(self, transId, silentErrors=False, exitOnError=False):
"""Get transaction from MongoDB. Since DB only contains finalized blocks, transactions can take a while to appear in DB."""
cmd="%s %s" % (Utils.MongoPath, self.mongoEndpointArgs)
#subcommand='db.Transactions.findOne( { $and : [ { "trx_id": "%s" }, {"irreversible":true} ] } )' % (transId)
subcommand='db.transactions.findOne( { "trx_id": "%s" } )' % (transId)
if Utils.Debug: Utils.Print("cmd: echo '%s' | %s" % (subcommand, cmd))
try:
trans=Node.runMongoCmdReturnJson(cmd.split(), subcommand, exitOnError=exitOnError)
if trans is not None:
return trans
except subprocess.CalledProcessError as ex:
msg=ex.output.decode("utf-8")
errorMsg="Exception during get db node get trans in mongodb with transaction id=%s. %s" % (transId,msg)
if exitOnError:
Utils.cmdError("" % (errorMsg))
Utils.errorExit("Failed to retrieve transaction in mongodb for transaction id=%s" % (transId))
elif not silentErrors:
Utils.Print("ERROR: %s" % (errorMsg))
return None
def isTransInBlock(self, transId, blockId):
"""Check if transId is within block identified by blockId"""
assert(transId)
assert(isinstance(transId, str))
assert(blockId)
assert(isinstance(blockId, int))
block=self.getBlock(blockId, exitOnError=True)
transactions=None
key=""
try:
if not self.enableMongo:
key="[transactions]"
transactions=block["transactions"]
else:
key="[blocks][transactions]"
transactions=block["block"]["transactions"]
except (AssertionError, TypeError, KeyError) as _:
Utils.Print("block%s not found. Block: %s" % (key,block))
raise
if transactions is not None:
for trans in transactions:
assert(trans)
try:
myTransId=trans["trx"]["id"]
if transId == myTransId:
return True
except (TypeError, KeyError) as _:
Utils.Print("transaction%s not found. Transaction: %s" % (key, trans))
return False
def getBlockIdByTransId(self, transId, delayedRetry=True):
"""Given a transaction Id (string), will return the actual block id (int) containing the transaction"""
assert(transId)
assert(isinstance(transId, str))
trans=self.getTransaction(transId, exitOnError=True, delayedRetry=delayedRetry)
refBlockNum=None
key=""
try:
if not self.enableMongo:
key="[trx][trx][ref_block_num]"
refBlockNum=trans["trx"]["trx"]["ref_block_num"]
else:
key="[ref_block_num]"
refBlockNum=trans["ref_block_num"]
refBlockNum=int(refBlockNum)+1
except (TypeError, ValueError, KeyError) as _:
Utils.Print("transaction%s not found. Transaction: %s" % (key, trans))
return None
headBlockNum=self.getHeadBlockNum()
assert(headBlockNum)
try:
headBlockNum=int(headBlockNum)
except(ValueError) as _:
Utils.Print("ERROR: Block info parsing failed. %s" % (headBlockNum))
raise
if Utils.Debug: Utils.Print("Reference block num %d, Head block num: %d" % (refBlockNum, headBlockNum))
for blockNum in range(refBlockNum, headBlockNum+1):
if self.isTransInBlock(str(transId), blockNum):
if Utils.Debug: Utils.Print("Found transaction %s in block %d" % (transId, blockNum))
return blockNum
return None
def getBlockIdByTransIdMdb(self, transId):
"""Given a transaction Id (string), will return block id (int) containing the transaction. This is specific to MongoDB."""
assert(transId)
assert(isinstance(transId, str))
trans=self.getTransactionMdb(transId)
if not trans: return None
refBlockNum=None
try:
refBlockNum=trans["ref_block_num"]
refBlockNum=int(refBlockNum)+1
except (TypeError, ValueError, KeyError) as _:
Utils.Print("transaction[ref_block_num] not found. Transaction: %s" % (trans))
return None
headBlockNum=self.getHeadBlockNum()
assert(headBlockNum)
try:
headBlockNum=int(headBlockNum)
except(ValueError) as _:
Utils.Print("Info parsing failed. %s" % (headBlockNum))
for blockNum in range(refBlockNum, headBlockNum+1):
if self.isTransInBlock(str(transId), blockNum):
return blockNum
return None
def isTransInAnyBlock(self, transId):
"""Check if transaction (transId) is in a block."""
assert(transId)
assert(isinstance(transId, (str,int)))
# if not self.enableMongo:
blockId=self.getBlockIdByTransId(transId)
# else:
# blockId=self.getBlockIdByTransIdMdb(transId)
return True if blockId else False
def isTransFinalized(self, transId):
"""Check if transaction (transId) has been finalized."""
assert(transId)
assert(isinstance(transId, str))
blockId=self.getBlockIdByTransId(transId)
if not blockId:
return False
assert(isinstance(blockId, int))
return self.isBlockPresent(blockId, blockType=BlockType.lib)
# Create & initialize account and return creation transactions. Return transaction json object
def createInitializeAccount(self, account, creatorAccount, stakedDeposit=1000, waitForTransBlock=False, stakeNet=100, stakeCPU=100, buyRAM=10000, exitOnError=False):
cmdDesc="system newaccount"
cmd='%s -j %s %s %s %s --stake-net "%s %s" --stake-cpu "%s %s" --buy-ram "%s %s"' % (
cmdDesc, creatorAccount.name, account.name, account.ownerPublicKey,
account.activePublicKey, stakeNet, CORE_SYMBOL, stakeCPU, CORE_SYMBOL, buyRAM, CORE_SYMBOL)
msg="(creator account=%s, account=%s)" % (creatorAccount.name, account.name);
trans=self.processCldccCmd(cmd, cmdDesc, silentErrors=False, exitOnError=exitOnError, exitMsg=msg)
self.trackCmdTransaction(trans)
transId=Node.getTransId(trans)
if stakedDeposit > 0:
self.waitForTransInBlock(transId) # seems like account creation needs to be finalized before transfer can happen
trans = self.transferFunds(creatorAccount, account, Node.currencyIntToStr(stakedDeposit, CORE_SYMBOL), "init")
transId=Node.getTransId(trans)
return self.waitForTransBlockIfNeeded(trans, waitForTransBlock, exitOnError=exitOnError)
def createAccount(self, account, creatorAccount, stakedDeposit=1000, waitForTransBlock=False, exitOnError=False):
"""Create account and return creation transactions. Return transaction json object.
waitForTransBlock: wait on creation transaction id to appear in a block."""
cmdDesc="create account"
cmd="%s -j %s %s %s %s" % (
cmdDesc, creatorAccount.name, account.name, account.ownerPublicKey, account.activePublicKey)
msg="(creator account=%s, account=%s)" % (creatorAccount.name, account.name);
trans=self.processCldccCmd(cmd, cmdDesc, silentErrors=False, exitOnError=exitOnError, exitMsg=msg)
self.trackCmdTransaction(trans)
transId=Node.getTransId(trans)
if stakedDeposit > 0:
self.waitForTransInBlock(transId) # seems like account creation needs to be finalized before transfer can happen
trans = self.transferFunds(creatorAccount, account, "%0.04f %s" % (stakedDeposit/10000, CORE_SYMBOL), "init")
self.trackCmdTransaction(trans)
transId=Node.getTransId(trans)
return self.waitForTransBlockIfNeeded(trans, waitForTransBlock, exitOnError=exitOnError)
def getdccAccount(self, name, exitOnError=False):
assert(isinstance(name, str))
if not self.enableMongo:
cmdDesc="get account"
cmd="%s -j %s" % (cmdDesc, name)
msg="( getdccAccount(name=%s) )" % (name);
return self.processCldccCmd(cmd, cmdDesc, silentErrors=False, exitOnError=exitOnError, exitMsg=msg)
else:
return self.getdccAccountFromDb(name, exitOnError=exitOnError)
def getdccAccountFromDb(self, name, exitOnError=False):
cmd="%s %s" % (Utils.MongoPath, self.mongoEndpointArgs)
subcommand='db.accounts.findOne({"name" : "%s"})' % (name)
if Utils.Debug: Utils.Print("cmd: echo '%s' | %s" % (subcommand, cmd))
try:
timeout = 3
for i in range(0,(int(60/timeout) - 1)):
trans=Node.runMongoCmdReturnJson(cmd.split(), subcommand, exitOnError=exitOnError)
if trans is not None:
return trans
time.sleep(timeout)
return trans
except subprocess.CalledProcessError as ex:
msg=ex.output.decode("utf-8")
if exitOnError:
Utils.cmdError("Exception during get account from db for %s. %s" % (name, msg))
Utils.errorExit("Failed during get account from db for %s. %s" % (name, msg))
Utils.Print("ERROR: Exception during get account from db for %s. %s" % (name, msg))
return None
def getTable(self, contract, scope, table, exitOnError=False):
cmdDesc = "get table"
cmd="%s %s %s %s" % (cmdDesc, contract, scope, table)
msg="contract=%s, scope=%s, table=%s" % (contract, scope, table);
return self.processCldccCmd(cmd, cmdDesc, exitOnError=exitOnError, exitMsg=msg)
def getTableAccountBalance(self, contract, scope):
assert(isinstance(contract, str))
assert(isinstance(scope, str))
table="accounts"
trans = self.getTable(contract, scope, table, exitOnError=True)
try:
return trans["rows"][0]["balance"]
except (TypeError, KeyError) as _:
print("transaction[rows][0][balance] not found. Transaction: %s" % (trans))
raise
def getCurrencyBalance(self, contract, account, symbol=CORE_SYMBOL, exitOnError=False):
"""returns raw output from get currency balance e.g. '99999.9950 CUR'"""
assert(contract)
assert(isinstance(contract, str))
assert(account)
assert(isinstance(account, str))
assert(symbol)
assert(isinstance(symbol, str))
cmdDesc = "get currency balance"
cmd="%s %s %s %s" % (cmdDesc, contract, account, symbol)
msg="contract=%s, account=%s, symbol=%s" % (contract, account, symbol);
return self.processCldccCmd(cmd, cmdDesc, exitOnError=exitOnError, exitMsg=msg, returnType=ReturnType.raw)
def getCurrencyStats(self, contract, symbol=CORE_SYMBOL, exitOnError=False):
"""returns Json output from get currency stats."""
assert(contract)
assert(isinstance(contract, str))
assert(symbol)
assert(isinstance(symbol, str))
cmdDesc = "get currency stats"
cmd="%s %s %s" % (cmdDesc, contract, symbol)
msg="contract=%s, symbol=%s" % (contract, symbol);
return self.processCldccCmd(cmd, cmdDesc, exitOnError=exitOnError, exitMsg=msg)
# Verifies account. Returns "get account" json return object
def verifyAccount(self, account):
assert(account)
if not self.enableMongo:
ret=self.getdccAccount(account.name)
if ret is not None:
account_name=ret["account_name"]
if account_name is None:
Utils.Print("ERROR: Failed to verify account creation.", account.name)
return None
return ret
else:
return self.verifyAccountMdb(account)
def verifyAccountMdb(self, account):
assert(account)
ret=self.getdccAccountFromDb(account.name)
if ret is not None:
account_name=ret["name"]
if account_name is None:
Utils.Print("ERROR: Failed to verify account creation.", account.name)
return None
return ret
return None
def waitForTransInBlock(self, transId, timeout=None):
"""Wait for trans id to be finalized."""
assert(isinstance(transId, str))
lam = lambda: self.isTransInAnyBlock(transId)
ret=Utils.waitForBool(lam, timeout)
return ret
def waitForTransFinalization(self, transId, timeout=None):
"""Wait for trans id to be finalized."""
assert(isinstance(transId, str))
lam = lambda: self.isTransFinalized(transId)
ret=Utils.waitForBool(lam, timeout)
return ret
def waitForNextBlock(self, timeout=None, blockType=BlockType.head):
num=self.getBlockNum(blockType=blockType)
lam = lambda: self.getHeadBlockNum() > num
ret=Utils.waitForBool(lam, timeout)
return ret
def waitForBlock(self, blockNum, timeout=None, blockType=BlockType.head):
lam = lambda: self.getBlockNum(blockType=blockType) > blockNum
ret=Utils.waitForBool(lam, timeout)
return ret
def waitForIrreversibleBlock(self, blockNum, timeout=None, blockType=BlockType.head):
return self.waitForBlock(blockNum, timeout=timeout, blockType=blockType)
# Transfer funds. Returns "transfer" json return object
def transferFunds(self, source, destination, amountStr, memo="memo", force=False, waitForTransBlock=False, exitOnError=True):
assert isinstance(amountStr, str)
assert(source)
assert(isinstance(source, Account))
assert(destination)
assert(isinstance(destination, Account))
cmd="%s %s -v transfer -j %s %s" % (
Utils.dccClientPath, self.dccClientArgs(), source.name, destination.name)
cmdArr=cmd.split()
cmdArr.append(amountStr)
cmdArr.append(memo)
if force:
cmdArr.append("-f")
s=" ".join(cmdArr)
if Utils.Debug: Utils.Print("cmd: %s" % (s))
trans=None
try:
trans=Utils.runCmdArrReturnJson(cmdArr)
self.trackCmdTransaction(trans)
except subprocess.CalledProcessError as ex:
msg=ex.output.decode("utf-8")
Utils.Print("ERROR: Exception during funds transfer. %s" % (msg))
if exitOnError:
Utils.cmdError("could not transfer \"%s\" from %s to %s" % (amountStr, source, destination))
Utils.errorExit("Failed to transfer \"%s\" from %s to %s" % (amountStr, source, destination))
return None
if trans is None:
Utils.cmdError("could not transfer \"%s\" from %s to %s" % (amountStr, source, destination))
Utils.errorExit("Failed to transfer \"%s\" from %s to %s" % (amountStr, source, destination))
return self.waitForTransBlockIfNeeded(trans, waitForTransBlock, exitOnError=exitOnError)
@staticmethod
def currencyStrToInt(balanceStr):
"""Converts currency string of form "12.3456 SYS" to int 123456"""
assert(isinstance(balanceStr, str))
balanceStr=balanceStr.split()[0]
#balance=int(decimal.Decimal(balanceStr[1:])*10000)
balance=int(decimal.Decimal(balanceStr)*10000)
return balance
@staticmethod
def currencyIntToStr(balance, symbol):
"""Converts currency int of form 123456 to string "12.3456 SYS" where SYS is symbol string"""
assert(isinstance(balance, int))
assert(isinstance(symbol, str))
balanceStr="%.04f %s" % (balance/10000.0, symbol)
return balanceStr
def validateFunds(self, initialBalances, transferAmount, source, accounts):
"""Validate each account has the expected SYS balance. Validate cumulative balance matches expectedTotal."""
assert(source)
assert(isinstance(source, Account))
assert(accounts)
assert(isinstance(accounts, list))
assert(len(accounts) > 0)
assert(initialBalances)
assert(isinstance(initialBalances, dict))
assert(isinstance(transferAmount, int))
currentBalances=self.getdccBalances([source] + accounts)
assert(currentBalances)
assert(isinstance(currentBalances, dict))
assert(len(initialBalances) == len(currentBalances))
if len(currentBalances) != len(initialBalances):
Utils.Print("ERROR: validateFunds> accounts length mismatch. Initial: %d, current: %d" % (len(initialBalances), len(currentBalances)))
return False
for key, value in currentBalances.items():
initialBalance = initialBalances[key]
assert(initialBalances)
expectedInitialBalance = value - transferAmount
if key is source:
expectedInitialBalance = value + (transferAmount*len(accounts))
if (initialBalance != expectedInitialBalance):
Utils.Print("ERROR: validateFunds> Expected: %d, actual: %d for account %s" %
(expectedInitialBalance, initialBalance, key.name))
return False
def getdccBalances(self, accounts):
"""Returns a dictionary with account balances keyed by accounts"""
assert(accounts)
assert(isinstance(accounts, list))
balances={}
for account in accounts:
balance = self.getAccountdccBalance(account.name)
balances[account]=balance
return balances
# Gets accounts mapped to key. Returns json object
def getAccountsByKey(self, key, exitOnError=False):
cmdDesc = "get accounts"
cmd="%s %s" % (cmdDesc, key)
msg="key=%s" % (key);
return self.processCldccCmd(cmd, cmdDesc, exitOnError=exitOnError, exitMsg=msg)
# Get actions mapped to an account (cldcc get actions)
def getActions(self, account, pos=-1, offset=-1, exitOnError=False):
assert(isinstance(account, Account))
assert(isinstance(pos, int))
assert(isinstance(offset, int))
if not self.enableMongo:
cmdDesc = "get actions"
cmd="%s -j %s %d %d" % (cmdDesc, account.name, pos, offset)
msg="account=%s, pos=%d, offset=%d" % (account.name, pos, offset);
return self.processCldccCmd(cmd, cmdDesc, exitOnError=exitOnError, exitMsg=msg)
else:
return self.getActionsMdb(account, pos, offset, exitOnError=exitOnError)
def getActionsMdb(self, account, pos=-1, offset=-1, exitOnError=False):
assert(isinstance(account, Account))
assert(isinstance(pos, int))
assert(isinstance(offset, int))
cmd="%s %s" % (Utils.MongoPath, self.mongoEndpointArgs)
subcommand='db.action_traces.find({$or: [{"act.data.from":"%s"},{"act.data.to":"%s"}]}).sort({"_id":%d}).limit(%d)' % (account.name, account.name, pos, abs(offset))
if Utils.Debug: Utils.Print("cmd: echo '%s' | %s" % (subcommand, cmd))
try:
actions=Node.runMongoCmdReturnJson(cmd.split(), subcommand, exitOnError=exitOnError)
if actions is not None:
return actions
except subprocess.CalledProcessError as ex:
msg=ex.output.decode("utf-8")
errorMsg="Exception during get db actions. %s" % (msg)
if exitOnError:
Utils.cmdError(errorMsg)
Utils.errorExit(errorMsg)
else:
Utils.Print("ERROR: %s" % (errorMsg))
return None
# Gets accounts mapped to key. Returns array
def getAccountsArrByKey(self, key):
trans=self.getAccountsByKey(key)
assert(trans)
assert("account_names" in trans)
accounts=trans["account_names"]
return accounts
def getServants(self, name, exitOnError=False):
cmdDesc = "get servants"
cmd="%s %s" % (cmdDesc, name)
msg="name=%s" % (name);
return self.processCldccCmd(cmd, cmdDesc, exitOnError=exitOnError, exitMsg=msg)
def getServantsArr(self, name):
trans=self.getServants(name, exitOnError=True)
servants=trans["controlled_accounts"]
return servants
def getAccountdccBalanceStr(self, scope):
"""Returns SYS currency0000 account balance from cldcc get table command. Returned balance is string following syntax "98.0311 SYS". """
assert isinstance(scope, str)
amount=self.getTableAccountBalance("dccio.token", scope)
if Utils.Debug: Utils.Print("getNodeAccountdccBalance %s %s" % (scope, amount))
assert isinstance(amount, str)
return amount
def getAccountdccBalance(self, scope):
"""Returns SYS currency0000 account balance from cldcc get table command. Returned balance is an integer e.g. 980311. """
balanceStr=self.getAccountdccBalanceStr(scope)
balance=Node.currencyStrToInt(balanceStr)
return balance
def getAccountCodeHash(self, account):
cmd="%s %s get code %s" % (Utils.dccClientPath, self.dccClientArgs(), account)
if Utils.Debug: Utils.Print("cmd: %s" % (cmd))
try:
retStr=Utils.checkOutput(cmd.split())
#Utils.Print ("get code> %s"% retStr)
p=re.compile(r'code\shash: (\w+)\n', re.MULTILINE)
m=p.search(retStr)
if m is None:
msg="Failed to parse code hash."
Utils.Print("ERROR: "+ msg)
return None
return m.group(1)
except subprocess.CalledProcessError as ex:
msg=ex.output.decode("utf-8")
Utils.Print("ERROR: Exception during code hash retrieval. %s" % (msg))
return None
# publish contract and return transaction as json object
def publishContract(self, account, contractDir, wasmFile, abiFile, waitForTransBlock=False, shouldFail=False):
cmd="%s %s -v set contract -j %s %s" % (Utils.dccClientPath, self.dccClientArgs(), account, contractDir)
cmd += "" if wasmFile is None else (" "+ wasmFile)
cmd += "" if abiFile is None else (" " + abiFile)
if Utils.Debug: Utils.Print("cmd: %s" % (cmd))
trans=None
try:
trans=Utils.runCmdReturnJson(cmd, trace=False)
self.trackCmdTransaction(trans)
except subprocess.CalledProcessError as ex:
if not shouldFail:
msg=ex.output.decode("utf-8")
Utils.Print("ERROR: Exception during code hash retrieval. %s" % (msg))
return None
else:
retMap={}
retMap["returncode"]=ex.returncode
retMap["cmd"]=ex.cmd
retMap["output"]=ex.output
# commented below as they are available only in Python3.5 and above
# retMap["stdout"]=ex.stdout
# retMap["stderr"]=ex.stderr
return retMap
if shouldFail:
Utils.Print("ERROR: The publish contract did not fail as expected.")
return None
Node.validateTransaction(trans)
return self.waitForTransBlockIfNeeded(trans, waitForTransBlock, exitOnError=False)
def getTableRows(self, contract, scope, table):
jsonData=self.getTable(contract, scope, table)
if jsonData is None:
return None
rows=jsonData["rows"]
return rows
def getTableRow(self, contract, scope, table, idx):
if idx < 0:
Utils.Print("ERROR: Table index cannot be negative. idx: %d" % (idx))
return None
rows=self.getTableRows(contract, scope, table)
if rows is None or idx >= len(rows):
Utils.Print("ERROR: Retrieved table does not contain row %d" % idx)
return None
row=rows[idx]
return row
def getTableColumns(self, contract, scope, table):
row=self.getTableRow(contract, scope, table, 0)
keys=list(row.keys())
return keys
# returns tuple with transaction and
def pushMessage(self, account, action, data, opts, silentErrors=False):
cmd="%s %s push action -j %s %s" % (Utils.dccClientPath, self.dccClientArgs(), account, action)
cmdArr=cmd.split()
if data is not None:
cmdArr.append(data)
if opts is not None:
cmdArr += opts.split()
s=" ".join(cmdArr)
if Utils.Debug: Utils.Print("cmd: %s" % (cmdArr))
try:
trans=Utils.runCmdArrReturnJson(cmdArr)
self.trackCmdTransaction(trans, ignoreNonTrans=True)
return (True, trans)
except subprocess.CalledProcessError as ex:
msg=ex.output.decode("utf-8")
if not silentErrors:
Utils.Print("ERROR: Exception during push message. %s" % (msg))
return (False, msg)
def setPermission(self, account, code, pType, requirement, waitForTransBlock=False, exitOnError=False):
cmdDesc="set action permission"
cmd="%s -j %s %s %s %s" % (cmdDesc, account, code, pType, requirement)
trans=self.processCldccCmd(cmd, cmdDesc, silentErrors=False, exitOnError=exitOnError)
self.trackCmdTransaction(trans)
return self.waitForTransBlockIfNeeded(trans, waitForTransBlock, exitOnError=exitOnError)
def delegatebw(self, fromAccount, netQuantity, cpuQuantity, toAccount=None, transferTo=False, waitForTransBlock=False, exitOnError=False):
if toAccount is None:
toAccount=fromAccount
cmdDesc="system delegatebw"
transferStr="--transfer" if transferTo else ""
cmd="%s -j %s %s \"%s %s\" \"%s %s\" %s" % (
cmdDesc, fromAccount.name, toAccount.name, netQuantity, CORE_SYMBOL, cpuQuantity, CORE_SYMBOL, transferStr)
msg="fromAccount=%s, toAccount=%s" % (fromAccount.name, toAccount.name);
trans=self.processCldccCmd(cmd, cmdDesc, exitOnError=exitOnError, exitMsg=msg)
self.trackCmdTransaction(trans)
return self.waitForTransBlockIfNeeded(trans, waitForTransBlock, exitOnError=exitOnError)
def regproducer(self, producer, url, location, waitForTransBlock=False, exitOnError=False):
cmdDesc="system regproducer"
cmd="%s -j %s %s %s %s" % (
cmdDesc, producer.name, producer.activePublicKey, url, location)
msg="producer=%s" % (producer.name);
trans=self.processCldccCmd(cmd, cmdDesc, exitOnError=exitOnError, exitMsg=msg)
self.trackCmdTransaction(trans)
return self.waitForTransBlockIfNeeded(trans, waitForTransBlock, exitOnError=exitOnError)
def vote(self, account, producers, waitForTransBlock=False, exitOnError=False):
cmdDesc = "system voteproducer prods"
cmd="%s -j %s %s" % (
cmdDesc, account.name, " ".join(producers))
msg="account=%s, producers=[ %s ]" % (account.name, ", ".join(producers));
trans=self.processCldccCmd(cmd, cmdDesc, exitOnError=exitOnError, exitMsg=msg)
self.trackCmdTransaction(trans)
return self.waitForTransBlockIfNeeded(trans, waitForTransBlock, exitOnError=exitOnError)
def processCldccCmd(self, cmd, cmdDesc, silentErrors=True, exitOnError=False, exitMsg=None, returnType=ReturnType.json):
assert(isinstance(returnType, ReturnType))
cmd="%s %s %s" % (Utils.dccClientPath, self.dccClientArgs(), cmd)
if Utils.Debug: Utils.Print("cmd: %s" % (cmd))
if exitMsg is not None:
exitMsg="Context: " + exitMsg
else:
exitMsg=""
trans=None
try:
if returnType==ReturnType.json:
trans=Utils.runCmdReturnJson(cmd, silentErrors=silentErrors)
elif returnType==ReturnType.raw:
trans=Utils.runCmdReturnStr(cmd)
else:
unhandledEnumType(returnType)
except subprocess.CalledProcessError as ex:
if not silentErrors:
msg=ex.output.decode("utf-8")
errorMsg="Exception during \"%s\". Exception message: %s. %s" % (cmdDesc, msg, exitMsg)
if exitOnError:
Utils.cmdError(errorMsg)
Utils.errorExit(errorMsg)
else:
Utils.Print("ERROR: %s" % (errorMsg))
return None
if exitOnError and trans is None:
Utils.cmdError("could not \"%s\". %s" % (cmdDesc,exitMsg))
errorExit("Failed to \"%s\"" % (cmdDesc))
return trans
def killNodeOnProducer(self, producer, whereInSequence, blockType=BlockType.head, silentErrors=True, exitOnError=False, exitMsg=None, returnType=ReturnType.json):
assert(isinstance(producer, str))
assert(isinstance(whereInSequence, int))
assert(isinstance(blockType, BlockType))
assert(isinstance(returnType, ReturnType))
basedOnLib="true" if blockType==BlockType.lib else "false"
cmd="curl %s/v1/test_control/kill_node_on_producer -d '{ \"producer\":\"%s\", \"where_in_sequence\":%d, \"based_on_lib\":\"%s\" }' -X POST -H \"Content-Type: application/json\"" % \
(self.endpointHttp, producer, whereInSequence, basedOnLib)
if Utils.Debug: Utils.Print("cmd: %s" % (cmd))
rtn=None
try:
if returnType==ReturnType.json:
rtn=Utils.runCmdReturnJson(cmd, silentErrors=silentErrors)
elif returnType==ReturnType.raw:
rtn=Utils.runCmdReturnStr(cmd)
else:
unhandledEnumType(returnType)
except subprocess.CalledProcessError as ex:
if not silentErrors:
msg=ex.output.decode("utf-8")
errorMsg="Exception during \"%s\". %s" % (cmd, msg)
if exitOnError:
Utils.cmdError(errorMsg)
Utils.errorExit(errorMsg)
else:
Utils.Print("ERROR: %s" % (errorMsg))
return None
if exitMsg is not None:
exitMsg=": " + exitMsg
else:
exitMsg=""
if exitOnError and rtn is None:
Utils.cmdError("could not \"%s\" - %s" % (cmd,exitMsg))
Utils.errorExit("Failed to \"%s\"" % (cmd))
return rtn
def waitForTransBlockIfNeeded(self, trans, waitForTransBlock, exitOnError=False):
if not waitForTransBlock:
return trans
transId=Node.getTransId(trans)
if not self.waitForTransInBlock(transId):
if exitOnError:
Utils.cmdError("transaction with id %s never made it to a block" % (transId))
Utils.errorExit("Failed to find transaction with id %s in a block before timeout" % (transId))
return None
return trans
def getInfo(self, silentErrors=False, exitOnError=False):
cmdDesc = "get info"
info=self.processCldccCmd(cmdDesc, cmdDesc, silentErrors=silentErrors, exitOnError=exitOnError)
if info is None:
self.infoValid=False
else:
self.infoValid=True
self.lastRetrievedHeadBlockNum=int(info["head_block_num"])
self.lastRetrievedLIB=int(info["last_irreversible_block_num"])
return info
def getBlockFromDb(self, idx):
cmd="%s %s" % (Utils.MongoPath, self.mongoEndpointArgs)
subcommand="db.blocks.find().sort({\"_id\":%d}).limit(1).pretty()" % (idx)
if Utils.Debug: Utils.Print("cmd: echo \"%s\" | %s" % (subcommand, cmd))
try:
trans=Node.runMongoCmdReturnJson(cmd.split(), subcommand)
return trans
except subprocess.CalledProcessError as ex:
msg=ex.output.decode("utf-8")
Utils.Print("ERROR: Exception during get db block. %s" % (msg))
return None
def checkPulse(self, exitOnError=False):
info=self.getInfo(True, exitOnError=exitOnError)
return False if info is None else True
def getHeadBlockNum(self):
"""returns head block number(string) as returned by cldcc get info."""
if not self.enableMongo:
info=self.getInfo(exitOnError=True)
if info is not None:
headBlockNumTag="head_block_num"
return info[headBlockNumTag]
else:
# Either this implementation or the one in getIrreversibleBlockNum are likely wrong.
block=self.getBlockFromDb(-1)
if block is not None:
blockNum=block["block_num"]
return blockNum
return None
def getIrreversibleBlockNum(self):
if not self.enableMongo:
info=self.getInfo(exitOnError=True)
if info is not None:
return info["last_irreversible_block_num"]
else:
# Either this implementation or the one in getHeadBlockNum are likely wrong.
block=self.getBlockFromDb(-1)
if block is not None:
blockNum=block["block_num"]
return blockNum
return None
def getBlockNum(self, blockType=BlockType.head):
assert isinstance(blockType, BlockType)
if blockType==BlockType.head:
return self.getHeadBlockNum()
elif blockType==BlockType.lib:
return self.getIrreversibleBlockNum()
else:
unhandledEnumType(blockType)
def kill(self, killSignal):
if Utils.Debug: Utils.Print("Killing node: %s" % (self.cmd))
assert(self.pid is not None)
try:
os.kill(self.pid, killSignal)
except OSError as ex:
Utils.Print("ERROR: Failed to kill node (%d)." % (self.cmd), ex)
return False
# wait for kill validation
def myFunc():
try:
os.kill(self.pid, 0) #check if process with pid is running
except OSError as _:
return True
return False
if not Utils.waitForBool(myFunc):
Utils.Print("ERROR: Failed to validate node shutdown.")
return False
# mark node as killed
self.pid=None
self.killed=True
return True
def verifyAlive(self, silent=False):
if not silent and Utils.Debug: Utils.Print("Checking if node(pid=%s) is alive(killed=%s): %s" % (self.pid, self.killed, self.cmd))
if self.killed or self.pid is None:
return False
try:
os.kill(self.pid, 0)
except ProcessLookupError as ex:
# mark node as killed
self.pid=None
self.killed=True
return False
except PermissionError as ex:
return True
else:
return True
def getBlockProducerByNum(self, blockNum, timeout=None, waitForBlock=True, exitOnError=True):
if waitForBlock:
self.waitForBlock(blockNum, timeout=timeout, blockType=BlockType.head)
block=self.getBlock(blockNum, exitOnError=exitOnError)
blockProducer=block["producer"]
if blockProducer is None and exitOnError:
Utils.cmdError("could not get producer for block number %s" % (blockNum))
errorExit("Failed to get block's producer")
return blockProducer
def getBlockProducer(self, timeout=None, waitForBlock=True, exitOnError=True, blockType=BlockType.head):
blockNum=self.getBlockNum(blockType=blockType)
block=self.getBlock(blockNum, exitOnError=exitOnError)
blockProducer=block["producer"]
if blockProducer is None and exitOnError:
Utils.cmdError("could not get producer for block number %s" % (blockNum))
errorExit("Failed to get block's producer")
return blockProducer
def getNextCleanProductionCycle(self, trans):
transId=Node.getTransId(trans)
rounds=21*12*2 # max time to ensure that at least 2/3+1 of producers x blocks per producer x at least 2 times
self.waitForTransFinalization(transId, timeout=rounds/2)
irreversibleBlockNum=self.getIrreversibleBlockNum()
# The voted schedule should be promoted now, then need to wait for that to become irreversible
votingTallyWindow=120 #could be up to 120 blocks before the votes were tallied
promotedBlockNum=self.getHeadBlockNum()+votingTallyWindow
self.waitForIrreversibleBlock(promotedBlockNum, timeout=rounds/2)
ibnSchedActive=self.getIrreversibleBlockNum()
blockNum=self.getHeadBlockNum()
Utils.Print("Searching for clean production cycle blockNum=%s ibn=%s transId=%s promoted bn=%s ibn for schedule active=%s" % (blockNum,irreversibleBlockNum,transId,promotedBlockNum,ibnSchedActive))
blockProducer=self.getBlockProducerByNum(blockNum)
blockNum+=1
Utils.Print("Advance until the next block producer is retrieved")
while blockProducer == self.getBlockProducerByNum(blockNum):
blockNum+=1
blockProducer=self.getBlockProducerByNum(blockNum)
return blockNum
# TBD: make nodeId an internal property
# pylint: disable=too-many-locals
def relaunch(self, nodeId, chainArg, newChain=False, timeout=Utils.systemWaitTimeout, addOrSwapFlags=None):
assert(self.pid is None)
assert(self.killed)
if Utils.Debug: Utils.Print("Launching node process, Id: %d" % (nodeId))
cmdArr=[]
myCmd=self.cmd
toAddOrSwap=copy.deepcopy(addOrSwapFlags) if addOrSwapFlags is not None else {}
if not newChain:
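            # Rebuild the original command line: drop any --genesis-json/--genesis-timestamp
            # arguments and swap in replacement values for flags listed in addOrSwapFlags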
skip=False
swapValue=None
for i in self.cmd.split():
Utils.Print("\"%s\"" % (i))
if skip:
skip=False
continue
if "--genesis-json" == i or "--genesis-timestamp" == i:
skip=True
continue
if swapValue is None:
cmdArr.append(i)
else:
cmdArr.append(swapValue)
swapValue=None
if i in toAddOrSwap:
swapValue=toAddOrSwap[i]
del toAddOrSwap[i]
for k,v in toAddOrSwap.items():
cmdArr.append(k)
cmdArr.append(v)
myCmd=" ".join(cmdArr)
dataDir="var/lib/node_%02d" % (nodeId)
dt = datetime.datetime.now()
dateStr="%d_%02d_%02d_%02d_%02d_%02d" % (
dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second)
stdoutFile="%s/stdout.%s.txt" % (dataDir, dateStr)
stderrFile="%s/stderr.%s.txt" % (dataDir, dateStr)
with open(stdoutFile, 'w') as sout, open(stderrFile, 'w') as serr:
cmd=myCmd + ("" if chainArg is None else (" " + chainArg))
Utils.Print("cmd: %s" % (cmd))
popen=subprocess.Popen(cmd.split(), stdout=sout, stderr=serr)
self.pid=popen.pid
if Utils.Debug: Utils.Print("restart Node host=%s, port=%s, pid=%s, cmd=%s" % (self.host, self.port, self.pid, self.cmd))
def isNodeAlive():
"""wait for node to be responsive."""
try:
return True if self.checkPulse() else False
except (TypeError) as _:
pass
return False
isAlive=Utils.waitForBool(isNodeAlive, timeout)
if isAlive:
            Utils.Print("Node relaunch was successful.")
else:
            Utils.Print("ERROR: Node relaunch failed.")
self.pid=None
return False
self.cmd=cmd
self.killed=False
return True
def trackCmdTransaction(self, trans, ignoreNonTrans=False):
if trans is None:
if Utils.Debug: Utils.Print(" cmd returned transaction: %s" % (trans))
return
if ignoreNonTrans and not Node.isTrans(trans):
if Utils.Debug: Utils.Print(" cmd returned a non-transaction")
return
transId=Node.getTransId(trans)
if Utils.Debug:
status=Node.getTransStatus(trans)
blockNum=Node.getTransBlockNum(trans)
if transId in self.transCache.keys():
replaceMsg="replacing previous trans=\n%s" % json.dumps(self.transCache[transId], indent=2, sort_keys=True)
else:
replaceMsg=""
Utils.Print(" cmd returned transaction id: %s, status: %s, (possible) block num: %s %s" % (transId, status, blockNum, replaceMsg))
self.transCache[transId]=trans
def reportStatus(self):
Utils.Print("Node State:")
Utils.Print(" cmd : %s" % (self.cmd))
self.verifyAlive(silent=True)
Utils.Print(" killed: %s" % (self.killed))
Utils.Print(" host : %s" % (self.host))
Utils.Print(" port : %s" % (self.port))
Utils.Print(" pid : %s" % (self.pid))
status="last getInfo returned None" if not self.infoValid else "at last call to getInfo"
Utils.Print(" hbn : %s (%s)" % (self.lastRetrievedHeadBlockNum, status))
Utils.Print(" lib : %s (%s)" % (self.lastRetrievedLIB, status))
|
[
"13821402840@139.com"
] |
13821402840@139.com
|
c2e975c70601b92c9be4f9a71b60ce11adb22d59
|
ed0ec5df77066762d30c2f86f473c551b90375d6
|
/bin/sync.py
|
75c57655b0e47744b997a36619125f60c47eb2d4
|
[
"MIT"
] |
permissive
|
agude/dotfiles
|
7441352b0b35b38c03625166deb05e66723e52e3
|
bfce1be63a4a29432ceb98df76c8e5437d78c60b
|
refs/heads/master
| 2023-08-07T18:40:10.024450
| 2023-07-28T16:08:33
| 2023-07-28T16:08:33
| 11,804,143
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,552
|
py
|
#!/usr/bin/env python3
"""Calls rsync to synchronize the local and a remote machine.
Exit Codes:
0: Exit successful.
"""
from distutils.spawn import find_executable
from subprocess import call
import argparse
import logging
import os.path
import sys
FLAGS = set([
"--acls",
"--archive",
"--compress",
"--fuzzy",
"--hard-links",
"--human-readable",
"--itemize-changes",
"--verbose",
"--xattrs",
"--rsh=ssh",
# For testing, otherwise remove!!
#"--dry-run",
])
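# Example invocation (hypothetical host name "workstation"; the flags correspond to the
# argparse options defined below):
#   ./sync.py --directory ~/notes --remote workstation --pull --exclude .cache --log INFO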
def get_rsync_location():
"""Find the location of the rsync executable from the user path.
This will return the location, or error.
Returns:
str: the location of the rsync executable.
Raises:
RuntimeError: If rsync is not found on the path
"""
rsync_loc = find_executable("rsync")
if rsync_loc is None:
ERR_MSG = "Can not find rsync."
logging.error(ERR_MSG)
raise RuntimeError(ERR_MSG)
logging.debug("Found rsync: %s", rsync_loc)
return rsync_loc
class Rsync:
def __init__(self, rsync_loc, flags, remote, location, is_pull):
logging.info("Constructing rsync object.")
self.rsync_loc = rsync_loc
self.flags = sorted(flags) # Sorting makes it easier to scan by eye
self.remote = remote
self.location = location
# If pull, set local to target, otherwise set local to source
self.is_pull = is_pull
self.source, self.target = self.__set_source_and_target()
self.command = self.__build_command()
def __set_source_and_target(self):
# The remote needs a host name, so add it
remote_path = "{}:{}".format(self.remote, self.location)
local = self.location
# Output is (source, target)
output = (remote_path, local)
logging.debug("Is --pull: '%s'", self.is_pull)
return output if self.is_pull else reversed(output)
def __build_command(self):
command = [self.rsync_loc] + self.flags + [self.source, self.target]
logging.debug("Command: `%s`", ' '.join(command))
return command
def run(self):
logging.info("Running rsync command.")
call(self.command)
if __name__ == "__main__":
# Parse the list of files
parser = argparse.ArgumentParser()
parser.add_argument(
"--directory",
help="the directory to sync",
required=True,
)
parser.add_argument(
"--remote",
help="the host name of the remote system",
required=True,
)
parser.add_argument(
"--exclude",
nargs="*",
help="a list of files or directories to exclude, relative to the source path",
)
parser.add_argument(
"--pull",
help="set the local machine to the target",
action="store_true",
dest="pull",
)
parser.add_argument(
"--push",
help="set the local machine to the source",
action="store_false",
dest="pull",
)
parser.add_argument(
"--log",
help="set the logging level, defaults to WARNING",
dest="log_level",
default=logging.WARNING,
choices=[
"DEBUG",
"INFO",
"WARNING",
"ERROR",
"CRITICAL",
],
)
args, unknown_args = parser.parse_known_args()
# Set the logging level based on the arguments
logging.basicConfig(level=args.log_level)
logging.debug("Arguments: %s", args)
# Pass through flags unknown to argparse to rsync
logging.info("Checking for pass through flags.")
if unknown_args:
logging.info("Pass through flags found.")
for flag in unknown_args:
logging.debug("Adding pass through flag: '%s'", flag)
FLAGS.add(flag)
# Add excludes to flags
logging.info("Checking for excluded items.")
if args.exclude:
logging.info("Excluded items found.")
for ex in args.exclude:
exclude_statement = "--exclude={}".format(ex)
logging.debug("Adding excluded item: '%s'", exclude_statement)
FLAGS.add(exclude_statement)
# Build the full directory path
full_path = os.path.normpath(args.directory)
full_path = os.path.join(full_path, "") # Ensures a trailing slash
# Construct the rsync command and run it
rsync = Rsync(
get_rsync_location(),
FLAGS,
args.remote,
full_path,
args.pull,
)
rsync.run()
sys.exit(0)
|
[
"alex.public.account+dotfiles@gmail.com"
] |
alex.public.account+dotfiles@gmail.com
|
38b9b431a775c17cac50151a5e2bb5b71740e127
|
bb1c30864fc963f61bbab0a71d206fbe6447fd09
|
/test_autolens/integration/tests/result_passing/lens_light/instance_via_af_last_specify_light.py
|
7a005d266a1c2d6541852bede44f71f39f8be5ff
|
[
"MIT"
] |
permissive
|
pranath-reddy/PyAutoLens
|
c39c1aa413985c0a280da1b95e6cb55537f769cc
|
bcfb2e7b447aa24508fc648d60b6fd9b4fd852e7
|
refs/heads/master
| 2022-05-28T15:41:26.881648
| 2020-05-04T20:02:45
| 2020-05-04T20:02:45
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,641
|
py
|
import autofit as af
import autolens as al
from test_autolens.integration.tests.imaging import runner
test_type = "result_passing"
test_name = "lens_light_instance_via_af_last_specify_light"
data_type = "lens_sie__source_smooth"
data_resolution = "lsst"
def make_pipeline(name, phase_folders, non_linear_class=af.MultiNest):
phase1 = al.PhaseImaging(
phase_name="phase_1",
phase_folders=phase_folders,
galaxies=dict(
lens=al.GalaxyModel(
redshift=0.5,
light=al.lp.SphericalDevVaucouleurs,
mass=al.mp.EllipticalIsothermal,
),
source=al.GalaxyModel(redshift=1.0, light=al.lp.EllipticalSersic),
),
sub_size=1,
non_linear_class=non_linear_class,
)
phase1.optimizer.const_efficiency_mode = True
phase1.optimizer.n_live_points = 60
phase1.optimizer.sampling_efficiency = 0.8
# This is an example of how we currently pass the lens light model, which works.
# We know it works because N=9 for the
phase2 = al.PhaseImaging(
phase_name="phase_2",
phase_folders=phase_folders,
galaxies=dict(
lens=al.GalaxyModel(
redshift=0.5,
light=af.last.instance.galaxies.lens.light,
mass=af.last.model.galaxies.lens.mass,
),
source=phase1.result.model.galaxies.source,
),
sub_size=1,
non_linear_class=non_linear_class,
)
return al.PipelineDataset(name, phase1, phase2)
if __name__ == "__main__":
import sys
runner.run(sys.modules[__name__])
|
[
"james.w.nightingale@durham.ac.uk"
] |
james.w.nightingale@durham.ac.uk
|
47c1eb63c8b72b52c548a9a5d54db9484a8f1c34
|
d2aa487820826b046d41cfa9b68dfaebef180468
|
/__init__.py
|
273b1a4b6690eddd3bafd2feba49045f46fec7e8
|
[
"MIT"
] |
permissive
|
shv-om/lecture-planner
|
f542ddeadc96f711deaf20b7e509b8080eee5996
|
88e4b7d9bd9912afcbfc15bec3aad8fa5efd69be
|
refs/heads/main
| 2023-07-13T04:47:03.657284
| 2021-08-17T18:56:35
| 2021-08-17T18:56:35
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,353
|
py
|
"""
Initialize Lecture Planner
"""
from os import system, name
try:
from lectureplanner import LecturePlanner
except ImportError as e:
print("Error loading Lecture Planner module :(")
menu = """
\t\t***Lecture Planner***
\t1. Input Data
\t2. Start Planning
\tq. Quit (q)
"""
def clear():
# for windows user
if name == 'nt':
system('cls')
else:
system('clear')
def input_batch_room(forname, forno):
list1 = []
while True:
name = input(f"Enter the {forname}: ")
no = int(input(f"Enter the {forno}: "))
list1.append([name, no])
if input("Add more (y/n): ") != 'y':
break
return list1
def inputdata():
clear()
"""
subjects = input("Enter subject list separated by a space eg: sub1 sub2... :\n").split()
print("\nEnter Batch Information (Name and No. of Students")
batches = input_batch_room('Batch Name', 'No. of students')
print("\nEnter Class Rooms information (Room no. and Capactiy)")
rooms = input_batch_room('Room No.', 'Capacity')
"""
# Temporary data
subjects = ['Predictive Analytics','NLP','Web Technology','DBMS','OODA','Information Retrieval','Introduction to Python','Machine Learning', 'Extra Sub', 'Extra 2']
batches = [['DA', 36], ['GA', 40], ['MI', 35], ['CS', 40]] #branch name with no. of students
#room no. + size of room
rooms = [['ROOM NO.1', 50], ['ROOM NO.2', 60], ['ROOM NO.3', 50], ['ROOM NO.4', 80], ['ROOM NO.5', 50], ['ROOM NO.6', 100], ['ROOM NO.7', 50], ['ROOM NO.8',100], ['ROOM NO.9', 50], ['ROOM NO.10', 100], ['ROOM NO.11', 50], ['ROOM NO.12', 60]]
#pre defined for ease of testing
time_period = ['9:00-10:00', '10:00-11:00', '11:00-12:00', '12:30-1:30', '1:30-2:30', '2:30-3:30', '3:30-4:30'] #cllg time from 9:00 to 4:30
return {'subjects': subjects, 'batches': batches, 'rooms': rooms, 'time_period': time_period}
def startplanning(data):
#lecture = LecturePlanner(data['batches'][:3], data['rooms'], data['subjects'][:3], data['time_period'][:5], 2)
lecture = LecturePlanner(data['batches'][:3], data['rooms'], data['subjects'], data['time_period'][:4], 4)
lecture.planner()
data = None
while True:
print(menu)
m = input()
if m == 'q':
print("Exiting...")
break
    elif int(m) == 1:
        data = inputdata()
        print("Data Received")
    elif int(m) == 2:
        if data is None:
            print("Please input data first (option 1)")
            continue
        print("StartPlanning...")
        startplanning(data)
        print("Time Table created")
else:
print("Wrong choice")
|
[
"shivamchaudhary6197@gmail.com"
] |
shivamchaudhary6197@gmail.com
|
cfe841fb7dab9e357bcbe5e24e3431f48f229d42
|
c53115a982037f8545816a077d495d0eb10bd984
|
/venv/Scripts/pasteurize-script.py
|
c1e073950828cf12012490177ed77d43f6c3bca6
|
[] |
no_license
|
dssit/App02
|
de6c7a0b36925433330b3d3bbe6bf4a13f9e4819
|
97db4817ca05a9adf71bf3777488daacaf110d27
|
refs/heads/master
| 2022-12-29T04:37:52.465463
| 2020-10-15T01:30:33
| 2020-10-15T01:30:33
| 304,176,899
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 411
|
py
|
#!F:\python\App02\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'future==0.18.2','console_scripts','pasteurize'
__requires__ = 'future==0.18.2'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('future==0.18.2', 'console_scripts', 'pasteurize')()
)
|
[
"7725949+dssit@user.noreply.gitee.com"
] |
7725949+dssit@user.noreply.gitee.com
|
d71e4cf1084e2921dde728e40f9c3ad65034dcff
|
0506757a2ebfebe0a7eb93a43031784778836265
|
/covid19/venv/Scripts/futurize-script.py
|
b2f28883ea08a346dcf0735da0d4dd77af25a87f
|
[] |
no_license
|
SeungYeopB/bigdata
|
90772c70c257c93adef90431e43a55e288224769
|
dc5b0b3512dfa80e7c940c4328860c0d4aae99a7
|
refs/heads/master
| 2023-06-01T09:19:23.522689
| 2021-07-03T07:07:48
| 2021-07-03T07:07:48
| 382,545,218
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 986
|
py
|
#!c:\bigdata\covid19\venv\scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'future==0.18.2','console_scripts','futurize'
import re
import sys
# for compatibility with easy_install; see #2198
__requires__ = 'future==0.18.2'
try:
from importlib.metadata import distribution
except ImportError:
try:
from importlib_metadata import distribution
except ImportError:
from pkg_resources import load_entry_point
def importlib_load_entry_point(spec, group, name):
dist_name, _, _ = spec.partition('==')
matches = (
entry_point
for entry_point in distribution(dist_name).entry_points
if entry_point.group == group and entry_point.name == name
)
return next(matches).load()
globals().setdefault('load_entry_point', importlib_load_entry_point)
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(load_entry_point('future==0.18.2', 'console_scripts', 'futurize')())
|
[
"="
] |
=
|
04e977d5b647056e41541ee1e3deb317f157bbc8
|
4df56db2e5b7b72493b020989d7619c370328212
|
/app/main/__init__.py
|
a071b178b4feda88141d52331641ce91a1eefcbd
|
[] |
no_license
|
HsOjo/ExamSystem
|
0cb7930edcb904766928c24da3445033b7d1143a
|
0b48dd700fcbedb45663d5402a4f7e7c9272e153
|
refs/heads/master
| 2020-05-26T02:55:13.426747
| 2019-06-29T22:35:59
| 2019-06-29T22:35:59
| 188,082,877
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 138
|
py
|
from flask import Blueprint
main = Blueprint('main', __name__, template_folder='templates', static_folder='static')
from . import views
|
[
"1134031392@qq.com"
] |
1134031392@qq.com
|
5240062cac16cf1e417a72d4636318686b7b0433
|
bb7746cb5db316a932ede0200bee2fb1f5e3162d
|
/1. Building APIs/2. FastAPI Deployment/example/main.py
|
12cd522eb3c0533b77d70d23408bcb1769d8fbd6
|
[] |
no_license
|
adiwany/Cloud-and-DevOps
|
bf9ba0b37d70dbc6fc88c9817ae59554117dd600
|
2aef3b20d2120582fd80dee33897fb4f0bcefc62
|
refs/heads/main
| 2023-08-26T15:40:55.965672
| 2021-11-08T16:25:24
| 2021-11-08T16:25:24
| 425,945,784
| 0
| 0
| null | 2021-11-08T18:13:13
| 2021-11-08T18:13:12
| null |
UTF-8
|
Python
| false
| false
| 337
|
py
|
import fastapi
import uvicorn
from api import dob_api
from views import home
app = fastapi.FastAPI()
def configure_routing():
app.include_router(home.router)
app.include_router(dob_api.router)
if __name__ == '__main__':
configure_routing()
uvicorn.run(app, port=8000, host='127.0.0.1')
else:
configure_routing()
|
[
"harryaberg@gmail.com"
] |
harryaberg@gmail.com
|
81cff913410d53649edb703ba9e2cf418dd357aa
|
c49e991c0fd43bcca5fa624c9beb8a96fa6c0de6
|
/create_employee_user/__manifest__.py
|
15e6ad57477a6c7bb0e0bc4b15fb1ad933dbe4a3
|
[] |
no_license
|
VADR670730/apps-odoo12
|
a822ac473129d088bfcad477ed834cdda60a3968
|
49ab7129d832c7972277c246382c52e269aaf72f
|
refs/heads/master
| 2022-05-31T01:53:11.913976
| 2020-05-04T11:38:34
| 2020-05-04T11:38:34
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,958
|
py
|
# -*- coding: utf-8 -*-
##############################################################################
#
#    This module creates a system user and a contact from an HR Employee.
# It will process the data entered and return a message indicating whether
# the company is eligible or not.
#
# Copyright (C) 2020- todooweb.com (https://www.todooweb.com)
# @author ToDOO (https://www.linkedin.com/company/todooweb)
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
{
'name': "HR Employee to system user",
    'summary': """HR Employee to system user with automatic tasks""",
    'description': """Creation of the system user and contact from HR Employee with automatic tasks.
""",
'version': '12.0.0.0.1',
'category': 'Human Resources',
'license': 'LGPL-3',
'author': "ToDOO (www.todooweb.com)",
'website': "https://todooweb.com/",
'contributors': [
"Equipo Dev <devtodoo@gmail.com>",
"Edgar Naranjo <edgarnaranjof@gmail.com>",
"Tatiana Rosabal <tatianarosabal@gmail.com>",
],
'support': 'devtodoo@gmail.com',
'depends': [
'base',
'sale',
'account',
'mail',
'hr_contract',
'hr',
'crm',
'management_contract'
],
'data': [
'views/employee_user.xml',
'data/create_user_data.xml',
],
'images': [
'static/description/screenshot_user.png'
],
'installable': True,
'application': False,
'auto_install': False,
}
|
[
"edgar.naranjo"
] |
edgar.naranjo
|
e38a84be34689a121f7630e0be71a892853bfd0a
|
2bfa8f88dd1ce7a73bfac013f567b33520a99cb8
|
/depthTo3d.py
|
5f9b3fe90d489c84351438b25777859e77fa7f14
|
[] |
no_license
|
yaminikota95/ComputerVision
|
46df68b153ddc14c32247c110647d644f8573731
|
48163658f15a0350895bbe086ca79bf0c2e3df3e
|
refs/heads/master
| 2023-08-15T21:45:50.919258
| 2021-10-21T21:09:32
| 2021-10-21T21:09:32
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,940
|
py
|
import numpy as np
import cv2 as cv
import glob
from matplotlib import pyplot as plt
################ FIND CHESSBOARD CORNERS - OBJECT POINTS AND IMAGE POINTS #############################
chessboardSize = (9,6)
frameSize = (640,480)
# termination criteria
criteria = (cv.TERM_CRITERIA_EPS + cv.TERM_CRITERIA_MAX_ITER, 30, 0.001)
# prepare object points, like (0,0,0), (1,0,0), (2,0,0) ....,(6,5,0)
objp = np.zeros((chessboardSize[0] * chessboardSize[1], 3), np.float32)
objp[:,:2] = np.mgrid[0:chessboardSize[0],0:chessboardSize[1]].T.reshape(-1,2)
objp = objp * 30
#print(objp)
# Arrays to store object points and image points from all the images.
objpoints = [] # 3d point in real world space
imgpointsL = [] # 2d points in image plane.
imgpointsR = [] # 2d points in image plane.
imagesLeft = sorted(glob.glob('images/stereoLeft/*.jpg'))
imagesRight = sorted(glob.glob('images/stereoRight/*.jpg'))
for imgLeft, imgRight in zip(imagesLeft, imagesRight):
imgL = cv.imread(imgLeft)
imgR = cv.imread(imgRight)
grayL = cv.cvtColor(imgL, cv.COLOR_BGR2GRAY)
grayR = cv.cvtColor(imgR, cv.COLOR_BGR2GRAY)
# Find the chess board corners
retL, cornersL = cv.findChessboardCorners(grayL, chessboardSize, None)
retR, cornersR = cv.findChessboardCorners(grayR, chessboardSize, None)
# If found, add object points, image points (after refining them)
    if retL and retR:
objpoints.append(objp)
cornersL = cv.cornerSubPix(grayL, cornersL, (11,11), (-1,-1), criteria)
imgpointsL.append(cornersL)
cornersR = cv.cornerSubPix(grayR, cornersR, (11,11), (-1,-1), criteria)
imgpointsR.append(cornersR)
# Draw and display the corners
cv.drawChessboardCorners(imgL, chessboardSize, cornersL, retL)
cv.imshow('img left', imgL)
cv.drawChessboardCorners(imgR, chessboardSize, cornersR, retR)
cv.imshow('img right', imgR)
cv.waitKey(100)
cv.destroyAllWindows()
############## CALIBRATION #######################################################
retL, cameraMatrixL, distL, rvecsL, tvecsL = cv.calibrateCamera(objpoints, imgpointsL, frameSize, None, None)
heightL, widthL, channelsL = imgL.shape
newCameraMatrixL, roi_L = cv.getOptimalNewCameraMatrix(cameraMatrixL, distL, (widthL, heightL), 1, (widthL, heightL))
retR, cameraMatrixR, distR, rvecsR, tvecsR = cv.calibrateCamera(objpoints, imgpointsR, frameSize, None, None)
heightR, widthR, channelsR = imgR.shape
newCameraMatrixR, roi_R = cv.getOptimalNewCameraMatrix(cameraMatrixR, distR, (widthR, heightR), 1, (widthR, heightR))
print(cameraMatrixL)
print(newCameraMatrixL)
########## Stereo Vision Calibration #############################################
flags = 0
flags |= cv.CALIB_FIX_INTRINSIC
# Here we fix the intrinsic camera matrices so that only Rot, Trns, Emat and Fmat are calculated.
# Hence the intrinsic parameters are the same
criteria_stereo = (cv.TERM_CRITERIA_EPS + cv.TERM_CRITERIA_MAX_ITER, 30, 0.001)
# This step is performed to find the transformation between the two cameras and calculate the Essential and Fundamental matrices
retStereo, newCameraMatrixL, distL, newCameraMatrixR, distR, rot, trans, essentialMatrix, fundamentalMatrix = cv.stereoCalibrate(objpoints, imgpointsL, imgpointsR, newCameraMatrixL, distL, newCameraMatrixR, distR, grayL.shape[::-1], criteria_stereo, flags)
# Reprojection Error
mean_error = 0
for i in range(len(objpoints)):
imgpoints2, _ = cv.projectPoints(objpoints[i], rvecsL[i], tvecsL[i], newCameraMatrixL, distL)
error = cv.norm(imgpointsL[i], imgpoints2, cv.NORM_L2)/len(imgpoints2)
mean_error += error
print("total error: {}".format(mean_error/len(objpoints)))
########## Stereo Rectification #################################################
rectifyScale= 1
rectL, rectR, projMatrixL, projMatrixR, Q, roi_L, roi_R= cv.stereoRectify(newCameraMatrixL, distL, newCameraMatrixR, distR, grayL.shape[::-1], rot, trans, rectifyScale,(0,0))
stereoMapL = cv.initUndistortRectifyMap(newCameraMatrixL, distL, rectL, projMatrixL, grayL.shape[::-1], cv.CV_16SC2)
stereoMapR = cv.initUndistortRectifyMap(newCameraMatrixR, distR, rectR, projMatrixR, grayR.shape[::-1], cv.CV_16SC2)
print("Saving parameters!")
cv_file = cv.FileStorage('stereoMap.xml', cv.FILE_STORAGE_WRITE)
cv_file.write('stereoMapL_x',stereoMapL[0])
cv_file.write('stereoMapL_y',stereoMapL[1])
cv_file.write('stereoMapR_x',stereoMapR[0])
cv_file.write('stereoMapR_y',stereoMapR[1])
cv_file.release()
# Camera parameters to undistort and rectify images
cv_file = cv.FileStorage()
cv_file.open('stereoMap.xml', cv.FileStorage_READ)
stereoMapL_x = cv_file.getNode('stereoMapL_x').mat()
stereoMapL_y = cv_file.getNode('stereoMapL_y').mat()
stereoMapR_x = cv_file.getNode('stereoMapR_x').mat()
stereoMapR_y = cv_file.getNode('stereoMapR_y').mat()
imgL = cv.imread('images/stereoLeft/left10.jpg', cv.IMREAD_GRAYSCALE)
imgR = cv.imread('images/stereoRight/right10.jpg', cv.IMREAD_GRAYSCALE)
# Show the frames
cv.imshow("frame right", imgR)
cv.imshow("frame left", imgL)
# Undistort and rectify images
imgR = cv.remap(imgR, stereoMapR_x, stereoMapR_y, cv.INTER_LANCZOS4, cv.BORDER_CONSTANT, 0)
imgL = cv.remap(imgL, stereoMapL_x, stereoMapL_y, cv.INTER_LANCZOS4, cv.BORDER_CONSTANT, 0)
# Show the frames
cv.imshow("frame right", imgR)
cv.imshow("frame left", imgL)
stereo = cv.StereoBM_create(numDisparities=32, blockSize=9)
# For each pixel the algorithm will find the best disparity from 0 up to numDisparities
# Larger block size implies smoother, though less accurate disparity map
disparity = stereo.compute(imgL, imgR)
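# Depth is inversely proportional to disparity (Z = f * B / d, i.e. focal length times
# baseline over disparity); reprojectImageTo3D below applies this relationship through
# the Q matrix returned by stereoRectify.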
#print(depPth)
image_3d_reprojection = cv.reprojectImageTo3D(disparity, Q, handleMissingValues=True)
cv.imshow("Left", imgL)
cv.imshow("right", imgR)
cv.imshow("Disparity", disparity)
cv.imshow("Reprojection", image_3d_reprojection)
cv.waitKey(0)
plt.imshow(disparity)
plt.axis('off')
plt.show()
|
[
"noreply@github.com"
] |
yaminikota95.noreply@github.com
|
d93341049c222de00d1435dfdcd43ca42781e011
|
6ba921390a5ad78433f79857ae3d319aeed1df8c
|
/interface/__init__.py
|
fc78bb79879f208103c872f27171bbcc799159a0
|
[
"MIT"
] |
permissive
|
ykingdsjj/CourseSelect
|
10a51be31c48846b04db227e13adf89e22edd9be
|
2993ba7f69112fa02b1786d205b3fc2dbecdc789
|
refs/heads/master
| 2021-10-09T05:57:08.825780
| 2018-12-22T09:20:18
| 2018-12-22T09:20:18
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 165
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2018/12/22 10:25 AM
# @Author : zhangjiang
# @Site :
# @File : __init__.py.py
# @Software: PyCharm
|
[
"zhangjiang@imcoming.cn"
] |
zhangjiang@imcoming.cn
|
11bc3917dfe4f578473ceb682f767ba39f94651f
|
17f3cb73d9077fe06469cc3a7130293f12a9095f
|
/greater_2.py
|
167f5d392e254974ee4cd59eac0a992cf21f53ff
|
[] |
no_license
|
crazyuploader/Python
|
9037427158beee8eaf3db79daae6c57f71ddbfdc
|
c8b2de8a66620864be85842dd974a0dc4dcc2bd2
|
refs/heads/main
| 2023-08-15T04:16:55.738823
| 2023-08-01T09:14:29
| 2023-08-01T09:14:29
| 233,270,237
| 0
| 3
| null | 2023-09-14T18:35:16
| 2020-01-11T17:31:11
|
Python
|
UTF-8
|
Python
| false
| false
| 555
|
py
|
#!/usr/bin/env python3
__author__ = "Jugal Kishore"
__version__ = "1.0"
print("///Program to Display Greater Number///")
print("\nEnter First Number: ")
a = int(input())
print("\nEnter Second Number: ")
b = int(input())
if a > b:
print("\nGreater Number between {0} and {1} is = {2}.".format(a, b, a))
elif b > a:
print("\nGreater Number between {0} and {1} is = {2}.".format(a, b, b))
else:
print("\n{0} and {1} are Equal!".format(a, b))
print("\nCreated by Jugal Kishore -- 2020")
# Run it online at https://python.jugalkishore.repl.run/
|
[
"49350241+crazyuploader@users.noreply.github.com"
] |
49350241+crazyuploader@users.noreply.github.com
|
fd57bcbea1d64a2ad971b4d282c6c6b90a22e774
|
9743d5fd24822f79c156ad112229e25adb9ed6f6
|
/xai/brain/wordbase/adjectives/_uphill.py
|
4a07e25a7ea1e6741ed9f3888ed7c277825020d1
|
[
"MIT"
] |
permissive
|
cash2one/xai
|
de7adad1758f50dd6786bf0111e71a903f039b64
|
e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6
|
refs/heads/master
| 2021-01-19T12:33:54.964379
| 2017-01-28T02:00:50
| 2017-01-28T02:00:50
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 435
|
py
|
#class header
class _UPHILL():
def __init__(self,):
self.name = "UPHILL"
self.definitions = [u'leading to a higher place on a slope: ', u'needing a large amount of effort: ']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'adjectives'
def run(self, obj1, obj2):
self.jsondata[obj2] = {}
self.jsondata[obj2]['properties'] = self.name.lower()
return self.jsondata
|
[
"xingwang1991@gmail.com"
] |
xingwang1991@gmail.com
|
ccb81308eed9b9e5730a621e2b4a3fbda552538f
|
c0450361aa707635f5bf70eff21c1235d7e60cfa
|
/Lessons by HoudyHo/lesson (14).py
|
af963f7dc6838e6f849641c660348af41387cfcf
|
[] |
no_license
|
zarkaltair/Learn-python
|
f48810b86e9832f4c364c345d1fa8624f9ced683
|
dd6114b5bd6cc30eff328002521041dd2be2c3c5
|
refs/heads/master
| 2020-04-10T05:48:51.052751
| 2019-01-23T18:48:34
| 2019-01-23T18:48:34
| 160,837,639
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 333
|
py
|
import random
for i in range(10):
print(random.randint(1,100))
import math
num=10
print(math.sqrt(num))
from random import randint
print(randint(1,10))
from math import*
print(sqrt(25))
from math import*
print(pi)
from math import sqrt,pi
from math import sqrt as my_sqrt
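# Importing under an alias keeps math.sqrt usable even though a local sqrt() is defined below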
def sqrt():
print('my function')
print(my_sqrt(36))
|
[
"zarkaltair@gmail.com"
] |
zarkaltair@gmail.com
|
d51af6f55723ced7f1f7eed4375f0a64b731eae9
|
e7164d44058a06331c034cc17eefe1521d6c95a2
|
/python-wrapper/segment.py
|
3e675e6ff50678963fbb76dcbbc96df4d7abf6ff
|
[] |
no_license
|
chenghuige/gezi
|
fbc1e655396fbc365fffacc10409d35d20e3952c
|
4fc8f9a3c5837e8add720bf6954a4f52abfff8b5
|
refs/heads/master
| 2021-01-20T01:57:18.362413
| 2016-11-08T15:34:07
| 2016-11-08T15:34:07
| 101,304,774
| 0
| 3
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,209
|
py
|
import os
import sys
import glob
from pyplusplus import module_builder
root = '/home/users/chenghuige/rsc/'
name = 'segment'
#define_symbols = ['GCCXML','PYTHON_WRAPPER','NO_BAIDU_DEP']
define_symbols = ['GCCXML','PYTHON_WRAPPER']
files = [
'include.python/common_util.h', #for safe let it be first for without it file_util.h or string_util.h... will fail
'./include.python/log_util.h',
'./include.python/Segmentor.h',
]
paths = [
#'./gezi.include.python/Numeric/Vector/',
#'./include.python/MLCore/',
#'./include.python/Prediction/Instances/',
]
#import gezi
#for path in paths:
# files += [f for f in gezi.get_filepaths(path) if f.endswith('.h')]
include_paths=[
'third-64/glog',
'third-64/gflags',
'third-64/gtest',
'third-64/boost.1.53',
'lib2-64/bsl',
'lib2-64/postag',
'lib2-64/dict',
'lib2-64/libcrf',
'lib2-64/others-ex',
'lib2-64/ullib',
'lib2-64/ccode',
'public/odict/output',
'public/uconv/output',
'public/configure/output',
'app/search/sep/anti-spam/gezi/third/rabit',
]
include_paths_python = [
'app/search/sep/anti-spam/gezi/python-wrapper',
]
include_paths_obsolute = [
#'app/search/sep/anti-spam/melt/python-wrapper/gezi.include.python',
'lib2-64/wordseg',
'public/comlog-plugin',
'app/search/sep/anti-spam/gezi/third',
]
mb = module_builder.module_builder_t(
gccxml_path = '~/.jumbo/bin/gccxml',
define_symbols = define_symbols,
files = files,
include_paths = [root + f + '/include' for f in include_paths]
+ [root + f + '/include.python' for f in include_paths_python]
+ [root + f for f in include_paths_obsolute]
)
mb.build_code_creator( module_name='lib%s'%name )
mb.code_creator.user_defined_directories.append( os.path.abspath('.') )
mb.write_module( os.path.join( os.path.abspath('./'), '%s_py.cc'%name) )
|
[
"chenghuige@fa64baa9-71d1-4fed-97ae-c15534abce97"
] |
chenghuige@fa64baa9-71d1-4fed-97ae-c15534abce97
|
91d56317f2f750056e042724cdef6a742f9b675e
|
9ed4a4b50de33ff23f4a4ee709cb7a634996d801
|
/movie/migrations/0001_initial.py
|
e9213972ea3e8474cc1309d74f0d32ce0319ace6
|
[] |
no_license
|
Dean-shiyi/django
|
8ec06ba256e23db9b8642c99f463e7662ef00344
|
b0e7d53dbafb58618d296752180cb3e86b9be091
|
refs/heads/master
| 2020-05-17T23:51:02.244087
| 2019-04-29T09:38:53
| 2019-04-29T09:38:53
| 184,039,983
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 794
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-04-28 10:19
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='MovieDetall',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255, unique=True, verbose_name='电影标题')),
('desc', models.CharField(default='暂无简介', max_length=1000, verbose_name='电影简介')),
('online_time', models.DateField(verbose_name='上映日期')),
],
),
]
|
[
"admin@shiyi.local"
] |
admin@shiyi.local
|
2c8ce94c10e7d8264f24f6c59a3fea4aaa18be0c
|
066ee4df594a5dc90335d271b9d5a1b1e2a4d34c
|
/y/google-cloud-sdk/platform/google_appengine/google/appengine/tools/devappserver2/admin/admin_server.py
|
b7a9ebc2fb5517e3f4165380324ac65c95ff86f6
|
[
"Apache-2.0",
"LGPL-2.1-or-later",
"BSD-3-Clause",
"MIT",
"GPL-2.0-or-later",
"MPL-1.1",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
ychen820/microblog
|
a2d82447525325ec58285c2e5db58b79cceaca1b
|
d379afa2db3582d5c3be652165f0e9e2e0c154c6
|
refs/heads/master
| 2021-01-20T05:58:48.424357
| 2015-04-28T22:03:09
| 2015-04-28T22:03:09
| 32,948,331
| 0
| 2
|
BSD-3-Clause
| 2020-07-25T05:04:35
| 2015-03-26T19:45:07
|
Python
|
UTF-8
|
Python
| false
| false
| 5,716
|
py
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Run a server displaying the administrative UI for the application."""
import logging
import google
import webapp2
from google.appengine.tools.devappserver2 import wsgi_server
from google.appengine.tools.devappserver2.admin import admin_request_handler
from google.appengine.tools.devappserver2.admin import blobstore_viewer
from google.appengine.tools.devappserver2.admin import console
from google.appengine.tools.devappserver2.admin import cron_handler
from google.appengine.tools.devappserver2.admin import datastore_indexes_viewer
from google.appengine.tools.devappserver2.admin import datastore_stats_handler
from google.appengine.tools.devappserver2.admin import datastore_viewer
from google.appengine.tools.devappserver2.admin import logs_handler
from google.appengine.tools.devappserver2.admin import mail_request_handler
from google.appengine.tools.devappserver2.admin import memcache_viewer
from google.appengine.tools.devappserver2.admin import modules_handler
from google.appengine.tools.devappserver2.admin import quit_handler
from google.appengine.tools.devappserver2.admin import search_handler
from google.appengine.tools.devappserver2.admin import static_file_handler
from google.appengine.tools.devappserver2.admin import taskqueue_queues_handler
from google.appengine.tools.devappserver2.admin import taskqueue_tasks_handler
from google.appengine.tools.devappserver2.admin import xmpp_request_handler
class AdminApplication(webapp2.WSGIApplication):
"""A WSGI application that serves an administrative UI for the application."""
def __init__(self, dispatch, configuration):
"""Initializer for AdminApplication.
Args:
dispatch: A dispatcher.Dispatcher instance used to route requests and
provide state about running servers.
configuration: An application_configuration.ApplicationConfiguration
instance containing the configuration for the application.
"""
super(AdminApplication, self).__init__(
[('/datastore', datastore_viewer.DatastoreRequestHandler),
('/datastore/edit/(.*)', datastore_viewer.DatastoreEditRequestHandler),
('/datastore/edit', datastore_viewer.DatastoreEditRequestHandler),
('/datastore-indexes',
datastore_indexes_viewer.DatastoreIndexesViewer),
('/datastore-stats', datastore_stats_handler.DatastoreStatsHandler),
('/console', console.ConsoleRequestHandler),
('/console/restart/(.+)', console.ConsoleRequestHandler.restart),
('/memcache', memcache_viewer.MemcacheViewerRequestHandler),
('/blobstore', blobstore_viewer.BlobstoreRequestHandler),
('/blobstore/blob/(.+)', blobstore_viewer.BlobRequestHandler),
('/taskqueue', taskqueue_queues_handler.TaskQueueQueuesHandler),
('/taskqueue/queue/(.+)',
taskqueue_tasks_handler.TaskQueueTasksHandler),
('/cron', cron_handler.CronHandler),
('/xmpp', xmpp_request_handler.XmppRequestHandler),
('/mail', mail_request_handler.MailRequestHandler),
('/quit', quit_handler.QuitHandler),
('/search', search_handler.SearchIndexesListHandler),
('/search/document', search_handler.SearchDocumentHandler),
('/search/index', search_handler.SearchIndexHandler),
('/assets/(.+)', static_file_handler.StaticFileHandler),
('/instances', modules_handler.ModulesHandler),
('/instances/logs', logs_handler.LogsHandler),
webapp2.Route('/',
webapp2.RedirectHandler,
defaults={'_uri': '/instances'})],
debug=True)
self.dispatcher = dispatch
self.configuration = configuration
class AdminServer(wsgi_server.WsgiServer):
"""Serves an administrative UI for the application over HTTP."""
def __init__(self, host, port, dispatch, configuration, xsrf_token_path):
"""Initializer for AdminServer.
Args:
host: A string containing the name of the host that the server should bind
to e.g. "localhost".
port: An int containing the port that the server should bind to e.g. 80.
dispatch: A dispatcher.Dispatcher instance used to route requests and
provide state about running servers.
configuration: An application_configuration.ApplicationConfiguration
instance containing the configuration for the application.
xsrf_token_path: A string containing the path to a file that contains the
XSRF configuration for the admin UI.
"""
self._host = host
self._xsrf_token_path = xsrf_token_path
super(AdminServer, self).__init__((host, port),
AdminApplication(dispatch, configuration))
def start(self):
"""Start the AdminServer."""
admin_request_handler.AdminRequestHandler.init_xsrf(self._xsrf_token_path)
super(AdminServer, self).start()
logging.info('Starting admin server at: http://%s:%d', self._host,
self.port)
def quit(self):
"""Quits the AdminServer."""
super(AdminServer, self).quit()
console.ConsoleRequestHandler.quit()
|
[
"ychen207@binghamton.edu"
] |
ychen207@binghamton.edu
|
9c06e3fee9b162e60c22c3a364ff13cc9c4004bb
|
9f3cc2cb6a853560a69ecb14af1ad3080650f954
|
/Week4_Lesson2/join2_reducer.py
|
731fbb10d50335ea394bc5c7f599bf7ce0e850fd
|
[] |
no_license
|
andrez00/Hadoop-Coursera
|
c4cb0bd85f24db472125f46d6cd792cdcfead8fa
|
147f460aa760df12c8d3009b148d5f7563635025
|
refs/heads/master
| 2020-04-05T17:05:05.253338
| 2018-11-11T03:36:21
| 2018-11-11T03:36:21
| 157,043,695
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,153
|
py
|
#!/usr/bin/env python
import sys
#----
# see https://docs.python.org/2/tutorial/index.html for python tutorials
#[Source code] https://github.com/kiichi/CourseraBigData/blob/master/map_reduce_join_python/join2_reducer.py
#----
last_key = None #initialize these variables
running_total = 0
abc_found = False
key_out = None
#read lines, and split lines into key & value
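#each input line is expected to look like "<key>\t<value>", where the value is either a
#numeric viewer count or a channel name such as ABC (inferred from the checks below)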
for line in sys.stdin:
line = line.strip() #strip out carriage return
key_value = line.split('\t') #split line, into key and value, returns a list
value = 0
if key_value[1].isdigit():
value = int(key_value[1])
    if key_value[0] == last_key: #if the key is the same as the previous key
        running_total += value #keep a running total of viewer counts
else:
if last_key and abc_found: # then check if ABC had been found and print out key and running total
print("%s\t%d"%(key_out,running_total))
abc_found = False
running_total = value
if key_value[1] == 'ABC': # if value is ABC then set some variable to mark that ABC was found (like abc_found = True)
key_out = key_value[0]
abc_found = True
    last_key = key_value[0]

#emit the final group after input is exhausted, otherwise the last key's total is lost
if last_key and abc_found:
    print("%s\t%d"%(key_out,running_total))
|
[
"43712233+andrez00@users.noreply.github.com"
] |
43712233+andrez00@users.noreply.github.com
|
a578fd177e8e56258aff2a691ced34b0db9fdf3a
|
e1dbb3111a8497eaac0544636c0849e5f65676b6
|
/backend/manage.py
|
4a823ef2c323e2e8892daea8fbd48a101b30591f
|
[] |
no_license
|
crowdbotics-apps/other-25649
|
f78854ddd25b596653f0a233c8fd004b1fb2df5d
|
5df42438e06420d2d4503b22189be9b1c639c310
|
refs/heads/master
| 2023-04-08T16:37:13.862160
| 2021-04-12T12:25:24
| 2021-04-12T12:25:24
| 357,184,141
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 631
|
py
|
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'other_25649.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
|
[
"team@crowdbotics.com"
] |
team@crowdbotics.com
|
c4d0b7dfe9a8edbb2cfdf881c7b5724d86302f54
|
6130221f57664ca03adbd215cff45cd0681c3881
|
/app_proyecto01/apps.py
|
3618145e667c1042f7485dc4d0c2f76797cc2e49
|
[] |
no_license
|
verje/Simple-Blog
|
53164ff304d36beb1e519bed7e9325f5897bf6ff
|
c4c3cf44e21e0df33a19d96641e624d40bb0f008
|
refs/heads/master
| 2022-10-15T11:23:08.786031
| 2020-06-12T16:55:48
| 2020-06-12T16:55:48
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 102
|
py
|
from django.apps import AppConfig
class AppProyecto01Config(AppConfig):
name = 'app_proyecto01'
|
[
"jesus_jarm@yahoo.com"
] |
jesus_jarm@yahoo.com
|
4c3414828d29370ae2304f95d78e7420bf33bfb1
|
0e478f3d8b6c323c093455428c9094c45de13bac
|
/src/OTLMOW/PostenMapping/Model/Post060361226.py
|
e9c499ee16577307dabf18dc0ca996d2ef260913
|
[
"MIT"
] |
permissive
|
davidvlaminck/OTLMOW
|
c6eae90b2cab8a741271002cde454427ca8b75ba
|
48f8c357c475da1d2a1bc7820556843d4b37838d
|
refs/heads/main
| 2023-01-12T05:08:40.442734
| 2023-01-10T15:26:39
| 2023-01-10T15:26:39
| 432,681,113
| 3
| 1
|
MIT
| 2022-06-20T20:36:00
| 2021-11-28T10:28:24
|
Python
|
UTF-8
|
Python
| false
| false
| 4,392
|
py
|
# coding=utf-8
from OTLMOW.PostenMapping.StandaardPost import StandaardPost
from OTLMOW.PostenMapping.StandaardPostMapping import StandaardPostMapping
# Generated with PostenCreator. To modify: extend, do not edit
class Post060361226(StandaardPost):
def __init__(self):
super().__init__(
nummer='0603.61226',
beschrijving='Onbehandelde betontegels, gekleurd met anorganische pigmenten volgens 6-3.7, 400 x 400, 60 mm',
meetstaateenheid='M2',
mappings=[StandaardPostMapping(
typeURI='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#BestratingVanBetontegel',
attribuutURI='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#BestratingVanBetontegel.afmetingVanBestratingselementLxB',
dotnotation='afmetingVanBestratingselementLxB',
defaultWaarde='400-x-400',
range='',
usagenote='',
isMeetstaatAttr=0,
isAltijdInTeVullen=0,
isBasisMapping=1,
mappingStatus='gemapt 2.0',
mappingOpmerking='',
standaardpostnummer='0603.61226')
, StandaardPostMapping(
typeURI='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#BestratingVanBetontegel',
attribuutURI='https://wegenenverkeer.data.vlaanderen.be/ns/abstracten#Laag.laagRol',
dotnotation='laagRol',
defaultWaarde='straatlaag',
range='',
usagenote='',
isMeetstaatAttr=0,
isAltijdInTeVullen=0,
isBasisMapping=1,
mappingStatus='gemapt 2.0',
mappingOpmerking='',
standaardpostnummer='0603.61226')
, StandaardPostMapping(
typeURI='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#BestratingVanBetontegel',
attribuutURI='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#BestratingVanBetontegel.afwerking',
dotnotation='afwerking',
defaultWaarde='onbehandeld',
range='',
usagenote='',
isMeetstaatAttr=0,
isAltijdInTeVullen=0,
isBasisMapping=1,
mappingStatus='gemapt 2.0',
mappingOpmerking='',
standaardpostnummer='0603.61226')
, StandaardPostMapping(
typeURI='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#BestratingVanBetontegel',
attribuutURI='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#BestratingVanBetontegel.type',
dotnotation='type',
defaultWaarde='gekleurde-met-anorganische-pigmenten',
range='',
usagenote='',
isMeetstaatAttr=0,
isAltijdInTeVullen=0,
isBasisMapping=1,
mappingStatus='gemapt 2.0',
mappingOpmerking='',
standaardpostnummer='0603.61226')
, StandaardPostMapping(
typeURI='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#BestratingVanBetontegel',
attribuutURI='https://wegenenverkeer.data.vlaanderen.be/ns/abstracten#LaagDikte.dikte',
dotnotation='dikte',
defaultWaarde='6',
range='',
usagenote='cm^^cdt:ucumunit',
isMeetstaatAttr=0,
isAltijdInTeVullen=0,
isBasisMapping=1,
mappingStatus='gemapt 2.0',
mappingOpmerking='',
standaardpostnummer='0603.61226')
, StandaardPostMapping(
typeURI='https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#BestratingVanBetontegel',
attribuutURI='https://wegenenverkeer.data.vlaanderen.be/ns/abstracten#Laag.oppervlakte',
dotnotation='oppervlakte',
defaultWaarde='',
range='',
usagenote='m2^^cdt:ucumunit',
isMeetstaatAttr=1,
isAltijdInTeVullen=1,
isBasisMapping=1,
mappingStatus='gemapt 2.0',
mappingOpmerking='',
standaardpostnummer='0603.61226')])
|
[
"david.vlaminck@mow.vlaanderen.be"
] |
david.vlaminck@mow.vlaanderen.be
|
f8bb15de2bfa0b6810bcf4adbf68ce75096399b1
|
2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02
|
/PyTorch/contrib/cv/detection/SSD-Resnet/eval.py
|
32420567ee24637672cd21a6a4cfb664f2767b2d
|
[
"GPL-1.0-or-later",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
Ascend/ModelZoo-PyTorch
|
4c89414b9e2582cef9926d4670108a090c839d2d
|
92acc188d3a0f634de58463b6676e70df83ef808
|
refs/heads/master
| 2023-07-19T12:40:00.512853
| 2023-07-17T02:48:18
| 2023-07-17T02:48:18
| 483,502,469
| 23
| 6
|
Apache-2.0
| 2022-10-15T09:29:12
| 2022-04-20T04:11:18
|
Python
|
UTF-8
|
Python
| false
| false
| 8,808
|
py
|
# Copyright (c) 2018-2019, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import torch
if torch.__version__ >= '1.8':
import torch_npu
import time
import numpy as np
import io
from ssd300 import SSD300
from box_coder import dboxes300_coco, build_ssd300_coder
from parse_config import parse_args, validate_arguments, validate_group_bn
from data.build_pipeline import build_pipeline
from data.prefetcher import eval_prefetcher
from async_evaluator import AsyncEvaluator
import sys
# necessary pytorch imports
import torch.utils.data.distributed
import torch.distributed as dist
# Apex imports
try:
import apex_C
import apex
from apex.parallel import DistributedDataParallel as DDP
from apex.fp16_utils import *
from apex.multi_tensor_apply import multi_tensor_applier
#import amp_C
except ImportError:
raise ImportError("Please install APEX from https://github.com/nvidia/apex")
# from SSD import _C as C
def print_message(rank, *print_args):
if rank == 0:
print(*print_args)
"""
Take results and produce mAP on COCO
Intended to be used with an async evaluator, and run on a single
node -- calling code is responsible for that delegation
"""
def evaluate_coco(final_results, cocoGt, local_rank, threshold):
from pycocotools.cocoeval import COCOeval
#cocoDt = cocoGt.loadRes(final_results, use_ext=True)
cocoDt = cocoGt.loadRes(final_results)
#E = COCOeval(cocoGt, cocoDt, iouType='bbox', use_ext=True)
E = COCOeval(cocoGt, cocoDt, iouType='bbox')
E.evaluate()
E.accumulate()
E.summarize()
print("Current AP: {:.5f} AP goal: {:.5f}".format(E.stats[0], threshold))
sys.stdout.flush()
return E.stats[0]
def coco_eval(args, model, coco, cocoGt, encoder, inv_map, epoch, iteration, evaluator=None):
from pycocotools.cocoeval import COCOeval
threshold = args.threshold
batch_size = args.eval_batch_size
use_fp16 = args.use_fp16
local_rank = args.local_rank
N_gpu = args.N_gpu
use_nhwc = args.nhwc
pad_input = args.pad_input
distributed = args.distributed
ret = []
overlap_threshold = 0.50
nms_max_detections = 200
start = time.time()
# Wrap dataloader for prefetching
coco = eval_prefetcher(iter(coco),
torch.npu.current_device(),
args.pad_input,
args.nhwc,
args.use_fp16)
for nbatch, (img, img_id, img_size) in enumerate(coco):
with torch.no_grad():
# Get predictions
ploc, plabel = model(img)
ploc = torch_npu.npu_format_cast(ploc, 2)
plabel = torch_npu.npu_format_cast(plabel, 2)
ploc, plabel = ploc.float(), plabel.float()
# Handle the batch of predictions produced
# This is slow, but consistent with old implementation.
for idx in range(ploc.shape[0]):
# ease-of-use for specific predictions
ploc_i = ploc[idx, :, :].unsqueeze(0)
plabel_i = plabel[idx, :, :].unsqueeze(0)
result = encoder.decode_batch(ploc_i, plabel_i, overlap_threshold, nms_max_detections)
htot, wtot = img_size[0][idx].item(), img_size[1][idx].item()
loc, label, prob = [r[0].cpu().numpy() for r in result]
for loc_, label_, prob_ in zip(loc, label, prob):
ret.append([img_id[idx], loc_[0]*wtot, \
loc_[1]*htot,
(loc_[2] - loc_[0])*wtot,
(loc_[3] - loc_[1])*htot,
prob_,
inv_map[(label_+1)]])
# Now we have all predictions from this rank, gather them all together
# if necessary
ret = np.array(ret).astype(np.float32)
# Multi-GPU eval
if distributed:
# NCCL backend means we can only operate on GPU tensors
ret_copy = torch.tensor(ret).npu()
# Everyone exchanges the size of their results
ret_sizes = [torch.tensor(0).npu() for _ in range(N_gpu)]
torch.distributed.all_gather(ret_sizes, torch.tensor(ret_copy.shape[0]).npu())
# Get the maximum results size, as all tensors must be the same shape for
# the all_gather call we need to make
max_size = 0
sizes = []
for s in ret_sizes:
max_size = max(max_size, s.item())
sizes.append(s.item())
# Need to pad my output to max_size in order to use in all_gather
ret_pad = torch.cat([ret_copy, torch.zeros(max_size-ret_copy.shape[0], 7, dtype=torch.float32).npu()])
# allocate storage for results from all other processes
other_ret = [torch.zeros(max_size, 7, dtype=torch.float32).npu() for i in range(N_gpu)]
# Everyone exchanges (padded) results
torch.distributed.all_gather(other_ret, ret_pad)
# Now need to reconstruct the _actual_ results from the padded set using slices.
cat_tensors = []
for i in range(N_gpu):
cat_tensors.append(other_ret[i][:sizes[i]][:])
final_results = torch.cat(cat_tensors).cpu().numpy()
else:
# Otherwise full results are just our results
final_results = ret
print_message(args.rank, "Predicting Ended, total time: {:.2f} s".format(time.time()-start))
# All results are assembled -- if rank == 0 start async evaluation (if enabled)
if args.rank == 0 and (evaluator is not None):
evaluator.submit_task(epoch, evaluate_coco, final_results, cocoGt, local_rank, threshold)
return
def load_checkpoint(model, checkpoint):
print("loading model checkpoint", checkpoint)
od = torch.load(checkpoint)
# remove proceeding 'module' from checkpoint
saved_model = od["model"]
for k in list(saved_model.keys()):
if k.startswith('module.'):
saved_model[k[7:]] = saved_model.pop(k)
model.load_state_dict(saved_model)
def setup_distributed(args):
# Setup multi-GPU if necessary
args.distributed = False
if 'WORLD_SIZE' in os.environ:
args.distributed = int(os.environ['WORLD_SIZE']) > 1
os.environ['MASTER_ADDR'] = '127.0.0.1'
os.environ['MASTER_PORT'] = '29688'
if args.distributed:
torch.npu.set_device(args.local_rank)
torch.distributed.init_process_group(backend='hccl',
world_size=int(os.environ['WORLD_SIZE']),
rank=args.local_rank,
)
args.local_seed = 0
# start timing here
if args.distributed:
args.N_gpu = torch.distributed.get_world_size()
args.rank = torch.distributed.get_rank()
else:
args.N_gpu = 1
args.rank = 0
validate_group_bn(args.bn_group)
return args
# setup everything (model, etc) to run eval
def run_eval(args):
args = setup_distributed(args)
from pycocotools.coco import COCO
local_seed = args.local_seed
encoder = build_ssd300_coder()
val_annotate = os.path.join(args.data, "annotations/instances_val2017.json")
val_coco_root = os.path.join(args.data, "val2017")
val_loader, inv_map, cocoGt = build_pipeline(args, training=False)
model_options = {
'use_nhwc' : args.nhwc,
'pad_input' : args.pad_input,
'bn_group' : args.bn_group,
'pretrained' : False,
}
ssd300_eval = SSD300(args, args.num_classes, **model_options).npu()
if args.use_fp16:
convert_network(ssd300_eval, torch.half)
ssd300_eval.eval()
if args.checkpoint is not None:
load_checkpoint(ssd300_eval, args.checkpoint)
evaluator = AsyncEvaluator(num_threads=1)
coco_eval(args,
ssd300_eval,
val_loader,
cocoGt,
encoder,
inv_map,
0, # epoch
0, # iter_num
evaluator=evaluator)
if __name__ == "__main__":
args = parse_args()
validate_arguments(args)
torch.backends.cudnn.benchmark = True
torch.set_num_threads(1)
run_eval(args)
|
[
"wangjiangben@huawei.com"
] |
wangjiangben@huawei.com
|
99cb695db4bbb2ce1d7d866cfefbc5bda140e4b6
|
3e9ea1ff124159906215e7fa9b625a73cb4b5258
|
/tests/test_slurm.py
|
d3e5e9bd33574a8a08e8a27dd746e5199e3b3914
|
[
"MIT"
] |
permissive
|
tamburinif/slurm
|
d65aecbeeecabe8a150f5fee472ad5603af04cc8
|
925adada26978978e7e8fd7ee777046a0470ccc3
|
refs/heads/master
| 2021-07-16T11:52:02.833529
| 2021-02-15T22:13:33
| 2021-02-15T22:13:33
| 240,618,292
| 0
| 0
|
MIT
| 2020-02-14T23:49:26
| 2020-02-14T23:49:26
| null |
UTF-8
|
Python
| false
| false
| 2,988
|
py
|
#!/usr/bin/env python3
import pytest
import time
@pytest.fixture
def profile(cookie_factory, data):
cookie_factory()
@pytest.mark.slow
@pytest.mark.skipci
def test_no_timeout(smk_runner, profile):
"""Test that rule that updates runtime doesn't timeout"""
smk_runner.make_target("timeout.txt")
assert "Trying to restart" in smk_runner.output
smk_runner.wait_for_status("COMPLETED")
assert "Finished job" in smk_runner.output
@pytest.mark.slow
def test_timeout(smk_runner, profile):
"""Test that rule excessive runtime resources times out"""
opts = (
f'--cluster "sbatch --parsable -p {smk_runner.partition} {pytest.account} '
'-c 1 -t {resources.runtime}" --attempt 1'
)
smk_runner.make_target("timeout.txt", options=opts, profile=None, asynchronous=True)
# Discount queueing time
smk_runner.wait_for_status("RUNNING")
smk_runner.wait_while_status("RUNNING", tdelta=20, timeout=90)
assert smk_runner.check_jobstatus("TIMEOUT|NODE_FAIL")
def test_profile_status_running(smk_runner, profile):
"""Test that slurm-status.py catches RUNNING status"""
opts = (
f'--cluster "sbatch --parsable -p {smk_runner.partition}'
f' {pytest.account} -c 1 -t 1"'
)
smk_runner.make_target(
"timeout.txt", options=opts, profile=None, asynchronous=True
) # noqa: E501
smk_runner.wait_for_status("RUNNING", tdelta=5)
jid = smk_runner.external_jobid[0]
_, output = smk_runner.exec_run(
cmd=f"{smk_runner.slurm_status} {jid}", stream=False
)
assert output.decode().strip() == "running"
smk_runner.cancel_slurm_job(jid)
@pytest.mark.timeout(60)
def test_slurm_submit(smk_runner, profile):
"""Test that slurm-submit.py works"""
jobscript = smk_runner.script("jobscript.sh")
jobscript.write(
(
"#!/bin/bash\n"
'# properties = {"cluster": {"job-name": "sm-job"},'
'"input": [], "output": [], "wildcards": {}, "params": {},'
'"rule": "slurm_submit"}\n'
)
)
_, output = smk_runner.exec_run(
cmd=f"{smk_runner.slurm_submit} {jobscript}", stream=False
)
jobid = int(output.decode().strip())
time.sleep(5)
assert smk_runner.check_jobstatus(
"sm-job", options="--format=jobname", jobid=jobid)
smk_runner.cancel_slurm_job(jobid)
@pytest.mark.timeout(60)
@pytest.mark.skipci
def test_group_job(smk_runner, profile):
"""Test that group job properties formatted as expected"""
smk_runner.make_target("group_job.2.txt", stream=False)
smk_runner.wait_for_status("COMPLETED", tdelta=5)
assert "Submitted group job" in smk_runner.output
assert "2 of 2 steps" in smk_runner.output
@pytest.mark.timeout(60)
@pytest.mark.skipci
def test_wildcard_job(smk_runner, profile):
"""Test that wildcard job properties formatted as expected"""
smk_runner.make_target("wildcard.wc.txt")
assert "Finished job" in smk_runner.output
|
[
"per.unneberg@scilifelab.se"
] |
per.unneberg@scilifelab.se
|
af15845c90defb046faea5d8943043b47c0ed874
|
c741f04141784a2571d2d27d95e0d994e4584ab1
|
/learning/py3/基本数据类型/List(列表)/注意.py
|
145b660838690adc8eecc3b0bac4da911490bd50
|
[] |
no_license
|
haodonghui/python
|
bbdece136620bc6f787b4942d6e1760ed808afd4
|
365062ba54297c81093b7f378742e76d438658b7
|
refs/heads/master
| 2022-02-03T23:52:37.288503
| 2022-01-27T05:23:25
| 2022-01-27T05:23:25
| 191,729,797
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 235
|
py
|
"""
Note:
1. A List is written between square brackets, with elements separated by commas.
2. Like strings, a list can be indexed and sliced.
3. Lists can be concatenated using the + operator.
4. The elements in a List can be changed (lists are mutable).
"""
|
[
"haodonghui@yestae.com"
] |
haodonghui@yestae.com
|
ad397c3ccf428aba349d36890a58767b6b21a677
|
24449e77422a6236acd17b945174c546c55a7082
|
/exr1.py
|
0793c174398ba409676b599ca3022cced687dbeb
|
[] |
no_license
|
MahdieRrad/ImgeP-3
|
48297cdf7d0c5d6fb69772bb4d096f1266d0a3b6
|
99c75682968edd9fe3163e8febef5c591d47dd98
|
refs/heads/main
| 2023-08-23T10:33:30.835774
| 2021-11-06T14:04:54
| 2021-11-06T14:04:54
| 424,978,692
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 157
|
py
|
import cv2
# Read the two input images.
img1 = cv2.imread('a.tif')
img2 = cv2.imread('b.tif')
# Difference image via plain NumPy subtraction (uint8 values wrap around on underflow).
imge = img2 - img1
cv2.imwrite('Scrt.jpg', imge)
cv2.imshow('Scrt', imge)
cv2.waitKey()
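# Editor's sketch (not in the original script): OpenCV's own helpers avoid the
# uint8 wrap-around noted above. cv2.subtract saturates at zero and cv2.absdiff
# returns the magnitude of the difference.
diff_clipped = cv2.subtract(img2, img1)
diff_abs = cv2.absdiff(img2, img1)
cv2.imwrite('Scrt_clipped.jpg', diff_clipped)
cv2.imwrite('Scrt_abs.jpg', diff_abs)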
|
[
"noreply@github.com"
] |
MahdieRrad.noreply@github.com
|
5ca34df0861e04d99dc995c7fc34161219b92c1d
|
2d59afbe33eccd836aa0673d43d3b23a30d0422a
|
/mysite/hello/views.py
|
14c16019ee8059972b77d4695e061c0696a923e6
|
[] |
no_license
|
wwwangxinyi/django_projects
|
a60eb705aca68e53449a5c83e69e4b2e45addca3
|
62d13bb9240cbfbe080c8a0f8744883c8216fd58
|
refs/heads/master
| 2023-03-23T22:48:58.623669
| 2021-03-14T04:07:00
| 2021-03-14T04:07:00
| 342,107,106
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 429
|
py
|
from django.http import HttpResponse
def sessfun(request):
    # Count visits in the session and reset the counter after the fourth visit.
    num_visits = request.session.get('num_visits', 0) + 1
    request.session['num_visits'] = num_visits
    if num_visits > 4:
        del request.session['num_visits']
    resp = HttpResponse('view count=' + str(num_visits))
    resp.set_cookie('dj4e_cookie', '90ecce8d', max_age=1000)
    return resp
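# Editor's note (sketch, not part of the original file): a view like this is
# typically exposed through the app's urls.py. Assuming the app is named
# "hello", the wiring could look like:
#
#   from django.urls import path
#   from . import views
#
#   urlpatterns = [
#       path('sessfun/', views.sessfun, name='sessfun'),
#   ]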
|
[
"youremail@umich.edu"
] |
youremail@umich.edu
|
efd39ce2b2ee56e7d45f29574ff2e9e5a8633b1f
|
9afef9a6a25bbcde30596fa2fb3dce2bd6f4f6f6
|
/setup.py
|
840c1cfb4c74d81f67fa1e1894f6cfa75c4a7483
|
[] |
no_license
|
msnyd/my-lambdata-package
|
2dcf3cda55b86aa071cce8fd1d7d82126db44ddf
|
64657a7e3ca9f41671a95d81353135622127a6fe
|
refs/heads/master
| 2021-01-02T12:41:40.113648
| 2020-02-11T18:49:36
| 2020-02-11T18:49:36
| 239,629,597
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 555
|
py
|
from setuptools import find_packages, setup
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name="msnyd-helper-functions",
version="1.3",
author="M Snyder",
author_email="msnyder97@gmail.com",
description="Useful data science functions",
long_description=long_description,
long_description_content_type="text/markdown", # required if using a md file for long desc
license="",
url="https://github.com/msnyd/my-lambdata-package",
keywords="data-split-test",
packages=find_packages()
)
|
[
"46577534+msnyd@users.noreply.github.com"
] |
46577534+msnyd@users.noreply.github.com
|
de324d4a4599251bc494e523cfd835ac6636c62c
|
5a4436884af5341ce855c0e84866b972a0f61c05
|
/day1/expressions/28.py
|
cdf3516accacba87257b18e8580d17bf50b3169a
|
[] |
no_license
|
sreejithev/pythoncodes
|
74a420c4f025b893e27f17ba85632a4a096f17fd
|
70df14871a9687916d1c4ada76c055607f13e8ce
|
refs/heads/master
| 2021-01-21T20:59:47.056167
| 2017-06-19T09:43:17
| 2017-06-19T09:43:17
| 92,292,259
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 31
|
py
|
print 'hello'
# print 'world'
|
[
"sreejithevwyd@gmail.com"
] |
sreejithevwyd@gmail.com
|
f2f63b88a5d5ccf7652ef38e040b3f1eaa84e1af
|
f319ed30f5b71f9eae5b7e3d1b94f0a61180a80a
|
/code/toys/SineRegression.py
|
1058eec5501f29cc57994a52e05f6281d04ec013
|
[] |
no_license
|
greentfrapp/metaphor
|
a170b0ecca2963c4c99e3ae63f2b3437af950f30
|
229e3c6b408cc9307141e7c8564b78b9370f3848
|
refs/heads/master
| 2022-03-10T00:15:40.012281
| 2019-10-19T19:13:47
| 2019-10-19T19:13:47
| 212,151,830
| 0
| 0
| null | 2019-10-19T19:13:47
| 2019-10-01T17:00:39
|
Python
|
UTF-8
|
Python
| false
| false
| 1,739
|
py
|
"""
The Sine Regression toy problem as first described by Finn et al.
(2017). To quote from the paper:
Each task involves regressing from the input to the output of a
sine wave, where the amplitude and phase of the sinusoid are
varied between tasks. Thus, p(T) is continuous, where the
amplitude varies within [0.1, 5.0] and the phase varies within
[0, π], and the input and output both have a dimensionality of
1. During training and testing, datapoints x are sampled
uniformly from [−5.0, 5.0]. The loss is the mean-squared error
between the prediction f(x) and true value.
Note here that p(T) refers to the distribution over tasks.
"""
import numpy as np
import matplotlib.pyplot as plt
class SineTask:
def __init__(self, ampl, phase, x_range=[-5, 5]):
self.ampl = ampl
self.phase = phase
self.x_range = x_range
def generate_points(self, n=10):
x = self.x_range[0] + np.random.rand(n, 1) * (self.x_range[1] - self.x_range[0])
y = self.ampl * np.sin(x + self.phase)
return x, y
def plot(self):
fig, ax = plt.subplots()
plot_x = np.arange(-5, 5, 0.1)
plot_y = self.ampl * np.sin(plot_x + self.phase)
ax.plot(plot_x, plot_y)
points = self.generate_points()
ax.scatter(points[0], points[1])
plt.show()
class SineRegressionDist:
def __init__(self, ampl_range=[0.1, 5], phase_range=[0, np.pi]):
self.ampl_range = ampl_range
self.phase_range = phase_range
def sampler(self, rnge):
return rnge[0] + np.random.rand() * (rnge[1] - rnge[0])
def generate_task(self):
return SineTask(self.sampler(self.ampl_range), self.sampler(self.phase_range))
if __name__ == "__main__":
dist = SineRegressionDist()
task = dist.generate_task()
print(task.generate_points())
task.plot()
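# Editor's sketch (not in the original file): a tiny baseline that illustrates the
# mean-squared-error loss described in the module docstring on one sampled task.
# The polynomial fit is only a stand-in regressor; numpy is already imported above.
def demo_mse(n_points=50, degree=3):
    task = SineRegressionDist().generate_task()
    x, y = task.generate_points(n=n_points)
    coeffs = np.polyfit(x.ravel(), y.ravel(), degree)
    y_hat = np.polyval(coeffs, x.ravel())
    return float(np.mean((y_hat - y.ravel()) ** 2))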
|
[
"limsweekiat@gmail.com"
] |
limsweekiat@gmail.com
|
ebfd7331f9d6cbbf9eade56aa56fcfd4f0dd81f3
|
ad733caded665623f46a9b15b834911b34f8ff19
|
/maratona/models.py
|
a6b8a3a01f94a75a5ec4f96aaa2c281395ec9139
|
[] |
no_license
|
uilton-oliveira/maratonafc3_django
|
eda7a74aebb7477b573d0bee5f851dbedd3138b3
|
51a70b83f59a8bd9a15a57f9fb49151ee7742f6b
|
refs/heads/master
| 2022-11-11T14:47:39.273293
| 2020-07-02T21:06:58
| 2020-07-02T21:06:58
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 330
|
py
|
from django.db import models
class Aula(models.Model):
title = models.CharField(max_length=200)
image_url = models.TextField('image url')
video_url = models.TextField('video url', default=None, blank=True, null=True)
live_date = models.DateTimeField('live date')
def __str__(self):
return self.title
|
[
"uilton.oliveira@cortex-intelligence.com"
] |
uilton.oliveira@cortex-intelligence.com
|
ca6bec6ed6be07c0c69746803ccb73ad32d37329
|
48ddaeb3eb7b127ccf6ce362b2a42307ecacc875
|
/src/filters/filter_lat_lon_to_cartesian.py
|
0300e89505eadc5abdac2cd1189ab2932e590e58
|
[
"BSD-3-Clause"
] |
permissive
|
aashish24/ParaViewGeophysics
|
f720cdf2555f2a81801dfa4132189a040c980b8b
|
d9a71ffd21a57fa0eb704c5f6893ec9b1ddf6da6
|
refs/heads/master
| 2022-03-05T15:23:56.166299
| 2017-11-10T22:28:44
| 2017-11-10T22:28:44
| 110,368,572
| 0
| 0
|
BSD-3-Clause
| 2020-01-12T00:22:22
| 2017-11-11T18:12:25
|
Python
|
UTF-8
|
Python
| false
| false
| 2,042
|
py
|
Name = 'LatLonToCartesian'
Label = 'Lat Lon To Cartesian'
FilterCategory = 'CSM Geophysics Filters'
Help = 'Help for the Test Filter'
NumberOfInputs = 1
InputDataType = 'vtkTable'
OutputDataType = 'vtkTable'
ExtraXml = ''
Properties = dict(
Radius=6371.0,
lat_i=1,
lon_i=0,
)
# TODO: filter works but assumes a spherical earth which is very wrong
# NOTE: matches the vtkEarthSource however, so we are going to keep it this way
def RequestData():
import numpy as np
from vtk.util import numpy_support as nps
pdi = self.GetInput()
pdo = self.GetOutput()
pdo.DeepCopy(pdi)
# Get number of columns
ncols = pdi.GetNumberOfColumns()
nrows = pdi.GetColumn(0).GetNumberOfTuples()
# Make a 2D numpy array and fill it with data from the input table
data = np.empty((nrows,ncols))
for i in range(ncols):
c = pdi.GetColumn(i)
data[:,i] = nps.vtk_to_numpy(c)
rad = 2 * np.pi / 360.0
coords = np.empty((nrows,3))
row_i = 0
'''
for r in data:
x = Radius * cos(r[lat_i] * rad) * cos(r[lon_i] * rad)
y = Radius * cos(r[lat_i] * rad) * sin(r[lon_i] * rad)
z = Radius * sin(r[lat_i] * rad)
coords[row_i] = [x,y,z]
row_i = row_i + 1
'''
x = Radius * np.cos(data[:,lat_i] * rad) * np.cos(data[:,lon_i] * rad)
y = Radius * np.cos(data[:,lat_i] * rad) * np.sin(data[:,lon_i] * rad)
z = Radius * np.sin(data[:,lat_i] * rad)
coords[:,0] = x
coords[:,1] = y
coords[:,2] = z
# Add coords to output table
for i in range(3):
col = np.array(coords[:,i])
insert = nps.numpy_to_vtk(num_array=col, deep=True) # array_type=vtk.VTK_FLOAT
# VTK arrays need a name.
if i == 0:
insert.SetName('X')
elif i == 1:
insert.SetName('Y')
elif i == 2:
insert.SetName('Z')
#pdo.AddColumn(insert) # these are not getting added to the output table
# ... work around:
pdo.GetRowData().AddArray(insert) # NOTE: this is in the FieldData
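# Editor's sketch (not part of the original filter): a quick sanity check of the
# spherical conversion used above, runnable outside ParaView with plain numpy:
#
#   import numpy as np
#   R = 6371.0
#   rad = 2 * np.pi / 360.0
#   lat, lon = 0.0, 90.0
#   x = R * np.cos(lat * rad) * np.cos(lon * rad)  # ~0 km
#   y = R * np.cos(lat * rad) * np.sin(lon * rad)  # ~6371 km (equator, 90 deg E)
#   z = R * np.sin(lat * rad)                      # 0 km (on the equator)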
|
[
"chrsulli@mines.edu"
] |
chrsulli@mines.edu
|
b9e9d7352029861962828c90619ec119a8da1245
|
a37b54df55c2475a5a75dfe37db02c1af8c86234
|
/dep/wand/drawing.py
|
4c576fb4619dfda104f94508f55adeca6d9e5f8b
|
[] |
no_license
|
lovexinforever/wallpaper
|
5ba366bdae7a4a838b8eb0a35bdfe41cbda7abb4
|
d279261dff0f647ece7fbb33c4234273cccca4d1
|
refs/heads/master
| 2023-03-20T01:09:12.244409
| 2021-03-02T07:14:36
| 2021-03-02T07:14:36
| 259,499,327
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 82,378
|
py
|
""":mod:`wand.drawing` --- Drawings
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The module provides some vector drawing functions.
.. versionadded:: 0.3.0
"""
import collections
import ctypes
import numbers
from .api import (AffineMatrix, MagickPixelPacket, PixelInfo, PointInfo,
library)
from .color import Color
from .compat import binary, string_type, text, text_type, xrange
from .exceptions import WandLibraryVersionError
from .image import BaseImage, COMPOSITE_OPERATORS
from .sequence import SingleImage
from .resource import Resource
from .version import MAGICK_VERSION_NUMBER
__all__ = ('CLIP_PATH_UNITS', 'FILL_RULE_TYPES', 'FONT_METRICS_ATTRIBUTES',
'GRAVITY_TYPES', 'LINE_CAP_TYPES', 'LINE_JOIN_TYPES',
'PAINT_METHOD_TYPES', 'STRETCH_TYPES', 'STYLE_TYPES',
'TEXT_ALIGN_TYPES', 'TEXT_DECORATION_TYPES',
'TEXT_DIRECTION_TYPES', 'Drawing', 'FontMetrics')
#: (:class:`collections.Sequence`) The list of clip path units
#:
#: - ``'undefined_path_units'``
#: - ``'user_space'``
#: - ``'user_space_on_use'``
#: - ``'object_bounding_box'``
CLIP_PATH_UNITS = ('undefined_path_units', 'user_space', 'user_space_on_use',
'object_bounding_box')
#: (:class:`collections.Sequence`) The list of text align types.
#:
#: - ``'undefined'``
#: - ``'left'``
#: - ``'center'``
#: - ``'right'``
TEXT_ALIGN_TYPES = 'undefined', 'left', 'center', 'right'
#: (:class:`collections.Sequence`) The list of text decoration types.
#:
#: - ``'undefined'``
#: - ``'no'``
#: - ``'underline'``
#: - ``'overline'``
#: - ``'line_through'``
TEXT_DECORATION_TYPES = ('undefined', 'no', 'underline', 'overline',
'line_through')
#: (:class:`collections.Sequence`) The list of text direction types.
#:
#: - ``'undefined'``
#: - ``'right_to_left'``
#: - ``'left_to_right'``
TEXT_DIRECTION_TYPES = ('undefined', 'right_to_left', 'left_to_right')
#: (:class:`collections.Sequence`) The list of text gravity types.
#:
#: - ``'forget'``
#: - ``'north_west'``
#: - ``'north'``
#: - ``'north_east'``
#: - ``'west'``
#: - ``'center'``
#: - ``'east'``
#: - ``'south_west'``
#: - ``'south'``
#: - ``'south_east'``
#: - ``'static'``
GRAVITY_TYPES = ('forget', 'north_west', 'north', 'north_east', 'west',
'center', 'east', 'south_west', 'south', 'south_east',
'static')
#: (:class:`collections.Sequence`) The list of fill-rule types.
#:
#: - ``'undefined'``
#: - ``'evenodd'``
#: - ``'nonzero'``
FILL_RULE_TYPES = ('undefined', 'evenodd', 'nonzero')
#: (:class:`collections.Sequence`) The attribute names of font metrics.
FONT_METRICS_ATTRIBUTES = ('character_width', 'character_height', 'ascender',
'descender', 'text_width', 'text_height',
'maximum_horizontal_advance', 'x1', 'y1', 'x2',
'y2', 'x', 'y')
#: The tuple subtype which consists of font metrics data.
FontMetrics = collections.namedtuple('FontMetrics', FONT_METRICS_ATTRIBUTES)
#: (:class:`collections.Sequence`) The list of stretch types for fonts
#:
#: - ``'undefined'``
#: - ``'normal'``
#: - ``'ultra_condensed'``
#: - ``'extra_condensed'``
#: - ``'condensed'``
#: - ``'semi_condensed'``
#: - ``'semi_expanded'``
#: - ``'expanded'``
#: - ``'extra_expanded'``
#: - ``'ultra_expanded'``
#: - ``'any'``
STRETCH_TYPES = ('undefined', 'normal', 'ultra_condensed', 'extra_condensed',
'condensed', 'semi_condensed', 'semi_expanded', 'expanded',
'extra_expanded', 'ultra_expanded', 'any')
#: (:class:`collections.Sequence`) The list of style types for fonts
#:
#: - ``'undefined'``
#: - ``'normal'``
#: - ``'italic'``
#: - ``'oblique'``
#: - ``'any'``
STYLE_TYPES = ('undefined', 'normal', 'italic', 'oblique', 'any')
#: (:class:`collections.Sequence`) The list of LineCap types
#:
#: - ``'undefined'``
#: - ``'butt'``
#: - ``'round'``
#: - ``'square'``
LINE_CAP_TYPES = ('undefined', 'butt', 'round', 'square')
#: (:class:`collections.Sequence`) The list of LineJoin types
#:
#: - ``'undefined'``
#: - ``'miter'``
#: - ``'round'``
#: - ``'bevel'``
LINE_JOIN_TYPES = ('undefined', 'miter', 'round', 'bevel')
#: (:class:`collections.Sequence`) The list of paint method types.
#:
#: - ``'undefined'``
#: - ``'point'``
#: - ``'replace'``
#: - ``'floodfill'``
#: - ``'filltoborder'``
#: - ``'reset'``
PAINT_METHOD_TYPES = ('undefined', 'point', 'replace',
'floodfill', 'filltoborder', 'reset')
class Drawing(Resource):
"""Drawing object. It maintains several vector drawing instructions
and can get drawn into zero or more :class:`~wand.image.Image` objects
by calling it.
For example, the following code draws a diagonal line to the ``image``::
with Drawing() as draw:
draw.line((0, 0), image.size)
draw(image)
:param drawing: an optional drawing object to clone.
use :meth:`clone()` method rather than this parameter
:type drawing: :class:`Drawing`
.. versionadded:: 0.3.0
"""
c_is_resource = library.IsDrawingWand
c_destroy_resource = library.DestroyDrawingWand
c_get_exception = library.DrawGetException
c_clear_exception = library.DrawClearException
def __init__(self, drawing=None):
with self.allocate():
if not drawing:
wand = library.NewDrawingWand()
elif not isinstance(drawing, type(self)):
raise TypeError('drawing must be a wand.drawing.Drawing '
'instance, not ' + repr(drawing))
else:
wand = library.CloneDrawingWand(drawing.resource)
self.resource = wand
@property
def border_color(self):
"""(:class:`~wand.color.Color`) the current border color. It also can
be set. This attribute controls the behavior of
:meth:`~wand.drawing.Drawing.color()` during ``'filltoborder'``
operation.
.. versionadded:: 0.4.0
"""
pixelwand = library.NewPixelWand()
library.DrawGetBorderColor(self.resource, pixelwand)
if MAGICK_VERSION_NUMBER < 0x700:
pixel_structure = MagickPixelPacket
else:
pixel_structure = PixelInfo
size = ctypes.sizeof(pixel_structure)
buffer = ctypes.create_string_buffer(size)
library.PixelGetMagickColor(pixelwand, buffer)
pixelwand = library.DestroyPixelWand(pixelwand)
return Color(raw=buffer)
@border_color.setter
def border_color(self, border_color):
if not isinstance(border_color, Color):
raise ValueError('expected wand.color.Color, not ' +
repr(border_color))
with border_color:
library.DrawSetBorderColor(self.resource, border_color.resource)
@property
def clip_path(self):
"""(:class:`basestring`) The current clip path. It also can be set.
.. versionadded:: 0.4.0
.. versionchanged: 0.4.1
Safely release allocated memory with
:c:func:`MagickRelinquishMemory` instead of :c:func:`libc.free`.
"""
clip_path_p = library.DrawGetClipPath(self.resource)
return text(clip_path_p.value)
@clip_path.setter
def clip_path(self, path):
if not isinstance(path, string_type):
raise TypeError('expected a string, not ' + repr(path))
library.DrawSetClipPath(self.resource, binary(path))
@property
def clip_rule(self):
"""(:class:`basestring`) The current clip rule. It also can be set.
It's a string value from :const:`FILL_RULE_TYPES` list.
.. versionadded:: 0.4.0
"""
clip_rule = library.DrawGetClipRule(self.resource)
return FILL_RULE_TYPES[clip_rule]
@clip_rule.setter
def clip_rule(self, clip_rule):
if not isinstance(clip_rule, string_type):
raise TypeError('expected a string, not ' + repr(clip_rule))
elif clip_rule not in FILL_RULE_TYPES:
raise ValueError('expected a string from FILL_RULE_TYPES, not ' +
repr(clip_rule))
library.DrawSetClipRule(self.resource,
FILL_RULE_TYPES.index(clip_rule))
@property
def clip_units(self):
"""(:class:`basestring`) The current clip units. It also can be set.
It's a string value from :const:`CLIP_PATH_UNITS` list.
.. versionadded:: 0.4.0
"""
clip_unit = library.DrawGetClipUnits(self.resource)
return CLIP_PATH_UNITS[clip_unit]
@clip_units.setter
def clip_units(self, clip_unit):
if not isinstance(clip_unit, string_type):
raise TypeError('expected a string, not ' + repr(clip_unit))
elif clip_unit not in CLIP_PATH_UNITS:
raise ValueError('expected a string from CLIP_PATH_UNITS, not ' +
repr(clip_unit))
library.DrawSetClipUnits(self.resource,
CLIP_PATH_UNITS.index(clip_unit))
@property
def fill_color(self):
"""(:class:`~wand.color.Color`) The current color to fill.
It also can be set.
"""
pixel = library.NewPixelWand()
library.DrawGetFillColor(self.resource, pixel)
if MAGICK_VERSION_NUMBER < 0x700:
pixel_structure = MagickPixelPacket
else:
pixel_structure = PixelInfo
size = ctypes.sizeof(pixel_structure)
buffer = ctypes.create_string_buffer(size)
library.PixelGetMagickColor(pixel, buffer)
pixel = library.DestroyPixelWand(pixel)
return Color(raw=buffer)
@fill_color.setter
def fill_color(self, color):
if not isinstance(color, Color):
raise TypeError('color must be a wand.color.Color object, not ' +
repr(color))
with color:
library.DrawSetFillColor(self.resource, color.resource)
@property
def fill_opacity(self):
"""(:class:`~numbers.Real`) The current fill opacity.
It also can be set.
.. versionadded:: 0.4.0
"""
return library.DrawGetFillOpacity(self.resource)
@fill_opacity.setter
def fill_opacity(self, opacity):
if not isinstance(opacity, numbers.Real):
raise TypeError('opacity must be a double, not ' +
repr(opacity))
library.DrawSetFillOpacity(self.resource, opacity)
@property
def fill_rule(self):
"""(:class:`basestring`) The current fill rule. It can also be set.
It's a string value from :const:`FILL_RULE_TYPES` list.
.. versionadded:: 0.4.0
"""
fill_rule_index = library.DrawGetFillRule(self.resource)
if fill_rule_index not in FILL_RULE_TYPES:
self.raise_exception()
return text(FILL_RULE_TYPES[fill_rule_index])
@fill_rule.setter
def fill_rule(self, fill_rule):
if not isinstance(fill_rule, string_type):
raise TypeError('expected a string, not ' + repr(fill_rule))
elif fill_rule not in FILL_RULE_TYPES:
raise ValueError('expected a string from FILL_RULE_TYPES, not ' +
repr(fill_rule))
library.DrawSetFillRule(self.resource,
FILL_RULE_TYPES.index(fill_rule))
@property
def font(self):
"""(:class:`basestring`) The current font name. It also can be set.
.. versionchanged: 0.4.1
Safely release allocated memory with
:c:func:`MagickRelinquishMemory` instead of :c:func:`libc.free`.
"""
font_p = library.DrawGetFont(self.resource)
return text(font_p.value)
@font.setter
def font(self, font):
if not isinstance(font, string_type):
raise TypeError('expected a string, not ' + repr(font))
library.DrawSetFont(self.resource, binary(font))
@property
def font_family(self):
"""(:class:`basestring`) The current font family. It also can be set.
.. versionadded:: 0.4.0
.. versionchanged: 0.4.1
Safely release allocated memory with
:c:func:`MagickRelinquishMemory` instead of :c:func:`libc.free`.
"""
font_family_p = library.DrawGetFontFamily(self.resource)
return text(font_family_p.value)
@font_family.setter
def font_family(self, family):
if not isinstance(family, string_type):
raise TypeError('expected a string, not ' + repr(family))
library.DrawSetFontFamily(self.resource, binary(family))
@property
def font_resolution(self):
"""(:class:`~collections.Sequence`) The current font resolution. It also
can be set.
.. versionadded:: 0.4.0
"""
x, y = ctypes.c_double(0.0), ctypes.c_double(0.0)
library.DrawGetFontResolution(self.resource,
ctypes.byref(x),
ctypes.byref(y))
return x.value, y.value
@font_resolution.setter
def font_resolution(self, resolution):
if not isinstance(resolution, collections.Sequence):
raise TypeError('expected sequence, not ' + repr(resolution))
if len(resolution) != 2:
raise ValueError('expected sequence of 2 floats')
library.DrawSetFontResolution(self.resource, *resolution)
@property
def font_size(self):
"""(:class:`numbers.Real`) The font size. It also can be set."""
return library.DrawGetFontSize(self.resource)
@font_size.setter
def font_size(self, size):
if not isinstance(size, numbers.Real):
raise TypeError('expected a numbers.Real, but got ' + repr(size))
elif size < 0.0:
raise ValueError('cannot be less than 0.0, but got ' + repr(size))
library.DrawSetFontSize(self.resource, size)
@property
def font_stretch(self):
"""(:class:`basestring`) The current font stretch variation.
It also can be set, but will only apply if the font-family or encoder
supports the stretch type.
.. versionadded:: 0.4.0
"""
stretch_index = library.DrawGetFontStretch(self.resource)
return text(STRETCH_TYPES[stretch_index])
@font_stretch.setter
def font_stretch(self, stretch):
if not isinstance(stretch, string_type):
raise TypeError('expected a string, not ' + repr(stretch))
elif stretch not in STRETCH_TYPES:
raise ValueError('expected a string from STRETCH_TYPES, not ' +
repr(stretch))
library.DrawSetFontStretch(self.resource,
STRETCH_TYPES.index(stretch))
@property
def font_style(self):
"""(:class:`basestring`) The current font style.
It also can be set, but will only apply if the font-family
supports the style.
.. versionadded:: 0.4.0
"""
style_index = library.DrawGetFontStyle(self.resource)
return text(STYLE_TYPES[style_index])
@font_style.setter
def font_style(self, style):
if not isinstance(style, string_type):
raise TypeError('expected a string, not ' + repr(style))
elif style not in STYLE_TYPES:
raise ValueError('expected a string from STYLE_TYPES, not ' +
repr(style))
library.DrawSetFontStyle(self.resource,
STYLE_TYPES.index(style))
@property
def font_weight(self):
"""(:class:`~numbers.Integral`) The current font weight.
It also can be set.
.. versionadded:: 0.4.0
"""
return library.DrawGetFontWeight(self.resource)
@font_weight.setter
def font_weight(self, weight):
if not isinstance(weight, numbers.Integral):
raise TypeError('expected an integral number, not ' + repr(weight))
library.DrawSetFontWeight(self.resource, weight)
@property
def opacity(self):
"""(:class:`~numbers.Real`) returns the opacity used when drawing with
the fill or stroke color or texture. Fully opaque is 1.0. This method
only affects vector graphics, and is experimental. To set the opacity
of a drawing, use
:attr:`Drawing.fill_opacity` & :attr:`Drawing.stroke_opacity`
.. versionadded:: 0.4.0
"""
return library.DrawGetOpacity(self.resource)
@property
def gravity(self):
"""(:class:`basestring`) The text placement gravity used when
annotating with text. It's a string from :const:`GRAVITY_TYPES`
list. It also can be set.
"""
gravity_index = library.DrawGetGravity(self.resource)
if not gravity_index:
self.raise_exception()
return text(GRAVITY_TYPES[gravity_index])
@gravity.setter
def gravity(self, value):
if not isinstance(value, string_type):
raise TypeError('expected a string, not ' + repr(value))
elif value not in GRAVITY_TYPES:
raise ValueError('expected a string from GRAVITY_TYPES, not ' +
repr(value))
library.DrawSetGravity(self.resource, GRAVITY_TYPES.index(value))
@opacity.setter
def opacity(self, opaque):
library.DrawSetOpacity(self.resource, ctypes.c_double(opaque))
@property
def stroke_antialias(self):
"""(:class:`bool`) Controls whether stroked outlines are antialiased.
Stroked outlines are antialiased by default. When antialiasing is
disabled stroked pixels are thresholded to determine if the stroke
color or underlying canvas color should be used.
It also can be set.
.. versionadded:: 0.4.0
"""
stroke_antialias = library.DrawGetStrokeAntialias(self.resource)
return bool(stroke_antialias)
@stroke_antialias.setter
def stroke_antialias(self, stroke_antialias):
library.DrawSetStrokeAntialias(self.resource, bool(stroke_antialias))
@property
def stroke_color(self):
"""(:class:`~wand.color.Color`) The current color of stroke.
It also can be set.
.. versionadded:: 0.3.3
"""
pixel = library.NewPixelWand()
library.DrawGetStrokeColor(self.resource, pixel)
if MAGICK_VERSION_NUMBER < 0x700:
pixel_structure = MagickPixelPacket
else:
pixel_structure = PixelInfo
size = ctypes.sizeof(pixel_structure)
buffer = ctypes.create_string_buffer(size)
library.PixelGetMagickColor(pixel, buffer)
return Color(raw=buffer)
@stroke_color.setter
def stroke_color(self, color):
if not isinstance(color, Color):
raise TypeError('color must be a wand.color.Color object, not ' +
repr(color))
with color:
library.DrawSetStrokeColor(self.resource, color.resource)
@property
def stroke_dash_array(self):
"""(:class:`~collections.Sequence`) - (:class:`numbers.Real`) An array
representing the pattern of dashes & gaps used to stroke paths.
It also can be set.
.. versionadded:: 0.4.0
.. versionchanged: 0.4.1
Safely release allocated memory with
:c:func:`MagickRelinquishMemory` instead of :c:func:`libc.free`.
"""
number_elements = ctypes.c_size_t(0)
dash_array_p = library.DrawGetStrokeDashArray(
self.resource, ctypes.byref(number_elements)
)
dash_array = []
if dash_array_p is not None:
dash_array = [float(dash_array_p[i])
for i in xrange(number_elements.value)]
library.MagickRelinquishMemory(dash_array_p)
return dash_array
@stroke_dash_array.setter
def stroke_dash_array(self, dash_array):
dash_array_l = len(dash_array)
dash_array_p = (ctypes.c_double * dash_array_l)(*dash_array)
library.DrawSetStrokeDashArray(self.resource,
dash_array_l,
dash_array_p)
@property
def stroke_dash_offset(self):
"""(:class:`numbers.Real`) The stroke dash offset. It also can be set.
.. versionadded:: 0.4.0
"""
return library.DrawGetStrokeDashOffset(self.resource)
@stroke_dash_offset.setter
def stroke_dash_offset(self, offset):
library.DrawSetStrokeDashOffset(self.resource, float(offset))
@property
def stroke_line_cap(self):
"""(:class:`basestring`) The stroke line cap. It also can be set.
.. versionadded:: 0.4.0
"""
line_cap_index = library.DrawGetStrokeLineCap(self.resource)
if line_cap_index not in LINE_CAP_TYPES:
self.raise_exception()
return text(LINE_CAP_TYPES[line_cap_index])
@stroke_line_cap.setter
def stroke_line_cap(self, line_cap):
if not isinstance(line_cap, string_type):
raise TypeError('expected a string, not ' + repr(line_cap))
elif line_cap not in LINE_CAP_TYPES:
raise ValueError('expected a string from LINE_CAP_TYPES, not ' +
repr(line_cap))
library.DrawSetStrokeLineCap(self.resource,
LINE_CAP_TYPES.index(line_cap))
@property
def stroke_line_join(self):
"""(:class:`basestring`) The stroke line join. It also can be set.
.. versionadded:: 0.4.0
"""
line_join_index = library.DrawGetStrokeLineJoin(self.resource)
if line_join_index not in LINE_JOIN_TYPES:
self.raise_exception()
return text(LINE_JOIN_TYPES[line_join_index])
@stroke_line_join.setter
def stroke_line_join(self, line_join):
if not isinstance(line_join, string_type):
raise TypeError('expected a string, not ' + repr(line_join))
elif line_join not in LINE_JOIN_TYPES:
raise ValueError('expected a string from LINE_JOIN_TYPES, not ' +
repr(line_join))
library.DrawSetStrokeLineJoin(self.resource,
LINE_JOIN_TYPES.index(line_join))
@property
def stroke_miter_limit(self):
"""(:class:`~numbers.Integral`) The current miter limit.
It also can be set.
.. versionadded:: 0.4.0
"""
return library.DrawGetStrokeMiterLimit(self.resource)
@stroke_miter_limit.setter
def stroke_miter_limit(self, miter_limit):
if not isinstance(miter_limit, numbers.Integral):
raise TypeError('miter_limit must be an integer, not ' +
repr(miter_limit))
library.DrawSetStrokeMiterLimit(self.resource, miter_limit)
@property
def stroke_opacity(self):
"""(:class:`~numbers.Real`) The current stroke opacity.
It also can be set.
.. versionadded:: 0.4.0
"""
return library.DrawGetStrokeOpacity(self.resource)
@stroke_opacity.setter
def stroke_opacity(self, opacity):
if not isinstance(opacity, numbers.Real):
raise TypeError('opacity must be a double, not ' +
repr(opacity))
library.DrawSetStrokeOpacity(self.resource, opacity)
@property
def stroke_width(self):
"""(:class:`numbers.Real`) The stroke width. It also can be set.
.. versionadded:: 0.3.3
"""
return library.DrawGetStrokeWidth(self.resource)
@stroke_width.setter
def stroke_width(self, width):
if not isinstance(width, numbers.Real):
raise TypeError('expected a numbers.Real, but got ' + repr(width))
elif width < 0.0:
raise ValueError('cannot be less than 0.0, but got ' + repr(width))
library.DrawSetStrokeWidth(self.resource, width)
@property
def text_alignment(self):
"""(:class:`basestring`) The current text alignment setting.
It's a string value from :const:`TEXT_ALIGN_TYPES` list.
It also can be set.
"""
text_alignment_index = library.DrawGetTextAlignment(self.resource)
if not text_alignment_index:
self.raise_exception()
return text(TEXT_ALIGN_TYPES[text_alignment_index])
@text_alignment.setter
def text_alignment(self, align):
if not isinstance(align, string_type):
raise TypeError('expected a string, not ' + repr(align))
elif align not in TEXT_ALIGN_TYPES:
raise ValueError('expected a string from TEXT_ALIGN_TYPES, not ' +
repr(align))
library.DrawSetTextAlignment(self.resource,
TEXT_ALIGN_TYPES.index(align))
@property
def text_antialias(self):
"""(:class:`bool`) The boolean value which represents whether
antialiasing is used for text rendering. It also can be set to
``True`` or ``False`` to switch the setting.
"""
result = library.DrawGetTextAntialias(self.resource)
return bool(result)
@text_antialias.setter
def text_antialias(self, value):
library.DrawSetTextAntialias(self.resource, bool(value))
@property
def text_decoration(self):
"""(:class:`basestring`) The text decoration setting, a string
from :const:`TEXT_DECORATION_TYPES` list. It also can be set.
"""
text_decoration_index = library.DrawGetTextDecoration(self.resource)
if not text_decoration_index:
self.raise_exception()
return text(TEXT_DECORATION_TYPES[text_decoration_index])
@text_decoration.setter
def text_decoration(self, decoration):
if not isinstance(decoration, string_type):
raise TypeError('expected a string, not ' + repr(decoration))
elif decoration not in TEXT_DECORATION_TYPES:
raise ValueError('expected a string from TEXT_DECORATION_TYPES, '
'not ' + repr(decoration))
library.DrawSetTextDecoration(self.resource,
TEXT_DECORATION_TYPES.index(decoration))
@property
def text_direction(self):
"""(:class:`basestring`) The text direction setting. a string
from :const:`TEXT_DIRECTION_TYPES` list. It also can be set."""
if library.DrawGetTextDirection is None:
raise WandLibraryVersionError(
'the installed version of ImageMagick does not support '
'this feature'
)
text_direction_index = library.DrawGetTextDirection(self.resource)
if not text_direction_index:
self.raise_exception()
return text(TEXT_DIRECTION_TYPES[text_direction_index])
@text_direction.setter
def text_direction(self, direction):
if library.DrawGetTextDirection is None:
raise WandLibraryVersionError(
'The installed version of ImageMagick does not support '
'this feature'
)
if not isinstance(direction, string_type):
raise TypeError('expected a string, not ' + repr(direction))
elif direction not in TEXT_DIRECTION_TYPES:
raise ValueError('expected a string from TEXT_DIRECTION_TYPES, '
'not ' + repr(direction))
library.DrawSetTextDirection(self.resource,
TEXT_DIRECTION_TYPES.index(direction))
@property
def text_encoding(self):
"""(:class:`basestring`) The internally used text encoding setting.
It can also be set, but doing so is not encouraged.
.. versionchanged: 0.4.1
Safely release allocated memory with
:c:func:`MagickRelinquishMemory` instead of :c:func:`libc.free`.
"""
text_encoding_p = library.DrawGetTextEncoding(self.resource)
return text(text_encoding_p.value)
@text_encoding.setter
def text_encoding(self, encoding):
if encoding is not None and not isinstance(encoding, string_type):
raise TypeError('expected a string, not ' + repr(encoding))
elif encoding is None:
# Specify an empty string to set the text encoding
# to the system's default.
encoding = b''
else:
encoding = binary(encoding)
library.DrawSetTextEncoding(self.resource, encoding)
@property
def text_interline_spacing(self):
"""(:class:`numbers.Real`) The setting of the text line spacing.
It also can be set.
"""
if library.DrawGetTextInterlineSpacing is None:
raise WandLibraryVersionError('The installed version of '
'ImageMagick does not support '
'this feature')
return library.DrawGetTextInterlineSpacing(self.resource)
@text_interline_spacing.setter
def text_interline_spacing(self, spacing):
if library.DrawSetTextInterlineSpacing is None:
raise WandLibraryVersionError('The installed version of '
'ImageMagick does not support '
'this feature')
if not isinstance(spacing, numbers.Real):
raise TypeError('expected a numbers.Real, but got ' +
repr(spacing))
library.DrawSetTextInterlineSpacing(self.resource, spacing)
@property
def text_interword_spacing(self):
"""(:class:`numbers.Real`) The setting of the word spacing.
It also can be set.
"""
return library.DrawGetTextInterwordSpacing(self.resource)
@text_interword_spacing.setter
def text_interword_spacing(self, spacing):
if not isinstance(spacing, numbers.Real):
raise TypeError('expected a numbers.Real, but got ' + repr(spacing))
library.DrawSetTextInterwordSpacing(self.resource, spacing)
@property
def text_kerning(self):
"""(:class:`numbers.Real`) The setting of the text kerning.
It also can be set.
"""
return library.DrawGetTextKerning(self.resource)
@text_kerning.setter
def text_kerning(self, kerning):
if not isinstance(kerning, numbers.Real):
raise TypeError('expected a numbers.Real, but got ' +
repr(kerning))
library.DrawSetTextKerning(self.resource, kerning)
@property
def text_under_color(self):
"""(:class:`~wand.color.Color`) The color of a background rectangle
to place under text annotations. It also can be set.
"""
pixel = library.NewPixelWand()
library.DrawGetTextUnderColor(self.resource, pixel)
if MAGICK_VERSION_NUMBER < 0x700:
pixel_structure = MagickPixelPacket
else:
pixel_structure = PixelInfo
size = ctypes.sizeof(pixel_structure)
buffer = ctypes.create_string_buffer(size)
library.PixelGetMagickColor(pixel, buffer)
pixel = library.DestroyPixelWand(pixel)
return Color(raw=buffer)
@text_under_color.setter
def text_under_color(self, color):
if not isinstance(color, Color):
raise TypeError('expected a wand.color.Color object, not ' +
repr(color))
with color:
library.DrawSetTextUnderColor(self.resource, color.resource)
@property
def vector_graphics(self):
"""(:class:`basestring`) The XML text of the Vector Graphics.
It also can be set. The drawing-wand XML is experimental,
and subject to change.
Setting this property to None will reset all vector graphic properties
to the default state.
.. versionadded:: 0.4.0
.. versionchanged: 0.4.1
Safely release allocated memory with
:c:func:`MagickRelinquishMemory` instead of :c:func:`libc.free`.
"""
vector_graphics_p = library.DrawGetVectorGraphics(self.resource)
return '<wand>' + text(vector_graphics_p.value) + '</wand>'
@vector_graphics.setter
def vector_graphics(self, vector_graphics):
if vector_graphics is not None and not isinstance(vector_graphics,
string_type):
raise TypeError('expected a string, not ' + repr(vector_graphics))
elif vector_graphics is None:
# Reset all vector graphic properties on drawing wand.
library.DrawResetVectorGraphics(self.resource)
else:
vector_graphics = binary(vector_graphics)
okay = library.DrawSetVectorGraphics(self.resource,
vector_graphics)
if okay == 0:
raise ValueError("Vector graphic not understood.")
def affine(self, matrix):
"""Adjusts the current affine transformation matrix with the specified
affine transformation matrix. Note that the current affine transform is
adjusted rather than replaced.
.. sourcecode:: text
| sx rx 0 |
| x', y', 1 | = | x, y, 1 | * | ry sy 0 |
| tx ty 1 |
:param matrix: a list of :class:`~numbers.Real` to define affine
matrix ``[sx, rx, ry, sy, tx, ty]``
:type matrix: :class:`collections.Sequence`
.. versionadded:: 0.4.0
"""
if not isinstance(matrix, collections.Sequence) or len(matrix) != 6:
raise ValueError('matrix must be a sequence of 6 real numbers')
for idx, val in enumerate(matrix):
if not isinstance(val, numbers.Real):
raise TypeError('expecting numbers.Real in position #' +
repr(idx))
amx = AffineMatrix(sx=matrix[0], rx=matrix[1],
ry=matrix[2], sy=matrix[3],
tx=matrix[4], ty=matrix[5])
library.DrawAffine(self.resource, ctypes.byref(amx))
def alpha(self, x=None, y=None, paint_method='undefined'):
"""Paints on the image's opacity channel in order to set effected pixels
to transparent.
To influence the opacity of pixels. The available methods are:
- ``'undefined'``
- ``'point'``
- ``'replace'``
- ``'floodfill'``
- ``'filltoborder'``
- ``'reset'``
.. note::
This method replaces :meth:`matte()` in ImageMagick version 7.
An :class:`AttributeError` will be raised if attempting
to call on a library without ``DrawAlpha`` support.
.. versionadded:: 0.5.0
"""
if library.DrawAlpha is None:
raise AttributeError(
'Method added with ImageMagick version 7. ' +
'Please use `wand.drawing.Drawing.matte()\' instead.'
)
if x is None or y is None:
raise TypeError('Both x & y coordinates need to be defined')
if not isinstance(paint_method, string_type):
raise TypeError('expected a string, not ' + repr(paint_method))
elif paint_method not in PAINT_METHOD_TYPES:
raise ValueError(
'expected a string from PAINT_METHOD_TYPES, not ' +
repr(paint_method)
)
library.DrawAlpha(self.resource, float(x), float(y),
PAINT_METHOD_TYPES.index(paint_method))
def arc(self, start, end, degree):
"""Draws a arc using the current :attr:`stroke_color`,
:attr:`stroke_width`, and :attr:`fill_color`.
:param start: (:class:`~numbers.Real`, :class:`numbers.Real`)
pair which represents starting x and y of the arc
:type start: :class:`~collections.Sequence`
:param end: (:class:`~numbers.Real`, :class:`numbers.Real`)
pair which represents ending x and y of the arc
:type end: :class:`~collections.Sequence`
:param degree: (:class:`~numbers.Real`, :class:`numbers.Real`)
pair which represents starting degree, and ending degree
:type degree: :class:`~collections.Sequence`
.. versionadded:: 0.4.0
"""
start_x, start_y = start
end_x, end_y = end
degree_start, degree_end = degree
library.DrawArc(self.resource,
float(start_x), float(start_y),
float(end_x), float(end_y),
float(degree_start), float(degree_end))
def bezier(self, points=None):
"""Draws a bezier curve through a set of points on the image, using
the specified array of coordinates.
At least four points should be given to complete a bezier path.
The first & fourth point being the start & end point, and the second
& third point controlling the direction & curve.
Example bezier on ``image`` ::
with Drawing() as draw:
points = [(40,10), # Start point
(20,50), # First control
(90,10), # Second control
(70,40)] # End point
draw.stroke_color = Color('#000')
draw.fill_color = Color('#fff')
draw.bezier(points)
draw.draw(image)
:param points: list of x,y tuples
:type points: :class:`list`
.. versionadded:: 0.4.0
"""
(points_l, points_p) = _list_to_point_info(points)
library.DrawBezier(self.resource, points_l,
ctypes.cast(points_p, ctypes.POINTER(PointInfo)))
def circle(self, origin, perimeter):
"""Draws a circle from ``origin`` to ``perimeter``
:param origin: (:class:`~numbers.Real`, :class:`numbers.Real`)
pair which represents origin x and y of circle
:type origin: :class:`collections.Sequence`
:param perimeter: (:class:`~numbers.Real`, :class:`numbers.Real`)
pair which represents perimeter x and y of circle
:type perimeter: :class:`collections.Sequence`
.. versionadded:: 0.4.0
"""
origin_x, origin_y = origin
perimeter_x, perimeter_y = perimeter
library.DrawCircle(self.resource,
float(origin_x), float(origin_y), # origin
float(perimeter_x), float(perimeter_y)) # perimeter
def clear(self):
library.ClearDrawingWand(self.resource)
def clone(self):
"""Copies a drawing object.
:returns: a duplication
:rtype: :class:`Drawing`
"""
return type(self)(drawing=self)
def color(self, x=None, y=None, paint_method='undefined'):
"""Draws a color on the image using current fill color, starting
at specified position & method.
Available methods in :class:`wand.drawing.PAINT_METHOD_TYPES`:
- ``'undefined'``
- ``'point'``
- ``'replace'``
- ``'floodfill'``
- ``'filltoborder'``
- ``'reset'``
.. versionadded:: 0.4.0
"""
if x is None or y is None:
raise TypeError('Both x & y coordinates need to be defined')
if not isinstance(paint_method, string_type):
raise TypeError('expected a string, not ' + repr(paint_method))
elif paint_method not in PAINT_METHOD_TYPES:
raise ValueError(
'expected a string from PAINT_METHOD_TYPES, not ' +
repr(paint_method)
)
library.DrawColor(self.resource, float(x), float(y),
PAINT_METHOD_TYPES.index(paint_method))
def comment(self, message=None):
"""Adds a comment to the vector stream.
:param message: the comment to set.
:type message: :class:`basestring`
.. versionadded:: 0.4.0
"""
if message is not None and not isinstance(message, string_type):
raise TypeError('expected a string, not ' + repr(message))
elif message is None:
message = b''
else:
message = binary(message)
library.DrawComment(self.resource, message)
def composite(self, operator, left, top, width, height, image):
"""Composites an image onto the current image, using the specified
composition operator, specified position, and at the specified size.
:param operator: the operator that affects how the composite
is applied to the image. available values
can be found in the :const:`COMPOSITE_OPERATORS`
list
:param type: :const:`COMPOSITE_OPERATORS`
:param left: the column offset of the composited drawing source
:type left: :class:`numbers.Real`
:param top: the row offset of the composited drawing source
:type top: :class:`numbers.Real`
:param width: the total columns to include in the composited source
:type width: :class:`numbers.Real`
:param height: the total rows to include in the composited source
:type height: :class:`numbers.Real`
.. versionadded:: 0.4.0
"""
if not isinstance(operator, string_type):
raise TypeError('operator must be a string, not ' +
repr(operator))
elif not isinstance(left, numbers.Real):
raise TypeError('left must be an integer, not ' + repr(left))
elif not isinstance(top, numbers.Real):
raise TypeError('top must be an integer, not ' + repr(top))
elif not isinstance(width, numbers.Real):
raise TypeError('width must be an integer, not ' + repr(width))
elif not isinstance(height, numbers.Real):
raise TypeError('height must be an integer, not ' + repr(height))
try:
op = COMPOSITE_OPERATORS.index(operator)
except ValueError:  # tuple.index raises ValueError for an unknown operator
raise IndexError(repr(operator) + ' is an invalid composite '
'operator type; see wand.image.COMPOSITE_'
'OPERATORS dictionary')
okay = library.DrawComposite(self.resource, op, left, top, width,
height, image.wand)
if okay == 0:
self.raise_exception()
def draw(self, image):
"""Renders the current drawing into the ``image``. You can simply
call :class:`Drawing` instance rather than calling this method.
That means the following code which calls :class:`Drawing` object
itself::
drawing(image)
is equivalent to the following code which calls :meth:`draw()` method::
drawing.draw(image)
:param image: the image to be drawn
:type image: :class:`~wand.image.BaseImage`
"""
if not isinstance(image, BaseImage):
raise TypeError('image must be a wand.image.BaseImage instance,'
' not ' + repr(image))
if isinstance(image, SingleImage):
previous = library.MagickGetIteratorIndex(image.container.wand)
library.MagickSetIteratorIndex(image.container.wand, image.index)
res = library.MagickDrawImage(image.container.wand, self.resource)
library.MagickSetIteratorIndex(image.container.wand, previous)
else:
res = library.MagickDrawImage(image.wand, self.resource)
if not res:
self.raise_exception()
def ellipse(self, origin, radius, rotation=(0, 360)):
"""Draws a ellipse at ``origin`` with independent x & y ``radius``.
Ellipse can be partial by setting start & end ``rotation``.
:param origin: (:class:`~numbers.Real`, :class:`numbers.Real`)
pair which represents origin x and y of circle
:type origin: :class:`collections.Sequence`
:param radius: (:class:`~numbers.Real`, :class:`numbers.Real`)
pair which represents radius x and radius y of circle
:type radius: :class:`collections.Sequence`
:param rotation: (:class:`~numbers.Real`, :class:`numbers.Real`)
pair which represents start and end of ellipse.
Default (0,360)
:type rotation: :class:`collections.Sequence`
.. versionadded:: 0.4.0
"""
origin_x, origin_y = origin
radius_x, radius_y = radius
rotation_start, rotation_end = rotation
library.DrawEllipse(self.resource,
float(origin_x), float(origin_y), # origin
float(radius_x), float(radius_y), # radius
float(rotation_start), float(rotation_end))
def get_font_metrics(self, image, text, multiline=False):
"""Queries font metrics from the given ``text``.
:param image: the image to be drawn
:type image: :class:`~wand.image.BaseImage`
:param text: the text string to get font metrics for.
:type text: :class:`basestring`
:param multiline: text is multiline or not
:type multiline: `boolean`
"""
if not isinstance(image, BaseImage):
raise TypeError('image must be a wand.image.BaseImage instance,'
' not ' + repr(image))
if not isinstance(text, string_type):
raise TypeError('text must be a string, not ' + repr(text))
if multiline:
font_metrics_f = library.MagickQueryMultilineFontMetrics
else:
font_metrics_f = library.MagickQueryFontMetrics
if isinstance(text, text_type):
if self.text_encoding:
text = text.encode(self.text_encoding)
else:
text = binary(text)
result = font_metrics_f(image.wand, self.resource, text)
args = (result[i] for i in xrange(13))
return FontMetrics(*args)
def line(self, start, end):
"""Draws a line ``start`` to ``end``.
:param start: (:class:`~numbers.Integral`, :class:`numbers.Integral`)
pair which represents starting x and y of the line
:type start: :class:`collections.Sequence`
:param end: (:class:`~numbers.Integral`, :class:`numbers.Integral`)
pair which represents ending x and y of the line
:type end: :class:`collections.Sequence`
"""
start_x, start_y = start
end_x, end_y = end
library.DrawLine(self.resource,
int(start_x), int(start_y),
int(end_x), int(end_y))
def matte(self, x=None, y=None, paint_method='undefined'):
"""Paints on the image's opacity channel in order to set effected pixels
to transparent.
To influence the opacity of pixels. The available methods are:
- ``'undefined'``
- ``'point'``
- ``'replace'``
- ``'floodfill'``
- ``'filltoborder'``
- ``'reset'``
.. note::
This method has been replaced by :meth:`alpha()` in ImageMagick
version 7. An :class:`AttributeError` will be raised if attempting
to call on a library without ``DrawMatte`` support.
.. versionadded:: 0.4.0
"""
if library.DrawMatte is None:
raise AttributeError(
'Method removed from ImageMagick version 7. ' +
'Please use `wand.drawing.Drawing.alpha()\' instead.'
)
if x is None or y is None:
raise TypeError('Both x & y coordinates need to be defined')
if not isinstance(paint_method, string_type):
raise TypeError('expected a string, not ' + repr(paint_method))
elif paint_method not in PAINT_METHOD_TYPES:
raise ValueError(
'expected a string from PAINT_METHOD_TYPES, not ' +
repr(paint_method)
)
library.DrawMatte(self.resource, float(x), float(y),
PAINT_METHOD_TYPES.index(paint_method))
def path_close(self):
"""Adds a path element to the current path which closes
the current subpath by drawing a straight line from the current point
to the current subpath's most recent starting point.
.. versionadded:: 0.4.0
"""
library.DrawPathClose(self.resource)
return self
def path_curve(self, to=None, controls=None, smooth=False, relative=False):
"""Draws a cubic Bezier curve from the current point to given ``to``
(x,y) coordinate using ``controls`` points at the beginning and
the end of the curve.
If ``smooth`` is set to True, only one ``controls`` is expected
and the previous control is used, else two pair of coordinates are
expected to define the control points. The ``to`` coordinate then
becomes the new current point.
:param to: (:class:`~numbers.Real`, :class:`numbers.Real`)
pair which represents coordinates to draw to
:type to: :class:`collections.Sequence`
:param controls: (:class:`~numbers.Real`, :class:`numbers.Real`)
coordinates used to influence the curve
:type controls: :class:`collections.Sequence`
:param smooth: :class:`bool` assume last defined control coordinate
:type smooth: :class:`bool`
:param relative: treat given coordinates as relative to current point
:type relative: :class:`bool`
.. versionadded:: 0.4.0
"""
if to is None:
raise TypeError('to is missing')
if controls is None:
raise TypeError('controls is missing')
x, y = to
if smooth:
x2, y2 = controls
else:
(x1, y1), (x2, y2) = controls
if smooth:
if relative:
library.DrawPathCurveToSmoothRelative(self.resource,
x2, y2, x, y)
else:
library.DrawPathCurveToSmoothAbsolute(self.resource,
x2, y2, x, y)
else:
if relative:
library.DrawPathCurveToRelative(self.resource,
x1, y1, x2, y2, x, y)
else:
library.DrawPathCurveToAbsolute(self.resource,
x1, y1, x2, y2, x, y)
return self
def path_curve_to_quadratic_bezier(self, to=None, control=None,
smooth=False, relative=False):
"""Draws a quadratic Bezier curve from the current point to given
``to`` coordinate. The control point is assumed to be the reflection of
the control point on the previous command if ``smooth`` is True, else a
pair of ``control`` coordinates must be given. Each coordinates can be
relative, or absolute, to the current point by setting the ``relative``
flag. The ``to`` coordinate then becomes the new current point, and the
``control`` coordinate will be assumed when called again
when ``smooth`` is set to true.
:param to: (:class:`~numbers.Real`, :class:`numbers.Real`)
pair which represents coordinates to draw to
:type to: :class:`collections.Sequence`
:param control: (:class:`~numbers.Real`, :class:`numbers.Real`)
coordinate used to influence the curve
:type control: :class:`collections.Sequence`
:param smooth: assume last defined control coordinate
:type smooth: :class:`bool`
:param relative: treat given coordinates as relative to current point
:type relative: :class:`bool`
.. versionadded:: 0.4.0
"""
if to is None:
raise TypeError('to is missing')
x, y = to
if smooth:
if relative:
library.DrawPathCurveToQuadraticBezierSmoothRelative(
self.resource, float(x), float(y)
)
else:
library.DrawPathCurveToQuadraticBezierSmoothAbsolute(
self.resource, float(x), float(y)
)
else:
if control is None:
raise TypeError('control is missing')
x1, y1 = control
if relative:
library.DrawPathCurveToQuadraticBezierRelative(self.resource,
float(x1),
float(y1),
float(x),
float(y))
else:
library.DrawPathCurveToQuadraticBezierAbsolute(self.resource,
float(x1),
float(y1),
float(x),
float(y))
return self
def path_elliptic_arc(self, to=None, radius=None, rotation=0.0,
large_arc=False, clockwise=False, relative=False):
"""Draws an elliptical arc from the current point to given ``to``
coordinates. The ``to`` coordinates can be relative, or absolute,
to the current point by setting the ``relative`` flag.
The size and orientation of the ellipse are defined by
two radii (rx, ry) in ``radius`` and an ``rotation`` parameters,
which indicates how the ellipse as a whole is
rotated relative to the current coordinate system. The center of the
ellipse is calculated automagically to satisfy the constraints imposed
by the other parameters. ``large_arc`` and ``clockwise`` contribute to
the automatic calculations and help determine how the arc is drawn.
If ``large_arc`` is True then draw the larger of the available arcs.
If ``clockwise`` is true, then draw the arc matching a clock-wise
rotation.
:param to: (:class:`~numbers.Real`, :class:`numbers.Real`)
pair which represents coordinates to draw to
:type to: :class:`collections.Sequence`
:param radius: (:class:`~numbers.Real`, :class:`numbers.Real`)
pair which represents the radii of the ellipse to draw
:type radius: :class:`collections.Sequence`
:param rotation: degrees to rotate the ellipse on the x-axis
:type rotation: :class:`~numbers.Real`
:param large_arc: draw largest available arc
:type large_arc: :class:`bool`
:param clockwise: draw arc path clockwise from start to target
:type clockwise: :class:`bool`
:param relative: treat given coordinates as relative to current point
:type relative: :class:`bool`
.. versionadded:: 0.4.0
"""
if to is None:
raise TypeError('to is missing')
if radius is None:
raise TypeError('radius is missing')
x, y = to
rx, ry = radius
if relative:
library.DrawPathEllipticArcRelative(self.resource,
float(rx), float(ry),
float(rotation),
bool(large_arc),
bool(clockwise),
float(x), float(y))
else:
library.DrawPathEllipticArcAbsolute(self.resource,
float(rx), float(ry),
float(rotation),
bool(large_arc),
bool(clockwise),
float(x), float(y))
return self
def path_finish(self):
"""Terminates the current path.
.. versionadded:: 0.4.0
"""
library.DrawPathFinish(self.resource)
return self
def path_horizontal_line(self, x=None, relative=False):
"""Draws a horizontal line path from the current point to the target
point. Given ``x`` parameter can be relative, or absolute, to the
current point by setting the ``relative`` flag. The target point then
becomes the new current point.
:param x: :class:`~numbers.Real`
x-axis point to draw to.
:type x: :class:`~numbers.Real`
:param relative: :class:`bool`
treat given point as relative to current point
:type relative: :class:`bool`
.. versionadded:: 0.4.0
"""
if x is None:
raise TypeError('x is missing')
if relative:
library.DrawPathLineToHorizontalRelative(self.resource, float(x))
else:
library.DrawPathLineToHorizontalAbsolute(self.resource, float(x))
return self
def path_line(self, to=None, relative=False):
"""Draws a line path from the current point to the given ``to``
coordinate. The ``to`` coordinates can be relative, or absolute, to the
current point by setting the ``relative`` flag. The coordinate then
becomes the new current point.
:param to: (:class:`~numbers.Real`, :class:`numbers.Real`)
pair which represents coordinates to draw to.
:type to: :class:`collections.Sequence`
:param relative: :class:`bool`
treat given coordinates as relative to current point
:type relative: :class:`bool`
.. versionadded:: 0.4.0
"""
if to is None:
raise TypeError('to is missing')
x, y = to
if relative:
library.DrawPathLineToRelative(self.resource, float(x), float(y))
else:
library.DrawPathLineToAbsolute(self.resource, float(x), float(y))
return self
def path_move(self, to=None, relative=False):
"""Starts a new sub-path at the given coordinates. Given ``to``
parameter can be relative, or absolute, by setting the ``relative``
flag.
:param to: (:class:`~numbers.Real`, :class:`numbers.Real`)
pair which represents coordinates to draw to.
:type to: :class:`collections.Sequence`
:param relative: :class:`bool`
treat given coordinates as relative to current point
:type relative: :class:`bool`
.. versionadded:: 0.4.0
"""
if to is None:
raise TypeError('to is missing')
x, y = to
if relative:
library.DrawPathMoveToRelative(self.resource, float(x), float(y))
else:
library.DrawPathMoveToAbsolute(self.resource, float(x), float(y))
return self
def path_start(self):
"""Declares the start of a path drawing list which is terminated by a
matching :meth:`path_finish()` command. All other `path_*` commands
must be enclosed between a :meth:`path_start()` and a
:meth:`path_finish()` command. This is because path drawing commands
are subordinate commands and they do not function by themselves.
.. versionadded:: 0.4.0
"""
library.DrawPathStart(self.resource)
return self
def path_vertical_line(self, y=None, relative=False):
"""Draws a vertical line path from the current point to the target
point. Given ``y`` parameter can be relative, or absolute, to the
current point by setting the ``relative`` flag. The target point then
becomes the new current point.
:param y: :class:`~numbers.Real`
y-axis point to draw to.
:type y: :class:`~numbers.Real`
:param relative: :class:`bool`
treat given point as relative to current point
:type relative: :class:`bool`
.. versionadded:: 0.4.0
"""
if y is None:
raise TypeError('y is missing')
if relative:
library.DrawPathLineToVerticalRelative(self.resource, float(y))
else:
library.DrawPathLineToVerticalAbsolute(self.resource, float(y))
return self
def polygon(self, points=None):
"""Draws a polygon using the current :attr:`stoke_color`,
:attr:`stroke_width`, and :attr:`fill_color`, using the specified
array of coordinates.
Example polygon on ``image`` ::
with Drawing() as draw:
points = [(40,10), (20,50), (90,10), (70,40)]
draw.polygon(points)
draw.draw(image)
:param points: list of x,y tuples
:type points: :class:`list`
.. versionadded:: 0.4.0
"""
(points_l, points_p) = _list_to_point_info(points)
library.DrawPolygon(self.resource, points_l,
ctypes.cast(points_p, ctypes.POINTER(PointInfo)))
def polyline(self, points=None):
"""Draws a polyline using the current :attr:`stoke_color`,
:attr:`stroke_width`, and :attr:`fill_color`, using the specified
array of coordinates.
Identical to :class:`~wand.drawing.Drawing.polygon`, but without a
closing stroke line.
:param points: list of x,y tuples
:type points: :class:`list`
.. versionadded:: 0.4.0
"""
(points_l, points_p) = _list_to_point_info(points)
library.DrawPolyline(self.resource, points_l,
ctypes.cast(points_p, ctypes.POINTER(PointInfo)))
def point(self, x, y):
"""Draws a point at given ``x`` and ``y``
:param x: :class:`~numbers.Real` x of point
:type x: :class:`~numbers.Real`
:param y: :class:`~numbers.Real` y of point
:type y: :class:`~numbers.Real`
.. versionadded:: 0.4.0
"""
library.DrawPoint(self.resource,
float(x),
float(y))
def pop(self):
"""Pop destroys the current tip of the drawing context stack,
and restores the parent style context.
See :meth:`push()` method for an example.
.. note::
Popping the graphical context stack will not erase,
or alter, any previously executed drawing commands.
:returns: success of pop operation.
:rtype: `bool`
.. versionadded:: 0.4.0
"""
return bool(library.PopDrawingWand(self.resource))
def pop_clip_path(self):
"""Terminates a clip path definition.
.. versionadded:: 0.4.0
"""
library.DrawPopClipPath(self.resource)
def pop_defs(self):
"""Terminates a definition list.
.. versionadded:: 0.4.0
"""
library.DrawPopDefs(self.resource)
def pop_pattern(self):
"""Terminates a pattern definition.
.. versionadded:: 0.4.0
"""
library.DrawPopPattern(self.resource)
def push(self):
"""Grows the current drawing context stack by one, and inherits
the previous style attributes. Use :class:`Drawing.pop` to restore the
previous style attributes.
This is useful for drawing shapes with different styles
without repeatedly setting the same
:meth:`fill_color <wand.drawing.Drawing.fill_color>` &
:meth:`stroke_color <wand.drawing.Drawing.stroke_color>` properties.
For example::
with Drawing() as ctx:
ctx.fill_color = Color('GREEN')
ctx.stroke_color = Color('ORANGE')
ctx.push()
ctx.fill_color = Color('RED')
ctx.text(x1, y1, 'this is RED with ORANGE outline')
ctx.push()
ctx.stroke_color = Color('BLACK')
ctx.text(x2, y2, 'this is RED with BLACK outline')
ctx.pop()
ctx.pop()
ctx.text(x3, y3, 'this is GREEN with ORANGE outline')
Which translates to the following MVG::
push graphic-context
fill "GREEN"
stroke "ORANGE"
push graphic-context
fill "RED"
text x1,y1 "this is RED with ORANGE outline"
push graphic-context
stroke "BLACK"
text x2,y2 "this is RED with BLACK outline"
pop graphic-context
pop graphic-context
text x3,y3 "this is GREEN with ORANGE outline"
pop graphic-context
.. note::
Pushing graphical context does not reset any previously
drawn artifacts.
:returns: success of push operation.
:rtype: `bool`
.. versionadded:: 0.4.0
"""
return bool(library.PushDrawingWand(self.resource))
def push_clip_path(self, clip_mask_id):
"""Starts a clip path definition which is comprised of any number of
drawing commands and terminated by a :class:`Drawing.pop_clip_path`
command.
:param clip_mask_id: string identifier to associate with the clip path.
:type clip_mask_id: :class:`basestring`
.. versionadded:: 0.4.0
"""
library.DrawPushClipPath(self.resource, binary(clip_mask_id))
def push_defs(self):
"""Indicates that commands up to a terminating :class:`Drawing.pop_defs`
command create named elements (e.g. clip-paths, textures, etc.) which
may safely be processed earlier for the sake of efficiency.
.. versionadded:: 0.4.0
"""
library.DrawPushDefs(self.resource)
def push_pattern(self, pattern_id, left, top, width, height):
"""Indicates that subsequent commands up to a
:class:`Drawing.pop_pattern` command comprise the definition of a named
pattern. The pattern space is assigned top left corner coordinates, a
width and height, and becomes its own drawing space. Anything which can
be drawn may be used in a pattern definition.
Named patterns may be used as stroke or brush definitions.
:param pattern_id: a unique identifier for the pattern.
:type pattern_id: :class:`basestring`
:param left: x ordinate of top left corner.
:type left: :class:`numbers.Real`
:param top: y ordinate of top left corner.
:type top: :class:`numbers.Real`
:param width: width of pattern space.
:type width: :class:`numbers.Real`
:param height: height of pattern space.
:type height: :class:`numbers.Real`
:returns: success of push operation
:rtype: `bool`
.. versionadded:: 0.4.0
"""
if not isinstance(pattern_id, string_type):
raise TypeError('pattern_id must be a string, not ' +
repr(pattern_id))
elif not isinstance(left, numbers.Real):
raise TypeError('left must be numbers.Real, not ' + repr(left))
elif not isinstance(top, numbers.Real):
raise TypeError('top must be numbers.Real, not ' + repr(top))
elif not isinstance(width, numbers.Real):
raise TypeError('width must be numbers.Real, not ' + repr(width))
elif not isinstance(height, numbers.Real):
raise TypeError('height must be numbers.Real, not ' + repr(height))
okay = library.DrawPushPattern(self.resource, binary(pattern_id),
left, top,
width, height)
return bool(okay)
def rectangle(self, left=None, top=None, right=None, bottom=None,
width=None, height=None, radius=None, xradius=None,
yradius=None):
"""Draws a rectangle using the current :attr:`stroke_color`,
:attr:`stroke_width`, and :attr:`fill_color`.
.. sourcecode:: text
+--------------------------------------------------+
| ^ ^ |
| | | |
| top | |
| | | |
| v | |
| <-- left --> +-------------------+ bottom |
| | ^ | | |
| | <-- width --|---> | | |
| | height | | |
| | | | | |
| | v | | |
| +-------------------+ v |
| <--------------- right ----------> |
+--------------------------------------------------+
:param left: x-offset of the rectangle to draw
:type left: :class:`numbers.Real`
:param top: y-offset of the rectangle to draw
:type top: :class:`numbers.Real`
:param right: second x-offset of the rectangle to draw.
this parameter and the ``width`` parameter are mutually
exclusive
:type right: :class:`numbers.Real`
:param bottom: second y-offset of the rectangle to draw.
this parameter and the ``height`` parameter are mutually
exclusive
:type bottom: :class:`numbers.Real`
:param width: the :attr:`width` of the rectangle to draw.
this parameter and the ``right`` parameter are mutually
exclusive
:type width: :class:`numbers.Real`
:param height: the :attr:`height` of the rectangle to draw.
this parameter and the ``bottom`` parameter are mutually
exclusive
:type height: :class:`numbers.Real`
:param radius: the corner rounding. This is a short-cut for setting
both :attr:`xradius` and :attr:`yradius`
:type radius: :class:`numbers.Real`
:param xradius: the corner radius in the horizontal direction.
:type xradius: :class:`numbers.Real`
:param yradius: the corner radius in the vertical direction.
:type yradius: :class:`numbers.Real`
.. versionadded:: 0.3.6
.. versionchanged:: 0.4.0
Radius keywords added to create rounded rectangle.
"""
if left is None:
raise TypeError('left is missing')
elif top is None:
raise TypeError('top is missing')
elif right is None and width is None:
raise TypeError('right/width is missing')
elif bottom is None and height is None:
raise TypeError('bottom/height is missing')
elif not (right is None or width is None):
raise TypeError('parameters right and width are exclusive each '
'other; use one at a time')
elif not (bottom is None or height is None):
raise TypeError('parameters bottom and height are exclusive each '
'other; use one at a time')
elif not isinstance(left, numbers.Real):
raise TypeError('left must be numbers.Real, not ' + repr(left))
elif not isinstance(top, numbers.Real):
raise TypeError('top must be numbers.Real, not ' + repr(top))
elif not (right is None or isinstance(right, numbers.Real)):
raise TypeError('right must be numbers.Real, not ' + repr(right))
elif not (bottom is None or isinstance(bottom, numbers.Real)):
raise TypeError('bottom must be numbers.Real, not ' + repr(bottom))
elif not (width is None or isinstance(width, numbers.Real)):
raise TypeError('width must be numbers.Real, not ' + repr(width))
elif not (height is None or isinstance(height, numbers.Real)):
raise TypeError('height must be numbers.Real, not ' + repr(height))
if right is None:
if width < 0:
raise ValueError('width must be positive, not ' + repr(width))
right = left + width
elif right < left:
raise ValueError('right must be more than left ({0!r}), '
'not {1!r}'.format(left, right))
if bottom is None:
if height < 0:
raise ValueError('height must be positive, not ' +
repr(height))
bottom = top + height
elif bottom < top:
raise ValueError('bottom must be more than top ({0!r}), '
'not {1!r}'.format(top, bottom))
if radius is not None:
xradius = yradius = radius
if xradius is not None or yradius is not None:
if xradius is None:
xradius = 0.0
if yradius is None:
yradius = 0.0
if not isinstance(xradius, numbers.Real):
raise TypeError('xradius must be numbers.Real, not ' +
repr(xradius))
if not isinstance(yradius, numbers.Real):
raise TypeError('yradius must be numbers.Real, not ' +
repr(yradius))
library.DrawRoundRectangle(self.resource, left, top, right, bottom,
xradius, yradius)
else:
library.DrawRectangle(self.resource, left, top, right, bottom)
self.raise_exception()
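# Illustrative sketch (not part of the original module): one plain and one
# rounded rectangle drawn with the method above; ``image`` is assumed to be
# a ``wand.image.Image`` instance.
#
#     with Drawing() as draw:
#         draw.fill_color = Color('ORANGE')
#         draw.rectangle(left=10, top=10, width=80, height=40)
#         draw.rectangle(left=10, top=60, right=90, bottom=100, radius=8)
#         draw(image)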
def rotate(self, degree):
"""Applies the specified rotation to the current coordinate space.
:param degree: degree to rotate
:type degree: :class:`~numbers.Real`
.. versionadded:: 0.4.0
"""
library.DrawRotate(self.resource, float(degree))
def scale(self, x=None, y=None):
"""
Adjusts the scaling factor to apply in the horizontal and vertical
directions to the current coordinate space.
:param x: Horizontal scale factor
:type x: :class:`~numbers.Real`
:param y: Vertical scale factor
:type y: :class:`~numbers.Real`
.. versionadded:: 0.4.0
"""
if not isinstance(x, numbers.Real):
raise TypeError('expecting numbers.Real, not ' + repr(x))
if not isinstance(y, numbers.Real):
raise TypeError('expecting numbers.Real, not ' + repr(y))
library.DrawScale(self.resource, x, y)
def set_fill_pattern_url(self, url):
"""Sets the URL to use as a fill pattern for filling objects. Only local
URLs ("#identifier") are supported at this time. These local URLs are
normally created by defining a named fill pattern with
Drawing.push_pattern & Drawing.pop_pattern.
:param url: URL to use to obtain fill pattern.
:type url: :class:`basestring`
.. versionadded:: 0.4.0
"""
if not isinstance(url, string_type):
raise TypeError('expecting basestring, not ' + repr(url))
if url[0] != '#':
raise ValueError('value not a relative URL, '
'expecting "#identifier"')
okay = library.DrawSetFillPatternURL(self.resource, binary(url))
if okay == 0:
# ThrowDrawException(DrawError,"URLNotFound",fill_url)
self.raise_exception()
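# Illustrative sketch (not part of the original module): define a named
# pattern with push_pattern()/pop_pattern() and use it through the local
# URL "#tile" set by this method; ``image`` is an assumed
# ``wand.image.Image`` instance.
#
#     with Drawing() as draw:
#         draw.push_pattern('tile', 0, 0, 10, 10)
#         draw.rectangle(left=0, top=0, width=5, height=5)
#         draw.pop_pattern()
#         draw.set_fill_pattern_url('#tile')
#         draw.rectangle(left=20, top=20, width=100, height=60)
#         draw(image)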
def set_stroke_pattern_url(self, url):
"""Sets the pattern used for stroking object outlines. Only local
URLs ("#identifier") are supported at this time. These local URLs are
normally created by defining a named stroke pattern with
Drawing.push_pattern & Drawing.pop_pattern.
:param url: URL to use to obtain stroke pattern.
:type url: :class:`basestring`
.. versionadded:: 0.4.0
"""
if not isinstance(url, string_type):
raise TypeError('expecting basestring, not ' + repr(url))
if url[0] != '#':
raise ValueError('value not a relative URL, '
'expecting "#identifier"')
okay = library.DrawSetStrokePatternURL(self.resource, binary(url))
if okay == 0:
# ThrowDrawException(DrawError,"URLNotFound",fill_url)
self.raise_exception()
def skew(self, x=None, y=None):
"""Skews the current coordinate system in the horizontal direction if
``x`` is given, and vertical direction if ``y`` is given.
:param x: Skew horizontal direction
:type x: :class:`~numbers.Real`
:param y: Skew vertical direction
:type y: :class:`~numbers.Real`
.. versionadded:: 0.4.0
"""
if x is not None:
library.DrawSkewX(self.resource, float(x))
if y is not None:
library.DrawSkewY(self.resource, float(y))
def text(self, x, y, body):
"""Writes a text ``body`` into (``x``, ``y``).
:param x: the left offset where to start writing a text
:type x: :class:`numbers.Integral`
:param y: the baseline where to start writing text
:type y: :class:`numbers.Integral`
:param body: the body string to write
:type body: :class:`basestring`
"""
if not isinstance(x, numbers.Integral) or x < 0:
exc = ValueError if x < 0 else TypeError
raise exc('x must be a natural number, not ' + repr(x))
elif not isinstance(y, numbers.Integral) or y < 0:
exc = ValueError if y < 0 else TypeError
raise exc('y must be a natural number, not ' + repr(y))
elif not isinstance(body, string_type):
raise TypeError('body must be a string, not ' + repr(body))
elif not body:
raise ValueError('body string cannot be empty')
if isinstance(body, text_type):
# According to ImageMagick C API docs, we can use only UTF-8
# at this time, so we do hardcoding here.
# http://imagemagick.org/api/drawing-wand.php#DrawSetTextEncoding
if not self.text_encoding:
self.text_encoding = 'UTF-8'
body = body.encode(self.text_encoding)
body_p = ctypes.create_string_buffer(body)
library.DrawAnnotation(
self.resource, x, y,
ctypes.cast(body_p, ctypes.POINTER(ctypes.c_ubyte))
)
def translate(self, x=None, y=None):
"""Applies a translation to the current coordinate system which moves
the coordinate system origin to the specified coordinate.
:param x: amount to translate in the horizontal direction
:type x: :class:`~numbers.Real`
:param y: amount to translate in the vertical direction
:type y: :class:`~numbers.Real`
.. versionadded:: 0.4.0
"""
if x is None or y is None:
raise TypeError('Both x & y coordinates need to be defined')
library.DrawTranslate(self.resource, float(x), float(y))
def viewbox(self, left, top, right, bottom):
"""Viewbox sets the overall canvas size to be recorded with the drawing
vector data. Usually this will be specified using the same size as the
canvas image. When the vector data is saved to SVG or MVG formats, the
viewbox is used to specify the size of the canvas image that a viewer
will render the vector data on.
:param left: the left most point of the viewbox.
:type left: :class:`~numbers.Integral`
:param top: the top most point of the viewbox.
:type top: :class:`~numbers.Integral`
:param right: the right most point of the viewbox.
:type right: :class:`~numbers.Integral`
:param bottom: the bottom most point of the viewbox.
:type bottom: :class:`~numbers.Integral`
.. versionadded:: 0.4.0
"""
if not isinstance(left, numbers.Integral):
raise TypeError('left must be an integer, not ' + repr(left))
if not isinstance(top, numbers.Integral):
raise TypeError('top must be an integer, not ' + repr(top))
if not isinstance(right, numbers.Integral):
raise TypeError('right must be an integer, not ' + repr(right))
if not isinstance(bottom, numbers.Integral):
raise TypeError('bottom must be an integer, not ' + repr(bottom))
library.DrawSetViewbox(self.resource, left, top, right, bottom)
def __call__(self, image):
return self.draw(image)
def _list_to_point_info(points):
"""
Helper method to convert a list of tuples to ``const * PointInfo``
:param points: a list of tuples
:type points: `list`
:returns: tuple of point length and c_double array
:rtype: `tuple`
:raises: `TypeError`
.. versionadded:: 0.4.0
"""
if not isinstance(points, list):
raise TypeError('points must be a list, not ' + repr(points))
point_length = len(points)
tuple_size = 2
point_info_size = point_length * tuple_size
# Allocate sequence of memory
point_info = (ctypes.c_double * point_info_size)()
for double_index in range(point_info_size):
tuple_index = double_index // tuple_size
tuple_offset = double_index % tuple_size
point_info[double_index] = ctypes.c_double(
points[tuple_index][tuple_offset]
)
return (point_length, point_info)
|
[
"514387454@qq.com"
] |
514387454@qq.com
|
77e5796a70dd85d3f790f641fff4f58e94fb2bd5
|
24e795724343272baa1f17d2dd114eb6bb3d88d1
|
/Practice_18.py
|
039196accaea7c3f30227bbcd74e55fee847c15e
|
[] |
no_license
|
WangYangLau/learnpython
|
e36e1e0c33049c1429e39305aac2041894854577
|
8e30e32c899a1f3ff863856b2ce49d3cdeec715d
|
refs/heads/master
| 2021-05-09T04:07:52.370614
| 2018-02-23T09:50:08
| 2018-02-23T09:50:08
| 119,263,086
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 549
|
py
|
# -*- coding:utf-8 -*-
# Purpose: rewrite the code using an anonymous (lambda) function
print(u'Purpose: rewrite the code using an anonymous (lambda) function:')
print('>>>------------------------------------------------------------------------<<<')
print(u'Original code:')
print(u'def is_odd(n):\n return n % 2 == 1')
def is_odd(n):
return n % 2 == 1
L = list(filter(is_odd,range(1,20)))
print(L)
print('>>>------------------------------------------------------------------------<<<')
print(u'After modification:')
print(u'lambda n:n%2==1')
L = list(filter(lambda n:n%2==1,range(1,20)))
print(L)
|
[
"d13532641570@sina.com"
] |
d13532641570@sina.com
|
3bea2d2337ff018591755799851db7cdb825e197
|
b5417a88698d27da258aafece80c6799c72ed17a
|
/python/gui.py
|
f31758fbf9d79b9b87a5e4fdbe811a49bd86d1e5
|
[] |
no_license
|
yangyiLTS/myGraduationDisign
|
e22f69f4b6dc4aad26d6f61715df7cfdebafdeb2
|
5f748e015a719a193e94e0e59699cf048fb60029
|
refs/heads/master
| 2021-09-10T18:50:22.166261
| 2018-03-31T02:51:25
| 2018-03-31T02:51:25
| 124,567,455
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,049
|
py
|
# -*- coding: utf-8 -*-
###########################################################################
## Python code generated with wxFormBuilder (version Jun 17 2015)
## http://www.wxformbuilder.org/
##
## PLEASE DO "NOT" EDIT THIS FILE!
###########################################################################
import wx
import wx.xrc
###########################################################################
## Class MyFrame1
###########################################################################
class MyFrame1 ( wx.Frame ):
def __init__( self, parent ):
wx.Frame.__init__ ( self, parent, id = wx.ID_ANY, title = wx.EmptyString, pos = wx.DefaultPosition, size = wx.Size( 500,300 ), style = wx.DEFAULT_FRAME_STYLE|wx.TAB_TRAVERSAL )
self.SetSizeHintsSz( wx.DefaultSize, wx.DefaultSize )
bSizer1 = wx.BoxSizer( wx.VERTICAL )
self.m_gauge1 = wx.Gauge( self, wx.ID_ANY, 100, wx.DefaultPosition, wx.DefaultSize, wx.GA_HORIZONTAL )
self.m_gauge1.SetValue( 0 )
bSizer1.Add( self.m_gauge1, 0, wx.ALL, 5 )
self.m_staticText1 = wx.StaticText( self, wx.ID_ANY, u"MyLabel", wx.DefaultPosition, wx.DefaultSize, wx.ALIGN_CENTRE )
self.m_staticText1.Wrap( -1 )
bSizer1.Add( self.m_staticText1, 0, wx.ALL, 5 )
self.m_staticText2 = wx.StaticText( self, wx.ID_ANY, u"MyLabel", wx.DefaultPosition, wx.DefaultSize, wx.ALIGN_CENTRE )
self.m_staticText2.Wrap( -1 )
bSizer1.Add( self.m_staticText2, 0, wx.ALL, 5 )
self.m_slider2 = wx.Slider( self, wx.ID_ANY, 50, 0, 100, wx.DefaultPosition, wx.DefaultSize, wx.SL_AUTOTICKS|wx.SL_HORIZONTAL )
bSizer1.Add( self.m_slider2, 0, wx.ALL, 5 )
self.m_textCtrl1 = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, wx.TE_CENTRE )
bSizer1.Add( self.m_textCtrl1, 0, wx.ALL, 5 )
self.m_button1 = wx.Button( self, wx.ID_ANY, u"Set", wx.DefaultPosition, wx.DefaultSize, 0 )
bSizer1.Add( self.m_button1, 0, wx.ALL, 5 )
self.SetSizer( bSizer1 )
self.Layout()
self.Centre( wx.BOTH )
def __del__( self ):
pass
|
[
"641016261@qq.com"
] |
641016261@qq.com
|
5c01e6ebccbde1df2187e06649bbcd2e11b766df
|
a3ef7fea843abf7623810e4eb9169a1ab53e948f
|
/sHIeR_hogwarts/test_lubo/pytest_lubo_demo.py
|
9ae4ed5b654c32d83a30967efdca833b876ae1b4
|
[] |
no_license
|
Lee-121/sHIeR
|
ba6f816b7628786d9d58936b7ff26be89347f780
|
c40f94577959a7a03db1d0f12191124ac01b65c9
|
refs/heads/master
| 2022-11-30T14:11:52.686005
| 2020-08-07T00:21:39
| 2020-08-07T00:21:39
| 283,985,503
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 254
|
py
|
import pytest
import yaml
class TestClass:
@pytest.mark.parametrize("env",yaml.safe_load(open("./env.yml")))
def testclass(self,env):
if "test" in env:
print("这是测试环境")
print("测试环境IP是:",env["test"])
|
[
"lidong27969@163.com"
] |
lidong27969@163.com
|
80e2beb51f9bc744c2ad3281e31ca4984400db1e
|
9ffc6df101c55f66b8a392c5f0076ef23aeef8d9
|
/GUI/ui_ExplorationWindow.py
|
6019e33644ebdb7885e63041c3f22a24a7de1d66
|
[] |
no_license
|
anliec/py-mosaic
|
92b85db16b9ce003fe190019a58ad5740f2c3693
|
1e7dc30da1d1b4fd7e761cfdd287b35c83c26de8
|
refs/heads/master
| 2021-01-18T21:11:03.841090
| 2017-07-10T19:46:09
| 2017-07-10T19:46:09
| 87,012,399
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,133
|
py
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'explorationwindows.ui'
#
# Created by: PyQt5 UI code generator 5.8
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName("Dialog")
Dialog.resize(436, 121)
self.verticalLayout = QtWidgets.QVBoxLayout(Dialog)
self.verticalLayout.setObjectName("verticalLayout")
self.label_2 = QtWidgets.QLabel(Dialog)
self.label_2.setWordWrap(True)
self.label_2.setObjectName("label_2")
self.verticalLayout.addWidget(self.label_2)
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
self.label = QtWidgets.QLabel(Dialog)
self.label.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.label.setObjectName("label")
self.horizontalLayout.addWidget(self.label)
self.lb_filePath = QtWidgets.QLabel(Dialog)
self.lb_filePath.setTextFormat(QtCore.Qt.AutoText)
self.lb_filePath.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.lb_filePath.setWordWrap(True)
self.lb_filePath.setObjectName("lb_filePath")
self.horizontalLayout.addWidget(self.lb_filePath)
spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout.addItem(spacerItem)
self.verticalLayout.addLayout(self.horizontalLayout)
self.retranslateUi(Dialog)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
_translate = QtCore.QCoreApplication.translate
Dialog.setWindowTitle(_translate("Dialog", "Dialog"))
self.label_2.setText(_translate("Dialog", "Please wait during the indexing of the pictures in the database."))
self.label.setText(_translate("Dialog", "Exploring: "))
self.lb_filePath.setText(_translate("Dialog", "file path"))
|
[
"nicolas.six@insa-lyon.fr"
] |
nicolas.six@insa-lyon.fr
|
06c160162788075020cfb5afa273340034f0291d
|
32fe3b08e89267a05ab0ae8636de094e3f47f71d
|
/Week_07/547.friend-circles.py
|
c9ca51064dbfb0a6af4239a7bddfeb2a4ab3ec04
|
[] |
no_license
|
luyanfei/algorithm016
|
eb66e118a8fb517c95cc663f87f600cc7ff06d94
|
585026fb158fb6fe8737123d66447c305b1e4c51
|
refs/heads/master
| 2023-02-08T07:08:53.267872
| 2020-12-29T02:59:58
| 2020-12-29T02:59:58
| 293,283,847
| 0
| 0
| null | 2020-09-06T13:36:03
| 2020-09-06T13:36:02
| null |
UTF-8
|
Python
| false
| false
| 2,207
|
py
|
#
# @lc app=leetcode.cn id=547 lang=python3
#
# [547] Friend Circles
#
# https://leetcode-cn.com/problems/friend-circles/description/
#
# algorithms
# Medium (58.59%)
# Total Accepted: 72.4K
# Total Submissions: 123.2K
# Testcase Example: '[[1,1,0],[1,1,0],[0,0,1]]'
#
# There are N students in a class. Some of them are friends, while some are
# not. Their friendship is transitive: if A is known to be a friend of B,
# and B is a friend of C, then A is also considered a friend of C. A friend
# circle is the set of all such friends.
#
# Given an N * N matrix M describing the friendships between students: if
# M[i][j] = 1, then students i and j are known to be friends; otherwise the
# relationship is unknown. You must output the total number of known friend
# circles among all students.
#
# Example 1:
#
# Input:
# [[1,1,0],
# [1,1,0],
# [0,0,1]]
# Output: 2
# Explanation: students 0 and 1 are friends, so they are in one friend
# circle. Student 2 is in a friend circle by himself, so return 2.
#
# Example 2:
#
# Input:
# [[1,1,0],
# [1,1,1],
# [0,1,1]]
# Output: 1
# Explanation: students 0 and 1 are friends, and students 1 and 2 are
# friends, so students 0 and 2 are also friends; all three are in one
# friend circle, so return 1.
#
# Constraints:
#
# 1 <= N <= 200
# M[i][i] == 1
# M[i][j] == M[j][i]
#
#
#
from typing import List
class Solution:
def findCircleNum(self, M: List[List[int]]) -> int:
n = len(M)
parent = [i for i in range(n)]
size = [1] * n
count = n
def find(x):
while x != parent[x]:
x = parent[x]
parent[x] = parent[parent[x]]
return x
def connect(x, y):
nonlocal count
px, py = find(x), find(y)
if px == py:
return
if size[px] < size[py]:
parent[px] = py
size[py] += size[px]
else:
parent[py] = px
size[px] += size[py]
count -= 1
for i in range(1, n):
for j in range(i):
if M[i][j] == 1:
connect(i, j)
return count
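# Quick illustrative check (not part of the LeetCode submission): the
# union-find solution above reproduces the expected outputs from the two
# examples in the problem statement.
#
#     Solution().findCircleNum([[1, 1, 0], [1, 1, 0], [0, 0, 1]])  # -> 2
#     Solution().findCircleNum([[1, 1, 0], [1, 1, 1], [0, 1, 1]])  # -> 1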
|
[
"luyanfei78@163.com"
] |
luyanfei78@163.com
|
e7d2b285ef08d7fec28ec285da4ef32b68669586
|
c265a33e3bacded3d0699831ae6e0d7dcf317c06
|
/googleplaystore.py
|
eaf4489f8e29ba7acaa22ff991c283c8259ccdc1
|
[] |
no_license
|
luma24/CNA330-SQL-FinalProject
|
dad742b6af3f2dfcff5ca514f9db492302190730
|
16fbbd9c52becfe9473982aee39529ee08cc9b47
|
refs/heads/main
| 2023-01-31T03:41:06.406705
| 2020-12-10T01:54:29
| 2020-12-10T01:54:29
| 315,171,935
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,121
|
py
|
# CNA330 12/2/2020
# This script migrates the resulting database (googleplaystore) from this project into the cloud (I used AWS).
import pandas as pd
from sqlalchemy import create_engine
hostname = "18.217.178.166"
uname = "pythoneverything"
pwd = "python123"
dbname = "googleplaystore"
engine = create_engine("mysql+pymysql://{user}:{pw}@{host}/{db}"
.format(host=hostname, db=dbname, user=uname, pw=pwd))
df = pd.read_csv(r'C:\Users\Luma\PycharmProjects\GroupProject\googleplaystore.csv', index_col='App', nrows=1000)
connection = engine.connect()
df.to_sql('googleplaystore', con=engine, if_exists='append')
engine.execute('CREATE TABLE googleplaystore_temp Like googleplaystore')
engine.execute('INSERT INTO googleplaystore_temp SELECT DISTINCT App, Category,Rating, Reviews, Size,Installs, Type, Price, ContentRating, Genres, LastUpdated, CurrentVer, AndroidVer FROM googleplaystore ')
engine.execute('DROP TABLE googleplaystore')
engine.execute('ALTER TABLE googleplaystore_temp RENAME TO googleplaystore')
connection.close()
|
[
"noreply@github.com"
] |
luma24.noreply@github.com
|
52136f2ea223a4914f9aa8ac2be2ae948c9c7326
|
2ac829af0520dcba8f4eee987715eda7d2a43422
|
/src/supplementary.py
|
25fa8d4b97c9cfa39ce1ccbbab83827ced28aea5
|
[] |
no_license
|
Maslyaev/Evolutionary
|
9024e88fce0d306668e2c2a835e3098b1dd538fc
|
61d8159550f7cd98b2a698ec3ae479b91d2d25be
|
refs/heads/master
| 2021-07-24T17:58:30.731966
| 2020-03-04T16:45:59
| 2020-03-04T16:45:59
| 195,274,198
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,038
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Feb 13 16:33:34 2020
@author: mike_ubuntu
"""
import numpy as np
def Slice_Data_3D(matrix, part = 4, part_tuple = None): # Input matrix slicing for separate domain calculation
if part_tuple:
for i in range(part_tuple[0]):
for j in range(part_tuple[1]):
yield matrix[:, i*int(matrix.shape[1]/float(part_tuple[0])):(i+1)*int(matrix.shape[1]/float(part_tuple[0])),
j*int(matrix.shape[2]/float(part_tuple[1])):(j+1)*int(matrix.shape[2]/float(part_tuple[1]))], i, j
return  # stop here so the default square partitioning below is not also yielded
part_dim = int(np.sqrt(part))
for i in range(part_dim):
for j in range(part_dim):
yield matrix[:, i*int(matrix.shape[1]/float(part_dim)):(i+1)*int(matrix.shape[1]/float(part_dim)),
j*int(matrix.shape[2]/float(part_dim)):(j+1)*int(matrix.shape[2]/float(part_dim))], i, j
def Define_Derivatives(dimensionality, max_order = 2):
var_names = ['1', 'u']
for var_idx in range(dimensionality):
for order in range(max_order):
if order == 0:
var_names.append('du/dx'+str(var_idx+1))
else:
var_names.append('d^'+str(order+1)+'u/dx'+str(var_idx+1)+'^'+str(order+1))
return tuple(var_names)
def Create_Var_Matrices(U_input, max_order = 3):
var_names = ['1', 'u']
for var_idx in range(U_input.ndim):
for order in range(max_order):
if order == 0:
var_names.append('du/dx'+str(var_idx+1))
else:
var_names.append('d^'+str(order+1)+'u/dx'+str(var_idx+1)+'^'+str(order+1))
variables = np.ones((len(var_names),) + U_input.shape)
return variables, tuple(var_names)
def Prepare_Data_matrixes(raw_matrix, dim_info):
resulting_matrix = np.reshape(raw_matrix, dim_info)
return resulting_matrix
def Decode_Gene(gene, token_names, parameter_labels, n_params = 1):
term_dict = {}
for token_idx in range(0, gene.shape[0], n_params):
term_params = {}#coll.OrderedDict()
for param_idx in range(0, n_params):
term_params[parameter_labels[param_idx]] = gene[token_idx*n_params + param_idx]
term_dict[token_names[int(token_idx/n_params)]] = term_params
return term_dict
def Encode_Gene(label_dict, token_names, parameter_labels, n_params = 1):
# print(type(variables_names), variables_names)
gene = np.zeros(shape = len(token_names) * n_params)
for i in range(len(token_names)):
if token_names[i] in label_dict:
#print(token_names, label_dict[token_names[i]])
for key, value in label_dict[token_names[i]].items():
gene[i*n_params + parameter_labels.index(key)] = value
return gene
def Population_Sort(input_population):
pop_sorted = [x for x, _ in sorted(zip(input_population, [individual.fitness_value for individual in input_population]), key=lambda pair: pair[1])]
return list(reversed(pop_sorted))
|
[
"noreply@github.com"
] |
Maslyaev.noreply@github.com
|
64bf4574ffaf7c9e7254ba5ed68d070ff062dda6
|
53f8516c3d3aae5104dd376355947000cee942c5
|
/app/views.py
|
713aa858d5c17b980eb8f4a1be7e57b84c11fd58
|
[] |
no_license
|
NoriKaneshige/Django_Multiple_File_Upload_by_Formset_without_Models
|
7f8318fa7017ad356df945b87e6ba12dbbfca4c1
|
c55949321deb0ba1b92725f773f8f30d57846d33
|
refs/heads/master
| 2022-07-09T16:16:09.608785
| 2020-05-07T23:03:39
| 2020-05-07T23:03:39
| 262,169,244
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 406
|
py
|
from django.views import generic
from .forms import UploadFormSet
class MultiUploadView(generic.FormView):
form_class = UploadFormSet
template_name = 'app/upload.html'
def form_valid(self, form):
download_url_list = form.save()
context = {
'download_url_list': download_url_list,
'form': form,
}
return self.render_to_response(context)
|
[
"atsunori@umich.edu"
] |
atsunori@umich.edu
|
19aef23ec0d15019d2caa02e699ed5b558bd5a3e
|
66981c1cc17b7d720bf09458aa528c14164b1696
|
/policyopt/nn.py
|
a6d0d5e5b17a49db853e6d707bb214bf28e7fd44
|
[
"MIT"
] |
permissive
|
KAIST-AILab/gmmil
|
d451dbf9d050f602c5ced15188b7999b7b3a900d
|
f49d3c9bf6221cb534142b8ddf22d60dd9fa711f
|
refs/heads/master
| 2020-03-21T22:10:47.641735
| 2018-10-02T01:38:47
| 2018-10-02T01:38:47
| 139,109,233
| 13
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 18,313
|
py
|
from contextlib import contextmanager
import collections
import h5py
import hashlib
import json
import numpy as np
import os
import os.path
import tables, warnings; warnings.filterwarnings('ignore', category=tables.NaturalNameWarning)
import util
import theano
from theano import tensor
# Global variable scoping utility, modeled after Tensorflow
_curr_active_scope = None
class variable_scope(object):
def __init__(self, name):
global _curr_active_scope
self.name = name
if self.name is None:
# This is the root scope
assert _curr_active_scope is None
self.parent = None
self.fullname = ''
else:
# This is not the root scope
assert _curr_active_scope is not None
assert '/' not in name
self.parent = _curr_active_scope
assert self.name not in self.parent.children, 'Scope {} already exists in parent scope {}'.format(self.name, self.parent.fullname)
self.parent.children[self.name] = self
self.fullname = self.parent.fullname + '/' + self.name
self.children = collections.OrderedDict()
self.vars = collections.OrderedDict()
def __enter__(self):
global _curr_active_scope
_curr_active_scope = self
return self
def __exit__(self, exc_type, exc_val, exc_tb):
global _curr_active_scope
assert _curr_active_scope == self
_curr_active_scope = self.parent
def get_child_variables(self, trainable_only):
vs = [v for v, trainable in self.vars.itervalues() if (not trainable_only or trainable)]
for c in self.children.itervalues():
vs += c.get_child_variables(trainable_only)
return vs
# to be called by get_variable
def _register_variable(self, name, init_value, broadcastable, trainable):
assert '/' not in name
assert name not in self.vars, 'Variable name {} already registered in scope {}'.format(name, self.fullname)
v = theano.shared(value=init_value, name=self.fullname + '/' + name, broadcastable=broadcastable)
self.vars[name] = (v, trainable)
return v
_curr_active_scope = variable_scope(None) # this is the root scope
def get_variable(name, init_value, broadcastable=None, trainable=True):
global _curr_active_scope
return _curr_active_scope._register_variable(name, init_value, broadcastable, trainable)
def reset_global_scope():
global _curr_active_scope
_curr_active_scope = None
_curr_active_scope = variable_scope(None)
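# Minimal usage sketch for the scoping utility above (illustrative only):
# nested scopes give shared variables hierarchical names, and
# get_child_variables() collects every variable registered in a subtree.
#
#     with variable_scope('policy'):
#         with variable_scope('layer_0') as scope:
#             W = get_variable('W', np.zeros((3, 2)))  # named '/policy/layer_0/W'
#         trainable = scope.get_child_variables(trainable_only=True)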
def _hash_name2array(name2array):
'''
Hashes a list of (name,array) tuples.
The hash is invariant to permutations of the list.
'''
def hash_array(a):
return '%.10f,%.10f,%d' % (np.mean(a), np.var(a), np.argmax(a))
return hashlib.sha1('|'.join('%s %s' % (n, h) for n, h in sorted([(name, hash_array(a)) for name, a in name2array]))).hexdigest()
import abc
class Model(object):
'''
A model abstraction. Stores variables and can save/load them to HDF5 files.
'''
__metaclass__ = abc.ABCMeta
@abc.abstractproperty
def varscope(self): pass
def get_all_variables(self):
return self.varscope.get_child_variables(trainable_only=False)
def get_trainable_variables(self):
return self.varscope.get_child_variables(trainable_only=True)
def print_trainable_variables(self):
for v in self.get_trainable_variables():
util.header('- %s (%d parameters)' % (v.name, v.get_value().size))
util.header('Total: %d parameters' % (self.get_num_params(),))
def get_num_params(self):
return sum(v.get_value().size for v in self.get_trainable_variables())
### Utilities for getting/setting flattened parameter vectors ###
def set_params(self, x):
# print 'setting param vars:\n{}'.format('\n'.join([v.name for v in self.get_trainable_variables()]))
assert x.ndim == 1
pos = 0
for v in self.get_trainable_variables():
val = v.get_value()
s = val.size
v.set_value(x[pos:pos+s].reshape(val.shape).astype(theano.config.floatX))
pos += s
assert pos == x.shape[0]
def get_params(self):
return util.flatcat([v.get_value() for v in self.get_trainable_variables()])
@contextmanager
def try_params(self, x):
orig_x = self.get_params()
self.set_params(x)
yield
self.set_params(orig_x)
# HDF5 saving and loading
# The hierarchy in the HDF5 file reflects the hierarchy in the Tensorflow graph.
def savehash(self):
return _hash_name2array([(v.name, v.get_value()) for v in self.get_all_variables()])
def save_h5(self, h5file, key, extra_attrs=None):
with h5py.File(h5file, 'a') as f:
if key in f:
util.warn('WARNING: key %s already exists in %s' % (key, h5file))
dset = f[key]
else:
dset = f.create_group(key)
for v in self.get_all_variables():
dset[v.name] = v.get_value()
dset.attrs['hash'] = self.savehash()
if extra_attrs is not None:
for k, v in extra_attrs:
if k in dset.attrs:
util.warn('Warning: attribute %s already exists in %s' % (k, dset.name))
dset.attrs[k] = v
def load_h5(self, h5file, key):
with h5py.File(h5file, 'r') as f:
dset = f[key]
for v in self.get_all_variables():
assert v.name[0] == '/'; vname = v.name[1:]
print 'Reading', vname
if vname in dset:
v.set_value(dset[vname][...])
elif vname+':0' in dset:
# Tensorflow saves variables with :0 appended to the name,
# so try this for backwards compatibility
v.set_value(dset[vname+':0'][...])
else:
raise RuntimeError('Variable %s not found in %s' % (vname, dset))
h = self.savehash()
assert h == dset.attrs['hash'], 'Checkpoint hash %s does not match loaded hash %s' % (dset.attrs['hash'], h)
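# Hypothetical round-trip sketch for the HDF5 helpers above (the file and
# group names are illustrative, not taken from the original code): any Model
# subclass can persist its variables under a group key and later restore
# them, with the stored hash guarding against mismatched checkpoints.
#
#     net.save_h5('checkpoints.h5', 'snapshots/iter0000100')
#     ...
#     net.load_h5('checkpoints.h5', 'snapshots/iter0000100')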
# Layers for feedforward networks
class Layer(Model):
@abc.abstractproperty
def output(self):
pass
@abc.abstractproperty
def output_shape(self):
'''Shape refers to the shape without the batch axis, which always implicitly goes first'''
pass
class ReshapeLayer(Layer):
def __init__(self, input_, new_shape):
self._output_shape = tuple(new_shape)
util.header('Reshape(new_shape=%s)' % (str(self._output_shape),))
with variable_scope(type(self).__name__) as self.__varscope:
self._output = input_.reshape((-1,)+self._output_shape)
@property
def varscope(self): return self.__varscope
@property
def output(self): return self._output
@property
def output_shape(self): return self._output_shape
class AffineLayer(Layer):
def __init__(self, input_B_Di, input_shape, output_shape, initializer):
assert len(input_shape) == len(output_shape) == 1
util.header('Affine(in=%d, out=%d)' % (input_shape[0], output_shape[0]))
self._output_shape = (output_shape[0],)
with variable_scope(type(self).__name__) as self.__varscope:
if initializer is None:
# initializer = np.random.randn(input_shape[0], output_shape[0]) * np.sqrt(2./input_shape[0])
# Glorot/Bengio 2010
s = np.sqrt(6. / (input_shape[0] + output_shape[0]))
initializer = np.random.uniform(low=-s, high=s, size=(input_shape[0],output_shape[0]))
else:
assert initializer.shape == (input_shape[0], output_shape[0])
self.W_Di_Do = get_variable('W', initializer.astype(theano.config.floatX))
self.b_1_Do = get_variable('b', np.zeros((1, output_shape[0]), dtype=theano.config.floatX), broadcastable=(True,False))
self._output_B_Do = input_B_Di.dot(self.W_Di_Do) + self.b_1_Do
@property
def varscope(self): return self.__varscope
@property
def output(self): return self._output_B_Do
@property
def output_shape(self): return self._output_shape
class NonlinearityLayer(Layer):
def __init__(self, input_B_Di, output_shape, func):
util.header('Nonlinearity(func=%s)' % func)
self._output_shape = output_shape
with variable_scope(type(self).__name__) as self.__varscope:
self._output_B_Do = {
'relu': tensor.nnet.relu,
'lrelu': lambda x: tensor.nnet.relu(x, .01),
'elu': tensor.nnet.elu,
'tanh': tensor.tanh,
}[func](input_B_Di)
@property
def varscope(self): return self.__varscope
@property
def output(self): return self._output_B_Do
@property
def output_shape(self): return self._output_shape
def _check_keys(d, keys, optional):
s = set(d.keys())
if not (s == set(keys) or s == set(keys+optional)):
raise RuntimeError('Got keys %s, but expected keys %s with optional keys %s' % (str(s), str(keys), str(optional)))
def _parse_initializer(layerspec):
if 'initializer' not in layerspec:
return None
initspec = layerspec['initializer']
raise NotImplementedError('Unknown layer initializer type %s' % initspec['type'])
class FeedforwardNet(Layer):
def __init__(self, input_B_Di, input_shape, layerspec_json):
'''
Args:
layerspec (string): JSON string describing layers
'''
assert len(input_shape) >= 1
self.input_B_Di = input_B_Di
layerspec = json.loads(layerspec_json)
util.header('Loading feedforward net specification')
print json.dumps(layerspec, indent=2, separators=(',', ': '))
self.layers = []
with variable_scope(type(self).__name__) as self.__varscope:
prev_output, prev_output_shape = input_B_Di, input_shape
for i_layer, ls in enumerate(layerspec):
with variable_scope('layer_%d' % i_layer):
if ls['type'] == 'reshape':
_check_keys(ls, ['type', 'new_shape'], [])
self.layers.append(ReshapeLayer(prev_output, ls['new_shape']))
elif ls['type'] == 'fc':
_check_keys(ls, ['type', 'n'], ['initializer'])
self.layers.append(AffineLayer(
prev_output, prev_output_shape, output_shape=(ls['n'],), initializer=_parse_initializer(ls)))
elif ls['type'] == 'nonlin':
_check_keys(ls, ['type', 'func'], [])
self.layers.append(NonlinearityLayer(prev_output, prev_output_shape, ls['func']))
else:
raise NotImplementedError('Unknown layer type %s' % ls['type'])
prev_output, prev_output_shape = self.layers[-1].output, self.layers[-1].output_shape
self._output, self._output_shape = prev_output, prev_output_shape
@property
def varscope(self): return self.__varscope
@property
def output(self): return self._output
@property
def output_shape(self): return self._output_shape
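# An illustrative layerspec for FeedforwardNet (assumed typical usage, not
# taken from the original experiment configs): a JSON list whose entries use
# the 'fc' and 'nonlin' layer types parsed above.
#
#     spec = '[{"type": "fc", "n": 64},'\
#            ' {"type": "nonlin", "func": "relu"},'\
#            ' {"type": "fc", "n": 10}]'
#     net = FeedforwardNet(input_B_Di, input_shape=(784,), layerspec_json=spec)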
def _printfields(fields, sep=' | ', width=8, precision=4, print_header=True):
names, vals, fmts = [], [], []
for name, val, typeinfo in fields:
names.append(name)
if val is None:
# display Nones as empty entries
vals.append('')
fmts.append('{:%ds}' % width)
else:
vals.append(val)
if typeinfo is int:
fmts.append('{:%dd}' % width)
elif typeinfo is float:
fmts.append('{:%d.%df}' % (width, precision))
else:
raise NotImplementedError(typeinfo)
if print_header:
header = ((('{:^%d}' % width) + sep) * len(names))[:-len(sep)].format(*names)
print '-'*len(header)
print header
print '-'*len(header)
print sep.join(fmts).format(*vals)
def _type_to_col(t, pos):
if t is int: return tables.Int32Col(pos=pos)
if t is float: return tables.Float32Col(pos=pos)
raise NotImplementedError(t)
class TrainingLog(object):
'''A training log backed by PyTables. Stores diagnostic numbers over time and model snapshots.'''
def __init__(self, filename, attrs):
if filename is None:
util.warn('Warning: not writing log to any file!')
self.f = None
else:
if os.path.exists(filename):
raise RuntimeError('Log file %s already exists' % filename)
self.f = tables.open_file(filename, mode='w')
for k, v in attrs: self.f.root._v_attrs[k] = v
self.log_table = None
self.schema = None # list of col name / types for display
def close(self):
if self.f is not None: self.f.close()
def write(self, kvt, display=True, **kwargs):
# Write to the log
if self.f is not None:
if self.log_table is None:
desc = {k: _type_to_col(t, pos) for pos, (k, _, t) in enumerate(kvt)}
self.log_table = self.f.create_table(self.f.root, 'log', desc)
row = self.log_table.row
for k,v,_ in kvt: row[k] = v
row.append()
self.log_table.flush()
if display:
if self.schema is None:
self.schema = [(k,t) for k,_,t in kvt]
else:
# If we are missing columns, fill them in with Nones
nonefilled_kvt = []
kvt_dict = {k:(v,t) for k,v,t in kvt}
for schema_k, schema_t in self.schema:
if schema_k in kvt_dict:
v, t = kvt_dict[schema_k]
nonefilled_kvt.append((schema_k, v, t)) # check t == schema_t too?
else:
nonefilled_kvt.append((schema_k, None, schema_t))
kvt = nonefilled_kvt
_printfields(kvt, **kwargs)
def write_snapshot(self, model, key_iter):
if self.f is None: return
# Save all variables into this group
snapshot_root = '/snapshots/iter%07d' % key_iter
for v in model.get_all_variables():
assert v.name[0] == '/'
fullpath = snapshot_root + v.name
groupname, arrayname = fullpath.rsplit('/', 1)
self.f.create_array(groupname, arrayname, v.get_value(), createparents=True)
# Store the model hash as an attribute
self.f.get_node(snapshot_root)._v_attrs.hash = model.savehash()
self.f.flush()
class NoOpStandardizer(object):
def __init__(self, dim, eps=1e-6): pass
def update(self, points_N_D): pass
def standardize_expr(self, x_B_D): return x_B_D
def unstandardize_expr(self, y_B_D): return y_B_D
def standardize(self, x_B_D): return x_B_D
def unstandardize(self, y_B_D): return y_B_D
class Standardizer(Model):
def __init__(self, dim, eps=1e-6, init_count=0, init_mean=0., init_meansq=1.):
'''
Args:
dim: dimension of the space of points to be standardized
eps: small constant to add to denominators to prevent division by 0
init_count, init_mean, init_meansq: initial values for accumulators
Note:
if init_count is 0, then init_mean and init_meansq have no effect beyond
the first call to update(), which will ignore their values and
replace them with values from a new batch of data.
'''
self._eps = eps
self._dim = dim
with variable_scope(type(self).__name__) as self.__varscope:
self._count = get_variable('count', np.array(float(init_count)), trainable=False)
self._mean_1_D = get_variable('mean_1_D', np.full((1, self._dim), init_mean), broadcastable=(True,False), trainable=False)
self._meansq_1_D = get_variable('meansq_1_D', np.full((1, self._dim), init_meansq), broadcastable=(True,False), trainable=False)
self._stdev_1_D = tensor.sqrt(tensor.nnet.relu(self._meansq_1_D - tensor.square(self._mean_1_D)))
# Relu ensures inside is nonnegative. maybe the better choice would have been to
# add self._eps inside the square root, but I'm keeping things this way to preserve
# backwards compatibility with existing saved models.
self.get_mean = self._mean_1_D.get_value
self.get_stdev = theano.function([], self._stdev_1_D[0,:]) # TODO: return with shape (1,D)
@property
def varscope(self): return self.__varscope
def update(self, points_N_D):
assert points_N_D.ndim == 2 and points_N_D.shape[1] == self._dim
num = points_N_D.shape[0]
count = float(self._count.get_value())
a = count/(count+num)
self._mean_1_D.set_value(a*self._mean_1_D.get_value() + (1.-a)*points_N_D.mean(axis=0, keepdims=True))
self._meansq_1_D.set_value(a*self._meansq_1_D.get_value() + (1.-a)*(points_N_D**2).mean(axis=0, keepdims=True))
self._count.set_value(count + num)
def standardize_expr(self, x_B_D):
return (x_B_D - self._mean_1_D) / (self._stdev_1_D + self._eps)
def unstandardize_expr(self, y_B_D):
return y_B_D*(self._stdev_1_D + self._eps) + self._mean_1_D
def standardize(self, x_B_D):
assert x_B_D.ndim == 2
return (x_B_D - self.get_mean()) / (self.get_stdev() + self._eps)
def unstandardize(self, y_B_D):
assert y_B_D.ndim == 2
return y_B_D*(self.get_stdev() + self._eps) + self.get_mean()
def test_standardizer():
D = 10
s = Standardizer(D, eps=0)
x_N_D = np.random.randn(200, D)
s.update(x_N_D)
x2_N_D = np.random.randn(300, D)
s.update(x2_N_D)
allx = np.concatenate([x_N_D, x2_N_D], axis=0)
assert np.allclose(s._mean_1_D.get_value()[0,:], allx.mean(axis=0))
assert np.allclose(s.get_stdev(), allx.std(axis=0))
print 'ok'
if __name__ == '__main__':
test_standardizer()
|
[
"tzs930@gmail.com"
] |
tzs930@gmail.com
|
7c394d68658896ec9290dcefbe2b972c0e4a4ac0
|
93d8aec11b078ec8fb6c0465a6b351118855e92c
|
/customers/__init__.py
|
b35f5072c2b4723ce5738e7bc3d78e378adb5555
|
[] |
no_license
|
Knevels1/Kennels-python-server
|
a17a5e4b3f93b36d78059792613da3d61df818f4
|
658af3865a72dd7751ae9582823bbc24e9d845bf
|
refs/heads/main
| 2023-02-23T19:58:46.563503
| 2021-01-26T11:53:49
| 2021-01-26T11:53:49
| 329,032,602
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 83
|
py
|
from .request import get_all_customers, get_single_customer, get_customers_by_email
|
[
"kyle.a.nevels@icloud.com"
] |
kyle.a.nevels@icloud.com
|
e771e33ed425370e65d67ee7532a3bfb37c7fbd1
|
ab621c65fc91f5194c4032d68e750efaa5f85682
|
/pabi_account_report/reports/jasper_report_cd_receivable_follow_up.py
|
fb689df765d1abcd09ee3ddafcbb3e456b13c360
|
[] |
no_license
|
pabi2/pb2_addons
|
a1ca010002849b125dd89bd3d60a54cd9b9cdeef
|
e8c21082c187f4639373b29a7a0905d069d770f2
|
refs/heads/master
| 2021-06-04T19:38:53.048882
| 2020-11-25T03:18:24
| 2020-11-25T03:18:24
| 95,765,121
| 6
| 15
| null | 2022-10-06T04:28:27
| 2017-06-29T10:08:49
|
Python
|
UTF-8
|
Python
| false
| false
| 3,158
|
py
|
# -*- coding: utf-8 -*-
from openerp import models, fields, api
from datetime import datetime
import time
class JasperReportCDReceivableFollowUp(models.TransientModel):
_name = 'jasper.report.cd.receivable.follow.up'
_inherit = 'report.account.common'
filter = fields.Selection(
readonly=True,
default='filter_date',
)
date_report = fields.Date(
string='Report Date',
required=True,
default=lambda self: fields.Date.context_today(self),
)
groupby = fields.Selection(
[('groupby_borrower_partner', 'Customer CD'),
('groupby_partner', 'Customer (bank)')],
string='Group By',
default='groupby_borrower_partner',
required=True,
)
borrower_partner_ids = fields.Many2many(
'res.partner',
'receivable_follow_borrower_partner_rel',
'follow_id', 'partner_id',
string='Customer CD',
domain=[('customer', '=', True)],
)
partner_ids = fields.Many2many(
'res.partner',
'receivable_follow_partner_rel',
'follow_id', 'partner_id',
string='Customer (bank)',
domain=[('customer', '=', True)],
)
@api.onchange('groupby')
def _onchange_groupby(self):
self.borrower_partner_ids = False
self.partner_ids = False
@api.multi
def _get_report_name(self):
self.ensure_one()
report_name = "cd_receivable_follow_up_group_by_customer"
if self.groupby == 'groupby_partner':
report_name = "cd_receivable_follow_up_group_by_bank"
return report_name
@api.multi
def _get_domain(self):
self.ensure_one()
dom = [('loan_agreement_id.state', 'in', ('bank_paid', 'done')),
('loan_agreement_id.sale_id.state', 'in', ('progress', 'done')),
('invoice_plan_id.ref_invoice_id.date_paid', '=', False),
('invoice_plan_id.ref_invoice_id.cancel_move_id', '=', False),
('invoice_plan_id.ref_invoice_id.date_due', '<=',
self.date_report)]
if self.borrower_partner_ids:
dom += [('loan_agreement_id.borrower_partner_id', 'in',
self.borrower_partner_ids.ids)]
if self.partner_ids:
dom += [('loan_agreement_id.partner_id', 'in',
self.partner_ids.ids)]
return dom
@api.multi
def _get_datas(self):
self.ensure_one()
data = {'parameters': {}}
dom = self._get_domain()
data['ids'] = \
self.env['pabi.common.loan.agreement.report.view'].search(dom).ids
date_report = datetime.strptime(self.date_report, '%Y-%m-%d')
data['parameters']['date_report'] = date_report.strftime('%d/%m/%Y')
data['parameters']['user'] = self.env.user.display_name
data['parameters']['date_run'] = time.strftime('%d/%m/%Y')
return data
@api.multi
def run_report(self):
self.ensure_one()
return {
'type': 'ir.actions.report.xml',
'report_name': self._get_report_name(),
'datas': self._get_datas(),
}
|
[
"tharathip.chaweewongphan@gmail.com"
] |
tharathip.chaweewongphan@gmail.com
|
500bec668d0497d0f70c937059f1fe7ef4e296ce
|
766896c67dc8099523a1895b884c020645a0c081
|
/6-8.py
|
1250e54666bcca42d87c9d9d4a0870ffa33fb4b6
|
[] |
no_license
|
ebertx/thinking-python
|
cf9daa02bc5106f872ec8c4f083d6a2d32fcb778
|
bb08f58806e26a14d3420454f7b7271a8de85c28
|
refs/heads/master
| 2016-09-06T05:30:23.659158
| 2015-02-27T07:51:10
| 2015-02-27T07:51:10
| 29,249,177
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 91
|
py
|
def gcd(a, b):
if b == 0:
return a
return gcd(b, a % b)
print gcd(18, 25)
|
[
"ebertx@gmail.com"
] |
ebertx@gmail.com
|
81473f6d78e0e83898ca6d588502428d6b6fc236
|
4e9c7fb648bbe4b15edeef4e9ea07cd47d10d6b7
|
/primenum.py
|
a537646041bca23d8e404115cf546f60e79fd46c
|
[] |
no_license
|
SowndaryaDhanasekaran/Set-2
|
eb79a67dbe8f729f51e3eb089eccac63b5cbdde0
|
4292b85c26d5e62e6b5b14cec1b1850635649def
|
refs/heads/master
| 2020-03-22T21:36:15.127187
| 2018-07-12T11:23:38
| 2018-07-12T11:23:38
| 140,699,860
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 192
|
py
|
# Read a number (expected to be at most 1000) and print "yes" if it is prime, otherwise "no".
number=int(input())
count=0
if(number<=1000):
for i in range(2,number):
if(number%i==0):
count=count+1
if(count==0 and number>1):
print("yes")
else:
print("no")
else:
print("no")
|
[
"noreply@github.com"
] |
SowndaryaDhanasekaran.noreply@github.com
|
db9cb26e608d2d2e03001dcabc64eed4c52063ec
|
5b641d7b69ba0d43bd224d19831ab67a5b101582
|
/talks/julyPlenary/comparison.py
|
4fd56eff5230a4d8f0c18fd7d54cc32193bdc749
|
[] |
no_license
|
jpivarski-talks/1999-2006_gradschool-2
|
9d7a4e91bbde6dc7f472ea21253b32a0eca104e4
|
ca889b4d09814a226513e39625ae2f140c97b5d5
|
refs/heads/master
| 2022-11-19T11:37:55.623868
| 2020-07-25T01:19:32
| 2020-07-25T01:19:32
| 282,235,528
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 210
|
py
|
from math import *
import biggles, Numeric
scoarse1S = (1.43, 0.07)
coarse1S = (1.32, 0.01)
fine1S = (1.28, 0.01)
scoarse2S = (1.2, 0.1)
coarse2S = (0.77, 0.05)
fine2S = (0.67, 0.03)
res = [0.2, 0.4, 0.75]
|
[
"jpivarski@gmail.com"
] |
jpivarski@gmail.com
|
7d5721bd1de2add787d846527418b63ef851cb95
|
adddf15b6ad343b2dc4e835fb913f56ebf81d7dd
|
/example/mnist_train.py
|
05d9a524d8511b8024dec6fcdba973b04c92b5a5
|
[] |
no_license
|
antinucleon/TVMFlow
|
79edb55a6884115965b4e14a74362dca77132b12
|
eabbc9aa08bf38b87453bf12ceebd7c98a0513a0
|
refs/heads/master
| 2021-06-30T00:23:23.693293
| 2017-09-19T09:29:03
| 2017-09-19T09:29:03
| 102,164,065
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,170
|
py
|
"""Tinyflow example code.
This code is adapted from Tensorflow's MNIST Tutorial with minimum code changes.
"""
import tvmflow as tf
from tvmflow.datasets import get_mnist
# Create the model
x = tf.placeholder(tf.float32, [None, 784], name="x")
fc1 = tf.nn.fully_connected(x, 32, 784, "fc1")
act1 = tf.nn.relu(fc1, "relu1")
fc2 = tf.nn.fully_connected(x, 10, 784, "fc2")
y = tf.nn.softmax(fc2, name="sm")
# Define loss and optimizer
y_ = tf.placeholder(tf.float32, [None, 10])
loss = tf.nn.logloss(y, y_)
train_step = tf.train.GradientDescentOptimizer(1e-1).minimize(loss)
sess = tf.Session()
sess.run(tf.initialize_all_variables())
# get the mnist dataset
mnist = get_mnist(flatten=True, onehot=True)
for i in range(10000):
batch_xs, batch_ys = mnist.train.next_batch(100)
out = tf.Group([train_step, loss])
ce = sess.run(out, feed_dict={x: batch_xs, y_: batch_ys})[-1]
if i % 100 == 0:
print ce
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
accuracy = tf.reduce_mean(correct_prediction, keepdims=True)
print(sess.run(
accuracy, feed_dict={x: mnist.test.images,
y_: mnist.test.labels}))
|
[
"bingxu@apple.com"
] |
bingxu@apple.com
|
a595129b4b0e3c4a78b7775a7c210e32ce11707b
|
fa6a98c44a46c9880711c1b2fba473fd223ee479
|
/fab_series.py
|
1e05315591c020ff00ccd562c44558e136a8d2c7
|
[] |
no_license
|
RajeevSharma110/HashCode-jr-Explorer-Age
|
340796190abfb73380a1a68169f3dc22574956e1
|
86d9f0b052fbf866bdce999fb9014ebbbfa2f70e
|
refs/heads/main
| 2023-08-07T19:28:32.521550
| 2021-10-01T12:00:49
| 2021-10-01T12:00:49
| 408,006,264
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 539
|
py
|
noofterms = int(input("How many terms you want? "))
# first two terms
n1 = 0
n2 = 1
count = 2
# check if the number of terms is valid
if noofterms <= 0:
print("Plese enter a positive integer")
elif noofterms == 1:
print("Fibonacci sequence:")
print(n1)
else:
print("Fibonacci sequence:")
print(n1,",",n2,end=', ')
while count < noofterms:
nth = n1 + n2
print(nth,end=' , ')
# update values
n1 = n2
n2 = nth
count += 1
|
[
"noreply@github.com"
] |
RajeevSharma110.noreply@github.com
|
e3d631d2ed4799f063e63482720856bba00f372b
|
b55c22ae8ad2317e09b3c75fa5e57963cfa9e20e
|
/server1.py
|
1a5647124189a0964a2c572afd7328c9e23b412a
|
[] |
no_license
|
AstraLee/finalproject_industry4
|
0180e0f58283aa63afb7fa2a5c8681b1eafd6596
|
1ca0846ee0474aa0ac363475f0f36255b7cb3ad7
|
refs/heads/master
| 2020-06-02T11:08:11.936281
| 2019-06-27T04:20:52
| 2019-06-27T04:20:52
| 191,135,732
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,249
|
py
|
import socket
import time
import pymongo
from bson import ObjectId
import pprint
def main():
# Connect to MongoDB instance
connection = pymongo.MongoClient("mongodb://localhost:27017")
database = connection['my_database']
collection = database['my_collection']
encoder = database.encoder_values
print('Total Record for the collection: ' + str(encoder.count()))
results = encoder.find()
def insert_data(data):
"""
Insert new data or document in collection
:param data:
:return:
"""
document = collection.insert_one(data)
return document.inserted_id
def update_or_create(document_id, data):
"""
This will create new document in collection
IF same document ID exist then update the data
:param document_id:
:param data:
:return:
"""
# TO AVOID DUPLICATES - THIS WILL CREATE NEW DOCUMENT IF SAME ID NOT EXIST
document = collection.update_one({'_id': ObjectId(document_id)}, {"$set": data}, upsert=True)
return document.acknowledged
def get_single_data(document_id):
"""
get document data by document ID
:param document_id:
:return:
"""
data = collection.find_one({'_id': ObjectId(document_id)})
return data
def get_multiple_data():
"""
get document data by document ID
:return:
"""
data = collection.find()
return list(data)
def update_existing(document_id, data):
"""
Update existing document data by document ID
:param document_id:
:param data:
:return:
"""
document = collection.update_one({'_id': ObjectId(document_id)}, {"$set": data})
return document.acknowledged
def remove_data(document_id):
document = collection.delete_one({'_id': ObjectId(document_id)})
return document.acknowledged
def server_program():
# get the hostname
host = ''
port = 5001 # initiate port no above 1024
server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # get instance
print("Server socket created")
# look closely. The bind() function takes tuple as argument
server_socket.bind((host, port)) # bind host address and port together
print("Server socket bound with host {} port {}".format(host, port))
print('Listening...')
# configure how many client the server can listen simultaneously
server_socket.listen(5) #for eg. 5
conn, address = server_socket.accept() # accept new connection
count = 0
print("Connection from: " + str(address))
while True:
count = count + 1
print("Accepted {} connections so far".format(count))
# receive data stream. it won't accept data packet greater than 1024 bytes
data = conn.recv(1024).decode()
currentTime = time.ctime(time.time()) + "\r\n"
conn.send(currentTime.encode('ascii'))
if not data:
# if data is not received break
break
print("from connected user: \'" + str(data) + '\' ' + str(currentTime))
data = input(' -> ')
conn.send(data.encode()) # send data to the client
conn.close() # close the connection
if __name__ == '__main__':
server_program()
#main()
## dataset (ITERATIVELY)
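# Editorial sketch (not from the original repo): a minimal client counterpart to
# server_program() above. Host and port mirror the server's bind() call; note the
# server replies twice per message (a timestamp, then the operator-typed reply),
# and TCP may coalesce those into a single recv().
def client_program():
    client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    client_socket.connect(('127.0.0.1', 5001))  # connect to the port the server binds
    message = input(' -> ')
    while message.strip().lower() != 'bye':
        client_socket.send(message.encode())        # send a line to the server
        reply = client_socket.recv(1024).decode()   # read the server's reply
        print('Received from server: ' + reply)
        message = input(' -> ')
    client_socket.close()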
|
[
"astra_1995@hotmail.com"
] |
astra_1995@hotmail.com
|
b97e09bc6e666dccaa4b793e0fb9ffcfc4ed81df
|
c8b2e8ab65e82ed4a0a245fd238e98f51aa2d0ac
|
/SylNet_model_tf2.py
|
c7fceb76994ff7efb3ea8f263e8d175c586fc991
|
[
"MIT"
] |
permissive
|
shreyas253/SylNet
|
da80bb67b78afb4b65d4ed325d5be4dc15b5e222
|
29eb34d614907073735f9c2d8c31da22c1be2282
|
refs/heads/master
| 2023-06-09T02:43:52.786558
| 2023-05-25T06:59:19
| 2023-05-25T06:59:19
| 168,553,422
| 20
| 6
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,188
|
py
|
__author__ = "Lauri Juvela, lauri.juvela@aalto.fi; Shreyas Seshadri, shreyas.seshadri@aalto.fi"
import os
import sys
import math
import numpy as np
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()
tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)
_FLOATX = tf.float32
def get_weight_variable(name, shape=None, initializer=tf.keras.initializers.glorot_normal()):
if shape is None:
return tf.get_variable(name)
else:
return tf.get_variable(name, shape=shape, dtype=_FLOATX, initializer=initializer)
def get_bias_variable(name, shape=None, initializer=tf.constant_initializer(value=0.0)):
if shape is None:
return tf.get_variable(name)
else:
return tf.get_variable(name, shape=shape, dtype=_FLOATX, initializer=initializer)
class CNET():
def __init__(self,
name,
endList,
seqLen,
isTrain=True,
DRrate=0.3,
residual_channels=64,
filter_width=3,
dilations=[1, 2, 4, 8, 1, 2, 4, 8],
input_channels=123,
output_channels=1,
cond_dim = None,
cond_channels = 64,
postnet_channels=256,
do_postproc = True,
do_GLU = True):
self.input_channels = input_channels
self.output_channels = output_channels
self.filter_width = filter_width
self.dilations = dilations
self.residual_channels = residual_channels
self.postnet_channels = postnet_channels
self.do_postproc = do_postproc
self.do_GLU = do_GLU
self.seqLen = seqLen
self.endList = endList
self.isTrain = isTrain
self.DRrate = DRrate
if cond_dim is not None:
self._use_cond = True
self.cond_dim = cond_dim
self.cond_channels = cond_channels
else:
self._use_cond = False
self._name = name
def get_variable_list(self):
return tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope=self._name)
def get_variable_list_post(self):
return tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope='S/postproc_module')
def _causal_conv(self,X, W, filter_width):
"""
Assume X input shape (batch, timesteps, channels)
"""
pad = (filter_width - 1)
# pad to causal
Y = tf.pad(X, paddings=[[0,0], [pad,0], [0,0]]) # left pad to time-axis
Y = tf.nn.convolution(Y, W, padding="VALID")
return Y
def _input_layer(self, main_input):
fw = self.filter_width
r = self.residual_channels
i = self.input_channels
with tf.variable_scope('input_layer'):
W = get_weight_variable('W', (fw, i, 2*r))
b = get_bias_variable('b', (2*r))
X = main_input
Y = tf.nn.convolution(X, W, padding='SAME')
#Y = self._causal_conv(X, W, fw)
Y += b
Y = tf.tanh(Y[:, :, :r])*tf.sigmoid(Y[:, :, r:])
Y = tf.layers.dropout(Y,rate=self.DRrate ,training=self.isTrain)
return Y
def _embed_cond(self, cond_input):
cd = self.cond_dim
c = self.cond_channels
with tf.variable_scope('embed_cond'):
W = get_weight_variable('W',(1, cd, c))
b = get_bias_variable('b',(c))
Y = tf.nn.convolution(cond_input, W, padding='SAME') # 1x1 convolution
Y += b
return tf.tanh(Y)
def _conv_module(self, main_input, residual_input, module_idx, dilation, cond_input=None):
fw = self.filter_width
r = self.residual_channels
s = self.postnet_channels
with tf.variable_scope('conv_modules'):
with tf.variable_scope('module{}'.format(module_idx)):
W = get_weight_variable('filter_gate_W',(fw, r, 2*r))
b = get_bias_variable('filter_gate_b',(2*r))
X = main_input
Y = tf.nn.convolution(X, W, padding='SAME', dilation_rate=[dilation])
#Y = self._causal_conv(X, W, fw)
Y += b
if self._use_cond:
c = self.cond_channels
V_cond = get_weight_variable('cond_filter_gate_W',(1, c, 2*r))
b_cond = get_bias_variable('cond_filter_gate_b',(2*r))
C = tf.nn.convolution(cond_input, V_cond, padding='SAME') # 1x1 convolution
#C = self._causal_conv(cond_input, V_cond, 1)
C += b_cond
C = tf.tanh(C)
Y += C
# filter and gate
Y = tf.tanh(Y[:, :, :r])*tf.sigmoid(Y[:, :, r:])
Y = tf.layers.dropout(Y,rate=self.DRrate ,training=self.isTrain)
# add residual channel
if self.do_postproc:
W_s = get_weight_variable('skip_gate_W',(fw, r, s))
b_s = get_weight_variable('skip_gate_b',s)
skip_out = tf.nn.convolution(Y, W_s, padding='SAME')
#skip_out = self._causal_conv(Y, W_s, fw)
skip_out += b_s
else:
skip_out = []
if self.do_GLU:
W_p = get_weight_variable('post_filter_gate_W',(1, r, r))
b_p = get_weight_variable('post_filter_gate_b',r)
Y = tf.nn.convolution(Y, W_p, padding='SAME')
#Y = self._causal_conv(Y, W_p, 1)
Y += b_p
Y += X
return Y, skip_out
def _postproc_module(self, residual_module_outputs):
fw = self.filter_width
r = self.residual_channels
s = self.postnet_channels
op = self.output_channels
with tf.variable_scope('postproc_module'):
W1 = get_weight_variable('W1',(fw, r, s))
b1 = get_bias_variable('b1',s)
W2 = get_weight_variable('W2', (s, op))
b2 = get_bias_variable('b2',op)
num_units = self.postnet_channels
# sum of residual module outputs
X = tf.zeros_like(residual_module_outputs[0])
for R in residual_module_outputs:
X += R
Y = tf.nn.convolution(X, W1, padding='SAME')
#Y = self._causal_conv(X, W1, fw)
Y += b1
            Y = tf.nn.relu(Y)  # nonlinearity on the convolved features (mirrors _last_layer)
Y = tf.layers.dropout(Y,rate=self.DRrate ,training=self.isTrain)
lstm_layer=tf.nn.rnn_cell.LSTMCell(num_units, reuse=tf.AUTO_REUSE, name='lstm_layer')
outputsM,_=tf.nn.dynamic_rnn(lstm_layer,Y,sequence_length=self.seqLen,dtype="float32")
outputs = tf.gather_nd(outputsM,self.endList)
prediction=tf.matmul(outputs,W2)+b2
return prediction
def _last_layer(self, last_layer_ip):
fw = self.filter_width
r = self.residual_channels
s = self.postnet_channels
with tf.variable_scope('last_layer'):
W1 = get_weight_variable('W1',(fw, r, s))
b1 = get_bias_variable('b1',s)
W2 = get_weight_variable('W2', (s, 1))
b2 = get_bias_variable('b2',1)
num_units = self.postnet_channels
# sum of residual module outputs
X = last_layer_ip
Y = tf.nn.convolution(X, W1, padding='SAME')
Y += b1
Y = tf.nn.relu(Y)
lstm_layer=tf.nn.rnn_cell.LSTMCell(num_units, reuse=tf.AUTO_REUSE, name='lstm_layer')
outputsM,_=tf.nn.dynamic_rnn(lstm_layer,Y,sequence_length=self.seqLen,dtype="float32")
outputs = tf.gather_nd(outputsM,self.endList)
prediction=tf.matmul(outputs,W2)+b2
predictionALL = 1#tf.matmul(outputsM,W2)+b2
return prediction,predictionALL
def forward_pass(self, X_input, cond_input=None):
skip_outputs = []
with tf.variable_scope(self._name, reuse=tf.AUTO_REUSE):
if self._use_cond:
C = self._embed_cond(cond_input)
else:
C = None
R = self._input_layer(X_input)
X = R
for i, dilation in enumerate(self.dilations):
X, skip = self._conv_module(X, R, i, dilation, cond_input=C)
skip_outputs.append(skip)
if self.do_postproc:
                Y = self._postproc_module(skip_outputs)
else:
Y = self._last_layer(X)
return Y
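# Editorial usage sketch (not part of the original file): builds the graph for a tiny
# batch and runs one forward pass. All shapes and hyper-parameters below are
# illustrative assumptions; endList holds (batch_index, last_valid_frame) pairs for
# the tf.gather_nd call in _postproc_module.
if __name__ == '__main__':
    batch, timesteps, feat_dim, n_classes = 4, 50, 123, 10
    X_in = tf.placeholder(_FLOATX, [batch, timesteps, feat_dim])
    seq_len = tf.placeholder(tf.int32, [batch])
    end_idx = tf.placeholder(tf.int32, [batch, 2])
    net = CNET('S', endList=end_idx, seqLen=seq_len,
               input_channels=feat_dim, output_channels=n_classes)
    pred = net.forward_pass(X_in)
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        out = sess.run(pred, feed_dict={
            X_in: np.random.randn(batch, timesteps, feat_dim).astype(np.float32),
            seq_len: np.full(batch, timesteps, dtype=np.int32),
            end_idx: np.stack([np.arange(batch),
                               np.full(batch, timesteps - 1)], axis=1),
        })
        print(out.shape)  # expected: (batch, n_classes)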
|
[
"okko.rasanen@tuni.fi"
] |
okko.rasanen@tuni.fi
|
34c94c676890ed347e27f25fe7cdd3a283af8b78
|
d71884281ed47f0f16aae5ad562f6b2ce5b24018
|
/movies/models.py
|
d2075881f17128b2aff0c45614963a16f81f9c43
|
[] |
no_license
|
Boyko03/CinemaReservation
|
5ac5e404a272e2515b291620a159755df98f2e70
|
5892eeecf1edd22f61c22de7dfbce22c3ab5a2df
|
refs/heads/master
| 2022-07-31T14:54:17.441699
| 2020-05-18T16:30:08
| 2020-05-18T16:30:08
| 261,232,968
| 0
| 0
| null | 2020-05-13T16:20:35
| 2020-05-04T16:08:10
|
Python
|
UTF-8
|
Python
| false
| false
| 325
|
py
|
from sqlalchemy import Column, Integer, String, Float
from db import Base
class Movies(Base):
__tablename__ = "Movies"
id = Column(Integer, primary_key=True)
name = Column(String)
rating = Column(Float)
def __str__(self):
return "[ {} ] - {} - {}".format(Movies.id, Movies.name, Movies.rating)
|
[
"krastanova98@gmail.com"
] |
krastanova98@gmail.com
|
98125cf0dcf28667282bed9339013daf93de5a2c
|
de24f83a5e3768a2638ebcf13cbe717e75740168
|
/moodledata/vpl_data/64/usersdata/161/30070/submittedfiles/atm.py
|
a4945e6d1dbd957e26b68dd8e72ebecc61734ea0
|
[] |
no_license
|
rafaelperazzo/programacao-web
|
95643423a35c44613b0f64bed05bd34780fe2436
|
170dd5440afb9ee68a973f3de13a99aa4c735d79
|
refs/heads/master
| 2021-01-12T14:06:25.773146
| 2017-12-22T16:05:45
| 2017-12-22T16:05:45
| 69,566,344
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,119
|
py
|
# -*- coding: utf-8 -*-
from __future__ import division
import math
# START YOUR CODE HERE
valor=int(input('Informe o valor em dinheiro: '))
if (valor%20)==0:
print(valor//20)
print('%d'%0)
print('%d'%0)
print('%d'%0)
print('%d'%0)
if (valor%20)!=0 and (valor%10==0):
print(valor//20)
print((valor-((valor//20)*20))//10)
print('%d'%0)
print('%d'%0)
print('%d'%0)
if (valor%20)!=0 and (valor%10)!=0 and (valor%5)==0:
print(valor//20)
print((valor-((valor//20)*20))//10)
print((valor-((valor//10)*10))//5)
print('%d'%0)
print('%d'%0)
if (valor%20)!=0 and (valor%10)!=0 and (valor%5)!=0 and (valor%2)==0:
print(valor//20)
print((valor-((valor//20)*20))//10)
print((valor-((valor//10)*10))//5)
    print((valor-((valor//5)*5))//2)
print('%d'%0)
if (valor%20)!=0 and (valor%10)!=0 and (valor%5)!=0 and (valor%2)!=0:
print(valor//20)
print((valor-((valor//20)*20))//10)
print((valor-((valor//10)*10))//5)
print((valor-((valor//5)*5))//2)
    print((valor-((valor//5)*5))%2)
|
[
"rafael.mota@ufca.edu.br"
] |
rafael.mota@ufca.edu.br
|
8d1672c74d7f7273373662eb84061813c809a934
|
b6567dce0fc91b8aec053e5bb999f2fa0b93e09c
|
/reparacao/calc_rules.py
|
51ccacb22872533eb15a2fde92212ef2bbae87a4
|
[] |
no_license
|
gates17/repairShopBE
|
5ceef9ab2239662ddeb72c49b230ec1eb88957e4
|
15ae1cdf4b943f13b8f343f27407c7e26f972406
|
refs/heads/master
| 2020-03-29T00:39:16.108867
| 2019-02-03T22:48:14
| 2019-02-03T22:48:14
| 149,349,443
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 732
|
py
|
from decimal import Decimal
def calc_total(rep):
tax = float(rep['tax'])
tax = Decimal(tax)
total_to_pay = Decimal(rep["price"]) * int(rep["units"]) * int(rep["quantity"])
if rep["discount"] and Decimal(rep["discount"]) != Decimal(0):
total_to_pay = total_to_pay * Decimal((100 - Decimal(rep["discount"])) / 100)
    total_to_pay_with_tax = total_to_pay * Decimal((tax + 100) / 100)
    tax_to_pay = total_to_pay_with_tax - total_to_pay
    total_to_pay = round(total_to_pay, 2)
    total_to_pay_with_tax = round(total_to_pay_with_tax, 2)
    tax_to_pay = round(tax_to_pay, 2)
    total_dict = {'total_to_pay': total_to_pay, 'total_to_pay_with_tax': total_to_pay_with_tax, 'tax_to_pay': tax_to_pay}
return total_dict
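if __name__ == '__main__':
    # Editorial example (made-up values; field names follow the dict keys used above).
    sample = {'price': '10.00', 'units': 2, 'quantity': 3, 'tax': '23', 'discount': '10'}
    print(calc_total(sample))
    # expected roughly: total_to_pay 54.00, total_to_pay_with_tax 66.42, tax_to_pay 12.42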
|
[
"alexandre.lecom@gmail.com"
] |
alexandre.lecom@gmail.com
|
bc240fd6f86c1db725b7e6bf82f96c38f4ab8c96
|
3cef8c7f23950a88cd0c20e2f678432aab18378a
|
/python_exercise/ex_036.py
|
35642ea7c79d43513bbe40d839b16ced43006dec
|
[] |
no_license
|
sunjoopark/python-algorithm
|
577be9a085956305397382f8cda44c06c71464ae
|
7ce543c261059116ef2a61abcaede2d7955fa4f2
|
refs/heads/master
| 2020-03-31T17:42:28.636092
| 2018-10-10T14:03:32
| 2018-10-10T14:03:32
| 152,431,788
| 2
| 1
| null | 2019-01-21T04:33:22
| 2018-10-10T13:53:32
|
Python
|
UTF-8
|
Python
| false
| false
| 489
|
py
|
cnt = 0
checked = [[[0 for i in range(21)] for j in range(21)] for k in range(21)]
def solve(n, a, b, c):
global cnt
if a + b + c == n :
if a <= b and b <= c and a + b > c and checked[a][b][c] == 0:
cnt = cnt + 1
checked[a][b][c] = 1
return
solve(n, a + 1, b, c)
solve(n, a, b + 1, c)
solve(n, a, b, c + 1)
if __name__ == "__main__":
n = 10
solve(n, 1, 1, 1)
print("만들 수 있는 삼각형의 갯수 : %d"%(cnt))
|
[
"iyeo@colubrismx.priv"
] |
iyeo@colubrismx.priv
|
291b35b66d83c24702b346d9db3fc6d1cd80d4de
|
e21837ee462fb31a088bd903ecdbb96631020d0a
|
/Arcade/Intro/Rains of reason/variableName.py
|
5ede1dc9bc1864e2ee3c1695fd7d3ee1e9b6c4a0
|
[] |
no_license
|
delta94/Code_signal-
|
7965ee96a858425c65c7a51a47833d80c8e6d8d3
|
1383a528b4353b7b8db4a6634ea0caa2b5895f9d
|
refs/heads/master
| 2020-07-31T05:38:22.158742
| 2019-04-12T18:46:16
| 2019-04-12T18:46:16
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 663
|
py
|
""""
Correct variable names consist only of English letters, digits and underscores and they can't start with a digit.
Check if the given string is a correct variable name.
Example
For name = "var_1__Int", the output should be
variableName(name) = true;
For name = "qq-q", the output should be
variableName(name) = false;
For name = "2w2", the output should be
variableName(name) = false.
""""
def variableName(name):
    if ord(name[0]) in range(48, 58):
return False
for r in name:
        if not ((ord(r) in range(97, 123)) or r == '_' or (ord(r) in range(48, 58)) or ord(r) in range(65, 91)):
return False
return True
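# Editorial quick checks mirroring the docstring examples (expected: True, False, False):
if __name__ == '__main__':
    print(variableName("var_1__Int"))
    print(variableName("qq-q"))
    print(variableName("2w2"))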
|
[
"noreply@github.com"
] |
delta94.noreply@github.com
|
a4af59c14926be97205727c0741b5416537d37cc
|
c3ad65eb01dd1a51fdc918ee088f2f763d467740
|
/pythonCgi/pattern_handler.cgi.beta
|
76242ddf57f3fb897b0bafc0aa6f3f8acf6415a0
|
[] |
no_license
|
pking74/Dumpling
|
550b417fa1f02b202cb2c39e303149f8795de9cb
|
64ea5a72f7679fcb03ee9541a07ace2b73045cb1
|
refs/heads/master
| 2021-01-18T12:17:12.548337
| 2015-09-15T14:12:11
| 2015-09-15T14:12:11
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,374
|
beta
|
#!/usr/bin/python
import cgi
import json
category = ['used', 'designed', 'equivalent',u'\u539f']
def main():
form = cgi.FieldStorage()
drop_str = form.getvalue('drop_str').lower()
all = []
for cat in category:
if cat == u'\u539f':
fh = open('./category_data/chn','r')
else:
fh = open('./category_data/'+cat,'r')
jdata = json.loads(fh.read())
fh.close()
try:
if cat in drop_str:
format_json = {"responseHeader": {"status":0,"QTime":0,"params":{"q":"*:%s"%(cat+' for'), "indent":"true","wt":"json"}},"response":{"numFound": len(jdata), "start":0, "docs": jdata}}
print ('Content-Type: text/plain\r\n')
print json.dumps(format_json)
return
except:
            if cat in unicode(drop_str.decode('utf-8')):
                format_json = {"responseHeader": {"status":0,"QTime":0,"params":{"q":"*:%s"%(cat+' for'), "indent":"true","wt":"json"}},"response":{"numFound": len(jdata), "start":0, "docs": jdata}}
                print ('Content-Type: text/plain\r\n')
                print json.dumps(format_json)
                return
all = all + jdata
format_json = {"responseHeader": {"status":0,"QTime":0,"params":{"q":"*:*", "indent":"true","wt":"json"}},"response":{"numFound": len(jdata), "start":0, "docs": all}}
print ('Content-Type: text/plain\r\n')
print json.dumps(format_json)
if __name__ == "__main__":
main()
|
[
"jiezhou0731@gmail.com"
] |
jiezhou0731@gmail.com
|
8096fb7e71797b826c3e0a8c6051d98dbdf27df1
|
921ee6a3c41dd43ffd4f5099bd5526d008e1a768
|
/leetcode/027_RemoveElement.py
|
b81d379a8348664181291537c721cd648b5c49a2
|
[] |
no_license
|
dionysus/coding_challenge
|
12f70eb7e5065375e156669d89e5a727345dbbef
|
e6ba73b70cf5d1dc66e464376bf72699415b76ec
|
refs/heads/master
| 2023-07-13T13:09:43.523606
| 2021-08-18T23:52:54
| 2021-08-18T23:52:54
| 297,789,968
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 757
|
py
|
from typing import List
def removeElement(nums: List[int], val: int) -> int:
"""
Given an array nums and a value val, remove all instances of that value
in-place and return the new length.
>>> nums = [3,2,2,3]
>>> val = 3
>>> removeElement(nums, val)
2
>>> nums[0]
2
>>> nums[1]
2
>>> nums = [0, 1, 2, 3]
>>> val = 4
>>> removeElement(nums, val)
4
>>> nums
[0, 1, 2, 3]
>>> nums = [3, 3, 3, 3]
>>> val = 3
>>> removeElement(nums, val)
0
"""
if nums == []:
return 0
i = 0
j = len(nums)
while i < j:
if nums[i] == val:
j -= 1
nums[i], nums[j] = nums[j], nums[i]
else:
i += 1
return i
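# Editorial addition (not in the original file): run the doctests above directly.
if __name__ == "__main__":
    import doctest
    doctest.testmod()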
|
[
"dionysus.cho@gmail.com"
] |
dionysus.cho@gmail.com
|
adad125739cb72daf13347575f77b4e909d2ca3d
|
d3efc82dfa61fb82e47c82d52c838b38b076084c
|
/Autocase_Result/SJQX/YW_GPMM_QXJY_012.py
|
bf747396d21241b8715864d0b42b8e55beae4aac
|
[] |
no_license
|
nantongzyg/xtp_test
|
58ce9f328f62a3ea5904e6ed907a169ef2df9258
|
ca9ab5cee03d7a2f457a95fb0f4762013caa5f9f
|
refs/heads/master
| 2022-11-30T08:57:45.345460
| 2020-07-30T01:43:30
| 2020-07-30T01:43:30
| 280,388,441
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,080
|
py
|
#!/usr/bin/python
# -*- encoding: utf-8 -*-
import sys
sys.path.append("/home/yhl2/workspace/xtp_test/xtp/api")
from xtp_test_case import *
sys.path.append("/home/yhl2/workspace/xtp_test/service")
from ServiceConfig import *
from mainService import *
from QueryStkPriceQty import *
from log import *
sys.path.append("/home/yhl2/workspace/xtp_test/mysql")
from CaseParmInsertMysql import *
sys.path.append("/home/yhl2/workspace/xtp_test/utils")
from QueryOrderErrorMsg import queryOrderErrorMsg
class YW_GPMM_QXJY_012(xtp_test_case):
# YW_GPMM_QXJY_012
def test_YW_GPMM_QXJY_012(self):
title = 'OMS初始化—可选权限—Secuinfo表,沪A证券账号无市价委托权限--买入'
        # Define the expected values for this test case
        # Expected status: one of initial, unfilled, partially filled, fully filled,
        # partial-cancel reported, partially cancelled, reported pending cancel,
        # cancelled, rejected (invalid order), cancel rejected, internal cancel
        # xtp_ID and cancel_xtpID default to 0 and do not need to be changed
case_goal = {
'期望状态': '废单',
'errorID': 11000561,
'errorMSG': queryOrderErrorMsg(11000561),
'是否生成报单': '是',
'是否是撤废': '否',
'xtp_ID': 0,
'cancel_xtpID': 0,
}
logger.warning(title)
        # Define the order parameters ------------------------------------------
        # Parameters: ticker, market, security type, security status, trading status,
        # side (B = buy, S = sell), expected status, Api
stkparm = QueryStkPriceQty('999999', '1', '0', '2', '0', 'B', case_goal['期望状态'], Api)
        # If fetching the order parameters fails, the test case fails
if stkparm['返回结果'] is False:
rs = {
'用例测试结果': stkparm['返回结果'],
'测试错误原因': '获取下单参数失败,' + stkparm['错误原因'],
}
self.assertEqual(rs['用例测试结果'], True)
else:
wt_reqs = {
'business_type': Api.const.XTP_BUSINESS_TYPE['XTP_BUSINESS_TYPE_CASH'],
'order_client_id':2,
'market': Api.const.XTP_MARKET_TYPE['XTP_MKT_SH_A'],
'ticker': stkparm['证券代码'],
'side': Api.const.XTP_SIDE_TYPE['XTP_SIDE_BUY'],
'price_type': Api.const.XTP_PRICE_TYPE['XTP_PRICE_BEST5_OR_LIMIT'],
'price': stkparm['涨停价'],
'quantity': 200,
'position_effect': Api.const.XTP_POSITION_EFFECT_TYPE['XTP_POSITION_EFFECT_INIT']
}
ParmIni(Api, case_goal['期望状态'], wt_reqs['price_type'])
CaseParmInsertMysql(case_goal, wt_reqs)
rs = serviceTest(Api, case_goal, wt_reqs)
logger.warning('执行结果为' + str(rs['用例测试结果']) + ','
+ str(rs['用例错误源']) + ',' + str(rs['用例错误原因']))
self.assertEqual(rs['用例测试结果'], True) # 0
if __name__ == '__main__':
unittest.main()
|
[
"418033945@qq.com"
] |
418033945@qq.com
|
de9b1c33c01487e3fdabe86c0976cdb1dc6dbdc7
|
5e46a4506e002db6d62e4a6ab38c7af1fa5c0eb9
|
/hw1/code/9/code9.py
|
c55ba4a7a2877979d6f9ff8f16b1009c27d8ba6c
|
[] |
no_license
|
king1224/Cryptography-and-Network-Security
|
050ff51033290674f301d87a8ec0d8db18261f4a
|
f51b01b65a48f3a7e0d743d2d3cf2a68c1c18f52
|
refs/heads/master
| 2020-05-05T10:14:01.216079
| 2019-04-07T13:24:18
| 2019-04-07T13:24:18
| 178,863,981
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,575
|
py
|
from random import randint
import gmpy2
p = 28661294800114069007768236017771012251607018576093986823286814149509513675452275635042638987354048629725185006983949952108837417118101024196427874059746112373146674688108102191658628381225899560628677933856157056627887468689106995559937935463599189450455206845179774222254667824788120465189001600194073757297794949787319524466635098273575710447185401574795742616708210395524755264624260682423348748123914632427585203446721466593339015399125761744284777424125509546314701569108898934480431326685681803242146702497611445457941195705272186806178159360836165609438994389786824034040397877095231384671425898312053134662669
g = 16078909570239876795055844516958246040709670677352681543313753053742973386508316274779434207505711677850871497649465535051866957457021948204451138330623660110191150301811323442658421231468580615274747861693791813916691182214785963319378314164808593693096050898468910883788576053845247354173273067934871765729622501051769175928793373665854926345829773055861683607699372255679226577615328998611278891869859367786539895393361508257631990706373751978989473197793935179727162255300656316829056421905796513359716410495375718068635872275352455310154328769091838733283528171239199077479704783804081954231420368626696801127642
A = 9340452235281687649179730131347600098035863140428398900981411533633371264718145252165753061065519626540250139067066846166158109482861629930934553097060819398166910839016649649389400890564461994003517904562426085846135074299789413165048217091262887032553237951780946374567140045468137848997309879390015437159256440742904227487263694000680455249547753807999100362666118355759928723607488280501640696133337414383498319589705206603309621303727416574866970950071925883156805160340145795061042509620095769270530810442957140774320438686238478056698045187991043722493407959150698880688428379196792889590689932219828874647750
B = 26923678295279019615441397805045962794004606567356312544405381922111654086218407546817914506984351145096161124431937703671408667110573346077797775065061005655125448280034263489750362311512284835239408247678109055129780522865641310684720459200881605571205689031364461558771208172858717637782802878739859330877034107783974891564334345018138580903618219381613763392446297669870167803434140046176854035263846037467714326529961765466554973348241899513352858975305014553164351658927866277335110203514745977353702879859614780742308712391605520182472687426382896587151667314003989030444701542438790937903449260688071080288807
cipher = 21415417452862385822209271012817793150895390435770476978046862065625337202884791511920664986408841095900654738070236727797129762672224612357265828740478833337740444073740756067502998861070838401401680603766287937704416650587657439702450162322209198221779685030352473193878589584731120025567610438855058432121554123472417518418523366569057525298543223124010040598652144578966584834782274187095435034339994426555536056645750433383503505768388846624008773128977330539145239402105552071835989196181032937384073405971339677972093187563528876255816400751585513195407386698978227320390696973150369705374547095189643212682512
"""
for i in range(691829):
if pow(g, i, p)==A:
print 'true_a:', i #352497
break
print 'iter:', i
for i in range(691829):
if pow(g, i, p)==B:
print 'true_b:', i #3672
break
print 'iter:', i
"""
a = 352497
b = 3672
A = pow(g, a, p)
B = pow(g, b, p)
s = pow(A, b, p)
s = pow(B, a, p)
k_inv = gmpy2.invert(s, p)
pt = (k_inv*cipher)%p
print hex(pt)[2:].decode('hex')
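# Editorial note (not in the original file): the final print uses Python 2 syntax
# (bare print, str.decode('hex')). A rough Python 3 equivalent, assuming an even
# number of hex digits, would be:
#   print(bytes.fromhex(hex(pt)[2:]).decode())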
|
[
"a0987856762@gmail.com"
] |
a0987856762@gmail.com
|
9eae0a8d2e1e53dcc804f3cf5e9c20a9c9178eb4
|
2c2cfd8cf631bb2026b085a7491a1f9208d21b22
|
/Binary Search/Implementation.py
|
626f85aed2dae027a935504889097f8162fb570b
|
[] |
no_license
|
Dragonriser/DSA_Practice
|
58ec3dc9744d6cac8290eaa5df21674dabc1ca15
|
785d519820a729def5a0be3e574001e32badca71
|
refs/heads/master
| 2023-02-25T14:33:40.766968
| 2021-01-22T17:00:12
| 2021-01-22T17:00:12
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 626
|
py
|
#Basic implementation of the Binary Search algorithm. Finding rank / position of a number in a given sorted (ascending / descending) array.
def findRank(x, arr):
low = 0
    high = len(arr) - 1  # last valid index; using len(arr) can send mid out of range
while low <= high:
mid = (low + high) // 2
if arr[mid] == x:
return (mid + 1)
elif arr[mid] < x:
low = mid + 1
else:
high = mid - 1
return 0
n = int(input())
arr = list(map(int, input().split(" ")))
if arr[0] > arr[1]:
arr.sort() #sort to ascending when in descending order
v = int(input())
for _ in range(v):
print(findRank(int(input()), arr))
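# Editorial example of calling findRank directly (bypassing the stdin-driven demo above):
#   findRank(7, [1, 3, 5, 7, 9]) -> 4   (1-based position)
#   findRank(4, [1, 3, 5, 7, 9]) -> 0   (value not present)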
|
[
"noreply@github.com"
] |
Dragonriser.noreply@github.com
|
478ae66c65d2cff2220c211635de5552950932aa
|
6f69eee0892b761b666fdd20bab4ca7adec7d01f
|
/google_foobar/Lib/site-packages/poetry/core/semver/__init__.py
|
2cff22d6e34d3dec6026fac5578a54dd13132c6b
|
[] |
no_license
|
amane-toda/google_foobar
|
de7de5623f3ec1d45477b4971515a4b910dac043
|
6fad67620f49125539784740fb646aed412c7dea
|
refs/heads/main
| 2023-08-25T14:00:43.827537
| 2021-10-31T15:36:37
| 2021-10-31T15:36:37
| 414,150,017
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,422
|
py
|
import re
from typing import Union
from .empty_constraint import EmptyConstraint
from .exceptions import ParseConstraintError
from .patterns import BASIC_CONSTRAINT
from .patterns import CARET_CONSTRAINT
from .patterns import TILDE_CONSTRAINT
from .patterns import TILDE_PEP440_CONSTRAINT
from .patterns import X_CONSTRAINT
from .version import Version
from .version_constraint import VersionConstraint
from .version_range import VersionRange
from .version_union import VersionUnion
VersionTypes = Union[Version, VersionRange, VersionUnion, EmptyConstraint]
def parse_constraint(constraints): # type: (str) -> VersionTypes
if constraints == "*":
return VersionRange()
or_constraints = re.split(r"\s*\|\|?\s*", constraints.strip())
or_groups = []
for constraints in or_constraints:
and_constraints = re.split(
"(?<!^)(?<![~=>< ,]) *(?<!-)[, ](?!-) *(?!,|$)", constraints
)
constraint_objects = []
if len(and_constraints) > 1:
for constraint in and_constraints:
constraint_objects.append(parse_single_constraint(constraint))
else:
constraint_objects.append(parse_single_constraint(and_constraints[0]))
if len(constraint_objects) == 1:
constraint = constraint_objects[0]
else:
constraint = constraint_objects[0]
for next_constraint in constraint_objects[1:]:
constraint = constraint.intersect(next_constraint)
or_groups.append(constraint)
if len(or_groups) == 1:
return or_groups[0]
else:
return VersionUnion.of(*or_groups)
def parse_single_constraint(constraint): # type: (str) -> VersionTypes
m = re.match(r"(?i)^v?[xX*](\.[xX*])*$", constraint)
if m:
return VersionRange()
# Tilde range
m = TILDE_CONSTRAINT.match(constraint)
if m:
version = Version.parse(m.group(1))
high = version.stable.next_minor
if len(m.group(1).split(".")) == 1:
high = version.stable.next_major
return VersionRange(version, high, include_min=True)
# PEP 440 Tilde range (~=)
m = TILDE_PEP440_CONSTRAINT.match(constraint)
if m:
precision = 1
if m.group(3):
precision += 1
if m.group(4):
precision += 1
version = Version.parse(m.group(1))
if precision == 2:
high = version.stable.next_major
else:
high = version.stable.next_minor
return VersionRange(version, high, include_min=True)
# Caret range
m = CARET_CONSTRAINT.match(constraint)
if m:
version = Version.parse(m.group(1))
return VersionRange(version, version.next_breaking, include_min=True)
# X Range
m = X_CONSTRAINT.match(constraint)
if m:
op = m.group(1)
major = int(m.group(2))
minor = m.group(3)
if minor is not None:
version = Version(major, int(minor), 0)
result = VersionRange(version, version.next_minor, include_min=True)
else:
if major == 0:
result = VersionRange(max=Version(1, 0, 0))
else:
version = Version(major, 0, 0)
result = VersionRange(version, version.next_major, include_min=True)
if op == "!=":
result = VersionRange().difference(result)
return result
# Basic comparator
m = BASIC_CONSTRAINT.match(constraint)
if m:
op = m.group(1)
version = m.group(2)
if version == "dev":
version = "0.0-dev"
try:
version = Version.parse(version)
except ValueError:
raise ValueError(
"Could not parse version constraint: {}".format(constraint)
)
if op == "<":
return VersionRange(max=version)
elif op == "<=":
return VersionRange(max=version, include_max=True)
elif op == ">":
return VersionRange(min=version)
elif op == ">=":
return VersionRange(min=version, include_min=True)
elif op == "!=":
return VersionUnion(VersionRange(max=version), VersionRange(min=version))
else:
return version
raise ParseConstraintError(
"Could not parse version constraint: {}".format(constraint)
)
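# Editorial illustration (not part of poetry-core): typical inputs to parse_constraint
# and the ranges they resolve to (exact repr depends on VersionRange/VersionUnion):
#   parse_constraint("^1.2.3")              -> >=1.2.3,<2.0.0
#   parse_constraint("~1.2")                -> >=1.2,<1.3
#   parse_constraint(">=1.0,<2.0 || >=3.0") -> (>=1.0,<2.0) || >=3.0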
|
[
"toda.amane@gmail.com"
] |
toda.amane@gmail.com
|
c66bb61f9ca6339e9e882273034b25d7c9057ec9
|
4dc0e2947f23873a24e6c14ff2cc830be5eced34
|
/Interpreters/F1WAE/JITRPparser.py
|
93031b73cdc52e71d9cc42cc50c098f1ecbcde18
|
[] |
no_license
|
leodh/RPython-internship
|
ea2aededfd0291225b454d329e7be1d6c53fe46a
|
7a797bb938429f9bf91c8286c3dd82af2e2af73a
|
refs/heads/master
| 2021-01-21T10:13:11.345695
| 2012-09-03T23:38:18
| 2012-09-03T23:38:18
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,268
|
py
|
import RPtreeClass
# Build associativity of braces
# To optmisize: calculate size of stack and dic beforehand
def BuildAssociativity(fileToUse):
""" Build a associative table of braces. """
bracketMap = {}
leftstack = []
pc = 0
for char in fileToUse:
if char == '(' or char == '{':
leftstack.append(pc)
elif char == ')':
left = leftstack.pop()
if fileToUse[left] != '(':
raise ValueError("Should be } at " + str(pc))
right = pc
bracketMap[left] = right
bracketMap[right] = left
elif char == '}':
left = leftstack.pop()
if fileToUse[left] != '{':
raise ValueError("Should be ) at "+str(pc))
right = pc
bracketMap[left] = right
bracketMap[right] = left
else:
pass
pc += 1
return bracketMap
# Spliting the original code into blocks
def belong(n,s,e):
return n<e and n>=s
def FilterDic(dictry, start, end):
"""Return a new dictionnary containning only pairing of indices between start and (end-1) inclued."""
newDic ={}
for i,k in dictry.items():
if belong(i,start,end) and belong(k,start,end):
newDic[i-start]=k-start
elif belong(i,start,end) and not belong(k,start,end):
raise ValueError("Not a valid bracket matching")
else:
pass
return newDic
def CutWord(string,i):
"""Find the first blank character following string[i] in string. If there's none, return the length of the list."""
if string[i] in (' ','\n','\t'):
return i
else:
try:
return CutWord(string,i+1)
except IndexError:
return i+1
def SplittingCode(fileToUse, bracketMap):
""" Splits the code into meaningful blocks. """
pc = 0
length = len(fileToUse)
blocks = []
while pc<length:
ch = fileToUse[pc]
if (ch in (' ', '\t', '\n')):
pass
elif ch =='(' or ch=='{':
matchingBracket = bracketMap[pc] + 1
assert(matchingBracket >=0 )
block = fileToUse[pc:matchingBracket]
newDic = FilterDic(bracketMap, pc, matchingBracket)
blocks.append((block, newDic))
pc = matchingBracket
else:
end = CutWord(fileToUse,pc)
assert(end >= 0)
blocks.append((fileToUse[pc:end], {}))
pc = end
pc += 1
return blocks
# Parsing function
def ParseFunc((block, dic)):
""" Given a block defining a function, return the correspondant representation. There are only simple spaces. """
n = dic[0]
if not (block[0] == '{' and block[n] == '}'):
raise ValueError("Not a function block :\n"+ block)
else:
assert(n>=0)
workingBlock = block[1:n]
dic2 = FilterDic(dic,1,n)
subBlocks = SplittingCode(workingBlock, dic2)
#
if len(subBlocks) != 2:
raise ValueError("Only two sub-blocks expected in block :\n"+block)
else:
declaration, dd = subBlocks[0]
#
if len(dd.values()) != 2 :
raise ValueError("No sub-blocks expected inside of :\n" + declaration)
#
end = dd[0]
assert(end>=0)
declareList = declaration[1:end].split(" ")
if len(declareList) != 2:
raise ValueError("Wrong declaration: \n" + declaration + "\nExpected form: <id> <id>")
name = declareList[0]
argName = declareList[1]
bodyTree = ParseF1WAE(subBlocks[1])
return name, RPtreeClass.Func(name,argName,bodyTree)
##Block of functions to define an identifier or a number
# Can't use .isalpha() ;-(
alphaOrUnd = ('a','z','e','r','t','y','u','i','o','p','q','s','d','f','g','h','j','k','l','m','w','x','c','v','b','n','_')
digit = ('0','1','2','3','4','5','6','7','8','9')
def isAlphaOrUndChar(c):
""" Check if the forst character belongs to alphaOrUnd. """
try:
return c[0] in alphaOrUnd
except IndexError:
return False
def isAlphaNumOrUnd(c):
""" Check if every character is either in alphaOrUnd or in digit. """
length =len(c)
pc = 0
answer = True
while answer and pc < length:
answer = answer and (c[pc] in alphaOrUnd or c[pc] in digit)
pc +=1
return answer and length >0
def IsIdentifier(word):
"""True if word is a correct identifier."""
return (isAlphaOrUndChar(word) and isAlphaNumOrUnd(word))
def IsNumber(c):
""" True iff the string is only made of digits and non-empty."""
length =len(c)
pc = 0
answer = True
while answer and pc < length:
answer = answer and c[pc] in digit
pc +=1
return answer and length>0
##
## Replace str.partition(' ') unavailable with TTC
def partitionSpace(word):
""" Same as string method partition(' ') """
length = len(word)
pc = 0
head = tail = ''
while pc < length:
if word[pc]==' ':
assert(pc>=0)
head = word[0:pc]
tail = word[pc+1:length]
break
else:
pc += 1
return head, tail
##Replace str.qplit() available, but we need the number of spaces deleted at the beging of the word
def StripSpace(word):
""" Same as str.split(' ') but also return the number of spaces deleted at the begining. """
beg = 0
end = len(word)
count = 0
while beg < end:
if word[beg] == ' ':
count += 1
beg += 1
else:
break
while end > beg:
end -= 1
if word[end] != ' ':
break
if beg == end == len(word):
return '', len(word)
else:
end += 1
assert(end>=0)
return word[beg: end], count
##
def ParseF1WAE((block, dic)):
"""Parses <F1WAE>. Only simple spaces."""
if block[0] == '{':
raise ValueError("Function declaration is not allowed in <F1WAE> :\n" + block)
#
elif block[0] == '(':
lastPos = dic[0]
assert(lastPos >= 0)
blockContent, count = StripSpace(block[1:lastPos])
# First word in blockContent allows to identify the case
head, tail = partitionSpace(blockContent)
#
if head == 'with':
bodyWith = SplittingCode(tail, FilterDic(dic,len(head+' ')+1+count,dic[0]))
if len(bodyWith) != 2:
raise ValueError("Two expressions expected following keyword 'with':\n" + block)
else:
falseApp = ParseF1WAE(bodyWith[0]) #Same syntax as an App
if not(isinstance(falseApp, RPtreeClass.App)):
raise ValueError("Wrong assignement in with block:\n" + block)
else:
return RPtreeClass.With(falseApp.funName, falseApp.arg, ParseF1WAE(bodyWith[1]))
#
elif head[0] in ('+','-','*','/','%'): # Treats the case were space is forgotten after operator
bodyOp = SplittingCode((head[1:len(head)] + tail),FilterDic(dic,len(head+' ')+1+count,dic[0]))
if len(bodyOp) != 2:
raise ValueError("Two expressions expected following operator :\n" + block)
else:
return RPtreeClass.Op(head[0], ParseF1WAE(bodyOp[0]), ParseF1WAE(bodyOp[1]))
#
else: # An App or a parenthesized Num or Id
bodyApp = SplittingCode(tail, FilterDic(dic,len(head+' ')+1+count,dic[0]))
lenght = len(bodyApp)
if lenght == 0: # Parenthesized Num or Id
return ParseF1WAE((head, FilterDic(dic,1,dic[0])))
elif lenght == 1: #An App
return RPtreeClass.App(head, ParseF1WAE(bodyApp[0]))
#
else:
#
if IsIdentifier(block):
return RPtreeClass.Id(block)
elif IsNumber(block):
return RPtreeClass.Num(int(block))
else:
raise ValueError("Syntax Error in identifier :\n" + block)
def Parse(myFile):
""" The global parsing function. """
myFile = (myFile.replace('\n',' ')).replace('\t',' ')
# There are only simple spaces. Makes it easier to deal with.
bracketMap = BuildAssociativity(myFile)
codeInPieces = SplittingCode(myFile, bracketMap)
#
funcToDefine = []
prog = []
#
for couple in codeInPieces:
s,d = couple
if s[0] == '{':
funcToDefine.append((s,d))
else:
prog.append((s,d))
#
try: # Check that BNF is respected
prog[1]
raise ValueError("Only one <Prog> is allowed.")
except IndexError:
pass
#
# Create the function dictionnary
funcDict = {}
for funcDef in funcToDefine:
name, descr = ParseFunc(funcDef)
try:
uselessVar = funcDict[name]
raise ValueError("Function "+name+" already defined.")
except KeyError:
funcDict[name] = descr
#
# Create AST of main program
ast = ParseF1WAE(prog[0])
return ast, funcDict
|
[
"leonard.de.haro@ens.fr"
] |
leonard.de.haro@ens.fr
|
f2e1d4fca648a683465ab786834d9b24a18d50d0
|
b5bc72861644c274b75e42374201ea8cdb84c1a2
|
/class_examples/class_destryo.py
|
dce888b556029231c26bd08cb702aa1c7650dbb8
|
[] |
no_license
|
Aadhya-Solution/PythonExample
|
737c3ddc9ad5e3d0cde24ac9f366ce2de2fa6cfe
|
34bc04570182130ebc13b6c99997c81834ad5f53
|
refs/heads/master
| 2022-12-18T09:54:30.857011
| 2020-08-24T13:53:59
| 2020-08-24T13:53:59
| 288,183,879
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 411
|
py
|
#!/usr/bin/python
class Point:
def __init__( self, x=0, y=0):
self.x = x
self.y = y
def __del__(self):
class_name = self.__class__.__name__
print class_name, "destroyed"
def __str__(self):
return "This is end"
pt1 = Point()
pt2 = pt1
pt3 = pt1
print id(pt1), id(pt2), id(pt3) # prints the ids of the objects
del pt1
print "===",pt2
print "====",pt3
|
[
"shashikant.pattar@gmail.com"
] |
shashikant.pattar@gmail.com
|
a2132f2c3e2ef9a784cab0051fff83ebff6eaff0
|
46f4b4e720a83e1fbb81f8b218af4e0b92548ae3
|
/jsontest.py
|
c26558a4d7410e93c731e0cc5bc8de2da6f71880
|
[] |
no_license
|
Dhanushroyal123/filehandling
|
14062d610a6a3c561eefb753cd14af0227a0add4
|
ab748ad6ecaa8e562625815e96c6882586114c48
|
refs/heads/master
| 2023-06-03T01:13:52.503502
| 2021-06-23T03:54:00
| 2021-06-23T03:54:00
| 379,472,023
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 232
|
py
|
# loads a JSON file into a dictionary
import json
with open('data.json') as f:
data = json.load(f)
for i in data.keys():
for j in range(len(data[i])):
print(data[i][j])
for i in data['emp_details']:
print(i)
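# Editorial note (assumed structure, not from the repo): the loops above expect
# data.json to map keys to lists, e.g.
# {"emp_details": [{"name": "A"}, {"name": "B"}], "dept_details": [{"dept": "HR"}]}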
|
[
"dhanushroyal516@gmail"
] |
dhanushroyal516@gmail
|
1bb87fb09710dcb031228250e4df9f655c33959e
|
7f8ab8ed76218fb55a2d6cf34a83e7e6486ab1dc
|
/test_gmail.py
|
0229dd74dae104d84ad1b9896f44ce6028a3738c
|
[] |
no_license
|
alabavery/e_dist_3
|
02fbeb6e459c3bc74dfba1e3fe9ea71469ca852d
|
2ee0c081a052a0f350138ad5a93300ee8cd45e1b
|
refs/heads/master
| 2021-05-11T11:19:13.939744
| 2018-01-29T03:48:09
| 2018-01-29T03:48:09
| 117,634,189
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 912
|
py
|
import unittest
#python -m unittest -v test_module
class Gmail__get_unread_email_ids(unittest.TestCase):
def setUp(self):
# mock gmail_client.users().messages().list(userId='me',q='is:unread').execute()
pass
class Gmail__get_all_unread(unittest.TestCase):
def test_reply_to_our_email(self):
pass
def test_sender_replies_to_self(self):
pass
def test_third_party_replies(self):
pass
# this is situation where sender forwards to or cc's 3rd party and they
# send response at some point. Not even sure what behavior should be here.
class Gmail__mark_as_unread(unittest.TestCase):
pass
class Gmail__get_sender(unittest.TestCase):
def test_reply_to_our_email(self):
pass
def test_sender_replies_to_self(self):
pass
class Gmail__get_text(unittest.TestCase):
pass
class Gmail__get_attachment(unittest.TestCase):
pass
class Gmail__send_email(unittest.TestCase):
pass
|
[
"averyab0@sewanee.edu"
] |
averyab0@sewanee.edu
|
588127ec34fa8b08aba7a0862e5a6a19b642d2d0
|
87086df54494cdd42a686413639892ebf4978a3e
|
/MIT_Vadim Smolyakov_Algorithmic Trading/pairs_trading.py
|
1f498af00486976f99d0ff4e83cb1c9174b16312
|
[
"MIT"
] |
permissive
|
bopopescu/Machine_Learning_Collections
|
acbe70d55a2551f98d7a86132608d432c957c369
|
e77d81754e77c7acdfec86ec395892400657482e
|
refs/heads/master
| 2022-04-11T01:39:09.848778
| 2019-09-15T15:23:31
| 2019-09-15T15:23:31
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,150
|
py
|
import numpy as np
import statsmodels.api as sm
import pandas as pd
from zipline.utils import tradingcalendar
import pytz
def initialize(context):
# Quantopian backtester specific variables
set_symbol_lookup_date('2014-01-01')
set_slippage(slippage.VolumeShareSlippage(volume_limit=0.025, price_impact=0.1))
set_commission(commission.PerShare(cost=0.0075, min_trade_cost=1))
context.stock_pairs = [(symbol('ABGB'), symbol('FSLR')),
(symbol('CSUN'), symbol('ASTI')),
(symbol('KO'), symbol('PEP')),
(symbol('AAPL'), symbol('IBM')),
(symbol('FB'), symbol('YHOO')),
(symbol('TWTR'), symbol('YHOO'))]
context.stocks = [symbol('ABGB'), symbol('FSLR'), symbol('CSUN'), symbol('ASTI'),\
symbol('KO'), symbol('PEP'), symbol('AAPL'), symbol('IBM'), symbol('FB'),\
symbol('YHOO'),symbol('TWTR')]
context.num_pairs = len(context.stock_pairs)
# strategy specific variables
context.lookback = 20 # used for regression
context.z_window = 20 # used for zscore calculation, must be <= lookback
context.spread = np.ndarray((context.num_pairs, 0))
# context.hedgeRatioTS = np.ndarray((context.num_pairs, 0))
context.inLong = [False] * context.num_pairs
context.inShort = [False] * context.num_pairs
schedule_function(func=check_pair_status, date_rule=date_rules.every_day(),\
time_rule=time_rules.market_close(minutes=90))
def check_pair_status(context, data):
if get_open_orders():
return
prices = data.history(context.stocks, fields='price', bar_count=35, frequency='1d').\
iloc[-context.lookback::]
new_spreads = np.ndarray((context.num_pairs, 1))
for i in range(context.num_pairs):
(stock_y, stock_x) = context.stock_pairs[i]
Y = prices[stock_y]
X = prices[stock_x]
try:
hedge = hedge_ratio(Y, X, add_const=True)
record(hedge=hedge)
except ValueError as e:
log.debug(e)
return
# context.hedgeRatioTS = np.append(context.hedgeRatioTS, hedge)
new_spreads[i, :] = Y[-1] - hedge * X[-1]
if context.spread.shape[1] > context.z_window:
# Keep only the z-score lookback period
spreads = context.spread[i, -context.z_window:]
zscore = (spreads[-1] - spreads.mean()) / spreads.std()
record(zscore=zscore)
if context.inShort[i] and zscore < 0.0:
order_target(stock_y, 0)
order_target(stock_x, 0)
context.inShort[i] = False
context.inLong[i] = False
record(X_pct=0, Y_pct=0)
return
if context.inLong[i] and zscore > 0.0:
order_target(stock_y, 0)
order_target(stock_x, 0)
context.inShort[i] = False
context.inLong[i] = False
record(X_pct=0, Y_pct=0)
return
if zscore < -1.0 and (not context.inLong[i]):
# Only trade if NOT already in a trade
y_target_shares = 1 #long y
X_target_shares = -hedge #short x
context.inLong[i] = True
context.inShort[i] = False
(y_target_pct, x_target_pct) = computeHoldingsPct( y_target_shares,X_target_shares, Y[-1], X[-1] )
order_target_percent( stock_y, y_target_pct * (1.0/context.num_pairs) )
order_target_percent( stock_x, x_target_pct * (1.0/context.num_pairs) )
record(Y_pct=y_target_pct, X_pct=x_target_pct)
return
if zscore > 1.0 and (not context.inShort[i]):
# Only trade if NOT already in a trade
y_target_shares = -1 #short y
X_target_shares = hedge #long x
context.inShort[i] = True
context.inLong[i] = False
(y_target_pct, x_target_pct) = computeHoldingsPct( y_target_shares, X_target_shares, Y[-1], X[-1] )
order_target_percent( stock_y, y_target_pct * (1.0/context.num_pairs))
order_target_percent( stock_x, x_target_pct * (1.0/context.num_pairs))
record(Y_pct=y_target_pct, X_pct=x_target_pct)
context.spread = np.hstack([context.spread, new_spreads])
def hedge_ratio(Y, X, add_const=True):
if add_const:
X = sm.add_constant(X)
model = sm.OLS(Y, X).fit()
return model.params[1]
model = sm.OLS(Y, X).fit()
return model.params.values
def computeHoldingsPct(yShares, xShares, yPrice, xPrice):
yDol = yShares * yPrice
xDol = xShares * xPrice
notionalDol = abs(yDol) + abs(xDol)
y_target_pct = yDol / notionalDol
x_target_pct = xDol / notionalDol
return (y_target_pct, x_target_pct)
def handle_data(context, data):
pass
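# Editorial worked example for computeHoldingsPct (made-up prices): long 1 share of Y
# at $50 and short 1.5 shares of X at $20 gives $80 of gross notional, so
#   computeHoldingsPct(1, -1.5, 50.0, 20.0) == (0.625, -0.375)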
|
[
"pmiori@gmail.com"
] |
pmiori@gmail.com
|
73d1365d893dea2f34cb4da1462faccc339e49be
|
01e34a5ec95d4d09f092fda2d8686797edb48d57
|
/CAUSALMTA/re_weighting/base_model.py
|
970766df0fcb5d421aead19d448732bcd2ed743c
|
[] |
no_license
|
lvchakele/CAUSALMTA
|
58d08586f3e9527b1eb523c3664a66f493a9bb6e
|
7880da748bc08993b1945c221bea5b39067959b7
|
refs/heads/main
| 2023-08-29T05:58:21.490999
| 2021-11-07T10:08:43
| 2021-11-07T10:08:43
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,823
|
py
|
import torch
import torch.nn as nn
class LSTM_Encoder(nn.Module):
def __init__(
self,
input_dim,
hidden_dim,
LSTM_hidden_layer_depth = 1,
dropout_rate = 0.2
):
super(LSTM_Encoder, self).__init__()
self.model = nn.LSTM(
input_size = input_dim,
hidden_size = hidden_dim,
num_layers = LSTM_hidden_layer_depth,
batch_first = True,
dropout = dropout_rate
)
def forward(self, x):
_, (h_end, c_end) = self.model(x)
# h_end = h_end[:, -1, :]
return h_end[-1]
class LSTM_Decoder(nn.Module):
def __init__(
self,
input_dim,
hidden_dim,
batch_size,
LSTM_hidden_layer_depth = 1,
dropout_rate = 0.2,
device = "cuda:0"
):
super(LSTM_Decoder, self).__init__()
self.batch_size = batch_size
self.input_dim = input_dim
self.hidden_dim = hidden_dim
self.LSTM_hidden_layer_depth = LSTM_hidden_layer_depth
self.device = device
self.model = nn.LSTM(
input_size = input_dim,
hidden_size = hidden_dim,
num_layers = LSTM_hidden_layer_depth,
batch_first = True,
dropout = dropout_rate
)
def forward(self, h_state, seq_len):
decoder_inputs = torch.zeros(h_state.shape[0], seq_len, self.input_dim).to(self.device)
c_0 = torch.zeros(self.LSTM_hidden_layer_depth, h_state.shape[0], self.hidden_dim).to(self.device)
h_0 = h_state.repeat(self.LSTM_hidden_layer_depth, 1, 1)
# print(decoder_inputs.shape)
# print(c_0.shape)
# print(h_0.shape)
decoder_output, _ = self.model(decoder_inputs, (h_0, c_0))
return decoder_output
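# Editorial shape check (illustrative sizes; device forced to CPU for the sketch):
if __name__ == "__main__":
    enc = LSTM_Encoder(input_dim=8, hidden_dim=16)
    dec = LSTM_Decoder(input_dim=8, hidden_dim=16, batch_size=4, device="cpu")
    x = torch.randn(4, 12, 8)        # (batch, seq_len, features)
    h = enc(x)                       # -> (4, 16): last layer's final hidden state
    y = dec(h, seq_len=12)           # -> (4, 12, 16): decoded sequence
    print(h.shape, y.shape)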
|
[
"noreply@github.com"
] |
lvchakele.noreply@github.com
|
fbf4923b56f955382583ca609f34c7a4888907e0
|
041d5ffe538bd2ecc29defa58226287ebac487dd
|
/listOverlap.py
|
f65ccc5904a35a94eea5920b1c062398a24dad0e
|
[] |
no_license
|
RobertBedrosian/pythonFiles
|
a2a34b748e5e8f95ff229bc6fd64fdcd79132651
|
1f68c90c137cf9ad4b48a6c296d5ebc4a247a424
|
refs/heads/master
| 2021-05-06T18:24:34.190848
| 2017-11-25T07:25:57
| 2017-11-25T07:25:57
| 111,913,872
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 425
|
py
|
import random
def makelists(random_int):
numList=[]
for count in range(random_int):
numList.append(random.randint(1,100))
return numList
def main():
a=random.randint(1,51)
print(a)
actualA=makelists(a)
print(actualA)
a=random.randint(1,51)
print(a)
actualB=makelists(a)
print(actualB)
intersection=[]
for number in actualA:
if number in actualB:
intersection.append(number)
print(intersection)
main()
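# Editorial note: because duplicates are kept, "intersection" above is a multiset-style
# overlap; wrapping the lists in set() first would give a strict set intersection, e.g.
#   sorted(set(actualA) & set(actualB))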
|
[
"chrisx32@gmail.com"
] |
chrisx32@gmail.com
|
e3b261b30ed261b305318a7466abb9199f23d58f
|
386e14d7be483a054e003aa9aea7a3506815f17e
|
/venv/bin/easy_install-3.5
|
8c3fe94cf288facd01bc4866da01461b332f008b
|
[] |
no_license
|
freetoyer/djangoshop
|
20fa4734b4ba9818ddd0db79e4c9fc97f502ba5a
|
7d523ea82bba27f208028707d4757aeedf61a118
|
refs/heads/master
| 2020-06-19T06:51:22.512149
| 2019-08-14T18:20:52
| 2019-08-14T18:20:52
| 196,597,795
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 273
|
5
|
#!/home/andrey/django_projects/djangoshop/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
|
[
"andrey_89-89@mail.ru"
] |
andrey_89-89@mail.ru
|
1f59392af82fe70424df8b1ad20ba8b0d74c9622
|
e8e92bbff0c06a8900e43a51feb820bcb91bedc5
|
/manage.py
|
0e8a43fb0a3e45570ca53ddabb53d50a7a592da1
|
[] |
no_license
|
Jidnahn/products-ms
|
b35a76e831ee24dfabda433e310e4583422ca3ff
|
6a0ea5fb07a92284cf2c22ffef6101f78d235f3b
|
refs/heads/main
| 2023-07-05T04:55:53.403784
| 2021-08-20T18:41:20
| 2021-08-20T18:41:20
| 398,348,925
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 671
|
py
|
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'productsProject.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
|
[
"78552981+Jidnahn@users.noreply.github.com"
] |
78552981+Jidnahn@users.noreply.github.com
|
9ad89d8be3bee3befeb351601c75c9f1883c03a0
|
e344904487c0d7a5fb2a682a8ac7e3e4d8e63f51
|
/ex4/ex4.py
|
8316d360bcc56bff1f852fe01d7af561b67b60cc
|
[] |
no_license
|
weeshree/ML-AndrewNg
|
e2fee8b8421a84666f2946bd6eda92ee21444a2a
|
b74b1693acc37e9f87e6307204749c1bdbee10fd
|
refs/heads/master
| 2020-03-23T22:36:57.675448
| 2018-08-01T18:08:02
| 2018-08-01T18:08:02
| 142,185,850
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,779
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Jul 21 13:14:53 2018
@author: weesh
"""
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from scipy.io import loadmat
from scipy.optimize import minimize
#bpcount = 0;
def wrap(Theta1, Theta2):
''' Linearizes Theta1, Theta2 into single vector '''
g1 = np.array(Theta1.flatten(), ndmin=2)
g2 = np.array(Theta2.flatten(), ndmin=2)
ar = np.c_[g1, g2].flatten()
return ar;
def unwrap(arr, inputS, hiddenS, K):
''' Reshapes 1-d vector into Theta1 and Theta2 '''
Theta1 = np.reshape(arr[:hiddenS * (inputS + 1)], (hiddenS, inputS + 1))
Theta2 = np.reshape(arr[hiddenS * (inputS + 1):], (K, hiddenS + 1))
return Theta1, Theta2
def sigmoid(z):
''' Sigmoid function '''
return 1/(1+np.exp(-z))
def costFunction(params, inputS, hiddenS, K, X, y, lam):
''' First computes hX using forward prop, then computes cost of logistic regression + regularization '''
Theta1, Theta2 = unwrap(params, inputS, hiddenS, K)
a1, a2, hX, z1, z2 = forwardProp(Theta1, Theta2, X)
m = len(X)
J = np.sum(-y*np.log(hX) - (1-y)*np.log(1-hX))/m
# ar = Theta1[0:,1:]
# print(ar.shape)
J += lam/2/m*np.sum(np.power(Theta1[:,1:], 2)) + lam/2/m*np.sum(np.power(Theta2[:,1:], 2))
return J
def forwardProp(Theta1, Theta2, X):
''' Given Theta vectors and X, computes activations and Z-values of each layer '''
A1 = X
A1 = np.c_[np.ones(A1.shape[0]), A1] # 5000 401
Z2 = A1 @ Theta1.T # 5000 x 25
A2 = sigmoid(Z2)
A2 = np.c_[np.ones(A2.shape[0]), A2] # 5000 x 26
Z3 = A2 @ Theta2.T # 5000 x 10
A3 = sigmoid(Z3) # 5000 x 10
# print(A3)
return A1, A2, A3, Z2, Z3
def sigGrad(z):
''' Computes derivative of sigmoid function w/ respect to z '''
return sigmoid(z)*(1-sigmoid(z))
def randInit(pre, post):
''' Randomly initializes theta '''
eps = np.sqrt(6) / (np.sqrt(pre+post));
theta = np.random.rand(pre, post) * 2 * eps - eps;
return theta;
def backProp(params, inputS, hiddenS, K, X, y, lam):
''' backProp computes gradient vectors, d(costFunction)/d(Theta1/2) '''
# global bpcount;
# print()
# print(bpcount)
# bpcount = bpcount + 1
Theta1, Theta2 = unwrap(params, inputS, hiddenS, K)
m = len(X);
D1 = np.zeros(Theta1.shape)
D2 = np.zeros(Theta2.shape)
J = 0
a1, a2, a3, z2, z3 = forwardProp(Theta1, Theta2, X)
for i in range(m):
a1i = a1[i:i+1,:] # 1 x 401
a2i = a2[i:i+1,:] # 1 x 26
a3i = a3[i:i+1,:] # 1 x 10
z2i = z2[i:i+1,:] # 1 x 25
yi = y[i:i+1, :] # 1 x 10
d3 = a3i - yi; # 1 x 10
d2 = (d3 @ Theta2)[:,1:] * sigGrad(z2i) # 1 x 25
D1 += d2.T @ a1i # 25 x 401
D2 += d3.T @ a2i # 10 x 26
J += -np.sum(yi*np.log(a3i) + (1-yi)*np.log(1-a3i))
J += lam/2 * (np.sum(np.power(Theta1[:,1:], 2)) + np.sum(np.power(Theta2[:,1:], 2)))
D1 += Theta1; D2 += Theta2;
D1[:,0:1] -= Theta1[:, 0:1]; D2[:, 0:1] -= Theta2[:, 0:1];
D1 /= m; D2 /= m; J /= m;
# print(J)
return J, wrap(D1, D2).flatten();
def gradCheck(X, y, Theta1, Theta2, lam):
''' Checks backProp's computation of gradient vectors in a slow approximation '''
t1 = Theta1.flatten(); t2 = Theta2.flatten();
eps = 0.001;
D1 = np.zeros(t1.shape); D2 = np.zeros(t2.shape);
for i in range(len(t1)):
print(i)
t1[i] += eps;
JUp = costFunction( wrap(np.reshape(t1, Theta1.shape) , Theta2), X.shape[1], Theta1.shape[0], Theta2.shape[0], X, y, lam)
t1[i] -= 2*eps;
JDown = costFunction( wrap(np.reshape(t1, Theta1.shape) , Theta2), X.shape[1], Theta1.shape[0], Theta2.shape[0], X, y, lam)
D1[i] = (JUp-JDown)/2/eps;
for i in range(len(t2)):
print("X",i)
t2[i] += eps;
JUp = costFunction( wrap(Theta1, np.reshape(t2, Theta2.shape)) , X.shape[1], Theta1.shape[0], Theta2.shape[0], X, y, lam)
t2[i] -= 2*eps;
JDown = costFunction( wrap(Theta1, np.reshape(t2, Theta2.shape)) , X.shape[1], Theta1.shape[0], Theta2.shape[0], X, y, lam)
D2[i] = (JUp-JDown)/2/eps;
D1 = np.reshape(D1, Theta1.shape); D2 = np.reshape(D2, Theta2.shape);
return D1, D2;
''' Input '''
data = loadmat('ex4data1.mat')
X = np.array(data['X']) # 5000 x 400
y = np.array(data['y'])
y = np.reshape(np.eye(10)[y-1], [5000, 10]) # 5000 x 10
data = loadmat('ex4weights.mat')
Theta1 = np.array(data['Theta1'])
Theta2 = np.array(data['Theta2'])
''' Some initialization '''
inputS = 400; hiddenS= 25; K = 10;
Theta1 = randInit(hiddenS, inputS + 1) # 25 x 401
Theta2 = randInit(K, hiddenS + 1) # 10 x 26
lam = 1.5
#a1, a2, hX, z2, z3 = forwardProp(Theta1, Theta2, X)
''' Run backProp to find gradient vectors and use scipy.minimize to find Theta vectors '''
cos, wT = backProp( wrap(Theta1, Theta2), inputS, hiddenS, K, X, y, lam)
d1, d2 = unwrap(wT, inputS, hiddenS, K);
#dt1, dt2 = gradCheck(X, y, Theta1, Theta2, lam)
#print(d1,dt1)
#print(d2, dt2)
fmin = minimize(fun=backProp, x0=wrap(Theta1, Theta2), args=(inputS, hiddenS, K, X, y, lam),
method='TNC', jac=True, options={'maxiter': 250, 'disp':True})
Theta1, Theta2 = unwrap(fmin.x, inputS, hiddenS, K);
#print(costFunction(wrap(Theta1, Theta2), inputS, hiddenS, K, X, y, lam))
''' Run forwardProp to find optimal prediction, hX, and compute its accuracy '''
a1, a2, hX, z2, z3 = forwardProp(Theta1, Theta2, X)
#print(hX)
def getMaxAr(ar):
return np.array(np.argmax(ar, axis=1).T + 1, ndmin=2);
#print(np.array(np.argmax(hX, axis=1).T + 1, ndmin=2))
#print(y.T)
#acc = np.mean(np.array(np.argmax(hX, axis=1).T + 1, ndmin=2) == y.T)
print(getMaxAr(hX))
print(getMaxAr(y))
acc = np.mean(getMaxAr(hX) == getMaxAr(y))
print(acc)
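# Editorial example for getMaxAr (made-up scores): each row's argmax is converted to a
# 1-based label, e.g.
#   getMaxAr(np.array([[0.1, 0.7, 0.2], [0.9, 0.05, 0.05]])) -> array([[2, 1]])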
|
[
"weeshree@gmail.com"
] |
weeshree@gmail.com
|
6b50df7aaef6f9d9ed053aa0588a3097045151c2
|
09510bcf4df1fc89bdcf4e95921b90d2bad55703
|
/obspyTaup.py
|
09bd6532b510e4820c2bd8169da1d0af0ac2f93a
|
[] |
no_license
|
mprocha/scripts
|
075c338d02ae12243aa5bb746e9285498ab77a01
|
5654ddfd86e7578b073c05c91042518819fe44b9
|
refs/heads/master
| 2021-01-22T13:46:54.590555
| 2016-02-08T15:16:55
| 2016-02-08T15:16:55
| 18,016,459
| 2
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 208
|
py
|
#!/usr/bin/python
from obspy.taup.taup import getTravelTimes
tt = getTravelTimes(delta=52.474, depth=611.0, model='ak135')
lentt=len(tt)
for i in range(0,lentt):
print(tt[i])
# print(tt[i]['time'])
|
[
"marcelorocha.unb@gmail.com"
] |
marcelorocha.unb@gmail.com
|
2d444e193746f4b9d9f393a61e3faffc1f20c2c4
|
a433cbe30591a986e8fb7287a251198e48e17a3f
|
/mnist.py
|
f48f2f8da5e8612ba5f98a88c012c08568da8d86
|
[] |
no_license
|
alilliam/multi_gpu_distribute
|
578b63878c6b19e834c3bc6c3f77f27c066d8ad5
|
2d7896b961e870d480c08b6d299f51b48bbe9b9a
|
refs/heads/master
| 2022-11-04T21:23:35.344049
| 2020-07-30T15:04:39
| 2020-07-30T15:04:39
| 283,215,674
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 10,447
|
py
|
# This file is based on the PyTorch ImageNet training example.
import argparse
import os
import random
import shutil
import time
import warnings
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.distributed as dist
import torch.optim
import torch.multiprocessing as mp
import torch.utils.data
import torch.utils.data.distributed
import torchvision.transforms as transforms
import torchvision.datasets as datasets
import torchvision.models as models
import torch.nn.functional as F
model_names = sorted(name for name in models.__dict__
if name.islower() and not name.startswith("__")
and callable(models.__dict__[name]))
parser = argparse.ArgumentParser(description='PyTorch ImageNet Training')
parser.add_argument('data', metavar='DIR',
help='path to dataset')
parser.add_argument('-a', '--arch', metavar='ARCH', default='resnet18',
choices=model_names,
help='model architecture: ' +
' | '.join(model_names) +
' (default: resnet18)')
parser.add_argument('-j', '--workers', default=4, type=int, metavar='N',
help='number of data loading workers (default: 4)')
parser.add_argument('--epochs', default=90, type=int, metavar='N',
help='number of total epochs to run')
parser.add_argument('--start-epoch', default=0, type=int, metavar='N',
help='manual epoch number (useful on restarts)')
parser.add_argument('-b', '--batch-size', default=256, type=int,
metavar='N',
help='mini-batch size (default: 256), this is the total '
'batch size of all GPUs on the current node when '
'using Data Parallel or Distributed Data Parallel')
parser.add_argument('--lr', '--learning-rate', default=0.1, type=float,
metavar='LR', help='initial learning rate', dest='lr')
parser.add_argument('--momentum', default=0.9, type=float, metavar='M',
help='momentum')
parser.add_argument('--wd', '--weight-decay', default=1e-4, type=float,
metavar='W', help='weight decay (default: 1e-4)',
dest='weight_decay')
parser.add_argument('-p', '--print-freq', default=1, type=int,
metavar='N', help='print frequency (default: 10)')
parser.add_argument('--resume', default='', type=str, metavar='PATH',
help='path to latest checkpoint (default: none)')
parser.add_argument('-e', '--evaluate', dest='evaluate', action='store_true',
help='evaluate model on validation set')
parser.add_argument('--pretrained', dest='pretrained', action='store_true',
help='use pre-trained model')
parser.add_argument('--world-size', default=-1, type=int,
help='number of nodes for distributed training')
parser.add_argument('--rank', default=-1, type=int,
help='node rank for distributed training')
parser.add_argument('--dist-url', default='tcp://224.66.41.62:23456', type=str,
help='url used to set up distributed training')
parser.add_argument('--dist-backend', default='nccl', type=str,
help='distributed backend')
parser.add_argument('--seed', default=None, type=int,
help='seed for initializing training. ')
parser.add_argument('--gpu', default=None, type=int,
help='GPU id to use.')
parser.add_argument('--multiprocessing-distributed', action='store_true',
help='Use multi-processing distributed training to launch '
'N processes per node, which has N GPUs. This is the '
'fastest way to use PyTorch for either single node or '
'multi node data parallel training')
NUM_CLASSES = 10
class ConvNet(nn.Module):
def __init__(self, num_classes=NUM_CLASSES):
super(ConvNet, self).__init__()
self.features = nn.Sequential(
nn.Conv2d(3, 20, kernel_size=3,stride=1,padding=1),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=2,stride=2),
)
self.classifier = nn.Sequential(
nn.Dropout(p=0.2),
nn.Linear(5120, 20),
nn.ReLU(inplace=True),
nn.Dropout(p=0.2),
nn.Linear(20, num_classes),
nn.Sigmoid(),
)
def forward(self, x):
x = self.features(x)
x = x.view(x.size(0), -1)
x = self.classifier(x)
return x
def train(epoch, train_loader, optimizer, model, args,criterion):
model.train()
for batch_idx, (data, target) in enumerate(train_loader):
data, target = data.cuda(), target.cuda()
optimizer.zero_grad()
output = model(data)
loss = criterion(output, target)
loss.backward()
optimizer.step()
if batch_idx % args.print_freq == 0:
print('Train Epoch: {} [{} ({:.0f}%)]\tLoss: {:.6f}'.format(
epoch, batch_idx * len(data),
100. * batch_idx / len(train_loader), loss.item()))
# def test():
# model.eval()
# test_loss = 0.
# test_accuracy = 0.
# for data, target in test_loader:
# if args.cuda:
# data, target = data.cuda(), target.cuda()
# output = model(data)
# # sum up batch loss
# test_loss += F.nll_loss(output, target, size_average=False).item()
# # get the index of the max log-probability
# pred = output.data.max(1, keepdim=True)[1]
# test_accuracy += pred.eq(target.data.view_as(pred)).cpu().float().sum()
# # Horovod: use test_sampler to determine the number of examples in
# # this worker's partition.
# test_loss /= len(test_sampler)
# test_accuracy /= len(test_sampler)
# # Horovod: average metric values across workers.
# test_loss = metric_average(test_loss, 'avg_loss')
# test_accuracy = metric_average(test_accuracy, 'avg_accuracy')
# # Horovod: print output only on first rank.
# if hvd.rank() == 0:
# print('\nTest set: Average loss: {:.4f}, Accuracy: {:.2f}%\n'.format(
# test_loss, 100. * test_accuracy))
def main():
args = parser.parse_args()
if args.seed is not None:
random.seed(args.seed)
torch.manual_seed(args.seed)
cudnn.deterministic = True
warnings.warn('You have chosen to seed training. '
'This will turn on the CUDNN deterministic setting, '
'which can slow down your training considerably! '
'You may see unexpected behavior when restarting '
'from checkpoints.')
if args.gpu is not None:
warnings.warn('You have chosen a specific GPU. This will completely '
'disable data parallelism.')
if args.dist_url == "env://" and args.world_size == -1:
args.world_size = int(os.environ["WORLD_SIZE"])
args.distributed = args.world_size > 1 or args.multiprocessing_distributed
ngpus_per_node = 1#torch.cuda.device_count()
if args.multiprocessing_distributed:
args.world_size = ngpus_per_node * args.world_size
mp.spawn(main_worker, nprocs=ngpus_per_node, args=(ngpus_per_node, args))
else:
main_worker(args.gpu, ngpus_per_node, args)
def main_worker(gpu, ngpus_per_node, args):
args.gpu = gpu
if args.gpu is not None:
print("Use GPU: {} for training".format(args.gpu))
if args.distributed:
if args.dist_url == "env://" and args.rank == -1:
args.rank = int(os.environ["RANK"])
if args.multiprocessing_distributed:
args.rank = args.rank * ngpus_per_node + gpu
dist.init_process_group(backend=args.dist_backend, init_method=args.dist_url,
world_size=args.world_size, rank=args.rank)
#######################################
model = models.__dict__[args.arch]()#ConvNet()
#
##########################################
if args.distributed:
if args.gpu is not None:
torch.cuda.set_device(args.gpu)
model.cuda(args.gpu)
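            # With one process per GPU under DistributedDataParallel, split the
            # global batch size and data-loading workers evenly across the
            # ngpus_per_node processes on this node.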
args.batch_size = int(args.batch_size / ngpus_per_node)
args.workers = int((args.workers + ngpus_per_node - 1) / ngpus_per_node)
model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu])
else:
model.cuda()
model = torch.nn.parallel.DistributedDataParallel(model)
elif args.gpu is not None:
torch.cuda.set_device(args.gpu)
model = model.cuda(args.gpu)
else:
if args.arch.startswith('net'):
model.features = torch.nn.DataParallel(model.features)
model.cuda()
else:
model = torch.nn.DataParallel(model).cuda()
optimizer = torch.optim.SGD(model.parameters(), args.lr,
momentum=args.momentum,
weight_decay=args.weight_decay)
criterion = nn.CrossEntropyLoss().cuda(args.gpu)
cudnn.benchmark = True
train_dataset = \
datasets.CIFAR10(args.data, train=True, download=True,
transform=transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010))
]))
if args.distributed:
train_sampler = torch.utils.data.distributed.DistributedSampler(train_dataset)
else:
train_sampler = None
train_loader = torch.utils.data.DataLoader(
train_dataset, batch_size=args.batch_size, shuffle=(train_sampler is None),
num_workers=args.workers, pin_memory=True, sampler=train_sampler)
for epoch in range(args.start_epoch, args.epochs):
if args.distributed:
train_sampler.set_epoch(epoch)
adjust_learning_rate(optimizer, epoch, args)
train(epoch, train_loader, optimizer, model, args,criterion)
def adjust_learning_rate(optimizer, epoch, args):
"""Sets the learning rate to the initial LR decayed by 10 every 30 epochs"""
lr = args.lr * (0.1 ** (epoch // 30))
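    # e.g. with the default --lr 0.1: epochs 0-29 use 0.1, 30-59 use 0.01, 60-89 use 0.001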
for param_group in optimizer.param_groups:
param_group['lr'] = lr
if __name__ == '__main__':
main()
|
[
"448635573@qq.com"
] |
448635573@qq.com
|
7bde7a7d800ff5f29bfbaa829a108cb47204af71
|
d1e8f4be036e1868633b24c552a643e6ced39081
|
/tartist/data/rflow/configs.py
|
c7d39d49acfe9806be35438782ddb0aea2857e07
|
[
"MIT"
] |
permissive
|
ml-lab/TensorArtist
|
8c6c806ca34cf25bda787dff46811b531a137bc2
|
cbbe4055657c4c4a648c6924eed4c37cfbbe85a7
|
refs/heads/master
| 2021-01-22T02:13:25.025501
| 2017-05-20T08:07:49
| 2017-05-20T08:07:49
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,184
|
py
|
# -*- coding:utf8 -*-
# File : configs.py
# Author : Jiayuan Mao
# Email : maojiayuan@gmail.com
# Date : 2/28/17
#
# This file is part of TensorArtist
NS_CTL_PROTOCAL = 'tcp'
NS_CTL_HOST = '*'
NS_CTL_PORT = '43521'
NS_HEARTBEAT_INTERVAL = 3
NS_CLEANUP_WAIT = 10
CTL_CTL_SND_COUNTDOWN = 5
CTL_CTL_HWM = 5
CTL_DAT_SND_COUNTDOWN = 5
CTL_DAT_HWM = 5
CTL_DAT_PROTOCAL = 'tcp'
CTL_DAT_HOST = '*'
class Actions:
NS_REGISTER_CTL_REQ = 'ns-register-ctl-req'
NS_REGISTER_CTL_REP = 'ns-register-ctl-rep'
NS_REGISTER_PIPE_REQ = 'ns-register-pipe-req'
NS_REGISTER_PIPE_REP = 'ns-register-pipe-rep'
NS_HEARTBEAT_REQ = 'ns-heartbeat-req'
NS_HEARTBEAT_REP = 'ns-heartbeat-rep'
NS_QUERY_OPIPE_REQ = 'ns-query-opipe-req'
NS_QUERY_OPIPE_REP = 'ns-query-opipe-rep'
CTL_NOTIFY_OPEN_REQ = 'ctl-notify-open-req'
CTL_NOTIFY_OPEN_REP = 'ctl-notify-open-rep'
CTL_NOTIFY_CLOSE_REQ = 'ctl-notify-close-req'
CTL_NOTIFY_CLOSE_REP = 'ctl-notify-close-rep'
CTL_CONNECT_REQ = 'ctl-connect-req'
CTL_CONNECT_REP = 'ctl-connect-rep'
CTL_CONNECTED_REQ = 'ctl-connected-req'
CTL_CONNECTED_REP = 'ctl-connected-rep'
QUERY_REP_COUNTDOWN = 5
|
[
"maojiayuan@gmail.com"
] |
maojiayuan@gmail.com
|
8366b996ec4f750f41b5485dbf1e3b841d5df8e3
|
7b012d0b6a6d7894f45c9148612e4946ab75d746
|
/src/sentry/snmp_handler.py
|
67648931e78ed0769c5a4af40223bee1556889d4
|
[] |
no_license
|
ksaper/sentry_pdu
|
5fff5a7e26686b402625d4302e9dcf98956e3c5f
|
346555f0600726c2a55aad035648a13f786f7c18
|
refs/heads/master
| 2021-10-15T19:32:24.547829
| 2018-02-08T00:00:45
| 2018-02-08T00:00:45
| 103,969,923
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,049
|
py
|
import os
from cloudshell.shell.core.context_utils import get_attribute_by_name
from cloudshell.snmp.quali_snmp import QualiSnmp
from cloudshell.snmp.snmp_parameters import SNMPV3Parameters, SNMPV2WriteParameters, SNMPV2ReadParameters
from pysnmp.smi.rfc1902 import ObjectType
from log_helper import LogHelper
from data_model import *
class SnmpHandler:
def __init__(self, context):
self.context = context
self.resource = SentryPdu.create_from_context(context)
self.logger = LogHelper.get_logger(context)
self.address = self.context.resource.address
self.community_read = get_attribute_by_name(context=self.context,
attribute_name=self.resource.snmp_read_community) or 'public'
self.community_write = get_attribute_by_name(context=self.context,
attribute_name=self.resource.snmp_write_community) or 'private'
        self.password = get_attribute_by_name(context=self.context,
                                              attribute_name=self.resource.snmp_v3_password) or ''
        self.user = get_attribute_by_name(context=self.context,
                                          attribute_name=self.resource.snmp_v3_user) or ''
self.version = get_attribute_by_name(context=self.context,
attribute_name=self.resource.snmp_version) or ''
self.private_key = get_attribute_by_name(context=self.context,
attribute_name=self.resource.snmp_v3_private_key)
def get(self, object_identity):
handler = self._get_handler('get')
return handler.get(ObjectType(object_identity))
def set(self, object_identity, value):
handler = self._get_handler('set')
return handler._command(handler.cmd_gen.setCmd, ObjectType(object_identity, value))
def get_raw_handler(self, action):
return self._get_handler(action)
def _get_handler(self, action):
mib_path = os.path.abspath(os.path.join(os.path.dirname(__file__), 'mibs'))
snmp_parameters = self._get_snmp_parameters(action)
handler = QualiSnmp(snmp_parameters, self.logger)
handler.update_mib_sources(mib_path)
handler.load_mib(['Sentry3-MIB'])
return handler
def _get_snmp_parameters(self, action):
if '3' in self.version:
return SNMPV3Parameters(ip=self.address, snmp_user=self.user, snmp_password=self.password, snmp_private_key=self.private_key)
else:
if action.lower() == 'set':
# community = self.community_write
return SNMPV2WriteParameters(ip=self.address, snmp_write_community=self.community_write)
else:
# community = self.community_read
return SNMPV2ReadParameters(ip=self.address, snmp_read_community=self.community_read)
# return SNMPV2Parameters(ip=self.address, snmp_community=community)
|
[
"kimo.s@qualisystems.com"
] |
kimo.s@qualisystems.com
|
31f11bd5af7be8e6f6f866746a0bc3861a325095
|
ee7350af73f18b35b832cef5f2eb5be8beacfe23
|
/src/logger.py
|
2c0b778404a5cce1710699c711d203f1b48dc8ee
|
[] |
no_license
|
alexandr7035/vk-lftable
|
6cce830d965e3be9c4821a15413173fe1b21162c
|
ce15c325e0e5534d2b3eb7ef9ea7b08233353ccd
|
refs/heads/master
| 2023-05-14T16:47:05.428477
| 2022-11-06T15:14:55
| 2022-11-06T15:14:55
| 179,868,806
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 652
|
py
|
import os
import sys
import logging
from src.static import log_dir, log_file
# Don't run if can't create logging dir
# Nothing will work without logging
if not os.path.exists(log_dir):
try:
os.mkdir(log_dir)
except Exception:
print("CRITICAL ERROR: can't create log directory '" + log_dir + "'. Exit")
sys.exit()
logging_filename = log_file
logger = logging.getLogger('lftable')
logger.setLevel(logging.DEBUG)
filehandler = logging.FileHandler(filename=logging_filename)
filehandler.setFormatter(logging.Formatter('%(filename)s [LINE:%(lineno)d]# %(levelname)-8s [%(asctime)s] %(message)s'))
logger.addHandler(filehandler)
|
[
"alexandr7035.adm@gmail.com"
] |
alexandr7035.adm@gmail.com
|
6741004fa36fcd3910d7d35422f5b7222fb73074
|
9f8eb9b80fb64fa00e64506cf747c5e0ff3c7b4a
|
/src/model/Data_Sources.py
|
60ba4b396ed7cb4ced38faa9228c6ad65320d9a4
|
[] |
no_license
|
udianand/CreditSuisseDemo
|
12c92a1dfcdcf9f7bcc36b5139abba2cdff85cd0
|
d1dc63b67fe181b8720cbcf08aab732dbf180358
|
refs/heads/master
| 2020-05-07T11:20:33.529586
| 2019-11-09T23:59:13
| 2019-11-09T23:59:13
| 180,457,636
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,970
|
py
|
import csv
import json
import sys
import re
import os
import string
from textblob import TextBlob
from tweepy import OAuthHandler
from tweepy import API
from tweepy import Cursor
from datetime import datetime, date, time, timedelta
from collections import Counter
from simple_salesforce import Salesforce
class Data_Sources:
def __init__(self):
self.consumer_key= 'sljGvj4bKLY9LPKlHpySGuMW5'
self.consumer_secret= 'MGDfahGilqS0pJBxLOVk5FPAfqluLs51XCzZUWdd8TF2QuQ7WQ'
self.access_token = '2894483640-G5e7wjEW8FoKCmCrq8lWG0OR2E1e07gqympzid9'
self.access_secret= 'D8EMLNXFT3SiaXpKjeWBX5f0cxbOjBdFjqvbek5YR2QxA'
self.auth = OAuthHandler(self.consumer_key, self.consumer_secret)
self.auth.set_access_token(self.access_token, self.access_secret)
self.auth_api = API(self.auth)
self.sf = None
#def open_sf_connection(self):
#self.sf = Salesforce(username='anmolsgandhi@gmail.com', password='Password575757', security_token='R0bSAm9uQT0A3pol6Cl0Q0L9D')
def get_twitter_info(self,usernames):
self.account_list = usernames
self.twitter_obj = {}
if len(self.account_list) > 0:
for self.target in self.account_list:
self.hashtags = []
self.mentions = []
self.tweet_count = 0
self.end_date = datetime.utcnow() - timedelta(days=300)
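                # Walk the user's timeline and stop once tweets fall outside this 300-day window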
for self.status in Cursor(self.auth_api.user_timeline, id=self.target).items():
self.tweet_count += 1
if hasattr(self.status, "entities"):
self.entities = self.status.entities
if "hashtags" in self.entities:
for self.ent in self.entities["hashtags"]:
if self.ent is not None:
if "text" in self.ent:
self.hashtag = self.ent["text"]
if self.hashtag is not None:
self.hashtags.append(self.hashtag)
if "user_mentions" in self.entities:
for self.ent in self.entities["user_mentions"]:
if self.ent is not None:
if "screen_name" in self.ent:
self.name = self.ent["screen_name"]
if self.name is not None:
self.mentions.append(self.name)
if self.status.created_at < self.end_date:
break
self.mentions_d = {}
self.hashtags_d = {}
for self.item, self.count in Counter(self.mentions).most_common(10):
self.mentions_d[self.item] = self.count
for self.item, self.count in Counter(self.hashtags).most_common(10):
self.hashtags_d[self.item] = self.count
self.number_of_tweets=10
self.tweets = self.auth_api.user_timeline(screen_name=self.target)
self.sentiment_d ={}
self.tweets_for_csv = [tweet.text for tweet in self.tweets] # CSV file created
for self.j in self.tweets_for_csv:
self.sentence = self.j
self.exclude = set(string.punctuation)
self.sentence = ''.join(ch for ch in self.sentence if ch not in self.exclude or ch == ":" or ch == "#")
self.sentence = re.sub(r"https\S+", "", self.sentence)
self.sentence = re.sub(' +', ' ', self.sentence)
self.blob = TextBlob(self.sentence)
if(self.blob.sentiment.polarity > 0):
self.sentiment = "Positive"
elif(self.blob.sentiment.polarity == 0):
self.sentiment = "Neutral"
else:
self.sentiment = "Negative"
self.sentiment_d[self.sentence] = self.sentiment
for i,j in zip(["sentiment","mentions","hashtags"],[self.sentiment_d,self.mentions_d,self.hashtags_d]):
self.twitter_obj[i] = j
return(self.twitter_obj)
def get_email_info_from_sf(self, to_address):
user_name = "foobar@dummy.com"
pass_word = "thisdemoislit!!!123"
security_token = "kMWcBvuvgYsPCCTi5idSDfSy"
if to_address == None:
to_address = "pierreebayomidyar@gmail.com"
self.sf = Salesforce(username=user_name, password=pass_word, security_token= security_token)
salesforce_query_result = self.sf.query_all("SELECT CreatedDate, Subject, TextBody, ToAddress, FromAddress FROM EmailMessage Where ToAddress = '%s'" %to_address)
salesforce_email_obj = {}
num_of_emails = len(salesforce_query_result["records"])
for i in range(0,num_of_emails):
email_record = salesforce_query_result["records"][i]
email_record_url = email_record["attributes"]["url"]
email_url_list = email_record_url.split('/')
email_record_identification = email_url_list[len(email_url_list)-1]
salesforce_email_obj[email_record_identification] = {}
for names in ["CreatedDate","Subject","TextBody", "ToAddress", "FromAddress"]:
salesforce_email_obj[email_record_identification][names] = email_record[names]
return(salesforce_email_obj)
def create_json(self, csv_file, firstName):
csv_rows = []
with open(csv_file) as csvfile:
reader = csv.DictReader(csvfile)
title = reader.fieldnames
for row in reader:
fName = row['firstName']
fName = fName.replace(" ","")
if fName == firstName:
csv_rows.extend([{title[i]: row[title[i]] for i in range(len(title))}])
return json.dumps(csv_rows, sort_keys=False, indent=4, separators=(',',':'), ensure_ascii = False)
|
[
"uditanand@ymail.com"
] |
uditanand@ymail.com
|
e7b4590b7a59bb17f004f3ac0f95f6b36aca1b32
|
a6e4a6f0a73d24a6ba957277899adbd9b84bd594
|
/sdk/python/pulumi_azure_native/media/v20180330preview/get_job.py
|
7081ab36e5fa9f8008edc6e909cf945868dead86
|
[
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
MisinformedDNA/pulumi-azure-native
|
9cbd75306e9c8f92abc25be3f73c113cb93865e9
|
de974fd984f7e98649951dbe80b4fc0603d03356
|
refs/heads/master
| 2023-03-24T22:02:03.842935
| 2021-03-08T21:16:19
| 2021-03-08T21:16:19
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,491
|
py
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'GetJobResult',
'AwaitableGetJobResult',
'get_job',
]
@pulumi.output_type
class GetJobResult:
"""
A Job resource type. The progress and state can be obtained by polling a Job or subscribing to events using EventGrid.
"""
def __init__(__self__, created=None, description=None, id=None, input=None, last_modified=None, name=None, outputs=None, priority=None, state=None, type=None):
if created and not isinstance(created, str):
raise TypeError("Expected argument 'created' to be a str")
pulumi.set(__self__, "created", created)
if description and not isinstance(description, str):
raise TypeError("Expected argument 'description' to be a str")
pulumi.set(__self__, "description", description)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if input and not isinstance(input, dict):
raise TypeError("Expected argument 'input' to be a dict")
pulumi.set(__self__, "input", input)
if last_modified and not isinstance(last_modified, str):
raise TypeError("Expected argument 'last_modified' to be a str")
pulumi.set(__self__, "last_modified", last_modified)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if outputs and not isinstance(outputs, list):
raise TypeError("Expected argument 'outputs' to be a list")
pulumi.set(__self__, "outputs", outputs)
if priority and not isinstance(priority, str):
raise TypeError("Expected argument 'priority' to be a str")
pulumi.set(__self__, "priority", priority)
if state and not isinstance(state, str):
raise TypeError("Expected argument 'state' to be a str")
pulumi.set(__self__, "state", state)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def created(self) -> str:
"""
The UTC date and time when the Job was created, in 'YYYY-MM-DDThh:mm:ssZ' format.
"""
return pulumi.get(self, "created")
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
Optional customer supplied description of the Job.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def id(self) -> str:
"""
Fully qualified resource ID for the resource.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def input(self) -> Any:
"""
The inputs for the Job.
"""
return pulumi.get(self, "input")
@property
@pulumi.getter(name="lastModified")
def last_modified(self) -> str:
"""
The UTC date and time when the Job was last updated, in 'YYYY-MM-DDThh:mm:ssZ' format.
"""
return pulumi.get(self, "last_modified")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the resource.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def outputs(self) -> Sequence['outputs.JobOutputAssetResponse']:
"""
The outputs for the Job.
"""
return pulumi.get(self, "outputs")
@property
@pulumi.getter
def priority(self) -> Optional[str]:
"""
Priority with which the job should be processed. Higher priority jobs are processed before lower priority jobs. If not set, the default is normal.
"""
return pulumi.get(self, "priority")
@property
@pulumi.getter
def state(self) -> str:
"""
The current state of the job.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter
def type(self) -> str:
"""
The type of the resource.
"""
return pulumi.get(self, "type")
class AwaitableGetJobResult(GetJobResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetJobResult(
created=self.created,
description=self.description,
id=self.id,
input=self.input,
last_modified=self.last_modified,
name=self.name,
outputs=self.outputs,
priority=self.priority,
state=self.state,
type=self.type)
def get_job(account_name: Optional[str] = None,
job_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
transform_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetJobResult:
"""
A Job resource type. The progress and state can be obtained by polling a Job or subscribing to events using EventGrid.
:param str account_name: The Media Services account name.
:param str job_name: The Job name.
:param str resource_group_name: The name of the resource group within the Azure subscription.
:param str transform_name: The Transform name.
"""
__args__ = dict()
__args__['accountName'] = account_name
__args__['jobName'] = job_name
__args__['resourceGroupName'] = resource_group_name
__args__['transformName'] = transform_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:media/v20180330preview:getJob', __args__, opts=opts, typ=GetJobResult).value
return AwaitableGetJobResult(
created=__ret__.created,
description=__ret__.description,
id=__ret__.id,
input=__ret__.input,
last_modified=__ret__.last_modified,
name=__ret__.name,
outputs=__ret__.outputs,
priority=__ret__.priority,
state=__ret__.state,
type=__ret__.type)
|
[
"noreply@github.com"
] |
MisinformedDNA.noreply@github.com
|
086838af8d465cf4bf83f4c8add16eebae10e673
|
03651baa50f617c1915c2e0338a02a0c081bcadf
|
/rbm_generate.py
|
d31a7c7d858b42f1ffc042c0144678b9d5e07915
|
[] |
no_license
|
jxihong/music_generator
|
5a056a563314565c6c66645af76f337b0e073e40
|
37219fa4764ea7a778c72d7145f598548c035b3d
|
refs/heads/master
| 2020-12-30T16:02:49.931953
| 2017-06-10T06:17:42
| 2017-06-10T06:17:42
| 90,955,635
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,533
|
py
|
from dbn import *
from midi_parser import *
import glob
def get_songs(path):
files = glob.glob('{}/*.mid*'.format(path))
songs = []
for f in files:
try:
song = np.array(midiToStatematrix(f))
if np.array(song).shape[0] > 50:
songs.append(song)
except:
# Just ignore songs that can't be parsed
continue
return songs
if __name__=='__main__':
songs = get_songs('Jazz_Music_Midi')
print "{} songs processed".format(len(songs))
### HyperParameters
lowest_note = 24
highest_note = 102
note_range = highest_note-lowest_note
num_timesteps = 5
# Size of input layer
input_len = 2 * note_range * num_timesteps
model = RBM()
X = []
for song in songs:
song = np.array(song)
# Round down to nearest multiple
song = song[:int(np.floor((song.shape[0]/num_timesteps) * num_timesteps))]
# Reshape into blocks of num_timesteps
song = np.reshape(song, [song.shape[0]/num_timesteps, song.shape[1]*num_timesteps])
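        # e.g. assuming each row has 2*note_range = 156 columns, a 500-row song
        # becomes 100 training vectors of length 780 when num_timesteps = 5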
X.extend(song)
X = np.array(X)
model.fit(X)
gen = model.sample(np.zeros((10, input_len)))
for i in range(gen.shape[0]):
if not any(gen[i, :]):
continue
#Here we reshape the vector to be time x notes, and then save the vector as a midi file
s = np.reshape(gen[i,:], (num_timesteps, 2*note_range))
statematrixToMidi(s, "generated_{}".format(i))
|
[
"jxihong@gmail.com"
] |
jxihong@gmail.com
|
cc70a3e5655aa2cbbfd7065fa7a376a4e758a112
|
33a6696efdaf3f91756c6a4db61c9fc376d55086
|
/main.py
|
2f033dfd9a3eabe4408f6b3251b978037f662a7e
|
[] |
no_license
|
AsadMadaminov/issiqnonbot
|
67844abb70fe8a1c86f38955785f3b64db076ed4
|
86050670287afe5e9131a0eb14c60d8dcb2077ba
|
refs/heads/master
| 2023-03-14T09:16:54.537567
| 2021-03-08T11:41:08
| 2021-03-08T11:41:08
| 344,433,240
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 29,626
|
py
|
import telebot
from telebot import types
import mysql.connector
mydb = mysql.connector.connect(
host="localhost",
user="root",
password="",
database="issiqnonbot"
)
mycursor = mydb.cursor()
# sql = "INSERT INTO test (chat_id) VALUES (%s)"
# sql1 = "INSERT INTO test (chat_id) VALUES (%s)"
# sql2 = "UPDATE test SET name = %s WHERE test.chat_id = %s"
# sql3 = "UPDATE test SET number = %s WHERE test.chat_id = %s"
# sql4 = "UPDATE test SET location = %s WHERE test.chat_id = %s"
# sql5 = "UPDATE test SET set = %s WHERE test.chat_id = %s"
# val = (message.text, message.chat.id)
#
# mycursor.execute(sql, val)
# mydb.commit()
# print(message.text)
name = ''
surname = ''
age = 0
number=0
bot = telebot.TeleBot("1522468171:AAEWg026vU5I3SXMc_OdA8L5HPg_vE72z7Y")
bot.setWebhook()
user_data={}
@bot.message_handler(commands=['start'])
def send_welcome(message):
#
sqldel = "DELETE FROM test WHERE chat_id = %s"
adr = (message.chat.id,)
mycursor.execute(sqldel, adr)
mydb.commit()
#
# print(mycursor.rowcount, "record(s) deleted")
#
sql = "INSERT INTO test (chat_id, name) VALUES (%s, %s)"
val = (message.chat.id, "New")
mycursor.execute(sql, val)
mydb.commit()
# print(message.text)
# keyboard
markup = types.InlineKeyboardMarkup(row_width=2)
item1 = types.InlineKeyboardButton("🇺🇸", callback_data="states")
item2 = types.InlineKeyboardButton("🇷🇺", callback_data="russia")
item3 = types.InlineKeyboardButton("🇺🇿", callback_data="uzb")
markup.add(item1, item2, item3)
bot.send_message(message.chat.id, "🇺🇿\nHush kelibsiz, {0.first_name}!\nMening ismim <b>{1.first_name}</b>. Bizning hizmatimizdan foydalanayotganingizdan mamnunmiz!\n👇🏾Iltimos, tilni tanglang"
"\n\n🇷🇺\nДобро пожаловать, {0.first_name}!\nМоё имя <b>{1.first_name}</b>. Мы рады, что Вы пользуетесь нашим сервисом!\n👇🏾Пожалуйста, выберите язык"
"\n\n🇺🇸\nWelcome, {0.first_name}!\nMy name is <b>{1.first_name}</b>. We are pleasant that You are using our service!\n👇🏾Please, choose the language".format(message.from_user,bot.get_me()),
parse_mode= "html", reply_markup=markup)
@bot.callback_query_handler(func=lambda call:True)
def callback_inline(call):
try:
if call.message:
if call.data == 'states':
bot.send_message(call.message.chat.id, "Please, input your full name")
bot.register_next_step_handler(call.message, echo_usa)
elif call.data == 'russia':
bot.send_message(call.message.chat.id, "Пожалуйста, введите ваше имя и фамилию")
bot.register_next_step_handler(call.message, echo_russia)
elif call.data == 'uzb':
bot.send_message(call.message.chat.id, "Iltimos, ismingiz va familiyangizni yozing")
bot.register_next_step_handler(call.message, echo_uzb)
# print(call.message.chat.id)
# val1 = call.message.chat.id
# mycursor.execute(sql1, val1)
# mydb.commit()
print(call.data)
sql = "UPDATE `test` SET `lang` = %s WHERE `test`.`chat_id` = %s"
val = (call.data, call.message.chat.id)
mycursor.execute(sql, val)
mydb.commit()
except Exception as e:
print(repr(e))
#--------------START: if the user chose the USA-----------------------
def echo_usa(message):
try:
if ((message.text>='a' and message.text<='z') or (message.text >="A" and message.text<="Z") or (message.text>='а' and message.text<='я') or (message.text>='А' and message.text<='Я')):
print(message.text)
sql = "UPDATE `test` SET `name` = %s WHERE `test`.`chat_id` = %s"
val = (message.text, message.chat.id)
mycursor.execute(sql, val)
mydb.commit()
keyboard = types.ReplyKeyboardMarkup(one_time_keyboard=True, row_width=1, resize_keyboard=True)
button_phone = types.KeyboardButton(text="Send phone contact", request_contact=True)
keyboard.add((button_phone))
bot.send_message(message.chat.id, " Please, share with Your contact.", reply_markup=keyboard)
bot.register_next_step_handler(message, location_usa)
else:
bot.send_message(message.chat.id,"Please,write you name correctly")
bot.register_next_step_handler(message, echo_usa)
except Exception as e:
print(repr(e))
#bot.reply_to(message, message.text)
def location_usa(message):
print(message.contact)
if (message.contact):
print(message.contact.phone_number)
sql = "UPDATE `test` SET `number` = %s WHERE `test`.`chat_id` = %s"
val = (message.contact.phone_number, message.chat.id)
mycursor.execute(sql, val)
mydb.commit()
keyboard = types.ReplyKeyboardMarkup(one_time_keyboard=True, resize_keyboard=True)
button_geo = types.KeyboardButton(text="Send location", request_location=True)
keyboard.add(button_geo)
bot.send_message(message.chat.id, "Please, share with Your location.", reply_markup=keyboard)
bot.register_next_step_handler(message, oursets_usa)
else:
print(message.text)
bot.send_message(message.chat.id, "You must to share your contact")
bot.register_next_step_handler(message,location_usa)
def oursets_usa(message):
print(message.location)
if (message.location):
sql = "UPDATE `test` SET `latitude` = %s, `longitude` = %s WHERE `test`.`chat_id` = %s"
val = (message.location.latitude, message.location.longitude, message.chat.id)
mycursor.execute(sql, val)
mydb.commit()
keyboard = types.ReplyKeyboardMarkup(one_time_keyboard=True, resize_keyboard=True)
set1choose = types.KeyboardButton(text="Set 1")
set2choose = types.KeyboardButton(text="Set 2")
set3choose = types.KeyboardButton(text="Customer Order")
keyboard.add(set1choose, set2choose, set3choose)
bot.send_message(message.chat.id, "Hot brad for the breakfast every day🤩"
"\nPlease check our options out👇🏾"
"\n\n🔸Set 1:"
"\n Delivery of 2 hot breads every day during 1 month"
"\n Price: 120 000 sum + delivery cost"
"\n\n🔸Set 2:"
"\n Delivery of 4 hot breads every day during 1 month "
"\n Price: 240 000 sum + delivery cost"
"\n\n🔸 Custom order"
"\n Your conditions and our offer "
"\n", reply_markup=keyboard)
else:
print(message.text)
bot.send_message(message.chat.id, "You must to share your location(no need to write)")
bot.register_next_step_handler(message,oursets_usa)
@bot.message_handler(content_types=['text'])
def lalala_usa(message):
print(message)
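    # Dispatch on the reply-keyboard button text (English, Russian and Uzbek variants)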
if message.text=="Set 1":
sql = "UPDATE `test` SET `set` = %s WHERE `test`.`chat_id` = %s"
val = (message.text, message.chat.id)
mycursor.execute(sql, val)
mydb.commit()
keyboard = types.ReplyKeyboardMarkup(one_time_keyboard=True, resize_keyboard=True)
set1choose = types.KeyboardButton(text="CONFIRM")
set2choose = types.KeyboardButton(text="REFRESH")
keyboard.add(set1choose, set2choose)
bot.send_message(message.chat.id, "🔸 Set #1\n\n"
"💳 2 hot breads for the breakfast everyday during 1 month \n"
"2000 som * 2 breads * 30 days = 120 000 som \n\n"
"🏎 Delivery cost:\n"
"5000 som (up to 5 breads) * 30 days = 150 000 som\n\n"
"🕒 The delivery will be executed during 30 mins or less\n\n"
"💳 Total cost: 270 000 som\n\n"
"✅ Please confirm your subscription"
"\n", reply_markup=keyboard)
elif message.text=="Set 2":
sql = "UPDATE `test` SET `set` = %s WHERE `test`.`chat_id` = %s"
val = (message.text, message.chat.id)
mycursor.execute(sql, val)
mydb.commit()
keyboard = types.ReplyKeyboardMarkup(one_time_keyboard=True, resize_keyboard=True)
set1choose = types.KeyboardButton(text="CONFIRM")
set2choose = types.KeyboardButton(text="REFRESH")
keyboard.add(set1choose, set2choose)
bot.send_message(message.chat.id, "🔸 Set #2\n\n"
"📌 4 hot breads for the breakfast everyday during 1 month\n"
"2000 som * 4 breads * 30 days = 240 000 som\n\n"
"🏎 Delivery cost:\n"
" 5000 som (up to 5 breads) * 30 days = 150 000 som\n\n"
"🕒 The delivery will be executed during 30 mins or less\n\n"
"💳 Total cost: 390 000 som\n\n"
"✅ Please confirm your subscription", reply_markup=keyboard)
elif message.text=="CONFIRM":
sql = "UPDATE `test` SET `CONFIRMATION` = %s WHERE `test`.`chat_id` = %s"
val = (message.text, message.chat.id)
mycursor.execute(sql, val)
mydb.commit()
bot.send_message(message.chat.id, "We will contact you as soon as possible")
elif message.text == "REFRESH":
bot.register_next_step_handler(message,send_welcome)
elif message.text =="Customer Order":
sql = "UPDATE `test` SET `set` = %s WHERE `test`.`chat_id` = %s"
val = (message.text, message.chat.id)
mycursor.execute(sql, val)
mydb.commit()
bot.send_message(message.chat.id,"Please write down your offers and our Client Service Officer will contact you as soon as possible")
bot.register_next_step_handler(message,ifwrittencustomorder_usa)
    # ------------------------------------------------
    # If the user selects sets in the Russian interface
    # ------------------------------------------------
elif message.text=="ПОДТВЕРДИТЬ":
sql = "UPDATE `test` SET `CONFIRMATION` = %s WHERE `test`.`chat_id` = %s"
val = (message.text, message.chat.id)
mycursor.execute(sql, val)
mydb.commit()
bot.send_message(message.chat.id, "Наш менеджер свяжеться с вами")
elif message.text == "ЗАНОВО":
bot.register_next_step_handler(message,send_welcome)
elif message.text=="Сет 1":
sql = "UPDATE `test` SET `set` = %s WHERE `test`.`chat_id` = %s"
val = (message.text, message.chat.id)
mycursor.execute(sql, val)
mydb.commit()
keyboard = types.ReplyKeyboardMarkup(one_time_keyboard=True, resize_keyboard=True)
set1choose = types.KeyboardButton(text="ПОДТВЕРДИТЬ")
set2choose = types.KeyboardButton(text="ЗАНОВО")
keyboard.add(set1choose, set2choose)
bot.send_message(message.chat.id, "🔸 Сет #1\n\n"
"📌 2 горячие лепёшки на завтрак в течении 1 месяца \n"
"2000 сум * 4 лепёшки * 30 дней = 120 000 сум \n\n"
"🏎 Цена доставки:\n"
"5000 сум (до 5 лепёшек) * 30 дней = 150 000 сум\n\n"
"🕒 Доставка осуществляется в меньше чем за 30 минут\n\n"
"💳 Общая цена: 270 000 сум\n\n"
"✅ Пожалуйста подтвердите заказ",reply_markup=keyboard)
elif message.text=="Сет 2":
sql = "UPDATE `test` SET `set` = %s WHERE `test`.`chat_id` = %s"
val = (message.text, message.chat.id)
mycursor.execute(sql, val)
mydb.commit()
keyboard = types.ReplyKeyboardMarkup(one_time_keyboard=True, resize_keyboard=True)
set1choose = types.KeyboardButton(text="ПОДТВЕРДИТЬ")
set2choose = types.KeyboardButton(text="ЗАНОВО")
keyboard.add(set1choose, set2choose)
bot.send_message(message.chat.id, "🔸 Сет #2\n\n"
"📌 4 горячие лепёшки на завтрак в течении 1 месяца \n"
" 2000 сум * 4 лепёшки * 30 дней = 240 000 сум\n\n"
"🏎 Цена доставки:\n"
" 5000 сум (до 5 лепёшек) * 30 дней = 150 000 сум\n\n"
"🕒 Доставка осуществляется в меньше чем за 30 минут\n\n"
"💳 Общая цена: 390 000 сум\n\n"
"✅ Пожалуйста подтвердите заказ",reply_markup=keyboard)
elif message.text=="Особый заказ":
sql = "UPDATE `test` SET `set` = %s WHERE `test`.`chat_id` = %s"
val = (message.text, message.chat.id)
mycursor.execute(sql, val)
mydb.commit()
bot.send_message(message.chat.id,
"Пожалуйста, напишите Ваши предложения и наш Менеджер по работе с клиентами")
bot.register_next_step_handler(message, ifwrittencustomorder_ru)
    # END: if the user selects sets in the Russian interface
    # If the user selects sets in the Uzbek interface
elif message.text == "Tasdiqlash":
sql = "UPDATE `test` SET `CONFIRMATION` = %s WHERE `test`.`chat_id` = %s"
val = (message.text, message.chat.id)
mycursor.execute(sql, val)
mydb.commit()
bot.send_message(message.chat.id, "Bizning menedjeremiz siz bilan aloqaga chikadi.")
elif message.text == "Qaytadan":
bot.register_next_step_handler(message, send_welcome)
elif message.text == "Set №1":
sql = "UPDATE `test` SET `set` = %s WHERE `test`.`chat_id` = %s"
val = (message.text, message.chat.id)
mycursor.execute(sql, val)
mydb.commit()
keyboard = types.ReplyKeyboardMarkup(one_time_keyboard=True, resize_keyboard=True)
set1choose = types.KeyboardButton(text="Tasdiqlash")
set2choose = types.KeyboardButton(text="Qaytadan")
keyboard.add(set1choose, set2choose)
bot.send_message(message.chat.id, "🔸 Set #1\n\n"
"💳 1 oy davomida nonushtaga 2 tadan qaynoq non \n"
"2000 sum * 2 qaynoq non * 30 kun = 120 000 so'm \n\n"
"🏎 Yetkazib berish narxi:\n"
"5000 so'm (5ta nongacha) * 30 kun = 150 000 сум\n\n"
"🕒 Yetkazib berish 30 minut ichida amalga oshirilad\n\n"
"💳 Umumiy Narx: 270 000 сум\n\n"
"✅ Iltimos buyurtmani tasdiqlang",reply_markup=keyboard)
elif message.text == "Set №2":
sql = "UPDATE `test` SET `set` = %s WHERE `test`.`chat_id` = %s"
val = (message.text, message.chat.id)
mycursor.execute(sql, val)
mydb.commit()
keyboard = types.ReplyKeyboardMarkup(one_time_keyboard=True, resize_keyboard=True)
set1choose = types.KeyboardButton(text="Tasdiqlash")
set2choose = types.KeyboardButton(text="Qaytadan")
keyboard.add(set1choose, set2choose)
bot.send_message(message.chat.id, "🔸 Set #2\n\n"
"💳 1 oy davomida nonushtaga 2 tadan qaynoq non \n"
"2000 sum * 4 qaynoq non * 30 kun = 240 000 so'm \n\n"
"🏎 Yetkazib berish narxi:\n"
"5000 so'm (5ta nongacha) * 30 kun = 150 000 сум\n\n"
"🕒 Yetkazib berish 30 minut ichida amalga oshirilad\n\n"
"💳 Umumiy Narx: 390 000 сум\n\n"
"✅ Iltimos buyurtmani tasdiqlang",reply_markup=keyboard)
elif message.text == "Maxsus buyurtma":
sql = "UPDATE `test` SET `set` = %s WHERE `test`.`chat_id` = %s"
val = (message.text, message.chat.id)
mycursor.execute(sql, val)
mydb.commit()
bot.send_message(message.chat.id,
"Iltimos, O'zingizni shartlaringizni yozing va mijozlar bilan ishlash bo'limining xodimi siz bilan bog'lanadi")
bot.register_next_step_handler(message, ifwrittencustomorder_uz)
def ifwrittencustomorder_usa(message):
sql = "UPDATE `test` SET `CUSTOM` = %s WHERE `test`.`chat_id` = %s"
val = (message.text, message.chat.id)
mycursor.execute(sql, val)
mydb.commit()
keyboard = types.ReplyKeyboardMarkup(one_time_keyboard=True, resize_keyboard=True)
set1choose = types.KeyboardButton(text="CONFIRM")
set2choose = types.KeyboardButton(text="REFRESH")
keyboard.add(set1choose, set2choose)
bot.send_message(message.chat.id, "✅ Please confirm your subscription"
"\n", reply_markup=keyboard)
def ifwrittencustomorder_ru(message):
sql = "UPDATE `test` SET `CUSTOM` = %s WHERE `test`.`chat_id` = %s"
val = (message.text, message.chat.id)
mycursor.execute(sql, val)
mydb.commit()
keyboard = types.ReplyKeyboardMarkup(one_time_keyboard=True, resize_keyboard=True)
set1choose = types.KeyboardButton(text="ПОДТВЕРДИТЬ")
set2choose = types.KeyboardButton(text="ЗАНОВО")
keyboard.add(set1choose, set2choose)
bot.send_message(message.chat.id, "✅ Пожалуйста, подтвердите ваш заказ"
"\n", reply_markup=keyboard)
def ifwrittencustomorder_uz(message):
sql = "UPDATE `test` SET `CUSTOM` = %s WHERE `test`.`chat_id` = %s"
val = (message.text, message.chat.id)
mycursor.execute(sql, val)
mydb.commit()
keyboard = types.ReplyKeyboardMarkup(one_time_keyboard=True, resize_keyboard=True)
set1choose = types.KeyboardButton(text="Tasdiqlash")
set2choose = types.KeyboardButton(text="Qaytadan")
keyboard.add(set1choose, set2choose)
bot.send_message(message.chat.id, "✅ Iltimos, buyurtmani tasdiqlang!"
"\n", reply_markup=keyboard)
# END: if the user selects sets in the Uzbek interface
# --------------FINISH: if the user chose the USA-----------------------
# --------------START: if the user chose RUSSIA-----------------------
@bot.message_handler(func=lambda m: True)
def echo_russia(message):
try:
if ((message.text >= 'a' and message.text <= 'z') or (message.text >= "A" and message.text <= "Z") or (
message.text >= 'а' and message.text <= 'я') or (message.text >= 'А' and message.text <= 'Я')):
print(message.text)
sql = "UPDATE `test` SET `name` = %s WHERE `test`.`chat_id` = %s"
val = (message.text, message.chat.id)
mycursor.execute(sql, val)
mydb.commit()
keyboard = types.ReplyKeyboardMarkup(one_time_keyboard=True, row_width=1, resize_keyboard=True)
button_phone = types.KeyboardButton(text="Отправить свой номер", request_contact=True)
keyboard.add((button_phone))
bot.send_message(message.chat.id, " Пожалуйста, отправьте свой номер.", reply_markup=keyboard)
bot.register_next_step_handler(message, location_russia)
else:
bot.send_message(message.chat.id, "Пожалуйста, напишите свое имя правильно")
bot.register_next_step_handler(message, echo_usa)
except Exception as e:
print(repr(e))
def location_russia(message):
print(message.contact)
if (message.contact):
print(message.contact.phone_number)
sql = "UPDATE `test` SET `number` = %s WHERE `test`.`chat_id` = %s"
val = (message.contact.phone_number, message.chat.id)
mycursor.execute(sql, val)
mydb.commit()
keyboard = types.ReplyKeyboardMarkup(one_time_keyboard=True, resize_keyboard=True)
button_geo = types.KeyboardButton(text="Отправить местоположение", request_location=True)
keyboard.add(button_geo)
bot.send_message(message.chat.id, "Пожалуйста, поделитесь своим местоположением.", reply_markup=keyboard)
bot.register_next_step_handler(message, oursets_russia)
else:
print(message.text)
bot.send_message(message.chat.id, "Вы должны переслать свои контакты")
bot.register_next_step_handler(message,location_russia)
def oursets_russia(message):
if (message.location):
print(message.location)
sql = "UPDATE `test` SET `latitude` = %s, `longitude` = %s WHERE `test`.`chat_id` = %s"
val = (message.location.latitude, message.location.longitude, message.chat.id)
mycursor.execute(sql, val)
mydb.commit()
keyboard = types.ReplyKeyboardMarkup(one_time_keyboard=True, resize_keyboard=True)
set1chooserus = types.KeyboardButton(text="Сет 1")
set2chooserus= types.KeyboardButton(text="Сет 2")
set3chooserus = types.KeyboardButton(text="Особый заказ")
keyboard.add(set1chooserus, set2chooserus, set3chooserus)
bot.send_message(message.chat.id,"Горячие лепешки на завтрак каждый день🤩"
"\nПожайлуста, выберите опции 👇🏾"
"\n\n🔸Сет 1:"
"\n Доставка 2х горячих лепёшек в течении 1 месяца"
"\n Цена: 120 000 сум + плата за доставку"
"\n\n🔸Сет 2:"
"\n Доставка 4х горячих лепёшек в течении 1 месяца "
"\n Цена: 240 000 сум + плата за доставку"
"\n\n🔸Особый заказ"
"\n Ваши условия и наше предложение "
"\n", reply_markup=keyboard)
else:
print(message.text)
bot.send_message(message.chat.id, "Вы должны указать свое местоположение (писать не нужно)")
bot.register_next_step_handler(message, oursets_usa)
# --------------FINISH: if the user chose RUSSIA-----------------------
# --------------START: if the user chose UZBEKISTAN-----------------------
@bot.message_handler(func=lambda m: True)
def echo_uzb(message):
try:
if ((message.text >= 'a' and message.text <= 'z') or (message.text >= "A" and message.text <= "Z") or (message.text >= 'а' and message.text <= 'я') or (message.text >= 'А' and message.text <= 'Я')):
print(message.text)
sql = "UPDATE `test` SET `name` = %s WHERE `test`.`chat_id` = %s"
val = (message.text, message.chat.id)
mycursor.execute(sql, val)
mydb.commit()
keyboard = types.ReplyKeyboardMarkup(one_time_keyboard=True, row_width=1, resize_keyboard=True)
button_phone = types.KeyboardButton(text="Telefon raqamni jo'natmoq", request_contact=True)
keyboard.add((button_phone))
bot.send_message(message.chat.id, " Iltimos, telefon raqamingiz bilan ulashing", reply_markup=keyboard)
bot.register_next_step_handler(message, location_uzb)
else:
bot.send_message(message.chat.id, "Iltimos, ismingizni to'g'ri yozing")
bot.register_next_step_handler(message, echo_uzb)
except Exception as e:
print(repr(e))
def location_uzb(message):
print(message.contact)
if (message.contact):
print(message.contact.phone_number)
sql = "UPDATE `test` SET `number` = %s WHERE `test`.`chat_id` = %s"
val = (message.contact.phone_number, message.chat.id)
mycursor.execute(sql, val)
mydb.commit()
keyboard = types.ReplyKeyboardMarkup(one_time_keyboard=True, row_width=1, resize_keyboard=True)
button_geo = types.KeyboardButton(text="Manzilni jo'natmoq", request_location=True)
keyboard.add(button_geo)
bot.send_message(message.chat.id, "Iltimos, manzilingiz bilan ulashing", reply_markup=keyboard)
bot.register_next_step_handler(message, oursets_uzb)
else:
print(message.text)
bot.send_message(message.chat.id, "Kontaktlaringizni yo'naltirishingiz kerak")
bot.register_next_step_handler(message, location_uzb)
def oursets_uzb(message):
if (message.location):
print(message.location)
sql = "UPDATE `test` SET `latitude` = %s, `longitude` = %s WHERE `test`.`chat_id` = %s"
val = (message.location.latitude, message.location.longitude, message.chat.id)
mycursor.execute(sql, val)
mydb.commit()
keyboard = types.ReplyKeyboardMarkup(one_time_keyboard=True, resize_keyboard=True)
set1choose = types.KeyboardButton(text="Set №1")
set2choose = types.KeyboardButton(text="Set №2")
set3choose = types.KeyboardButton(text="Maxsus buyurtma")
keyboard.add(set1choose, set2choose, set3choose)
bot.send_message(message.chat.id,"Har kuni nonushtaga qaynoq nonlar🤩"
"\nIltimos, imkoniyatlarni tanlang👇🏾"
"\n\n🔸Set 1:"
"\n 1 oy davomida 2ta qaynoq non yetkazib berish"
"\n Narxi: 120 000 so'm + yetkazib berish narxi"
"\n\n🔸Set 2:"
"\n 1 oy davomida 4ta qaynoq non yetkazib berish "
"\n Narxi: 240 000 so'm + yetkazib berish narxi"
"\n\n🔸Maxsus buyurtma"
"\n Sizning shartlaringiz va bizning taklifimiz"
"\n", reply_markup=keyboard)
else:
print(message.text)
bot.send_message(message.chat.id, "Siz joylashgan joyingizni ko'rsatishingiz kerak (yozishning hojati yo'q)")
bot.register_next_step_handler(message, oursets_uzb)
# --------------FINISH: if the user chose UZBEKISTAN-----------------------
bot.polling()
|
[
"madaminovasadulloh2002@gmail.com"
] |
madaminovasadulloh2002@gmail.com
|
6f28587f978afcbd3659af670f15dc2764b8de81
|
4c3e3dd2d1976121ebcae159ea772918bc811c16
|
/rnn_model.py
|
68caf5c06b3971946bc0f7b2746567466e3a4032
|
[] |
no_license
|
MaxLost/rnn-character-classification
|
79b778d11f617f541649e9e4229f11a180cde972
|
12752c013f18a19d0e3e565307a1f1015232bc51
|
refs/heads/main
| 2023-09-03T00:56:50.307036
| 2021-11-17T18:17:24
| 2021-11-17T18:17:24
| 420,234,826
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 944
|
py
|
import torch
import torch.nn as nn
class RNN(nn.Module):
def __init__(self, input_size, hidden_size, output_size):
super(RNN, self).__init__()
self.hidden_size = hidden_size
self.to_hidden_layer = nn.Sequential(
nn.Linear(input_size + hidden_size, hidden_size*2),
nn.ReLU(),
nn.Linear(hidden_size*2, hidden_size)
)
self.to_output = nn.Sequential(
nn.Linear(input_size + hidden_size, hidden_size*2),
nn.ReLU(),
nn.Linear(hidden_size*2, output_size)
)
self.softmax = nn.LogSoftmax(dim=1)
def forward(self, input, hidden):
combined = torch.cat((input, hidden), 1)
hidden = self.to_hidden_layer(combined)
output = self.to_output(combined)
output = self.softmax(output)
return output, hidden
def init_hidden(self):
return torch.zeros(1, self.hidden_size)
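# Illustrative usage sketch (hypothetical sizes, not part of the original training code):
# rnn = RNN(input_size=57, hidden_size=128, output_size=18)
# hidden = rnn.init_hidden()
# output, hidden = rnn(input_tensor, hidden)   # input_tensor shape: (1, input_size)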
|
[
"stepmax14@gmail.com"
] |
stepmax14@gmail.com
|
e44d7dc167483d94d872f514bf14c924d8e4353d
|
bc82eff404b7842336bcb7991b8ba70e3961d624
|
/movie_trailer/main.py
|
ac62b0571eb6cc4b10c8ce062712fef724729979
|
[] |
no_license
|
HassanTC/udacity-nanodegree-projects
|
fcfb4542aa2d7a2fd536b39829eb8269fa63bf11
|
5af157bd2b82eabab9bc3644cc5294a46bad8173
|
refs/heads/master
| 2021-05-04T23:49:06.034275
| 2018-04-14T17:52:39
| 2018-04-14T17:52:39
| 119,426,673
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,740
|
py
|
import movie
import fresh_tomatoes
# Shawshank movie: movie title, poster image and movie trailer
shawshank = movie.Movie(
"Shawshank Redemption",
"https://upload.wikimedia.org/wikipedia/en/8/81/ShawshankRedemptionMoviePoster.jpg", # NOQA
"https://www.youtube.com/watch?v=6hB3S9bIaco")
# forest gump movie: movie title, poster image and movie trailer
forest_gump = movie.Movie(
"Forrest Gump",
"https://upload.wikimedia.org/wikipedia/en/6/67/Forrest_Gump_poster.jpg", # NOQA
"https://www.youtube.com/watch?v=uPIEn0M8su0")
# the theory of Everything movie: movie title, poster image and movie trailer
theory_of_everything = movie.Movie(
"The Theory of Everything",
"https://upload.wikimedia.org/wikipedia/en/b/b8/Theory_of_Everything.jpg",
"https://www.youtube.com/watch?v=Salz7uGp72c")
# Avatar movie: movie title, poster image and movie trailer
avatar = movie.Movie(
"Avatar",
"https://upload.wikimedia.org/wikipedia/en/b/b0/Avatar-Teaser-Poster.jpg",
"https://www.youtube.com/watch?v=5PSNL1qE6VY")
# SAW 2004 movie: movie title, poster image and movie trailer
saw = movie.Movie(
"Saw",
"https://upload.wikimedia.org/wikipedia/en/5/56/Saw_official_poster.jpg",
"https://www.youtube.com/watch?v=S-1QgOMQ-ls")
# the mist movie: movie title, poster image and movie trailer
mist = movie.Movie(
"The Mist",
"https://upload.wikimedia.org/wikipedia/en/a/a1/The_Mist_poster.jpg",
"https://www.youtube.com/watch?v=LhCKXJNGzN8")
# set of movies to create the view list
movies = [
shawshank,
forest_gump,
theory_of_everything,
avatar,
saw,
mist]
# open the browser with the list of movies into HTML view
fresh_tomatoes.open_movies_page(movies)
|
[
"hassan.mahmoud1@outlook.com"
] |
hassan.mahmoud1@outlook.com
|
f4f79fde3c1ab3b41556f659bd750656efb2e315
|
f69b90471efabb97c04c0671cea45de855f222a5
|
/Ethereum/env/bin/easy_install
|
3fb98970e063210594b6e51a7a00629af4050afc
|
[] |
no_license
|
victomteng1997/SENA_project_Ethereum
|
b94324844f9c797a4aaa94790dbdbdfb376ff341
|
382813008a2f95c527ffe72b868340dd384635e7
|
refs/heads/master
| 2021-01-07T20:36:23.004970
| 2020-03-04T08:19:34
| 2020-03-04T08:19:34
| 241,813,331
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 269
|
#!/Users/mark/Documents/lab/flaskintro/env/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
|
[
"deng_gelei@i2r.a-star.edu.sg"
] |
deng_gelei@i2r.a-star.edu.sg
|
|
18376ea574df33c2b0c6b1a04c1202daafe49dba
|
ef8f9777be4beff936b64af8df5b79f840d6086a
|
/gym_miniworld/miniworld.py
|
62ca2a5e9a99cf566836e597be6728ecbcbe0179
|
[
"MIT"
] |
permissive
|
benlansdell/gym-miniworld
|
261d97a108b7cdef6982207a4f126b840404b1e2
|
a97a244489f516e5d84f8648196c33a9d372788b
|
refs/heads/master
| 2020-04-05T01:55:03.633823
| 2018-11-06T21:37:34
| 2018-11-06T21:37:34
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 34,710
|
py
|
import math
from enum import IntEnum
import numpy as np
import gym
from gym import spaces
from .random import *
from .opengl import *
from .objmesh import *
from .entity import *
from .math import *
# Blue sky horizon color
BLUE_SKY_COLOR = np.array([0.45, 0.82, 1])
# Map of color names to RGB values
COLORS = {
'red' : np.array([1, 0, 0]),
'green' : np.array([0, 1, 0]),
'blue' : np.array([0, 0, 1]),
'purple': np.array([0.44, 0.15, 0.76]),
'yellow': np.array([1, 1, 0]),
'grey' : np.array([0.39, 0.39, 0.39])
}
# List of color names, sorted alphabetically
COLOR_NAMES = sorted(list(COLORS.keys()))
# Default wall height for room
DEFAULT_WALL_HEIGHT=2.74
# Texture size/density in texels/meter
TEX_DENSITY = 512
def gen_texcs_wall(
tex,
min_x,
min_y,
width,
height
):
"""
Generate texture coordinates for a wall quad
"""
xc = (TEX_DENSITY / tex.width)
yc = (TEX_DENSITY / tex.height)
min_u = (min_x) * xc
max_u = (min_x + width) * xc
min_v = (min_y) * yc
max_v = (min_y + height) * yc
return np.array(
[
[min_u, min_v],
[min_u, max_v],
[max_u, max_v],
[max_u, min_v],
],
dtype=np.float32
)
def gen_texcs_floor(
tex,
poss
):
"""
Generate texture coordinates for the floor or ceiling
This is done by mapping x,z positions directly to texture
coordinates
"""
texc_mul = np.array(
[
TEX_DENSITY / tex.width,
TEX_DENSITY / tex.height
],
dtype=float
)
coords = np.stack([poss[:,0], poss[:,2]], axis=1) * texc_mul
return coords
class Room:
"""
Represent an individual room and its contents
"""
def __init__(
self,
outline,
wall_height=DEFAULT_WALL_HEIGHT,
floor_tex='floor_tiles_bw',
wall_tex='concrete',
ceil_tex='concrete_tiles',
no_ceiling=False
):
        # The outline should have shape Nx2
assert len(outline.shape) == 2
assert outline.shape[1] == 2
assert outline.shape[0] >= 3
# Add a Y coordinate to the outline points
outline = np.insert(outline, 1, 0, axis=1)
# Number of outline vertices / walls
self.num_walls = outline.shape[0]
# List of 2D points forming the outline of the room
# Shape is Nx3
self.outline = outline
# Compute the min and max x, z extents
self.min_x = self.outline[:, 0].min()
self.max_x = self.outline[:, 0].max()
self.min_z = self.outline[:, 2].min()
self.max_z = self.outline[:, 2].max()
# Compute midpoint coordinates
self.mid_x = (self.max_x + self.min_x) / 2
self.mid_z = (self.max_z + self.min_z) / 2
# Compute approximate surface area
self.area = (self.max_x - self.min_x) * (self.max_z - self.min_z)
# Compute room edge directions and normals
# Compute edge vectors (p1 - p0)
# For the first point, p0 is the last
# For the last point, p0 is p_n-1
next_pts = np.concatenate([self.outline[1:], np.expand_dims(self.outline[0], axis=0)], axis=0)
self.edge_dirs = next_pts - self.outline
self.edge_dirs = (self.edge_dirs.T / np.linalg.norm(self.edge_dirs, axis=1)).T
self.edge_norms = -np.cross(self.edge_dirs, Y_VEC)
self.edge_norms = (self.edge_norms.T / np.linalg.norm(self.edge_norms, axis=1)).T
# Height of the room walls
self.wall_height = wall_height
# No ceiling flag
self.no_ceiling = no_ceiling
# Load the textures
self.wall_tex = Texture.get(wall_tex)
self.floor_tex = Texture.get(floor_tex)
self.ceil_tex = Texture.get(ceil_tex)
# Lists of portals, indexed by wall/edge index
self.portals = [[] for i in range(self.num_walls)]
# List of neighbor rooms
# Same length as list of portals
self.neighbors = []
def add_portal(
self,
edge,
start_pos=None,
end_pos=None,
min_x=None,
max_x=None,
min_z=None,
max_z=None,
min_y=0,
max_y=None
):
"""
Create a new portal/opening in a wall of this room
"""
if max_y == None:
max_y = self.wall_height
assert edge <= self.num_walls
assert max_y > min_y
# Get the edge points, compute the direction vector
e_p0 = self.outline[edge]
e_p1 = self.outline[(edge+1) % self.num_walls]
e_len = np.linalg.norm(e_p1 - e_p0)
e_dir = (e_p1 - e_p0) / e_len
x0, _, z0 = e_p0
x1, _, z1 = e_p1
dx, _, dz = e_dir
# If the portal extents are specified by x coordinates
if min_x != None:
assert min_z == None and max_z == None
assert start_pos == None and end_pos == None
assert x0 != x1
m0 = (min_x - x0) / dx
m1 = (max_x - x0) / dx
if m1 < m0:
m0, m1 = m1, m0
start_pos, end_pos = m0, m1
# If the portal extents are specified by z coordinates
elif min_z != None:
assert min_x == None and max_x == None
assert start_pos == None and end_pos == None
assert z0 != z1
m0 = (min_z - z0) / dz
m1 = (max_z - z0) / dz
if m1 < m0:
m0, m1 = m1, m0
start_pos, end_pos = m0, m1
else:
assert min_x == None and max_x == None
assert min_z == None and max_z == None
assert end_pos > start_pos
assert start_pos >= 0, "portal outside of wall extents"
assert end_pos <= e_len, "portal outside of wall extents"
self.portals[edge].append({
'start_pos': start_pos,
'end_pos': end_pos,
'min_y': min_y,
'max_y': max_y
})
# Sort the portals by start position
self.portals[edge].sort(key=lambda e: e['start_pos'])
return start_pos, end_pos
def point_inside(self, p):
"""
Test if a point is inside the room
"""
# Vector from edge start to test point
ap = p - self.outline
# Compute the dot products of normals to AP vectors
dotNAP = np.sum(self.edge_norms * ap, axis=1)
# The point is inside if all the dot products are greater than zero
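        # (this positive half-plane test is only valid when the outline is convex)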
return np.all(np.greater(dotNAP, 0))
def _gen_static_data(self):
"""
Generate polygons and static data for this room
Needed for rendering and collision detection
Note: the wall polygons are quads, but the floor and
ceiling can be arbitrary n-gons
"""
# Generate the floor vertices
self.floor_verts = self.outline
self.floor_texcs = gen_texcs_floor(
self.floor_tex,
self.floor_verts
)
# Generate the ceiling vertices
# Flip the ceiling vertex order because of backface culling
self.ceil_verts = np.flip(self.outline, axis=0) + self.wall_height * Y_VEC
self.ceil_texcs = gen_texcs_floor(
self.ceil_tex,
self.ceil_verts
)
self.wall_verts = []
self.wall_norms = []
self.wall_texcs = []
self.wall_segs = []
def gen_seg_poly(
edge_p0,
side_vec,
seg_start,
seg_end,
min_y,
max_y
):
if seg_end == seg_start:
return
if min_y == max_y:
return
s_p0 = edge_p0 + seg_start * side_vec
s_p1 = edge_p0 + seg_end * side_vec
# If this polygon starts at ground level, add a collidable segment
if min_y == 0:
self.wall_segs.append(np.array([s_p1, s_p0]))
# Generate the vertices
# Vertices are listed in counter-clockwise order
self.wall_verts.append(s_p0 + min_y * Y_VEC)
self.wall_verts.append(s_p0 + max_y * Y_VEC)
self.wall_verts.append(s_p1 + max_y * Y_VEC)
self.wall_verts.append(s_p1 + min_y * Y_VEC)
# Compute the normal for the polygon
normal = np.cross(s_p1 - s_p0, Y_VEC)
normal = -normal / np.linalg.norm(normal)
for i in range(4):
self.wall_norms.append(normal)
# Generate the texture coordinates
texcs = gen_texcs_wall(
self.wall_tex,
seg_start,
min_y,
seg_end - seg_start,
max_y - min_y
)
self.wall_texcs.append(texcs)
# For each wall
for wall_idx in range(self.num_walls):
edge_p0 = self.outline[wall_idx, :]
edge_p1 = self.outline[(wall_idx+1) % self.num_walls, :]
wall_width = np.linalg.norm(edge_p1 - edge_p0)
side_vec = (edge_p1 - edge_p0) / wall_width
if len(self.portals[wall_idx]) > 0:
seg_end = self.portals[wall_idx][0]['start_pos']
else:
seg_end = wall_width
# Generate the first polygon (going up to the first portal)
gen_seg_poly(
edge_p0,
side_vec,
0,
seg_end,
0,
self.wall_height
)
# For each portal in this wall
for portal_idx, portal in enumerate(self.portals[wall_idx]):
portal = self.portals[wall_idx][portal_idx]
start_pos = portal['start_pos']
end_pos = portal['end_pos']
min_y = portal['min_y']
max_y = portal['max_y']
# Generate the bottom polygon
gen_seg_poly(
edge_p0,
side_vec,
start_pos,
end_pos,
0,
min_y
)
# Generate the top polygon
gen_seg_poly(
edge_p0,
side_vec,
start_pos,
end_pos,
max_y,
self.wall_height
)
if portal_idx < len(self.portals[wall_idx]) - 1:
next_portal = self.portals[wall_idx][portal_idx+1]
next_portal_start = next_portal['start_pos']
else:
next_portal_start = wall_width
# Generate the polygon going up to the next portal
gen_seg_poly(
edge_p0,
side_vec,
end_pos,
next_portal_start,
0,
self.wall_height
)
self.wall_verts = np.array(self.wall_verts)
self.wall_norms = np.array(self.wall_norms)
if len(self.wall_segs) > 0:
self.wall_segs = np.array(self.wall_segs)
else:
self.wall_segs = np.array([]).reshape(0, 2, 3)
if len(self.wall_texcs) > 0:
self.wall_texcs = np.concatenate(self.wall_texcs)
else:
self.wall_texcs = np.array([]).reshape(0, 2)
def _render(self):
"""
Render the static elements of the room
"""
glEnable(GL_TEXTURE_2D)
glColor3f(1, 1, 1)
# Draw the floor
self.floor_tex.bind()
glBegin(GL_POLYGON)
glNormal3f(0, 1, 0)
for i in range(self.floor_verts.shape[0]):
glTexCoord2f(*self.floor_texcs[i, :])
glVertex3f(*self.floor_verts[i, :])
glEnd()
# Draw the ceiling
if not self.no_ceiling:
self.ceil_tex.bind()
glBegin(GL_POLYGON)
glNormal3f(0, -1, 0)
for i in range(self.ceil_verts.shape[0]):
glTexCoord2f(*self.ceil_texcs[i, :])
glVertex3f(*self.ceil_verts[i, :])
glEnd()
# Draw the walls
self.wall_tex.bind()
glBegin(GL_QUADS)
for i in range(self.wall_verts.shape[0]):
glNormal3f(*self.wall_norms[i, :])
glTexCoord2f(*self.wall_texcs[i, :])
glVertex3f(*self.wall_verts[i, :])
glEnd()
class MiniWorldEnv(gym.Env):
"""
Base class for MiniWorld environments. Implements the procedural
world generation and simulation logic.
"""
metadata = {
'render.modes': ['human', 'rgb_array'],
'video.frames_per_second' : 30
}
# Enumeration of possible actions
class Actions(IntEnum):
# Turn left or right by a small amount
turn_left = 0
turn_right = 1
# Move forward or back by a small amount
move_forward = 2
move_back = 3
# Pick up or drop an object being carried
pickup = 4
drop = 5
# Toggle/activate an object
toggle = 6
# Done completing task
done = 7
def __init__(
self,
max_episode_steps=1500,
forward_step=0.15,
turn_step=15,
obs_width=80,
obs_height=60,
window_width=800,
window_height=600,
domain_rand=False
):
# Action enumeration for this environment
self.actions = MiniWorldEnv.Actions
# Actions are discrete integer values
self.action_space = spaces.Discrete(len(self.actions))
# Observations are RGB images with pixels in [0, 255]
self.observation_space = spaces.Box(
low=0,
high=255,
shape=(obs_height, obs_width, 3),
dtype=np.uint8
)
self.reward_range = (-math.inf, math.inf)
# Maximum number of steps per episode
self.max_episode_steps = max_episode_steps
# Flag to enable/disable domain randomization
self.domain_rand = domain_rand
# Robot forward movement step size in meters
self.forward_step = forward_step
# Robot turn step size in degrees
self.turn_step = turn_step
# Window for displaying the environment to humans
self.window = None
# Invisible window to render into (shadow OpenGL context)
self.shadow_window = pyglet.window.Window(width=1, height=1, visible=False)
# Enable depth testing and backface culling
glEnable(GL_DEPTH_TEST)
glEnable(GL_CULL_FACE)
# Frame buffer used to render observations
self.obs_fb = FrameBuffer(obs_width, obs_height, 8)
# Frame buffer used for human visualization
self.vis_fb = FrameBuffer(window_width, window_height, 16)
# Compute the observation display size
self.obs_disp_width = 256
self.obs_disp_height = obs_height * (self.obs_disp_width / obs_width)
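# e.g. with the default 80x60 observation this gives a 256x192 display area (60 * 256 / 80 = 192)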
# For displaying text
self.text_label = pyglet.text.Label(
font_name="Arial",
font_size=14,
multiline=True,
width=400,
x = window_width + 5,
y = window_height - (self.obs_disp_height + 19)
)
# Initialize the state
self.seed()
self.reset()
def close(self):
pass
def seed(self, seed=None):
self.rand = RandGen(seed)
return [seed]
def reset(self):
"""
Reset the simulation at the start of a new episode
This also randomizes many environment parameters (domain randomization)
"""
# Step count since episode start
self.step_count = 0
# Create the agent
self.agent = Agent()
# List of entities contained
self.entities = []
# List of rooms in the world
self.rooms = []
# TODO: randomize elements of the world
# Perform domain-randomization
# May want a params class with some accessor for param names
# params.randomize(seed)
# params.val_name
# Wall segments for collision detection
self.wall_segs = []
# Generate the world
self._gen_world()
# Compute the min and max x, z extents of the whole floorplan
self.min_x = min([r.min_x for r in self.rooms])
self.max_x = max([r.max_x for r in self.rooms])
self.min_z = min([r.min_z for r in self.rooms])
self.max_z = max([r.max_z for r in self.rooms])
# Generate static data
if len(self.wall_segs) == 0:
self._gen_static_data()
# Pre-compile static parts of the environment into a display list
self._render_static()
# Generate the first camera image
obs = self.render_obs()
# Return first observation
return obs
def _get_carry_pos(self, agent_pos, ent):
"""
Compute the position at which to place an object being carried
"""
dist = self.agent.radius + ent.radius + self.forward_step
pos = agent_pos + self.agent.dir_vec * 1.05 * dist
# Adjust the Y-position so the object is visible while being carried
y_pos = max(self.agent.cam_height - ent.height - 0.3, 0)
pos = pos + Y_VEC * y_pos
return pos
def move_agent(self, fwd_dist):
"""
Move the agent forward
"""
next_pos = self.agent.pos + self.agent.dir_vec * fwd_dist
if self.intersect(self.agent, next_pos, self.agent.radius):
return False
carrying = self.agent.carrying
if carrying:
next_carrying_pos = self._get_carry_pos(next_pos, carrying)
if self.intersect(carrying, next_carrying_pos, carrying.radius):
return False
carrying.pos = next_carrying_pos
self.agent.pos = next_pos
return True
def turn_agent(self, turn_angle):
"""
Turn the agent left or right
"""
turn_angle *= (math.pi / 180)
orig_dir = self.agent.dir
self.agent.dir += turn_angle
carrying = self.agent.carrying
if carrying:
pos = self._get_carry_pos(self.agent.pos, carrying)
if self.intersect(carrying, pos, carrying.radius):
self.agent.dir = orig_dir
return False
carrying.pos = pos
carrying.dir = self.agent.dir
return True
def step(self, action):
"""
Perform one action and update the simulation
"""
self.step_count += 1
if action == self.actions.move_forward:
self.move_agent(self.forward_step)
elif action == self.actions.move_back:
self.move_agent(-self.forward_step)
elif action == self.actions.turn_left:
self.turn_agent(self.turn_step)
elif action == self.actions.turn_right:
self.turn_agent(-self.turn_step)
# Pick up an object
elif action == self.actions.pickup:
# Position at which we will test for an intersection
test_pos = self.agent.pos + self.agent.dir_vec * 1.5 * self.agent.radius
ent = self.intersect(self.agent, test_pos, 1.2 * self.agent.radius)
if not self.agent.carrying:
if isinstance(ent, Entity):
if not ent.is_static:
self.agent.carrying = ent
# Drop an object being carried
elif action == self.actions.drop:
if self.agent.carrying:
self.agent.carrying.pos[1] = 0
self.agent.carrying = None
# If we are carrying an object, update its position as we move
if self.agent.carrying:
ent_pos = self._get_carry_pos(self.agent.pos, self.agent.carrying)
self.agent.carrying.pos = ent_pos
self.agent.carrying.dir = self.agent.dir
# Generate the current camera image
obs = self.render_obs()
# If the maximum time step count is reached
if self.step_count >= self.max_episode_steps:
done = True
reward = 0
return obs, reward, done, {}
reward = 0
done = False
return obs, reward, done, {}
def add_rect_room(
self,
min_x,
max_x,
min_z,
max_z,
**kwargs
):
"""
Create a rectangular room
"""
assert len(self.wall_segs) == 0, "cannot add rooms after static data is generated"
# 2D outline coordinates of the room,
# listed in counter-clockwise order when viewed from the top
outline = np.array([
# East wall
[max_x, max_z],
# North wall
[max_x, min_z],
# West wall
[min_x, min_z],
# South wall
[min_x, max_z],
])
room = Room(
outline,
**kwargs,
)
self.rooms.append(room)
return room
def connect_rooms(
self,
room_a,
room_b,
min_x=None,
max_x=None,
min_z=None,
max_z=None,
max_y=None
):
"""
Connect two rooms along facing edges
"""
def find_facing_edges():
for idx_a in range(room_a.num_walls):
norm_a = room_a.edge_norms[idx_a]
for idx_b in range(room_b.num_walls):
norm_b = room_b.edge_norms[idx_b]
# Reject edges that are not facing the correct way
if np.dot(norm_a, norm_b) > -0.9:
continue
dir = room_b.outline[idx_b] - room_a.outline[idx_a]
# Reject edges that are not facing each other
if np.dot(norm_a, dir) > 0:
continue
return idx_a, idx_b
return None, None
idx_a, idx_b = find_facing_edges()
assert idx_a != None, "matching edges not found in connect_rooms"
start_a, end_a = room_a.add_portal(
edge=idx_a,
min_x=min_x,
max_x=max_x,
min_z=min_z,
max_z=max_z,
max_y=max_y
)
start_b, end_b = room_b.add_portal(
edge=idx_b,
min_x=min_x,
max_x=max_x,
min_z=min_z,
max_z=max_z,
max_y=max_y
)
a = room_a.outline[idx_a] + room_a.edge_dirs[idx_a] * start_a
b = room_a.outline[idx_a] + room_a.edge_dirs[idx_a] * end_a
c = room_b.outline[idx_b] + room_b.edge_dirs[idx_b] * start_b
d = room_b.outline[idx_b] + room_b.edge_dirs[idx_b] * end_b
# If the portals are directly connected, stop
if np.linalg.norm(a - d) < 0.001:
return
len_a = np.linalg.norm(b - a)
len_b = np.linalg.norm(d - c)
# Room outline points must be specified in counter-clockwise order
outline = np.stack([c, b, a, d])
outline = np.stack([outline[:, 0], outline[:, 2]], axis=1)
max_y = max_y if max_y != None else room_a.wall_height
room = Room(
outline,
wall_height=max_y,
wall_tex=room_a.wall_tex.name,
floor_tex=room_a.floor_tex.name,
ceil_tex=room_a.ceil_tex.name,
no_ceiling=room_a.no_ceiling,
)
self.rooms.append(room)
room.add_portal(1, start_pos=0, end_pos=len_a)
room.add_portal(3, start_pos=0, end_pos=len_b)
def place_entity(
self,
ent,
room=None,
pos=None,
dir=None,
min_x=None,
max_x=None,
min_z=None,
max_z=None
):
"""
Place an entity/object in the world.
Find a position that doesn't intersect with any other object.
"""
assert len(self.rooms) > 0, "create rooms before calling place_entity"
assert ent.radius != None, "entity must have physical size defined"
# Generate collision detection data
if len(self.wall_segs) == 0:
self._gen_static_data()
# If an exact position is specified
if pos is not None:
ent.dir = dir if dir else self.rand.float(-math.pi, math.pi)
ent.pos = pos
self.entities.append(ent)
return
# Keep retrying until we find a suitable position
while True:
# Pick a room, sample rooms proportionally to floor surface area
r = room if room else self.rand.choice(self.rooms, probs=self.room_probs)
# Choose a random point within the square bounding box of the room
lx = r.min_x if min_x == None else min_x
hx = r.max_x if max_x == None else max_x
lz = r.min_z if min_z == None else min_z
hz = r.max_z if max_z == None else max_z
pos = self.rand.float(
low =[lx + ent.radius, 0, lz + ent.radius],
high=[hx - ent.radius, 0, hz - ent.radius]
)
# Make sure the position is within the room's outline
if not r.point_inside(pos):
continue
# Make sure the position doesn't intersect with any walls
if self.intersect(ent, pos, ent.radius):
continue
# Pick a direction
d = dir if dir else self.rand.float(-math.pi, math.pi)
ent.pos = pos
ent.dir = d
break
self.entities.append(ent)
return ent
def place_agent(
self,
room=None,
dir=None,
min_x=None,
max_x=None,
min_z=None,
max_z=None
):
"""
Place the agent in the environment at a random position
and orientation
"""
return self.place_entity(
self.agent,
room=room,
dir=dir,
min_x=min_x,
max_x=max_x,
min_z=min_z,
max_z=max_z
)
def intersect(self, ent, pos, radius):
"""
Check if an entity intersects with the world
"""
# Ignore the Y position
px, _, pz = pos
pos = np.array([px, 0, pz])
# Check for intersection with walls
if intersect_circle_segs(pos, radius, self.wall_segs):
return True
# Check for entity intersection
for ent2 in self.entities:
# Entities can't intersect with themselves
if ent2 is ent:
continue
px, _, pz = ent2.pos
pos2 = np.array([px, 0, pz])
d = np.linalg.norm(pos2 - pos)
if d < radius + ent2.radius:
return ent2
return None
def near(self, ent0, ent1=None):
"""
Test if the two entities are near each other.
Used for "go to" or "put next" type tasks
"""
if ent1 == None:
ent1 = self.agent
dist = np.linalg.norm(ent0.pos - ent1.pos)
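# Entities count as near when their bounding circles are within about one forward step (with 10% slack) of touching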
return dist < ent0.radius + ent1.radius + 1.1 * self.forward_step
def _gen_static_data(self):
"""
Generate static data needed for rendering and collision detection
"""
# Generate the static data for each room
for room in self.rooms:
room._gen_static_data()
# Concatenate the wall segments
self.wall_segs = np.concatenate([r.wall_segs for r in self.rooms])
# Room selection probabilities
self.room_probs = np.array([r.area for r in self.rooms], dtype=float)
self.room_probs /= np.sum(self.room_probs)
def _gen_world(self):
"""
Generate the world. Derived classes must implement this method.
"""
raise NotImplementedError
def _reward(self):
"""
Default sparse reward computation
"""
return 1.0 - 0.2 * (self.step_count / self.max_episode_steps)
def _render_static(self):
"""
Render the static elements of the scene into a display list.
Called once at the beginning of each episode.
"""
# TODO: manage this automatically
# glIsList
glDeleteLists(1, 1);
glNewList(1, GL_COMPILE);
light_pos = [0, 2.5, 0, 1]
# Background/minimum light level
ambient = [0.45, 0.45, 0.45, 1]
# Diffuse material color
diffuse = [0.70, 0.70, 0.70, 1]
glLightfv(GL_LIGHT0, GL_POSITION, (GLfloat*4)(*light_pos))
glLightfv(GL_LIGHT0, GL_AMBIENT, (GLfloat*4)(*ambient))
glLightfv(GL_LIGHT0, GL_DIFFUSE, (GLfloat*4)(*diffuse))
#glLightf(GL_LIGHT0, GL_SPOT_CUTOFF, 180)
#glLightf(GL_LIGHT0, GL_SPOT_EXPONENT, 0)
#glLightf(GL_LIGHT0, GL_CONSTANT_ATTENUATION, 0)
#glLightf(GL_LIGHT0, GL_LINEAR_ATTENUATION, 0)
#glLightf(GL_LIGHT0, GL_QUADRATIC_ATTENUATION, 0)
glEnable(GL_LIGHTING)
glEnable(GL_LIGHT0)
glShadeModel(GL_SMOOTH)
glEnable(GL_COLOR_MATERIAL)
glColorMaterial(GL_FRONT_AND_BACK, GL_AMBIENT_AND_DIFFUSE)
# Render the rooms
for room in self.rooms:
room._render()
# Render the static entities
for ent in self.entities:
if ent.is_static:
ent.render()
glEndList()
def _render_world(
self,
frame_buffer,
cam_pos,
cam_dir,
cam_fov_y
):
"""
Render the world from a given camera position into a frame buffer,
and produce a numpy image array as output.
"""
# Switch to the default OpenGL context
# This is necessary on Linux Nvidia drivers
self.shadow_window.switch_to()
# Bind the frame buffer before rendering into it
frame_buffer.bind()
# Clear the color and depth buffers
glClearColor(*BLUE_SKY_COLOR, 1.0)
glClearDepth(1.0)
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
# Set the projection matrix
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
gluPerspective(
cam_fov_y,
frame_buffer.width / float(frame_buffer.height),
0.04,
100.0
)
# Setup the camera
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
gluLookAt(
# Eye position
*cam_pos,
# Target
*(cam_pos + cam_dir),
# Up vector
0, 1.0, 0.0
)
# Call the display list for the static parts of the environment
glCallList(1)
# TODO: keep the non-static entities in a different list for efficiency?
# Render the non-static entities
for ent in self.entities:
if not ent.is_static and ent is not self.agent:
ent.render()
#ent.draw_bound()
# Resolve the rendered image into a numpy array
return frame_buffer.resolve()
def render_obs(self, frame_buffer=None):
"""
Render an observation from the point of view of the agent
"""
if frame_buffer == None:
frame_buffer = self.obs_fb
return self._render_world(
frame_buffer,
self.agent.cam_pos,
self.agent.cam_dir,
self.agent.cam_fov_y
)
def render(self, mode='human', close=False):
"""
Render the environment for human viewing
"""
if close:
if self.window:
self.window.close()
return
# Render the human-view image
img = self.render_obs(self.vis_fb)
img_width = img.shape[1]
img_height = img.shape[0]
if mode == 'rgb_array':
return img
# Render the neural network view
obs = self.render_obs()
obs_width = obs.shape[1]
obs_height = obs.shape[0]
window_width = img_width + self.obs_disp_width
window_height = img_height
if self.window is None:
config = pyglet.gl.Config(double_buffer=True)
self.window = pyglet.window.Window(
width=window_width,
height=window_height,
resizable=False,
config=config
)
self.window.clear()
self.window.switch_to()
self.window.dispatch_events()
# Bind the default frame buffer
glBindFramebuffer(GL_FRAMEBUFFER, 0);
# Clear the color and depth buffers
glClearColor(0, 0, 0, 1.0)
glClearDepth(1.0)
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
# Set up the orthographic projection
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
glOrtho(0, window_width, 0, window_height, 0, 10)
# Draw the human render to the rendering window
img = np.ascontiguousarray(np.flip(img, axis=0))
img_data = pyglet.image.ImageData(
img_width,
img_height,
'RGB',
img.ctypes.data_as(POINTER(GLubyte)),
pitch=img_width * 3,
)
img_data.blit(
0,
0,
0,
width=img_width,
height=img_height
)
# Draw the observation
obs = np.ascontiguousarray(np.flip(obs, axis=0))
obs_data = pyglet.image.ImageData(
obs_width,
obs_height,
'RGB',
obs.ctypes.data_as(POINTER(GLubyte)),
pitch=obs_width * 3,
)
obs_data.blit(
img_width,
img_height - self.obs_disp_height,
0,
width=self.obs_disp_width,
height=self.obs_disp_height
)
# Draw the text label in the window
self.text_label.text = "pos: (%.2f, %.2f, %.2f)\nangle: %d\nsteps: %d" % (
*self.agent.pos,
int(self.agent.dir * 180 / math.pi),
self.step_count
)
self.text_label.draw()
# Force execution of queued commands
glFlush()
# If we are not running the Pyglet event loop,
# we have to manually flip the buffers
if mode == 'human':
self.window.flip()
return None
|
[
"maximechevalierb@gmail.com"
] |
maximechevalierb@gmail.com
|
6530542905bdeb0595e2cb3c50e809ca462c892b
|
a08c34701dc5641eb0ca275b4ff53120a0cffd59
|
/income_expense_managment/urls.py
|
4216d06cb0ca4e0681e79f387088c8504c243a49
|
[] |
no_license
|
sunitayadav14/DailyIncomeExpense
|
b6ff288adf61c4255b6ed9086343a00392fbc2ae
|
0f929e82c97a8c42dd8bc7f01f2ceda84621fe59
|
refs/heads/master
| 2023-06-29T02:22:22.044576
| 2021-08-07T09:21:08
| 2021-08-07T09:21:08
| 393,636,852
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 605
|
py
|
from django.contrib import admin
from django.urls import path,include
from account.views import home
urlpatterns = [
path('',home),
path('admin/', admin.site.urls),
path('acc-',include(('account.urls','account'),namespace='account')),
path('inc-',include(('income.urls','income'),namespace='income')),
path('exp-',include(('expense.urls','expense'),namespace='expense')),
]
'''
<a href="{% url 'key:value%}' ">
1)key is mapping for app in project URL
2) value is mapping for function in app URL
3) key is always Match with NameSpace
4) value is always Match with Name Attribut
'''
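# Hypothetical example (names assumed, not defined in this project): if income/urls.py
# declares path('add/', add_income, name='add'), a template can reverse it with
# <a href="{% url 'income:add' %}">Add income</a>
# where 'income' is the namespace and 'add' is the URL name.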
|
[
"karishmayadav482@gmail.com"
] |
karishmayadav482@gmail.com
|
22502e12c168c587954c21469fd222c3256b5177
|
342b62ec92427f178aaf9d08674825a163c13d91
|
/the_k_strongest_values_in_an_array_5429.py
|
5f83af10b3fb1a4901d6b12c5dbb326f6bcf7351
|
[] |
no_license
|
mmkhaque/LeetCodeWeeklyChallenge
|
1b2f8cb27d275c271c0e37599591af6e14110ea5
|
5d3d361341a56df72aabf32c2acf5d73b5a46eb3
|
refs/heads/master
| 2023-07-15T18:47:41.452250
| 2021-08-22T04:08:13
| 2021-08-22T04:08:13
| 254,993,439
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 851
|
py
|
class Solution:
def getStrongest(self, arr: List[int], k: int) -> List[int]:
def calculate_median(arr):
n = len(arr)
med = (n-1)/ 2
return arr[int(med)]
if len(arr) == 0:
return []
if k == 0:
return []
arr.sort()
med = calculate_median(arr)
ans = []
for index, num in enumerate(arr):
ans.append([num, abs(num-med), index])
ans.sort(key=lambda x: (x[1], x[2]), reverse=True)
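# After this sort, values farther from the median come first and ties are won by the
# larger value (since arr is sorted ascending, the larger index). For example,
# arr = [1, 2, 3, 4, 5] with k = 2 gives median 3 and output [5, 1].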
output = []
i = 0
for num, diff, index in ans:
i +=1
output.append(num)
if i == k:
break
return output
|
[
"noreply@github.com"
] |
mmkhaque.noreply@github.com
|
ad7fff59ff1d5880ad197a6205640018c46b96ba
|
e6dd6badd9073f7411704f97cd53c4c71595b55d
|
/python_cookbook/2-11Strip.py
|
d2e119a7cd109a3e4a6d3b9ae9eb8e6860910075
|
[] |
no_license
|
binkesi/leetcode_easy
|
8f753fe3306ed5e678146469bbbf35f3e1bb741c
|
79878064daa7a7f1505f895287ee07f7a081dc6a
|
refs/heads/master
| 2020-12-10T05:46:57.134697
| 2020-05-25T03:00:03
| 2020-05-25T03:00:03
| 233,516,663
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 179
|
py
|
import re
text = " ------------hello========="
if __name__ == "__main__":
aaa = text.strip()
print(aaa)
print(aaa.lstrip('-'))
print(aaa.rstrip('='))
|
[
"sjtusgn@163.com"
] |
sjtusgn@163.com
|
4ad57f1de789251894e5dfc259528c30fb29f79b
|
0e6333be66ae534506cad1f8d32712d303d554a8
|
/kornia/color/xyz.py
|
03e2c28e30e5db88d97fe26a7ddb896ed069dfd1
|
[
"Apache-2.0"
] |
permissive
|
jakubLangr/kornia
|
beaf00ad7662b65cd7077e7cfe06b48e089bbd4a
|
9a9a056bfb7ffa45395871b4858a8a041c926d99
|
refs/heads/master
| 2021-01-08T18:22:54.442658
| 2020-02-21T17:03:29
| 2020-02-21T17:03:29
| 242,106,066
| 1
| 0
|
NOASSERTION
| 2020-02-21T09:49:02
| 2020-02-21T09:49:02
| null |
UTF-8
|
Python
| false
| false
| 3,667
|
py
|
from typing import Union
import torch
import torch.nn as nn
class RgbToXyz(nn.Module):
r"""Converts an image from RGB to XYZ
The image data is assumed to be in the range of (0, 1).
args:
image (torch.Tensor): RGB image to be converted to XYZ.
returns:
torch.Tensor: XYZ version of the image.
shape:
- image: :math:`(*, 3, H, W)`
- output: :math:`(*, 3, H, W)`
Examples:
>>> input = torch.rand(2, 3, 4, 5)
>>> xyz = kornia.color.RgbToXyz()
>>> output = xyz(input) # 2x3x4x5
Reference:
[1] https://docs.opencv.org/4.0.1/de/d25/imgproc_color_conversions.html
"""
def __init__(self) -> None:
super(RgbToXyz, self).__init__()
def forward(self, image: torch.Tensor) -> torch.Tensor: # type: ignore
return rgb_to_xyz(image)
class XyzToRgb(nn.Module):
r"""Converts an image from XYZ to RGB
args:
image (torch.Tensor): XYZ image to be converted to RGB.
returns:
torch.Tensor: RGB version of the image.
shape:
- image: :math:`(*, 3, H, W)`
- output: :math:`(*, 3, H, W)`
Examples:
>>> input = torch.rand(2, 3, 4, 5)
>>> rgb = kornia.color.XyzToRgb()
>>> output = rgb(input) # 2x3x4x5
Reference:
[1] https://docs.opencv.org/4.0.1/de/d25/imgproc_color_conversions.html
"""
def __init__(self) -> None:
super(XyzToRgb, self).__init__()
def forward(self, image: torch.Tensor) -> torch.Tensor: # type: ignore
return xyz_to_rgb(image)
def rgb_to_xyz(image: torch.Tensor) -> torch.Tensor:
r"""Converts a RGB image to XYZ.
See :class:`~kornia.color.RgbToXyz` for details.
Args:
image (torch.Tensor): RGB Image to be converted to XYZ.
Returns:
torch.Tensor: XYZ version of the image.
"""
if not torch.is_tensor(image):
raise TypeError("Input type is not a torch.Tensor. Got {}".format(
type(image)))
if len(image.shape) < 3 or image.shape[-3] != 3:
raise ValueError("Input size must have a shape of (*, 3, H, W). Got {}"
.format(image.shape))
r: torch.Tensor = image[..., 0, :, :]
g: torch.Tensor = image[..., 1, :, :]
b: torch.Tensor = image[..., 2, :, :]
x: torch.Tensor = 0.412453 * r + 0.357580 * g + 0.180423 * b
y: torch.Tensor = 0.212671 * r + 0.715160 * g + 0.072169 * b
z: torch.Tensor = 0.019334 * r + 0.119193 * g + 0.950227 * b
out: torch.Tensor = torch.stack((x, y, z), -3)
return out
def xyz_to_rgb(image: torch.Tensor) -> torch.Tensor:
r"""Converts a XYZ image to RGB.
See :class:`~kornia.color.XyzToRgb` for details.
Args:
image (torch.Tensor): XYZ Image to be converted to RGB.
Returns:
torch.Tensor: RGB version of the image.
"""
if not torch.is_tensor(image):
raise TypeError("Input type is not a torch.Tensor. Got {}".format(
type(image)))
if len(image.shape) < 3 or image.shape[-3] != 3:
raise ValueError("Input size must have a shape of (*, 3, H, W). Got {}"
.format(image.shape))
x: torch.Tensor = image[..., 0, :, :]
y: torch.Tensor = image[..., 1, :, :]
z: torch.Tensor = image[..., 2, :, :]
r: torch.Tensor = 3.2404813432005266 * x + -1.5371515162713185 * y + -0.4985363261688878 * z
g: torch.Tensor = -0.9692549499965682 * x + 1.8759900014898907 * y + 0.0415559265582928 * z
b: torch.Tensor = 0.0556466391351772 * x + -0.2040413383665112 * y + 1.0573110696453443 * z
out: torch.Tensor = torch.stack((r, g, b), dim=-3)
return out
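# Usage sketch (not part of the library file; tolerance chosen loosely): the two 3x3
# matrices are inverses of each other, so a round trip approximately recovers the input.
# img = torch.rand(2, 3, 4, 5)
# assert torch.allclose(xyz_to_rgb(rgb_to_xyz(img)), img, atol=1e-4)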
|
[
"noreply@github.com"
] |
jakubLangr.noreply@github.com
|
50b034b2686c897992499134f2ca831df58ae14a
|
a6f0a92ba1b039fbef510d5da031e10dee6f4478
|
/linearRegression/logoPrice.py
|
0c82d573c57cc1f5c1b65f03b1229b65ec913577
|
[] |
no_license
|
niuyuanyuanna/machine-learning
|
bf3383257ce0aedd5165fe6dfedab21e3e67cef4
|
f718d447b709002bbda572946157d4df45c65197
|
refs/heads/master
| 2021-04-26T23:48:13.934690
| 2018-03-15T14:03:39
| 2018-03-15T14:03:39
| 123,859,750
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,560
|
py
|
from bs4 import BeautifulSoup
import numpy as np
import random
def loadData(inFile, yr, numPce, origPrc):
retX = []
retY = []
# Open and read the HTML file
with open(inFile, encoding='utf-8') as f:
html = f.read()
soup = BeautifulSoup(html)
i = 1
# Parse according to the structure of the HTML page
currentRow = soup.find_all('table', r="%d" % i)
while len(currentRow) != 0:
currentRow = soup.find_all('table', r="%d" % i)
title = currentRow[0].find_all('a')[1].text
lwrTitle = title.lower()
# Check whether the listing is labeled as brand new
if (lwrTitle.find('new') > -1) or (lwrTitle.find('nisb') > -1):
newFlag = 1.0
else:
newFlag = 0.0
# Check whether the item is marked as sold; only sold items are collected
soldUnicde = currentRow[0].find_all('td')[3].find_all('span')
if len(soldUnicde) == 0:
print("商品 #%d 没有出售" % i)
else:
# Parse the page to get the selling price
soldPrice = currentRow[0].find_all('td')[4]
priceStr = soldPrice.text
priceStr = priceStr.replace('$', '')
priceStr = priceStr.replace(',', '')
if len(soldPrice) > 1:
priceStr = priceStr.replace('Free shipping', '')
sellingPrice = float(priceStr)
# Discard incomplete sets (selling price below half the original price)
if sellingPrice > origPrc * 0.5:
# print("%d\t%d\t%d\t%f\t%f" % (yr, numPce, newFlag, origPrc, sellingPrice))
retX.append([yr, numPce, newFlag, origPrc])
retY.append(sellingPrice)
i += 1
currentRow = soup.find_all('table', r="%d" % i)
return retX, retY
def linearRegression(dataMat, labelMat):
squarX = dataMat.T * dataMat
squarXDet = np.linalg.det(squarX)
if squarXDet == 0:
print('Matrix is singular and cannot be inverted')
return
weights = squarX.I * dataMat.T * labelMat.T
return weights
def ridgeRegression(X, y):
allWeights = []
lamCount = 30
for j in range(lamCount):
i = np.eye(np.shape(X)[1])
lam = np.exp(j - 10)
denom = X.T * X + lam * i
if np.linalg.det(denom) == 0.0:
print('Matrix is singular')
continue
w = denom.I * X.T * y.T
weights = w.T.flatten().A[0]
allWeights.append(weights)
return np.mat(allWeights)
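# Each row of the returned matrix holds the ridge weights for one lambda = exp(j - 10),
# j = 0..29, i.e. a log-spaced grid from roughly 4.5e-5 to 1.8e8.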
def crossValidation(dataArr, labelArr, numVal):
m = len(labelArr)
indexList = list(range(m))
erroMat = np.mat((numVal, 30))
for i in range(numVal):
random.shuffle(indexList)
trainX = []
trainY = []
testX = []
testY = []
for j in range(m):
if j < m * 0.9:
trainX.append(dataArr[indexList[j]])
trainY.append(labelArr[indexList[j]])
else:
testX.append(dataArr[indexList[j]])
testY.append(labelArr[indexList[j]])
dataMat = np.mat(trainX)
labelMat = np.mat(trainY)
xMeans = np.mean(dataMat, axis=0)
yMean = np.mean(labelMat)
xVars = np.var(dataMat, axis=0)
xNew = (dataMat - xMeans) / xVars
yNew = labelMat - yMean
ws = ridgeRegression(xNew, yNew)
testXMat = np.mat(testX)
# testYMat = np.mat(testY)
testXNew = (testXMat - xMeans) / xVars
preY = testXNew * ws.T + float(yMean)
if __name__ == '__main__':
lgX1, lgY1 = loadData('./lego/lego8288.html', 2006, 800, 49.99)
lgX2, lgY2 = loadData('./lego/lego10030.html', 2002, 3096, 269.99) # LEGO 10030 from 2002, 3096 pieces, original price 269.99
lgX3, lgY3 = loadData('./lego/lego10179.html', 2007, 5195, 499.99) # LEGO 10179 from 2007, 5195 pieces, original price 499.99
lgX4, lgY4 = loadData('./lego/lego10181.html', 2007, 3428, 199.99) # LEGO 10181 from 2007, 3428 pieces, original price 199.99
lgX5, lgY5 = loadData('./lego/lego10189.html', 2008, 5922, 299.99) # LEGO 10189 from 2008, 5922 pieces, original price 299.99
lgX6, lgY6 = loadData('./lego/lego10196.html', 2009, 3263, 249.99)
lgX = np.concatenate((lgX1, lgX2, lgX3, lgX4, lgX5, lgX6), axis=0)
lgY = np.concatenate((lgY1, lgY2, lgY3, lgY4, lgY5, lgY6), axis=0)
# dataMat, labelMat = preprocessing(lgX, lgY)
# dataMat = np.mat(lgX)
# labelMat = np.mat(lgY)
# ws = linearRegression(dataMat, labelMat)
# print('%f%+f*year%+f*piece count%+f*brand-new flag%+f*original price' % \
# (ws[0], ws[1], ws[2], ws[3], ws[4]))
crossValidation(lgX, lgY, 10)
|
[
"liuyuan_tx@163.com"
] |
liuyuan_tx@163.com
|
ecec53a7f0397c1947cdb0e61b81225ee3dbb404
|
2a7c9abbd797edf2288aeaf224d11c1ece38a0a9
|
/tripod/app/views.py
|
7f80bb5b733b64a1cb3b049bbed16bcc7de1b943
|
[] |
no_license
|
jeffchy/Tripod
|
32ab08ec70f4e3c5c5b51fda42c8f658b2c71d63
|
c113f662d82b046b138789c0760ba388a20b86aa
|
refs/heads/master
| 2023-03-07T05:04:58.682012
| 2021-02-20T07:25:50
| 2021-02-20T07:25:50
| 327,805,615
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,709
|
py
|
from django.http import HttpResponse, HttpResponseRedirect
from .models import Experiment, Config, ExperimentForm, ConfigForm
from django.shortcuts import render, get_object_or_404
from .core.default import Nodes, GPUs
from .core.checker import check_config_settings, check_experiment_settings
def index(request):
experiments_list = Experiment.objects.order_by('-pub_date')
context = {'experiments_list': experiments_list}
return render(request, 'app/index.html', context)
def exp(request, experiment_id):
experiment = get_object_or_404(Experiment, pk=experiment_id)
context = {'experiment': experiment}
return render(request, 'app/exp.html', context)
def addexp(request):
if request.method == 'POST':
form = ExperimentForm(request.POST)
if form.is_valid():
# flag, error = check_settings(form.cleaned_data['settings'])
# if flag:
form.save()
return HttpResponseRedirect('/app/')
else:
form = ExperimentForm()
context = {'form': form}
return render(request, 'app/addexp.html', context)
def config(request, config_id):
config = get_object_or_404(Config, pk=config_id)
if request.method == 'POST':
print(request.POST)
context = {
'config': config,
'scripts': '1',
'error': '2',
'nodes': Nodes,
'gpus': GPUs,
}
return render(request, 'app/config.html', context)
else:
context = {
'config': config,
'nodes': Nodes,
'gpus': GPUs,
}
return render(request, 'app/config.html', context)
def addconfig(request, experiment_id):
experiment = get_object_or_404(Experiment, pk=experiment_id)
error = ''
if request.method == 'POST':
form = ConfigForm(request.POST)
if form.is_valid():
add_exp_id = form.cleaned_data['experiment'].id
settings = form.cleaned_data['config_settings']
error += check_config_settings(settings)
print(settings, error)
if error:
context = {'form': form, 'experiment': experiment, 'error': error}
return render(request, 'app/addconfig.html', context)
else:
form.save()
return HttpResponseRedirect('/app/exp/{}'.format(add_exp_id))
else:
error += "Invalid Form"
else:
form = ConfigForm()
context = {'form': form, 'experiment': experiment, 'error': error}
return render(request, 'app/addconfig.html', context)
|
[
"jiangchy@shanghaitech.edu.cn"
] |
jiangchy@shanghaitech.edu.cn
|
48d17f3977310c6c12f3c2e38658ddf41ad6528a
|
e29c79b27cee203e02a9093f1dc1c2abf558637b
|
/log.py
|
3d814a30174494932380209edca128647819a52c
|
[] |
no_license
|
bartlomiejfydrych/python-selenium-pop
|
cbdb7b79e5fa5ee1a4c65aefc83938a3d5546d70
|
adcfb127ed1d9805a565871f0c6da02703a8103f
|
refs/heads/main
| 2023-01-29T13:36:27.267504
| 2020-12-12T11:08:38
| 2020-12-12T11:08:38
| 305,737,242
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 420
|
py
|
def log(func):
def wrapper(*args, **kwargs):
print(func.__name__)
return func(*args, **kwargs)
return wrapper
def for_all_methods(decorator):
def decorate(cls):
for attr in cls.__dict__: # there's probably a better way to do this
if callable(getattr(cls, attr)):
setattr(cls, attr, decorator(getattr(cls, attr)))
return cls
return decorate
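# Usage sketch (class and method names are assumed, not part of this file):
# @for_all_methods(log)
# class LoginPage:
#     def open(self): ...
#     def submit(self): ...
# Every method call then prints the method's name before running, via the log wrapper.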
|
[
"fydbar@gmail.com"
] |
fydbar@gmail.com
|