blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 281 | content_id stringlengths 40 40 | detected_licenses listlengths 0 57 | license_type stringclasses 2 values | repo_name stringlengths 6 116 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 313 values | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 18.2k 668M ⌀ | star_events_count int64 0 102k | fork_events_count int64 0 38.2k | gha_license_id stringclasses 17 values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 107 values | src_encoding stringclasses 20 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 4 6.02M | extension stringclasses 78 values | content stringlengths 2 6.02M | authors listlengths 1 1 | author stringlengths 0 175 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
c658a9eea9faa8760fe14642bb464cfabf73be90 | 2bdffd6d18cc16801064373409821130f9035ed4 | /experiments/sudoku/solve2_nn.py | 54b53ec5f2281796daec0d673a510b6cb05cce96 | [] | no_license | benthayer/Tensorflow | a433c05ba20db937d4fd6830e30d03d1741e1b2d | 48572c93702799b690c0b061fadc4cffb1a34283 | refs/heads/master | 2021-06-14T14:53:23.337419 | 2017-03-20T22:45:01 | 2017-03-20T22:45:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,084 | py | import numpy as np
import tensorflow as tf
from experiments.sudoku.gen2 import convert_to_normal, get_training_and_test_sets
def weight_variable(shape):
    """Create a trainable weight tensor of the given shape.

    Values are drawn from a truncated normal (stddev 0.1) so initial
    activations stay small.
    """
    initial_values = tf.truncated_normal(shape, stddev=0.1)
    return tf.Variable(initial_values)
def bias_variable(shape):
    """Create a trainable bias tensor of the given shape, initialised to 0.1."""
    initial_values = tf.constant(0.1, shape=shape)
    return tf.Variable(initial_values)
# --- Model definition (TF1 graph mode) ---
# Input: flattened 4x4 sudoku board, one-hot over 4 digits per cell -> 4**3 floats.
data = tf.placeholder(tf.float32, shape=[None, 4**3])
# Single fully-connected softmax layer mapping puzzle -> solution encoding.
W1 = weight_variable([4**3, 4**3])
b1 = bias_variable([4**3])
h1 = tf.nn.softmax(tf.matmul(data, W1) + b1)
y = h1
# Target placeholder: one-hot encoded solved board.
y_ = tf.placeholder(tf.float32, [None, 4**3])
# Cross-entropy loss summed over the one-hot axis, averaged over the batch.
cross_entropy = tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(y), reduction_indices=[1]))
train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)
training_puzzles, training_solutions, test_puzzles, test_solutions = get_training_and_test_sets()
init = tf.global_variables_initializer()
sess = tf.Session()
sess.run(init)
# NOTE(review): `k` is never used below — confirm it is dead code.
k = 1000
# --- Training: 10000 full-batch gradient steps ---
for i in range(10000):
    sess.run(train_step, feed_dict={data: training_puzzles, y_: training_solutions})
    if i % 100 == 0:
        print("Batch {} complete".format(i))
# --- Evaluation ---
# Reshape to (batch, row, col, digit) and compare argmax over the digit axis.
correct_prediction = tf.equal(
    tf.argmax(
        tf.reshape(y, (-1, 4, 4, 4)),
        2),
    tf.argmax(
        tf.reshape(y_, (-1, 4, 4, 4)),
        2))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
accuracy = sess.run(accuracy, feed_dict={data: test_puzzles, y_: test_solutions})
# Pull learned parameters out of the session (kept for inspection).
W = sess.run(W1)
b = sess.run(b1)
# Print up to 4 incorrectly solved boards for manual inspection.
incorrect = 0
for i in range(len(test_puzzles)):
    guessed_board = sess.run(y, feed_dict={data: [test_puzzles[i]]})
    if not np.array_equal(
            convert_to_normal(guessed_board.reshape((4, 4, 4))),
            convert_to_normal(test_solutions[i].reshape(4, 4, 4))):
        incorrect += 1
        if incorrect > 4:
            break
        print()
        print("Board:\n", convert_to_normal(test_puzzles[i].reshape((4, 4, 4)), ones=True))
        print("Guess:\n", convert_to_normal(guessed_board.reshape((4, 4, 4))))
        print("Answer:\n", convert_to_normal(test_solutions[i].reshape((4, 4, 4))))
print("Accuracy = ", accuracy)
| [
"benthayer2365@gmail.com"
] | benthayer2365@gmail.com |
821821dc8c26febfe8eae588743128fda1bc6ed5 | cbdf42eaf2f3ee908999fb1317d3fe90fa255f42 | /kusinwolf/truth_tables.py | 78e29f83189b337acc29c22cdc795974950e4d69 | [] | no_license | tdyhacker/pythonexploration | 47c4e8a96d541428d4cf1d6c558a29e48f5fc22c | 4b360ef2b61fb565c7e2f82ff31e848776cd649a | refs/heads/master | 2021-01-10T02:51:34.245002 | 2010-11-28T02:31:36 | 2010-11-28T02:31:36 | 47,951,520 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,895 | py |
def printTable(truth_table):
    # Pretty-print the truth table to stdout (Python 2 print statements).
    # truth_table['order'] lists the column names; each name maps to a list
    # of boolean row values of equal length.
    # Header row of column names.
    for key in truth_table['order']:
        print key, "\t",
    print ""
    # Separator row, one dashed cell per column.
    for item in truth_table['order']:
        print "-------",
    print ""
    # Body: row-major walk over the per-column value lists.
    for row in range(len(truth_table[truth_table['order'][0]])):
        for col in range(len(truth_table['order'])):
            print truth_table[truth_table['order'][col]][row], "\t",
        print ""
def buildTruths(truth_table):
    """Fill truth_table with every combination of truth values.

    For each name in truth_table['order'] this adds two columns: the
    variable itself and its negation ("~name").  Column i alternates in
    blocks of 2**(n-i-1) so that all 2**n assignments are enumerated.
    """
    columns = truth_table['order']
    total_rows = pow(2, len(columns))
    for col in columns:
        run_length = pow(2, len(columns) - (columns.index(col) + 1))
        plain = []
        negated = []
        current = True
        while len(plain) < total_rows:
            plain.extend([current] * run_length)
            negated.extend([not current] * run_length)
            current = not current
        truth_table[col] = plain
        truth_table["~%s" % col] = negated
def evaluatePart(left, word, right):
    """Apply the logical connective named by *word* to two boolean operands.

    Supported connectives:
        "V"   -- disjunction (or)
        "^"   -- conjunction (and)
        "->"  -- implication
        "<->" -- biconditional
    Falls through (returns None) for an unrecognised connective,
    matching the original behaviour.
    """
    if word == "V":
        # Fix: 'V' is logical OR (see the module's "# V = or" key);
        # the original returned `left and right`.
        return left or right
    elif word == "^":
        # Fix: '^' is logical AND ("# ^ = and"); the original returned
        # `left or right`.
        return left and right
    elif word == "->":
        # Material implication: false only when left is true and right false.
        return not left or right
    elif word == "<->":
        return not (left ^ right)  # not (left xor right)
def evaluateEquation(truth_table):
    """Evaluate truth_table['equation'] ("LHS OP RHS") for every row.

    The operand tokens must name existing columns (e.g. 'P' or '~P');
    results are stored under the equation string itself.
    """
    equation = truth_table['equation']
    groups = equation.split(" ")
    row_count = pow(2, len(truth_table['order']))
    truth_table[equation] = [
        evaluatePart(truth_table[groups[0]][row], groups[1], truth_table[groups[2]][row])
        for row in range(row_count)
    ]
# Demo: build a 3-variable table and evaluate one compound formula.
truth_table = {}
truth_table['order'] = ['P', 'Q', 'W',]
truth_table['equation'] = "~P -> Q"
# V = or
# ^ = and
buildTruths(truth_table)
evaluateEquation(truth_table)
# Append the equation name so printTable renders its result column too.
truth_table['order'].append(truth_table['equation']) # Cheat for printing out :P
printTable(truth_table)
| [
"kusinwolf@c446d5ca-c62c-11dd-92e3-e3085bfe42ca"
] | kusinwolf@c446d5ca-c62c-11dd-92e3-e3085bfe42ca |
9663b289594d9946c2f865789b20f2e9add71bac | 9b52be243a9fc05f18fb66044e2d23a8c83f3a0d | /Session 1/intro.py | 9457a97a764f74a1394978dd81c48aee0d3b1383 | [] | no_license | trangnguyen21/nguyenvumytrang-fundamentals-c4ejs01 | 5663a4fd064a8fa05bf3ca730fa08e21753fe982 | 03731c706e498cc72e76ba2423d1c8a186efdff2 | refs/heads/master | 2022-12-09T22:12:06.708492 | 2019-12-29T02:45:32 | 2019-12-29T02:45:32 | 218,953,505 | 0 | 1 | null | 2022-12-05T02:22:04 | 2019-11-01T09:18:56 | CSS | UTF-8 | Python | false | false | 61 | py | a = float(input('height'))
# NOTE(review): prompt label 'hypotense' looks like a typo for 'hypotenuse' — confirm intent.
b = float(input('hypotense'))
| [
"admin@CSGLANBK-281.sg.cmc.com.vn"
] | admin@CSGLANBK-281.sg.cmc.com.vn |
0b130d34300f0d54fda9186249d00d2196464eda | d2ada8e9dea0a59476dbbdcfdebc3b8eed951271 | /CH02/bh_sshserver.py | 5046e3f12011c7357d50aa4e84956dbebd0307ea | [] | no_license | sadavoya/bhp | dccf211f4bd95f5eaf69e44c3bfee8f7d07af688 | 6fbf1be8ca0f83363234d9c95170bdd770716c28 | refs/heads/master | 2021-01-13T14:51:13.347114 | 2017-02-21T01:39:57 | 2017-02-21T01:39:57 | 76,486,946 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,325 | py | #!/usr/bin/env python
'''SSH'''
import socket
import threading
import paramiko
import sys
# using the demo keys in the paramiko demo files
# Server identity key, loaded from disk at import time.
host_key = paramiko.RSAKey(filename='test_rsa.key')
#print host_key.get_base64()
class Server(paramiko.ServerInterface):
    """Minimal paramiko server: one session channel, one hard-coded login."""

    def __init__(self):
        self.event = threading.Event()

    def check_channel_request(self, kind, chanid):
        # Only plain 'session' channels are accepted.
        if kind != 'session':
            return paramiko.OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED
        return paramiko.OPEN_SUCCEEDED

    def check_auth_password(self, username, password):
        # Single hard-coded credential pair: joker/joker.
        credentials_ok = (username == 'joker') and (password == 'joker')
        return paramiko.AUTH_SUCCESSFUL if credentials_ok else paramiko.AUTH_FAILED
def main():
    '''Main'''
    # Usage: bh_sshserver.py <bind-address> <port>  (Python 2 script).
    server = sys.argv[1]
    ssh_port = int(sys.argv[2])
    # Bind a reusable TCP listener and wait for a single client.
    try:
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        sock.bind((server, ssh_port))
        sock.listen(100)
        print '[+] Listening for connection...'
        client, addr = sock.accept()
    except Exception, e:
        print '[-] Listen failed: ' + str(e)
        sys.exit(1)
    #print '[+] Got a connection to %s:%d!' % (addr[1], addr[2])
    try:
        # Wrap the raw socket in an SSH transport using the module-level host key.
        bh_session = paramiko.Transport(client)
        bh_session.add_server_key(host_key)
        server = Server()  # NOTE: rebinds `server` from the bind address to the interface object
        try:
            bh_session.start_server(server=server)
        except paramiko.SSHException, x:
            print '[-] SSH negotiation failed.'
        # Wait up to 20s for the client to open a channel.
        chan = bh_session.accept(20)
        print '[+] Authenticated!'
        print chan.recv(1024)
        chan.send('Welcome to bh_ssh')
        # Interactive command loop: forward operator input, echo responses.
        while True:
            try:
                command = raw_input("Enter command: ").strip('\n')
                if command != 'exit':
                    chan.send(command)
                    print chan.recv(1024) + '\n'
                else:
                    # 'exit' tears down the session via the outer except handler.
                    chan.send('exit')
                    print 'exiting'
                    bh_session.close()
                    raise Exception('exit')
            except KeyboardInterrupt:
                bh_session.close()
    except Exception, e:
        print '[-] Caught exception: ' + str(e)
        try:
            bh_session.close()
        except:
            pass
        sys.exit(1)

main()
"root@localhost.localdomain"
] | root@localhost.localdomain |
7da317e87cb08431320105068322690d71269402 | a1092fecf5057e45f1df4e738a14be210dadbc83 | /gen.py | 3d26eb5062cedb3108e425576485a5c6bc7d741c | [] | no_license | robert-giaquinto/baum-welch | ba45b3c80e839ae7fd5b8b5a00ee07dd9228b61a | b57fb2bd64ed3fdfed1552a6ea5afd9c7c120cfc | refs/heads/master | 2021-01-15T09:09:29.267399 | 2014-05-31T21:17:42 | 2014-05-31T21:17:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,008 | py | import random
import numpy as np
N_SEQ = 10
START = 0
BEFORE = 1
AFTER = 2
END = 3
def gen_seq():
    """Generate one symbol sequence 'S ... E' from a 4-state chain.

    BEFORE and AFTER emit N/L/R with mirrored multinomial probabilities;
    each step advances to the next state with probability 1/5000.
    Note the successive `if` (not `elif`) blocks: a transition made inside
    the BEFORE block lets the AFTER block also run in the same iteration.
    """
    seq = []
    state = START
    while state != END:
        if state == START:
            state = BEFORE
            seq.append('S')
        if state == BEFORE:
            # Mostly 'N'; 'L' is favoured over 'R' before the change point.
            n, l, r = np.random.multinomial(1, [0.96, 0.036, 0.004])
            if n:
                seq.append('N')
            elif l:
                seq.append('L')
            else:
                seq.append('R')
            state += np.random.binomial(1, 1/5000.)
        if state == AFTER:
            # Mirrored emission: 'R' favoured over 'L' after the change point.
            n, l, r = np.random.multinomial(1, [0.96, 0.004, 0.036])
            if n:
                seq.append('N')
            elif l:
                seq.append('L')
            else:
                seq.append('R')
            state += np.random.binomial(1, 1/5000.)
    seq.append('E')
    return seq
if __name__ == '__main__':
    # NOTE(review): this seeds the stdlib `random` module, but gen_seq()
    # draws exclusively from np.random — runs are not reproducible as-is.
    # Confirm whether np.random.seed(42) was intended.
    random.seed(42)
    for i in xrange(N_SEQ):
        seq = gen_seq()
        print ''.join(seq)
| [
"piotrek.kaleta@gmail.com"
] | piotrek.kaleta@gmail.com |
3925710c3420811e6ea8744fbbc871f548568fad | fc8a86e30c286910eef086821397b34093954fed | /day04/note/json1.py | e4125259f10447fc494d84f76907d062ad6d5450 | [
"Apache-2.0"
] | permissive | wjianwei126/learnpython | 0e1a0504b70852eb3d9c53d8827981ce5d609446 | 797ec7afc94e6c467d7e1f037219b3aaf1327cf0 | refs/heads/master | 2020-04-09T04:18:32.059840 | 2015-01-22T12:42:00 | 2015-01-22T12:42:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 191 | py | #!/usr/bin/evn python
#coding:utf-8
# Demo (Python 2): round-trip a dict through JSON serialisation.
import json
a = {'k1':'v1','k2':'v2'}
# dumps: dict -> JSON string
a_json = json.dumps(a)
print a_json
print type(a_json)
# loads: JSON string -> dict
a_new = json.loads(a_json)
print a_new
print type(a_new)
"congmmy@gmail.com"
] | congmmy@gmail.com |
40da92312bb0d21b9e98e3332e5523a47d977ab2 | b36a2ca0e71bd272afd4c831e09b498cedfec889 | /0x07-python-test_driven_development/5-text_indentation.py | 0053243d7e0d860174c7314f26c14ad56febdc8c | [] | no_license | dondropo/holbertonschool-higher_level_programming | 3a132c1dd8d31b5e9f170a6fab83aa1550449965 | 6d0bf581830c4ecfb17d926fdc89b357a5664574 | refs/heads/master | 2023-03-02T02:20:09.866147 | 2021-01-31T22:37:01 | 2021-01-31T22:37:01 | 259,390,480 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 619 | py | #!/usr/bin/python3
def text_indentation(text):
    """Print *text* with two newlines after each '.', '?' and ':'.

    A single space immediately following one of those delimiters is
    dropped, so no printed line starts with a space.

    Args:
        text (str): the text to print.

    Raises:
        TypeError: if text is not a string.
    """
    # Fix: the original performed this identical check twice.
    if not type(text) is str:
        raise TypeError("text must be a string")
    sp_chars = [':', '.', '?']
    # Pass 1: drop the single space that may follow a delimiter.
    # Fix: bounds-check idx + 1 (the original indexed past the end when a
    # delimiter was the last character) and compare with == instead of `is`.
    idx = 0
    while idx < len(text):
        if (text[idx] in sp_chars and idx + 1 < len(text)
                and text[idx + 1] == " "):
            text = text[:idx + 1] + text[idx + 2:]
        idx += 1
    # Pass 2: emit characters, inserting a blank line after each delimiter.
    pieces = []
    for ch in text:
        pieces.append(ch)
        if ch in sp_chars:
            pieces.append('\n\n')
    print(''.join(pieces), end='')
| [
"alejandroruscamoreno@gmail.com"
] | alejandroruscamoreno@gmail.com |
1d5db696b220dc961fb989dfd34319751e4378f2 | 66ff35c1389e468e809c4080262f79a91373ce29 | /RotorS_ws/build/rotors_control/catkin_generated/pkg.installspace.context.pc.py | 43cfa00f3c30211991b0271b5a9ef0c38c9db3c3 | [] | no_license | TJHDL/Tilted-Hexarotor-Omnicopter | e544a82b505ecd148f8bde10fced42605397bec0 | f8f47b4d101b0c2665b4c78180908eeac6758627 | refs/heads/main | 2023-05-01T14:13:53.670321 | 2021-05-10T12:06:46 | 2021-05-10T12:06:46 | 366,026,610 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 628 | py | # generated from catkin/cmake/template/pkg.context.pc.in
# Auto-generated catkin package-context values (template: pkg.context.pc.in).
# Edit the template / CMake configuration, not this file.
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "${prefix}/include".split(';') if "${prefix}/include" != "" else []
PROJECT_CATKIN_DEPENDS = "geometry_msgs;mav_msgs;nav_msgs;roscpp;sensor_msgs".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "-llee_position_controller;-lroll_pitch_yawrate_thrust_controller".split(';') if "-llee_position_controller;-lroll_pitch_yawrate_thrust_controller" != "" else []
PROJECT_NAME = "rotors_control"
PROJECT_SPACE_DIR = "/home/hdl/GraduateDesign/Catkin_workspace_assemble/RotorS_ws/install"
PROJECT_VERSION = "2.2.3"
"hdltjdx@163.com"
] | hdltjdx@163.com |
79397e0d2322942df03ee78140a794d44521cfca | 2b01258d3a1dca79d548f490731b0474cab5202f | /app/model/file.py | adb9d84802b812ecb99f5c5f112817f1a79f7abd | [
"MIT"
] | permissive | icrdr/1-mu-server | 9714b60ea285c843dca6f0cda74566c70449fe76 | 8212b79a9949e334c185b99b1be41add5e577937 | refs/heads/master | 2022-11-23T08:11:44.258413 | 2020-10-16T14:03:36 | 2020-10-16T14:03:36 | 194,759,358 | 0 | 0 | MIT | 2022-11-22T04:07:08 | 2019-07-02T00:18:35 | Python | UTF-8 | Python | false | false | 4,551 | py | from .. import db, app
from datetime import datetime
import os
import shortuuid
from psd_tools import PSDImage
from PIL import Image
from .post import Tag
from ..utility import word2List
# Association table backing the many-to-many File <-> Tag relationship.
FILE_TAG = db.Table(
    'file_tags',
    db.Column('tag_id', db.Integer,
              db.ForeignKey('tags.id')),
    db.Column('file_id', db.Integer,
              db.ForeignKey('files.id')),
)
class File(db.Model):
    """Uploaded file record: storage path, metadata, tags and previews."""
    __tablename__ = 'files'
    id = db.Column(db.Integer, primary_key=True)
    # one-many: File.uploader-User.files
    uploader_user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
    author = db.Column(db.String(64))
    name = db.Column(db.String(64))      # original filename without extension
    format = db.Column(db.String(16))    # lowercased extension, e.g. 'psd'
    url = db.Column(db.String(512), unique=True)  # path relative to UPLOAD_FOLDER
    from_url = db.Column(db.String(512))
    upload_date = db.Column(db.DateTime, default=datetime.utcnow)
    # one-many: Preview.file-File.previews
    previews = db.relationship(
        'Preview', backref=db.backref('file', lazy=True))
    description = db.Column(db.String(512))
    public = db.Column(db.Boolean, nullable=False, default=False)
    tags = db.relationship(
        'Tag', secondary=FILE_TAG,
        lazy='subquery', backref=db.backref('files', lazy=True))

    @staticmethod
    def create_file(uploader_id, file, description, tags, public):
        """Persist an uploaded file to disk + DB and build JPEG previews.

        Saves under UPLOAD_FOLDER/<year>/<month>/<day>/<shortuuid>.<ext>,
        creates Tag rows as needed, and — for recognised image formats —
        writes one thumbnail per configured THUMBNAIL_SIZE.
        Returns the new File row.
        """
        # filename = utils.secure_filename(file.filename)
        format = file.filename.split(".")[-1].lower()
        rawname = file.filename[:-len(format)-1]
        date = datetime.utcnow().strftime("%Y%m%d")
        year = date[:4]
        month = date[4:6]
        day = date[6:8]
        random_name = str(shortuuid.uuid())
        filename = random_name +'.'+ format
        path = os.path.join(app.config['UPLOAD_FOLDER'], year, month, day)
        if not os.path.exists(path):
            os.makedirs(path)
        file.save(os.path.join(path, filename))
        new_file = File(
            uploader_user_id = uploader_id,
            name = rawname,
            format = format,
            # Store forward-slash relative URL regardless of OS separator.
            url = str(os.path.join(year, month, day , filename)).replace('\\', '/')
        )
        if description:
            new_file.description = description
        if public:
            new_file.public = True
        if tags:
            # Each incoming tag string may expand into several words.
            all_tag_list = []
            for tag in tags:
                tag_list = word2List(tag)
                all_tag_list += tag_list
            for tag in all_tag_list:
                # Reuse an existing Tag row or create it on the fly.
                _tag = Tag.query.filter_by(name=tag).first()
                if not _tag:
                    _tag = Tag(name=tag)
                    db.session.add(_tag)
                new_file.tags.append(_tag)
        db.session.add(new_file)
        db.session.commit()
        # Preview generation: best-effort, errors are logged and ignored.
        if format in ['png','jpg','psd','jpeg','gif','bmp','tga','tiff','tif']:
            try:
                im_path = os.path.join(path, filename)
                if format == 'psd':
                    # PSD needs flattening before it can be thumbnailed.
                    psd = PSDImage.open(im_path)
                    im = psd.compose()
                else:
                    im = Image.open(im_path)
                im = im.convert('RGB')
                for size in app.config['THUMBNAIL_SIZE']:
                    im.thumbnail((size, size))
                    im.save(os.path.join(path, random_name) + "_%s.jpg"%str(size), "JPEG")
                    new_preview = Preview(
                        bind_file_id = new_file.id,
                        url = str(os.path.join(year, month, day , random_name+"_%s.jpg"%str(size))).replace('\\', '/'),
                        size = size
                    )
                    db.session.add(new_preview)
                    db.session.commit()
            except Exception as e:
                print(e)
        return new_file

    @staticmethod
    def clear_missing_file():
        """Delete DB rows (and their previews) whose file is gone from disk."""
        files_list = File.query.all()
        for file in files_list:
            if not os.path.exists(os.path.join(app.config['UPLOAD_FOLDER'], file.url)):
                for preview in file.previews:
                    db.session.delete(preview)
                db.session.delete(file)
                db.session.commit()

    def __repr__(self):
        return '<File %r>' % self.name
return '<File %r>' % self.name
class Preview(db.Model):
    """JPEG thumbnail generated for an uploaded File (one row per size)."""
    __tablename__ = 'previews'
    id = db.Column(db.Integer, primary_key=True)
    # one-many: Preview.file-File.previews
    bind_file_id = db.Column(db.Integer, db.ForeignKey('files.id'))
    url = db.Column(db.String(512), unique=True)  # path relative to UPLOAD_FOLDER
    size = db.Column(db.Integer)                  # thumbnail bounding-box edge in px

    def __repr__(self):
        # Fix: Preview has no 'nickname' attribute (the original raised
        # AttributeError); use the unique url instead.
        return '<Preview %r>' % self.url
"icrdr2010@gmail.com"
] | icrdr2010@gmail.com |
6ee7e72ba92ecde352fbe7130382ee1d2873e524 | d5f080543d3004f560c1ae636900080f1c7e8b31 | /configs/D2Det/D2Det_detection_r101_fpn_2x.py | 4e184d8220699043f302581714e52140c0c3b0ba | [
"MIT"
] | permissive | Randl/D2Det | dc7bd395b8c538e96f390d7ce5c396f87ee89bd8 | 5e35b218d9de824e73e0a49953af25a0c6984e74 | refs/heads/master | 2022-09-25T13:52:21.141590 | 2020-06-11T09:08:47 | 2020-06-11T09:08:47 | 271,498,684 | 0 | 0 | MIT | 2020-06-11T08:56:15 | 2020-06-11T08:56:15 | null | UTF-8 | Python | false | false | 5,685 | py | # model settings
# mmdetection config: D2Det with ResNet-101 + FPN, 2x (24-epoch) schedule.
model = dict(
    type='D2Det',
    pretrained='torchvision://resnet101',
    backbone=dict(
        type='ResNet',
        depth=101,
        num_stages=4,
        out_indices=(0, 1, 2, 3),
        frozen_stages=1,  # freeze stem + stage 1
        style='pytorch'),
    neck=dict(
        type='FPN',
        in_channels=[256, 512, 1024, 2048],
        out_channels=256,
        num_outs=5),
    rpn_head=dict(
        type='RPNHead',
        in_channels=256,
        feat_channels=256,
        anchor_scales=[8],
        anchor_ratios=[0.5, 1.0, 2.0],
        anchor_strides=[4, 8, 16, 32, 64],
        target_means=[.0, .0, .0, .0],
        target_stds=[1.0, 1.0, 1.0, 1.0],
        loss_cls=dict(
            type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0)),
    bbox_roi_extractor=dict(
        type='SingleRoIExtractor',
        roi_layer=dict(
            type='DeformRoIPoolingPack',
            out_size=7,
            sample_per_part=1,
            out_channels=256,
            no_trans=False,
            group_size=1,
            trans_std=0.1),
        out_channels=256,
        featmap_strides=[4, 8, 16, 32]),
    # Classification-only head: regression is handled by D2Det_head below.
    bbox_head=dict(
        type='SharedFCBBoxHead',
        with_reg=False,
        num_fcs=2,
        in_channels=256,
        fc_out_channels=1024,
        roi_feat_size=7,
        num_classes=81,  # 80 COCO classes + background
        target_means=[0., 0., 0., 0.],
        target_stds=[0.1, 0.1, 0.2, 0.2],
        reg_class_agnostic=False,
        loss_cls=dict(
            type='CrossEntropyLoss', use_sigmoid=False, loss_weight=2.0)),
    reg_roi_extractor=dict(
        type='SingleRoIExtractor',
        roi_layer=dict(type='RoIAlign', out_size=14, sample_num=2),
        out_channels=256,
        featmap_strides=[4, 8, 16, 32]),
    D2Det_head=dict(
        type='D2DetHead',
        num_convs=8,
        in_channels=256,
        norm_cfg=dict(type='GN', num_groups=36),
        MASK_ON=False))
# model training and testing settings
train_cfg = dict(
    rpn=dict(
        assigner=dict(
            type='MaxIoUAssigner',
            pos_iou_thr=0.7,
            neg_iou_thr=0.3,
            min_pos_iou=0.3,
            ignore_iof_thr=-1),
        sampler=dict(
            type='RandomSampler',
            num=256,
            pos_fraction=0.5,
            neg_pos_ub=-1,
            add_gt_as_proposals=False),
        allowed_border=0,
        pos_weight=-1,
        debug=False),
    rpn_proposal=dict(
        nms_across_levels=False,
        nms_pre=2000,
        nms_post=2000,
        max_num=2000,
        nms_thr=0.7,
        min_bbox_size=0),
    rcnn=dict(
        assigner=dict(
            type='MaxIoUAssigner',
            pos_iou_thr=0.5,
            neg_iou_thr=0.5,
            min_pos_iou=0.5,
            ignore_iof_thr=-1),
        sampler=dict(
            type='RandomSampler',
            num=512,
            pos_fraction=0.25,
            neg_pos_ub=-1,
            add_gt_as_proposals=True),
        pos_radius=1,
        pos_weight=-1,
        max_num_grid=192,
        debug=False))
test_cfg = dict(
    rpn=dict(
        nms_across_levels=False,
        nms_pre=1000,
        nms_post=1000,
        max_num=1000,
        nms_thr=0.7,
        min_bbox_size=0),
    rcnn=dict(
        score_thr=0.03, nms=dict(type='nms', iou_thr=0.5), max_per_img=125))
# dataset settings
dataset_type = 'CocoDataset'
data_root = 'data/coco/'
img_norm_cfg = dict(
    mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
train_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(type='LoadAnnotations', with_bbox=True),
    dict(type='Resize', img_scale=(1333, 800), keep_ratio=True),
    dict(type='RandomFlip', flip_ratio=0.5),
    dict(type='Normalize', **img_norm_cfg),
    dict(type='Pad', size_divisor=32),
    dict(type='DefaultFormatBundle'),
    dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']),
]
test_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(
        type='MultiScaleFlipAug',
        img_scale=(1333, 800),
        flip=False,
        transforms=[
            dict(type='Resize', keep_ratio=True),
            dict(type='RandomFlip'),
            dict(type='Normalize', **img_norm_cfg),
            dict(type='Pad', size_divisor=32),
            dict(type='ImageToTensor', keys=['img']),
            dict(type='Collect', keys=['img']),
        ])
]
data = dict(
    imgs_per_gpu=2,
    workers_per_gpu=2,
    train=dict(
        type=dataset_type,
        ann_file=data_root + 'annotations/instances_train2017.json',
        img_prefix=data_root + 'train2017/',
        pipeline=train_pipeline),
    val=dict(
        type=dataset_type,
        ann_file=data_root + 'annotations/instances_val2017.json',
        img_prefix=data_root + 'val2017/',
        pipeline=test_pipeline),
    test=dict(
        type=dataset_type,
        ann_file=data_root + 'annotations/instances_val2017.json',
        img_prefix=data_root + 'val2017/',
        pipeline=test_pipeline))
evaluation = dict(interval=1, metric='bbox')
# optimizer
optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001)
optimizer_config = dict(grad_clip=None)
# learning policy
lr_config = dict(
    policy='step',
    warmup='linear',
    warmup_iters=1000,
    warmup_ratio=1.0 / 80,
    step=[20, 23])  # LR drops at epochs 20 and 23
checkpoint_config = dict(interval=1)
# yapf:disable
log_config = dict(
    interval=50,
    hooks=[
        dict(type='TextLoggerHook'),
        # dict(type='TensorboardLoggerHook')
    ])
# yapf:enable
# runtime settings
total_epochs = 24
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = './work_dirs/D2Det_detection_r101_fpn_2x'
load_from = None
resume_from = None
workflow = [('train', 1)]
"connor@tju.edu.cn"
] | connor@tju.edu.cn |
6907542e7974952c900a54a7451dffc120b1d850 | 719990ee24f8dbfc11024bb5f1ec22cd3b8b4c62 | /scrape.py | ead90be26583ec15b8e8d7b048ed45fdfff202d1 | [] | no_license | raymond-devries/usara-nationals | 790eed3d34a2f2ac2e74c141ae493c51d6eb50c3 | 1c9f82d686de730bde0296f40bf9d92a0ec78bbb | refs/heads/master | 2023-08-12T04:28:51.849196 | 2021-09-19T18:32:43 | 2021-09-19T18:32:43 | 405,495,892 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 527 | py | from selenium import webdriver
import json
from selenium.webdriver.firefox.options import Options
def main():
    """Fetch the live-tracking data with headless Firefox and dump it as JSON."""
    firefox_options = Options()
    firefox_options.headless = True
    browser = webdriver.Firefox(options=firefox_options)
    browser.get("https://adventureenablers.s3.amazonaws.com/Tracking/2021USARANationals/SI/index.html")
    # The tracker page exposes its dataset through a page-level JS function.
    payload = browser.execute_script("return getData(5)")
    browser.quit()
    with open("raw_data.json", "w") as out_file:
        json.dump(payload, out_file, ensure_ascii=False, indent=4)


if __name__ == '__main__':
    main()
| [
"raymond.l.devries@gmail.com"
] | raymond.l.devries@gmail.com |
da12b13c74af1380f00c4a72cbbbc0e05debc10d | 0d68ecb5f8ad4577163550ffd48737ab1c677b38 | /src/blockit/utils/io.py | 5b7c9a0f6d90a5f199e92da246bd9384d94e578f | [
"MIT"
] | permissive | jgarte/blockit | 8372c35ea9d6ed14ab67b48de753e7dfc02cfc84 | e0311444701ac1a1d0fbec623f6ebc72f1b37e6b | refs/heads/main | 2023-05-31T04:59:43.541995 | 2021-06-21T14:48:30 | 2021-06-21T14:48:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 830 | py | """File I/O util functions."""
from pathlib import Path
from blockit.txn.txn_block import TransactionBlock
def get_project_root_path() -> Path:
    """Get project root path.

    Returns:
        Path: Absolute path of the project root
    """
    # Three levels above this file (src/blockit/utils/io.py) is the repo root.
    return Path(__file__).parents[3].absolute()
def write_block(txn_block: TransactionBlock, path: Path = None) -> None:
    """Write the block's transaction ids to disk, one id per line.

    Args:
        txn_block (TransactionBlock): Transaction block to save
        path (Path): Destination file; defaults to <project root>/block.txt
    """
    transaction_ids = [txn.txid for txn in txn_block.transactions]
    save_path = get_project_root_path() / "block.txt" if path is None else path
    with open(save_path, "w") as handle:
        handle.writelines("{}\n".format(txid) for txid in transaction_ids)
"ank@leoank.me"
] | ank@leoank.me |
627dc9d2396b751179bf4503d940b93c9c792dcf | b4ea78b8b33e2dee808290e8f87038108b12cf7b | /Python-learning/画图/others/test6.py | eec99faddab877ca9a2c0f07386452d0d66a70e3 | [] | no_license | liang2713020/Learning | d275ddfb8032d49f42143dc71bfd52fdeacb8932 | fbfdc12ce2877af4be020082885519334523c8ab | refs/heads/master | 2021-01-22T19:55:06.788211 | 2015-07-26T13:54:19 | 2015-07-26T13:54:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 442 | py |
from pylab import *
figure(figsize=(8,5), dpi=80)
subplot(111)
X = np.linspace(-np.pi, np.pi, 256,endpoint=True)
C,S = np.cos(X), np.sin(X)
plot(X, C, color="blue", linewidth=2.5, linestyle="-")
plot(X, S, color="red", linewidth=2.5, linestyle="-")
xlim(-4.0,4.0)
xticks(np.linspace(-4,4,9,endpoint=True))
ylim(-1.0,1.0)
yticks(np.linspace(-1,1,5,endpoint=True))
#savefig("../figures/exercice_3.png",dpi=72)
show()
| [
"568191222@qq.com"
] | 568191222@qq.com |
0e0b558e0962614dfcb87a6d486c3d9fdd1a129a | 7327dda3e2c72026bfe0de5185645fb24d0e3fe0 | /week2/iterative-sorting.py | b18c3fbefd43c624e00aa455a3487c7eacb86247 | [] | no_license | CarnunMP/CS-morning-challenges | 782b1774344361c69929ab1f0006f99ea7fe5abc | b1bb02d4130d3a4e0f6aa6cd28673f92982ea054 | refs/heads/master | 2021-01-03T06:19:33.004744 | 2020-02-25T21:55:07 | 2020-02-25T21:55:07 | 239,958,519 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,949 | py | ### Objective challenge:
### 1. Try writing a Python function to perform a linear search on a set of data.
### 2. Try writing a Python function to perform a binary search on a set of data.
### 3. Can you rewrite the above function so that it uses recursion?
# Sorted sample input shared by the search demos below.
test_data = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19]
# 1)
def linear_search(arr, target):
    """Scan arr left to right for target.

    Returns a dict with 'index' (position of target, or None) and 'steps'
    (number of comparisons made, counting from 1).
    """
    comparisons = 1
    for position, value in enumerate(arr):
        if value == target:
            return {
                'index': position,
                'steps': comparisons
            }
        comparisons += 1
    return {
        'index': None,
        'steps': comparisons
    }
# Demo call for part 1.
print(linear_search(test_data, 10)) # Expect: {'index': 10, 'steps': 11}
# 2)
def binary_search(arr, target):
    """Binary search over a sorted arr.

    Returns a dict with 'index' (position of target, or None) and 'steps'
    (number of loop iterations, counting from 1).

    Fix: the original looped `while left_index != right_index` and never
    examined the last remaining candidate, so e.g. the final element of
    the list (or the only element of a one-element list) was reported as
    missing. Using inclusive bounds with `left <= right` covers every
    candidate and also handles an empty list.
    """
    left_index = 0
    right_index = len(arr) - 1
    steps = 1

    while left_index <= right_index:
        middle_index = left_index + ( (right_index - left_index) // 2 )
        if arr[middle_index] == target:
            return {
                'index': middle_index,
                'steps': steps
            }
        elif arr[middle_index] > target:
            right_index = middle_index - 1
        else:
            left_index = middle_index + 1
        steps += 1

    return {
        'index': None,
        'steps': steps
    }
# Demo call for part 2.
print(binary_search(test_data, 10))
# 3)
def recursive_binary_search(arr, target, left_index_offset = 0, steps = 1):
    """Recursive binary search over a sorted arr.

    Each recursion searches a slice; left_index_offset tracks the slice's
    position in the original list so the returned 'index' is absolute.
    Returns a dict with 'index' (or None) and 'steps' (recursion depth).

    Fix: the original wrapped the body in a bare `except:` that swallowed
    every exception (including typos/bugs) just to detect the empty-slice
    base case. The empty slice is now tested explicitly and no exception
    handling is needed.
    """
    if not arr:
        return {
            'index': None,
            'steps': steps
        }

    middle_index = (len(arr) - 1) // 2
    if arr[middle_index] == target:
        return {
            'index': left_index_offset + middle_index,
            'steps': steps
        }
    elif arr[middle_index] > target:
        # Search the left half; the offset into the original list is unchanged.
        return recursive_binary_search(arr[:middle_index], target, left_index_offset, steps + 1)
    else:
        # Search the right half; shift the offset past the discarded prefix.
        return recursive_binary_search(arr[middle_index + 1:], target, left_index_offset + middle_index + 1, steps + 1)
print(recursive_binary_search(test_data, 20))
### Objective challenge:
### 1. What will the array [25, 67, 4, 33, 19, 40] look like after each pass of the Selection Sort algorithm?
### 2. What will the same array look like after each pass of the Insertion Sort algorithm?
# 1) 0th: [25, 67, 4, 33, 19, 40]
# 1st: [4, 67, 25, 33, 19, 40]
# 2nd: [4, 19, 25, 33, 67, 40]
# 3rd: [4, 19, 25, 33, 67, 40]
# 4th: [4, 19, 25, 33, 67, 40]
# 5th: [4, 19, 25, 33, 40, 67]
# 2) 0th: [25, 67, 4, 33, 19, 40]
# 1st: [25, 67, 4, 33, 19, 40]
# 2nd: [4, 25, 67, 33, 19, 40]
# 3rd: [4, 25, 33, 67, 19, 40]
# 4th: [4, 19, 25, 33, 67, 40]
# 5th: [4, 19, 25, 33, 40, 67]
| [
"carnun@hotmail.co.uk"
] | carnun@hotmail.co.uk |
16a3403ab8a7c97642874c0b8f630e03fc070931 | 2546d448f03a57152a701180077fcc904b1b944a | /schedule/urls.py | 8077ba2843cfce809db5893d0f5c814810d77fe0 | [] | no_license | NathanDai5287/Sharetrade | 61f52913591a404766654921c054663d83414a55 | 62a453364c0d97cf0b114e5286bfd0dc8fef44a5 | refs/heads/master | 2023-06-26T20:52:09.932366 | 2021-08-03T04:38:04 | 2021-08-03T04:38:04 | 387,053,412 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 847 | py | """schedule URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from django.contrib.auth import views as auth_views
urlpatterns = [
path('admin/', admin.site.urls),
path('', include("users.urls"))
]
| [
"nathandai2000@gmail.com"
] | nathandai2000@gmail.com |
1b5264d22279cc7d5f53699e4a0c0adf326e2398 | 2836975ce5ee74397fb674bdfd04a164d00baafb | /main/migrations/0015_auto_20201124_1105.py | 54d7a4575c36c78dab5ae3e805029eaf089a4b61 | [] | no_license | tz01x/rental | 57aedf6677ead989a089999b4802a6975d62ce0c | 103491c76c62b71901d3f758f9b9af59d2270fe4 | refs/heads/master | 2023-08-22T14:45:18.125694 | 2021-09-30T01:58:46 | 2021-09-30T01:58:46 | 332,413,051 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 808 | py | # Generated by Django 3.1.1 on 2020-11-24 05:05
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0014_auto_20201122_1658'),
]
operations = [
migrations.AddField(
model_name='property',
name='latlong',
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AddField(
model_name='property',
name='thana',
field=models.CharField(blank=True, max_length=400, null=True, verbose_name='Thana'),
),
migrations.AlterField(
model_name='property',
name='area',
field=models.CharField(blank=True, max_length=400, null=True, verbose_name='District'),
),
]
| [
"abdur963rahman@gmil.com"
] | abdur963rahman@gmil.com |
aa7b59318cba778a709f76ed4f709ab1a5fa40e7 | cc6d9fb4a7c7235ff5985ef17f4a554f19a0263d | /apps/transactions/templatetags/filters.py | 9706794c65b1abd95312c80471d048127a3ae137 | [] | no_license | timohermans/rabo-overview | 6c210a73a68b17620ee8df0985b9b4e28200081c | 0baea9631ee504b63046459718ea1a255992a18d | refs/heads/main | 2023-08-05T19:54:30.751983 | 2021-09-11T18:17:53 | 2021-09-11T18:17:53 | 393,132,275 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,389 | py | from datetime import date
from typing import Any, Iterator, List
from dateutil.relativedelta import relativedelta
from django import template
from apps.transactions.models import Account, Transaction
register = template.Library()  # template-filter registry for this module
@register.filter
def previous_month(source: date) -> date:
    """date - 1"""
    # Normalise to the first of the month, then step back one month.
    return date(source.year, source.month, 1) - relativedelta(months=1)
@register.filter
def next_month(source: date) -> date:
    """date + 1"""
    # Normalise to the first of the month, then step forward one month.
    return date(source.year, source.month, 1) + relativedelta(months=1)
@register.filter
def to_date_string(source: date) -> str:
    """date string for month hrefs"""
    # ISO format (YYYY-MM-DD) keeps hrefs sortable and unambiguous.
    return source.isoformat()
@register.filter
def receivers(accounts: List[Account]) -> List[Account]:
    """pulls out receivers from all accounts"""
    owned = []
    for account in accounts:
        if account.is_user_owner is True:
            owned.append(account)
    return owned
@register.filter
def short_account_number(account_number: str) -> str:
    """long IBANs are way too hard to read"""
    head = account_number[:2]
    tail = account_number[-4:]
    return "{}...{}".format(head, tail)
@register.filter
def of_receiver(
    transactions: List[Transaction], receiver: Account
) -> Iterator[Transaction]:
    """returns transactions of a user owned account"""
    def _matching():
        for txn in transactions:
            if txn.receiver == receiver:
                yield txn
    return _matching()
@register.filter
def get(o: object, key: str) -> Any:
"""I want property access in templates!"""
return getattr(o, key)
| [
"timo.hermans@kabisa.nl"
] | timo.hermans@kabisa.nl |
1b20c5a22901e1d346f020449eeffb7621afe266 | 5f51fdeb5efc6cbcc0736957d2f16eddf9214671 | /python/mind_palace/product_ranker/prepare_data/integerize_clickstream.py | e724ec724f5bfb745e44fd81b6c4d2eb7a35091a | [] | no_license | thejusvm/learn-cascading | aa438e74f26b94a880ad04bb425092f5145612e3 | 1e0fd76f7f746e4c177661e40c5abd4fe081643f | refs/heads/master | 2021-09-14T17:12:29.879467 | 2018-03-01T15:32:05 | 2018-03-01T15:32:05 | 103,110,403 | 0 | 0 | null | 2017-09-11T08:32:29 | 2017-09-11T08:32:29 | null | UTF-8 | Python | false | false | 6,607 | py | import cPickle as pickle
import glob
import json
import numpy as np
import os
import pandas as pd
import time
from contextlib import closing
from functools import partial
from multiprocessing import Pool
import mind_palace.product_ranker.constants as CONST
from mind_palace.product_ranker.commons import init_attribute_dicts, generate_key
"""
Given a file containing the click through data with product attributes,
this file integerizes the data with different integer dictionary for each attribute.
It uses DictIntegerizer class to assign a unique integer for every unique value of the attribute.
TODO : this code currently instantiates a new DictIntegerizer for each attribute,
it needs to support taking a dict in the form of a pickled file and integerizing using it.
"""
def logBreak() :
print "------------------------------------------"
def integerize(attributes, attribute_dicts, products_attributes) :
attributes_integerized = []
for attribute in attributes :
attribute_dict = attribute_dicts[attribute]
if attribute in products_attributes :
attribute_val = products_attributes[attribute]
else :
attribute_val = CONST.MISSING_DATA_TEXT
attribute_integerized = attribute_dict.only_get(attribute_val, missing_val=CONST.DEFAULT_DICT_KEYS.index(CONST.MISSING_DATA_TEXT))
attributes_integerized.append(attribute_integerized)
return attributes_integerized
def get_exploded_columns(keys, field_name):
return map(lambda x : field_name + "_" + x, keys)
def add_to_row(row, attributes, attribute_vals, key_prefix):
for i in range(len(attributes)) :
attribute = attributes[i]
if len(attribute_vals) != 0 :
attribute_val = attribute_vals[i]
else :
attribute_val = []
row[generate_key(key_prefix, attribute)] = attribute_val
def cross_attribute_prefix(attributes, key_prefixes) :
keys = []
for attribute in attributes :
for key_prefix in key_prefixes :
keys.append(generate_key(key_prefix, attribute))
return keys
def integerize_single_val_column(df, column_name, new_column_prefix, attributes, attribute_dicts) :
integerize_single = lambda x: integerize(attributes, attribute_dicts, json.loads(x))
integerized_cols = df[column_name].apply(integerize_single)
for i in range(len(attributes)) :
attribute = attributes[i]
df[generate_key(new_column_prefix, attribute)] = integerized_cols.apply(lambda x : json.dumps(x[i]))
def integerize_multi_val_column(df, column_name, new_column_prefix, attributes, attribute_dicts) :
integerize_multiple = lambda y: np.array(map(lambda x: integerize(attributes, attribute_dicts, x), json.loads(y))).T
integerized_cols = df[column_name].apply(integerize_multiple)
for i in range(len(attributes)) :
attribute = attributes[i]
df[generate_key(new_column_prefix, attribute)] = integerized_cols.apply(lambda x : json.dumps(x[i].tolist() if len(x) > 0 else []))
def process_row(df, attributes, attribute_dicts):
integerize_single_val_column(df, "positiveProducts", CONST.POSITIVE_COL_PREFIX, attributes, attribute_dicts)
integerize_multi_val_column(df, "negativeProducts", CONST.NEGATIVE_COL_PREFIX, attributes, attribute_dicts)
integerize_multi_val_column(df, "pastClickedProducts", CONST.CLICK_COL_PRERFIX, attributes, attribute_dicts)
integerize_multi_val_column(df, "pastBoughtProducts", CONST.BOUGHT_COL_PREFIX, attributes, attribute_dicts)
def process_file(data_path,
attributes,
attribute_dicts):
df = pd.read_csv(data_path, sep="\t")
# df = df[df["findingMethod"].apply(lambda x: str(x).lower() == "search")]
df = df[df["findingMethod"].apply(lambda x: str(x).lower() == "search" or str(x).lower() == "organic")]
start = time.clock()
process_row(df, attributes, attribute_dicts)
attribute_keys = cross_attribute_prefix(attributes, CONST.OUTPUTS_PER_ATTRIBUTE)
necessaryKeys = ["timestamp"]
necessaryKeys += attribute_keys
data = df[necessaryKeys]
print "time taken by data preprocess : " + str(time.clock() - start)
return data
def get_attributedict_path(data_path):
return data_path + "/productdict.pickle"
def get_train_path(data_path):
return data_path + "/train.tsv"
def get_test_path(data_path):
return data_path + "/test.tsv"
def get_attributedict(data_path) :
with open(data_path, 'rb') as handle:
return pickle.load(handle)
def prepare_data(raw_data_path,
processed_data_path,
attributes,
attribute_dicts):
filenames = glob.glob(raw_data_path)
out_files = [processed_data_path + "/part-" + str(counter) for counter in range(len(filenames))]
io_files = zip(filenames, out_files)
with closing(Pool(processes=20)) as pool:
pool.map(partial(integerize_file, attributes, attribute_dicts), io_files)
return attribute_dicts
def integerize_file(attributes, attribute_dicts, io_file):
in_file, out_file = io_file
logBreak()
start = time.clock()
print "start file processing : " + in_file
pd = process_file(in_file, attributes, attribute_dicts)
print "end file processing : " + in_file + ", in " + str(time.clock() - start)
print out_file
start = time.clock()
pd.to_csv(out_file, sep="\t", index=False)
print "dumped content of " + in_file + " to " + out_file + " in " + str(time.clock() - start)
logBreak()
def integerize_clickstream(attributes, attribute_dicts, raw_data_path, output_path) :
prepare_data(raw_data_path, output_path, attributes, attribute_dicts)
if __name__ == '__main__' :
raw_data_path = "/Users/thejus/workspace/learn-cascading/data/sessionExplodeWithAttributes-201708.MOB.smaller" + "/part-*"
processed_data_path = "/Users/thejus/workspace/learn-cascading/data/sessionExplodeWithAttributes-201708.MOB.smaller.search.1"
os.makedirs(processed_data_path)
attributes = ["productId", "brand", "vertical"]
attribute_dicts = init_attribute_dicts(attributes, CONST.DEFAULT_DICT_KEYS)
dicts = integerize_clickstream(attributes, attribute_dicts, raw_data_path, processed_data_path)
product_dict_file = get_attributedict_path(processed_data_path)
start = time.clock()
with open(product_dict_file, 'w+b') as handle:
pickle.dump(dicts, handle, protocol=pickle.HIGHEST_PROTOCOL)
print "pickled attribute dicts into " + product_dict_file + " in " + str(time.clock() - start)
logBreak() | [
"thejus@flipkart.com"
] | thejus@flipkart.com |
1338076a2a3f108f9a4dc2d5342bb1e00f1c6a08 | bae29c2fb8eedd320bc881c2a22b70298ab0f38d | /icoder/settings.py | 967f1581a2d331e5cd6bb00382882744e5558c5f | [] | no_license | SourabhRishabhMishra/icoder | 1527604df1f93f04bc58c4471555381837da296d | 6f4e279c1e31e99e91fbd7c091c9e3088cc1d2e5 | refs/heads/master | 2022-12-07T05:47:02.222021 | 2020-08-19T01:20:32 | 2020-08-19T01:20:32 | 288,088,340 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,353 | py | """
Django settings for icoder project.
Generated by 'django-admin startproject' using Django 3.0.8.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
from django.contrib.messages import constants as messages
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '4nf#h&87fk-6=prj*#-3tns#4jl#qls79q79ntbw62n42esed^'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'home.apps.HomeConfig',
'blog.apps.BlogConfig',
'django.contrib.humanize',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'icoder.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': ['templates'],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'icoder.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR,"static"),
]
MESSAGE_TAGS = {
messages.ERROR:'danger'
} | [
"sourabhm384@gmail.com"
] | sourabhm384@gmail.com |
5e3d8619d84b4b1e96647c74f4f542131e866de3 | 0dfa9dc572ea50ae81de8052f4d2ac79bb31a243 | /test_incorrect_ip_address_managers.py | 6feda65aa49efdb3e75d6789fac86e0fa77e7db8 | [] | no_license | dwjhaines/selenium | 6b52c9c1d02d6088695c49fc8a4dfc2d3323206f | 44e0e1285be92013e6d8d2cece7419431ac3f4e3 | refs/heads/master | 2020-04-17T19:45:58.070917 | 2016-09-14T14:53:40 | 2016-09-14T14:53:40 | 66,008,161 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,416 | py | ###############################################################################################
# #
# test_incorrect_ip_address_managers.py #
# #
# Tests that up to five managers can log in when the only license has an incorrect IP #
# address. #
# #
###############################################################################################
import time
import um_utils
import db_utils
from selenium import webdriver
import pyodbc
if __name__ == "__main__":
# List of managers i.e. users with manager rights
managers = ['maria.a', 'maria.b', 'maria.c', 'maria.d', 'maria.e', 'maria.f', 'maria.g']
# Empty list to be filled with user objects
users = []
testFailed = 0
# Set up connection to database
connection = db_utils.connectToDb()
cur = connection.cursor()
# Delete all existing licenses
db_utils.deleteAllLicenses(connection, cur)
maxUsers = 0
maxManagers = maxUsers + 5
# Install license with and incorrect IP address
maxUsers = db_utils.addUserLicenseIncorrectIPAddress (connection, cur)
print 'License installed with invalid IP address'
# Get the number of users already logged in
count = db_utils.getNumberOfActiveUsers(connection, cur)
print 'Max users allowed: %d' % maxUsers
print 'Max managers allowed: %d' % maxManagers
print 'Number of users already logged in: %d' % count
print 'Opening browsers........'
for manager in managers:
# For each manager, create a user object and add object to users list
users.append(um_utils.user(manager, 'quantel@'))
# Keep trying to log in each of the editors. Once the max number of users have been logged in, no further logins should be allowed.
for user in users:
result = um_utils.login(user)
if (result == 0 or result == 1):
user.loggedin = True
count = db_utils.getNumberOfActiveUsers(connection, cur)
print '\tNumber of active users (max: %d): %d' % (maxManagers, count)
if (count > maxManagers):
testFailed = 1
print 'Test Failed: Max number of users exceded.'
print 'Sleeping for 10 secs.................'
time.sleep( 10 )
# Log out any users that were logged in and close all the browsers
for user in users:
if (user.loggedin == True):
um_utils.logout(user)
user.loggedin = False
time.sleep( 1 )
um_utils.closeBrowser(user)
# Delete incorrect license and reinstall license for five users
db_utils.deleteAllLicenses(connection, cur)
maxUsers = db_utils.addFiveUserLicense(connection, cur)
print 'License installed for %d users' % maxUsers
# Close connection to database
db_utils.closeConnection(connection, cur)
# Print test result
if (testFailed == 1):
print '************ Test Failed ************'
else:
print '************ Test Passed ************' | [
"David.Haines@s-a-m.com"
] | David.Haines@s-a-m.com |
7d812592e10d2a0d003e3156aef68f26c0796648 | 601adbb343313e7cce71b9b8d06620f541f349e5 | /tests/test_ci/test_runners/test_BaseRunner.py | 4545078bf38683e3c939099329a8ad2f0d27d15f | [] | no_license | jgsogo/conan-sword-and-sorcery | f3ff2c9b739410a7fb6eb97c49470d585fd1ab4c | 143f05d8b469a3afc9c807ec87fbe2dcbe63dab3 | refs/heads/master | 2021-04-06T06:23:40.584031 | 2018-08-15T16:50:43 | 2018-08-15T16:50:43 | 124,441,534 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,120 | py | # -*- coding: utf-8 -*-
import os
import unittest
try:
from unittest import mock
except ImportError:
import mock
from conan_sword_and_sorcery.ci.runners import AppveyorRunner
from conan_sword_and_sorcery.ci.runners.base_runner import SUCCESS, FAIL, DRY_RUN, BaseRunner
from conan_sword_and_sorcery.parsers.settings import get_settings
from conan_sword_and_sorcery.utils.environ import context_env
from conan_sword_and_sorcery.parsers.profile import profile_for
from tests.utils import TestCaseEnvClean
class JobGeneratorClass4Testing:
def __init__(self, *args, **kwargs):
pass
class BaseRunner4Testing(BaseRunner):
job_generator_class = JobGeneratorClass4Testing
class TestBaseRunnerStableBranch(TestCaseEnvClean):
def setUp(self):
self.settings = get_settings()
# Dummy (but valid) conanfile
me = os.path.dirname(__file__)
self.conanfile = os.path.join(me, '..', '..', 'files', 'single', 'conanfile01.py')
def test_enumerate_jobs(self):
runner = AppveyorRunner(conanfile=self.conanfile, settings=self.settings, osys="Windows")
with context_env(CONAN_VISUAL_VERSIONS="12", CONAN_VISUAL_RUNTIMES="MT"):
self.assertTrue(len(list(runner.enumerate_jobs())) != 0)
def test_is_pull_request(self):
runner = BaseRunner4Testing(conanfile=self.conanfile, settings=self.settings, osys="Windows")
with self.assertRaises(NotImplementedError):
runner.is_pull_request()
def test_get_branch_name(self):
runner = BaseRunner4Testing(conanfile=self.conanfile, settings=self.settings, osys="Windows")
with self.assertRaises(NotImplementedError):
runner.get_branch_name()
def test_dry_run(self):
runner = AppveyorRunner(conanfile=self.conanfile, settings=self.settings, osys="Windows", dry_run=True)
with context_env(CONAN_GCC_VERSIONS="6", CONAN_ARCHS='x86', CONAN_BUILD_PACKAGES='pckg1'):
compiler, options = list(runner.enumerate_jobs())[0]
with profile_for(compiler=compiler) as profile_file:
runner.set_compiler(compiler)
runner.set_profile(profile_file)
r = runner.run(options={'shared': True}, username='test', channel='testing')
self.assertEqual(r, DRY_RUN)
def test_run_fail(self):
runner = AppveyorRunner(conanfile=self.conanfile, settings=self.settings, osys="Windows")
with context_env(CONAN_GCC_VERSIONS="6", CONAN_ARCHS='x86', CONAN_BUILD_PACKAGES='pckg1'):
compiler, options = list(runner.enumerate_jobs())[0]
with profile_for(compiler=compiler) as profile_file:
runner.set_compiler(compiler)
runner.set_profile(profile_file)
with mock.patch('conan_sword_and_sorcery.ci.runners.base_runner.cmd', return_value=1) as mocked_cmd:
r = runner.run(options={'shared': True}, username='test', channel='testing')
self.assertEqual(r, FAIL)
def test_run_success(self):
runner = AppveyorRunner(conanfile=self.conanfile, settings=self.settings, osys="Windows")
with context_env(CONAN_GCC_VERSIONS="6", CONAN_ARCHS='x86', CONAN_BUILD_PACKAGES='pckg1'):
compiler, options = list(runner.enumerate_jobs())[0]
with profile_for(compiler=compiler) as profile_file:
runner.set_compiler(compiler)
runner.set_profile(profile_file)
with mock.patch('conan_sword_and_sorcery.ci.runners.base_runner.cmd', return_value=0) as mocked_cmd:
r = runner.run(options={'shared': True}, username='test', channel='testing')
self.assertEqual(r, SUCCESS)
args, kwargs = mocked_cmd.call_args
self.assertEqual(len(args), 0) # All arguments are passed with name
self.assertEqual(kwargs['exception'], None)
command = kwargs.get('command')
self.assertIn('--build=pckg1', command)
self.assertIn('--build=outdated', command)
self.assertIn('--build={}'.format(runner.recipe.name), command)
self.assertIn('--profile {}'.format(profile_file), command)
self.assertIn('-o {}:shared=True'.format(runner.recipe.name), command)
def test_is_upload_requested(self):
runner = AppveyorRunner(conanfile=self.conanfile, settings=self.settings, osys="Windows")
with context_env(CONAN_UPLOAD_ONLY_WHEN_STABLE="True", APPVEYOR_REPO_BRANCH='non-stable-branch'):
self.assertFalse(runner.is_stable_branch())
self.assertFalse(runner.is_upload_requested())
with context_env(CONAN_UPLOAD_ONLY_WHEN_STABLE="False", APPVEYOR_REPO_BRANCH='non-stable-branch'):
self.assertFalse(runner.is_stable_branch())
self.assertTrue(runner.is_upload_requested())
with context_env(CONAN_UPLOAD_ONLY_WHEN_STABLE="False", APPVEYOR_REPO_BRANCH='stable/v1.2.3'):
self.assertTrue(runner.is_stable_branch())
self.assertTrue(runner.is_upload_requested())
with context_env(CONAN_UPLOAD_ONLY_WHEN_STABLE="True", APPVEYOR_REPO_BRANCH='stable/v1.2.3'):
self.assertTrue(runner.is_stable_branch())
self.assertTrue(runner.is_upload_requested())
def test_upload(self):
runner = AppveyorRunner(conanfile=self.conanfile, settings=self.settings, osys="Windows")
with mock.patch('conan_sword_and_sorcery.ci.runners.base_runner.upload', return_value=0) as mocked_upload:
with context_env(CONAN_UPLOAD_ONLY_WHEN_STABLE="True", APPVEYOR_REPO_BRANCH='non-stable-branch'):
runner.upload(username='test', channel='testing')
with context_env(CONAN_UPLOAD_ONLY_WHEN_STABLE="False", APPVEYOR_REPO_BRANCH='non-stable-branch'):
runner.upload(username='test', channel='testing')
args, kwargs = mocked_upload.call_args
self.assertEqual(kwargs['username'], 'test')
| [
"jgsogo@gmail.com"
] | jgsogo@gmail.com |
d8ca730c49e849faef22bb61d6e7c1ea1853c890 | 694d57c3e512ce916269411b51adef23532420cd | /python/chapter-1/lab4-exec1.2.py | 00e1dd5356363c18fb8e1045f63f53286f0a515a | [] | no_license | clovery410/mycode | 5541c3a99962d7949832a0859f18819f118edfba | e12025e754547d18d5bb50a9dbe5e725fd03fd9c | refs/heads/master | 2021-05-16T02:46:47.996748 | 2017-05-10T23:43:50 | 2017-05-10T23:43:50 | 39,235,141 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 541 | py | def gcb_recur(a, b):
smaller_para = min(a, b)
larger_para = max(a, b)
remainder = larger_para % smaller_para
if smaller_para % remainder == 0:
return remainder
return gcb_recur(smaller_para, remainder)
print(gcb_recur(50, 35))
def gcb_itera(a, b):
smaller_para = min(a, b)
larger_para = max(a, b)
remainder = larger_para % smaller_para
while not smaller_para % remainder == 0:
smaller_para, remainder = remainder, smaller_para % remainder
return remainder
print(gcb_itera(50, 35))
| [
"admin@admins-MacBook-Air.local"
] | admin@admins-MacBook-Air.local |
3b98e43e2f3dc2377b74432e9fe99c572da37f2a | 4904acd900496b4883c2f5b4aa6b45d1ef6654c0 | /graphgallery/gallery/nodeclas/tensorflow/__init__.py | 1cf21d123e086ed846bcb034e8d4271c9735498d | [
"MIT"
] | permissive | blindSpoter01/GraphGallery | aee039edd759be9272d123463b0ad73a57e561c7 | e41caeb32a07da95364f15b85cad527a67763255 | refs/heads/master | 2023-06-17T11:42:27.169751 | 2021-07-15T03:07:39 | 2021-07-15T03:07:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 782 | py | from .gcn import GCN
from .gat import GAT
from .clustergcn import ClusterGCN
from .sgc import SGC
from .gwnn import GWNN
from .robustgcn import RobustGCN
from .graphsage import GraphSAGE
from .fastgcn import FastGCN
from .chebynet import ChebyNet
from .densegcn import DenseGCN
from .lgcn import LGCN
from .BVAT.obvat import OBVAT
from .BVAT.sbvat import SBVAT
from .gmnn import GMNN
from .dagnn import DAGNN
from .mlp import MLP
from .tagcn import TAGCN
from .appnp import APPNP, PPNP
from .ssgc import SSGC
from .agnn import AGNN
from .arma import ARMA
# experimental model
from .experimental.edgeconv import EdgeGCN
from .experimental.s_obvat import SimplifiedOBVAT
from .experimental.gcn_mix import GCN_MIX
from .experimental.gcna import GCNA
from .experimental.sat import SAT
| [
"cnljt@outlook.com"
] | cnljt@outlook.com |
6aef4706708cb0d55ce4d56b5e7fcbfcba763ea4 | 5a1d08aac9ed0c730e4f97b0e766c6763cfaab1f | /gb_chat/common/ui_keyboard_interrupt_helper.py | f393750b04ea94f8e2bfb5f048664114872d7b25 | [
"Apache-2.0"
] | permissive | Cerzon/gb_chat | 0b2965e046bcf4d832fb398361271d8eae19e50f | b4f8a6bf62b0971a135fbb2083456193f7a816cb | refs/heads/main | 2023-04-24T12:26:44.142068 | 2021-05-03T14:52:01 | 2021-05-03T14:52:01 | 360,984,777 | 0 | 0 | Apache-2.0 | 2021-05-03T14:52:02 | 2021-04-23T19:22:17 | Python | UTF-8 | Python | false | false | 685 | py | """
This solution is taken from https://coldfix.de/2016/11/08/pyqt-boilerplate/#keyboardinterrupt-ctrl-c
"""
import signal
from typing import Callable
from PyQt5.QtCore import QCoreApplication, QTimer
def _interrupt_handler(app: QCoreApplication) -> None:
app.quit()
def _safe_timer(timeout: int, fun: Callable[[], None]) -> None:
def timer_event() -> None:
try:
fun()
finally:
QTimer.singleShot(timeout, timer_event)
QTimer.singleShot(timeout, timer_event)
def setup_interrupt_handling(app: QCoreApplication) -> None:
signal.signal(signal.SIGINT, lambda *args: _interrupt_handler(app))
_safe_timer(50, lambda: None)
| [
"derlih@gmail.com"
] | derlih@gmail.com |
7d6e7442b32fe58141787e6063cf7b0ae35a74b7 | d49fbd7874b70a93cbc551afed1b87e3e47617a8 | /django/example/repositories/__init__.py | 1efb28043ae95783f5bde83b3415bcedaf028594 | [] | no_license | gitter-badger/tutorials-4 | bbdbb673e978118f9fec3212baa13f6f99226be0 | 3ce1cdb7c6d26f6df4d6bb94e82f83e8cab9389b | refs/heads/master | 2020-04-04T20:52:28.181616 | 2018-10-28T22:05:17 | 2018-10-28T22:05:17 | 156,264,177 | 0 | 0 | null | 2018-11-05T18:32:17 | 2018-11-05T18:32:16 | null | UTF-8 | Python | false | false | 528 | py | from .category import load_categories, load_category # noqa
from .entry import load_entries # noqa
from .notification import create_notification, load_notifications # noqa
from .price import ( # noqa
cheapest_price_by_category,
load_price,
prices_for_category,
)
from .profile import ( # noqa
add_balance,
create_profile,
del_balance,
load_profile,
save_profile,
)
from .subscription import create_subscription, load_subscription # noqa
from .user import create_user, save_password # noqa
| [
"proofit404@gmail.com"
] | proofit404@gmail.com |
4b4b5fbf0fcd97b37a90c5dc2ac660d862ce075b | d5a947bf9819e039f7238e61233c3bfab505deeb | /resume/models.py | d9a053342ec016c9bf06a472b188404a8bdfe82a | [] | no_license | P-Tanifor/JobSite | 09e9c30f3682f16fa125dce587b03e4e97e59e28 | afc5b0acd3a6e81fe96f7486a661705fa86b933e | refs/heads/main | 2023-09-03T12:43:00.162298 | 2021-10-29T11:32:45 | 2021-10-29T11:32:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 287 | py | from django.db import models
from django.contrib.auth.models import User
import django
# Create your models here.
class Resume(models.Model):
description = models.CharField(max_length=1024)
author = models.ForeignKey(django.contrib.auth.models.User, on_delete=models.CASCADE)
| [
"ptanifor@gmail.com"
] | ptanifor@gmail.com |
697a66de3c22d6e8c4704790081528d98a614067 | adc531efc839ec0fc8e67504e5429ad7696c57cc | /API_Article/migrations/0037_auto_20210430_2214.py | 9066067fd23b7e1551788489d86436814bad5e8b | [] | no_license | huynguyen-py/GraduateBackendAPI | 1521db57947804d4b2342060632a4ecf637993cd | bdfb25ae96fd1165ce431be48c03d80b73d32de8 | refs/heads/main | 2023-05-07T00:04:24.866230 | 2021-06-02T05:03:06 | 2021-06-02T05:03:06 | 372,700,747 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 654 | py | # Generated by Django 3.1.7 on 2021-04-30 15:14
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('API_Article', '0036_auto_20210316_0944'),
]
operations = [
migrations.AlterField(
model_name='comment',
name='content_cmt',
field=models.TextField(blank=True, default='Body_comment', null=True),
),
migrations.AlterField(
model_name='comment',
name='create_date_cmt',
field=models.DateTimeField(default=datetime.datetime(2021, 4, 30, 22, 13, 42, 6285)),
),
]
| [
"iamhuynguyen1002@gmail.com"
] | iamhuynguyen1002@gmail.com |
0a19884a78e49b4f205f1efa1f54e90fe911ff31 | a1e2d31682b80aca10bbcd25db550419e04e71e5 | /semesters/apps.py | 1b7401041ba6e4bb8cb6e06093f6fd800242272b | [] | no_license | letzzBuild/ElectiveAPI | 712b1043c533eb00657f0cb481a7fcdbc47bb376 | e89d3af596ae2898e4480f8b380d46e13cd6338d | refs/heads/main | 2023-08-14T16:37:45.390654 | 2021-09-26T07:57:13 | 2021-09-26T07:57:13 | 379,562,536 | 0 | 3 | null | null | null | null | UTF-8 | Python | false | false | 93 | py | from django.apps import AppConfig
class SemestersConfig(AppConfig):
name = 'semesters'
| [
"letzzBuild@gmail.com"
] | letzzBuild@gmail.com |
16a35cd3db0fd05415676003f38fabb5303dec8d | 6f9170a35fa5d758bec62c9a9be21fae11b6783f | /desafio027.py | 06d929be683134e41c754ae05dd54637ce1078f0 | [] | no_license | alineat/python-exercicios | 11e227ceadbff0e997e2ed427f54a721a5e76e9e | 9557dea8d057ded63e2abbed458d00660d9e8c4e | refs/heads/master | 2020-07-03T15:09:40.575431 | 2019-08-12T14:37:00 | 2019-08-12T14:37:00 | 201,947,953 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 296 | py | # Faça um programa que leia o nome completo de uma pessoa, mostrando em seguida o primeiro e o último nome separadamente
nome = str(input('Nome completo: ')).strip()
dividido = nome.split()
print('Primeiro nome: {}.\nSegundo nome: {}'
'.'.format(dividido [0], dividido[len(dividido)-1])) | [
"aline_atsuta@hotmail.com"
] | aline_atsuta@hotmail.com |
ccea4c2d3b3dedfd336b6570dafd6f1cbb2e431c | bf79aba1e47566d06fd9a7096a1d2dbbaf228748 | /detect.py | 26b305d7faa0cf22c154c6cb4cc9c78bbbeb2413 | [] | no_license | SubinMs/smartPrice | b998e315d8ffe610a75e7164f08fdf78000fb954 | d21b9d991fa513bb08ac097a36e905ba2563cc1c | refs/heads/master | 2020-09-14T16:28:47.955172 | 2019-11-21T17:16:46 | 2019-11-21T17:16:46 | 223,184,732 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,201 | py | import io, os
from numpy import random
from google.cloud import vision
from Pillow_Utility import draw_borders, Image
import pandas as pd
os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = r"GoogleCloudDemo_ServiceAcct_Token.json"
client = vision.ImageAnnotatorClient()
img_list = os.listdir('./images')
#file_name = 'image_name.jpg'
file_name = img_list[0]
image_path = os.path.join('./images', file_name)
save_path = os.path.join('./test_images/')
static_path = os.path.join('./static/result_img/')
with io.open(image_path, 'rb') as image_file:
content = image_file.read()
image = vision.types.Image(content=content)
response = client.object_localization(image=image)
localized_object_annotations = response.localized_object_annotations
pillow_image = Image.open(image_path)
df = pd.DataFrame(columns=['name', 'score'])
img_size = list(pillow_image.size)
width = img_size[0]
height = img_size[1]
ob = 0
for obj in localized_object_annotations:
df = df.append(
dict(
name=obj.name,
score=obj.score
),
ignore_index=True)
if (obj.name=='Mobile phone') :
vr = dict(ld_x=obj.bounding_poly.normalized_vertices[0].x * width,ld_y=obj.bounding_poly.normalized_vertices[0].y * height,
ru_x=obj.bounding_poly.normalized_vertices[2].x * width,ru_y=obj.bounding_poly.normalized_vertices[2].y * height)
leftDown_x = int(vr['ld_x'])
leftDown_y = int(vr['ld_y'])
rightup_x = int(vr['ru_x'])
rightup_y = int(vr['ru_y'])
ob = ob + 1
con = str(ob)
im = Image.open('images/'+file_name)
crp = im.crop((leftDown_x,leftDown_y,rightup_x,rightup_y))
crp.show()
crp.save(save_path+'img_'+con+'.jpg',format='JPEG')
crp.save(static_path+'img_'+con+'.jpg',format='JPEG')
#end if
r, g, b = random.randint(150, 255), random.randint(
150, 255), random.randint(150, 255)
draw_borders(pillow_image, obj.bounding_poly, (r, g, b),
pillow_image.size, obj.name, obj.score)
#end for
#os.remove(image_path)
| [
"noreply@github.com"
] | noreply@github.com |
602ecb7bb83ddd5c367c45eeaec4531e135d6824 | f87dc2227f9539ce9f87b8eb417d28f487ea2eac | /이진탐색/부품찾기.py | b3627efacbce4f210bf7ebc9dc2784e06dd4977a | [] | no_license | jjangsungwon/python-for-coding-test | fb1e019a2e68e426bb4f6770bffdc6289a647b4a | 8d9bf8de5de2a9724f75b35ea04dd9bcc40dec86 | refs/heads/master | 2022-12-16T02:53:55.967070 | 2020-08-26T08:41:14 | 2020-08-26T08:41:14 | 285,842,867 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 895 | py | def binary_search(target, start, end):
if start > end:
return None
while start <= end:
mid = (start + end) // 2
if array[mid] == target: # 일치
return "yes"
elif array[mid] > target: # 중간값이 찾고자 하는 값보다 클 때
end = mid - 1
else:
start = mid + 1
return None # 일치하는 값이 없을 때
if __name__ == "__main__":
# 입력
N = int(input())
array = list(map(int, input().split()))
M = int(input())
find = list(map(int, input().split()))
# 이진 탐색을 하기 위해서 정렬
array.sort()
# find에서 값을 하나씩 읽는다.
for data in find:
# 이진 탐색
result = binary_search(data, 0, N - 1)
if result is not None:
print('yes', end=" ")
else:
print('no', end=" ")
| [
"dnjs2113@gmail.com"
] | dnjs2113@gmail.com |
5c09d311aad75b9bd3cd0f7997527b7e8fa604b9 | 457a1baf3a9afc365d53e955db5ccbef6a9f636b | /morphenepython/blockchain.py | 78f87d70667fda7d3020bdbb096dc6dc9ad2baea | [
"MIT"
] | permissive | morphene/morphene-python | 5bf9cb3cbc4a081297e26269c398192e12433072 | 7c6144c7337330490229ce69b8c3fb5dc2e3d08e | refs/heads/master | 2021-07-03T07:43:16.118829 | 2019-05-30T19:38:49 | 2019-05-30T19:38:49 | 189,285,305 | 0 | 0 | NOASSERTION | 2020-10-27T21:47:05 | 2019-05-29T19:22:41 | Python | UTF-8 | Python | false | false | 42,412 | py | # This Python file uses the following encoding: utf-8
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from future.utils import python_2_unicode_compatible
from builtins import str
from builtins import range
from builtins import object
import sys
import time
import hashlib
import json
import math
from threading import Thread, Event
from time import sleep
import logging
from datetime import datetime, timedelta
from .utils import formatTimeString, addTzInfo
from .block import Block
from morphenepythonapi.node import Nodes
from morphenepythonapi.morphenenoderpc import MorpheneNodeRPC
from .exceptions import BatchedCallsNotSupported, BlockDoesNotExistsException, BlockWaitTimeExceeded, OfflineHasNoRPCException
from morphenepythonapi.exceptions import NumRetriesReached
from morphenepythongraphenebase.py23 import py23_bytes
from morphenepython.instance import shared_morphene_instance
from .amount import Amount
import morphenepython as mph
log = logging.getLogger(__name__)
if sys.version_info < (3, 0):
from Queue import Queue
else:
from queue import Queue
FUTURES_MODULE = None
if not FUTURES_MODULE:
    try:
        # Feature-detect concurrent.futures: when available, the threaded
        # block fetcher uses ThreadPoolExecutor instead of the local Pool.
        from concurrent.futures import ThreadPoolExecutor, wait, as_completed
        FUTURES_MODULE = "futures"
        # FUTURES_MODULE = None
    except ImportError:
        FUTURES_MODULE = None
# default exception handler. if you want to take some action on failed tasks
# maybe add the task back into the queue, then make your own handler and pass it in
def default_handler(name, exception, *args, **kwargs):
    """Default exception handler for worker threads: log the failure and move on.

    :param str name: name of the worker thread that raised
    :param Exception exception: the exception instance
    Remaining positional/keyword arguments are the failed task's args/kwargs.
    """
    # Logger.warn is a deprecated alias; warning is the supported spelling.
    log.warning('%s raised %s with args %s and kwargs %s' % (name, str(exception), repr(args), repr(kwargs)))
class Worker(Thread):
    """Daemon thread executing tasks from a given tasks queue.

    Each task is a ``(func, args, kwargs)`` tuple. Non-None results are
    pushed onto ``results``; exceptions are handed to ``exception_handler``.
    The thread starts itself immediately and loops until ``abort`` is set.
    """
    def __init__(self, name, queue, results, abort, idle, exception_handler):
        Thread.__init__(self)
        self.name = name
        self.queue = queue              # task queue of (func, args, kwargs)
        self.results = results          # queue collecting non-None results
        self.abort = abort              # Event: set to request shutdown
        self.idle = idle                # Event: set while waiting for work
        self.exception_handler = exception_handler
        self.daemon = True              # do not keep the process alive
        self.start()

    def run(self):
        """Thread work loop calling the function with the params"""
        # keep running until told to abort
        while not self.abort.is_set():
            try:
                # get a task and raise immediately if none available
                func, args, kwargs = self.queue.get(False)
                self.idle.clear()
            except Exception:
                # no work to do right now; advertise that we are idle.
                # Narrowed from a bare `except:` so KeyboardInterrupt /
                # SystemExit are no longer swallowed.
                self.idle.set()
                continue
            try:
                # the function may raise
                result = func(*args, **kwargs)
                if result is not None:
                    self.results.put(result)
            except Exception as e:
                # so we move on and handle it in whatever way the caller wanted
                self.exception_handler(self.name, e, args, kwargs)
            finally:
                # task complete no matter what happened
                self.queue.task_done()
# class for thread pool
class Pool:
    """Pool of threads consuming tasks from a queue."""
    def __init__(self, thread_count, batch_mode=True, exception_handler=default_handler):
        # batch mode means block when adding tasks if no threads available to process
        self.queue = Queue(thread_count if batch_mode else 0)
        self.resultQueue = Queue(0)
        self.thread_count = thread_count
        self.exception_handler = exception_handler
        self.aborts = []   # one abort Event per worker
        self.idles = []    # one idle Event per worker
        self.threads = []  # the Worker instances

    def __del__(self):
        """Tell my threads to quit"""
        self.abort()

    def run(self, block=False):
        """Start the threads, or restart them if you've aborted.

        :param bool block: when True, wait for running threads to finish
            before restarting; otherwise return False if some are alive.
        """
        # either wait for them to finish or return false if some arent
        if block:
            while self.alive():
                sleep(1)
        elif self.alive():
            return False
        # go start them
        self.aborts = []
        self.idles = []
        self.threads = []
        for n in range(self.thread_count):
            abort = Event()
            idle = Event()
            self.aborts.append(abort)
            self.idles.append(idle)
            self.threads.append(Worker('thread-%d' % n, self.queue, self.resultQueue, abort, idle, self.exception_handler))
        return True

    def enqueue(self, func, *args, **kargs):
        """Add a task to the queue"""
        self.queue.put((func, args, kargs))

    def join(self):
        """Wait for completion of all the tasks in the queue"""
        self.queue.join()

    def abort(self, block=False):
        """Tell each worker that its done working"""
        # tell the threads to stop after they are done with what they are currently doing
        for a in self.aborts:
            a.set()
        # wait for them to finish if requested
        while block and self.alive():
            sleep(1)

    def alive(self):
        """Returns True if any threads are currently running"""
        return True in [t.is_alive() for t in self.threads]

    def idle(self):
        """Returns True if all threads are waiting for work"""
        return False not in [i.is_set() for i in self.idles]

    def done(self):
        """Returns True if not tasks are left to be completed"""
        return self.queue.empty()

    def results(self, sleep_time=0):
        """Get the set of results that have been processed, repeatedly call until done"""
        sleep(sleep_time)
        results = []
        try:
            while True:
                # get a result, raises queue.Empty immediately if none available
                results.append(self.resultQueue.get(False))
                self.resultQueue.task_done()
        except Exception:
            # narrowed from a bare except; Empty simply ends the drain loop
            return results
@python_2_unicode_compatible
class Blockchain(object):
""" This class allows to access the blockchain and read data
from it
:param MorpheneClient morphene_instance: MorpheneClient instance
:param str mode: (default) Irreversible block (``irreversible``) or
actual head block (``head``)
:param int max_block_wait_repetition: maximum wait repetition for next block
where each repetition is block_interval long (default is 3)
This class let's you deal with blockchain related data and methods.
Read blockchain related data:
.. testsetup::
from morphenepython.blockchain import Blockchain
chain = Blockchain()
Read current block and blockchain info
.. testcode::
print(chain.get_current_block())
print(chain.morphene.info())
Monitor for new blocks. When ``stop`` is not set, monitoring will never stop.
.. testcode::
blocks = []
current_num = chain.get_current_block_num()
for block in chain.blocks(start=current_num - 99, stop=current_num):
blocks.append(block)
len(blocks)
.. testoutput::
100
or each operation individually:
.. testcode::
ops = []
current_num = chain.get_current_block_num()
for operation in chain.ops(start=current_num - 99, stop=current_num):
ops.append(operation)
"""
def __init__(
self,
morphene_instance=None,
mode="irreversible",
max_block_wait_repetition=None,
data_refresh_time_seconds=900,
):
self.morphene = morphene_instance or shared_morphene_instance()
if mode == "irreversible":
self.mode = 'last_irreversible_block_num'
elif mode == "head":
self.mode = "head_block_number"
else:
raise ValueError("invalid value for 'mode'!")
if max_block_wait_repetition:
self.max_block_wait_repetition = max_block_wait_repetition
else:
self.max_block_wait_repetition = 3
self.block_interval = self.morphene.get_block_interval()
def is_irreversible_mode(self):
return self.mode == 'last_irreversible_block_num'
def get_transaction(self, transaction_id):
""" Returns a transaction from the blockchain
:param str transaction_id: transaction_id
"""
if not self.morphene.is_connected():
raise OfflineHasNoRPCException("No RPC available in offline mode!")
self.morphene.rpc.set_next_node_on_empty_reply(False)
ret = self.morphene.rpc.get_transaction(transaction_id, api="database")
return ret
def get_transaction_hex(self, transaction):
""" Returns a hexdump of the serialized binary form of a transaction.
:param dict transaction: transaction
"""
if not self.morphene.is_connected():
raise OfflineHasNoRPCException("No RPC available in offline mode!")
self.morphene.rpc.set_next_node_on_empty_reply(False)
ret = self.morphene.rpc.get_transaction_hex(transaction, api="database")
return ret
def get_current_block_num(self):
""" This call returns the current block number
.. note:: The block number returned depends on the ``mode`` used
when instantiating from this class.
"""
props = self.morphene.get_dynamic_global_properties(False)
if props is None:
raise ValueError("Could not receive dynamic_global_properties!")
if self.mode not in props:
raise ValueError(self.mode + " is not in " + str(props))
return int(props.get(self.mode))
def get_current_block(self, only_ops=False, only_virtual_ops=False):
""" This call returns the current block
:param bool only_ops: Returns block with operations only, when set to True (default: False)
:param bool only_virtual_ops: Includes only virtual operations (default: False)
.. note:: The block number returned depends on the ``mode`` used
when instantiating from this class.
"""
return Block(
self.get_current_block_num(),
only_ops=only_ops,
only_virtual_ops=only_virtual_ops,
morphene_instance=self.morphene
)
    def get_estimated_block_num(self, date, estimateForwards=False, accurate=True):
        """ This call estimates the block number based on a given date

            :param datetime date: block time for which a block number is estimated
            :param bool estimateForwards: when True, extrapolate forwards from an
                early reference block instead of backwards from the head block
            :param bool accurate: when True, refine the first estimate by fetching
                blocks until the match is within one block interval

            .. note:: The block number returned depends on the ``mode`` used
                when instantiating from this class.
        """
        last_block = self.get_current_block()
        date = addTzInfo(date)
        if estimateForwards:
            # Extrapolate from block 10 assuming a constant block interval.
            block_offset = 10
            first_block = Block(block_offset, morphene_instance=self.morphene)
            time_diff = date - first_block.time()
            block_number = math.floor(time_diff.total_seconds() / self.block_interval + block_offset)
        else:
            # Extrapolate backwards from the current head block.
            time_diff = last_block.time() - date
            block_number = math.floor(last_block.identifier - time_diff.total_seconds() / self.block_interval)
        if block_number < 1:
            block_number = 1
        if accurate:
            if block_number > last_block.identifier:
                block_number = last_block.identifier
            # Iteratively correct the estimate until the fetched block's time is
            # within one block interval of the requested date (or we hit a bound).
            block_time_diff = timedelta(seconds=10)
            while block_time_diff.total_seconds() > self.block_interval or block_time_diff.total_seconds() < -self.block_interval:
                block = Block(block_number, morphene_instance=self.morphene)
                block_time_diff = date - block.time()
                delta = block_time_diff.total_seconds() // self.block_interval
                if delta == 0 and block_time_diff.total_seconds() < 0:
                    delta = -1
                elif delta == 0 and block_time_diff.total_seconds() > 0:
                    delta = 1
                block_number += delta
                if block_number < 1:
                    break
                if block_number > last_block.identifier:
                    break
        return int(block_number)
def block_time(self, block_num):
""" Returns a datetime of the block with the given block
number.
:param int block_num: Block number
"""
return Block(
block_num,
morphene_instance=self.morphene
).time()
def block_timestamp(self, block_num):
""" Returns the timestamp of the block with the given block
number as integer.
:param int block_num: Block number
"""
block_time = Block(
block_num,
morphene_instance=self.morphene
).time()
return int(time.mktime(block_time.timetuple()))
    def blocks(self, start=None, stop=None, max_batch_size=None, threading=False, thread_num=8, only_ops=False, only_virtual_ops=False):
        """ Yields blocks starting from ``start``.

            :param int start: Starting block
            :param int stop: Stop at this block
            :param int max_batch_size: When not None, batched calls are used.
                Cannot be combined with threading
            :param bool threading: Enables threading. Cannot be combined with batch calls
            :param int thread_num: Defines the number of threads, when `threading` is set.
            :param bool only_ops: Only yield operations (default: False).
                Cannot be combined with ``only_virtual_ops=True``.
            :param bool only_virtual_ops: Only yield virtual operations (default: False)

            .. note:: If you want instant confirmation, you need to instantiate
                      class:`morphenepython.blockchain.Blockchain` with
                      ``mode="head"``, otherwise, the call will wait until
                      confirmed in an irreversible block.

        """
        # Let's find out how often blocks are generated!
        current_block = self.get_current_block()
        current_block_num = current_block.block_num
        if not start:
            start = current_block_num
        head_block_reached = False
        # Threaded mode: build a worker pool plus one RPC connection per
        # thread so the workers do not share connection state.
        if threading and FUTURES_MODULE is not None:
            pool = ThreadPoolExecutor(max_workers=thread_num)
        elif threading:
            pool = Pool(thread_num, batch_mode=True)
        if threading:
            morphene_instance = [self.morphene]
            nodelist = self.morphene.rpc.nodes.export_working_nodes()
            for i in range(thread_num - 1):
                morphene_instance.append(mph.MorpheneClient(node=nodelist,
                                         num_retries=self.morphene.rpc.num_retries,
                                         num_retries_call=self.morphene.rpc.num_retries_call,
                                         timeout=self.morphene.rpc.timeout))
        # We are going to loop indefinitely
        latest_block = 0
        while True:
            if stop:
                head_block = stop
            else:
                current_block_num = self.get_current_block_num()
                head_block = current_block_num
            if threading and not head_block_reached:
                # --- threaded path: fetch thread_num blocks in parallel ---
                latest_block = start - 1
                result_block_nums = []
                for blocknum in range(start, head_block + 1, thread_num):
                    # futures = []
                    i = 0
                    if FUTURES_MODULE is not None:
                        futures = []
                    block_num_list = []
                    # freeze = self.morphene.rpc.nodes.freeze_current_node
                    num_retries = self.morphene.rpc.nodes.num_retries
                    # self.morphene.rpc.nodes.freeze_current_node = True
                    self.morphene.rpc.nodes.num_retries = thread_num
                    error_cnt = self.morphene.rpc.nodes.node.error_cnt
                    while i < thread_num and blocknum + i <= head_block:
                        block_num_list.append(blocknum + i)
                        results = []
                        if FUTURES_MODULE is not None:
                            futures.append(pool.submit(Block, blocknum + i, only_ops=only_ops, only_virtual_ops=only_virtual_ops, morphene_instance=morphene_instance[i]))
                        else:
                            pool.enqueue(Block, blocknum + i, only_ops=only_ops, only_virtual_ops=only_virtual_ops, morphene_instance=morphene_instance[i])
                        i += 1
                    if FUTURES_MODULE is not None:
                        try:
                            results = [r.result() for r in as_completed(futures)]
                        except Exception as e:
                            log.error(str(e))
                    else:
                        pool.run(True)
                        pool.join()
                        for result in pool.results():
                            results.append(result)
                        pool.abort()
                    self.morphene.rpc.nodes.num_retries = num_retries
                    # self.morphene.rpc.nodes.freeze_current_node = freeze
                    # Collapse the workers' error counts into at most one
                    # error increment on the shared node.
                    new_error_cnt = self.morphene.rpc.nodes.node.error_cnt
                    self.morphene.rpc.nodes.node.error_cnt = error_cnt
                    if new_error_cnt > error_cnt:
                        self.morphene.rpc.nodes.node.error_cnt += 1
                        # self.morphene.rpc.next()
                    # Keep only blocks that arrived and were not seen before.
                    checked_results = []
                    for b in results:
                        if b.block_num is not None and int(b.block_num) not in result_block_nums:
                            b["id"] = b.block_num
                            b.identifier = b.block_num
                            checked_results.append(b)
                            result_block_nums.append(int(b.block_num))
                    # Re-fetch sequentially any block the workers failed to deliver.
                    missing_block_num = list(set(block_num_list).difference(set(result_block_nums)))
                    while len(missing_block_num) > 0:
                        for blocknum in missing_block_num:
                            try:
                                block = Block(blocknum, only_ops=only_ops, only_virtual_ops=only_virtual_ops, morphene_instance=self.morphene)
                                checked_results.append(block)
                                result_block_nums.append(int(block.block_num))
                            except Exception as e:
                                log.error(str(e))
                        missing_block_num = list(set(block_num_list).difference(set(result_block_nums)))
                    from operator import itemgetter
                    # Yield in ascending block order.
                    blocks = sorted(checked_results, key=itemgetter('id'))
                    for b in blocks:
                        if latest_block < int(b.block_num):
                            latest_block = int(b.block_num)
                        yield b
                if latest_block <= head_block:
                    for blocknum in range(latest_block + 1, head_block + 1):
                        if blocknum not in result_block_nums:
                            block = Block(blocknum, only_ops=only_ops, only_virtual_ops=only_virtual_ops, morphene_instance=self.morphene)
                            result_block_nums.append(blocknum)
                            yield block
            elif max_batch_size is not None and (head_block - start) >= max_batch_size and not head_block_reached:
                # --- batched path: queue several RPC calls per request ---
                if not self.morphene.is_connected():
                    raise OfflineHasNoRPCException("No RPC available in offline mode!")
                self.morphene.rpc.set_next_node_on_empty_reply(False)
                latest_block = start - 1
                batches = max_batch_size
                for blocknumblock in range(start, head_block + 1, batches):
                    # Get full block
                    if (head_block - blocknumblock) < batches:
                        batches = head_block - blocknumblock + 1
                    for blocknum in range(blocknumblock, blocknumblock + batches - 1):
                        if only_virtual_ops:
                            self.morphene.rpc.get_ops_in_block(blocknum, only_virtual_ops, add_to_queue=True)
                        else:
                            self.morphene.rpc.get_block(blocknum, add_to_queue=True)
                        latest_block = blocknum
                    if batches >= 1:
                        latest_block += 1
                    if latest_block <= head_block:
                        # NOTE(review): the virtual-ops branch reuses `blocknum`
                        # (last queued number) while the plain branch uses
                        # `latest_block` -- confirm this asymmetry is intended.
                        if only_virtual_ops:
                            block_batch = self.morphene.rpc.get_ops_in_block(blocknum, only_virtual_ops, add_to_queue=False)
                        else:
                            block_batch = self.morphene.rpc.get_block(latest_block, add_to_queue=False)
                        if not bool(block_batch):
                            raise BatchedCallsNotSupported()
                        blocknum = latest_block - len(block_batch) + 1
                        if not isinstance(block_batch, list):
                            block_batch = [block_batch]
                        for block in block_batch:
                            if not bool(block):
                                continue
                            block = Block(block, only_ops=only_ops, only_virtual_ops=only_virtual_ops, morphene_instance=self.morphene)
                            block["id"] = block.block_num
                            block.identifier = block.block_num
                            yield block
                            blocknum = block.block_num
            else:
                # --- plain sequential path ---
                # Blocks from start until head block
                if start is None:
                    start = head_block - 1
                for blocknum in range(start, head_block + 1):
                    # Get full block
                    block = self.wait_for_and_get_block(blocknum, only_ops=only_ops, only_virtual_ops=only_virtual_ops, block_number_check_cnt=5, last_current_block_num=current_block_num)
                    yield block
            # Set new start
            start = head_block + 1
            head_block_reached = True
            if stop and start > stop:
                return
            # Sleep for one block
            time.sleep(self.block_interval)
    def wait_for_and_get_block(self, block_number, blocks_waiting_for=None, only_ops=False, only_virtual_ops=False, block_number_check_cnt=-1, last_current_block_num=None):
        """ Get the desired block from the chain, if the current head block is smaller (for both head and irreversible)
            then we wait, but a maximum of blocks_waiting_for * max_block_wait_repetition time before failure.

            :param int block_number: desired block number
            :param int blocks_waiting_for: difference between block_number and current head and defines
                how many blocks we are willing to wait, positive int (default: None)
            :param bool only_ops: Returns blocks with operations only, when set to True (default: False)
            :param bool only_virtual_ops: Includes only virtual operations (default: False)
            :param int block_number_check_cnt: limit the number of retries when greater than -1
            :param int last_current_block_num: can be used to reduce the number of get_current_block_num() api calls
            :raises BlockWaitTimeExceeded: when the wait budget is exhausted
        """
        if last_current_block_num is None:
            last_current_block_num = self.get_current_block_num()
        elif last_current_block_num - block_number < 50:
            # Cached head is close to the requested block; refresh it so the
            # wait loop below sees up-to-date chain state.
            last_current_block_num = self.get_current_block_num()
        if not blocks_waiting_for:
            blocks_waiting_for = max(
                1, block_number - last_current_block_num)
        repetition = 0
        # can't return the block before the chain has reached it (support future block_num)
        while last_current_block_num < block_number:
            repetition += 1
            time.sleep(self.block_interval)
            if last_current_block_num - block_number < 50:
                last_current_block_num = self.get_current_block_num()
            if repetition > blocks_waiting_for * self.max_block_wait_repetition:
                raise BlockWaitTimeExceeded("Already waited %d s" % (blocks_waiting_for * self.max_block_wait_repetition * self.block_interval))
        # block has to be returned properly
        repetition = 0
        cnt = 0
        block = None
        # Retry until the returned block actually carries the requested number
        # (or the retry budget block_number_check_cnt is used up).
        while (block is None or block.block_num is None or int(block.block_num) != block_number) and (block_number_check_cnt < 0 or cnt < block_number_check_cnt):
            try:
                block = Block(block_number, only_ops=only_ops, only_virtual_ops=only_virtual_ops, morphene_instance=self.morphene)
                cnt += 1
            except BlockDoesNotExistsException:
                block = None
                if repetition > blocks_waiting_for * self.max_block_wait_repetition:
                    raise BlockWaitTimeExceeded("Already waited %d s" % (blocks_waiting_for * self.max_block_wait_repetition * self.block_interval))
                repetition += 1
                time.sleep(self.block_interval)
        return block
def ops(self, start=None, stop=None, only_virtual_ops=False, **kwargs):
""" Blockchain.ops() is deprecated. Please use Blockchain.stream() instead.
"""
raise DeprecationWarning('Blockchain.ops() is deprecated. Please use Blockchain.stream() instead.')
def ops_statistics(self, start, stop=None, add_to_ops_stat=None, with_virtual_ops=True, verbose=False):
""" Generates statistics for all operations (including virtual operations) starting from
``start``.
:param int start: Starting block
:param int stop: Stop at this block, if set to None, the current_block_num is taken
:param dict add_to_ops_stat: if set, the result is added to add_to_ops_stat
:param bool verbose: if True, the current block number and timestamp is printed
This call returns a dict with all possible operations and their occurrence.
"""
if add_to_ops_stat is None:
import morphenepythonbase.operationids
ops_stat = morphenepythonbase.operationids.operations.copy()
for key in ops_stat:
ops_stat[key] = 0
else:
ops_stat = add_to_ops_stat.copy()
current_block = self.get_current_block_num()
if start > current_block:
return
if stop is None:
stop = current_block
for block in self.blocks(start=start, stop=stop, only_ops=False, only_virtual_ops=False):
if verbose:
print(block["identifier"] + " " + block["timestamp"])
ops_stat = block.ops_statistics(add_to_ops_stat=ops_stat)
if with_virtual_ops:
for block in self.blocks(start=start, stop=stop, only_ops=True, only_virtual_ops=True):
if verbose:
print(block["identifier"] + " " + block["timestamp"])
ops_stat = block.ops_statistics(add_to_ops_stat=ops_stat)
return ops_stat
def stream(self, opNames=[], raw_ops=False, *args, **kwargs):
""" Yield specific operations (e.g. transfers) only
:param array opNames: List of operations to filter for
:param bool raw_ops: When set to True, it returns the unmodified operations (default: False)
:param int start: Start at this block
:param int stop: Stop at this block
:param int max_batch_size: When not None, batch calls of are used.
Cannot be combined with threading
:param bool threading: Enables threading. Cannot be combined with batch calls
:param int thread_num: Defines the number of threads, when `threading` is set.
:param bool only_ops: Only yield operations (default: False)
Cannot be combined with ``only_virtual_ops=True``
:param bool only_virtual_ops: Only yield virtual operations (default: False)
The dict output is formated such that ``type`` carries the
operation type. Timestamp and block_num are taken from the
block the operation was stored in and the other keys depend
on the actual operation.
.. note:: If you want instant confirmation, you need to instantiate
class:`morphenepython.blockchain.Blockchain` with
``mode="head"``, otherwise, the call will wait until
confirmed in an irreversible block.
output when `raw_ops=False` is set:
.. code-block:: js
{
'type': 'transfer',
'from': 'initwitness',
'to': 'luckyguy',
'amount': '1000000.000 MORPH',
'memo': 'get rich',
'_id': '6d4c5f2d4d8ef1918acaee4a8dce34f9da384786',
'timestamp': datetime.datetime(2019, 6, 1, 16, 20, 0, tzinfo=<UTC>),
'block_num': 420, 'trx_num': 2, 'trx_id': 'cf11b2ac8493c71063ec121b2e8517ab1e0e6bea'
}
output when `raw_ops=True` is set:
.. code-block:: js
{
'block_num': 22277588,
'op':
[
'transfer',
{
'from': 'initwitness', 'to': 'luckyguy',
'amount': '1000000.000 MORPH',
'memo': 'get rich'
}
],
'timestamp': datetime.datetime(2019, 6, 1, 16, 20, 0, tzinfo=<UTC>)
}
"""
for block in self.blocks(**kwargs):
if "transactions" in block:
trx = block["transactions"]
else:
trx = [block]
block_num = 0
trx_id = ""
_id = ""
timestamp = ""
for trx_nr in range(len(trx)):
if "operations" not in trx[trx_nr]:
continue
for event in trx[trx_nr]["operations"]:
if isinstance(event, list):
op_type, op = event
# trx_id = block["transaction_ids"][trx_nr]
block_num = block.get("id")
_id = self.hash_op(event)
timestamp = block.get("timestamp")
elif isinstance(event, dict) and "type" in event and "value" in event:
op_type = event["type"]
if len(op_type) > 10 and op_type[len(op_type) - 10:] == "_operation":
op_type = op_type[:-10]
op = event["value"]
# trx_id = block["transaction_ids"][trx_nr]
block_num = block.get("id")
_id = self.hash_op(event)
timestamp = block.get("timestamp")
elif "op" in event and isinstance(event["op"], dict) and "type" in event["op"] and "value" in event["op"]:
op_type = event["op"]["type"]
if len(op_type) > 10 and op_type[len(op_type) - 10:] == "_operation":
op_type = op_type[:-10]
op = event["op"]["value"]
trx_id = event.get("trx_id")
block_num = event.get("block")
_id = self.hash_op(event["op"])
timestamp = event.get("timestamp")
else:
op_type, op = event["op"]
trx_id = event.get("trx_id")
block_num = event.get("block")
_id = self.hash_op(event["op"])
timestamp = event.get("timestamp")
if not bool(opNames) or op_type in opNames and block_num > 0:
if raw_ops:
yield {"block_num": block_num,
"trx_num": trx_nr,
"op": [op_type, op],
"timestamp": timestamp}
else:
updated_op = {"type": op_type}
updated_op.update(op.copy())
updated_op.update({"_id": _id,
"timestamp": timestamp,
"block_num": block_num,
"trx_num": trx_nr,
"trx_id": trx_id})
yield updated_op
def awaitTxConfirmation(self, transaction, limit=10):
""" Returns the transaction as seen by the blockchain after being
included into a block
:param dict transaction: transaction to wait for
:param int limit: (optional) number of blocks to wait for the transaction (default: 10)
.. note:: If you want instant confirmation, you need to instantiate
class:`morphenepython.blockchain.Blockchain` with
``mode="head"``, otherwise, the call will wait until
confirmed in an irreversible block.
.. note:: This method returns once the blockchain has included a
transaction with the **same signature**. Even though the
signature is not usually used to identify a transaction,
it still cannot be forfeited and is derived from the
transaction contented and thus identifies a transaction
uniquely.
"""
counter = 0
for block in self.blocks():
counter += 1
for tx in block["transactions"]:
if sorted(
tx["signatures"]
) == sorted(transaction["signatures"]):
return tx
if counter > limit:
raise Exception(
"The operation has not been added after %d blocks!" % (limit))
@staticmethod
def hash_op(event):
""" This method generates a hash of blockchain operation. """
if isinstance(event, dict) and "type" in event and "value" in event:
op_type = event["type"]
if len(op_type) > 10 and op_type[len(op_type) - 10:] == "_operation":
op_type = op_type[:-10]
op = event["value"]
event = [op_type, op]
data = json.dumps(event, sort_keys=True)
return hashlib.sha1(py23_bytes(data, 'utf-8')).hexdigest()
    def get_all_accounts(self, start='', stop='', steps=1e3, limit=-1, **kwargs):
        """ Yields account names between start and stop.

            :param str start: Start at this account name
            :param str stop: Stop at this account name
            :param int steps: Obtain ``steps`` ret with a single call from RPC
            :param int limit: stop after yielding this many names (ignored when <= 0)
        """
        # cnt counts yielded names, starting at 1 so `cnt > limit` fires
        # after exactly `limit` yields.
        cnt = 1
        if not self.morphene.is_connected():
            raise OfflineHasNoRPCException("No RPC available in offline mode!")
        lastname = start
        while True:
            # Each page starts at `lastname`; the RPC echoes it back as the
            # first entry, which the != check below skips over.
            ret = self.morphene.rpc.lookup_accounts(lastname, steps)
            for account in ret:
                if isinstance(account, dict):
                    account_name = account["name"]
                else:
                    account_name = account
                if account_name != lastname:
                    yield account_name
                    cnt += 1
                    if account_name == stop or (limit > 0 and cnt > limit):
                        return
                if lastname == account_name:
                    # Page did not advance -> end of the account list.
                    return
                lastname = account_name
            if len(ret) < steps:
                # Short page means there are no further accounts.
                return
def get_account_count(self):
""" Returns the number of accounts"""
self.morphene.rpc.set_next_node_on_empty_reply(False)
ret = self.morphene.rpc.get_account_count()
return ret
    def get_account_reputations(self, start='', stop='', steps=1e3, limit=-1, **kwargs):
        """ Yields account reputation between start and stop.

            :param str start: Start at this account name
            :param str stop: Stop at this account name
            :param int steps: Obtain ``steps`` ret with a single call from RPC
            :param int limit: stop after yielding this many entries (ignored when <= 0)
        """
        # cnt counts yielded entries, starting at 1 so `cnt > limit` fires
        # after exactly `limit` yields.
        cnt = 1
        if not self.morphene.is_connected():
            raise OfflineHasNoRPCException("No RPC available in offline mode!")
        lastname = start
        self.morphene.rpc.set_next_node_on_empty_reply(False)
        while True:
            # Each page starts at `lastname`; the RPC echoes it back as the
            # first entry, which the != check below skips over.
            ret = self.morphene.rpc.get_account_reputations(lastname, steps, api="follow")
            for account in ret:
                if isinstance(account, dict):
                    account_name = account["account"]
                else:
                    account_name = account
                if account_name != lastname:
                    # Unlike get_all_accounts, the full reputation entry is
                    # yielded, not just the account name.
                    yield account
                    cnt += 1
                    if account_name == stop or (limit > 0 and cnt > limit):
                        return
                if lastname == account_name:
                    # Page did not advance -> end of the list.
                    return
                lastname = account_name
            if len(ret) < steps:
                # Short page means there are no further accounts.
                return
def get_similar_account_names(self, name, limit=5):
""" Returns limit similar accounts with name as list
:param str name: account name to search similars for
:param int limit: limits the number of accounts, which will be returned
:returns: Similar account names as list
:rtype: list
.. code-block:: python
>>> from morphenepython.blockchain import Blockchain
>>> blockchain = Blockchain()
>>> ret = blockchain.get_similar_account_names("test", limit=5)
>>> len(ret) == 5
True
"""
if not self.morphene.is_connected():
return None
self.morphene.rpc.set_next_node_on_empty_reply(False)
return self.morphene.rpc.lookup_accounts(name, limit)
def find_rc_accounts(self, name):
""" Returns the RC parameters of one or more accounts.
:param str name: account name to search rc params for (can also be a list of accounts)
:returns: RC params
:rtype: list
.. code-block:: python
>>> from morphenepython.blockchain import Blockchain
>>> blockchain = Blockchain()
>>> ret = blockchain.find_rc_accounts(["test"])
>>> len(ret) == 1
True
"""
if not self.morphene.is_connected():
return None
self.morphene.rpc.set_next_node_on_empty_reply(False)
if isinstance(name, list):
account = self.morphene.rpc.find_rc_accounts({'accounts': name}, api="rc")
if bool(account):
return account["rc_accounts"]
else:
account = self.morphene.rpc.find_rc_accounts({'accounts': [name]}, api="rc")
if bool(account):
return account["rc_accounts"][0]
def list_change_recovery_account_requests(
self, start="", limit=1000, order="by_account"):
""" List pending `change_recovery_account` requests.
:param str/list start: Start the listing from this entry.
Leave empty to start from the beginning. If `order` is set
to `by_account`, `start` has to be an account name. If
`order` is set to `by_effective_date`, `start` has to be a
list of [effective_on, account_to_recover],
e.g. `start=['2018-12-18T01:46:24', 'bott']`.
:param int limit: maximum number of results to return (default
and maximum: 1000).
:param str order: valid values are "by_account" (default) or
"by_effective_date".
:returns: list of `change_recovery_account` requests.
:rtype: list
.. code-block:: python
>>> from morphenepython.blockchain import Blockchain
>>> blockchain = Blockchain()
>>> ret = blockchain.list_change_recovery_account_requests(limit=1)
"""
if not self.morphene.is_connected():
return None
self.morphene.rpc.set_next_node_on_empty_reply(False)
requests = self.morphene.rpc.list_change_recovery_account_requests(
{'start': start, 'limit': limit, 'order': order}, api="database")
if bool(requests):
return requests['requests']
def find_change_recovery_account_requests(self, accounts):
""" Find pending `change_recovery_account` requests for one or more
specific accounts.
:param str/list accounts: account name or list of account
names to find `change_recovery_account` requests for.
:returns: list of `change_recovery_account` requests for the
given account(s).
:rtype: list
.. code-block:: python
>>> from morphenepython.blockchain import Blockchain
>>> blockchain = Blockchain()
>>> ret = blockchain.find_change_recovery_account_requests('bott')
"""
if not self.morphene.is_connected():
return None
self.morphene.rpc.set_next_node_on_empty_reply(False)
if isinstance(accounts, str):
accounts = [accounts]
requests = self.morphene.rpc.find_change_recovery_account_requests(
{'accounts': accounts}, api="database")
if bool(requests):
return requests['requests']
| [
"andrewc@pobox.com"
] | andrewc@pobox.com |
e805cdb88bd2de7f4bce40ee710b792a3c6c17be | 106aa71c49f176415c7c140f066bde4e3a2df797 | /Archive/Mads_Wind/utility.py | a67c83df9e2c038f38bc54fbb709dfaca7a60d8b | [
"MIT"
] | permissive | madsankern/DynamicProgramming | df461dae3bcc3dbde18e79fdded0974daa0e293c | 0812b844068c33b2529d4b11940f9c89582bc374 | refs/heads/main | 2023-05-31T00:18:45.820845 | 2021-06-09T16:51:45 | 2021-06-09T16:51:45 | 341,465,621 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 506 | py | import numpy as np
# Utility without housing: CRRA with log special case at eta == 1
def u(c, par):
    """Return CRRA utility of consumption c given preference parameters par.

    Uses log utility when par.eta == 1.0 (the CRRA limit), otherwise the
    standard (c**(1-eta) - 1) / (1 - eta) form.
    """
    if par.eta == 1.0:
        return np.log(c)
    return (c ** (1 - par.eta) - 1.0) / (1.0 - par.eta)
# Utility with housing: CRRA over consumption plus a linear housing term
def u_h(c, h, par):
    """Return utility of consumption c and housing h.

    Housing enters linearly with weight par.kappa; consumption enters as
    CRRA with a log special case at par.eta == 1.
    """
    if par.eta == 1.0:
        return np.log(c) + par.kappa * h
    return (c ** (1 - par.eta) - 1.0) / (1.0 - par.eta) + par.kappa * h
# Marginal utility of consumption: u'(c) = c**(-eta)
def marg_u(c, par):
    """Return marginal utility of consumption, c**(-par.eta)."""
    return c ** (-par.eta)
# Inverse of marginal utility: solves marg_u(c) = u for c
def inv_marg_u(u, par):
    """Return the consumption level whose marginal utility equals u."""
    return u ** (-1.0 / par.eta)
| [
"Wind.Mads@bcg.com"
] | Wind.Mads@bcg.com |
7efb9951bfdf815059c2e6a6b72a96e332f6a971 | 602afe5a905c1f66892312b91fc381d966196f1a | /utilities/request_parsers.py | aeadf503229360bc0911ab99d3a6bab21f0b095e | [] | no_license | Big-Ideas-Lab/nutrics | 394299905af1fbd88ded4197032a2ce03aa8445c | 174baecf041096552a69b4c5f68895186673e4cd | refs/heads/master | 2022-08-27T06:48:01.326349 | 2020-05-08T17:25:54 | 2020-05-08T17:25:54 | 243,624,416 | 0 | 0 | null | 2022-06-22T02:45:47 | 2020-02-27T21:53:05 | Python | UTF-8 | Python | false | false | 2,799 | py |
'''
Request-parser definitions extracted from the resources modules to reduce clutter.
'''
from flask_restful import reqparse
#create parser for incoming user data
u_parser = reqparse.RequestParser()
u_parser.add_argument('username', help = 'Username cannot be blank.', required = True)
u_parser.add_argument('email', help = 'Please include a valid email address.', required = True)
u_parser.add_argument('password', help = 'Please enter a valid password.', required = True)
u_parser.add_argument('age', help = 'Please enter an age.', required = True)
u_parser.add_argument('gender_identity', help = 'Please enter an age.', required = True)
u_parser.add_argument('activity_level', help = 'We need your activity level for nutritious recommendations.', required = True)
#create parser for incoming geolocal data
r_parser = reqparse.RequestParser()
r_parser.add_argument('latitude', help= 'Latitude parameter is required.', required = True)
r_parser.add_argument('longitude', help= 'Longitude parameter is required.', required = True)
r_parser.add_argument('distance', help= 'Distance parameter is required.', required = True)
#Preference parser
p_parser = reqparse.RequestParser()
p_parser.add_argument('preference', help = 'This field cannot be blank', required = True)
p_parser.add_argument('preference_action', help = 'This field cannot be blank', required = True)
#Admin parser
a_parser = reqparse.RequestParser()
a_parser.add_argument('action', help = 'This field cannot be blank', required = False)
a_parser.add_argument('new_admin', help = 'This field only needs to be filled when adding new admin.', required = False)
a_parser.add_argument('item_name', help = 'This field needs to be added when updating food table', required = False)
a_parser.add_argument('latitude', help = 'This field needs to be added when updating food table', required = False)
a_parser.add_argument('longitude', help = 'This field needs to be added when updating food table', required = False)
#email link parser
e_parser = reqparse.RequestParser()
e_parser.add_argument('token', help = 'include the token.', required = True)
#food link parser
f_parser = reqparse.RequestParser()
f_parser.add_argument('item_name', help = 'include the token.', required = True)
f_parser.add_argument('latitude', help = 'include the latitude.', required = True)
f_parser.add_argument('longitude', help = 'include the longitude.', required = True)
f_parser.add_argument('restaurant_name', help = 'include the restaurant name.', required = True)
f_parser.add_argument('item_description', help = 'include the item description.', required = True)
f_parser.add_argument('price', help = 'include the price.', required = True)
f_parser.add_argument('nutrition', help = 'include the nutritional content.', required = True)
| [
"joshuadarcy@joshuas-mbp.lan"
] | joshuadarcy@joshuas-mbp.lan |
da7e9cf99e5e8e2d628496cb45d1bce02e1fe524 | 436acccf18f21fe3fa7d2588fa25184c180e930d | /main.py | a08b882602da70e994455f6ef206d8db9fb1a592 | [] | no_license | Kevin-Escobedo/Jeopardy-Bot | ee097c6e375149b1c3e31f9c2c2087846138602b | 06040a2abf53ae6b0178d397a209fdc0fbdd4f50 | refs/heads/main | 2023-04-09T18:57:35.243238 | 2021-04-21T19:32:26 | 2021-04-21T19:32:26 | 358,014,779 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,482 | py | import requests
import json
import tweepy
import datetime
import time
import twitterCredentials as tc #File containing api key, secret key, tokens
import jeopardyDatabase
#TO-DO: Refactor
def makeTitle(s: str) -> str:
    '''Capitalize the first letter of each whitespace-separated word in s.

    str.title() is deliberately avoided because it also capitalizes letters
    that follow apostrophes (e.g. "don't" -> "Don'T"); str.capitalize() does
    not. Consecutive whitespace collapses to a single space, matching the
    original append-then-strip implementation.
    '''
    return " ".join(word.capitalize() for word in s.split())
def getJeopardyQuestion() -> dict:
    '''Fetch one random clue from the jService API.

    Returns a dict with keys "answer", "question", "value" and "category"
    (category title is passed through makeTitle). Any field may be None if
    the API returns incomplete data.
    '''
    response = requests.get("https://jservice.io/api/random")
    clue = json.loads(response.text)[0]
    return {
        "answer": clue["answer"],
        "question": clue["question"],
        "value": clue["value"],
        "category": makeTitle(clue["category"]["title"]),
    }
def getValidQuestion(tries: int = 10) -> dict:
    '''Poll getJeopardyQuestion until a clue with no None/empty fields arrives.

    Makes at most `tries` attempts, sleeping 5 seconds after each invalid
    response; returns None when every attempt fails.
    '''
    for _ in range(tries):
        candidate = getJeopardyQuestion()
        if all(candidate.values()):
            return candidate
        time.sleep(5)  # back off before hitting the jService API again
    return None  # all attempts exhausted
if __name__ == "__main__":
jd = jeopardyDatabase.JeopardyDatabase()
jd.createTable()
auth = tweepy.OAuthHandler(tc.API_KEY, tc.API_SECRET_KEY)
auth.set_access_token(tc.ACCESS_TOKEN, tc.ACCESS_TOKEN_SECRET)
api = tweepy.API(auth)
try:
api.verify_credentials()
lastHour = datetime.datetime.now() - datetime.timedelta(hours = 1)
lastQuestion = jd.getHourQuestion(lastHour)
if lastQuestion != None:
api.update_status("Correct Response: {}".format(lastQuestion[5]), lastQuestion[1])
jq = getValidQuestion()
message = "{} for ${}:\n{}".format(jq["category"], jq["value"], jq["question"])
api.update_status(message)
tweetID = api.user_timeline(screename = tc.BOT_HANDLE, count = 1)[0].id
jd.insertQuestion(tweetID, jq["category"], jq["value"], jq["question"], jq["answer"])
except tweepy.error.TweepError:
print("Authentication Error")
jd.close()
| [
"escobedo001@gmail.com"
] | escobedo001@gmail.com |
26d76ad4d4f1ddd75f25e843de51546595a08f4d | 3356eb3fbf1ba5a8e5b0a851f07e8df5c852fdf8 | /tasks/takeoff.py | c4c7e6afe91521b29b8fec997819f25673715950 | [] | no_license | spb07/RL-Quadcopter-2 | 640118dcc932780e9c23d2adc36ab49d5e640f80 | 1061f3df2de6e116d281730583aa74acb472509b | refs/heads/master | 2020-03-18T22:32:02.527492 | 2018-05-29T20:49:28 | 2018-05-29T20:49:28 | 135,350,682 | 0 | 0 | null | 2018-05-29T20:41:13 | 2018-05-29T20:41:12 | null | UTF-8 | Python | false | false | 7,157 | py | import numpy as np
from physics_sim import PhysicsSim
class Task():
    """Takeoff task (environment) that defines the goal and provides feedback
    to the agent.

    Goal: take off to target_pos[2] and hover there. The observed state is
    action_repeat repetitions of the pair (z position, z velocity); the
    action is a single rotor speed applied to all four rotors.
    """
    def __init__(self, init_pose=None, init_velocities=None,
        init_angle_velocities=None, runtime=5., target_pos=None):
        """Initialize a Task object.
        Params
        ======
            init_pose: initial position of the quadcopter in (x,y,z) dimensions and the Euler angles
            init_velocities: initial velocity of the quadcopter in (x,y,z) dimensions
            init_angle_velocities: initial radians/second for each of the three Euler angles
            runtime: time limit for each episode (seconds)
            target_pos: target/goal (x,y,z) position for the agent
        """
        # Simulation
        self.sim = PhysicsSim(init_pose, init_velocities, init_angle_velocities, runtime)
        self.action_repeat = 3
        self.state_size = self.action_repeat * 2 # multiplier is equal to space size
        # Rotor speed bounds; a single scalar action drives all four rotors.
        self.action_low = 0
        self.action_high = 900
        self.action_size = 1
        #self.init_velocities = init_velocities
        #self.target_pos = target_pos
        # Goal: default target is 10 units directly above the origin.
        self.target_pos = target_pos if target_pos is not None else np.array([0., 0., 10.])
        #self.target_v = np.array([0., 0.])
        #self.target_angular_v = np.array([0., 0., 0.])
    def get_reward(self):
        """Uses current pose of sim to return reward.

        Reward = height-based prize + direction-of-travel bonus, with hard -1
        penalties for staying below the start height, excessive vertical
        speed, or ending the episode early; a large "finish" bonus is added
        when the episode runs to full runtime.
        """
        '''
        if (abs(self.sim.pose[2] - self.target_pos[2]))<0.3: #within 30cm of target height
            prize= 1
        else:
            if (self.sim.pose[2] > (2* self.target_pos[2])): # penalty for overshooting target height
                prize = -1
            else:
                if ((self.sim.pose[2] - self.target_pos[2])/self.sim.v[2])< 0: # Reward for going in right direction
                    prize=0.2
                else: # penalty for drifting away from target height
                    prize=-0.2
        '''
        #Position based reward
        pos = (self.sim.pose[2]/self.target_pos[2]) #relative position of quadcopter to target height
        if pos > 3: #overshot target height by 3 times
            prize =-1
        else:
            prize= np.sin(pos * (np.pi/2.)) #reward increases smoothly to 1 till target height and then decrease smootly to -1 when current height is 3 times target height, with an additional reward/penalty based on whether quad is going in right direction
        # Direction of travel reward: negative ratio means velocity points
        # towards the target height.
        if ((self.sim.pose[2] - self.target_pos[2])/self.sim.v[2])< 0: # Reward for going in right direction
            direc = 0.3
        else: # penalty for drifting away from target height
            direc = -0.3
        # Reward determination
        if self.sim.pose[2] <self.sim.init_pose[2]: #penalty for not going above initial position
            reward = -1
        else:
            if (abs(self.sim.v[2])>self.target_pos[2]/2): # penalty for excessive speed
                reward = -1
            else:
                if self.sim.done:
                    if self.sim.time < self.sim.runtime: #penalty for hitting boundary before runtime
                        reward = -1
                    else: # episode ran for full runtime
                        finish = 50/(1+(abs(self.sim.pose[2] - self.target_pos[2]))) #special reward for finishing episode, with maximum reward when finish position is at target height
                        reward = prize + direc + finish
                else: # continuous reward during episode
                    reward = prize + direc
        '''
        if (abs(self.sim.pose[2] - self.target_pos[2]))<0.3: #within 30cm of target height
            prize= 5
        else:
            if (self.sim.pose[2] > (2* self.target_pos[2])): # penalty for overshooting target height
                prize = -5
            else:
                if ((self.sim.pose[2] - self.target_pos[2])/self.sim.v[2])< 0: # Reward for going in right direction
                    prize=1
                else: # penalty for drifting away from target height
                    prize=-1
        if self.sim.pose[2] <self.sim.init_pose[2]: #penalty for not going above initial position
            reward = -5
        else:
            if self.sim.done:
                if self.sim.time < self.sim.runtime: #penalty for hitting boundary before runtime
                    reward = -2
                else: # episode ran for full runtime
                    reward = prize
            else: # continuous reward during episode
                reward = prize
        '''
        #reward = 1.- np.tanh(abs(self.sim.pose[2] - self.target_pos[2])) #only reward reaching the height
        #reward = 1.-.3*(abs(self.sim.pose[2] - self.target_pos[2])).sum()
        #reward = self.sim.pose[2] #quad went to zero height from starting height of 10
        #reward = 1.-.3*(abs(self.sim.pose[2] - self.target_pos[2])).sum() #only reward reaching the height
        #reward = 1.-.3*(abs(self.sim.pose[:3] - self.target_pos)).sum()
        #reward = np.tanh(1 - 0.003*(abs(self.sim.pose[:3] - self.target_pos))).sum()
        #reward = np.tanh(3.-.9*(abs(self.sim.pose[:3] - self.target_pos)).sum()-.2*(abs(self.sim.v[:2] -self.target_v)).sum()-.2*(abs(self.sim.angular_v[:3] -self.target_angular_v)).sum())
        #print("\n Time= = {:7.3f} Z= {:7.3f} , VZ = {:7.3f} ,Accel= {:7.3f}, ,Prize= {:7.4f}, Direc= {:7.4f}, Reward= {:7.4f} ".format( self.sim.time, self.sim.pose[2],self.sim.v[2],self.sim.linear_accel[2],prize, direc, reward ), end="")
        return reward
    def step(self, rotor_speeds):
        """Uses action to obtain next state, reward, done.

        rotor_speeds is the single-element action; it is replicated across
        all four rotors and applied action_repeat times, accumulating reward.
        Returns (next_state, accumulated_reward, done).
        """
        reward = 0
        pose_all = []
        for _ in range(self.action_repeat):
            done = self.sim.next_timestep(np.concatenate([rotor_speeds] * (4))) # updates pose, v and angular_v. Returns True if env bounds breached or time up
            reward += self.get_reward()
            #pose_all.append(self.sim.pose)
            # State per repeat is the (z position, z velocity) pair.
            pose_all.append(np.concatenate(([self.sim.pose[2]],[self.sim.v[2]]),axis =0))
        next_state = np.concatenate(pose_all)
        return next_state, reward, done
    def reset(self):
        """Reset the sim to start a new episode and return the initial state
        ((z, vz) repeated action_repeat times)."""
        # NOTE(review): self.takeoff is set here but never read elsewhere in
        # this class — confirm whether it is still needed.
        self.takeoff= False
        self.sim.reset()
        #state = np.concatenate([self.sim.pose] * self.action_repeat) # state definition
        #print("Input init velocity reset mod: ", self.sim.init_velocities)
        #print("Input init position reset mod: ", self.sim.init_pose)
        #print("Target pos reset mod: ", self.target_pos)
        #print("Reset velocity in reset mod: ", self.sim.v)
        state = np.concatenate(([self.sim.pose[2]],[self.sim.v[2]])*self.action_repeat,axis =0)
        #state = np.concatenate([self.sim.pose[2] * self.action_repeat) #restrict to height only
        return state
| [
"rnb14@ic.ac.uk"
] | rnb14@ic.ac.uk |
0776fc01013ec265fc2da612b9ee90542488e9df | 04d50ae4c98c7832123b8af91de8e3990c2347f9 | /Trnsys/ProjectScripts/Decathlon/Post.py | f9a1c5804176710108653f75423192462523a451 | [] | no_license | bmj-archive/Old_Python | 79d1edb7088e1acb22260414469fbd793d83a44a | 929a19b3c0702f82c61d21450033d7416d411ccb | refs/heads/master | 2022-02-25T17:20:33.931716 | 2019-11-05T15:25:18 | 2019-11-05T15:25:18 | 74,760,848 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 6,539 | py | from exergyframes import exergy_frame as xrg
from exergyframes import meta_table as metaTab
import logging
import os
from config import *
import datetime
import UtilityPathsAndDirs as utilPath
import re
import numpy as np
def _create():
    """Collect all TRNSYS .out and .bal result files for the project, merge
    them into one frame, attach human-readable descriptions, convert kJ/hr
    columns to kW, and save the result as timestamped CSV and .mat files.

    Python 2 module (uses print statements). Relies on exergyframes (xrg),
    meta_table (metaTab) and UtilityPathsAndDirs (utilPath).
    """
    # Input
    projectDir = FREELANCE_DIR + r"\DecathlonSim"
    descriptionsFilePath = projectDir + r"\INPUT\Descriptions_r00.xlsx"
    zoneNamesFilePath = projectDir + r"\INPUT\ZoneNames.xlsx"
    #balDir = FREELANCE_DIR + r"\086_SmartCampus1\TRNSYS"
    # Output: file names are prefixed with a timestamp so runs never collide.
    fileName = "ZerothRun"
    csvOutDir = projectDir + r"\Analysis\\"
    matfileOutDir = projectDir + r"\Analysis\\"
    now = datetime.datetime.now()
    nowStr = "{}-{}-{} {}-{}-{} ".format(now.year,
        now.month,now.day, now.hour,now.minute,now.second)
    csvFileFullPath = os.path.join(csvOutDir,nowStr + fileName + ".csvIGNORED")
    matFileFullPath = os.path.join(matfileOutDir, nowStr + fileName + ".mat")
    #===========================================================================
    # Loop each variant
    #===========================================================================
    # Get the var dirs
    #variantDirs = projectDir
    #fullVariantPaths = [os.path.join(projectDir,d) for d in variantDirs]
    # fullVariantPaths = [d for d in fullVariantPaths if os.path.isdir(d)]
    #fullOutPaths = [os.path.join(d,"OUT") for d in fullVariantPaths]
    #variantPathPairs = zip(variantDirs,fullOutPaths)
    # Only a single "Main" variant is processed in this project.
    variantPathPairs = [["Main",projectDir]]
    #===========================================================================
    # # Get OUT files ----------------------------------------------------------
    #===========================================================================
    superFrameList = list()
    for pair in variantPathPairs:
        print pair
        thisDir = pair[1]
        inputFiles = utilPath.getFilesByExtRecurse(thisDir, "out")
        frameList = list()
        #for filePath in inputFiles[20:25]:
        for filePath in inputFiles:
            # Skip unless 3 elements in file name!
            pureFileName = os.path.splitext(os.path.split(filePath)[1])[0]
            splitFileName = re.split("_",pureFileName)
            if len(splitFileName)==3:
                thisFrame = xrg.load_single_out_file(filePath)
            else:
                logging.info("(Skipping '{}')".format(os.path.split(pureFileName)[1]))
            # NOTE(review): this append runs even when the name check fails,
            # re-appending the previous thisFrame (or raising NameError on
            # the first file) — the append likely belongs inside the if.
            frameList.append(thisFrame)
        superFrameList += frameList
        #superFrameList.append(frameList)
        #print superFrameList
        #xrg.displayFrame(thisFrame)
    logging.info("Found '{}' OUT frames over all variants)".format(len(superFrameList)))
    #===========================================================================
    # # Get BAL files (summary files excluded) ---------------------------------
    #===========================================================================
    for pair in variantPathPairs:
    #for pair in [variantPathPairs[0]]:
        print pair
        thisDir = pair[1]
        inputFiles = utilPath.getFilesByExtRecurse(thisDir, "bal")
        inputFiles = [item for item in inputFiles if not re.search("SUMMARY", item )]
        frameList = list()
        #for filePath in inputFiles[20:25]:
        for filePath in inputFiles:
            # Skip unless 3 elements in file name!
            pureFileName = os.path.splitext(os.path.split(filePath)[1])[0]
            splitFileName = re.split("_",pureFileName)
            #if len(splitFileName)==3:
            thisFrame = xrg.load_single_bal_file(filePath)
            #else:
            #    logging.info("(Skipping '{}')".format(os.path.split(pureFileName)[1]))
            frameList.append(thisFrame)
        superFrameList += frameList
        #superFrameList.append(frameList)
        #print superFrameList
    logging.info("Found '{}' BAL files over all variants)".format(len(superFrameList)))
    #===========================================================================
    # Merge frames
    #===========================================================================
    frameName = "dataFrame"
    finalFrame = xrg.mergeFrames(frameName, superFrameList,True)
    finalFrame = xrg.add_simple_time(finalFrame)
    #finalFrame._convert_to_ndarray()
    #xrg.displayFrame(finalFrame)
    #===========================================================================
    # # Add descriptions -------------------------------------------------------
    #===========================================================================
    descriptions = metaTab.getDescriptionsOut(descriptionsFilePath)
    for desc in descriptions:
        # desc[0] is a (system, pointType, number) key; desc[1] the text.
        searchSys = desc[0][0]
        searchPointType = desc[0][1]
        searchNum = desc[0][2]
        #print desc
        searchIdx = (xrg.idx("system",searchSys) &
            xrg.idx("pointType",searchPointType) &
            xrg.idx("number",searchNum))
        #print searchIdx, type(searchIdx)
        descValue = desc[1]
        # IN PLACE
        xrg.renameHeader(finalFrame,searchIdx,"description",descValue,True)
    #===========================================================================
    # # Convert kJ/hr to kW ----------------------------------------------------
    #===========================================================================
    def convertKJHtokW(array):
        # 1 kJ/hr = 1/3600 kW
        array = array / 3600
        return array
    thisMask = xrg.idx("units",r"kJ/hr")
    xrg.inPlaceFunction(finalFrame,thisMask,convertKJHtokW)
    xrg.renameHeader(finalFrame,thisMask,"units","kW")
    #----------------------------------------------------------------- Save data
    #xrg.displayFrame(finalFrame)
    finalFrame.saveToCSV(csvFileFullPath)
    finalFrame.saveToMat(matFileFullPath)
finalFrame.saveToMat(matFileFullPath)
def _decathLoad():
logging.debug("Load".format())
loadMatPath = FREELANCE_DIR + r"\DecathlonSim\Analysis\\2012-10-31 13-28-14 ZerothRun.mat"
thisFrame = xrg.load_from_mat(loadMatPath)
print thisFrame.headersArray
if __name__ == "__main__":
logging.config.fileConfig(ABSOLUTE_LOGGING_PATH)
myLogger = logging.getLogger()
myLogger.setLevel("DEBUG")
logging.debug("Started _main".format())
_create()
#_decathLoad()
logging.debug("Finished _main".format())
| [
"Admin@6CORE"
] | Admin@6CORE |
8d8f12dad8abc695708a624d836491390fd1a623 | c5281bec992956018ee8c4e9f9561eab0698ceeb | /tests/test_utils.py | 1349491e3a2f9bf3324ef1fa7f2c62741365de59 | [
"Apache-2.0"
] | permissive | swagger-atlas/atlas | db2b2e3ee09f9afa51d42c4156d73292922410ac | 64a0a6e3107da9f7cf894880823badfa84e11f25 | refs/heads/master | 2023-01-12T03:48:21.665390 | 2019-09-20T17:24:19 | 2019-09-20T17:24:19 | 180,743,015 | 3 | 1 | Apache-2.0 | 2023-01-03T19:30:16 | 2019-04-11T07:56:48 | Python | UTF-8 | Python | false | false | 4,441 | py | from unittest import mock
import pytest
from atlas.modules import utils, exceptions, constants
class TestGetRefPathArray:
    """Tests for utils.get_ref_path_array: splitting a $ref into path parts."""
    def test_local_reference(self):
        assert utils.get_ref_path_array("#/definition/Sample") == ["definition", "Sample"]
    def test_external_reference(self):
        # Cross-file references are unsupported and must raise.
        with pytest.raises(exceptions.ImproperSwaggerException):
            utils.get_ref_path_array("document.json#/sample")
class TestGetRefName:
    """utils.get_ref_name should return the last path component of a $ref."""
    @mock.patch('atlas.modules.utils.get_ref_path_array')
    def test_get_ref_name(self, patched_ref_array):
        patched_ref_array.return_value = ["def", "abc"]
        assert utils.get_ref_name("#/def/abc") == "abc"
        patched_ref_array.assert_called_with("#/def/abc")
@mock.patch('atlas.modules.utils.get_ref_path_array')
class TestResolveReference:
    """utils.resolve_reference should walk the ref path through the specs dict.

    The class-level patch injects the mocked get_ref_path_array into every
    test method as `patched_ref_array`.
    """
    def test_no_reference(self, patched_ref_array):
        # Empty path: the specs themselves are returned unchanged.
        patched_ref_array.return_value = []
        specs = {"a": 1}
        assert utils.resolve_reference(specs, "definition") == specs
        patched_ref_array.assert_called_with("definition")
    def test_valid_reference(self, patched_ref_array):
        patched_ref_array.return_value = ["a"]
        specs = {"a": {"b": 1}}
        assert utils.resolve_reference(specs, "definition") == {"b": 1}
        patched_ref_array.assert_called_with("definition")
    def test_valid_reference_with_recursion(self, patched_ref_array):
        # Multi-element paths resolve recursively into nested dicts.
        patched_ref_array.return_value = ["a", "b"]
        specs = {"a": {"b": 1}}
        assert utils.resolve_reference(specs, "definition") == 1
        patched_ref_array.assert_called_with("definition")
    def test_invalid_reference(self, patched_ref_array):
        # A path component missing from the specs must raise.
        patched_ref_array.return_value = ["a", "c"]
        specs = {"a": {"b": 1}}
        with pytest.raises(exceptions.ImproperSwaggerException):
            utils.resolve_reference(specs, "definition")
class TestConvertToSnakeCase:
    """utils.convert_to_snake_case over camelCase, PascalCase and mixed input."""
    def test_with_camel_case(self):
        assert utils.convert_to_snake_case("camelCase") == "camel_case"
    def test_with_pascal_case(self):
        assert utils.convert_to_snake_case("CamelCase") == "camel_case"
    def test_with_normal_string(self):
        # Already-lowercase input is passed through unchanged.
        assert utils.convert_to_snake_case("magic") == "magic"
    def test_with_hybrid_string(self):
        assert utils.convert_to_snake_case("abc_caseLetter") == "abc_case_letter"
class TestGetProjectPath:
    """utils.get_project_path should return the current working directory."""
    @mock.patch('atlas.modules.utils.os')
    def test_get_project_path(self, patched_os):
        patched_os.getcwd.return_value = "path"
        assert utils.get_project_path() == "path"
class TestOperationIDName:
    """utils.operation_id_name: derive an operation id from URL + HTTP method.

    Path parameters become PARAM_1, PARAM_2, ...; the suffix depends on the
    method and whether the URL ends in a parameter (read/update/delete) or
    not (list/create).
    """
    def test_delete_method(self):
        assert utils.operation_id_name("x/{id}/y/{id}", constants.DELETE) == "x_PARAM_1_y_PARAM_2_delete"
    def test_create_method(self):
        assert utils.operation_id_name("x/{id}/y", constants.POST) == "x_PARAM_1_y_create"
    def test_list_method(self):
        assert utils.operation_id_name("x/{id}/y", constants.GET) == "x_PARAM_1_y_list"
    def test_read_method(self):
        assert utils.operation_id_name("x/{id}/y/{id}", constants.GET) == "x_PARAM_1_y_PARAM_2_read"
    def test_update_method(self):
        assert utils.operation_id_name("x/{id}/y/{id}", constants.PUT) == "x_PARAM_1_y_PARAM_2_update"
    def test_patch_method(self):
        assert utils.operation_id_name("x/{id}/y/{id}", constants.PATCH) == "x_PARAM_1_y_PARAM_2_partial_update"
class TestExtractResourceNameFromParam:
    """utils.extract_resource_name_from_param: infer a resource name from a
    parameter name plus the URL it appears in."""
    def test_with_suffix(self):
        # "<resource>_id" style parameters name the resource directly.
        assert utils.extract_resource_name_from_param("pet_id", "") == "pet"
    def test_without_suffix_with_query_params(self):
        # Bare "id" in a query parameter cannot be tied to a resource.
        assert utils.extract_resource_name_from_param("id", "x/{id}/y/{y_id}/z/{abc}", constants.QUERY_PARAM) is None
    def test_without_suffix_with_path_params_not_in_settings_identifier(self):
        assert utils.extract_resource_name_from_param("abc", "x/{id}/y/{y_id}/z/{abc}", constants.PATH_PARAM) is None
    def test_without_suffix_with_path_params(self):
        # Bare "id" in a path takes the preceding URL segment as resource.
        assert utils.extract_resource_name_from_param("id", "x/{id}/y/{y_id}/z/{abc}", constants.PATH_PARAM) == "x"
    def test_without_suffix_with_first_resource(self):
        # No segment precedes the parameter, so no resource can be inferred.
        assert utils.extract_resource_name_from_param("id", "{id}/y/{y_id}/z/{abc}", constants.PATH_PARAM) is None
    def test_without_suffix_with_singular(self):
        # Plural preceding segments are singularized ("pets" -> "pet").
        assert utils.extract_resource_name_from_param("id", "pets/{id}/y/{y_id}/z/{abc}", constants.PATH_PARAM) == "pet"
| [
"kush.jain@joshtechnologygroup.com"
] | kush.jain@joshtechnologygroup.com |
a1bb1aaf10d01f0cf95dcf59433fd0ff850d609e | e15e56ddca0d1aa989725ad2766f9cf36bcbde23 | /bin/rundevserver | a84db05f4423dc055598d11370934509319f8123 | [
"Apache-2.0"
] | permissive | ylamgarchal/dci-feeder | f7c26ed78aa61ee2e90cf4d047909b357f013fab | 27c0236c0986ee96fac8209bf69b57e71e38eaf5 | refs/heads/master | 2022-02-11T07:05:17.586018 | 2019-11-25T16:29:37 | 2019-11-25T16:50:07 | 217,178,791 | 0 | 0 | Apache-2.0 | 2022-01-06T22:39:23 | 2019-10-24T00:36:35 | Python | UTF-8 | Python | false | false | 813 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
#
# Copyright (C) Red Hat, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from dcifeeder import app
from dcifeeder import settings as s
if __name__ == '__main__':
    # Development entry point: Flask's built-in server (not for production),
    # bound to all interfaces so it is reachable from outside the host.
    feederapp = app.create_app()
    feederapp.run(debug=s.API_DEBUG, threaded=True, host='0.0.0.0')
| [
"yassine.lamgarchal@redhat.com"
] | yassine.lamgarchal@redhat.com | |
486e48f837ce645846b31ff5ce9ea96f338a5c11 | 391437a03dc30a21ef7cc35d1b51f888da720617 | /test/travis_test_wall_trace.py | b1f54ae1ad06095c25b8fd98e155321f9372cca3 | [] | no_license | takasku/pimouse_run_corridor | 9e60eecd797488901790402b67ca2153d5e8557d | d40b966af1c55b15430e49005e90c68634252c81 | refs/heads/master | 2020-04-25T15:25:44.182451 | 2019-03-06T09:28:44 | 2019-03-06T09:28:44 | 172,856,041 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,286 | py | #!/usr/bin/env python
#encoding: utf8
import unittest, rostest
import rosnode, rospy
import time
class WallTraceTest(unittest.TestCase):
    """Integration test for the wall-tracing node via the Raspberry Pi Mouse
    device files: write fake light-sensor readings, then read back the raw
    motor commands the node produced."""
    def set_and_get(self,lf,ls,rs,rf):
        # Inject sensor values (left-forward, left-side, right-side,
        # right-forward). NOTE(review): the device file is written in the
        # order rf rs ls lf — presumably the device expects right-to-left
        # ordering; confirm against the rtlightsensor driver.
        with open("/dev/rtlightsensor0","w") as f:
            f.write("%d %d %d %d\n" % (rf,rs,ls,lf))
        time.sleep(0.3)  # give the node time to react before sampling motors
        with open("/dev/rtmotor_raw_l0","r") as lf,\
             open("/dev/rtmotor_raw_r0","r") as rf:
            left = int(lf.readline().rstrip())
            right = int(rf.readline().rstrip())
        return left, right
    def test_io(self):
        # Obstacle ahead: both motors must stop.
        left, right = self.set_and_get(400,100,100,0)
        self.assertTrue(left == 0 and right == 0,"cannot stop")
        # Side sensors alone must not stop the robot.
        left, right = self.set_and_get(0,5,1000,0)
        self.assertTrue(left == right != 0,"stop wrongly by side sensors")
        # Steering: left-side readings above/below the setpoint curve the robot.
        left, right = self.set_and_get(0,10,0,0)
        self.assertTrue(left < right ,"do not curve to left")
        left, right = self.set_and_get(0,200,0,0)
        self.assertTrue(left > right ,"do not curve to right")
        left, right = self.set_and_get(0,5,0,0)
        self.assertTrue(0 < left == right ,"curve wrongly")
if __name__ == '__main__':
    time.sleep(3)  # allow the node under test to start before asserting
    rospy.init_node('travis_test_wall_trace')
    rostest.rosrun('pimouse_run_corridor','travis_test_wall_trace',WallTraceTest)
| [
"fjkks5is@engs.tamagawa.ac.jp"
] | fjkks5is@engs.tamagawa.ac.jp |
4f238047a913854c18e3f54d8ee509ac319bf7c1 | 7873042aa7b983a7c1075ddcf637135eea66adcd | /movie/views.py | 802a69dbda660cfc60cfc2fa73a7d6ded3e48c56 | [] | no_license | connieGao0819/MovieHunter | f6a1a717e0bf441b1b825dd2461d72cfcb1276e9 | ad80b34a0221462bc2850991f14149b46a72dcc3 | refs/heads/master | 2020-03-06T17:58:03.548201 | 2018-03-26T19:41:09 | 2018-03-26T19:41:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,017 | py | from django.shortcuts import render
from django.views.decorators.csrf import csrf_protect
from movie.models import *
from django.http import HttpResponse
import json
from movie import index
# Build the search index once when this module is imported (module-level
# side effect).
index.index_dir()
# NOTE(review): dumps the whole permuterm index to stdout at import time —
# presumably leftover debugging output; confirm before removing.
print(index.permuterm_index.dict())
def add_seen(request, movie_id):
    """Toggle the 'seen' mark on movie_id for the current user (AJAX only).

    First toggle-on also bumps the movie's popularity weight by 3. Returns
    '1' when marked, '0' when unmarked; non-AJAX requests get no response
    (behaviour preserved from the original).
    """
    if not request.is_ajax():
        return None
    username = request.user.get_username()
    history = Seen.objects.filter(movieid_id=movie_id, username=username)
    if len(history) != 0:
        history.delete()
        return HttpResponse('0')
    # Bump popularity by 3; the row is replaced rather than updated in place.
    popularity = Popularity.objects.get(movieid_id=movie_id)
    bumped_weight = popularity.weight + 3
    popularity.delete()
    Popularity(movieid_id=movie_id, weight=bumped_weight).save()
    Seen(movieid_id=movie_id, username=username).save()
    return HttpResponse('1')
def add_expect(request, movie_id):
    """Toggle the 'expect' (watch-later) mark on movie_id for the current
    user (AJAX only).

    First toggle-on also bumps the movie's popularity weight by 3. Returns
    '2' when marked, '0' when unmarked; non-AJAX requests get no response
    (behaviour preserved from the original).
    """
    if not request.is_ajax():
        return None
    username = request.user.get_username()
    history = Expect.objects.filter(movieid_id=movie_id, username=username)
    if len(history) != 0:
        history.delete()
        return HttpResponse('0')
    # Bump popularity by 3; the row is replaced rather than updated in place.
    popularity = Popularity.objects.get(movieid_id=movie_id)
    bumped_weight = popularity.weight + 3
    popularity.delete()
    Popularity(movieid_id=movie_id, weight=bumped_weight).save()
    Expect(movieid_id=movie_id, username=username).save()
    return HttpResponse('2')
@csrf_protect
def detail(request, model, id):
    """Detail page for a movie or an actor.

    For a movie: bump its popularity weight by 1, set object.flag to 1/2 if
    the logged-in user marked it seen/expected, and list its actors. For an
    actor: list their movies. Any lookup failure renders the 404 page.

    NOTE(review): `object` shadows the builtin, the bare `except:` clauses
    swallow all errors, and `label`/`object` stay unbound for any other
    model — presumably only Movie and Actor are ever passed in.
    """
    items = []
    try:
        if model.get_name() == 'movie' and id != 'None':
            # Replace-then-recreate bumps the popularity weight by 1;
            # a missing row starts at weight 1.
            try:
                d = Popularity.objects.get(movieid_id=id)
                weight = d.weight
                d.delete()
                new_record = Popularity(movieid_id=id, weight=weight + 1)
                new_record.save()
            except:
                new_record = Popularity(movieid_id=id, weight=1)
                new_record.save()
            label = 'actor'
            object = model.objects.get(movieid=id)
            records = Act.objects.filter(movieid_id=id)
            if request.user.get_username() != '':
                # Records stringify as "username|movieid"; split out the ids.
                seen_list = [str(x).split('|')[1] for x in
                    Seen.objects.filter(username=request.user.get_username())]
                expect_list = [str(y).split('|')[1] for y in
                    Expect.objects.filter(username=request.user.get_username())]
                if id in seen_list:
                    object.flag = 1
                if id in expect_list:
                    object.flag = 2
            for query in records:
                for actor in Actor.objects.filter(actorid=query.actorid_id):
                    items.append(actor)
        if model.get_name() == 'actor':
            label = 'movie'
            object = model.objects.get(actorid=id)
            records = Act.objects.filter(actorid_id=id)
            for query in records:
                for movie in Movie.objects.filter(movieid=query.movieid_id):
                    items.append(movie)
    except:
        return render(request, '404.html')
    return render(request, '{}_list.html'.format(label), {'items': items, 'number': len(items), 'object': object})
def whole_list(request, model, page):
    """Render page `page` (1-based) of all `model` rows, 10 per page.

    Renders the 404 page when `page` is missing or past the last page.
    """
    if not page:
        return render(request, '404.html')
    page = int(page)
    objects = model.objects.all()
    total_page = -(-len(objects) // 10)  # ceiling division
    if page > total_page:
        return render(request, '404.html')
    start = 10 * (page - 1)
    end = len(objects) if page == total_page else 10 * page
    data = {
        'items': objects[start:end],
        'number': len(objects),
        'pages': list(range(1, total_page + 1)),
        'current_page': page,
        'next_page': page + 1,
        'last_page': page - 1,
        'page_number': total_page,
    }
    # No previous link on the first page, no next link on the last.
    if page == 1:
        del data['last_page']
    if page == total_page:
        del data['next_page']
    return render(request, '{}_list.html'.format(model.get_name()), data)
def search(request, pattern):
    """Substring search over movie titles and actor names.

    `%20` in the URL-supplied pattern is decoded back to a space first.
    """
    pattern = pattern.replace("%20", " ")
    movie_hits = Movie.objects.filter(title__contains=pattern)
    actor_hits = Actor.objects.filter(name__contains=pattern)
    context = {
        'items1': movie_hits, 'search1': pattern, 'number1': len(movie_hits),
        'items2': actor_hits, 'search2': pattern, 'number2': len(actor_hits),
    }
    return render(request, 'searchresult.html', context)
def search_suggest(request, str):
    """Autocomplete endpoint: up to 3 movie and 3 actor suggestions for the
    typed prefix, returned as JSON {'movie': [...], 'actor': [...]}.

    NOTE(review): the parameter name `str` shadows the builtin. When the
    prefix query yields <= 3 rows, its results are discarded and replaced by
    a case-sensitive `contains` query — presumably intended as a fallback,
    but prefix matches can be lost (istartswith is case-insensitive,
    contains is not); confirm intended behaviour.
    """
    movie_list, actor_list = [], []
    # movie
    movies = Movie.objects.filter(title__istartswith=str).order_by('-rate')
    if len(movies) > 3:
        for i in range(3):
            movie_list.append({'movieid': movies[i].movieid, 'poster': movies[i].poster, 'title': movies[i].title})
    else:
        # Fallback: substring match anywhere in the title.
        movies = Movie.objects.filter(title__contains=str).order_by('-rate')
        num = 3 - len(movie_list) if len(movies) > 3 - len(movie_list) else len(movies)
        for i in range(num):
            movie_list.append({'movieid': movies[i].movieid, 'poster': movies[i].poster, 'title': movies[i].title})
    # actor
    actors = Actor.objects.filter(name__istartswith=str)
    if len(actors) > 3:
        for i in range(3):
            actor_list.append({'actorid': actors[i].actorid, 'photo': actors[i].photo, 'name': actors[i].name})
    else:
        # Fallback: substring match anywhere in the name.
        actors = Actor.objects.filter(name__contains=str)
        num = 3 - len(actor_list) if len(actors) > 3 - len(actor_list) else len(actors)
        for i in range(num):
            actor_list.append({'actorid': actors[i].actorid, 'photo': actors[i].photo, 'name': actors[i].name})
    # result in a dictionary
    result = {'movie': movie_list, 'actor': actor_list}
    return HttpResponse(json.dumps(result, ensure_ascii=False))
@csrf_protect
def seen(request, movie_id):
    """List all movies the current user marked as seen.

    A POST first removes `movie_id` from the seen list; a failed lookup
    renders the 404 page (bare except preserved from the original).
    """
    username = request.user.get_username()
    if request.POST:
        try:
            Seen.objects.get(username=username, movieid_id=movie_id).delete()
        except:
            return render(request, '404.html')
    movies = []
    for record in Seen.objects.filter(username=username):
        # Records stringify as "username|movieid"; split out the movie id.
        seen_id = str(record).split('|')[1]
        movies.append(Movie.objects.get(movieid=seen_id))
    return render(request, 'seen.html', {'items': movies, 'number': len(movies)})
def expect(request, movie_id):
    """List all movies the current user marked as expected (watch later).

    A POST first removes `movie_id` from the expect list; a failed lookup
    renders the 404 page (bare except preserved from the original).
    """
    username = request.user.get_username()
    if request.POST:
        try:
            Expect.objects.get(username=username, movieid_id=movie_id).delete()
        except:
            return render(request, '404.html')
    movies = []
    for record in Expect.objects.filter(username=username):
        # Records stringify as "username|movieid"; split out the movie id.
        expected_id = str(record).split('|')[1]
        movies.append(Movie.objects.get(movieid=expected_id))
    return render(request, 'expect.html', {'items': movies, 'number': len(movies)})
| [
"jgao4@wpi.edu"
] | jgao4@wpi.edu |
9de7481bfb9c7ec3e011a8ebfbeec40ca8cd62b0 | a7f7981a91c3213c011abd33778278c58fa4aa55 | /python-udemy/Practica1/Practica01_02.py | 55a5c9e09ea6471133973896d9386740c0939148 | [] | no_license | nicolassnider/tkinter_flask_django | 9f631c01b0d2b14758deb4ba9c74f0ca59b12d97 | 14355f130570a6e2dccd81804edfe35ee418a099 | refs/heads/master | 2023-01-23T18:32:14.775758 | 2020-12-05T00:44:43 | 2020-12-05T00:44:43 | 318,354,016 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 430 | py | '''
Problema 02: Hallar el cociente y residuo (resto) de dos números
enteros.
Análisis: Para la solución de este problema, se requiere que
ingrese dos números entero por teclado y el sistema realice el
cálculo respectivo para hallar el cociente y residuo.
'''
# Read the two operands. Note: they are parsed as floats even though the
# exercise statement above speaks of integers ("números enteros"), so the
# quotient and remainder below are floats.
num1=float(input("num1:\n"))
num2=float(input("num2:\n"))
# Floor division and modulo of the two inputs.
cociente=num1 // num2
residuo=num1 % num2
print(f"cociente = {cociente}")
print(f"residuo = {residuo}")
| [
"nicolas.snider@soulit.io"
] | nicolas.snider@soulit.io |
563e7be8d8c11aab5ab1f381e5abf2ed03a6b4d2 | 7ff37f8df377e30f09e5947c6097a7db54e8fab5 | /WebApp/model/model.py | 7d7f6b8f10cf10548fd969cc1c7914afe0deb767 | [] | no_license | jamesnelly/EmergingTechnologies-project | baa4980505f87bc6fff2771a5edfd279f45550f1 | b10a9048cc8bb9b147e118488f531cb3acade4f0 | refs/heads/master | 2020-08-06T13:11:57.695949 | 2019-12-13T18:49:26 | 2019-12-13T18:49:26 | 212,987,275 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 377 | py | #adapted from https://www.youtube.com/watch?v=n5a0WBIQitI
#loading the dataset
from keras.datasets import mnist
(x_train, y_train), (x_test, y_test) = mnist.load_data()
import tensorflow as tf
from keras import models
from keras import layers
import keras as kr
import numpy as np
import matplotlib.pyplot as plt
#creating the sequential model
mod = kr.models.Sequential()
| [
"g00346996@gmit.ie"
] | g00346996@gmit.ie |
ef45fbb0a58276e6b49fab00d2fbbc90dbba4fd6 | 20fa81fb1ba9c6e77be0f6e115ff643c4a608146 | /creational/abstract_factory.py | 9ae16b8d1eebf4d8c9ef1b2551d206b197873fe7 | [
"MIT"
] | permissive | GustavoBoaz/projeto_Patterns_Python | c6bd344a308c0f29c21a435c03d582226f434ba1 | b46c6dd6e355fce8f769b76c432ac8a00f236438 | refs/heads/master | 2022-09-06T19:15:57.183938 | 2019-11-07T19:15:57 | 2019-11-07T19:15:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,011 | py | """
Abstract Factory é um padrão de design criacional que permite produzir
famílias de objetos relacionados sem especificar suas classes concretas.
Como Implementar:
1. Mapeie uma matriz de tipos de produtos distintos versus variantes
desses produtos.
2. Declare interfaces abstratas do produto para todos os tipos de produtos.
Em seguida, faça com que todas as classes de produtos concretas
implementem essas interfaces.
3. Declare a interface abstrata de fábrica com um conjunto de métodos de
criação para todos os produtos abstratos.
4. Implemente um conjunto de classes de fábrica de concreto, uma para cada
variante de produto.
5. Crie o código de inicialização de fábrica em algum lugar do aplicativo.
Ele deve instanciar uma das classes de fábrica de concreto, dependendo da
configuração do aplicativo ou do ambiente atual. Passe esse objeto de
fábrica para todas as classes que constroem produtos.
6. Examine o código e encontre todas as chamadas diretas para os construtores
de produtos. Substitua-os por chamadas para o método de criação apropriado
no objeto de fábrica.
"""
from abc import ABC, abstractmethod
#===========================================Definição de classes abstratas
class ProductA(ABC):
""" This class is used for implements a new product in the system """
@abstractmethod
def build_productA(self) -> str:
""" return the str building """
pass
class ProductB(ABC):
""" This class is used for implements a new product in the system """
@abstractmethod
def build_productB(self) -> str:
""" return the str building """
pass
class AbstractFactory(ABC):
""" This class is used for call method of creation of in the product in the system """
@abstractmethod
def create_productA(self) -> ProductA:
""" return the ProductA building """
pass
@abstractmethod
def create_productB(self) -> ProductB:
""" return the ProductB building """
pass
#=========================================Definição dos Produtos concretos
class ProductA1(ProductA):
def build_productA(self) -> str:
return "Concrete ProductA1 Build!"
class ProductB1(ProductB):
def build_productB(self) -> str:
return "Concrete ProductB1 Build!"
class ProductA2(ProductA):
def build_productA(self) -> str:
return "Concrete ProductA2 Build!"
class ProductB2(ProductB):
def build_productB(self) -> str:
return "Concrete ProductB2 Build!"
#=========================================Definição dos Fabricas concretas
class Factory1(AbstractFactory):
def create_productA(self) -> ProductA:
return ProductA1()
def create_productB(self) -> ProductB:
return ProductB1()
class Factory2(AbstractFactory):
def create_productA(self) -> ProductA:
return ProductA2()
def create_productB(self) -> ProductB:
return ProductB2()
#======================================================Definição do Cliente
def af_client(abstract_factory: AbstractFactory) -> None:
while True:
try:
option = input("Criador produto [A][B] | Exit[C]: ")
if(option == "a"):
print(abstract_factory.create_productA().build_productA())
elif(option == "b"):
print(abstract_factory.create_productB().build_productB())
elif(option == "c"):
break
except:
print("Option false")
continue
def main_af():
while True:
try:
option = int(input("Fabrica option [1][2] | Exit[0]: "))
if(option == 1):
af_client(Factory1())
elif(option == 2):
af_client(Factory2())
elif(option == 0):
break
except:
print("Option false")
continue | [
"gustavo.boaz@hotmail.com"
] | gustavo.boaz@hotmail.com |
bc7adc55472ca0f7d4495f4427cf4e5f885f0825 | eaf2ff7b5ba40595c68f58427d89a90eb7696d84 | /main-4.py | c10a18c041f0cfc27793fe9c4ae1ecbef9c5dcb9 | [] | no_license | CodeRaker/learningMatplotlib | f5081eaccf05f9dbf75986ba154118c587779b6b | 499c7b8682d85639ff4d8e9b65adfbf959529c8b | refs/heads/master | 2020-08-05T06:06:35.352829 | 2019-10-02T20:06:28 | 2019-10-02T20:06:28 | 212,423,750 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 313 | py | # https://medium.com/@andykashyap/top-5-tricks-to-make-plots-look-better-9f6e687c1e08
import matplotlib.pyplot as plt
import seaborn as sns
sns.set()
deaths = [1,2,3,4,5,6,7]
causes = [1,2,3,4,5,3,4]
#plt.style.use("classic")
plt.plot(deaths, causes)
plt.legend('ABCDEF', ncol=2, loc='upper left');
plt.show() | [
"peterglad1985@hotmail.com"
] | peterglad1985@hotmail.com |
115d00ffaebc79e81a2a2015da2ba91356e18b6e | 1235389bc1ebb52e4d045b00c888234224852a4f | /f_camera_photonics/component_capture.py | c1d78c9e49ba636090990d70619e96bf360c5eee | [] | no_license | jtchiles/camera_photonics | bc2fe6cafa42e8a0d51c8ba99b3ea22bcfcd839b | f186d4cb9a53b9d3544c0677f0ce733cb4313da8 | refs/heads/master | 2021-06-11T19:02:16.223909 | 2020-01-10T19:22:00 | 2020-01-10T19:22:00 | 140,327,371 | 0 | 0 | null | 2018-07-09T18:35:15 | 2018-07-09T18:35:15 | null | UTF-8 | Python | false | false | 981 | py | # Trying to grab images out of cv2 with the USB cam
import cv2
import os
from contextlib import contextmanager
import numpy as np
@contextmanager
def open_camera(camera_port=0):
camera = cv2.VideoCapture(camera_port)
yield camera
del(camera)
## Low level conditioning
# Number of frames to throw away while the camera adjusts to light levels
ramp_frames = 1
def get_frames(nframes=1):
with open_camera() as camera:
for i in range(ramp_frames):
camera.read()
frame_list = []
for i in range(nframes):
_, img = camera.read()
frame_list.append(img)
return frame_list
def single_shot():
return get_frames(1)[0]
def video_mean(nframes=2):
stack = np.array(get_frames(nframes))
return np.mean(stack, axis=0)
if __name__ == '__main__':
print('Called')
from f_camera_photonics import cvshow
print('Taking pic')
img = single_shot()
print('Displaying')
cvshow(img)
print('Complete') | [
"alexander.tait@nist.gov"
] | alexander.tait@nist.gov |
e9eb2c81dd1c2ed4a7921ec50c1f4ca9e1c1f484 | ab83ce38d59c37c8a55a4e5bd1f49bc2c2538777 | /__env__py3Rest/bin/python-config | b3e1a396ad5ca53df75755e6f67b4d2e6ecc4af8 | [] | no_license | Archu-S-M/Py3Rest | 3df5959d30d96358af97f8434e72c4b4af897889 | d455f5dec45577d4625ca5f5977f9248834fbd26 | refs/heads/master | 2021-01-19T00:21:05.019136 | 2017-04-04T18:26:00 | 2017-04-04T18:26:00 | 87,152,544 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,354 | #!/var/www/html/Py3Rest/__env__py3Rest/bin/python
import sys
import getopt
import sysconfig
valid_opts = ['prefix', 'exec-prefix', 'includes', 'libs', 'cflags',
'ldflags', 'help']
if sys.version_info >= (3, 2):
valid_opts.insert(-1, 'extension-suffix')
valid_opts.append('abiflags')
if sys.version_info >= (3, 3):
valid_opts.append('configdir')
def exit_with_usage(code=1):
sys.stderr.write("Usage: {0} [{1}]\n".format(
sys.argv[0], '|'.join('--'+opt for opt in valid_opts)))
sys.exit(code)
try:
opts, args = getopt.getopt(sys.argv[1:], '', valid_opts)
except getopt.error:
exit_with_usage()
if not opts:
exit_with_usage()
pyver = sysconfig.get_config_var('VERSION')
getvar = sysconfig.get_config_var
opt_flags = [flag for (flag, val) in opts]
if '--help' in opt_flags:
exit_with_usage(code=0)
for opt in opt_flags:
if opt == '--prefix':
print(sysconfig.get_config_var('prefix'))
elif opt == '--exec-prefix':
print(sysconfig.get_config_var('exec_prefix'))
elif opt in ('--includes', '--cflags'):
flags = ['-I' + sysconfig.get_path('include'),
'-I' + sysconfig.get_path('platinclude')]
if opt == '--cflags':
flags.extend(getvar('CFLAGS').split())
print(' '.join(flags))
elif opt in ('--libs', '--ldflags'):
abiflags = getattr(sys, 'abiflags', '')
libs = ['-lpython' + pyver + abiflags]
libs += getvar('LIBS').split()
libs += getvar('SYSLIBS').split()
# add the prefix/lib/pythonX.Y/config dir, but only if there is no
# shared library in prefix/lib/.
if opt == '--ldflags':
if not getvar('Py_ENABLE_SHARED'):
libs.insert(0, '-L' + getvar('LIBPL'))
if not getvar('PYTHONFRAMEWORK'):
libs.extend(getvar('LINKFORSHARED').split())
print(' '.join(libs))
elif opt == '--extension-suffix':
ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
if ext_suffix is None:
ext_suffix = sysconfig.get_config_var('SO')
print(ext_suffix)
elif opt == '--abiflags':
if not getattr(sys, 'abiflags', None):
exit_with_usage()
print(sys.abiflags)
elif opt == '--configdir':
print(sysconfig.get_config_var('LIBPL'))
| [
"archusm007@gmail.com"
] | archusm007@gmail.com | |
a56fa3ef8236ac18d30826f76d2f59ca41e55070 | 1985271f6d8486de3ab503a6e8574e2c70a30b1b | /feature_engineering/plans/get_fe_diff_div.py | c31e4495a92018588dcef57762136a55c327beb6 | [] | no_license | shuangyumo/kdd-cup-2019-8th-solution | 15ca666e2f9af1e2c8ad4d7295ba500a40b17ca9 | f13fd8e1d8309de00476bd884b39716ffe4c3ced | refs/heads/master | 2022-03-11T11:32:35.802745 | 2019-10-05T16:45:08 | 2019-10-05T16:45:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,379 | py | import numpy as np # linear algebra
import pandas as pd # data processing, CSV file I/O (e.g. pd.read_csv)
def get_fe_diff_div(df):
df_fe = pd.DataFrame([])
top_m = 2
for i in range(1, top_m):
df_fe['diff_eta_{}_{}'.format(0, i)] = df['recom_eta_{}'.format(0)] - df['recom_eta_{}'.format(i)]
df_fe['diff_distance_{}_{}'.format(0, i)] = df['recom_distance_{}'.format(0)] - df['recom_distance_{}'.format(i)]
df_fe['diff_price_{}_{}'.format(0, i)] = df['recom_price_{}'.format(0)] - df['recom_price_{}'.format(i)]
df_fe['div_eta_{}_{}'.format(0, i)] = \
df['recom_eta_{}'.format(0)] / (df['recom_eta_{}'.format(i)] + 0.01)
df_fe['div_distance_{}_{}'.format(0, i)] = \
df['recom_distance_{}'.format(0)] / (df['recom_distance_{}'.format(i)] + 0.01)
df_fe['div_price_{}_{}'.format(0, i )] = \
df['recom_price_{}'.format(0)] / (df['recom_price_{}'.format(i)] + 0.01)
df_fe['div_price_eta_{}_{}'.format(i, i)] = \
df['recom_price_{}'.format(i)]/(df['recom_eta_{}'.format(i)] + 0.01)
df_fe['diff_price_distance_{}_{}'.format(i, i)] = \
df['recom_distance_{}'.format(i)]/(0.01 + df['recom_price_{}'.format(i)])
df_fe['diff_distance_eta_{}_{}'.format(i, i)] = \
df['recom_distance_{}'.format(i)]/(0.01 + df['recom_eta_{}'.format(i)])
return df_fe
| [
"noreply@github.com"
] | noreply@github.com |
add4b75288e365aec578df927975b3ca9f0318ec | b77dc17ee7ebad73e1028381739e01f708fb6c8b | /ppygui/doc/tut3.py | 44c2785da27ae01aa643b6d0c2e979d5a2da0b79 | [
"BSD-3-Clause",
"LicenseRef-scancode-public-domain"
] | permissive | typerlc/ankice-deps | 6c97bee1a926fc539b2f2e8ec345244e6188c6f1 | 4267af31c56ff2f51be65cad345fc7100ec53e78 | refs/heads/master | 2016-09-01T21:43:41.904988 | 2009-06-24T15:15:12 | 2009-06-24T15:15:12 | 235,231 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 820 | py | import ppygui as gui
# import the gui namespace
class MainFrame(gui.CeFrame):
# subclass to create our own main frame type
def __init__(self):
gui.CeFrame.__init__(self, title="Hello World")
# Create some child control
self.text_entry = gui.Edit(self)
self.button = gui.Button(self, "Copy")
self.label = gui.Label(self)
# Place our controls in a vertical box
sizer = gui.VBox()
sizer.add(self.text_entry)
sizer.add(self.button)
sizer.add(self.label)
# Set the vertical box as our main frame sizer
self.sizer = sizer
if __name__ == '__main__':
app = gui.Application(MainFrame())
# create an application bound to our main frame instance
app.run()
#launch the app !
| [
"richardc@pippin.(none)"
] | richardc@pippin.(none) |
9362ea26f839e1ffad4ad0b283e97271312b1665 | b2f63110ed9b2be2d51ab88dea551a50eb0ffe7b | /easy/string_mask.py | 8bb9e48380b86b9c243d9038a2278b671d80cb09 | [] | no_license | Nevermind7/codeeval | 07f321c855b850e72f3b18352d7ce4f55b0138da | be9cb36fd8fbac86d3fc1d33095c201e0be8ba9a | refs/heads/master | 2021-01-19T06:43:50.113601 | 2016-07-26T08:29:54 | 2016-07-26T08:29:54 | 63,690,975 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 252 | py | import sys
test_cases = open(sys.argv[1], 'r')
for test in test_cases:
word, code = test.strip().split()
paired = zip(word, code)
encoded = ''.join([x.upper() if y == '1' else x for (x, y) in paired])
print(encoded)
test_cases.close()
| [
"esser@anvo-systems-dresden.com"
] | esser@anvo-systems-dresden.com |
963a2d233e978b78dca560f8230b63663653446b | 6da9add72c81a230f2c63dcc73420a28304523ce | /clickx3/utils/constants/phone_number_prefix.py | 36d1124efa30f4ca380cba84c8e6865a2082d270 | [] | no_license | YeKelvin/clickx3-toolkit | e449eccae0d6ce2f69ffbf1380a1e410f562fac8 | 09336bf2c5c898625fbc90ddcf31c9794ca11da0 | refs/heads/master | 2023-06-04T15:09:06.005076 | 2021-06-28T12:07:08 | 2021-06-28T12:07:08 | 251,491,571 | 3 | 2 | null | null | null | null | UTF-8 | Python | false | false | 735 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @File : phone_number_prefix.py
# @Time : 2019/8/30 15:22
# @Author : Kelvin.Ye
from itertools import chain
# 移动
CMCC_CODE = [
'134',
'135',
'136',
'137',
'138',
'139',
'147',
'150',
'151',
'152',
'157',
'158',
'159',
'170',
'172',
'178',
'182',
'183',
'184',
'187',
'188'
]
# 联通
CUCC_CODE = ['130', '131', '132', '145', '155', '156', '170', '171', '175', '176', '185', '186']
# 电信
TELECOM_CODE = ['133', '149', '153', '158', '170', '173', '177', '178', '180', '181', '182', '189', '199']
# 手机号运营商前缀
MOBILENO_PREFIX = list(chain(CMCC_CODE, CUCC_CODE, TELECOM_CODE))
| [
"testmankelvin@163.com"
] | testmankelvin@163.com |
9be024aee9f1a12e949aa5184b9342662b5bb608 | edd3da6431675ce3b048b3557f1b410192491558 | /pd_transforming_data.py | 90ec53c3d745b55a3edab79171c06d8906cff808 | [] | no_license | abdlkdrgndz/data-mining | f845a5821f0800a5fd75807766593c97e5221b9f | 9882f60f75acfc55e5dc9cb2248c92e093b461e6 | refs/heads/master | 2023-02-14T21:24:39.921072 | 2021-01-10T01:19:59 | 2021-01-10T01:19:59 | 328,273,023 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 470 | py | import pandas
myDatas = {
"name" : ['Kadir', 'Kerim', 'Mehmet'],
"age" : [12,15,18],
"created_at" : ["12.10.1988","11.10.1988","13.10.1988"]
}
df = pandas.DataFrame(myDatas)
# örneğin yeni sutünda yaşların iki katını alalım
df['age_two'] = [i *2 for i in df.age]
# şimdi bunu transforming data yöntemi ile yapalım
def transfer(age):
return age * 3
# şimdi bu fonksiyonu çağıralım
df['age_three'] = df.age.apply(transfer)
print(df)
| [
"abdulkadir.gunduz@modanisa.com"
] | abdulkadir.gunduz@modanisa.com |
ff3e75465d6bc74082977d0011083bd7cb9d2fa1 | 8dc745854d73e362aa60747b3ab1b5a0dd975902 | /demo/funs/varying_args.py | 95be35fceef5366dfe3c457b3dac4f7b9e356ad3 | [] | no_license | srikanthpragada/PYTHON_27_AUG_2020 | 08a5898fe1a0ae110b74897ce6cce6595bdfce45 | af2aebbb0d83c5e8f381cdda844ab66d2362019c | refs/heads/master | 2022-12-30T10:12:56.688671 | 2020-10-09T14:20:43 | 2020-10-09T14:20:43 | 291,730,514 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 149 | py | def wish(*names, message="Hi"):
for n in names:
print(message, n)
wish("Bill", "Steve", message="Hello")
wish("Bill", "Steve", "Mike")
| [
"srikanthpragada@gmail.com"
] | srikanthpragada@gmail.com |
a81f92e9f7166f53fdeb2368141fc3350fe0594f | d4c0b1b7ad466448944d9b6ccadbd64125861c35 | /payment/functions/save_order.py | a7d68484777c78447a3fd7be50081c35d61835b6 | [] | no_license | ncpi34/jourdan | 82dc1874f41cdab60bb5b293153ab45d89c675ab | 76094283b02990e60d23a76d26093e5f09391571 | refs/heads/master | 2023-08-05T04:43:00.272063 | 2021-09-10T09:07:21 | 2021-09-10T09:07:21 | 404,347,860 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,598 | py | import os
from typing import IO
from account.backends import User
from cart.cart import Cart
from website.models import Article, FavoritesClient
from order.models import OrderItems, Order
from django.template.loader import render_to_string
from django.core.mail import EmailMessage
from django.conf import settings
import logging
db_logger = logging.getLogger('db')
def save_order(cart: Cart, order: Order, user: User):
"""
Save order
"""
db_logger.info("DEBUT payment/functions/save_order")
for item in cart:
article = Article.objects.filter(id=int(item['article_id']))
if article.exists():
db_logger.info("article existe")
item_order: OrderItems = OrderItems(
order=order,
quantity=item['quantity'],
article_code=item['article_code'],
price_with_taxes=item['price_with_taxes'],
name=item['name'],
price_type=item['price_type']
)
db_logger.info(f"item_order => {item_order}")
item_order.save()
# add favorites products for user
favorites, created = FavoritesClient.objects.get_or_create(
user=user,
article=article[0]
)
# add quantity to favorites
qty = 1
try:
qty = int(item['quantity'])
except:
pass
favorites.quantity += qty
favorites.save()
db_logger.info(f"favorites => {favorites}")
else:
db_logger.info(f"article n'existe pas => {article}")
db_logger.info("FIN payment/functions/save_order")
return True
def send_mail_to_user(request, order: Order, user: User, pdf: IO):
"""
Send mail to user
Args:
order: db object
user: db object
pdf: file
Returns: void
"""
current_site = request.get_host()
message = render_to_string('mail/order_email.html', {
'user': user,
'order': order,
'domain': current_site
})
tab_mails = [settings.DELIVERY_MAIL]
if user.email is not None:
tab_mails.append(user.email)
email = EmailMessage(
'Commande effectuée sur le site internet',
message,
settings.EMAIL_HOST_USER,
tab_mails
)
email.attach_file(pdf)
email.send(fail_silently=True)
def remove_file(path: str):
"""
Remove file
Args:
path: str
Returns: void
"""
if os.path.exists(path):
os.remove(path)
| [
"ledain.alexis@gmail.com"
] | ledain.alexis@gmail.com |
8c167de60486df2e7f98815c805c1cf0e63930fd | 8e340be7072bb9cb8b8bbe427e259fba51d54192 | /MyroName.py | 751ef00b50a8e69be96929e4ea45e71c0db29b6d | [] | no_license | KBrownASC/allstarcode | a80141c514cac662a9293655cdec3c1656dc53c1 | e22a0c2fa19e1e29ec6cfe273a9cb82ec6ec3865 | refs/heads/master | 2021-01-20T22:19:48.951183 | 2016-08-09T16:00:14 | 2016-08-09T16:00:14 | 63,092,882 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 463 | py | from Myro import *
init("sim")
#loops
#Functions
def drawK(size):
turnBy(90,"deg")
forward(2,size)
backward(1,size)
turnBy(-35,"deg")
forward(1,size)
backward(1,size)
turnBy(-95,"deg")
forward(1,size+.2)
turnBy(30,"deg")
def drawB(size):
turnBy(90,"deg")
forward(2,1)
motors(30,-3,1)
turnBy(270,"deg")
motors(30,-3,1)
#Code- actual work being done
penDown()
#drawK(1)
penUp()
penDown()
drawB(8)
| [
"Keroneobrownjr@gmail.com"
] | Keroneobrownjr@gmail.com |
0404f601868205f4d85cf25071622a33e7bd409e | 85f96ed9ab5510ec1120a0687c6de5c4a8774a9f | /RestAPI/config.py | 4c6208b12f3b361ae2c9f9c7f0cbe7df16fd8a18 | [] | no_license | ronistone/toilter-APP | cc40e7e65ad68845f9d1a58b9f955dd29a3a1e13 | da211df826045a5cf4b463ebd82fddce3949ee25 | refs/heads/master | 2020-07-11T12:34:06.718155 | 2017-06-14T00:41:36 | 2017-06-14T00:41:36 | 94,269,494 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 247 | py | DEBUG = True
DEVELOPMENT = True
SQLALCHEMY_DATABASE_URI = 'postgres:///restapi'
SQLALCHEMY_TRACK_MODIFICATIONS = True
SECRET_KEY = 'a1b2c3d4e5f6g7h8j9k10l11'
BUNDLE_ERRORS = True # related to Flask-RESTful errors, see docs
ERROR_404_HELP = False
| [
"ronistonejunior@gmail.com"
] | ronistonejunior@gmail.com |
560ff9f3f493317e04240dcf5f75f3fb3c0b41e7 | 500bca3e22bd0c30c79b74918e9847742b3c428e | /sdk/python/endpoints/online/mlflow/sklearn-diabetes/src/score.py | 4e2c269f5cb447804f693d12932e283e9219e83f | [
"MIT"
] | permissive | Azure/azureml-examples | 2304c862fd2e36e6640ecc4d09f69c5ed93b48ab | e5f7b247d4753f115a8f7da30cbe25294f71f9d7 | refs/heads/main | 2023-08-31T00:10:14.107509 | 2023-08-30T17:29:22 | 2023-08-30T17:29:22 | 289,334,021 | 1,219 | 1,074 | MIT | 2023-09-14T16:00:55 | 2020-08-21T18:04:26 | Jupyter Notebook | UTF-8 | Python | false | false | 979 | py | import logging
import os
import json
import mlflow
from io import StringIO
from mlflow.pyfunc.scoring_server import infer_and_parse_json_input, predictions_to_json
def init():
global model
global input_schema
# "model" is the path of the mlflow artifacts when the model was registered. For automl
# models, this is generally "mlflow-model".
model_path = os.path.join(os.getenv("AZUREML_MODEL_DIR"), "model")
model = mlflow.pyfunc.load_model(model_path)
input_schema = model.metadata.get_input_schema()
def run(raw_data):
json_data = json.loads(raw_data)
if "input_data" not in json_data.keys():
raise Exception("Request must contain a top level key named 'input_data'")
serving_input = json.dumps(json_data["input_data"])
data = infer_and_parse_json_input(serving_input, input_schema)
predictions = model.predict(data)
result = StringIO()
predictions_to_json(predictions, result)
return result.getvalue()
| [
"noreply@github.com"
] | noreply@github.com |
02b8e5dee5b57fda9c9099b42c6e685df0976663 | 0cc8c7cfeea7aa44436e4b09769033e7dbe75b93 | /scripts/vgg_face.py | 4b1205caf2566e59682e4c88a2436bacb628393b | [] | no_license | carlylou/IndividualProject-AffWild | 75ab5bd34796f8b5d763f6e41d491cf51f6db192 | f0c861fbc83c0d11c74b8ac3b31d90cb768fe3b4 | refs/heads/master | 2020-04-05T20:45:34.020609 | 2018-11-12T10:25:45 | 2018-11-12T10:25:45 | 157,194,121 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,316 | py | # import h5py
import tensorflow as tf
# import numpy as np
# import cv2
# import os
# import pdb
import copy
class VGGFace(object):
def __init__(self, batch_size):
self.params = None
self.batch_size = batch_size
self.vars = []
self.layers = []
self.names = [] #[line.strip() for line in file(os.path.join(os.path.dirname(os.path.realpath("__file__")), 'vggface/names.txt'))]
self.restore_names = []
# (1): nn.SpatialConvolutionMM(3 -> 64, 3x3, 1,1, 1,1)
self.layers.append(('conv','1',3,3,3,64))
# (3): nn.SpatialConvolutionMM(64 -> 64, 3x3, 1,1, 1,1)
self.layers.append(('conv','3',3,3,64,64))
# (5): nn.SpatialMaxPooling(2,2,2,2)
self.layers.append(('pool',2,2,2,2))
# (6): nn.SpatialConvolutionMM(64 -> 128, 3x3, 1,1, 1,1)
self.layers.append(('conv','6',3,3,64,128))
# (8): nn.SpatialConvolutionMM(128 -> 128, 3x3, 1,1, 1,1)
self.layers.append(('conv','8',3,3,128,128))
# (10): nn.SpatialMaxPooling(2,2,2,2)
self.layers.append(('pool',2,2,2,2))
# (11): nn.SpatialConvolutionMM(128 -> 256, 3x3, 1,1, 1,1)
self.layers.append(('conv','11',3,3,128,256))
# (13): nn.SpatialConvolutionMM(256 -> 256, 3x3, 1,1, 1,1)
self.layers.append(('conv','13',3,3,256,256))
# (15): nn.SpatialConvolutionMM(256 -> 256, 3x3, 1,1, 1,1)
self.layers.append(('conv','15',3,3,256,256))
# (17): nn.SpatialMaxPooling(2,2,2,2)
self.layers.append(('pool',2,2,2,2))
# (18): nn.SpatialConvolutionMM(256 -> 512, 3x3, 1,1, 1,1)
self.layers.append(('conv','18',3,3,256,512))
# (20): nn.SpatialConvolutionMM(512 -> 512, 3x3, 1,1, 1,1)
self.layers.append(('conv','20',3,3,512,512))
# (22): nn.SpatialConvolutionMM(512 -> 512, 3x3, 1,1, 1,1)
self.layers.append(('conv','22',3,3,512,512))
# (24): nn.SpatialMaxPooling(2,2,2,2)
self.layers.append(('pool',2,2,2,2))
# (25): nn.SpatialConvolutionMM(512 -> 512, 3x3, 1,1, 1,1)
self.layers.append(('conv','25',3,3,512,512))
# (27): nn.SpatialConvolutionMM(512 -> 512, 3x3, 1,1, 1,1)
self.layers.append(('conv','27',3,3,512,512))
# (29): nn.SpatialConvolutionMM(512 -> 512, 3x3, 1,1, 1,1)
self.layers.append(('conv','29',3,3,512,512))
# (31): nn.SpatialMaxPooling(2,2,2,2)
self.layers.append(('pool',2,2,2,2))
# (32): nn.View
# (33): nn.Linear(25088 -> 4096)
self.layers.append(('linear','33',4096,True))
# (34): nn.ReLU
# (35): nn.Dropout(0.500000)
# (36): nn.Linear(4096 -> 4096)
self.layers.append(('linear2','36',4096,True))
# (37): nn.ReLU
# (38): nn.Dropout(0.500000)
# (39): nn.Linear(4096 -> 2622)
self.layers.append(('linear3','39',2622,False))
def get_unique_name_(self, prefix):
id = sum(t.startswith(prefix) for t,_,_ in self.vars)+1
return '%s_%d'%(prefix, id)
def add_(self, name, var,layer):
self.vars.append((name, var,layer))
def get_output(self):
return self.vars[-1][1]
def make_var(self, name, shape,trainable):
return tf.get_variable(name, shape,trainable=trainable)
# return scope names
def get_restore_vars(self):
restore_vars = copy.deepcopy(self.restore_names)
# when match conv_1, get variables to restore will also return 'conv_10', 'conv_11', 'conv_12', 'conv_13'
remove = ['linear_1', 'linear2_1', 'linear3_1', 'conv_10', 'conv_11', 'conv_12', 'conv_13']
for item in remove:
restore_vars.remove(item)
return restore_vars
def get_face_fc0(self):
return self.vars[-4][1]
def get_face_fc1(self):
return self.vars[-3][1]
def setup(self, image_batch, trainable=False):
self.vars.append(('input', image_batch, ['input']))
for layer in self.layers:
name = self.get_unique_name_(layer[0])
self.restore_names.append(name)
if layer[0] == 'conv':
with tf.variable_scope(name) as scope:
h, w, c_i, c_o = layer[2], layer[3], layer[4], layer[5]
kernel = self.make_var('weights', shape=[h, w, c_i, c_o], trainable=trainable)
conv = tf.nn.conv2d(self.get_output(), kernel, [1] * 4, padding='SAME')
biases = self.make_var('biases', [c_o], trainable=trainable)
bias = tf.reshape(tf.nn.bias_add(conv, biases), conv.get_shape().as_list())
relu = tf.nn.relu(bias, name=scope.name)
self.add_(name, relu, layer)
elif layer[0] == 'pool':
size, size, stride, stride = layer[1], layer[2], layer[3], layer[4]
pool = tf.nn.max_pool(self.get_output(),
ksize=[1, size, size, 1],
strides=[1, stride, stride, 1],
padding='SAME',
name=name)
self.add_(name, pool, layer)
elif layer[0] == 'linear':
num_out = layer[2]
relu = layer[3]
with tf.variable_scope(name) as scope:
input = self.get_output()
input_shape = input.get_shape()
if input_shape.ndims == 4:
dim = 1
for d in input_shape[1:].as_list():
dim *= d
feed_in = tf.reshape(input, [self.batch_size, dim])
else:
feed_in, dim = (input, int(input_shape[-1]))
weights = self.make_var('weights', shape=[dim, num_out], trainable=True)
biases = self.make_var('biases', [num_out], trainable=True)
op = tf.nn.relu_layer if relu else tf.nn.xw_plus_b
fc = op(feed_in, weights, biases, name=scope.name)
########
drop = tf.nn.dropout(fc, 0.5)
########
self.add_(name, drop, layer)
elif layer[0] == 'linear2':
num_out = layer[2]
relu = layer[3]
with tf.variable_scope(name) as scope:
input = self.get_output()
input_shape = input.get_shape()
if input_shape.ndims == 4:
dim = 1
for d in input_shape[1:].as_list():
dim *= d
feed_in = tf.reshape(input, [self.batch_size, dim])
else:
feed_in, dim = (input, int(input_shape[-1]))
weights = self.make_var('weights', shape=[dim, num_out], trainable=True)
biases = self.make_var('biases', [num_out], trainable=True)
op = tf.nn.relu_layer if relu else tf.nn.xw_plus_b
fc = op(feed_in, weights, biases, name=scope.name)
########
# drop = tf.nn.dropout(fc,0.5)
########
self.add_(name, fc, layer)
elif layer[0] == 'linear3':
num_out = layer[2]
relu = layer[3]
with tf.variable_scope(name) as scope:
input = self.get_output()
input_shape = input.get_shape()
if input_shape.ndims == 4:
dim = 1
for d in input_shape[1:].as_list():
dim *= d
feed_in = tf.reshape(input, [self.batch_size, dim])
else:
feed_in, dim = (input, int(input_shape[-1]))
weights = self.make_var('weights', shape=[dim, num_out], trainable=True)
biases = self.make_var('biases', [num_out], trainable=True)
op = tf.nn.relu_layer if relu else tf.nn.xw_plus_b
fc = op(feed_in, weights, biases, name=scope.name)
self.add_(name, fc, layer)
| [
"564458176@qq.com"
] | 564458176@qq.com |
aae0e6098f5fffd6f5df5e9109899e0ddfcf5d9b | 5de3f612df0dbda712b39403dbafb0617e597651 | /devel/lib/python2.7/dist-packages/pal_control_msgs/__init__.py | e21ec5fdbefcfd4eaadd1be96174a29e086c69d8 | [] | no_license | AdriiTrujillo/tiago_public_ws | 1bd62d51c2eb694d07db83738f7bebd582d8126c | 6eaeabd1ec177df837b81fd9f42887318128766b | refs/heads/main | 2023-04-03T13:09:09.749190 | 2021-04-01T10:05:43 | 2021-04-01T10:05:43 | 350,026,041 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 116 | py | /home/adrii/tiago_public_ws/devel/.private/pal_control_msgs/lib/python2.7/dist-packages/pal_control_msgs/__init__.py | [
"adrii.trujillo@gmail.com"
] | adrii.trujillo@gmail.com |
f7d577610c90e9aa93d478ba5211244881ae4241 | 4711a51655cf8944039246d3cf28f5798af089fe | /coder/wsgi.py | 29f26bbb10285e85a1939e86c02aebaf634743b4 | [] | no_license | GauravTyagi67/myblog | 827756f8334b4e3df5b3fce2f598185d13551530 | 86288bb1b403206654752a1890f70cae4dc9d4c2 | refs/heads/main | 2023-04-20T02:08:23.752116 | 2021-05-11T03:08:12 | 2021-05-11T03:08:12 | 366,239,227 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 403 | py | """
WSGI config for coder project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'coder.settings')
application = get_wsgi_application()
| [
"noreply@github.com"
] | noreply@github.com |
d953ca400bd19d3dfe6e933f1f88124d428bbb14 | 4686605ad3c277a0776a653c6b3528db5313ec9c | /PY/PY1.py | 1b819831f1a87ac0b10df6ccab810f61671ef527 | [] | no_license | zhukongh123/- | 4f03c1f20fc5c369aa14d10cdb302bac08b850b9 | 6dfdd84b66fe82bd4b2113c6270d98ea1705401b | refs/heads/master | 2020-11-26T19:10:58.052438 | 2019-12-20T06:52:37 | 2019-12-20T06:52:37 | 229,181,784 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 65 | py | d = {"name":"小明","sex":"男","age":18}
d.clear()
print(d)
| [
"364144274@qq.com"
] | 364144274@qq.com |
3ec15a885991693045ca69757489420fd2440bc1 | ee22ec2076a79e8de3011377fe205bc87163ab9f | /src/algo-p5/0828/q27/player.py | 1631f0c162cd5940c1385e5e74a4e95c3ea58bec | [] | no_license | n18018/programming-term2 | 039a95c67372a38a34e2aa8c5975045a9fc731be | 86c455269eed312def529604e1ac3b00f476226c | refs/heads/master | 2020-03-22T08:59:29.545280 | 2018-08-29T07:57:37 | 2018-08-29T07:57:37 | 139,806,131 | 0 | 0 | null | 2018-07-05T06:42:11 | 2018-07-05T06:42:11 | null | UTF-8 | Python | false | false | 12,069 | py | import field_map
import sys
import random
from enemy import Enemy
class Player:
    """A sugoroku-style board-game player.

    Tracks position and HP on the field map, rolls the dice to move,
    battles enemies, and uses healing herbs.
    """
    def __init__(self, name):
        """
        Constructor.
        Parameters
        ----------
        name : str
            The player's name.
        Returns
        -------
        A new Player instance.
        """
        self.name = name
        self.cur_pos = 0        # current square on the board
        self.hp = 100           # current hit points
        self.max_hp = 100       # maximum hit points
        self.min_damage = 4     # minimum attack damage per hit
        self.max_damage = 7     # maximum attack damage per hit
        self.freq = 10          # 1-in-freq chance of a critical hit
        self.plant_nums = 10    # healing herbs in inventory
        self.exp = 0            # experience points (not used yet)
        self.level = 1          # player level (not used yet)
    def choose_action_in_field(self):
        """
        Prompt for and dispatch one action while on the field.
        1: roll the dice, 2: show status, 3: use a herb, 9: quit.
        Parameters
        ----------
        None
        Returns
        -------
        None
        """
        # Blank line for readability
        print()
        # "What will you do?"
        print("何をしますか?")
        # Show the command menu and wait for input
        cmd_num = input("1:サイコロを振る、2:現在の状態を確認する、3:薬草を使う、9:ゲームを終了する>> ")
        # Branch on the chosen command
        if cmd_num == "1":
            # Roll the dice and move
            self.move()
        elif cmd_num == "2":
            # Show the current status
            self.show_status()
        elif cmd_num == "3":
            # Use a healing herb
            self.use_plants()
        elif cmd_num == "9":
            # Quit the game
            self.quit_game()
    def move(self):
        """
        Move (includes rolling the dice).
        Parameters
        ----------
        None
        Returns
        -------
        None
        """
        # Roll the dice
        dice_num = field_map.shake_dice()
        # Advance by the number rolled
        self.go_forward(dice_num)
    def go_forward(self, cells):
        """
        Move forward and trigger the event on the destination square.
        Parameters
        ----------
        cells : int
            Number of squares to advance (negative values move backwards).
        Returns
        -------
        None
        """
        # Advance by the given number of squares
        self.cur_pos += cells
        # Show the current position
        print("現在位置は" + str(self.cur_pos) + "です。")
        # Look up the event for the square we landed on
        event_nm = field_map.get_event(self.cur_pos)
        if event_nm == "BattleVsZako":
            # Fight the low-level enemy "Surasura"
            zako = Enemy("スラスラ")
            self.battle(zako)
        elif event_nm == "GoMoreForward":
            # Advance 2 more squares
            self.go_more_forward(2)
        elif event_nm == "GoBack":
            # Go back 3 squares
            self.go_back(3)
        elif event_nm == "GoBackToStart":
            # Return to the starting square
            self.go_back_to_start()
        elif event_nm == "HealingLake":
            # Healing lake event: restore HP to full
            self.healed_in_lake()
        elif event_nm == "PoisonSwamp":
            # Poison swamp event: take damage
            self.poisoned_in_swamp()
    def go_more_forward(self, cells):
        """
        Advance additional squares beyond the dice roll.
        Parameters
        ----------
        cells : int
            Number of extra squares to advance.
        Returns
        -------
        None
        """
        print("イベント発生!" + str(cells) + "マスさらに進みます。")
        # Advance by the given number of squares
        self.go_forward(cells)
    def go_back(self, cells):
        """
        Move back the given number of squares.
        Parameters
        ----------
        cells : int
            Number of squares to move back.
        Returns
        -------
        None
        """
        print("イベント発生!" + str(cells) + "マス後ろに戻ります。")
        # Move backwards by negating the argument
        self.go_forward((cells * -1))
    def go_back_to_start(self):
        """
        Return all the way to the starting square.
        Parameters
        ----------
        None
        Returns
        -------
        None
        """
        print("イベント発生!振り出しに戻ってしまいます!")
        # Move back to position 0 by negating the current position
        self.go_forward((self.cur_pos * -1))
    def show_status(self):
        """
        Print the current status (position, HP, herb count).
        Parameters
        ----------
        None
        Returns
        -------
        None
        """
        # Show position and HP
        print(self.name + "の現在位置は" + str(self.cur_pos)
              + "、HPは" + str(self.hp) + "です。")
        # Also show how many herbs are held
        print("薬草を" + str(self.plant_nums) + "枚持っています。")
    def battle(self, enemy):
        """
        Battle an enemy until one side wins or the player flees.
        Parameters
        ----------
        enemy : Enemy
            The enemy object to fight.
        Returns
        -------
        None
        """
        # Announce the encounter
        print("イベント発生!" + enemy.name + "があらわれた!")
        # Keep fighting until the enemy is defeated
        while enemy.hp > 0:
            # Blank line for readability
            print()
            # Prompt for the next battle action
            print("どうする?")
            # Show the battle menu and wait for input
            cmd_num = input("1:攻撃する、3:薬草を使う、9:逃げる>> ")
            if cmd_num == "1":
                # Player attacks the enemy; break out of the loop on a kill
                if self.attack(enemy):
                    break
            elif cmd_num == "3":
                # Use a healing herb
                self.use_plants()
            elif cmd_num == "9":
                # Flee from the battle
                print(self.name + "は逃げ出した!")
                return
            # Enemy attacks the player; game over if the player dies
            if not enemy.attack(self):
                print(self.name + "はしんでしまった!世界は闇に包まれてしまった...")
                sys.exit()
        # Battle won
        print(self.name + "は" + enemy.name + "を倒した!")
    def attack(self, enemy):
        """
        Attack the enemy once.
        Parameters
        ----------
        enemy : Enemy
            The enemy object to attack.
        Returns
        -------
        bool
            True: the enemy was defeated. False: the enemy is still alive.
        """
        # Pick a random damage value in [min_damage, max_damage]
        damage = random.randint(self.min_damage, self.max_damage)
        is_critical = False  # whether this hit is a critical hit
        # With probability 1/self.freq, land a critical hit
        rand_num = random.randint(1, self.freq)
        if rand_num % self.freq == 0:
            is_critical = True
        # Announce the player's turn
        print(self.name + "のこうげき!")
        # A critical hit doubles the damage
        if is_critical:
            print("かいしんのいちげき!!")
            damage *= 2
        # Apply the damage to the enemy
        enemy.hp -= damage
        if enemy.hp > 0:
            print(enemy.name + "に" + str(damage) + "のダメージを与えた!"
                  + enemy.name + "のHPは" + str(enemy.hp) + "です。")
            return False
        else:
            print(enemy.name + "に" + str(damage) + "のダメージを与えた!"
                  + enemy.name + "のHPは0です。")
            return True
    def use_plants(self):
        """
        Use one healing herb (restores 30 HP, capped at max_hp).
        Parameters
        ----------
        None
        Returns
        -------
        None
        """
        # If no herbs are held, report it and return
        if self.plant_nums <= 0:
            print(self.name + "は薬草を持っていない")
            return
        # Announce herb use
        print(self.name + "は薬草を使った!")
        # Restore 30 HP
        self.hp += 30
        # Clamp HP at the maximum
        if self.hp > self.max_hp:
            self.hp = self.max_hp
        # Consume one herb
        self.plant_nums -= 1
        # Show the restored HP
        print(self.name + "のHPが" + str(self.hp) + "に回復した!")
    # Healing-lake event handler
    def healed_in_lake(self):
        """
        Fully restore HP at the healing lake.
        Parameters
        ----------
        None
        Returns
        -------
        None
        """
        # Announce the healing-lake event
        print("イベント発生!癒しの湖で身を清めます。")
        # Restore HP to the maximum
        self.hp = self.max_hp
        # Report the fully restored HP
        print(self.name, "のHPは全回復した!現在のHPは", self.hp, "です。")
    # Poison-swamp event handler
    def poisoned_in_swamp(self):
        """
        Take poison damage in the swamp (20 HP; death ends the game).
        Parameters
        ----------
        None
        Returns
        -------
        None
        """
        # Announce the poison-swamp event
        print("イベント発生!沼で毒に冒されました。")
        # Take 20 damage
        self.hp = self.hp - 20
        if self.hp > 0:
            # Still alive: report the damage and remaining HP
            print(self.name, "は20のダメージを受けた!現在のHPは", self.hp, "です。")
        else:
            # Dead: report the game-over message
            print(self.name, "は20のダメージを受けた!", self.name,
                  "はしんでしまった!世界は闇に包まれてしまった...")
            # Game over, terminate the program
            sys.exit()
    def quit_game(self):
        """
        Ask for confirmation and quit the game.
        Parameters
        ----------
        None
        Returns
        -------
        None
        """
        # Confirm whether to quit (progress is not saved)
        cmd_str = input("ゲームの状態はセーブされません。終了しますか?(y/n) ")
        # Exit if Y was entered
        if cmd_str.upper() == "Y":
            sys.exit()
# Main entry point: a short scripted demo battle.
if __name__ == '__main__':
    # Create the player object
    kevin = Player("ケビン")
    # Create the enemy object
    enemy = Enemy("スラスラ")
    # Kevin battles Surasura
    kevin.battle(enemy)
    # Show Kevin's status after the battle
    kevin.show_status()
| [
"n18018@std.it-college.ac.jp"
] | n18018@std.it-college.ac.jp |
e25fb293e8841b87c8979b159fe4daadf9eed51e | 8ed215ee731bc8c55eabdc66ee028a43771510bc | /tasks-deploy/rsa/check.py | 5bac71f193f848b84031ce3a62e0ff96d6fb6acd | [
"MIT"
] | permissive | irdkwmnsb/lkshl-ctf | c6c0b0ae58653d3d7c427073221043d2adea212c | e5c0200ddc8ba73df5f321b87b9763fb1bbaba57 | refs/heads/master | 2020-03-23T22:22:23.499985 | 2019-02-22T13:29:51 | 2019-02-22T13:29:51 | 142,172,055 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,868 | py | def check(attempt, context):
if attempt.answer == flags[attempt.participant.id % len(flags)]:
return Checked(True)
if attempt.answer in flags:
return CheckedPlagiarist(False, flags.index(attempt.answer))
return Checked(False)
flags = ['LKL{RSA_is_s0metimes_insecur3_3Udjwqg6}', 'LKL{RSA_is_s0metimes_insecur3_UibEbfRa}', 'LKL{RSA_is_s0metimes_insecur3_wGqZy5DF}', 'LKL{RSA_is_s0metimes_insecur3_2LYyyNWF}', 'LKL{RSA_is_s0metimes_insecur3_l9d809Zg}', 'LKL{RSA_is_s0metimes_insecur3_BneTxPca}', 'LKL{RSA_is_s0metimes_insecur3_NfEFCIRX}', 'LKL{RSA_is_s0metimes_insecur3_4WAEvVxt}', 'LKL{RSA_is_s0metimes_insecur3_wQ800lk0}', 'LKL{RSA_is_s0metimes_insecur3_HedQD1vE}', 'LKL{RSA_is_s0metimes_insecur3_pKXxALJn}', 'LKL{RSA_is_s0metimes_insecur3_YZhZvmqN}', 'LKL{RSA_is_s0metimes_insecur3_v1iaaHxu}', 'LKL{RSA_is_s0metimes_insecur3_fm0xHYvf}', 'LKL{RSA_is_s0metimes_insecur3_wKGk99KZ}', 'LKL{RSA_is_s0metimes_insecur3_AycXpexc}', 'LKL{RSA_is_s0metimes_insecur3_H27gGhFt}', 'LKL{RSA_is_s0metimes_insecur3_ipXKDpyl}', 'LKL{RSA_is_s0metimes_insecur3_bDVeeCSu}', 'LKL{RSA_is_s0metimes_insecur3_IOIowsHu}', 'LKL{RSA_is_s0metimes_insecur3_X1J51z2g}', 'LKL{RSA_is_s0metimes_insecur3_qwcBeb7f}', 'LKL{RSA_is_s0metimes_insecur3_BYvIBQl3}', 'LKL{RSA_is_s0metimes_insecur3_lWRmz5AJ}', 'LKL{RSA_is_s0metimes_insecur3_EI4quULK}', 'LKL{RSA_is_s0metimes_insecur3_sILihSt0}', 'LKL{RSA_is_s0metimes_insecur3_Jf1mS2A4}', 'LKL{RSA_is_s0metimes_insecur3_rEpoUHFc}', 'LKL{RSA_is_s0metimes_insecur3_3aOzjiDi}', 'LKL{RSA_is_s0metimes_insecur3_2X4LGivB}', 'LKL{RSA_is_s0metimes_insecur3_E3XpMQ4Z}', 'LKL{RSA_is_s0metimes_insecur3_JkmfbPhc}', 'LKL{RSA_is_s0metimes_insecur3_gSjumGpD}', 'LKL{RSA_is_s0metimes_insecur3_MBvtPPKA}', 'LKL{RSA_is_s0metimes_insecur3_WWn9Txw8}', 'LKL{RSA_is_s0metimes_insecur3_12kavBoH}', 'LKL{RSA_is_s0metimes_insecur3_vkw0O9rB}', 'LKL{RSA_is_s0metimes_insecur3_Remqp7Tc}', 'LKL{RSA_is_s0metimes_insecur3_cJpQlr6K}', 'LKL{RSA_is_s0metimes_insecur3_CnXN72KW}', 'LKL{RSA_is_s0metimes_insecur3_w8Fdsu7b}', 'LKL{RSA_is_s0metimes_insecur3_zwetRh2m}', 'LKL{RSA_is_s0metimes_insecur3_2XDisW1d}', 'LKL{RSA_is_s0metimes_insecur3_nI12YHMk}', 'LKL{RSA_is_s0metimes_insecur3_Zc7yKWN7}', 'LKL{RSA_is_s0metimes_insecur3_UM0NCS7b}', 
'LKL{RSA_is_s0metimes_insecur3_FvLHJZwH}', 'LKL{RSA_is_s0metimes_insecur3_jBkK1mgy}', 'LKL{RSA_is_s0metimes_insecur3_ah7tGRm3}', 'LKL{RSA_is_s0metimes_insecur3_V9x3rTk7}', 'LKL{RSA_is_s0metimes_insecur3_72Zr73Q0}', 'LKL{RSA_is_s0metimes_insecur3_MGXTz8Xk}', 'LKL{RSA_is_s0metimes_insecur3_GKCnGHrk}', 'LKL{RSA_is_s0metimes_insecur3_Ar9ok9d7}', 'LKL{RSA_is_s0metimes_insecur3_whpfREVI}', 'LKL{RSA_is_s0metimes_insecur3_UDBDalbH}', 'LKL{RSA_is_s0metimes_insecur3_U1FH7Cf1}', 'LKL{RSA_is_s0metimes_insecur3_KIaqedik}', 'LKL{RSA_is_s0metimes_insecur3_dqPmGn0z}', 'LKL{RSA_is_s0metimes_insecur3_bEusmfrG}', 'LKL{RSA_is_s0metimes_insecur3_wjgfHTeI}', 'LKL{RSA_is_s0metimes_insecur3_CLTG1Vhx}', 'LKL{RSA_is_s0metimes_insecur3_MRX7svAE}', 'LKL{RSA_is_s0metimes_insecur3_6TBCIJY6}', 'LKL{RSA_is_s0metimes_insecur3_kVxzzxLQ}', 'LKL{RSA_is_s0metimes_insecur3_Vkv2woLM}', 'LKL{RSA_is_s0metimes_insecur3_Bo8VUtVU}', 'LKL{RSA_is_s0metimes_insecur3_6GrvaoC1}', 'LKL{RSA_is_s0metimes_insecur3_YibIEvsP}', 'LKL{RSA_is_s0metimes_insecur3_ba9YkBff}', 'LKL{RSA_is_s0metimes_insecur3_x2B0KLjH}', 'LKL{RSA_is_s0metimes_insecur3_JiWBzSRv}', 'LKL{RSA_is_s0metimes_insecur3_QyLDwokQ}', 'LKL{RSA_is_s0metimes_insecur3_nZZ8tb0Z}', 'LKL{RSA_is_s0metimes_insecur3_CnHFcLbS}', 'LKL{RSA_is_s0metimes_insecur3_izNJOHO2}', 'LKL{RSA_is_s0metimes_insecur3_9ukX4Uxy}', 'LKL{RSA_is_s0metimes_insecur3_n0YiGB82}', 'LKL{RSA_is_s0metimes_insecur3_T5VYsfc5}', 'LKL{RSA_is_s0metimes_insecur3_UQ6KvIZB}', 'LKL{RSA_is_s0metimes_insecur3_mEIdKYee}', 'LKL{RSA_is_s0metimes_insecur3_I3rpSyie}', 'LKL{RSA_is_s0metimes_insecur3_Zi0ClOtB}', 'LKL{RSA_is_s0metimes_insecur3_JAVcK2UU}', 'LKL{RSA_is_s0metimes_insecur3_1Tx3Crkx}', 'LKL{RSA_is_s0metimes_insecur3_2FbkNKnk}', 'LKL{RSA_is_s0metimes_insecur3_YRhonqdT}', 'LKL{RSA_is_s0metimes_insecur3_gQkoA50I}', 'LKL{RSA_is_s0metimes_insecur3_axRX4qyw}', 'LKL{RSA_is_s0metimes_insecur3_IFCOj1V7}', 'LKL{RSA_is_s0metimes_insecur3_k4gHI5D8}', 'LKL{RSA_is_s0metimes_insecur3_zFThpVTM}', 
'LKL{RSA_is_s0metimes_insecur3_iYDJPaN7}', 'LKL{RSA_is_s0metimes_insecur3_awzaYVZK}', 'LKL{RSA_is_s0metimes_insecur3_aSYyVYud}', 'LKL{RSA_is_s0metimes_insecur3_CEzWlUdO}', 'LKL{RSA_is_s0metimes_insecur3_PSHlcp35}', 'LKL{RSA_is_s0metimes_insecur3_c2NhDpw8}', 'LKL{RSA_is_s0metimes_insecur3_0l3UwHlF}', 'LKL{RSA_is_s0metimes_insecur3_WQeRwaPM}', 'LKL{RSA_is_s0metimes_insecur3_4N7mzVAG}', 'LKL{RSA_is_s0metimes_insecur3_9nkGZpXA}', 'LKL{RSA_is_s0metimes_insecur3_FWB38tRG}', 'LKL{RSA_is_s0metimes_insecur3_TvZshh5M}', 'LKL{RSA_is_s0metimes_insecur3_odkN2hAr}', 'LKL{RSA_is_s0metimes_insecur3_diN6caou}', 'LKL{RSA_is_s0metimes_insecur3_rIrFBQB9}', 'LKL{RSA_is_s0metimes_insecur3_A2bAzEpF}', 'LKL{RSA_is_s0metimes_insecur3_39Uo9bYj}', 'LKL{RSA_is_s0metimes_insecur3_klWefkMl}', 'LKL{RSA_is_s0metimes_insecur3_iWWOVbZZ}', 'LKL{RSA_is_s0metimes_insecur3_ETJzDjaj}', 'LKL{RSA_is_s0metimes_insecur3_xSNZYFhJ}', 'LKL{RSA_is_s0metimes_insecur3_k9Xse4cs}', 'LKL{RSA_is_s0metimes_insecur3_EXZC95Kh}', 'LKL{RSA_is_s0metimes_insecur3_pmodkyrx}', 'LKL{RSA_is_s0metimes_insecur3_gwTzucl7}', 'LKL{RSA_is_s0metimes_insecur3_Hx1bvm1Z}', 'LKL{RSA_is_s0metimes_insecur3_7v8eLOwZ}', 'LKL{RSA_is_s0metimes_insecur3_DxbDPG5X}', 'LKL{RSA_is_s0metimes_insecur3_lobjFfcF}', 'LKL{RSA_is_s0metimes_insecur3_LLLmbRNO}', 'LKL{RSA_is_s0metimes_insecur3_kI6EKTOF}', 'LKL{RSA_is_s0metimes_insecur3_5HSnyTLH}', 'LKL{RSA_is_s0metimes_insecur3_M4ofvfwP}', 'LKL{RSA_is_s0metimes_insecur3_coLWPtfu}', 'LKL{RSA_is_s0metimes_insecur3_qxkvUSRP}', 'LKL{RSA_is_s0metimes_insecur3_2MmsVqUg}', 'LKL{RSA_is_s0metimes_insecur3_Yc52WnBP}', 'LKL{RSA_is_s0metimes_insecur3_yGt1uPiG}', 'LKL{RSA_is_s0metimes_insecur3_qFjrX5Ji}', 'LKL{RSA_is_s0metimes_insecur3_gSebOWUT}', 'LKL{RSA_is_s0metimes_insecur3_XARUHTcG}', 'LKL{RSA_is_s0metimes_insecur3_51QDUC7l}', 'LKL{RSA_is_s0metimes_insecur3_i6p6iiUH}', 'LKL{RSA_is_s0metimes_insecur3_kzUSlkav}', 'LKL{RSA_is_s0metimes_insecur3_2RBFT2GT}', 'LKL{RSA_is_s0metimes_insecur3_ByOtjihb}', 
'LKL{RSA_is_s0metimes_insecur3_cLKBCVZ2}', 'LKL{RSA_is_s0metimes_insecur3_Trq7k1wI}', 'LKL{RSA_is_s0metimes_insecur3_Q60qbGcZ}', 'LKL{RSA_is_s0metimes_insecur3_Fp37ejF6}', 'LKL{RSA_is_s0metimes_insecur3_tLBJ6Gix}', 'LKL{RSA_is_s0metimes_insecur3_U7tBKrpB}', 'LKL{RSA_is_s0metimes_insecur3_XDAt8LAu}', 'LKL{RSA_is_s0metimes_insecur3_m60Nw97g}', 'LKL{RSA_is_s0metimes_insecur3_krYk40zo}', 'LKL{RSA_is_s0metimes_insecur3_V3WWrrlx}', 'LKL{RSA_is_s0metimes_insecur3_KsybMcjy}', 'LKL{RSA_is_s0metimes_insecur3_yVWR00Sp}', 'LKL{RSA_is_s0metimes_insecur3_Rt1IFAr8}', 'LKL{RSA_is_s0metimes_insecur3_aHkXSnfe}', 'LKL{RSA_is_s0metimes_insecur3_zEp1mZc1}', 'LKL{RSA_is_s0metimes_insecur3_zv0ffkZ2}', 'LKL{RSA_is_s0metimes_insecur3_ueVY4ipK}', 'LKL{RSA_is_s0metimes_insecur3_ocDnu8u6}', 'LKL{RSA_is_s0metimes_insecur3_pPnTgD60}', 'LKL{RSA_is_s0metimes_insecur3_2rnwVTJ4}', 'LKL{RSA_is_s0metimes_insecur3_20ZEcGl8}', 'LKL{RSA_is_s0metimes_insecur3_fL9Ympb5}', 'LKL{RSA_is_s0metimes_insecur3_3GwYLaqg}', 'LKL{RSA_is_s0metimes_insecur3_qiXClm4E}', 'LKL{RSA_is_s0metimes_insecur3_d2en2vz6}', 'LKL{RSA_is_s0metimes_insecur3_SOLo31WB}', 'LKL{RSA_is_s0metimes_insecur3_OB9dtc4j}', 'LKL{RSA_is_s0metimes_insecur3_98FGOfT9}', 'LKL{RSA_is_s0metimes_insecur3_xM10cADQ}', 'LKL{RSA_is_s0metimes_insecur3_hpMKiswj}', 'LKL{RSA_is_s0metimes_insecur3_FTjpdffi}', 'LKL{RSA_is_s0metimes_insecur3_1iEMCbA4}', 'LKL{RSA_is_s0metimes_insecur3_yEH5gk0l}', 'LKL{RSA_is_s0metimes_insecur3_LhYemwow}', 'LKL{RSA_is_s0metimes_insecur3_PJBY7kTD}', 'LKL{RSA_is_s0metimes_insecur3_Y2RZ1YTf}', 'LKL{RSA_is_s0metimes_insecur3_FQPmnfg5}', 'LKL{RSA_is_s0metimes_insecur3_hNBb63ry}', 'LKL{RSA_is_s0metimes_insecur3_RJ8slmjb}', 'LKL{RSA_is_s0metimes_insecur3_xSodLxm0}', 'LKL{RSA_is_s0metimes_insecur3_HDxXhB9X}', 'LKL{RSA_is_s0metimes_insecur3_vPOiIRZA}', 'LKL{RSA_is_s0metimes_insecur3_mYdW9rli}', 'LKL{RSA_is_s0metimes_insecur3_B1gHPXjt}', 'LKL{RSA_is_s0metimes_insecur3_om7BTmLD}', 'LKL{RSA_is_s0metimes_insecur3_6z9ZUc5z}', 
'LKL{RSA_is_s0metimes_insecur3_RvxykO1G}', 'LKL{RSA_is_s0metimes_insecur3_k0Le2xyX}', 'LKL{RSA_is_s0metimes_insecur3_0GRj9QWU}', 'LKL{RSA_is_s0metimes_insecur3_23Kx2a9O}', 'LKL{RSA_is_s0metimes_insecur3_PSAiCs7Z}', 'LKL{RSA_is_s0metimes_insecur3_v6aG3j0B}', 'LKL{RSA_is_s0metimes_insecur3_xXxmsOuX}', 'LKL{RSA_is_s0metimes_insecur3_92Pe84C8}', 'LKL{RSA_is_s0metimes_insecur3_Dx0qMgaA}', 'LKL{RSA_is_s0metimes_insecur3_OaUGvuMU}', 'LKL{RSA_is_s0metimes_insecur3_c2zHPwlu}', 'LKL{RSA_is_s0metimes_insecur3_UJIh7nj1}', 'LKL{RSA_is_s0metimes_insecur3_fexW2IIJ}', 'LKL{RSA_is_s0metimes_insecur3_FxVr8Y7Q}', 'LKL{RSA_is_s0metimes_insecur3_Zgvph30I}', 'LKL{RSA_is_s0metimes_insecur3_8aezHJSp}'] | [
"supermax74.02@gmail.com"
] | supermax74.02@gmail.com |
406c110b30acb23f4d2b89fa97603e853e4b9c26 | 5d263af3a57e0eaa1dfc55df964e61ed74208bb2 | /feature_extraction/extract_features.py | 811abb12454c69f6c67627835c5d8386ede54ef6 | [] | no_license | chenyr0021/multimodal-human-action-recognotion | 1c5374c93050f56eb00f87d00aea400f0158bafb | bf69abb2355de83b53f652416f29bd832ced5afc | refs/heads/main | 2023-02-04T03:22:42.611616 | 2020-12-25T06:35:39 | 2020-12-25T06:35:39 | 318,051,286 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,361 | py | import os
# Make CUDA device IDs follow PCI bus order so the indices used below are stable.
os.environ["CUDA_DEVICE_ORDER"]="PCI_BUS_ID"
import sys
import argparse
# Command-line options for the feature extractor.
# NOTE(review): the parsed args are never used — the __main__ block below
# hard-codes its paths and the -gpu flag is ignored; confirm intent.
parser = argparse.ArgumentParser()
parser.add_argument('-load_model', type=str)
parser.add_argument('-root', type=str)
parser.add_argument('-gpu', type=str)
parser.add_argument('-save_dir', type=str)
args = parser.parse_args()
# All four GPUs are hard-coded regardless of the -gpu flag.
os.environ["CUDA_VISIBLE_DEVICES"]='0,1,2,3'
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.optim import lr_scheduler
from torch.autograd import Variable
import torchvision
from torchvision import datasets, transforms
import videotransforms
import numpy as np
from pytorch_i3d import InceptionI3d
from salads_dataset import Salads50_without_label
def run(root, load_model, save_dir, batch_size=1):
    """Extract I3D features for every video in the dataset and save .npy files.

    Args:
        root: dataset root directory passed to Salads50_without_label.
        load_model: path to the pretrained I3D weights (state dict).
        save_dir: directory where one feature array per video is written.
        batch_size: videos per batch (the name[0] indexing below assumes 1).
    """
    # setup dataset
    test_transforms = transforms.Compose([transforms.RandomCrop((224, 224)), transforms.ToTensor()])
    dataset = Salads50_without_label(root, test_transforms)
    dataloader = torch.utils.data.DataLoader(dataset, batch_size=batch_size, shuffle=False, num_workers=0)
    print('load model...')
    i3d = InceptionI3d(400, in_channels=3)
    i3d.load_state_dict(torch.load(load_model))
    i3d.cuda()
    i3d = nn.DataParallel(i3d, device_ids=[0,1,2,3])
    i3d.eval()  # Set model to evaluate mode
    # Iterate over data.
    print('processing data...')
    for inputs, name in dataloader:
        # get the inputs
        # if os.path.exists(os.path.join(save_dir, name[0]+'.npy')):
        #     # print(os.path.join(save_dir, name[0]+'.npy'), ' already exist.')
        #     # continue
        b,c,t,h,w = inputs.shape
        print(name[0], inputs.shape)
        features = []
        # Slide a 21-frame temporal window over the clip; each window
        # yields one feature column.
        # NOTE(review): assumes every video has t > 20 frames — confirm.
        for start in range(t-20):
            ip = Variable(torch.from_numpy(inputs.numpy()[:,:,start:start+21]).cuda())
            out = i3d.module.extract_features(ip).cpu()
            features.append(out.squeeze(0).detach().numpy())
        # Concatenate the per-window features along the time axis and save.
        np_feature = np.concatenate(features, axis=1)
        print(np_feature.shape)
        np.save(os.path.join(save_dir, name[0]), np_feature)
        print('save %s finished.' % os.path.join(save_dir, name[0]))
if __name__ == '__main__':
    # need to add argparse
    # NOTE(review): the CLI flags parsed above are ignored here; the dataset
    # root, weights path and output directory are hard-coded.
    run(root='/home/backup/data_cyr/assemble_ori', load_model='./models/rgb_imagenet.pt', save_dir='/home/backup/data_cyr/assemble/features_video')
| [
"chenyiran0021@163.com"
] | chenyiran0021@163.com |
834876b1059232ee24322d209800d83c0d91d521 | de7b80e949b8890e8beec5da711c33fa74a49f01 | /catnado/properties/choice_property.py | 679b7c73a5872660d58c99d697c6ee75e8c3c629 | [
"Apache-2.0"
] | permissive | tylertrussell/gae-catnado | 39a0d1a7931acbb09ab739d6536f1b475b367a5f | 91a73e9108bb724fb780cc8dcfca4da579313cb9 | refs/heads/master | 2020-03-17T20:24:25.942542 | 2018-07-25T07:02:42 | 2018-07-25T07:02:42 | 133,907,921 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,242 | py | from google.appengine.ext import db
class ChoiceProperty(db.IntegerProperty):
  """A property for efficiently storing choices made from a finite set.

  This works by mapping each choice to an integer. The choices must be hashable
  (so that they can be efficiently mapped back to their corresponding index).
  """
  def __init__(self, choices, make_choice_attrs=True, *args, **kwargs):
    """Constructor.
    Args:
      choices: A non-empty list of 2-tuples of the form (id, choice). id must be
        the int to store in the database. choice may be any hashable value.
      make_choice_attrs: If True, the uppercase version of each string choice is
        set as an attribute whose value is the choice's int representation.
    """
    super(ChoiceProperty, self).__init__(*args, **kwargs)
    self.index_to_choice = dict(choices)
    self.choice_to_index = dict((c, i) for i, c in self.index_to_choice.iteritems())
    if make_choice_attrs:
      for i, c in self.index_to_choice.iteritems():
        if isinstance(c, basestring):
          setattr(self, c.upper(), i)
  def get_choices(self):
    """Get a list of values which may be assigned to this property."""
    return self.choice_to_index.keys()
  def c2i(self, choice):
    """Convert a choice to its datastore (int) representation."""
    return self.choice_to_index[choice]
  def __get__(self, model_instance, model_class):
    if model_instance is None:
      return self
    index = super(ChoiceProperty, self).__get__(model_instance, model_class)
    # BUG FIX: an optional property with no stored value yields None here;
    # the original raised KeyError on index_to_choice[None].
    if index is None:
      return None
    return self.index_to_choice[index]
  def __set__(self, model_instance, value):
    if value is None:
      # Allow clearing an optional property; IntegerProperty validation
      # still rejects None when the property is required.
      super(ChoiceProperty, self).__set__(model_instance, None)
      return
    try:
      index = self.c2i(value)
    except KeyError:
      raise db.BadValueError('Property %s must be one of the allowed choices: %s' %
                             (self.name, self.get_choices()))
    super(ChoiceProperty, self).__set__(model_instance, index)
  def get_value_for_datastore(self, model_instance):
    """Use underlying int value for datastore."""
    return super(ChoiceProperty, self).__get__(model_instance, model_instance.__class__)
  def make_value_from_datastore(self, value):
    """Convert int from datastore to choice."""
    if value is None:
      return None
    return self.index_to_choice[value]
| [
"tigertrussell@gmail.com"
] | tigertrussell@gmail.com |
04cdfacc94cba4b6547b23c48613e764fff8eea7 | c04766334a0c9bec3583c707ac177aedc3247fbb | /example/report/test/SeoulCityDead.py | 3c3e2530b731292973656ffb451b1a24fb1bb2bb | [] | no_license | realwater20/city-seoul | 6abe870447cedcfc29315ebc2f28e6d878dd4cd5 | 8f889a2667de554c83e76492f08c47838198caee | refs/heads/master | 2023-04-07T23:12:30.598955 | 2021-04-21T06:00:05 | 2021-04-21T06:00:05 | 360,049,052 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,982 | py | # -*- coding: utf-8 -*-
# 서울시 월별 연간 사망자 수 집계
import numpy as np
import matplotlib.pyplot as plt
from operator import eq
import csv
def analyzeDie():
    """Plot Seoul's monthly death counts for 2010-2016 as grouped bar charts.

    Reads (year-month, count) rows from SeoulDeadReport.csv, groups the
    counts into one row per year, and draws one colored bar series per year
    across the 12 months.
    """
    # Read the raw CSV rows.
    # NOTE(review): assumes the file has no header row — confirm.
    pieces = []
    datafile = '.\\csv\\SeoulDeadReport.csv'
    with open(datafile, 'rt') as f:
        for row in csv.reader(f, delimiter=','):
            pieces.append(row)
    # Build the per-year table: dieArray[y] holds the monthly counts of
    # year 2010+y in January..December (file) order.
    bf_date = ''
    dieCol = 0
    dieArray = [[] for _ in range(7)]
    for date, dieCnt in pieces:
        if bf_date == '':
            bf_date = date[:4]
        elif bf_date != date[:4]:
            # A new year begins: move to the next row.
            bf_date = date[:4]
            dieCol += 1
        # BUG FIX: the original inserted every count at index 0 (the row
        # cursor was never advanced), which reversed the month order within
        # each year.  Appending preserves file order.  The count is also
        # converted to int so matplotlib plots it numerically rather than
        # as a categorical string.
        dieArray[dieCol].append(int(dieCnt))
    month = ['1','2','3','4','5','6','7','8','9','10','11','12']
    year = ['2010', '2011', '2012', '2013', '2014', '2015', '2016']
    color = ['b','g','r','c','m','y','k']
    n_groups = 12  # number of x-axis groups (one per month)
    index = np.arange(n_groups)
    bar_width = 0.1  # width of a single bar
    opacity = 0.4
    error_config = {'ecolor': '0.3'}
    # Draw one horizontally offset bar series per year.
    width_g = 0
    for cnt, yearDieArray in enumerate(dieArray):
        plt.bar(index + width_g - 0.2, yearDieArray, bar_width,
                alpha=opacity,
                color=color[cnt],
                error_kw=error_config,
                label=year[cnt],
                align='edge')
        width_g += bar_width
    plt.xlabel('Year')   # x-axis title
    plt.ylabel('Count')  # y-axis title
    plt.title('Analyze Die Graph')  # main title
    plt.xticks(index + bar_width, month)
    plt.legend()
    plt.tight_layout()
    plt.show()


if __name__ == '__main__':
    analyzeDie()
"realwater@staby.co.kr"
] | realwater@staby.co.kr |
de173d2bb760fbf4bd04e8b5784cb2d50c4a74b0 | f4af33b9a46effbd6cbcd84eedbc8992d3f3a5ce | /unit4/sps_function.py | 413e089cb9d718d71765a9b5c49c594758e39c3d | [] | no_license | ajdt/udacity_cs212 | d7fac354e8cc4ee54674cf40baf605f47f758bbf | bc9225ba7e04b7d219fed387a045dfec09c9bbcf | refs/heads/master | 2020-11-30T01:44:36.911165 | 2016-09-17T17:22:27 | 2016-09-17T17:22:27 | 68,466,208 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,778 | py | # -----------------
# User Instructions
#
# Write a function, shortest_path_search, that generalizes the search algorithm
# that we have been using. This function should have three inputs, a start state,
# a successors function, and an is_goal function.
#
# You can use the solution to mc_problem as a template for constructing your
# shortest_path_search. You can also see the example is_goal and successors
# functions for a simple test problem below.
def shortest_path_search(start, successors, is_goal):
    """Breadth-first search for the shortest path from start to a goal state.

    Returns a path of the form [state, action, state, ...], or Fail when no
    goal state is reachable.
    """
    if is_goal(start):
        return [start]
    explored = set()
    frontier = [[start]]
    while frontier:
        # Always expand a shortest candidate path first.
        frontier.sort(key=len)
        path = frontier.pop(0)
        current = path[-1]
        if is_goal(current):
            return path
        for nxt, action in successors(current).items():
            if nxt in explored:
                continue
            explored.add(nxt)
            frontier.append(path + [action, nxt])
    return Fail
def mc_problem1(start=(3, 3, 1, 0, 0, 0), goal=None):
    """Solve the missionaries and cannibals problem by breadth-first search.

    State is 6 ints (M1, C1, B1, M2, C2, B2): missionaries, cannibals and
    boats on the start (1) and far (2) banks.  Returns a path
    [state, action, state, ...] from start to goal, or Fail if none exists.
    When goal is None it defaults to everyone moved to the far bank.
    """
    goal = goal if goal is not None else (0, 0, 0) + start[:3]
    if start == goal:
        return [start]
    explored = set()      # states already generated
    frontier = [[start]]  # paths awaiting expansion, oldest first
    while frontier:
        path = frontier.pop(0)
        for state, action in csuccessors(path[-1]).items():
            if state in explored:
                continue
            explored.add(state)
            extended = path + [action, state]
            if state == goal:
                return extended
            frontier.append(extended)
    return Fail
Fail = []
def csuccessors(state):
    """Map each reachable successor of this state to the action reaching it.

    A state where cannibals outnumber missionaries on either bank (with at
    least one missionary present) results in dining and therefore has no
    successors.
    """
    M1, C1, B1, M2, C2, B2 = state
    # No successors once the cannibals can dine.
    if C1 > M1 > 0 or C2 > M2 > 0:
        return {}
    result = {}
    if B1 > 0:
        # Boat crossings from bank 1 to bank 2.
        for delta, passengers in deltas.items():
            result[sub(state, delta)] = passengers + '->'
    if B2 > 0:
        # Boat crossings from bank 2 back to bank 1.
        for delta, passengers in deltas.items():
            result[add(state, delta)] = '<-' + passengers
    return result
def add(X, Y):
    """Componentwise sum of two equal-length vectors, as a tuple."""
    return tuple(a + b for a, b in zip(X, Y))
def sub(X, Y):
    """Componentwise difference X - Y of two equal-length vectors, as a tuple."""
    return tuple(a - b for a, b in zip(X, Y))
# Boat-crossing moves: each key is the change applied to the state vector
# (M1, C1, B1, M2, C2, B2); the value names the passengers carried
# (M = missionary, C = cannibal).
deltas = {(2, 0, 1, -2, 0, -1): 'MM',
          (0, 2, 1, 0, -2, -1): 'CC',
          (1, 1, 1, -1, -1, -1): 'MC',
          (1, 0, 1, -1, 0, -1): 'M',
          (0, 1, 1, 0, -1, -1): 'C'}
Fail = []
# --------------
# Example problem
#
# Let's say the states in an optimization problem are given by integers.
# From a state, i, the only possible successors are i+1 and i-1. Given
# a starting integer, find the shortest path to the integer 8.
#
# This is an overly simple example of when we can use the
# shortest_path_search function. We just need to define the appropriate
# is_goal and successors functions.
def is_goal(state):
    """Goal test for the toy problem: the goal state is the integer 8."""
    return state == 8
def successors(state):
    """Successors of an integer state: one step up ('->') or down ('<-')."""
    return {state + 1: '->', state - 1: '<-'}
#test
assert shortest_path_search(5, successors, is_goal) == [5, '->', 6, '->', 7, '->', 8]
| [
"ajdt@uw.edu"
] | ajdt@uw.edu |
4c198afdf441b3b85b7630151015f6fc947c91ca | 5d423684f7db6dd3f528e0ccc27ab41d6dfca9bd | /seniors/admin.py | ec0914b79716d1cc384dc3ee739cb1def581bc0e | [] | no_license | tnq/grogosite | e7459080188252c169c5bb71fbd183f06a2fe293 | c528826967aba6240a48f344a9a579c442695ddb | refs/heads/master | 2021-01-02T08:56:20.147735 | 2018-05-07T22:49:11 | 2018-05-07T23:04:11 | 1,848,585 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,137 | py | # -*- coding: utf-8 -*-
import codecs
import csv
from collections import defaultdict
from StringIO import StringIO
from zipfile import ZipFile
from django.contrib import admin
from django.core.paginator import Paginator
from django.http import HttpResponse, HttpResponseRedirect
from django.contrib.contenttypes.models import ContentType
from seniors.models import Senior, Activity
import re
class ActivityInline(admin.TabularInline):
    """Tabular inline so a Senior's activities can be edited on its admin page."""
    model = Activity
    # Show one blank extra row for adding a new activity.
    extra = 1
majors = {
"Mechanical Engineering": "2",
"Physics": "8",
"Electrical Engineering": "6-1",
"Computer Science": "6-3",
"Chemical Engineering": "10",
"Management": "15",
"Political Science": "17",
"Brain Cognitive Sciences": "9",
"Civil Engineering": "1",
"Chemistry": "5",
"Biology": "7",
"Music": "21M",
"Aerospace Engineering": "16",
"History": "21H",
"Writing": "21W",
"Nuclear Engineering": "22",
"Philosophy": "24"
}
## {{{ http://code.activestate.com/recipes/577305/ (r1)
states = {
'AK': 'Alaska',
'AL': 'Alabama',
'AR': 'Arkansas',
'AS': 'American Samoa',
'AZ': 'Arizona',
'CA': 'California',
'CO': 'Colorado',
'CT': 'Connecticut',
'DC': 'District of Columbia',
'DE': 'Delaware',
'FL': 'Florida',
'GA': 'Georgia',
'GU': 'Guam',
'HI': 'Hawaii',
'IA': 'Iowa',
'ID': 'Idaho',
'IL': 'Illinois',
'IN': 'Indiana',
'KS': 'Kansas',
'KY': 'Kentucky',
'LA': 'Louisiana',
'MA': 'Massachusetts',
'MD': 'Maryland',
'ME': 'Maine',
'MI': 'Michigan',
'MN': 'Minnesota',
'MO': 'Missouri',
'MP': 'Northern Mariana Islands',
'MS': 'Mississippi',
'MT': 'Montana',
'NA': 'National',
'NC': 'North Carolina',
'ND': 'North Dakota',
'NE': 'Nebraska',
'NH': 'New Hampshire',
'NJ': 'New Jersey',
'NM': 'New Mexico',
'NV': 'Nevada',
'NY': 'New York',
'OH': 'Ohio',
'OK': 'Oklahoma',
'OR': 'Oregon',
'PA': 'Pennsylvania',
'PR': 'Puerto Rico',
'RI': 'Rhode Island',
'SC': 'South Carolina',
'SD': 'South Dakota',
'TN': 'Tennessee',
'TX': 'Texas',
'UT': 'Utah',
'VA': 'Virginia',
'VI': 'Virgin Islands',
'VT': 'Vermont',
'WA': 'Washington',
'WI': 'Wisconsin',
'WV': 'West Virginia',
'WY': 'Wyoming'
}
## end of http://code.activestate.com/recipes/577305/ }}}
# Invert the states map: full state name -> two-letter abbreviation.
state_abbrs = {}
for abbr in states.keys():
    state_abbrs[states[abbr]] = abbr
lg_expansions = [x.split("\t", 2) for x in
"""ADPhi Alpha Delta Phi
AEP Alpha Epsilon Pi
AXO Alpha Chi Omega
B-Entry MacGregor B-Entry
Annex, McCormick McCormick Annex
Baker House Baker
Beast East Campus 2E
""".splitlines()]
greek_letters = {
"ALPHA" : u"\u0391",
"BETA" : u"\u0392",
"GAMMA" : u"\u0393",
"DELTA" : u"\u0394",
"EPSILON" : u"\u0395",
"ZETA" : u"\u0396",
"ETA" : u"\u0397",
"THETA" : u"\u0398",
"IOTA" : u"\u0399",
"KAPPA" : u"\u039A",
"LAMBDA" : u"\u039B",
"MU" : u"\u039C",
"NU" : u"\u039D",
"XI" : u"\u039E",
"OMICRON" : u"\u039F",
"PI" : u"\u03A0",
"RHO" : u"\u03A1",
"SIGMA" : u"\u03A3",
"TAU" : u"\u03A4",
"UPSILON" : u"\u03A5",
"PHI" : u"\u03A6",
"CHI" : u"\u03A7",
"PSI" : u"\u03A8",
"OMEGA" : u"\u03A9",
}
def format_lg(lg):
    """Render a living-group name as InDesign tagged text.

    Runs of words that are Greek-letter names (e.g. "Alpha Delta Phi") are
    converted to actual Greek characters and wrapped in the
    "Senior Info Greek" character style; all other words pass through with
    single-space separation.
    """
    output = ""
    in_greek = False
    # Idiom fix: the original used enumerate() but never used the index.
    for word in lg.split():
        if word.upper() in greek_letters:
            if not in_greek:
                # Open the Greek character style at the start of a run.
                output += "<CharStyle:Senior Info Greek>"
                in_greek = True
            output += greek_letters[word.upper()]
        else:
            if in_greek:
                # Close the Greek run before resuming plain text.
                output += "<CharStyle:> "
                in_greek = False
            output += word + " "
    if in_greek:
        output += "<CharStyle:>"
    return output.strip()
def format_major(major):
    """Normalize a free-text major into MIT course-number notation (e.g. "6-3")."""
    major = major.upper().strip()
    # Drop connective words and separators used between double majors.
    major = major.replace("AND", "")
    major = major.replace("COURSE", "")
    major = major.replace(" - ", " / ")
    # Replace spelled-out department names with their course numbers.
    for one, two in majors.iteritems():
        major = major.replace(one.upper(), two)
    # Collapse "X <junk> Y" double majors into "X / Y".
    major = re.sub(ur'^([0-9A-Z–-]+)[^0-9A-Z–-]+([0-9A-Z–-]+)$', r'\1 / \2', major)
    # Join forms like "6-A" into "6A".
    major = re.sub(r'([0-9]+)-([A-Z]+)', r'\1\2', major)
    # Typography: plain hyphen -> en dash.
    major = major.replace("-", u"\u2013")
    major = major.strip()
    return major
def format_state(state):
    """Normalize a US state to its two-letter abbreviation.

    An abbreviation in any case is first expanded to the full name, then any
    full name is collapsed to the canonical abbreviation; entries that are
    not US states (e.g. foreign countries) pass through unchanged.
    """
    state = state.strip()
    # Abbreviation -> full name (also canonicalizes the abbreviation's case).
    if state.upper() in states.keys():
        state = states[state.upper()]
    # Full name -> abbreviation.
    if state in state_abbrs.keys():
        state = state_abbrs[state]
    return state
def format_name(name):
    """Add a period after a bare middle initial and trim surrounding whitespace."""
    with_initial = re.sub(r' ([A-Z]) ', r' \1. ', name)
    return with_initial.strip()
def format_years(years):
    """Replace comma separators in a year list with single spaces and trim."""
    spaced = re.sub(r',\s*', r' ', years)
    return spaced.strip()
def format_quote(quote):
    """Strip one pair of surrounding double or single quotes, if present."""
    unquoted = re.sub(r'^"(.*)"$', r'\1', quote)
    unquoted = re.sub(r"^'(.*)'$", r"\1", unquoted)
    return unquoted
def format_author(author):
    """Normalize a quote attribution.

    Strips surrounding quotes and a leading dash, rewrites "Name, Work" as
    "Name (Work)", and drops quotation marks left inside the parentheses.
    """
    cleaned = re.sub(r'^"(.*)"$', r'\1', author)
    cleaned = re.sub(r"^'(.*)'$", r"\1", cleaned)
    cleaned = re.sub(r'^-', r'', cleaned)
    # "Name, Work" -> "Name (Work)" unless the part after the comma starts
    # with a digit (e.g. a year) or contains another comma.
    cleaned = re.sub(r'^([^,]*?),\s*([^0-9,][^,]*?)$', r'\1 (\2)', cleaned)
    cleaned = re.sub(r'\("(.+)"\)', r'(\1)', cleaned)
    return cleaned.strip()
def fix_seniors(tnq_year, func, attr=None, get=None, set=None):
if not get:
get = lambda senior: getattr(senior, attr)
if not set:
set = lambda senior, value: setattr(senior, attr, value)
queryset = Senior.objects.filter(tnq_year=2012).order_by("sort_letter")
pages = Paginator(queryset, 30)
def do_senior(senior):
try:
val = get(senior)
if val:
new_val = func(val)
if new_val != val:
print "%s\t%s\t%s" % (val, new_val, senior.name)
return [(senior, new_val)]
except IndexError:
pass
return []
for i in range(pages.num_pages):
seniors = list(pages.page(i+1).object_list)
todo = []
for senior in seniors:
todo.extend(do_senior(senior))
if not todo:
continue
if raw_input("Okay [yN]? ").lower() == "y":
for senior, new_val in todo:
set(senior, new_val)
senior.save()
else:
for senior in seniors:
change = do_senior(senior)
if change:
new_val = change[0][1]
if raw_input("Okay [yN]? ").lower() == "y":
set(senior, new_val)
senior.save()
def _sort_seniors(queryset):
    """Sort seniors with photos by (last name, first name) using ICU collation.

    Uses an es_ES collator so accented characters sort naturally.
    """
    import PyICU
    collator = PyICU.Collator.createInstance(PyICU.Locale("es_ES"))
    # Only seniors with a portrait are included in the layout.
    queryset = queryset.exclude(image_path=None)
    sorted_seniors = list(queryset)
    # First word of the name is treated as the first name.
    sort_first_name = lambda _: _.name.split()[0].strip()
    # Last name: the last word whose initial matches the senior's sort_letter.
    # NOTE(review): raises IndexError when no word starts with sort_letter —
    # callers appear to rely on sort_letter always matching; confirm.
    sort_last_name = lambda _: [w for w in _.name.split() if w[0].lower() == _.sort_letter.lower()][-1].lower().strip()
    sorted_seniors.sort(key=lambda _: sort_last_name(_)+" "+sort_first_name(_), cmp=collator.compare)
    return sorted_seniors
class SeniorAdmin(admin.ModelAdmin):
    """Admin for Senior records with bulk export actions used for yearbook
    production: Adobe Tagged Text (for InDesign import) and CSV."""
    inlines = [ ActivityInline, ]
    search_fields = ('name', 'kerberos',)
    list_display = ('name', 'kerberos', 'sort_letter',)
    list_filter = ('tnq_year',)
    fieldsets = [
        ('Biographical Information', {'fields':['name', 'sort_letter', 'name_comments', 'home_town', 'home_state_or_country', 'image_path',]}),
        ('MIT Information', {'fields':['tnq_year', 'kerberos', 'major', 'minor', 'lg']}),
        ('Quote', {'fields':['quote', 'quote_author']}),
    ]
    actions = ['export_as_csv', 'export_as_tagged_text', ]
    def export_as_tagged_text(modeladmin, request, queryset):
        """
        Export senior information as a series of Adobe Tagged Text files inside
        a wrapper zip file, suitable for import into an Indesign document.
        """
        # The zip archive is streamed straight into the HTTP response.
        # NOTE: mimetype= is the pre-Django-1.7 spelling of content_type.
        response = HttpResponse(mimetype='application/zip')
        response['Content-Disposition'] = 'attachment; filename=seniors.zip'
        zip = ZipFile(response, 'w')
        # Page layout: 8 seniors per page, laid out as two columns of 4.
        SENIORS_PER_PAGE = 8
        SENIORS_PER_ROW = 4
        # Separators used inside a senior's info paragraph.
        BULLET = u" · "
        SLASHES = u" // "
        DASH = u" – "
        # Pages holding fewer seniors -- presumably pages that share space
        # with other book content; verify against the InDesign document.
        SPECIAL_PAGES = defaultdict(lambda: SENIORS_PER_PAGE)
        SPECIAL_PAGES.update({11:4,
                              28:4,
                              49:4,
                              68:4})
        sorted_seniors = _sort_seniors(queryset)
        # Split the sorted seniors into pages, honoring SPECIAL_PAGES sizes.
        pages = []
        unpaginated_seniors = list(sorted_seniors)
        page = 0
        while unpaginated_seniors:
            on_page = SPECIAL_PAGES[page]
            this_page, unpaginated_seniors = unpaginated_seniors[:on_page], unpaginated_seniors[on_page:]
            pages.append(this_page)
            page += 1
        def sanitize(str):
            # Escape angle brackets, which delimit Tagged Text markup.
            return str.replace(r"<", r"\<").replace(r">", r"\>")
        def format_senior(senior):
            # Build one senior's Tagged Text info paragraph; None yields an
            # empty placeholder paragraph for padded page slots.
            if not senior:
                return "<ParaStyle:Senior Info Text>"
            else:
                senior_string = u"<ParaStyle:Senior Info Text>"
                senior_string += senior.kerberos
                senior_string += BULLET
                senior_string += senior.major
                if senior.minor:
                    senior_string += ", "+senior.minor
                senior_string += SLASHES
                senior_string += senior.home_town + ", " + format_state(senior.home_state_or_country)
                if senior.lg.strip():
                    senior_string += BULLET
                    senior_string += format_lg(senior.lg)
                activities = Activity.objects.filter(senior = senior)
                if activities:
                    senior_string += SLASHES
                    for i, activity in enumerate(activities):
                        if i:
                            senior_string += BULLET
                        senior_string += activity.title
                        # Activity years are typeset as superscript.
                        senior_string += " <cPosition:Superscript>"
                        senior_string += activity.years
                        senior_string += "<cPosition:>"
                        if activity.offices:
                            senior_string += " (" + activity.offices + ")"
                if senior.quote:
                    senior_string += SLASHES
                    # Wrap the quote in typographic double quotes.
                    senior_string += u'\u201C' + format_quote(sanitize(senior.quote)) + u'\u201D'
                    if senior.quote_author:
                        senior_string += DASH
                        senior_string += sanitize(senior.quote_author)
                return senior_string
        for i in range(len(pages)):
            seniors = pages[i]
            if len(seniors) < SENIORS_PER_PAGE:
                # Pad a short (typically final) page with None placeholders
                # so it still has two full columns; the empty slots are put
                # toward the outside edge of the spread.
                half_num = int(len(seniors)/2.0 + 0.5)
                if i % 2 == 0: #On a left-hand page
                    seniors = [None]*(SENIORS_PER_ROW-half_num) \
                        + seniors[:half_num]\
                        +[None]*(SENIORS_PER_PAGE-len(seniors)-(SENIORS_PER_ROW-half_num))\
                        + seniors[half_num:]
                else:
                    seniors = seniors[:half_num]\
                        +[None]*(SENIORS_PER_ROW-half_num)\
                        +seniors[half_num:]\
                        +[None]*(SENIORS_PER_PAGE-len(seniors)-(SENIORS_PER_ROW-half_num))
            images = ""
            page_string = u"""<UNICODE-MAC>
<Version:7><FeatureSet:InDesign-Roman>"""
            # First pass: name paragraphs (and, in parallel, the image list).
            for senior in seniors:
                if senior:
                    page_string += "<ParaStyle:Senior Name>%s<cNextXChars:Box>\n" % format_name(senior.name)
                    images += senior.image_path+"\n"
                else:
                    page_string += "<ParaStyle:Senior Name><cNextXChars:Box>\n"
                    images += "\n"
            # Second pass: info paragraphs, column by column (slot j is the
            # top row, slot j+SENIORS_PER_ROW the bottom row of column j).
            for j in range(SENIORS_PER_ROW):
                page_string += format_senior(seniors[j])
                page_string += "\n"
                page_string += format_senior(seniors[j+SENIORS_PER_ROW])
                page_string += "<cNextXChars:Column>\n"
            # Tagged Text import expects UTF-16-LE with a byte-order mark.
            zip.writestr("page%02d.txt" % i, codecs.BOM_UTF16_LE + page_string.encode("utf_16_le"))
            zip.writestr("images%02d.txt" % i, images)
        zip.close()
        return response
    export_as_tagged_text.short_description = "Export selected seniors to Adobe Tagged Text"
    def export_as_csv(modeladmin, request, queryset):
        """
        Export senior information in CSV format.
        """
        # NOTE: mimetype= is the pre-Django-1.7 spelling of content_type.
        response = HttpResponse(mimetype='text/csv')
        response['Content-Disposition'] = 'attachment; filename=seniors.csv'
        sorted_seniors = _sort_seniors(queryset)
        writer = csv.writer(response,)
        # Header row: up to five activities are flattened into columns.
        writer.writerow(['name', 'firstname', 'lastname', 'comments',
                         'kerberos', 'major', 'minor', 'hometown',
                         'homeState', 'lg', 'quote', 'author',
                         'activity1', 'years1', 'offices1',
                         'activity2', 'years2', 'offices2',
                         'activity3', 'years3', 'offices3',
                         'activity4', 'years4', 'offices4',
                         'activity5', 'years5', 'offices5', ])
        for senior in sorted_seniors:
            # Values are UTF-8 encoded byte strings (Python 2 csv module).
            this_row = [format_name(senior.name).encode('utf8'),
                        senior.name.strip().split(" ")[0].encode('utf8'),
                        senior.name.strip().split(" ")[-1].encode('utf8'),
                        senior.name_comments.encode('utf8'),
                        senior.kerberos.encode('utf8'),
                        format_major(senior.major).encode('utf8'),
                        senior.minor.encode('utf8'),
                        senior.home_town.encode('utf8'),
                        senior.home_state_or_country.encode('utf8'),
                        senior.lg.encode('utf8'),
                        senior.quote.encode('utf8'),
                        senior.quote_author.encode('utf8')]
            activities = Activity.objects.filter(senior = senior)
            for activity in activities:
                this_row.append(activity.title.encode('utf8'))
                this_row.append(format_years(activity.years).encode('utf8'))
                this_row.append(activity.offices.encode('utf8'))
            writer.writerow(this_row)
        return response
    export_as_csv.short_description = "Export selected seniors to CSV"
class ActivityAdmin(admin.ModelAdmin):
    """Admin for Activity records; the changelist shows each activity next
    to the senior it belongs to."""
    list_display = ('title', 'senior')
# Register both models with their customized ModelAdmin classes.
admin.site.register(Senior, SeniorAdmin)
admin.site.register(Activity, ActivityAdmin)
| [
"nwiltsie@mit.edu"
] | nwiltsie@mit.edu |
6cbf9974caf542980afdcb04dd20da0afa523385 | a835f4daa719e0060d5f0c9def9b51ff319ea17d | /MyEDmodules/HFraddamAnal/python/hfraddamanal_cfi.py | 493225a7c22025bf8d63b76188548455508b1635 | [] | no_license | pdudero/usercode | 8e2582df407aa81e1d674c5adb498e5268f54aa7 | e53c110632ef046e0944697611d727e1f8841510 | refs/heads/master | 2021-01-01T06:28:25.007997 | 2018-05-04T05:32:32 | 2018-05-04T05:32:32 | 11,696,730 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,654 | py | import FWCore.ParameterSet.Config as cms
# Configuration for the HFraddamAnal EDAnalyzer ("raddam" presumably =
# radiation damage studies of the HF calorimeter -- confirm in the C++).
hfraddam = cms.EDAnalyzer('HFraddamAnal',
    # Per-event input collections consumed by the analyzer.
    eventDataPset = cms.untracked.PSet(
        fedRawDataLabel = cms.untracked.InputTag("source"),
        tbTrigDataLabel = cms.untracked.InputTag("tbunpack"),
        laserDigiLabel = cms.untracked.InputTag("hcalLaserReco"),
        hfDigiLabel = cms.untracked.InputTag("hcalDigis"),
        hcalibDigiLabel = cms.untracked.InputTag("hcalDigis"),
        verbose = cms.untracked.bool(False)
    ),
    # TDC timing selection: a window of +/-TDCCutWindow around
    # TDCCutCenter; the 9999 ceilings look like "disabled" sentinels --
    # verify against HFraddamAnal.
    TDCpars = cms.untracked.PSet(
        TDCCutCenter = cms.untracked.double(1075),
        TDCCutWindow = cms.untracked.double(25),
        CorrectedTimeModCeiling = cms.untracked.int32(9999),
        TimeModCeiling = cms.untracked.int32(9999)
    ),
    # Hit amplitude cuts, expressed in fC when ampCutsInfC is True.
    ampCutsInfC = cms.bool(True),
    minHit_GeVorfC = cms.double(0),
    maxHit_GeVorfC = cms.double(9e99),
    doPerChannel = cms.bool(True),
    doTree = cms.untracked.bool(True),
    # Flat list of channel triplets -- presumably (ieta, iphi, depth);
    # TODO confirm against the analyzer's channel parsing.
    hfraddamchannels = cms.vint32(-30,35,1, -30,71,1, -32,15,1, -32,51,1,
                                  -34,35,1, -34,71,1, -36,15,1, -36,51,1,
                                  -38,35,1, -38,71,1, -40,15,1, -40,51,1,
                                  -41,35,1, -41,71,1,
                                  -30,15,2, -30,51,2, -32,35,2, -32,71,2,
                                  -34,15,2, -34,51,2, -36,35,2, -36,71,2,
                                  -38,15,2, -38,51,2, -40,35,2, -40,71,2,
                                  -41,15,2, -41,51,2,
                                  30,21,1, 30,57,1, 32, 1,1, 32,37,1,
                                  34,21,1, 34,57,1, 36, 1,1, 36,37,1,
                                  38,21,1, 38,57,1, 40,35,1, 40,71,1,
                                  41,19,1, 41,55,1,
                                  30, 1,2, 30,37,2, 32,21,2, 32,57,2,
                                  34, 1,2, 34,37,2, 36,21,2, 36,57,2,
                                  38, 1,2, 38,37,2, 40,19,2, 40,55,2,
                                  41,35,2, 41,71,2
                                  ),
    # Auxiliary data files with per-channel windows, run dates, reference
    # means and the delivered-luminosity profile.
    tdcwindowsfile = cms.untracked.string("perchanwindows.txt"),
    rundatesfile = cms.untracked.string("../data/rundates2012.txt"),
    s2overs1meansfile = cms.untracked.string("../data/s2overs1meansperchan.txt"),
    lumiprofilefile = cms.untracked.string("../data/2012-delivered-perday.csv"),
    bottomfeeder = cms.untracked.int32(0xbadf00d)
)
| [
""
] | |
333b3e57b03c06635723ab136380a76d369174b0 | edfcd96f0010ea068a4c046bdcf7067ff92d3f9b | /Modules/datetime/1.py | 3dcb2607e4524fae4299e4d4cb1d07b43e896777 | [] | no_license | afsanehshu/python-project | a99ff558f375c1f5e17ea6ffc13af9216ec4733f | 48905cfd24df6d1f48460d421ed774f19403cf53 | refs/heads/main | 2023-08-03T01:53:32.812949 | 2021-09-22T19:36:25 | 2021-09-22T19:36:25 | 409,303,454 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 83 | py | import datetime
# Capture the current local date and time, then display it.
datetime_object = datetime.datetime.now()
print(datetime_object)
| [
"afsanehshu@gmail.com"
] | afsanehshu@gmail.com |
833150ec357d3ab8a3ffb1d0b530443494e22440 | 6423626dcb7c6d2d261e9c87095736bcff888359 | /mainApp/views.py | 0f89809047fb03adb631c5f00f6269b3e53f4dd1 | [] | no_license | andrew-cmdltt/blog | d39031f7e1c8c5402fb201676c4b360c6b2ad3eb | 96e819ad1da056739c4ed854bbb7426d27f80c39 | refs/heads/master | 2022-11-05T13:32:32.720195 | 2020-06-22T07:07:37 | 2020-06-22T07:07:37 | 274,064,474 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,654 | py | from django.shortcuts import render
from django.http import HttpResponseRedirect
from django.views.generic.base import View
from django.views.generic.edit import FormView
from django.contrib.auth.forms import UserCreationForm, AuthenticationForm
from django.contrib.auth import login, logout
from posts.models import Post
from django.http import HttpResponse
class PostController():
    """Function-style views for listing, adding and searching posts.

    NOTE: these are plain view functions grouped in a class namespace --
    they receive the request directly, not ``self``.
    """
    def index(request):
        """Show the current user's posts, or a message when anonymous."""
        if not request.user.is_authenticated:
            return render(request, 'mainApp/message.html', {"message": "You are not authorized"})
        posts = Post.objects.filter(owner_id=request.user.pk)
        return render(request, "mainApp/index.html", {"posts": posts})
    def addPost(request):
        """Render the "add post" form on GET; other verbs fall through and
        return None -- consider handling POST explicitly."""
        # Bug fix: the HTTP verb is exposed as `request.method`;
        # `request.Method` raised AttributeError on every call.
        if request.method == 'GET':
            return render(request, "posts/add.html")
    def searchPosts(request):
        """List posts whose title contains the ?title= query substring."""
        posts = Post.objects.filter(title__contains=request.GET['title'])
        return render(request, "mainApp/index.html", {"posts": posts})
class RegisterFormView(FormView):
    """Create a new user account with Django's stock UserCreationForm."""
    form_class = UserCreationForm
    # After a successful signup, send the user to the login page.
    success_url = "/login/"
    template_name = "mainApp/register.html"
    def form_valid(self, form):
        # Persist the new user, then redirect to success_url.
        form.save()
        return super(RegisterFormView, self).form_valid(form)
class LoginFormView(FormView):
    """Log a user in via Django's stock AuthenticationForm."""
    form_class = AuthenticationForm
    template_name = "mainApp/login.html"
    # After a successful login, send the user to the site root.
    success_url = "/"
    def form_valid(self, form):
        # AuthenticationForm has already validated the credentials;
        # attach the user to the session before redirecting.
        self.user = form.get_user()
        login(self.request, self.user)
        return super(LoginFormView, self).form_valid(form)
class LogoutView(View):
    """Terminate the current session and redirect to the login page."""
    def get(self, request):
        logout(request)
        return HttpResponseRedirect("/login")
| [
"menwhohas2279@gmail.com"
] | menwhohas2279@gmail.com |
8a1ca419dff4adbd0c351ffc4b87553ec6abd288 | b134420ad05667ae191c3a2f3753ce5966594fb1 | /02_Info/hw02/src/docreader.py | 7746f9a82e692830eb169f912e43a82443d4b2a3 | [] | no_license | Fen99/TehnoSphere | aad17f9dca11561378d38ba292db1599e9bcfbec | 8a11c3d26f4eb6ad88c154e10e5411a5f625a17e | refs/heads/master | 2022-03-07T03:56:54.781807 | 2019-09-13T23:03:45 | 2019-09-13T23:03:45 | 106,061,225 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 891 | py | #!/usr/bin/env python
import document_pb2
import struct
import gzip
class DocumentStreamReader:
    """Lazily iterate protobuf ``document`` messages (fields .url, .text)
    from one or more length-prefixed files, transparently gunzipping
    ``.gz`` inputs."""
    def __init__(self, paths):
        self.paths = paths

    def open_single(self, path):
        """Open *path* for binary reading, via gzip when it ends in .gz."""
        return gzip.open(path, 'rb') if path.endswith('.gz') else open(path, 'rb')

    # Stream format: repeated records of
    #   <4-byte native-int length><serialized document of that length>
    def __iter__(self):
        for path in self.paths:
            with self.open_single(path) as stream:
                while True:
                    sb = stream.read(4)
                    # Bug fix: the EOF check was `sb == ''`, which never
                    # matches the bytes returned in binary mode on
                    # Python 3 and therefore looped forever; an empty
                    # read is falsy on both Python 2 and 3.
                    if not sb:
                        break
                    size = struct.unpack('i', sb)[0]
                    msg = stream.read(size)
                    doc = document_pb2.document()
                    doc.ParseFromString(msg)
                    yield doc
def GetDocs(filenames):
    """Return a DocumentStreamReader over *filenames* (a lazy iterable of
    protobuf documents)."""
    return DocumentStreamReader(filenames)
| [
"feda.petraykin@gmail.com"
] | feda.petraykin@gmail.com |
8baafd6e359d9fb1be1f926e4333393e9d332c08 | 6ceb5c8d4276165e61063edf4c4d7ddd4e23ad93 | /tests/pf/test_mag_MVI_Octree.py | be0979f77bb644d2fa5708a03a2dc24fdf135846 | [
"MIT"
] | permissive | fperez/simpeg | e3f552c654d1b57b8f6e407a8f9460799a300cba | 5babfbfb0e74a41f20dfa81eb872603fdc33b17a | refs/heads/master | 2020-09-15T19:39:35.547901 | 2020-01-10T00:25:57 | 2020-01-10T00:25:57 | 223,541,836 | 0 | 1 | MIT | 2019-11-23T06:21:14 | 2019-11-23T06:21:13 | null | UTF-8 | Python | false | false | 11,712 | py | from __future__ import print_function
import unittest
from SimPEG import (Directives, Maps,
InvProblem, Optimization, DataMisfit,
Inversion, Utils, Regularization, Mesh)
import SimPEG.PF as PF
import numpy as np
from scipy.interpolate import NearestNDInterpolator
from SimPEG.Utils import mkvc
class MVIProblemTest(unittest.TestCase):
    """End-to-end magnetic vector inversion (MVI) test on an octree mesh.

    setUp builds a synthetic topography/survey, simulates noisy data from a
    magnetized block, runs a Cartesian MVI to obtain a starting model, and
    configures a sparse spherical-coordinate inversion; test_mag_inverse
    runs that inversion and compares the recovery to the true model.
    """
    def setUp(self):
        """Construct the synthetic problem and the spherical inversion."""
        np.random.seed(0)
        H0 = (50000., 90., 0.)
        # The magnetization is set along a different
        # direction (induced + remanence)
        M = np.array([45., 90.])
        # Create grid of points for topography
        # Lets create a simple Gaussian topo
        # and set the active cells
        [xx, yy] = np.meshgrid(
            np.linspace(-200, 200, 50),
            np.linspace(-200, 200, 50)
        )
        b = 100
        A = 50
        zz = A*np.exp(-0.5*((xx/b)**2. + (yy/b)**2.))
        # We would usually load a topofile
        topo = np.c_[Utils.mkvc(xx), Utils.mkvc(yy), Utils.mkvc(zz)]
        # Create an array of observation points
        xr = np.linspace(-100., 100., 20)
        yr = np.linspace(-100., 100., 20)
        X, Y = np.meshgrid(xr, yr)
        Z = A*np.exp(-0.5*((X/b)**2. + (Y/b)**2.)) + 5
        # Create a MAGsurvey
        xyzLoc = np.c_[Utils.mkvc(X.T), Utils.mkvc(Y.T), Utils.mkvc(Z.T)]
        rxLoc = PF.BaseMag.RxObs(xyzLoc)
        srcField = PF.BaseMag.SrcField([rxLoc], param=H0)
        survey = PF.BaseMag.LinearSurvey(srcField)
        # Create a mesh
        h = [5, 5, 5]
        padDist = np.ones((3, 2)) * 100
        nCpad = [2, 4, 2]
        # Get extent of points
        limx = np.r_[topo[:, 0].max(), topo[:, 0].min()]
        limy = np.r_[topo[:, 1].max(), topo[:, 1].min()]
        limz = np.r_[topo[:, 2].max(), topo[:, 2].min()]
        # Get center of the mesh
        midX = np.mean(limx)
        midY = np.mean(limy)
        midZ = np.mean(limz)
        nCx = int(limx[0]-limx[1]) / h[0]
        nCy = int(limy[0]-limy[1]) / h[1]
        nCz = int(limz[0]-limz[1]+int(np.min(np.r_[nCx, nCy])/3)) / h[2]
        # Figure out full extent required from input
        extent = np.max(np.r_[nCx * h[0] + padDist[0, :].sum(),
                              nCy * h[1] + padDist[1, :].sum(),
                              nCz * h[2] + padDist[2, :].sum()])
        maxLevel = int(np.log2(extent/h[0]))+1
        # Number of cells at the small octree level
        nCx, nCy, nCz = 2**(maxLevel), 2**(maxLevel), 2**(maxLevel)
        # Define the mesh and origin
        # For now cubic cells
        mesh = Mesh.TreeMesh([np.ones(nCx)*h[0],
                              np.ones(nCx)*h[1],
                              np.ones(nCx)*h[2]])
        # Set origin
        mesh.x0 = np.r_[
            -nCx*h[0]/2.+midX,
            -nCy*h[1]/2.+midY,
            -nCz*h[2]/2.+midZ
        ]
        # Refine the mesh around topography
        # Get extent of points
        F = NearestNDInterpolator(topo[:, :2], topo[:, 2])
        zOffset = 0
        # Cycle through the first 3 octree levels
        for ii in range(3):
            dx = mesh.hx.min()*2**ii
            nCx = int((limx[0]-limx[1]) / dx)
            nCy = int((limy[0]-limy[1]) / dx)
            # Create a grid at the octree level in xy
            CCx, CCy = np.meshgrid(
                np.linspace(limx[1], limx[0], nCx),
                np.linspace(limy[1], limy[0], nCy)
            )
            z = F(mkvc(CCx), mkvc(CCy))
            # level means number of layers in current OcTree level
            for level in range(int(nCpad[ii])):
                mesh.insert_cells(
                    np.c_[
                        mkvc(CCx),
                        mkvc(CCy),
                        z-zOffset
                    ], np.ones_like(z)*maxLevel-ii,
                    finalize=False
                )
                zOffset += dx
        mesh.finalize()
        self.mesh = mesh
        # Define the active cells from topo
        actv = Utils.surface2ind_topo(mesh, topo)
        nC = int(actv.sum())
        model = np.zeros((mesh.nC, 3))
        # Convert the inclination declination to vector in Cartesian
        M_xyz = Utils.matutils.dip_azimuth2cartesian(M[0], M[1])
        # Get the indices of the magnetized block
        ind = Utils.ModelBuilder.getIndicesBlock(
            np.r_[-20, -20, -10], np.r_[20, 20, 25],
            mesh.gridCC,
        )[0]
        # Assign magnetization values
        model[ind, :] = np.kron(
            np.ones((ind.shape[0], 1)), M_xyz*0.05
        )
        # Remove air cells
        self.model = model[actv, :]
        # Create active map to go from reduced set to full
        self.actvMap = Maps.InjectActiveCells(mesh, actv, np.nan)
        # Create reduced identity map
        idenMap = Maps.IdentityMap(nP=nC*3)
        # Create the forward model operator
        prob = PF.Magnetics.MagneticIntegral(
            mesh, chiMap=idenMap, actInd=actv,
            modelType='vector'
        )
        # Pair the survey and problem
        survey.pair(prob)
        # Compute some data and add some random noise
        data = prob.fields(Utils.mkvc(self.model))
        std = 5  # nT
        data += np.random.randn(len(data))*std
        wd = np.ones(len(data))*std
        # Assign data and uncertainties to the survey
        survey.dobs = data
        survey.std = wd
        # Create a projection matrix for plotting later
        actvPlot = Maps.InjectActiveCells(mesh, actv, np.nan)
        # Create sensitivity weights from our linear forward operator
        rxLoc = survey.srcField.rxList[0].locs
        # This Mapping connects the regularizations for the three-component
        # vector model
        wires = Maps.Wires(('p', nC), ('s', nC), ('t', nC))
        # Create sensitivity weights from our linear forward operator
        # so that all cells get equal chance to contribute to the solution
        wr = np.sum(prob.G**2., axis=0)**0.5
        wr = (wr/np.max(wr))
        # Create three regularizations for the different components
        # of magnetization
        reg_p = Regularization.Sparse(mesh, indActive=actv, mapping=wires.p)
        reg_p.mref = np.zeros(3*nC)
        reg_p.cell_weights = (wires.p * wr)
        reg_s = Regularization.Sparse(mesh, indActive=actv, mapping=wires.s)
        reg_s.mref = np.zeros(3*nC)
        reg_s.cell_weights = (wires.s * wr)
        reg_t = Regularization.Sparse(mesh, indActive=actv, mapping=wires.t)
        reg_t.mref = np.zeros(3*nC)
        reg_t.cell_weights = (wires.t * wr)
        reg = reg_p + reg_s + reg_t
        reg.mref = np.zeros(3*nC)
        # Data misfit function
        dmis = DataMisfit.l2_DataMisfit(survey)
        dmis.W = 1./survey.std
        # Add directives to the inversion
        opt = Optimization.ProjectedGNCG(maxIter=30, lower=-10, upper=10.,
                                         maxIterLS=20, maxIterCG=20, tolCG=1e-4)
        invProb = InvProblem.BaseInvProblem(dmis, reg, opt)
        # A list of directives to control the inversion
        betaest = Directives.BetaEstimate_ByEig()
        # Here is where the norms are applied
        # Pick a threshold parameter empirically based on the distribution of
        # model parameters
        IRLS = Directives.Update_IRLS(
            f_min_change=1e-3, maxIRLSiter=0, beta_tol=5e-1
        )
        # Pre-conditioner
        update_Jacobi = Directives.UpdatePreconditioner()
        inv = Inversion.BaseInversion(invProb,
                                      directiveList=[IRLS, update_Jacobi, betaest])
        # Run the inversion
        m0 = np.ones(3*nC) * 1e-4  # Starting model
        mrec_MVIC = inv.run(m0)
        # Use the Cartesian MVI result, converted to spherical coordinates,
        # as the starting model for the spherical-formulation inversion.
        self.mstart = Utils.matutils.cartesian2spherical(mrec_MVIC.reshape((nC, 3), order='F'))
        beta = invProb.beta
        dmis.prob.coordinate_system = 'spherical'
        dmis.prob.model = self.mstart
        # Create a block diagonal regularization
        wires = Maps.Wires(('amp', nC), ('theta', nC), ('phi', nC))
        # Create a Combo Regularization
        # Regularize the amplitude of the vectors
        reg_a = Regularization.Sparse(mesh, indActive=actv,
                                      mapping=wires.amp)
        reg_a.norms = np.c_[0., 0., 0., 0.]  # Sparse on the model and its gradients
        reg_a.mref = np.zeros(3*nC)
        # Regularize the vertical angle of the vectors
        reg_t = Regularization.Sparse(mesh, indActive=actv,
                                      mapping=wires.theta)
        reg_t.alpha_s = 0.  # No reference angle
        reg_t.space = 'spherical'
        reg_t.norms = np.c_[2., 0., 0., 0.]  # Only norm on gradients used
        # Regularize the horizontal angle of the vectors
        reg_p = Regularization.Sparse(mesh, indActive=actv,
                                      mapping=wires.phi)
        reg_p.alpha_s = 0.  # No reference angle
        reg_p.space = 'spherical'
        reg_p.norms = np.c_[2., 0., 0., 0.]  # Only norm on gradients used
        reg = reg_a + reg_t + reg_p
        reg.mref = np.zeros(3*nC)
        # Bounds: non-negative amplitude, unconstrained angles.
        Lbound = np.kron(np.asarray([0, -np.inf, -np.inf]), np.ones(nC))
        Ubound = np.kron(np.asarray([10, np.inf, np.inf]), np.ones(nC))
        # Add directives to the inversion
        opt = Optimization.ProjectedGNCG(maxIter=20,
                                         lower=Lbound,
                                         upper=Ubound,
                                         maxIterLS=20,
                                         maxIterCG=30,
                                         tolCG=1e-3,
                                         stepOffBoundsFact=1e-3,
                                         )
        opt.approxHinv = None
        invProb = InvProblem.BaseInvProblem(dmis, reg, opt, beta=beta)
        # Here is where the norms are applied
        IRLS = Directives.Update_IRLS(f_min_change=1e-4, maxIRLSiter=20,
                                      minGNiter=1, beta_tol=0.5,
                                      coolingRate=1, coolEps_q=True,
                                      betaSearch=False)
        # Special directive specific to the mag amplitude problem. The sensitivity
        # weights are updated between each iteration.
        ProjSpherical = Directives.ProjectSphericalBounds()
        update_SensWeight = Directives.UpdateSensitivityWeights()
        update_Jacobi = Directives.UpdatePreconditioner()
        self.inv = Inversion.BaseInversion(
            invProb,
            directiveList=[
                ProjSpherical, IRLS, update_SensWeight, update_Jacobi
            ]
        )
    def test_mag_inverse(self):
        """Run the spherical inversion and require < 25% relative error
        between the recovered and true magnetization vectors."""
        # Run the inversion
        mrec_MVI_S = self.inv.run(self.mstart)
        nC = int(mrec_MVI_S.shape[0]/3)
        # Convert the recovered spherical model back to Cartesian vectors.
        vec_xyz = Utils.matutils.spherical2cartesian(
            mrec_MVI_S.reshape((nC, 3), order='F')).reshape((nC, 3), order='F')
        residual = np.linalg.norm(vec_xyz-self.model) / np.linalg.norm(self.model)
        # print(residual)
        # import matplotlib.pyplot as plt
        # mrec = np.sum(vec_xyz**2., axis=1)**0.5
        # plt.figure()
        # ax = plt.subplot(1, 2, 1)
        # midx = 65
        # self.mesh.plotSlice(self.actvMap*mrec, ax=ax, normal='Y', ind=midx,
        #                     grid=True, clim=(0, 0.03))
        # ax.set_xlim(self.mesh.gridCC[:, 0].min(), self.mesh.gridCC[:, 0].max())
        # ax.set_ylim(self.mesh.gridCC[:, 2].min(), self.mesh.gridCC[:, 2].max())
        # plt.show()
        self.assertTrue(residual < 0.25)
        # self.assertTrue(residual < 0.05)
# Allow running this test module directly with the unittest runner.
if __name__ == '__main__':
    unittest.main()
| [
"fourndo@gmail.com"
] | fourndo@gmail.com |
c43c7bcbbad1cac818d700eaac5a23d93dc3d3f6 | f701b45d56964f70d2b1a59d42348bde89f9f80e | /ase_extensions/transformations.py | 48a0c96290e977646de0b7d2a8b5d3aa404be6a5 | [] | no_license | Clyde-fare/ase_extensions | 01c8c2433a65e521a1ba867bfd6878d3e22aea0e | d3295d6306b5cc83fe6f7686c84dc7cc07aa91f8 | refs/heads/master | 2021-01-18T22:47:28.733715 | 2017-01-12T12:31:11 | 2017-01-12T12:31:11 | 27,386,500 | 0 | 1 | null | 2016-09-05T16:13:45 | 2014-12-01T15:49:19 | Python | UTF-8 | Python | false | false | 65,592 | py | #taken from http://www.lfd.uci.edu/~gohlke/code/transformations.py.html
# -*- coding: utf-8 -*-
# transformations.py
# Copyright (c) 2006-2012, Christoph Gohlke
# Copyright (c) 2006-2012, The Regents of the University of California
# Produced at the Laboratory for Fluorescence Dynamics
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holders nor the names of any
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Homogeneous Transformation Matrices and Quaternions.
A library for calculating 4x4 matrices for translating, rotating, reflecting,
scaling, shearing, projecting, orthogonalizing, and superimposing arrays of
3D homogeneous coordinates as well as for converting between rotation matrices,
Euler angles, and quaternions. Also includes an Arcball control object and
functions to decompose transformation matrices.
:Authors:
`Christoph Gohlke <http://www.lfd.uci.edu/~gohlke/>`__,
Laboratory for Fluorescence Dynamics, University of California, Irvine
:Version: 2012.10.18
Requirements
------------
* `CPython 2.7 or 3.2 <http://www.python.org>`__
* `Numpy 1.6 <http://numpy.scipy.org>`__
* `transformations.c 2012.01.01 <http://www.lfd.uci.edu/~gohlke/>`__
(optional implementation of some functions in C)
Notes
-----
The API is not stable yet and is expected to change between revisions.
This Python code is not optimized for speed. Refer to the transformations.c
module for a faster implementation of some functions.
Documentation in HTML format can be generated with epydoc.
Matrices (M) can be inverted using numpy.linalg.inv(M), be concatenated using
numpy.dot(M0, M1), or transform homogeneous coordinate arrays (v) using
numpy.dot(M, v) for shape (4, \*) column vectors, respectively
numpy.dot(v, M.T) for shape (\*, 4) row vectors ("array of points").
This module follows the "column vectors on the right" and "row major storage"
(C contiguous) conventions. The translation components are in the right column
of the transformation matrix, i.e. M[:3, 3].
The transpose of the transformation matrices may have to be used to interface
with other graphics systems, e.g. with OpenGL's glMultMatrixd(). See also [16].
Calculations are carried out with numpy.float64 precision.
Vector, point, quaternion, and matrix function arguments are expected to be
"array like", i.e. tuple, list, or numpy arrays.
Return types are numpy arrays unless specified otherwise.
Angles are in radians unless specified otherwise.
Quaternions w+ix+jy+kz are represented as [w, x, y, z].
A triple of Euler angles can be applied/interpreted in 24 ways, which can
be specified using a 4 character string or encoded 4-tuple:
*Axes 4-string*: e.g. 'sxyz' or 'ryxy'
- first character : rotations are applied to 's'tatic or 'r'otating frame
- remaining characters : successive rotation axis 'x', 'y', or 'z'
*Axes 4-tuple*: e.g. (0, 0, 0, 0) or (1, 1, 1, 1)
- inner axis: code of axis ('x':0, 'y':1, 'z':2) of rightmost matrix.
- parity : even (0) if inner axis 'x' is followed by 'y', 'y' is followed
by 'z', or 'z' is followed by 'x'. Otherwise odd (1).
- repetition : first and last axis are same (1) or different (0).
- frame : rotations are applied to static (0) or rotating (1) frame.
References
----------
(1) Matrices and transformations. Ronald Goldman.
In "Graphics Gems I", pp 472-475. Morgan Kaufmann, 1990.
(2) More matrices and transformations: shear and pseudo-perspective.
Ronald Goldman. In "Graphics Gems II", pp 320-323. Morgan Kaufmann, 1991.
(3) Decomposing a matrix into simple transformations. Spencer Thomas.
In "Graphics Gems II", pp 320-323. Morgan Kaufmann, 1991.
(4) Recovering the data from the transformation matrix. Ronald Goldman.
In "Graphics Gems II", pp 324-331. Morgan Kaufmann, 1991.
(5) Euler angle conversion. Ken Shoemake.
In "Graphics Gems IV", pp 222-229. Morgan Kaufmann, 1994.
(6) Arcball rotation control. Ken Shoemake.
In "Graphics Gems IV", pp 175-192. Morgan Kaufmann, 1994.
(7) Representing attitude: Euler angles, unit quaternions, and rotation
vectors. James Diebel. 2006.
(8) A discussion of the solution for the best rotation to relate two sets
of vectors. W Kabsch. Acta Cryst. 1978. A34, 827-828.
(9) Closed-form solution of absolute orientation using unit quaternions.
BKP Horn. J Opt Soc Am A. 1987. 4(4):629-642.
(10) Quaternions. Ken Shoemake.
http://www.sfu.ca/~jwa3/cmpt461/files/quatut.pdf
(11) From quaternion to matrix and back. JMP van Waveren. 2005.
http://www.intel.com/cd/ids/developer/asmo-na/eng/293748.htm
(12) Uniform random rotations. Ken Shoemake.
In "Graphics Gems III", pp 124-132. Morgan Kaufmann, 1992.
(13) Quaternion in molecular modeling. CFF Karney.
J Mol Graph Mod, 25(5):595-604
(14) New method for extracting the quaternion from a rotation matrix.
Itzhack Y Bar-Itzhack, J Guid Contr Dynam. 2000. 23(6): 1085-1087.
(15) Multiple View Geometry in Computer Vision. Hartley and Zissermann.
Cambridge University Press; 2nd Ed. 2004. Chapter 4, Algorithm 4.7, p 130.
(16) Column Vectors vs. Row Vectors.
http://steve.hollasch.net/cgindex/math/matrix/column-vec.html
Examples
--------
>>> alpha, beta, gamma = 0.123, -1.234, 2.345
>>> origin, xaxis, yaxis, zaxis = [0, 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1]
>>> I = identity_matrix()
>>> Rx = rotation_matrix(alpha, xaxis)
>>> Ry = rotation_matrix(beta, yaxis)
>>> Rz = rotation_matrix(gamma, zaxis)
>>> R = concatenate_matrices(Rx, Ry, Rz)
>>> euler = euler_from_matrix(R, 'rxyz')
>>> numpy.allclose([alpha, beta, gamma], euler)
True
>>> Re = euler_matrix(alpha, beta, gamma, 'rxyz')
>>> is_same_transform(R, Re)
True
>>> al, be, ga = euler_from_matrix(Re, 'rxyz')
>>> is_same_transform(Re, euler_matrix(al, be, ga, 'rxyz'))
True
>>> qx = quaternion_about_axis(alpha, xaxis)
>>> qy = quaternion_about_axis(beta, yaxis)
>>> qz = quaternion_about_axis(gamma, zaxis)
>>> q = quaternion_multiply(qx, qy)
>>> q = quaternion_multiply(q, qz)
>>> Rq = quaternion_matrix(q)
>>> is_same_transform(R, Rq)
True
>>> S = scale_matrix(1.23, origin)
>>> T = translation_matrix([1, 2, 3])
>>> Z = shear_matrix(beta, xaxis, origin, zaxis)
>>> R = random_rotation_matrix(numpy.random.rand(3))
>>> M = concatenate_matrices(T, R, Z, S)
>>> scale, shear, angles, trans, persp = decompose_matrix(M)
>>> numpy.allclose(scale, 1.23)
True
>>> numpy.allclose(trans, [1, 2, 3])
True
>>> numpy.allclose(shear, [0, math.tan(beta), 0])
True
>>> is_same_transform(R, euler_matrix(axes='sxyz', *angles))
True
>>> M1 = compose_matrix(scale, shear, angles, trans, persp)
>>> is_same_transform(M, M1)
True
>>> v0, v1 = random_vector(3), random_vector(3)
>>> M = rotation_matrix(angle_between_vectors(v0, v1), vector_product(v0, v1))
>>> v2 = numpy.dot(v0, M[:3,:3].T)
>>> numpy.allclose(unit_vector(v1), unit_vector(v2))
True
"""
from __future__ import division, print_function
import sys
import os
import math
import numpy
__version__ = '2012.01.18'
__docformat__ = 'restructuredtext en'
__all__ = []
def identity_matrix():
    """Return the 4x4 homogeneous identity transform.

    >>> I = identity_matrix()
    >>> numpy.allclose(I, numpy.identity(4))
    True
    >>> numpy.sum(I), numpy.trace(I)
    (4.0, 4.0)
    """
    return numpy.eye(4)
def translation_matrix(direction):
    """Return the 4x4 homogeneous matrix translating by *direction*.

    >>> v = numpy.random.random(3) - 0.5
    >>> numpy.allclose(v, translation_matrix(v)[:3, 3])
    True
    """
    matrix = numpy.identity(4)
    # Translation lives in the rightmost column of the matrix.
    matrix[0:3, 3] = direction[0:3]
    return matrix
def translation_from_matrix(matrix):
    """Return the translation vector (an independent copy of M[:3, 3])
    from a 4x4 transformation matrix.

    >>> v0 = numpy.random.random(3) - 0.5
    >>> v1 = translation_from_matrix(translation_matrix(v0))
    >>> numpy.allclose(v0, v1)
    True
    """
    # numpy.asarray replaces numpy.array(..., copy=False): NumPy 2.0
    # changed copy=False to mean "never copy" and raise when a copy is
    # unavoidable (e.g. a nested-list input).
    return numpy.asarray(matrix)[:3, 3].copy()
def reflection_matrix(point, normal):
    """Return the 4x4 matrix that mirrors through the plane containing
    *point* with normal vector *normal*.

    >>> v0 = numpy.random.random(4) - 0.5
    >>> v0[3] = 1.
    >>> v1 = numpy.random.random(3) - 0.5
    >>> R = reflection_matrix(v0, v1)
    >>> numpy.allclose(v0, numpy.dot(R, v0))
    True
    """
    n = unit_vector(normal[:3])
    # Householder form H = I - 2 n n^T, shifted so the mirror plane
    # passes through *point* instead of the origin.
    matrix = numpy.identity(4)
    matrix[:3, :3] = matrix[:3, :3] - 2.0 * numpy.outer(n, n)
    matrix[:3, 3] = (2.0 * numpy.dot(point[:3], n)) * n
    return matrix
def reflection_from_matrix(matrix):
    """Return mirror plane point and normal vector from reflection matrix.

    The normal is the unit eigenvector of the 3x3 block for eigenvalue -1;
    the plane point is a fixed point of the full homogeneous transform
    (an eigenvector of eigenvalue 1, rescaled so its w component is 1).

    Raises ValueError when *matrix* is not a reflection (no eigenvalue -1
    in the 3x3 block, or no eigenvalue 1 overall).

    >>> v0 = numpy.random.random(3) - 0.5
    >>> v1 = numpy.random.random(3) - 0.5
    >>> M0 = reflection_matrix(v0, v1)
    >>> point, normal = reflection_from_matrix(M0)
    >>> M1 = reflection_matrix(point, normal)
    >>> is_same_transform(M0, M1)
    True
    """
    # numpy.asarray replaces numpy.array(..., copy=False): NumPy 2.0
    # changed copy=False to mean "never copy" and raise when a copy is
    # required (e.g. list input or a dtype conversion).
    M = numpy.asarray(matrix, dtype=numpy.float64)
    # normal: unit eigenvector corresponding to eigenvalue -1
    w, V = numpy.linalg.eig(M[:3, :3])
    i = numpy.where(abs(numpy.real(w) + 1.0) < 1e-8)[0]
    if not len(i):
        raise ValueError("no unit eigenvector corresponding to eigenvalue -1")
    normal = numpy.real(V[:, i[0]]).squeeze()
    # point: any unit eigenvector corresponding to eigenvalue 1
    w, V = numpy.linalg.eig(M)
    i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-8)[0]
    if not len(i):
        raise ValueError("no unit eigenvector corresponding to eigenvalue 1")
    point = numpy.real(V[:, i[-1]]).squeeze()
    point /= point[3]
    return point, normal
def rotation_matrix(angle, direction, point=None):
    """Return matrix to rotate about axis defined by point and direction.

    angle is in radians; direction is the rotation axis (only the first
    three components are used); if point is given, the axis passes
    through it rather than through the origin.

    >>> R = rotation_matrix(math.pi/2, [0, 0, 1], [1, 0, 0])
    >>> numpy.allclose(numpy.dot(R, [0, 0, 0, 1]), [1, -1, 0, 1])
    True
    >>> angle = (random.random() - 0.5) * (2*math.pi)
    >>> direc = numpy.random.random(3) - 0.5
    >>> point = numpy.random.random(3) - 0.5
    >>> R0 = rotation_matrix(angle, direc, point)
    >>> R1 = rotation_matrix(angle-2*math.pi, direc, point)
    >>> is_same_transform(R0, R1)
    True
    >>> R0 = rotation_matrix(angle, direc, point)
    >>> R1 = rotation_matrix(-angle, -direc, point)
    >>> is_same_transform(R0, R1)
    True
    >>> I = numpy.identity(4, numpy.float64)
    >>> numpy.allclose(I, rotation_matrix(math.pi*2, direc))
    True
    >>> numpy.allclose(2, numpy.trace(rotation_matrix(math.pi/2,
    ...                                               direc, point)))
    True
    """
    sina = math.sin(angle)
    cosa = math.cos(angle)
    direction = unit_vector(direction[:3])
    # rotation matrix around unit vector (Rodrigues' rotation formula):
    # R = cos(a)*I + (1-cos(a))*d*d^T + sin(a)*[d]_x
    R = numpy.diag([cosa, cosa, cosa])
    R += numpy.outer(direction, direction) * (1.0 - cosa)
    # NOTE: `direction` is scaled in place here; it already carries the
    # sin(angle) factor for the skew-symmetric cross-product term below.
    direction *= sina
    R += numpy.array([[ 0.0, -direction[2], direction[1]],
                      [ direction[2], 0.0, -direction[0]],
                      [-direction[1], direction[0], 0.0]])
    M = numpy.identity(4)
    M[:3, :3] = R
    if point is not None:
        # rotation not around origin: translate so `point` stays fixed
        point = numpy.array(point[:3], dtype=numpy.float64, copy=False)
        M[:3, 3] = point - numpy.dot(R, point)
    return M
def rotation_from_matrix(matrix):
    """Return rotation angle and axis from rotation matrix.

    Inverse of rotation_matrix: returns (angle, direction, point) such
    that rotation_matrix(angle, direction, point) reproduces the input.
    Raises ValueError if `matrix` is not a rotation.

    >>> angle = (random.random() - 0.5) * (2*math.pi)
    >>> direc = numpy.random.random(3) - 0.5
    >>> point = numpy.random.random(3) - 0.5
    >>> R0 = rotation_matrix(angle, direc, point)
    >>> angle, direc, point = rotation_from_matrix(R0)
    >>> R1 = rotation_matrix(angle, direc, point)
    >>> is_same_transform(R0, R1)
    True
    """
    R = numpy.array(matrix, dtype=numpy.float64, copy=False)
    R33 = R[:3, :3]
    # direction: unit eigenvector of R33 corresponding to eigenvalue of 1
    # (the rotation axis is invariant under the rotation)
    w, W = numpy.linalg.eig(R33.T)
    i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-8)[0]
    if not len(i):
        raise ValueError("no unit eigenvector corresponding to eigenvalue 1")
    direction = numpy.real(W[:, i[-1]]).squeeze()
    # point: unit eigenvector of R33 corresponding to eigenvalue of 1
    # (a fixed point of the full homogeneous transform lies on the axis)
    w, Q = numpy.linalg.eig(R)
    i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-8)[0]
    if not len(i):
        raise ValueError("no unit eigenvector corresponding to eigenvalue 1")
    point = numpy.real(Q[:, i[-1]]).squeeze()
    point /= point[3]
    # rotation angle depending on direction: trace gives cos, one
    # off-diagonal pair (chosen to avoid division by ~0) gives sin
    cosa = (numpy.trace(R33) - 1.0) / 2.0
    if abs(direction[2]) > 1e-8:
        sina = (R[1, 0] + (cosa-1.0)*direction[0]*direction[1]) / direction[2]
    elif abs(direction[1]) > 1e-8:
        sina = (R[0, 2] + (cosa-1.0)*direction[0]*direction[2]) / direction[1]
    else:
        sina = (R[2, 1] + (cosa-1.0)*direction[1]*direction[2]) / direction[0]
    angle = math.atan2(sina, cosa)
    return angle, direction, point
def scale_matrix(factor, origin=None, direction=None):
    """Return matrix to scale by factor around origin in direction.

    Use factor -1 for point symmetry.

    >>> v = (numpy.random.rand(4, 5) - 0.5) * 20
    >>> v[3] = 1
    >>> S = scale_matrix(-1.234)
    >>> numpy.allclose(numpy.dot(S, v)[:3], -1.234*v[:3])
    True
    >>> factor = random.random() * 10 - 5
    >>> origin = numpy.random.random(3) - 0.5
    >>> direct = numpy.random.random(3) - 0.5
    >>> S = scale_matrix(factor, origin)
    >>> S = scale_matrix(factor, origin, direct)
    """
    if direction is not None:
        # Nonuniform scaling along a single direction.
        direction = unit_vector(direction[:3])
        shrink = 1.0 - factor
        M = numpy.identity(4)
        M[:3, :3] -= shrink * numpy.outer(direction, direction)
        if origin is not None:
            # Keep `origin` fixed under the scaling.
            M[:3, 3] = (shrink * numpy.dot(origin[:3], direction)) * direction
    else:
        # Uniform scaling in all three axes.
        M = numpy.diag([factor, factor, factor, 1.0])
        if origin is not None:
            M[:3, 3] = origin[:3]
            M[:3, 3] *= 1.0 - factor
    return M
def scale_from_matrix(matrix):
    """Return scaling factor, origin and direction from scaling matrix.

    Inverse of scale_matrix; for uniform scalings the returned direction
    is None.

    >>> factor = random.random() * 10 - 5
    >>> origin = numpy.random.random(3) - 0.5
    >>> direct = numpy.random.random(3) - 0.5
    >>> S0 = scale_matrix(factor, origin)
    >>> factor, origin, direction = scale_from_matrix(S0)
    >>> S1 = scale_matrix(factor, origin, direction)
    >>> is_same_transform(S0, S1)
    True
    >>> S0 = scale_matrix(factor, origin, direct)
    >>> factor, origin, direction = scale_from_matrix(S0)
    >>> S1 = scale_matrix(factor, origin, direction)
    >>> is_same_transform(S0, S1)
    True
    """
    M = numpy.array(matrix, dtype=numpy.float64, copy=False)
    M33 = M[:3, :3]
    # For a directional scaling, trace = factor + 2 (two unit eigenvalues).
    factor = numpy.trace(M33) - 2.0
    try:
        # direction: unit eigenvector corresponding to eigenvalue factor
        w, V = numpy.linalg.eig(M33)
        i = numpy.where(abs(numpy.real(w) - factor) < 1e-8)[0][0]
        direction = numpy.real(V[:, i]).squeeze()
        direction /= vector_norm(direction)
    except IndexError:
        # uniform scaling: no single eigenvalue stands out;
        # recover factor from trace = 3*factor instead
        factor = (factor + 2.0) / 3.0
        direction = None
    # origin: any eigenvector corresponding to eigenvalue 1
    # (the fixed point of the full homogeneous transform)
    w, V = numpy.linalg.eig(M)
    i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-8)[0]
    if not len(i):
        raise ValueError("no eigenvector corresponding to eigenvalue 1")
    origin = numpy.real(V[:, i[-1]]).squeeze()
    origin /= origin[3]
    return factor, origin, direction
def projection_matrix(point, normal, direction=None,
                      perspective=None, pseudo=False):
    """Return matrix to project onto plane defined by point and normal.

    Using either perspective point, projection direction, or none of both.
    If pseudo is True, perspective projections will preserve relative depth
    such that Perspective = dot(Orthogonal, PseudoPerspective).

    >>> P = projection_matrix([0, 0, 0], [1, 0, 0])
    >>> numpy.allclose(P[1:, 1:], numpy.identity(4)[1:, 1:])
    True
    >>> point = numpy.random.random(3) - 0.5
    >>> normal = numpy.random.random(3) - 0.5
    >>> direct = numpy.random.random(3) - 0.5
    >>> persp = numpy.random.random(3) - 0.5
    >>> P0 = projection_matrix(point, normal)
    >>> P1 = projection_matrix(point, normal, direction=direct)
    >>> P2 = projection_matrix(point, normal, perspective=persp)
    >>> P3 = projection_matrix(point, normal, perspective=persp, pseudo=True)
    >>> is_same_transform(P2, numpy.dot(P0, P3))
    True
    >>> P = projection_matrix([3, 0, 0], [1, 1, 0], [1, 0, 0])
    >>> v0 = (numpy.random.rand(4, 5) - 0.5) * 20
    >>> v0[3] = 1
    >>> v1 = numpy.dot(P, v0)
    >>> numpy.allclose(v1[1], v0[1])
    True
    >>> numpy.allclose(v1[0], 3-v1[1])
    True
    """
    M = numpy.identity(4)
    point = numpy.array(point[:3], dtype=numpy.float64, copy=False)
    normal = unit_vector(normal[:3])
    if perspective is not None:
        # perspective projection through eye point `perspective`;
        # result must be dehomogenized (divided by w) after transform
        perspective = numpy.array(perspective[:3], dtype=numpy.float64,
                                  copy=False)
        M[0, 0] = M[1, 1] = M[2, 2] = numpy.dot(perspective-point, normal)
        M[:3, :3] -= numpy.outer(perspective, normal)
        if pseudo:
            # preserve relative depth
            M[:3, :3] -= numpy.outer(normal, normal)
            M[:3, 3] = numpy.dot(point, normal) * (perspective+normal)
        else:
            M[:3, 3] = numpy.dot(point, normal) * perspective
        # bottom row makes w proportional to distance from the plane
        M[3, :3] = -normal
        M[3, 3] = numpy.dot(perspective, normal)
    elif direction is not None:
        # parallel (oblique) projection along `direction`
        direction = numpy.array(direction[:3], dtype=numpy.float64, copy=False)
        scale = numpy.dot(direction, normal)
        M[:3, :3] -= numpy.outer(direction, normal) / scale
        M[:3, 3] = direction * (numpy.dot(point, normal) / scale)
    else:
        # orthogonal projection: drop the component along `normal`
        M[:3, :3] -= numpy.outer(normal, normal)
        M[:3, 3] = numpy.dot(point, normal) * normal
    return M
def projection_from_matrix(matrix, pseudo=False):
    """Return projection plane and perspective point from projection matrix.

    Return values are same as arguments for projection_matrix function:
    point, normal, direction, perspective, and pseudo.

    >>> point = numpy.random.random(3) - 0.5
    >>> normal = numpy.random.random(3) - 0.5
    >>> direct = numpy.random.random(3) - 0.5
    >>> persp = numpy.random.random(3) - 0.5
    >>> P0 = projection_matrix(point, normal)
    >>> result = projection_from_matrix(P0)
    >>> P1 = projection_matrix(*result)
    >>> is_same_transform(P0, P1)
    True
    >>> P0 = projection_matrix(point, normal, direct)
    >>> result = projection_from_matrix(P0)
    >>> P1 = projection_matrix(*result)
    >>> is_same_transform(P0, P1)
    True
    >>> P0 = projection_matrix(point, normal, perspective=persp, pseudo=False)
    >>> result = projection_from_matrix(P0, pseudo=False)
    >>> P1 = projection_matrix(*result)
    >>> is_same_transform(P0, P1)
    True
    >>> P0 = projection_matrix(point, normal, perspective=persp, pseudo=True)
    >>> result = projection_from_matrix(P0, pseudo=True)
    >>> P1 = projection_matrix(*result)
    >>> is_same_transform(P0, P1)
    True
    """
    M = numpy.array(matrix, dtype=numpy.float64, copy=False)
    M33 = M[:3, :3]
    w, V = numpy.linalg.eig(M)
    i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-8)[0]
    # A unit eigenvalue of the full matrix indicates a parallel/orthogonal
    # projection (plane points are fixed); pseudo-perspective matrices are
    # handled by the perspective branch below even if one exists.
    if not pseudo and len(i):
        # point: any eigenvector corresponding to eigenvalue 1
        point = numpy.real(V[:, i[-1]]).squeeze()
        point /= point[3]
        # direction: unit eigenvector corresponding to eigenvalue 0
        # (the projection collapses vectors along its direction)
        w, V = numpy.linalg.eig(M33)
        i = numpy.where(abs(numpy.real(w)) < 1e-8)[0]
        if not len(i):
            raise ValueError("no eigenvector corresponding to eigenvalue 0")
        direction = numpy.real(V[:, i[0]]).squeeze()
        direction /= vector_norm(direction)
        # normal: unit eigenvector of M33.T corresponding to eigenvalue 0
        w, V = numpy.linalg.eig(M33.T)
        i = numpy.where(abs(numpy.real(w)) < 1e-8)[0]
        if len(i):
            # parallel projection
            normal = numpy.real(V[:, i[0]]).squeeze()
            normal /= vector_norm(normal)
            return point, normal, direction, None, False
        else:
            # orthogonal projection, where normal equals direction vector
            return point, direction, None, None, False
    else:
        # perspective projection
        i = numpy.where(abs(numpy.real(w)) > 1e-8)[0]
        if not len(i):
            raise ValueError(
                "no eigenvector not corresponding to eigenvalue 0")
        point = numpy.real(V[:, i[-1]]).squeeze()
        point /= point[3]
        # bottom row of the matrix encodes -normal (see projection_matrix)
        normal = - M[3, :3]
        perspective = M[:3, 3] / numpy.dot(point[:3], normal)
        if pseudo:
            perspective -= normal
        return point, normal, None, perspective, pseudo
def clip_matrix(left, right, bottom, top, near, far, perspective=False):
    """Return matrix to obtain normalized device coordinates from frustrum.

    The frustrum bounds are axis-aligned along x (left, right),
    y (bottom, top) and z (near, far).
    Normalized device coordinates are in range [-1, 1] if coordinates are
    inside the frustrum.
    If perspective is True the frustrum is a truncated pyramid with the
    perspective point at origin and direction along z axis, otherwise an
    orthographic canonical view volume (a box).
    Homogeneous coordinates transformed by the perspective clip matrix
    need to be dehomogenized (divided by w coordinate).

    >>> frustrum = numpy.random.rand(6)
    >>> frustrum[1] += frustrum[0]
    >>> frustrum[3] += frustrum[2]
    >>> frustrum[5] += frustrum[4]
    >>> M = clip_matrix(perspective=False, *frustrum)
    >>> numpy.dot(M, [frustrum[0], frustrum[2], frustrum[4], 1])
    array([-1., -1., -1.,  1.])
    >>> numpy.dot(M, [frustrum[1], frustrum[3], frustrum[5], 1])
    array([ 1.,  1.,  1.,  1.])
    >>> M = clip_matrix(perspective=True, *frustrum)
    >>> v = numpy.dot(M, [frustrum[0], frustrum[2], frustrum[4], 1])
    >>> v / v[3]
    array([-1., -1., -1.,  1.])
    >>> v = numpy.dot(M, [frustrum[1], frustrum[3], frustrum[4], 1])
    >>> v / v[3]
    array([ 1.,  1., -1.,  1.])
    """
    # Guard clauses: bounds must be properly ordered.
    if left >= right or bottom >= top or near >= far:
        raise ValueError("invalid frustrum")
    if not perspective:
        # Orthographic: scale and translate the box into [-1, 1]^3.
        rows = [[2.0/(right-left), 0.0, 0.0, (right+left)/(left-right)],
                [0.0, 2.0/(top-bottom), 0.0, (top+bottom)/(bottom-top)],
                [0.0, 0.0, 2.0/(far-near), (far+near)/(near-far)],
                [0.0, 0.0, 0.0, 1.0]]
        return numpy.array(rows)
    # Perspective: truncated pyramid with apex at the origin.
    if near <= _EPS:
        raise ValueError("invalid frustrum: near <= 0")
    t = 2.0 * near
    rows = [[t/(left-right), 0.0, (right+left)/(right-left), 0.0],
            [0.0, t/(bottom-top), (top+bottom)/(top-bottom), 0.0],
            [0.0, 0.0, (far+near)/(near-far), t*far/(far-near)],
            [0.0, 0.0, -1.0, 0.0]]
    return numpy.array(rows)
def shear_matrix(angle, direction, point, normal):
    """Return matrix to shear by angle along direction vector on shear plane.

    The shear plane is defined by a point and normal vector. The direction
    vector must be orthogonal to the plane's normal vector.
    A point P is transformed by the shear matrix into P" such that
    the vector P-P" is parallel to the direction vector and its extent is
    given by the angle of P-P'-P", where P' is the orthogonal projection
    of P onto the shear plane.

    >>> angle = (random.random() - 0.5) * 4*math.pi
    >>> direct = numpy.random.random(3) - 0.5
    >>> point = numpy.random.random(3) - 0.5
    >>> normal = numpy.cross(direct, numpy.random.random(3))
    >>> S = shear_matrix(angle, direct, point, normal)
    >>> numpy.allclose(1, numpy.linalg.det(S))
    True
    """
    n = unit_vector(normal[:3])
    d = unit_vector(direction[:3])
    if abs(numpy.dot(n, d)) > 1e-6:
        raise ValueError("direction and normal vectors are not orthogonal")
    # Shear magnitude grows with tan(angle) of the displacement.
    tangent = math.tan(angle)
    M = numpy.identity(4)
    M[:3, :3] += tangent * numpy.outer(d, n)
    # Translate so the shear plane through `point` stays fixed.
    M[:3, 3] = -tangent * numpy.dot(point[:3], n) * d
    return M
def shear_from_matrix(matrix):
    """Return shear angle, direction and plane from shear matrix.

    Inverse of shear_matrix. Raises ValueError if `matrix` is not a shear.

    >>> angle = (random.random() - 0.5) * 4*math.pi
    >>> direct = numpy.random.random(3) - 0.5
    >>> point = numpy.random.random(3) - 0.5
    >>> normal = numpy.cross(direct, numpy.random.random(3))
    >>> S0 = shear_matrix(angle, direct, point, normal)
    >>> angle, direct, point, normal = shear_from_matrix(S0)
    >>> S1 = shear_matrix(angle, direct, point, normal)
    >>> is_same_transform(S0, S1)
    True
    """
    M = numpy.array(matrix, dtype=numpy.float64, copy=False)
    M33 = M[:3, :3]
    # normal: cross independent eigenvectors corresponding to the eigenvalue 1
    # (the shear plane is spanned by eigenvalue-1 eigenvectors; note the
    # loose 1e-4 tolerance because the eigenvalue 1 is defective here)
    w, V = numpy.linalg.eig(M33)
    i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-4)[0]
    if len(i) < 2:
        raise ValueError("no two linear independent eigenvectors found %s" % w)
    V = numpy.real(V[:, i]).squeeze().T
    lenorm = -1.0
    # pick the eigenvector pair with the largest cross product, i.e. the
    # most linearly independent pair, for numerical stability
    for i0, i1 in ((0, 1), (0, 2), (1, 2)):
        n = numpy.cross(V[i0], V[i1])
        w = vector_norm(n)
        if w > lenorm:
            lenorm = w
            normal = n
    normal /= lenorm
    # direction and angle: M33 - I maps everything onto the shear direction
    direction = numpy.dot(M33 - numpy.identity(3), normal)
    angle = vector_norm(direction)
    direction /= angle
    angle = math.atan(angle)
    # point: eigenvector corresponding to eigenvalue 1
    # (a fixed point of the full transform lies on the shear plane)
    w, V = numpy.linalg.eig(M)
    i = numpy.where(abs(numpy.real(w) - 1.0) < 1e-8)[0]
    if not len(i):
        raise ValueError("no eigenvector corresponding to eigenvalue 1")
    point = numpy.real(V[:, i[-1]]).squeeze()
    point /= point[3]
    return angle, direction, point, normal
def decompose_matrix(matrix):
    """Return sequence of transformations from transformation matrix.

    matrix : array_like
        Non-degenerative homogeneous transformation matrix

    Return tuple of:
        scale : vector of 3 scaling factors
        shear : list of shear factors for x-y, x-z, y-z axes
        angles : list of Euler angles about static x, y, z axes
        translate : translation vector along x, y, z axes
        perspective : perspective partition of matrix

    Raise ValueError if matrix is of wrong type or degenerative.

    >>> T0 = translation_matrix([1, 2, 3])
    >>> scale, shear, angles, trans, persp = decompose_matrix(T0)
    >>> T1 = translation_matrix(trans)
    >>> numpy.allclose(T0, T1)
    True
    >>> S = scale_matrix(0.123)
    >>> scale, shear, angles, trans, persp = decompose_matrix(S)
    >>> scale[0]
    0.123
    >>> R0 = euler_matrix(1, 2, 3)
    >>> scale, shear, angles, trans, persp = decompose_matrix(R0)
    >>> R1 = euler_matrix(*angles)
    >>> numpy.allclose(R0, R1)
    True
    """
    # Work on a transposed copy so rows correspond to basis vectors.
    M = numpy.array(matrix, dtype=numpy.float64, copy=True).T
    if abs(M[3, 3]) < _EPS:
        raise ValueError("M[3, 3] is zero")
    M /= M[3, 3]
    P = M.copy()
    P[:, 3] = 0.0, 0.0, 0.0, 1.0
    if not numpy.linalg.det(P):
        raise ValueError("matrix is singular")
    scale = numpy.zeros((3, ))
    shear = [0.0, 0.0, 0.0]
    angles = [0.0, 0.0, 0.0]
    # Split off the perspective partition first, if present.
    if any(abs(M[:3, 3]) > _EPS):
        perspective = numpy.dot(M[:, 3], numpy.linalg.inv(P.T))
        M[:, 3] = 0.0, 0.0, 0.0, 1.0
    else:
        perspective = numpy.array([0.0, 0.0, 0.0, 1.0])
    # Then translation.
    translate = M[3, :3].copy()
    M[3, :3] = 0.0
    # Gram-Schmidt-like pass over the 3x3 part extracts scale, then shear,
    # leaving a pure rotation in `row`. Statement order is significant.
    row = M[:3, :3].copy()
    scale[0] = vector_norm(row[0])
    row[0] /= scale[0]
    shear[0] = numpy.dot(row[0], row[1])
    row[1] -= row[0] * shear[0]
    scale[1] = vector_norm(row[1])
    row[1] /= scale[1]
    shear[0] /= scale[1]
    shear[1] = numpy.dot(row[0], row[2])
    row[2] -= row[0] * shear[1]
    shear[2] = numpy.dot(row[1], row[2])
    row[2] -= row[1] * shear[2]
    scale[2] = vector_norm(row[2])
    row[2] /= scale[2]
    # NOTE: works on a plain list because scale[2] is a numpy scalar, so
    # the division broadcasts and the result is sliced back into `shear`.
    shear[1:] /= scale[2]
    # Flip handedness if the remaining basis is left-handed.
    if numpy.dot(row[0], numpy.cross(row[1], row[2])) < 0:
        numpy.negative(scale, scale)
        numpy.negative(row, row)
    # Euler angles (static x, y, z axes) from the rotation rows.
    angles[1] = math.asin(-row[0, 2])
    if math.cos(angles[1]):
        angles[0] = math.atan2(row[1, 2], row[2, 2])
        angles[2] = math.atan2(row[0, 1], row[0, 0])
    else:
        # gimbal lock: pitch is +-pi/2, roll and yaw are coupled
        #angles[0] = math.atan2(row[1, 0], row[1, 1])
        angles[0] = math.atan2(-row[2, 1], row[1, 1])
        angles[2] = 0.0
    return scale, shear, angles, translate, perspective
def compose_matrix(scale=None, shear=None, angles=None, translate=None,
                   perspective=None):
    """Return transformation matrix from sequence of transformations.

    This is the inverse of the decompose_matrix function.

    Sequence of transformations:
        scale : vector of 3 scaling factors
        shear : list of shear factors for x-y, x-z, y-z axes
        angles : list of Euler angles about static x, y, z axes
        translate : translation vector along x, y, z axes
        perspective : perspective partition of matrix

    >>> scale = numpy.random.random(3) - 0.5
    >>> shear = numpy.random.random(3) - 0.5
    >>> angles = (numpy.random.random(3) - 0.5) * (2*math.pi)
    >>> trans = numpy.random.random(3) - 0.5
    >>> persp = numpy.random.random(4) - 0.5
    >>> M0 = compose_matrix(scale, shear, angles, trans, persp)
    >>> result = decompose_matrix(M0)
    >>> M1 = compose_matrix(*result)
    >>> is_same_transform(M0, M1)
    True
    """
    # Collect the individual factor matrices in application order:
    # perspective, translation, rotation, shear, scale.
    factors = []
    if perspective is not None:
        P = numpy.identity(4)
        P[3, :] = perspective[:4]
        factors.append(P)
    if translate is not None:
        T = numpy.identity(4)
        T[:3, 3] = translate[:3]
        factors.append(T)
    if angles is not None:
        factors.append(euler_matrix(angles[0], angles[1], angles[2], 'sxyz'))
    if shear is not None:
        Z = numpy.identity(4)
        Z[0, 1], Z[0, 2], Z[1, 2] = shear[0], shear[1], shear[2]
        factors.append(Z)
    if scale is not None:
        factors.append(numpy.diag([scale[0], scale[1], scale[2], 1.0]))
    M = numpy.identity(4)
    for factor in factors:
        M = numpy.dot(M, factor)
    # Normalize the homogeneous part.
    M /= M[3, 3]
    return M
def orthogonalization_matrix(lengths, angles):
    """Return orthogonalization matrix for crystallographic cell coordinates.

    Angles are expected in degrees.
    The de-orthogonalization matrix is the inverse.

    >>> O = orthogonalization_matrix([10, 10, 10], [90, 90, 90])
    >>> numpy.allclose(O[:3, :3], numpy.identity(3, float) * 10)
    True
    >>> O = orthogonalization_matrix([9.8, 12.0, 15.5], [87.2, 80.7, 69.7])
    >>> numpy.allclose(numpy.sum(O), 43.063229)
    True
    """
    a, b, c = lengths
    # Convert the cell angles alpha, beta, gamma to radians.
    al, be, ga = numpy.radians(angles)
    sina = math.sin(al)
    sinb = math.sin(be)
    cosa = math.cos(al)
    cosb = math.cos(be)
    cosg = math.cos(ga)
    # Auxiliary cosine term of the reciprocal-cell angle.
    co = (cosa * cosb - cosg) / (sina * sinb)
    return numpy.array([
        [ a*sinb*math.sqrt(1.0-co*co),  0.0,    0.0, 0.0],
        [-a*sinb*co,                    b*sina, 0.0, 0.0],
        [ a*cosb,                       b*cosa, c,   0.0],
        [ 0.0,                          0.0,    0.0, 1.0]])
def affine_matrix_from_points(v0, v1, shear=True, scale=True, usesvd=True):
    """Return affine transform matrix to register two point sets.

    v0 and v1 are shape (ndims, \*) arrays of at least ndims non-homogeneous
    coordinates, where ndims is the dimensionality of the coordinate space.
    If shear is False, a similarity transformation matrix is returned.
    If also scale is False, a rigid/Eucledian transformation matrix
    is returned.
    By default the algorithm by Hartley and Zissermann [15] is used.
    If usesvd is True, similarity and Eucledian transformation matrices
    are calculated by minimizing the weighted sum of squared deviations
    (RMSD) according to the algorithm by Kabsch [8].
    Otherwise, and if ndims is 3, the quaternion based algorithm by Horn [9]
    is used, which is slower when using this Python implementation.
    The returned matrix performs rotation, translation and uniform scaling
    (if specified).

    >>> v0 = [[0, 1031, 1031, 0], [0, 0, 1600, 1600]]
    >>> v1 = [[675, 826, 826, 677], [55, 52, 281, 277]]
    >>> affine_matrix_from_points(v0, v1)
    array([[   0.14549,    0.00062,  675.50008],
           [   0.00048,    0.14094,   53.24971],
           [   0.     ,    0.     ,    1.     ]])
    >>> T = translation_matrix(numpy.random.random(3)-0.5)
    >>> R = random_rotation_matrix(numpy.random.random(3))
    >>> S = scale_matrix(random.random())
    >>> M = concatenate_matrices(T, R, S)
    >>> v0 = (numpy.random.rand(4, 100) - 0.5) * 20
    >>> v0[3] = 1
    >>> v1 = numpy.dot(M, v0)
    >>> v0[:3] += numpy.random.normal(0, 1e-8, 300).reshape(3, -1)
    >>> M = affine_matrix_from_points(v0[:3], v1[:3])
    >>> numpy.allclose(v1, numpy.dot(M, v0))
    True

    More examples in superimposition_matrix()
    """
    # Copies are required: both point sets are recentered in place below.
    v0 = numpy.array(v0, dtype=numpy.float64, copy=True)
    v1 = numpy.array(v1, dtype=numpy.float64, copy=True)
    ndims = v0.shape[0]
    if ndims < 2 or v0.shape[1] < ndims or v0.shape != v1.shape:
        raise ValueError("input arrays are of wrong shape or type")
    # move centroids to origin
    t0 = -numpy.mean(v0, axis=1)
    M0 = numpy.identity(ndims+1)
    M0[:ndims, ndims] = t0
    v0 += t0.reshape(ndims, 1)
    t1 = -numpy.mean(v1, axis=1)
    M1 = numpy.identity(ndims+1)
    M1[:ndims, ndims] = t1
    v1 += t1.reshape(ndims, 1)
    if shear:
        # Affine transformation (Hartley & Zissermann): solve for the
        # linear map via SVD of the stacked coordinates.
        A = numpy.concatenate((v0, v1), axis=0)
        u, s, vh = numpy.linalg.svd(A.T)
        vh = vh[:ndims].T
        B = vh[:ndims]
        C = vh[ndims:2*ndims]
        t = numpy.dot(C, numpy.linalg.pinv(B))
        t = numpy.concatenate((t, numpy.zeros((ndims, 1))), axis=1)
        M = numpy.vstack((t, ((0.0,)*ndims) + (1.0,)))
    elif usesvd or ndims != 3:
        # Rigid transformation via SVD of covariance matrix (Kabsch)
        u, s, vh = numpy.linalg.svd(numpy.dot(v1, v0.T))
        # rotation matrix from SVD orthonormal bases
        R = numpy.dot(u, vh)
        if numpy.linalg.det(R) < 0.0:
            # R does not constitute right handed system: flip last axis
            R -= numpy.outer(u[:, ndims-1], vh[ndims-1, :]*2.0)
            s[-1] *= -1.0
        # homogeneous transformation matrix
        M = numpy.identity(ndims+1)
        M[:ndims, :ndims] = R
    else:
        # Rigid transformation matrix via quaternion (Horn, 3D only)
        # compute symmetric matrix N
        xx, yy, zz = numpy.sum(v0 * v1, axis=1)
        xy, yz, zx = numpy.sum(v0 * numpy.roll(v1, -1, axis=0), axis=1)
        xz, yx, zy = numpy.sum(v0 * numpy.roll(v1, -2, axis=0), axis=1)
        N = [[xx+yy+zz, 0.0, 0.0, 0.0],
             [yz-zy, xx-yy-zz, 0.0, 0.0],
             [zx-xz, xy+yx, yy-xx-zz, 0.0],
             [xy-yx, zx+xz, yz+zy, zz-xx-yy]]
        # quaternion: eigenvector corresponding to most positive eigenvalue
        w, V = numpy.linalg.eigh(N)
        q = V[:, numpy.argmax(w)]
        q /= vector_norm(q)  # unit quaternion
        # homogeneous transformation matrix
        M = quaternion_matrix(q)
    if scale and not shear:
        # Affine transformation; scale is ratio of RMS deviations from centroid
        v0 *= v0
        v1 *= v1
        M[:ndims, :ndims] *= math.sqrt(numpy.sum(v1) / numpy.sum(v0))
    # move centroids back
    M = numpy.dot(numpy.linalg.inv(M1), numpy.dot(M, M0))
    M /= M[ndims, ndims]
    return M
def superimposition_matrix(v0, v1, scale=False, usesvd=True):
    """Return matrix to transform given 3D point set into second point set.

    v0 and v1 are shape (3, \*) or (4, \*) arrays of at least 3 points.
    The parameters scale and usesvd are explained in the more general
    affine_matrix_from_points function.
    The returned matrix is a similarity or Eucledian transformation matrix.
    This function has a fast C implementation in transformations.c.

    >>> v0 = numpy.random.rand(3, 10)
    >>> M = superimposition_matrix(v0, v0)
    >>> numpy.allclose(M, numpy.identity(4))
    True
    >>> R = random_rotation_matrix(numpy.random.random(3))
    >>> v0 = [[1,0,0], [0,1,0], [0,0,1], [1,1,1]]
    >>> v1 = numpy.dot(R, v0)
    >>> M = superimposition_matrix(v0, v1)
    >>> numpy.allclose(v1, numpy.dot(M, v0))
    True
    >>> v0 = (numpy.random.rand(4, 100) - 0.5) * 20
    >>> v0[3] = 1
    >>> v1 = numpy.dot(R, v0)
    >>> M = superimposition_matrix(v0, v1)
    >>> numpy.allclose(v1, numpy.dot(M, v0))
    True
    >>> S = scale_matrix(random.random())
    >>> T = translation_matrix(numpy.random.random(3)-0.5)
    >>> M = concatenate_matrices(T, R, S)
    >>> v1 = numpy.dot(M, v0)
    >>> v0[:3] += numpy.random.normal(0, 1e-9, 300).reshape(3, -1)
    >>> M = superimposition_matrix(v0, v1, scale=True)
    >>> numpy.allclose(v1, numpy.dot(M, v0))
    True
    >>> M = superimposition_matrix(v0, v1, scale=True, usesvd=False)
    >>> numpy.allclose(v1, numpy.dot(M, v0))
    True
    >>> v = numpy.empty((4, 100, 3))
    >>> v[:, :, 0] = v0
    >>> M = superimposition_matrix(v0, v1, scale=True, usesvd=False)
    >>> numpy.allclose(v1, numpy.dot(M, v[:, :, 0]))
    True
    """
    # Strip a possible homogeneous coordinate row; registration is done on
    # the 3D coordinates only, without shear.
    p0 = numpy.array(v0, dtype=numpy.float64, copy=False)[:3]
    p1 = numpy.array(v1, dtype=numpy.float64, copy=False)[:3]
    return affine_matrix_from_points(p0, p1, shear=False,
                                     scale=scale, usesvd=usesvd)
def euler_matrix(ai, aj, ak, axes='sxyz'):
    """Return homogeneous rotation matrix from Euler angles and axis sequence.

    ai, aj, ak : Euler's roll, pitch and yaw angles
    axes : One of 24 axis sequences as string or encoded tuple

    >>> R = euler_matrix(1, 2, 3, 'syxz')
    >>> numpy.allclose(numpy.sum(R[0]), -1.34786452)
    True
    >>> R = euler_matrix(1, 2, 3, (0, 1, 0, 1))
    >>> numpy.allclose(numpy.sum(R[0]), -0.383436184)
    True
    >>> ai, aj, ak = (4*math.pi) * (numpy.random.random(3) - 0.5)
    >>> for axes in _AXES2TUPLE.keys():
    ...    R = euler_matrix(ai, aj, ak, axes)
    >>> for axes in _TUPLE2AXES.keys():
    ...    R = euler_matrix(ai, aj, ak, axes)
    """
    try:
        # Accept axis strings case-insensitively, consistent with
        # euler_from_matrix and quaternion_from_euler; tuples fall
        # through to the except branch via AttributeError.
        firstaxis, parity, repetition, frame = _AXES2TUPLE[axes.lower()]
    except (AttributeError, KeyError):
        _TUPLE2AXES[axes]  # validation
        firstaxis, parity, repetition, frame = axes
    # i, j, k: the three rotation axis indices for this sequence
    i = firstaxis
    j = _NEXT_AXIS[i+parity]
    k = _NEXT_AXIS[i-parity+1]
    if frame:
        # rotating frame: swap first and last angles
        ai, ak = ak, ai
    if parity:
        ai, aj, ak = -ai, -aj, -ak
    si, sj, sk = math.sin(ai), math.sin(aj), math.sin(ak)
    ci, cj, ck = math.cos(ai), math.cos(aj), math.cos(ak)
    cc, cs = ci*ck, ci*sk
    sc, ss = si*ck, si*sk
    M = numpy.identity(4)
    if repetition:
        # sequences like 'sxyx' where the first axis repeats
        M[i, i] = cj
        M[i, j] = sj*si
        M[i, k] = sj*ci
        M[j, i] = sj*sk
        M[j, j] = -cj*ss+cc
        M[j, k] = -cj*cs-sc
        M[k, i] = -sj*ck
        M[k, j] = cj*sc+cs
        M[k, k] = cj*cc-ss
    else:
        M[i, i] = cj*ck
        M[i, j] = sj*sc-cs
        M[i, k] = sj*cc+ss
        M[j, i] = cj*sk
        M[j, j] = sj*ss+cc
        M[j, k] = sj*cs-sc
        M[k, i] = -sj
        M[k, j] = cj*si
        M[k, k] = cj*ci
    return M
def euler_from_matrix(matrix, axes='sxyz'):
    """Return Euler angles from rotation matrix for specified axis sequence.

    axes : One of 24 axis sequences as string or encoded tuple

    Note that many Euler angle triplets can describe one matrix.

    >>> R0 = euler_matrix(1, 2, 3, 'syxz')
    >>> al, be, ga = euler_from_matrix(R0, 'syxz')
    >>> R1 = euler_matrix(al, be, ga, 'syxz')
    >>> numpy.allclose(R0, R1)
    True
    >>> angles = (4*math.pi) * (numpy.random.random(3) - 0.5)
    >>> for axes in _AXES2TUPLE.keys():
    ...    R0 = euler_matrix(axes=axes, *angles)
    ...    R1 = euler_matrix(axes=axes, *euler_from_matrix(R0, axes))
    ...    if not numpy.allclose(R0, R1): print(axes, "failed")
    """
    try:
        # strings are matched case-insensitively; tuples raise
        # AttributeError on .lower() and are validated below
        firstaxis, parity, repetition, frame = _AXES2TUPLE[axes.lower()]
    except (AttributeError, KeyError):
        _TUPLE2AXES[axes]  # validation
        firstaxis, parity, repetition, frame = axes
    # i, j, k: the three rotation axis indices for this sequence
    i = firstaxis
    j = _NEXT_AXIS[i+parity]
    k = _NEXT_AXIS[i-parity+1]
    M = numpy.array(matrix, dtype=numpy.float64, copy=False)[:3, :3]
    if repetition:
        # repeated first axis (e.g. 'sxyx'); guard against gimbal lock
        # when sy ~ 0
        sy = math.sqrt(M[i, j]*M[i, j] + M[i, k]*M[i, k])
        if sy > _EPS:
            ax = math.atan2( M[i, j], M[i, k])
            ay = math.atan2( sy, M[i, i])
            az = math.atan2( M[j, i], -M[k, i])
        else:
            ax = math.atan2(-M[j, k], M[j, j])
            ay = math.atan2( sy, M[i, i])
            az = 0.0
    else:
        # three distinct axes; guard against gimbal lock when cy ~ 0
        cy = math.sqrt(M[i, i]*M[i, i] + M[j, i]*M[j, i])
        if cy > _EPS:
            ax = math.atan2( M[k, j], M[k, k])
            ay = math.atan2(-M[k, i], cy)
            az = math.atan2( M[j, i], M[i, i])
        else:
            ax = math.atan2(-M[j, k], M[j, j])
            ay = math.atan2(-M[k, i], cy)
            az = 0.0
    if parity:
        ax, ay, az = -ax, -ay, -az
    if frame:
        # rotating frame: swap first and last angles back
        ax, az = az, ax
    return ax, ay, az
def euler_from_quaternion(quaternion, axes='sxyz'):
    """Return Euler angles from quaternion for specified axis sequence.

    >>> angles = euler_from_quaternion([0.99810947, 0.06146124, 0, 0])
    >>> numpy.allclose(angles, [0.123, 0, 0])
    True
    """
    # Convert to a rotation matrix first, then extract the angles.
    M = quaternion_matrix(quaternion)
    return euler_from_matrix(M, axes)
def quaternion_from_euler(ai, aj, ak, axes='sxyz'):
    """Return quaternion from Euler angles and axis sequence.

    ai, aj, ak : Euler's roll, pitch and yaw angles
    axes : One of 24 axis sequences as string or encoded tuple

    The returned quaternion is in (w, x, y, z) order.

    >>> q = quaternion_from_euler(1, 2, 3, 'ryxz')
    >>> numpy.allclose(q, [0.435953, 0.310622, -0.718287, 0.444435])
    True
    """
    try:
        # strings are matched case-insensitively; tuples raise
        # AttributeError on .lower() and are validated below
        firstaxis, parity, repetition, frame = _AXES2TUPLE[axes.lower()]
    except (AttributeError, KeyError):
        _TUPLE2AXES[axes]  # validation
        firstaxis, parity, repetition, frame = axes
    # i, j, k: quaternion component indices (offset by 1 past w)
    i = firstaxis + 1
    j = _NEXT_AXIS[i+parity-1] + 1
    k = _NEXT_AXIS[i-parity] + 1
    if frame:
        # rotating frame: swap first and last angles
        ai, ak = ak, ai
    if parity:
        aj = -aj
    # quaternions use half angles
    ai /= 2.0
    aj /= 2.0
    ak /= 2.0
    ci = math.cos(ai)
    si = math.sin(ai)
    cj = math.cos(aj)
    sj = math.sin(aj)
    ck = math.cos(ak)
    sk = math.sin(ak)
    cc = ci*ck
    cs = ci*sk
    sc = si*ck
    ss = si*sk
    q = numpy.empty((4, ))
    if repetition:
        # repeated first axis (e.g. 'sxyx')
        q[0] = cj*(cc - ss)
        q[i] = cj*(cs + sc)
        q[j] = sj*(cc + ss)
        q[k] = sj*(cs - sc)
    else:
        q[0] = cj*cc + sj*ss
        q[i] = cj*sc - sj*cs
        q[j] = cj*ss + sj*cc
        q[k] = cj*cs - sj*sc
    if parity:
        q[j] *= -1.0
    return q
def quaternion_about_axis(angle, axis):
    """Return the (w, x, y, z) quaternion for a rotation of `angle` radians
    about the given 3-vector `axis`.

    >>> q = quaternion_about_axis(0.123, [1, 0, 0])
    >>> numpy.allclose(q, [0.99810947, 0.06146124, 0, 0])
    True
    """
    half = angle / 2.0
    q = numpy.array([0.0, axis[0], axis[1], axis[2]])
    norm = vector_norm(q)
    if norm > _EPS:
        # scale the (normalized) axis by sin of the half angle
        q *= math.sin(half) / norm
    q[0] = math.cos(half)
    return q
def quaternion_matrix(quaternion):
    """Return homogeneous rotation matrix from quaternion.

    The quaternion is expected in (w, x, y, z) order and need not be
    normalized; near-zero quaternions yield the identity.

    >>> M = quaternion_matrix([0.99810947, 0.06146124, 0, 0])
    >>> numpy.allclose(M, rotation_matrix(0.123, [1, 0, 0]))
    True
    >>> M = quaternion_matrix([1, 0, 0, 0])
    >>> numpy.allclose(M, numpy.identity(4))
    True
    >>> M = quaternion_matrix([0, 1, 0, 0])
    >>> numpy.allclose(M, numpy.diag([1, -1, -1, 1]))
    True
    """
    q = numpy.array(quaternion, dtype=numpy.float64, copy=True)
    n = numpy.dot(q, q)
    if n < _EPS:
        # degenerate (near-zero) quaternion: no rotation
        return numpy.identity(4)
    # normalize and pre-scale so the outer products below carry the
    # factor of 2 from the standard quaternion-to-matrix formula
    q *= math.sqrt(2.0 / n)
    q = numpy.outer(q, q)
    return numpy.array([
        [1.0-q[2, 2]-q[3, 3],     q[1, 2]-q[3, 0],     q[1, 3]+q[2, 0], 0.0],
        [    q[1, 2]+q[3, 0], 1.0-q[1, 1]-q[3, 3],     q[2, 3]-q[1, 0], 0.0],
        [    q[1, 3]-q[2, 0],     q[2, 3]+q[1, 0], 1.0-q[1, 1]-q[2, 2], 0.0],
        [                0.0,                 0.0,                 0.0, 1.0]])
def quaternion_from_matrix(matrix, isprecise=False):
    """Return quaternion from rotation matrix.

    If isprecise is True, the input matrix is assumed to be a precise rotation
    matrix and a faster algorithm is used.
    The returned quaternion is in (w, x, y, z) order with q[0] >= 0.

    >>> q = quaternion_from_matrix(numpy.identity(4), True)
    >>> numpy.allclose(q, [1, 0, 0, 0])
    True
    >>> q = quaternion_from_matrix(numpy.diag([1, -1, -1, 1]))
    >>> numpy.allclose(q, [0, 1, 0, 0]) or numpy.allclose(q, [0, -1, 0, 0])
    True
    >>> R = rotation_matrix(0.123, (1, 2, 3))
    >>> q = quaternion_from_matrix(R, True)
    >>> numpy.allclose(q, [0.9981095, 0.0164262, 0.0328524, 0.0492786])
    True
    >>> R = [[-0.545, 0.797, 0.260, 0], [0.733, 0.603, -0.313, 0],
    ...      [-0.407, 0.021, -0.913, 0], [0, 0, 0, 1]]
    >>> q = quaternion_from_matrix(R)
    >>> numpy.allclose(q, [0.19069, 0.43736, 0.87485, -0.083611])
    True
    >>> R = [[0.395, 0.362, 0.843, 0], [-0.626, 0.796, -0.056, 0],
    ...      [-0.677, -0.498, 0.529, 0], [0, 0, 0, 1]]
    >>> q = quaternion_from_matrix(R)
    >>> numpy.allclose(q, [0.82336615, -0.13610694, 0.46344705, -0.29792603])
    True
    >>> R = random_rotation_matrix()
    >>> q = quaternion_from_matrix(R)
    >>> is_same_transform(R, quaternion_matrix(q))
    True
    """
    M = numpy.array(matrix, dtype=numpy.float64, copy=False)[:4, :4]
    if isprecise:
        # Shepperd-style branch selection on the diagonal; valid only for
        # exact rotation matrices.
        q = numpy.empty((4, ))
        t = numpy.trace(M)
        if t > M[3, 3]:
            q[0] = t
            q[3] = M[1, 0] - M[0, 1]
            q[2] = M[0, 2] - M[2, 0]
            q[1] = M[2, 1] - M[1, 2]
        else:
            # pick the largest diagonal element for numerical stability
            i, j, k = 1, 2, 3
            if M[1, 1] > M[0, 0]:
                i, j, k = 2, 3, 1
            if M[2, 2] > M[i, i]:
                i, j, k = 3, 1, 2
            t = M[i, i] - (M[j, j] + M[k, k]) + M[3, 3]
            q[i] = t
            q[j] = M[i, j] + M[j, i]
            q[k] = M[k, i] + M[i, k]
            q[3] = M[k, j] - M[j, k]
        q *= 0.5 / math.sqrt(t * M[3, 3])
    else:
        # Robust path: the quaternion is the eigenvector of a symmetric
        # 4x4 matrix built from the rotation part, so small numerical
        # imperfections in M are tolerated.
        m00 = M[0, 0]
        m01 = M[0, 1]
        m02 = M[0, 2]
        m10 = M[1, 0]
        m11 = M[1, 1]
        m12 = M[1, 2]
        m20 = M[2, 0]
        m21 = M[2, 1]
        m22 = M[2, 2]
        # symmetric matrix K
        K = numpy.array([[m00-m11-m22, 0.0, 0.0, 0.0],
                         [m01+m10, m11-m00-m22, 0.0, 0.0],
                         [m02+m20, m12+m21, m22-m00-m11, 0.0],
                         [m21-m12, m02-m20, m10-m01, m00+m11+m22]])
        K /= 3.0
        # quaternion is eigenvector of K that corresponds to largest eigenvalue
        w, V = numpy.linalg.eigh(K)
        q = V[[3, 0, 1, 2], numpy.argmax(w)]
    if q[0] < 0.0:
        # canonicalize sign: prefer non-negative scalar part
        numpy.negative(q, q)
    return q
def quaternion_multiply(quaternion1, quaternion0):
    """Return the Hamilton product quaternion1 * quaternion0.

    Quaternions are in (w, x, y, z) order.

    >>> q = quaternion_multiply([4, 1, -2, 3], [8, -5, 6, 7])
    >>> numpy.allclose(q, [28, -44, -14, 48])
    True
    """
    w0, x0, y0, z0 = quaternion0
    w1, x1, y1, z1 = quaternion1
    # Expand the Hamilton product component by component.
    w = w1*w0 - x1*x0 - y1*y0 - z1*z0
    x = w1*x0 + x1*w0 + y1*z0 - z1*y0
    y = w1*y0 - x1*z0 + y1*w0 + z1*x0
    z = w1*z0 + x1*y0 - y1*x0 + z1*w0
    return numpy.array([w, x, y, z], dtype=numpy.float64)
def quaternion_conjugate(quaternion):
    """Return the conjugate of a quaternion.

    The scalar (w) part is preserved; the vector part is negated.

    >>> q0 = random_quaternion()
    >>> q1 = quaternion_conjugate(q0)
    >>> q1[0] == q0[0] and all(q1[1:] == -q0[1:])
    True
    """
    out = numpy.array(quaternion, dtype=numpy.float64, copy=True)
    out[1:] *= -1.0  # negate the vector part in place
    return out
def quaternion_inverse(quaternion):
    """Return the multiplicative inverse of a quaternion.

    The inverse is the conjugate divided by the squared norm, so that
    q * q**-1 == [1, 0, 0, 0].
    """
    q = numpy.array(quaternion, dtype=numpy.float64, copy=True)
    # Norm is unchanged by conjugation, so it can be taken up front.
    norm_sq = numpy.dot(q, q)
    q[1:] *= -1.0  # conjugate
    return q / norm_sq
def quaternion_real(quaternion):
    """Return the scalar (real) part of a quaternion as a plain float.

    >>> quaternion_real([3, 0, 1, 2])
    3.0
    """
    w = quaternion[0]
    return float(w)
def quaternion_imag(quaternion):
    """Return the vector (imaginary) part of a quaternion as a float64 array.

    >>> quaternion_imag([3, 0, 1, 2])
    array([ 0.,  1.,  2.])
    """
    vector_part = quaternion[1:4]
    return numpy.array(vector_part, dtype=numpy.float64, copy=True)
def quaternion_slerp(quat0, quat1, fraction, spin=0, shortestpath=True):
    """Return spherical linear interpolation between two quaternions.
    >>> q0 = random_quaternion()
    >>> q1 = random_quaternion()
    >>> q = quaternion_slerp(q0, q1, 0)
    >>> numpy.allclose(q, q0)
    True
    >>> q = quaternion_slerp(q0, q1, 1, 1)
    >>> numpy.allclose(q, q1)
    True
    >>> q = quaternion_slerp(q0, q1, 0.5)
    >>> angle = math.acos(numpy.dot(q0, q))
    >>> numpy.allclose(2, math.acos(numpy.dot(q0, q1)) / angle) or \
        numpy.allclose(2, math.acos(-numpy.dot(q0, q1)) / angle)
    True
    """
    # Work on normalized copies so interpolation stays on the unit sphere.
    q0 = unit_vector(quat0[:4])
    q1 = unit_vector(quat1[:4])
    # Exact endpoints: return them directly, avoiding division by sin(0).
    if fraction == 0.0:
        return q0
    elif fraction == 1.0:
        return q1
    d = numpy.dot(q0, q1)
    if abs(abs(d) - 1.0) < _EPS:
        # Quaternions are (anti)parallel: nothing to interpolate.
        return q0
    if shortestpath and d < 0.0:
        # invert rotation: q and -q describe the same rotation, so flip q1
        # to take the shorter great-circle arc.
        d = -d
        numpy.negative(q1, q1)
    # spin adds extra full half-turns to the interpolation arc.
    angle = math.acos(d) + spin * math.pi
    if abs(angle) < _EPS:
        return q0
    # Standard slerp weights: sin((1-f)*a)/sin(a) and sin(f*a)/sin(a).
    isin = 1.0 / math.sin(angle)
    q0 *= math.sin((1.0 - fraction) * angle) * isin
    q1 *= math.sin(fraction * angle) * isin
    q0 += q1
    return q0
def random_quaternion(rand=None):
    """Return a uniformly distributed random unit quaternion.

    rand : array like or None
        Three independent random variables, each uniform in [0, 1).
        Drawn from numpy.random when omitted.

    >>> q = random_quaternion()
    >>> numpy.allclose(1, vector_norm(q))
    True
    >>> q = random_quaternion(numpy.random.random(3))
    >>> len(q.shape), q.shape[0]==4
    (1, True)
    """
    if rand is None:
        rand = numpy.random.rand(3)
    else:
        assert len(rand) == 3
    # Shoemake's subgroup algorithm for uniform random rotations.
    root_one_minus = numpy.sqrt(1.0 - rand[0])
    root = numpy.sqrt(rand[0])
    two_pi = math.pi * 2.0
    theta1 = two_pi * rand[1]
    theta2 = two_pi * rand[2]
    return numpy.array([numpy.cos(theta2) * root,
                        numpy.sin(theta1) * root_one_minus,
                        numpy.cos(theta1) * root_one_minus,
                        numpy.sin(theta2) * root])
def random_rotation_matrix(rand=None):
    """Return uniform random rotation matrix.
    rand: array like
        Three independent random variables that are uniformly distributed
        between 0 and 1 for each returned quaternion.
    >>> R = random_rotation_matrix()
    >>> numpy.allclose(numpy.dot(R.T, R), numpy.identity(4))
    True
    """
    # Sample a uniform unit quaternion, then convert it to a 4x4
    # homogeneous rotation matrix.
    return quaternion_matrix(random_quaternion(rand))
class Arcball(object):
    """Virtual Trackball Control.

    Implements Shoemake's arcball: 2D window-coordinate drags are mapped
    onto a virtual sphere and accumulated as quaternion rotations,
    optionally constrained to a set of axes.

    >>> ball = Arcball()
    >>> ball = Arcball(initial=numpy.identity(4))
    >>> ball.place([320, 320], 320)
    >>> ball.down([500, 250])
    >>> ball.drag([475, 275])
    >>> R = ball.matrix()
    >>> numpy.allclose(numpy.sum(R), 3.90583455)
    True
    >>> ball = Arcball(initial=[1, 0, 0, 0])
    >>> ball.place([320, 320], 320)
    >>> ball.setaxes([1, 1, 0], [-1, 1, 0])
    >>> ball.setconstrain(True)
    >>> ball.down([400, 200])
    >>> ball.drag([200, 400])
    >>> R = ball.matrix()
    >>> numpy.allclose(numpy.sum(R), 0.2055924)
    True
    >>> ball.next()
    """

    def __init__(self, initial=None):
        """Initialize virtual trackball control.

        initial : quaternion or rotation matrix
        """
        self._axis = None      # active constraint axis during a drag
        self._axes = None      # candidate constraint axes (unit vectors)
        self._radius = 1.0
        self._center = [0.0, 0.0]
        self._vdown = numpy.array([0.0, 0.0, 1.0])  # sphere point at mouse-down
        self._constrain = False
        if initial is None:
            # Identity rotation.
            self._qdown = numpy.array([1.0, 0.0, 0.0, 0.0])
        else:
            initial = numpy.array(initial, dtype=numpy.float64)
            if initial.shape == (4, 4):
                # Rotation matrix given: convert to quaternion form.
                self._qdown = quaternion_from_matrix(initial)
            elif initial.shape == (4, ):
                # Quaternion given: normalize it.
                initial /= vector_norm(initial)
                self._qdown = initial
            else:
                raise ValueError("initial not a quaternion or matrix")
        self._qnow = self._qpre = self._qdown

    def place(self, center, radius):
        """Place Arcball, e.g. when window size changes.

        center : sequence[2]
            Window coordinates of trackball center.
        radius : float
            Radius of trackball in window coordinates.
        """
        self._radius = float(radius)
        self._center[0] = center[0]
        self._center[1] = center[1]

    def setaxes(self, *axes):
        """Set axes to constrain rotations."""
        if axes is None:
            self._axes = None
        else:
            self._axes = [unit_vector(axis) for axis in axes]

    def setconstrain(self, constrain):
        """Set state of constrain to axis mode."""
        # Equality with True (not bool()): only True/1 enable the mode.
        self._constrain = constrain == True

    def getconstrain(self):
        """Return state of constrain to axis mode."""
        return self._constrain

    def down(self, point):
        """Set initial cursor window coordinates and pick constrain-axis."""
        self._vdown = arcball_map_to_sphere(point, self._center, self._radius)
        self._qdown = self._qpre = self._qnow
        if self._constrain and self._axes is not None:
            # Snap the starting point onto the arc of the nearest axis.
            self._axis = arcball_nearest_axis(self._vdown, self._axes)
            self._vdown = arcball_constrain_to_axis(self._vdown, self._axis)
        else:
            self._axis = None

    def drag(self, point):
        """Update current cursor window coordinates."""
        vnow = arcball_map_to_sphere(point, self._center, self._radius)
        if self._axis is not None:
            vnow = arcball_constrain_to_axis(vnow, self._axis)
        self._qpre = self._qnow
        # Rotation axis is vdown x vnow; its squared norm is ~sin^2(angle).
        t = numpy.cross(self._vdown, vnow)
        if numpy.dot(t, t) < _EPS:
            # Degenerate drag (no movement): keep the mouse-down rotation.
            self._qnow = self._qdown
        else:
            q = [numpy.dot(self._vdown, vnow), t[0], t[1], t[2]]
            self._qnow = quaternion_multiply(q, self._qdown)

    def next(self, acceleration=0.0):
        """Continue rotation in direction of last drag."""
        # Extrapolate past qnow by slerping with fraction > 1.
        # NOTE(review): the fourth positional argument binds to *spin*,
        # not shortestpath — confirm against upstream transformations.py.
        q = quaternion_slerp(self._qpre, self._qnow, 2.0+acceleration, False)
        self._qpre, self._qnow = self._qnow, q

    def matrix(self):
        """Return homogeneous rotation matrix."""
        return quaternion_matrix(self._qnow)
def arcball_map_to_sphere(point, center, radius):
    """Project window coordinates onto the unit arcball sphere.

    Points inside the ball are lifted to the front hemisphere; points
    outside are snapped to the sphere's silhouette (z == 0).
    """
    dx = (point[0] - center[0]) / radius
    dy = (center[1] - point[1]) / radius
    r2 = dx * dx + dy * dy
    if r2 > 1.0:
        # Cursor is outside the ball: normalize onto the equator.
        scale = 1.0 / math.sqrt(r2)
        return numpy.array([dx * scale, dy * scale, 0.0])
    # Inside the ball: lift onto the hemisphere facing the viewer.
    return numpy.array([dx, dy, math.sqrt(1.0 - r2)])
def arcball_constrain_to_axis(point, axis):
    """Return sphere point perpendicular to axis."""
    v = numpy.array(point, dtype=numpy.float64, copy=True)
    a = numpy.array(axis, dtype=numpy.float64, copy=True)
    v -= a * numpy.dot(a, v)  # on plane: remove the component along the axis
    n = vector_norm(v)
    if n > _EPS:
        # Keep the point on the front hemisphere (z >= 0), then renormalize.
        if v[2] < 0.0:
            numpy.negative(v, v)
        v /= n
        return v
    # Degenerate case: point was (anti)parallel to the axis; return any
    # unit vector perpendicular to it.
    if a[2] == 1.0:
        return numpy.array([1.0, 0.0, 0.0])
    return unit_vector([-a[1], a[0], 0.0])
def arcball_nearest_axis(point, axes):
    """Return the axis whose constraint arc lies nearest to *point*.

    Nearness is measured by the dot product between the point and its
    projection onto each axis' constraint arc; the first best match wins.
    """
    pt = numpy.array(point, dtype=numpy.float64, copy=False)
    best_axis = None
    best_score = -1.0
    for candidate in axes:
        score = numpy.dot(arcball_constrain_to_axis(pt, candidate), pt)
        if score > best_score:
            best_score = score
            best_axis = candidate
    return best_axis
# epsilon for testing whether a number is close to zero
_EPS = numpy.finfo(float).eps * 4.0

# axis sequences for Euler angles
_NEXT_AXIS = [1, 2, 0, 1]

# map axes strings to/from tuples of inner axis, parity, repetition, frame
# Key format: 's' (static) or 'r' (rotating) frame followed by the axis order.
_AXES2TUPLE = {
    'sxyz': (0, 0, 0, 0), 'sxyx': (0, 0, 1, 0), 'sxzy': (0, 1, 0, 0),
    'sxzx': (0, 1, 1, 0), 'syzx': (1, 0, 0, 0), 'syzy': (1, 0, 1, 0),
    'syxz': (1, 1, 0, 0), 'syxy': (1, 1, 1, 0), 'szxy': (2, 0, 0, 0),
    'szxz': (2, 0, 1, 0), 'szyx': (2, 1, 0, 0), 'szyz': (2, 1, 1, 0),
    'rzyx': (0, 0, 0, 1), 'rxyx': (0, 0, 1, 1), 'ryzx': (0, 1, 0, 1),
    'rxzx': (0, 1, 1, 1), 'rxzy': (1, 0, 0, 1), 'ryzy': (1, 0, 1, 1),
    'rzxy': (1, 1, 0, 1), 'ryxy': (1, 1, 1, 1), 'ryxz': (2, 0, 0, 1),
    'rzxz': (2, 0, 1, 1), 'rxyz': (2, 1, 0, 1), 'rzyz': (2, 1, 1, 1)}

# Reverse lookup: tuple encoding back to its axis string.
_TUPLE2AXES = dict((v, k) for k, v in _AXES2TUPLE.items())
def vector_norm(data, axis=None, out=None):
    """Return the Euclidean norm of *data* along *axis*.

    When *out* is given, the norms are written into it and nothing is
    returned (ufunc-style in-place convention).

    >>> v = numpy.random.random(3)
    >>> n = vector_norm(v)
    >>> numpy.allclose(n, numpy.linalg.norm(v))
    True
    >>> v = numpy.random.rand(6, 5, 3)
    >>> n = vector_norm(v, axis=-1)
    >>> numpy.allclose(n, numpy.sqrt(numpy.sum(v*v, axis=2)))
    True
    >>> n = vector_norm(v, axis=1)
    >>> numpy.allclose(n, numpy.sqrt(numpy.sum(v*v, axis=1)))
    True
    >>> v = numpy.random.rand(5, 4, 3)
    >>> n = numpy.empty((5, 3))
    >>> vector_norm(v, axis=1, out=n)
    >>> numpy.allclose(n, numpy.sqrt(numpy.sum(v*v, axis=1)))
    True
    >>> vector_norm([])
    0.0
    >>> vector_norm([1])
    1.0
    """
    arr = numpy.array(data, dtype=numpy.float64, copy=True)
    if out is not None:
        # In-place variant: square, reduce, sqrt, all into *out*.
        arr *= arr
        numpy.sum(arr, axis=axis, out=out)
        numpy.sqrt(out, out)
        return None
    if arr.ndim == 1:
        # Fast scalar path for plain vectors.
        return math.sqrt(numpy.dot(arr, arr))
    arr *= arr
    result = numpy.atleast_1d(numpy.sum(arr, axis=axis))
    numpy.sqrt(result, result)
    return result
def unit_vector(data, axis=None, out=None):
    """Return ndarray normalized by its Euclidean norm along *axis*.

    When *out* is given, normalization happens in place into *out* and
    nothing is returned; otherwise a new normalized array is returned.

    >>> v0 = numpy.random.random(3)
    >>> v1 = unit_vector(v0)
    >>> numpy.allclose(v1, v0 / numpy.linalg.norm(v0))
    True
    >>> v0 = numpy.random.rand(5, 4, 3)
    >>> v1 = unit_vector(v0, axis=-1)
    >>> v2 = v0 / numpy.expand_dims(numpy.sqrt(numpy.sum(v0*v0, axis=2)), 2)
    >>> numpy.allclose(v1, v2)
    True
    >>> v1 = unit_vector(v0, axis=1)
    >>> v2 = v0 / numpy.expand_dims(numpy.sqrt(numpy.sum(v0*v0, axis=1)), 1)
    >>> numpy.allclose(v1, v2)
    True
    >>> v1 = numpy.empty((5, 4, 3))
    >>> unit_vector(v0, axis=1, out=v1)
    >>> numpy.allclose(v1, v2)
    True
    >>> list(unit_vector([]))
    []
    >>> list(unit_vector([1]))
    [1.0]
    """
    if out is None:
        buf = numpy.array(data, dtype=numpy.float64, copy=True)
        if buf.ndim == 1:
            # Plain vector: divide by the scalar norm and return directly.
            buf /= math.sqrt(numpy.dot(buf, buf))
            return buf
    else:
        if out is not data:
            out[:] = numpy.array(data, copy=False)
        buf = out
    length = numpy.atleast_1d(numpy.sum(buf * buf, axis))
    numpy.sqrt(length, length)
    if axis is not None:
        # Re-insert the reduced axis so the norms broadcast back over it.
        length = numpy.expand_dims(length, axis)
    buf /= length
    if out is None:
        return buf
def random_vector(size):
    """Return array of random doubles in the half-open interval [0.0, 1.0).
    >>> v = random_vector(10000)
    >>> numpy.all(v >= 0) and numpy.all(v < 1)
    True
    >>> v0 = random_vector(10)
    >>> v1 = random_vector(10)
    >>> numpy.any(v0 == v1)
    False
    """
    # Thin convenience wrapper around numpy's uniform sampler.
    return numpy.random.random(size)
def vector_product(v0, v1, axis=0):
    """Return vector perpendicular to vectors (cross product along *axis*).
    >>> v = vector_product([2, 0, 0], [0, 3, 0])
    >>> numpy.allclose(v, [0, 0, 6])
    True
    >>> v0 = [[2, 0, 0, 2], [0, 2, 0, 2], [0, 0, 2, 2]]
    >>> v1 = [[3], [0], [0]]
    >>> v = vector_product(v0, v1)
    >>> numpy.allclose(v, [[0, 0, 0, 0], [0, 0, 6, 6], [0, -6, 0, -6]])
    True
    >>> v0 = [[2, 0, 0], [2, 0, 0], [0, 2, 0], [2, 0, 0]]
    >>> v1 = [[0, 3, 0], [0, 0, 3], [0, 0, 3], [3, 3, 3]]
    >>> v = vector_product(v0, v1, axis=1)
    >>> numpy.allclose(v, [[0, 0, 6], [0, -6, 0], [6, 0, 0], [0, -6, 6]])
    True
    """
    # Delegates to numpy.cross; *axis* selects which dimension holds the
    # 3-vectors (numpy.cross's `axis` sets axisa, axisb and axisc at once).
    return numpy.cross(v0, v1, axis=axis)
def angle_between_vectors(v0, v1, directed=True, axis=0):
    """Return angle between vectors.
    If directed is False, the input vectors are interpreted as undirected axes,
    i.e. the maximum angle is pi/2.
    >>> a = angle_between_vectors([1, -2, 3], [-1, 2, -3])
    >>> numpy.allclose(a, math.pi)
    True
    >>> a = angle_between_vectors([1, -2, 3], [-1, 2, -3], directed=False)
    >>> numpy.allclose(a, 0)
    True
    >>> v0 = [[2, 0, 0, 2], [0, 2, 0, 2], [0, 0, 2, 2]]
    >>> v1 = [[3], [0], [0]]
    >>> a = angle_between_vectors(v0, v1)
    >>> numpy.allclose(a, [0, 1.5708, 1.5708, 0.95532])
    True
    >>> v0 = [[2, 0, 0], [2, 0, 0], [0, 2, 0], [2, 0, 0]]
    >>> v1 = [[0, 3, 0], [0, 0, 3], [0, 0, 3], [3, 3, 3]]
    >>> a = angle_between_vectors(v0, v1, axis=1)
    >>> numpy.allclose(a, [1.5708, 1.5708, 1.5708, 0.95532])
    True
    """
    v0 = numpy.array(v0, dtype=numpy.float64, copy=False)
    v1 = numpy.array(v1, dtype=numpy.float64, copy=False)
    # cos(angle) = <v0, v1> / (|v0| * |v1|), computed along the given axis.
    dot = numpy.sum(v0 * v1, axis=axis)
    dot /= vector_norm(v0, axis=axis) * vector_norm(v1, axis=axis)
    # Undirected axes: fold the angle into [0, pi/2] via |cos|.
    return numpy.arccos(dot if directed else numpy.fabs(dot))
def inverse_matrix(matrix):
    """Return inverse of square transformation matrix.
    >>> M0 = random_rotation_matrix()
    >>> M1 = inverse_matrix(M0.T)
    >>> numpy.allclose(M1, numpy.linalg.inv(M0.T))
    True
    >>> for size in range(1, 7):
    ...     M0 = numpy.random.rand(size, size)
    ...     M1 = inverse_matrix(M0)
    ...     if not numpy.allclose(M1, numpy.linalg.inv(M0)): print(size)
    """
    # Thin wrapper kept for API symmetry with the C extension module.
    return numpy.linalg.inv(matrix)
def concatenate_matrices(*matrices):
    """Return the product of a series of 4x4 transformation matrices.

    With no arguments the identity transform is returned; matrices are
    multiplied left to right.

    >>> M = numpy.random.rand(16).reshape((4, 4)) - 0.5
    >>> numpy.allclose(M, concatenate_matrices(M))
    True
    >>> numpy.allclose(numpy.dot(M, M.T), concatenate_matrices(M, M.T))
    True
    """
    result = numpy.identity(4)
    for matrix in matrices:
        result = result.dot(matrix)
    return result
def is_same_transform(matrix0, matrix1):
    """Return True if two homogeneous matrices perform the same transform.

    Both matrices are normalized by their [3, 3] element before the
    elementwise comparison, so uniformly scaled copies compare equal.

    >>> is_same_transform(numpy.identity(4), numpy.identity(4))
    True
    >>> is_same_transform(numpy.identity(4), random_rotation_matrix())
    False
    """
    m0 = numpy.array(matrix0, dtype=numpy.float64, copy=True)
    m1 = numpy.array(matrix1, dtype=numpy.float64, copy=True)
    m0 /= m0[3, 3]
    m1 /= m1[3, 3]
    return numpy.allclose(m0, m1)
def _import_module(name, package=None, warn=True, prefix='_py_', ignore='_'):
    """Try import all public attributes from module into global namespace.
    Existing attributes with name clashes are renamed with prefix.
    Attributes starting with underscore are ignored by default.
    Return True on successful import.
    """
    # NOTE(review): returns None (falsy) rather than False when the
    # import fails — callers relying on an explicit bool should confirm.
    import warnings
    from importlib import import_module
    try:
        if not package:
            module = import_module(name)
        else:
            # Relative import within the given package.
            module = import_module('.' + name, package=package)
    except ImportError:
        if warn:
            warnings.warn("failed to import module %s" % name)
    else:
        for attr in dir(module):
            if ignore and attr.startswith(ignore):
                continue
            if prefix:
                if attr in globals():
                    # Preserve the pure-Python version under a prefixed name
                    # before it gets shadowed below.
                    globals()[prefix + attr] = globals()[attr]
                elif warn:
                    warnings.warn("no Python implementation of " + attr)
            # Install (or shadow) the module's attribute in this namespace.
            globals()[attr] = getattr(module, attr)
        return True
# Replace the pure-Python implementations above with the optional C
# extension's versions when the _transformations module is importable.
_import_module('_transformations')

if __name__ == "__main__":
    # Run the embedded doctests with stable numpy print formatting.
    import doctest
    import random  # used in doctests
    numpy.set_printoptions(suppress=True, precision=5)
    doctest.testmod()
"clyde.fare@gmail.com"
] | clyde.fare@gmail.com |
6969c5a69023c51c4b9f057fc4d0ebc464317c30 | b4920771048ba1f7cc6ac266c3f3576290c00718 | /session1/HW/ex1.py | fc29c34c92c90a445624ba1c1a341c8b163b3e6c | [] | no_license | dungbk10t/phamtuandung-webmodule-c4e26 | 969779da1d4bd8c1940583f4a11d1cfbe064eea2 | af793ba2765c8c17852c6bebcaf8250543488490 | refs/heads/master | 2021-10-23T12:59:32.532871 | 2019-03-17T14:03:31 | 2019-03-17T14:03:31 | 173,112,618 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 346 | py | from flask import Flask,redirect
# Single Flask application instance for this exercise module.
app = Flask(__name__)
@app.route('/about-me')
def about():
    """Serve a tiny personal profile rendered as the str() of a dict."""
    profile = {
        "Name": "Dung",
        "Age": "21",
        "Hobbies": "Travel",
        "Work": "Student",
    }
    return str(profile)
@app.route('/')
def school():
    """Redirect the site root to the Techkids homepage (302 Found)."""
    destination = "https://techkids.vn/"
    return redirect(destination, code=302)
# Start Flask's development server (with the debug reloader) when the
# module is executed directly.
if __name__ == '__main__':
    app.run(debug=True)
"38665090+dungbk10t@users.noreply.github.com"
] | 38665090+dungbk10t@users.noreply.github.com |
770c7164abe7da38b537a93ec34d8f614f0a94cc | ef35552267ac45345c60135845470260afbd6687 | /Artifacts/run_verus.py | 62d4e1cd025febe0970d0d9cd628cd8b3f810c46 | [
"MIT"
] | permissive | xianliangjiang/ALCC | 2bbe7e48aaf7ab273cfea4622855be12e261730f | fc9c627de8c381987fc775ce0872339fceb43ddf | refs/heads/main | 2023-05-16T21:11:06.738812 | 2021-06-10T11:43:23 | 2021-06-10T11:43:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 784 | py | import os
TIME=300        # seconds each trace is replayed per run
DIR='Results'   # output directory passed to run.py
NUM_RUNS=20     # repetitions per trace

# Reset the kernel congestion control to cubic as the baseline.
os.system('sudo sysctl -w net.ipv4.tcp_congestion_control=cubic')
# compile bftpd with alcc verus library
os.system('echo "compiling bftpd for alcc verus" && cd ../Applications/bftpd && cp Makefile_verus Makefile && make')
for trace in ['highwayGold', 'CityDrive', 'Corniche', 'rapidGold']:
    for i in range(1,NUM_RUNS+1):
        print (trace)
        # Launch the bftpd FTP server in its own terminal, run the
        # alcc_verus experiment against it, kill the server, then run
        # the plain verus baseline for the same trace/run index.
        os.system('''gnome-terminal -- sh -c 'echo "Running bftpd server" && cd ../Applications/bftpd && pwd && sudo ./bftpd -D -c bftpd.conf' ''')
        os.system('python run.py -tr {0} -t {1} --name {0}{2} --dir {3} --algo alcc_verus'.format(trace,TIME,i,DIR))
        os.system('sudo killall bftpd')
        os.system('python run.py -tr {0} -t {1} --name {0}{2} --dir {3} --algo verus'.format(trace,TIME,i,DIR))
"yasir.zaki@nyu.edu"
] | yasir.zaki@nyu.edu |
e308f0aa83b793bc83ed23a3d964b239a72ed6de | d4a5f8144855b201071c4657e37a7ad6b5994aff | /users/models.py | 7ae3002a7cc0a15449464e78df4109b39fe0abb8 | [] | no_license | Muratcol/Higher-Level-Django-Project | d453761197756d5b345640570f5a7b00c7948319 | cd82cc6bdc01196ad9a602be4bcd11ee655e1e1f | refs/heads/master | 2022-04-26T14:39:05.641565 | 2020-04-25T14:28:46 | 2020-04-25T14:28:46 | 258,793,791 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 658 | py | from django.db import models
from django.contrib.auth.models import User
from PIL import Image
# Create your models here.
class Profile(models.Model):
    """One-to-one extension of Django's User adding an avatar image."""
    # Deleting the User cascades to the profile.
    user = models.OneToOneField(User, on_delete = models.CASCADE)
    # Avatars upload to MEDIA_ROOT/profile_pics; 'default.jpg' is served
    # until the user provides one.
    image = models.ImageField(default = 'default.jpg', upload_to = 'profile_pics')

    def __str__(self):
        return f'{self.user.username} Profile'

    def save(self, *args, **kwargs):
        """Persist the model, then shrink oversized avatars to max 300x300."""
        super().save(*args, **kwargs)

        img = Image.open(self.image.path)

        # thumbnail() resizes in place preserving aspect ratio, so large
        # uploads don't waste disk space or bandwidth.
        if img.height > 300 or img.width > 300:
            output_size = (300, 300)
            img.thumbnail(output_size)
            img.save(self.image.path)
| [
"muratcolyaran@yahoo.com.tr"
] | muratcolyaran@yahoo.com.tr |
ad77f04ce6810e07fd8407db9354c5b4139ab67e | 17dca703eed28a859bba4984eba5b039b900e3d7 | /.history/nomina/views_20200227181321.py | a9f9c322cb015feead3955c66ebab05f4727ad27 | [] | no_license | alexogch1/SistemaOperaciones | 1a34872daf0e151672edd202a5089ee754805203 | ac72f6e3284061e240aebec6a3300ff463a3544c | refs/heads/master | 2021-01-03T15:32:45.470642 | 2020-03-03T07:47:27 | 2020-03-03T07:47:27 | 240,133,319 | 0 | 1 | null | 2020-02-28T05:21:57 | 2020-02-12T23:02:36 | Python | UTF-8 | Python | false | false | 5,733 | py | from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from django.urls import reverse_lazy
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.messages.views import SuccessMessageMixin
#from .filters import NominaFiltro
from dateutil.parser import parse
from django.views import generic
from generales.views import SinPrivilegios
from .form import NominaEncForm, NominaDetForm, DetalleNominaFormSet
from .models import NominaEnc, NominaDet
class NominaCompletaList(generic.ListView):
    """Full payroll listing: every header plus every detail row."""
    template_name = 'nomina/nomina_completa.html'
    context_object_name = 'nomina'
    queryset = NominaEnc.objects.all()

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        # Expose all detail rows and the header queryset to the template.
        context.update({
            'detalles': NominaDet.objects.all(),
            'encabezado': self.queryset,
        })
        return context
class NominaList( generic.ListView):
    """Simple list of payroll headers (NominaEnc)."""
    model=NominaEnc
    template_name='nomina/nomina_list.html'
    context_object_name='nomina'

    # The date-range filtering variant below was disabled by the original
    # author (left as a no-op string literal); kept verbatim for reference.
    """ def get_context_data(self, **kwargs):
        context = super(NominaList, self).get_context_data(**kwargs)
        initial_date = self.request.GET.get('fecha_inicial')
        final_date = self.request.GET.get('fecha_final')
        if not initial_date or not final_date:
            context ['nomina'] = NominaEnc.objects.order_by('fecha_nomina')
        else:
            initial_date = parse(initial_date)
            final_date = parse(final_date)
            context['nomina'] = NominaEnc.objects.filter(fecha_nomina__gte=initial_date, fecha_nomina__lte=final_date )
        return context """

    # Disabled django-filter integration, also kept for reference:
    #def get_context_data(self, **kwargs):
    #context = super().get_context_data(**kwargs)
    #context['filter']=NominaFiltro(self.request.GET, queryset=self.get_queryset())
    #return context
class NominaNew(SinPrivilegios, generic.CreateView):
    """Create a payroll header (NominaEnc) together with its detail formset."""
    permission_required='nomina.add_nominaenc'
    model=NominaEnc
    login_url='generales:home'
    template_name='nomina/nomina_form.html'
    form_class=NominaEncForm
    success_url=reverse_lazy('nomina:nomina_list')

    def get(self, request, *args, **kwargs):
        # Render an empty header form plus an empty detail formset.
        self.object=None
        form_class=self.get_form_class()
        form=self.get_form(form_class)
        detalle_nomina_formset=DetalleNominaFormSet()
        return self.render_to_response(
            self.get_context_data(
                form=form,
                detalle_nomina = detalle_nomina_formset
            )
        )

    def post(self, request, *args, **kwargs):
        form_class=self.get_form_class()
        form=self.get_form(form_class)
        detalle_nomina=DetalleNominaFormSet(request.POST)
        # Both the header form and every detail row must validate.
        if form.is_valid() and detalle_nomina.is_valid():
            return self.form_valid(form, detalle_nomina)
        else:
            return self.form_invalid(form, detalle_nomina)

    def form_valid(self, form, detalle_nomina):
        # Save the header first so formset rows can reference it via FK.
        self.object=form.save()
        detalle_nomina.instance=self.object
        detalle_nomina.save()
        return HttpResponseRedirect(self.success_url)

    def form_invalid(self, form, detalle_nomina):
        # Re-render with validation errors from both form and formset.
        return self.render_to_response(
            self.get_context_data(
                form=form,
                detalle_nomina=detalle_nomina
            )
        )
class NominaEdit(SinPrivilegios, generic.UpdateView):
    """Edit an existing payroll header together with its detail formset.

    BUG FIX: ``post()`` previously called ``form_valid()`` in *both*
    branches, so invalid submissions were saved instead of re-rendered
    with errors; it now calls ``form_invalid()`` on validation failure,
    matching the sibling ``NominaNew`` view.
    """
    permission_required='nomina.change_nominaenc'
    model=NominaEnc
    login_url='generales:home'
    template_name='nomina/nomina_form.html'
    form_class=NominaEncForm
    success_url=reverse_lazy('nomina:nomina_list')

    def get_success_url(self):
        """Stay on this record's edit page after a successful save."""
        from django.urls import reverse
        return reverse('nomina:nomina_edit',
                       kwargs={'pk': self.get_object().id})

    def get(self, request, *args, **kwargs):
        """Render the form with the existing detail rows pre-loaded."""
        self.object = self.get_object()
        form_class = self.get_form_class()
        form = self.get_form(form_class)
        detalles = NominaDet.objects.filter(nomina=self.object).order_by('pk')
        # Seed the formset with the current rows as editable initial data.
        detalles_data = [
            {'concepto': detalle.concepto, 'cantidad': detalle.cantidad}
            for detalle in detalles
        ]
        detalle_nomina = DetalleNominaFormSet(initial=detalles_data)
        detalle_nomina.extra += len(detalles_data)
        return self.render_to_response(
            self.get_context_data(
                form=form,
                detalle_nomina=detalle_nomina
            )
        )

    def post(self, request, *args, **kwargs):
        self.object = self.get_object()
        form_class = self.get_form_class()
        form = self.get_form(form_class)
        detalle_nomina = DetalleNominaFormSet(request.POST)
        if form.is_valid() and detalle_nomina.is_valid():
            return self.form_valid(form, detalle_nomina)
        else:
            # Fixed: re-render with errors instead of saving invalid data.
            return self.form_invalid(form, detalle_nomina)

    def form_valid(self, form, detalle_nomina):
        """Replace the detail rows with the submitted formset contents."""
        self.object = form.save()
        detalle_nomina.instance = self.object
        # Delete-and-recreate keeps the details in sync with the form.
        NominaDet.objects.filter(nomina=self.object).delete()
        detalle_nomina.save()
        return HttpResponseRedirect(self.get_success_url())

    def form_invalid(self, form, detalle_nomina):
        return self.render_to_response(
            self.get_context_data(
                form=form,
                detalle_nomina=detalle_nomina
            )
        )
class NominaDel(SinPrivilegios, generic.DeleteView):
    """Confirm-and-delete view for a payroll header.

    BUG FIX: the permission string used a colon ('nomina:delete_nominaenc'),
    but Django permissions use the dotted 'app_label.codename' form — as
    the sibling views here do ('nomina.add_nominaenc' etc.).  With the
    colon the permission check could never match.
    """
    permission_required='nomina.delete_nominaenc'
    model= NominaEnc
    template_name = 'nomina/nomina_del.html'
    context_object_name='obj'
    success_url=reverse_lazy('nomina:nomina_list')
| [
"alexogch@hotmail.com"
] | alexogch@hotmail.com |
3d97109bf415ea9269f7025758774cb1e2f9c5ab | e5add4ba0dc980b2129830142d91956f762d9835 | /CovidResourceFinder/urls.py | bf5e8da72bbc53f8764ed3e35dba36556799fb7e | [] | no_license | VirangParekh/CovidResourceFinder | a23ddb0db9167625f2a605ec061d4f8a0bd583aa | 168bc145d1e92e8285f3a38bfd0eb0ea3effea93 | refs/heads/master | 2023-04-08T03:34:51.677830 | 2021-04-25T06:02:25 | 2021-04-25T06:02:25 | 360,268,178 | 0 | 0 | null | 2021-04-22T13:41:36 | 2021-04-21T18:27:36 | Python | UTF-8 | Python | false | false | 366 | py | from django.conf.urls import url
from django.contrib import admin
from django.urls import path, include
from django.conf.urls.static import static
from django.conf import settings
# Route table: admin site plus the resource-finder app.  static() appends
# media-file serving (only active when DEBUG is enabled).
urlpatterns = [
    path('admin/', admin.site.urls),
    path('resource_finder/', include("ResourceFinderApp.urls")),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| [
"44228173+VirangParekh@users.noreply.github.com"
] | 44228173+VirangParekh@users.noreply.github.com |
c771d7fe4dad2294e05f47c1a734db065672f857 | 724713c8d5891e7dda67e8c250018250e6da44bf | /chapter_05/Variavle_length.py | 43a79e76c9e659d146355ac97a62f385eed7b721 | [] | no_license | conanlhj/python_inflearn | 1c5731e8634c18e15360097569541b419410f94e | c9c713ea1d374e8a5b9dfc1568e6a767c4d00305 | refs/heads/main | 2023-02-19T10:23:58.835506 | 2021-01-25T11:40:22 | 2021-01-25T11:40:22 | 332,727,730 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 629 | py | def asterisk_test(a,b,*args):
return args
def asterisk_test_2(*args):
    """Unpack exactly three variadic arguments and return them as a tuple.

    Raises ValueError when the argument count is not three.
    """
    first, second, third = args
    return first, second, third
def kwargs_test_1(**kwargs):
    """Print the keyword-argument dict, then the value bound to 'first'.

    Raises KeyError (after printing the dict) when 'first' is missing.
    """
    print(kwargs)
    first_value = kwargs["first"]
    print(first_value)
def kwargs_test_2(**kwargs):
    """Print the kwargs dict, then three formatted lines using its keys."""
    templates = ("First value is {first}",
                 "Second value is {second}",
                 "Third value is {third}")
    print(kwargs)
    for template in templates:
        print(template.format(**kwargs))
def kwargs_test_3(one, two, *args, **kwargs):
    """Print kwargs, the extra positional args, then the sum of all positionals."""
    print(kwargs)
    print(args)
    total = one + two + sum(args)
    print(total)
# Sample invocations (the first two are disabled examples):
# print(asterisk_test(1,2,3,4,5))
# print(asterisk_test_2(1,2,3))
kwargs_test_3(3,4,5,6,7,8,9,10,second=3, first=4, third=5)
| [
"77838360+conanlhj@users.noreply.github.com"
] | 77838360+conanlhj@users.noreply.github.com |
09485a4a913d81b199e0e4f85f59f811f3947951 | 867bb24022e8908e66b9dbe52bcac81cc16e86db | /myshop/Employee/apps.py | 907e9dc183c55890576e53643f99d014415ddbe7 | [] | no_license | Gonza12345/Diplom_shop | 86120886b0bf77cb871d3de2f64075592bed09c8 | 0527561d9746d6e5f73c62b74814135af7aa52e8 | refs/heads/master | 2020-05-31T02:28:52.147478 | 2019-06-03T19:34:39 | 2019-06-03T19:34:39 | 190,066,617 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 89 | py | from django.apps import AppConfig
class EmployeeConfig(AppConfig):
    """App configuration for the Employee application.

    BUG FIX: ``name`` was 'orders' (apparently copied from another app's
    apps.py), but AppConfig.name must be the dotted path of the app this
    config lives in — this file is ``myshop/Employee/apps.py``, so Django
    would otherwise register the wrong package for this config.
    """
    name = 'Employee'
| [
"uad134679@gmail.com"
] | uad134679@gmail.com |
f5a8b97a66c04bb5a50c0064ce19657b48d5b3ef | 596b6f769a19bd597ca235263b4518be3227b0f7 | /ExeDemo/Exe14.py | 148b0eb6792aa548ad6088c8760e6848c115f327 | [] | no_license | himanim-ept/Python-Execerise-1 | 0ac5022e6cd5f6fe6887b1154258c3c62d393150 | 1663cba8bae6b1e6d186e87c7058c70023e65ca7 | refs/heads/master | 2023-03-15T06:18:51.546786 | 2021-03-10T11:02:19 | 2021-03-10T11:02:19 | 336,448,226 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 183 | py | #Write a Python program to count the number occurrence of a specific character in a string.
txt = "Hello, My name is Himani"
# str.count() returns the number of non-overlapping occurrences of the
# substring "e" in txt.
x = txt.count("e")
print(x)
"himanim@emiprotechnologies.com"
] | himanim@emiprotechnologies.com |
51fe296f9a06966e6e243a907c4209236b1137e9 | 0c66e605e6e4129b09ea14dbb6aa353d18aaa027 | /diventi/ebooks/migrations/0007_auto_20190429_1732.py | 3b567492131adb79f3e21d1f851220e0b4b14f01 | [
"Apache-2.0"
] | permissive | flavoi/diventi | 58fbc8c947f387cbcc1ce607878a59a6f2b72313 | c0b1efe2baa3ff816d6ee9a8e86623f297973ded | refs/heads/master | 2023-07-20T09:32:35.897661 | 2023-07-11T19:44:26 | 2023-07-11T19:44:26 | 102,959,477 | 2 | 1 | Apache-2.0 | 2023-02-08T01:03:17 | 2017-09-09T14:10:51 | Python | UTF-8 | Python | false | false | 1,340 | py | # Generated by Django 2.1.7 on 2019-04-29 15:32
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema migration: adds per-language (en/it) columns
    for Chapter's description, slug and title — presumably produced by a
    model-translation package (unverified; confirm against the app setup).

    Applied migrations should not be hand-edited; create a new migration
    for further changes.
    """

    dependencies = [
        ('ebooks', '0006_auto_20190429_1727'),
    ]

    operations = [
        migrations.AddField(
            model_name='chapter',
            name='description_en',
            field=models.TextField(null=True, verbose_name='description'),
        ),
        migrations.AddField(
            model_name='chapter',
            name='description_it',
            field=models.TextField(null=True, verbose_name='description'),
        ),
        migrations.AddField(
            model_name='chapter',
            name='slug_en',
            field=models.SlugField(null=True, unique=True, verbose_name='slug'),
        ),
        migrations.AddField(
            model_name='chapter',
            name='slug_it',
            field=models.SlugField(null=True, unique=True, verbose_name='slug'),
        ),
        migrations.AddField(
            model_name='chapter',
            name='title_en',
            field=models.CharField(max_length=50, null=True, verbose_name='title'),
        ),
        migrations.AddField(
            model_name='chapter',
            name='title_it',
            field=models.CharField(max_length=50, null=True, verbose_name='title'),
        ),
    ]
| [
"flavius476@gmail.com"
] | flavius476@gmail.com |
9b1a400d3281860f99c1cb1c0f0a9b1c2006bf90 | 2d191eb46ed804c9029801832ff4016aeaf8d31c | /configs/_base_/models/deeplabv3_sep_r50-d8.py | bb8c92051e538c75132eb8666ccb1d1cc8698ffc | [
"Apache-2.0"
] | permissive | openseg-group/mmsegmentation | df99ac2c3510b7f2dff92405aae25026d1023d98 | 23939f09d2b0bd30fc26eb7f8af974f1f5441210 | refs/heads/master | 2023-03-02T07:49:23.652558 | 2021-02-15T04:16:28 | 2021-02-15T04:16:28 | 278,537,243 | 2 | 2 | null | 2020-07-10T04:24:16 | 2020-07-10T04:24:15 | null | UTF-8 | Python | false | false | 1,330 | py | # model settings
# model settings (mmsegmentation-style config: plain dicts consumed by the
# framework's builder — keys and values must match the registry names).
norm_cfg = dict(type='SyncBN', requires_grad=True)
model = dict(
    type='EncoderDecoder',
    pretrained='open-mmlab://resnet50_v1c',
    backbone=dict(
        type='ResNetV1c',
        depth=50,
        num_stages=4,
        out_indices=(0, 1, 2, 3),
        # Dilated stages 3/4 with stride 1 give an output stride of 8.
        dilations=(1, 1, 2, 4),
        strides=(1, 2, 1, 1),
        norm_cfg=norm_cfg,
        norm_eval=False,
        style='pytorch',
        contract_dilation=True),
    decode_head=dict(
        type='DepthwiseSeparableASPPHead',
        in_channels=2048,
        in_index=3,
        channels=512,
        dilations=(1, 12, 24, 36),
        # c1 channels set to 0: the low-level feature shortcut is disabled.
        c1_in_channels=0,
        c1_channels=0,
        dropout_ratio=0.1,
        num_classes=19,
        norm_cfg=norm_cfg,
        align_corners=False,
        loss_decode=dict(
            type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0)),
    auxiliary_head=dict(
        type='FCNHead',
        in_channels=1024,
        in_index=2,
        channels=256,
        num_convs=1,
        concat_input=False,
        dropout_ratio=0.1,
        num_classes=19,
        norm_cfg=norm_cfg,
        align_corners=False,
        # Auxiliary supervision is down-weighted relative to the main head.
        loss_decode=dict(
            type='CrossEntropyLoss', use_sigmoid=False, loss_weight=0.4)))
# model training and testing settings
train_cfg = dict()
test_cfg = dict(mode='whole')
| [
"yhyuan@pku.edu.cn"
] | yhyuan@pku.edu.cn |
25d68ca33cbaa85ba2ebd6cc6cd38960029053a9 | 2ddfcb901978b4c8a431406098669d22c0c33c62 | /runaway/config/urls.py | 8feca1fe5a36af1262c8f959baf8fbc42ca3cb14 | [] | no_license | thezpliu/runaway | 1ce3e29e8e4b16d89c4c7bee5b44fc911089f586 | e32b3c9a40f40f3017b2b1d38b327cec78c3cad4 | refs/heads/master | 2020-04-29T21:15:53.971365 | 2019-03-19T02:37:35 | 2019-03-19T02:37:35 | 99,125,026 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,329 | py | # coding=utf-8
from runaway.views.index import index
from runaway.views.login import login, user_login_out
from runaway.views.monitor import monitor
from runaway.views.svninfo import svn_v
from runaway.views.p4info import p4_v
from runaway.api.ldap_login import LoginCheck
from runaway.api.changepasswd import ChangePasswd
from runaway.api.zabbix import zabbix_info
from runaway.api.svninfo import svn_info
from runaway.api.p4info import p4_info
# REST endpoints: (resource class, URL path) pairs registered on the Api.
APIURL = [
    [LoginCheck, '/api/v1/login'],
    [ChangePasswd, '/api/v1/changepasswd'],
    [zabbix_info,'/api/v1/zabbix_info'],
    [svn_info,'/api/v1/svn_info'],
    [p4_info,'/api/v1/p4_info'],
]

# Page routes: [rule, endpoint name, comma-separated HTTP methods, view func].
URLS = [
    ['/', 'index', 'GET', index],
    ['/login', 'login', 'GET', login],
    ['/user_login_out/', 'user_login_out', 'GET', user_login_out],
    ['/monitor', 'monitor', 'GET', monitor],
    ['/svninfo', 'svninfo', 'GET', svn_v],
    ['/p4info', 'p4info', 'GET', p4_v],
]
def regist_urls(app=None, api=None):
    """Register REST resources on *api* and view routes on *app*.

    Does nothing unless both the Flask app and the REST api object are
    provided.  Method strings in URLS may be comma-separated lists.
    """
    if app is None or api is None:
        return
    for resource, path in APIURL:
        api.add_resource(resource, path)
    for rule, endpoint, methods, view in URLS:
        if methods.find(',') > 0:
            method_list = methods.split(',')
        else:
            method_list = [methods]
        app.add_url_rule(rule=rule, endpoint=endpoint,
                         methods=method_list, view_func=view)
| [
"lzpac@163.com"
] | lzpac@163.com |
26cdf1910bc9e8ac0aefa7ee992aabf7376ac5ae | a2f0abc7e1ec380f387d51dcefd1c5c2e0f6ba21 | /auth/httpclient.py | 4eeac4e2eb21f2faa3c06f0a709419106a2ec479 | [] | no_license | sanand0/sms-greetings | 0771537fd73a0769cb0e5eff2c9db56d401f6889 | ab9d7757104f8b1d1129df598f78e4b507d07bc3 | refs/heads/master | 2021-01-15T11:48:30.030591 | 2010-03-19T14:12:48 | 2010-03-19T14:12:48 | 32,197,564 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,284 | py | """
web.py httpclient
~~~~~~~~~~~~~~~~
HTTP client to support `tornado.auth` on web.py.
:copyright: 2010 by tipfy.org and s-anand.net
:license: Apache License Version 2.0. See LICENSE.txt for more details.
"""
import functools
import logging
import httplib2
from webpyauth import RequestRedirect
# Shared httplib2 client, reused by every AsyncHTTPClient.fetch call.
browser = httplib2.Http()
class HttpResponseError(object):
    """A dummy response used when urlfetch raises an exception."""
    code = 404              # HTTP status exposed to callers
    body = '404 Not Found'  # placeholder response body
    error = 'Error 404'     # truthy error marker (tornado-style contract)
class AsyncHTTPClient(object):
"""An blocking HTTP client that uses urllib."""
def fetch(self, url, callback, **kwargs):
if callback is None:
return None
try:
status, content = browser.request(url, **kwargs)
code = status.status
setattr(status, 'error', (code < 200 or code >= 300) and code or None)
setattr(status, 'body', content)
try:
return callback(status)
except RequestRedirect, e:
raise e
except Exception, e:
logging.error("Exception during callback", exc_info=True)
except RequestRedirect, e:
raise e
except Exception, e:
result = HttpResponseError()
| [
"subramanian_anand@localhost"
] | subramanian_anand@localhost |
da33fd90e8e21d2ac80a6cfe717c8d7bd2b41914 | c54f0b3a32e9043ca99a8ffbb6989747eaec50b1 | /infer_module/positional_encoding.py | 1d2b0b973a1a1c59ef6a8bb994e99e68a84eb1d0 | [
"MIT"
] | permissive | daniel-richter/DIN_GAR | bad849a6eeeecaf7f9779c64b75494d8e12c0eb5 | f97759038936ad36359cb8c0d9ff0951d2482e25 | refs/heads/main | 2023-09-02T21:40:11.727572 | 2021-10-29T12:35:03 | 2021-10-29T12:35:03 | 407,142,109 | 0 | 0 | MIT | 2021-09-16T11:41:54 | 2021-09-16T11:41:53 | null | UTF-8 | Python | false | false | 6,752 | py | import torch.nn as nn
import torch
import math
import torch.nn.functional as F
import numpy as np
import cv2
from config import Config
class PositionEmbeddingSine(nn.Module):
"""
This is a more standard version of the position embedding, very similar to the one
used by the Attention is all you need paper, generalized to work on images.
"""
def __init__(self, num_pos_feats=64, temperature=10000, normalize=False, scale=None):
super().__init__()
self.num_pos_feats = num_pos_feats
self.temperature = temperature
self.normalize = normalize
if scale is not None and normalize is False:
raise ValueError("normalize should be True if scale is passed")
if scale is None:
scale = 2 * math.pi
self.scale = scale
def forward(self, tensors, mask):
x = tensors
mask = mask
assert mask is not None
not_mask = ~mask
y_embed = not_mask.cumsum(1, dtype=torch.float32) # B, H, W
x_embed = not_mask.cumsum(2, dtype=torch.float32) # B, H, W
if self.normalize:
eps = 1e-6
y_embed = y_embed / (y_embed[:, -1:, :] + eps) * self.scale
x_embed = x_embed / (x_embed[:, :, -1:] + eps) * self.scale
dim_t = torch.arange(self.num_pos_feats, dtype=torch.float32, device=x.device) # C,
dim_t = self.temperature ** (2 * (dim_t // 2) / self.num_pos_feats) # C,
pos_x = x_embed[:, :, :, None] / dim_t # B, H, W / C, -> B, H, W, C
pos_y = y_embed[:, :, :, None] / dim_t # B, H, W / C, -> B, H, W, C
pos_x = torch.stack((pos_x[:, :, :, 0::2].sin(), pos_x[:, :, :, 1::2].cos()), dim=4).flatten(3)
# B, H, W, C/2, 2 -> B, H, W, C (in sin, cos, sin, cos order)
pos_y = torch.stack((pos_y[:, :, :, 0::2].sin(), pos_y[:, :, :, 1::2].cos()), dim=4).flatten(3)
pos = torch.cat((pos_y, pos_x), dim=3).permute(0, 3, 1, 2) # B, H, W, 2C -> B, 2C, H, W
return pos
class Context_PositionEmbeddingSine(nn.Module):
"""
This is a more standard version of the position embedding, very similar to the one
used by the Attention is all you need paper, generalized to work on images.
"""
def __init__(self, context_downscale_ratio, num_pos_feats, temperature=10000, normalize=False, scale=None):
super().__init__()
self.context_downscale_ratio = context_downscale_ratio
self.num_pos_feats = num_pos_feats
self.temperature = temperature
self.normalize = normalize
if scale is not None and normalize is False:
raise ValueError("normalize should be True if scale is passed")
if scale is None:
scale = 2 * math.pi
self.scale = scale
def forward(self, context):
x = context
mask_shape = (context.shape[0], context.shape[2], context.shape[3])
mask = torch.ones(mask_shape , device = context.device) == 0 # All False
assert mask is not None
not_mask = ~mask
y_embed = not_mask.cumsum(1, dtype=torch.float32) * self.context_downscale_ratio # B, H, W
x_embed = not_mask.cumsum(2, dtype=torch.float32) * self.context_downscale_ratio # B, H, W
if self.normalize:
eps = 1e-6
y_embed = y_embed / (y_embed[:, -1:, :] + eps) * self.scale
x_embed = x_embed / (x_embed[:, :, -1:] + eps) * self.scale
dim_t = torch.arange(self.num_pos_feats, dtype=torch.float32, device=x.device) # C,
dim_t = self.temperature ** (2 * (dim_t // 2) / self.num_pos_feats) # C,
pos_x = x_embed[:, :, :, None] / dim_t # B, H, W / C, -> B, H, W, C
pos_y = y_embed[:, :, :, None] / dim_t # B, H, W / C, -> B, H, W, C
pos_x = torch.stack((pos_x[:, :, :, 0::2].sin(), pos_x[:, :, :, 1::2].cos()), dim=4).flatten(3)
# B, H, W, C/2, 2 -> B, H, W, C (in sin, cos, sin, cos order)
pos_y = torch.stack((pos_y[:, :, :, 0::2].sin(), pos_y[:, :, :, 1::2].cos()), dim=4).flatten(3)
pos = torch.cat((pos_y, pos_x), dim=3).permute(0, 3, 1, 2) # B, H, W, 2C -> B, 2C, H, W
context_pos = context + pos
return context_pos
class Embfeature_PositionEmbedding(nn.Module):
def __init__(self, cfg, num_pos_feats=512, temperature=10000, normalize=False, scale=None):
super().__init__()
self.image_size = cfg.image_size # 720, 1280
self.out_size = cfg.out_size # 45, 80
self.num_pos_feats = num_pos_feats
self.temperature = temperature
self.normalize = normalize
if scale is not None and normalize is False:
raise ValueError("normalize should be True if scale is passed")
if scale is None:
scale = 2 * math.pi
self.scale = scale
def forward(self, feature, boxes_in_flat):
'''
:param feature: B * T * N, 1024
:param boxes_in_flat: B * T * N, 4
:return:
'''
assert self.num_pos_feats*2 == feature.shape[1]
out_boxes_x = (boxes_in_flat[:,0] + boxes_in_flat[:,2]) / 2.
out_boxes_y = (boxes_in_flat[:,1] + boxes_in_flat[:,3]) / 2.
image_boxes_x = out_boxes_x * self.image_size[1] / self.out_size[1] # B * T * N,
image_boxes_y = out_boxes_y * self.image_size[0] / self.out_size[0] # B * T * N,
dim_t = torch.arange(self.num_pos_feats, dtype=torch.float32, device=feature.device) # C,
dim_t = self.temperature ** (2 * (dim_t // 2) / self.num_pos_feats) # C,
pos_x = image_boxes_x[:,None] / dim_t
pos_y = image_boxes_y[:,None] / dim_t
pos_x = torch.stack((pos_x[:,0::2].sin(), pos_x[:,1::2].cos()), dim = 2).flatten(1)
pos_y = torch.stack((pos_y[:,0::2].sin(), pos_y[:,1::2].cos()), dim = 2).flatten(1)
pos_emb = torch.cat((pos_x, pos_y), dim = 1)
assert pos_emb.shape == feature.shape
feature_emb = pos_emb + feature
return feature_emb
if __name__ == '__main__':
''' test PositionEmbeddingSine
pe = PositionEmbeddingSine(4, 10000, False, None)
mask = torch.ones(1,2,4) == 0
tensors = torch.rand(1,2,2,4)
print(pe(tensors, mask).shape)
print(pe(tensors, mask))'''
''' test Embfeature_PositionEmbedding '''
cfg = Config('HrBase_volleyball')
#cfg = Config('InvReason_volleyball')
EP = Embfeature_PositionEmbedding(cfg, num_pos_feats=512)
feature = torch.randn(12, 1024)
boxes_in_flat = torch.randn(12, 4)
feature_emb = EP(feature, boxes_in_flat)
print(feature_emb.shape)
''' test Context_PositionEmbeddingSine '''
CP = Context_PositionEmbeddingSine(8, 128/2)
context = torch.randn(1, 128, 45, 80)
context_emb = CP(context)
print(context_emb.shape)
| [
"jie980446003@hotmail.com"
] | jie980446003@hotmail.com |
a4c78496e3e6c0ca7c8343f03b0e455be84de413 | 585fcfd09bcc37ad73c6f301cb8b16261a93df7e | /projects/pyDOE-master/pyDOE/build_regression_matrix.py | 5ea2c2f53342a023823a115a04a403407c9ccc3d | [
"MIT",
"BSD-3-Clause"
] | permissive | louisXW/Surrogate-Model | e9e8de3ab892eed2f8ed424e09b770e67126c1f3 | 65ec8a89c1b7a19d4c04c62e2c988340c96c69f8 | refs/heads/master | 2021-07-21T09:37:41.045898 | 2017-10-30T11:49:35 | 2017-10-30T11:49:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,909 | py | """
This code was originally published by the following individuals for use with
Scilab:
Copyright (C) 2012 - 2013 - Michael Baudin
Copyright (C) 2012 - Maria Christopoulou
Copyright (C) 2010 - 2011 - INRIA - Michael Baudin
Copyright (C) 2009 - Yann Collette
Copyright (C) 2009 - CEA - Jean-Marc Martinez
website: forge.scilab.org/index.php/p/scidoe/sourcetree/master/macros
Much thanks goes to these individuals. It has been converted to Python by
Abraham Lee.
"""
import numpy as np
def grep(haystack, needle):
start = 0
while True:
start = haystack.find(needle, start)
if start == -1:
return
yield start
start += len(needle)
def build_regression_matrix(H, model, build=None):
"""
Build a regression matrix using a DOE matrix and a list of monomials.
Parameters
----------
H : 2d-array
model : str
build : bool-array
Returns
-------
R : 2d-array
"""
ListOfTokens = model.split(' ')
if H.shape[1] == 1:
size_index = len(str(H.shape[0]))
else:
size_index = len(str(H.shape[1]))
if build is None:
build = [True] * len(ListOfTokens)
# Test if the vector has the wrong direction (lines instead of columns)
if H.shape[0] == 1:
H = H.T
# Collect the list of monomials
Monom_Index = []
for i in range(len(ListOfTokens)):
if build[i]:
Monom_Index += [grep(ListOfTokens, 'x' + str(0) * (size_index - \
len(str(i))) + str(i))]
Monom_Index = -np.sort(-Monom_Index)
Monom_Index = np.unique(Monom_Index)
if H.shape[1] == 1:
nb_var = H.shape[0] # vector "mode": the number of vars is equal to the number of lines of H
VectorMode = True
for i in range(nb_var):
for j in range(ListOfTokens.shape[0]):
ListOfTokens[j] = ListOfTokens[j].replace(
'x' + str(0) * (size_index - len(str(i))) + str(i),
'H(' + str(i) + ')')
else:
nb_var = H.shape[0] # matrix "mode": the number of vars is equal to the number of columns of H
VectorMode = False
for i in range(nb_var):
for j in range(ListOfTokens.shape[0]):
ListOfTokens[j] = ListOfTokens[j].replace(
'x' + str(0) * (size_index - len(str(i))) + str(i),
'H[i,' + str(i) + ')')
# Now build the regression matrix
if VectorMode:
R = np.zeros((len(ListOfTokens), 1))
for j in range(len(ListOfTokens)):
R[j, 0] = eval(ListOfTokens[j])
else:
R = np.zeros((H.shape[0], len(ListOfTokens)))
for i in range(H.shape[0]):
for j in range(len(ListOfTokens)):
R[i, j] = eval(ListOfTokens[j])
return R
| [
"quanpan302@hotmail.com"
] | quanpan302@hotmail.com |
a6f10eb4cac4d9ebb9ce1cafd18a0bd881d177e0 | 9f3151a966b1e682dd24d806d4705145c8b10c47 | /0x11-python-network_1/6-post_email.py | ba2b1dc4d81b3f013abb2827eab4d3cb8942dca6 | [] | no_license | DiegoSusviela/holbertonschool-higher_level_programming | 5ace3a63a1f79fc7d0e870b8308dc10964b7bb21 | 4cd3e677bd0ffd6b7fc034a53b8c357893b87a60 | refs/heads/main | 2023-08-14T05:12:20.926985 | 2021-09-27T15:47:15 | 2021-09-27T15:47:15 | 361,817,218 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 212 | py | #!/usr/bin/python3
"""alibaba y los 40 ladrones"""
import requests
from sys import argv
if __name__ == "__main__":
payload = {'email': argv[2]}
r = requests.post(argv[1], data=payload)
print(r.text)
| [
"dieguitosus@hotmail.com"
] | dieguitosus@hotmail.com |
0dec940c8d9ee73e47f55d49a771aebb21beec6d | 55d560fe6678a3edc9232ef14de8fafd7b7ece12 | /tools/build/test/rescan_header.py | 36a007eb406fa403704cb5091d42f2606d7901ce | [
"BSL-1.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | stardog-union/boost | ec3abeeef1b45389228df031bf25b470d3d123c5 | caa4a540db892caa92e5346e0094c63dea51cbfb | refs/heads/stardog/develop | 2021-06-25T02:15:10.697006 | 2020-11-17T19:50:35 | 2020-11-17T19:50:35 | 148,681,713 | 0 | 0 | BSL-1.0 | 2020-11-17T19:50:36 | 2018-09-13T18:38:54 | C++ | UTF-8 | Python | false | false | 5,653 | py | #!/usr/bin/python
# Copyright 2012 Steven Watanabe
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
import BoostBuild
t = BoostBuild.Tester(use_test_config=False)
# Test a header loop that depends on (but does not contain) a generated header.
t.write("test.cpp", '#include "header1.h"\n')
t.write("header1.h", """\
#ifndef HEADER1_H
#define HEADER1_H
#include "header2.h"
#endif
""")
t.write("header2.h", """\
#ifndef HEADER2_H
#define HEADER2_H
#include "header1.h"
#include "header3.h"
#endif
""")
t.write("header3.in", "/* empty file */\n")
t.write("jamroot.jam", """\
import common ;
make header3.h : header3.in : @common.copy ;
obj test : test.cpp : <implicit-dependency>header3.h ;
""")
t.run_build_system(["-j2"])
t.expect_addition("bin/header3.h")
t.expect_addition("bin/$toolset/debug*/test.obj")
t.expect_nothing_more()
t.rm(".")
# Test a linear sequence of generated headers.
t.write("test.cpp", '#include "header1.h"\n')
t.write("header1.in", """\
#ifndef HEADER1_H
#define HEADER1_H
#include "header2.h"
#endif
""")
t.write("header2.in", """\
#ifndef HEADER2_H
#define HEADER2_H
#include "header3.h"
#endif
""")
t.write("header3.in", "/* empty file */\n")
t.write("jamroot.jam", """\
import common ;
make header1.h : header1.in : @common.copy ;
make header2.h : header2.in : @common.copy ;
make header3.h : header3.in : @common.copy ;
obj test : test.cpp :
<implicit-dependency>header1.h
<implicit-dependency>header2.h
<implicit-dependency>header3.h ;
""")
t.run_build_system(["-j2", "test"])
t.expect_addition("bin/header1.h")
t.expect_addition("bin/header2.h")
t.expect_addition("bin/header3.h")
t.expect_addition("bin/$toolset/debug*/test.obj")
t.expect_nothing_more()
t.rm(".")
# Test a loop in generated headers.
t.write("test.cpp", '#include "header1.h"\n')
t.write("header1.in", """\
#ifndef HEADER1_H
#define HEADER1_H
#include "header2.h"
#endif
""")
t.write("header2.in", """\
#ifndef HEADER2_H
#define HEADER2_H
#include "header3.h"
#endif
""")
t.write("header3.in", """\
#ifndef HEADER2_H
#define HEADER2_H
#include "header1.h"
#endif
""")
t.write("jamroot.jam", """\
import common ;
actions copy {
sleep 1
cp $(>) $(<)
}
make header1.h : header1.in : @common.copy ;
make header2.h : header2.in : @common.copy ;
make header3.h : header3.in : @common.copy ;
obj test : test.cpp :
<implicit-dependency>header1.h
<implicit-dependency>header2.h
<implicit-dependency>header3.h ;
""")
t.run_build_system(["-j2", "test"])
t.expect_addition("bin/header1.h")
t.expect_addition("bin/header2.h")
t.expect_addition("bin/header3.h")
t.expect_addition("bin/$toolset/debug*/test.obj")
t.expect_nothing_more()
t.rm(".")
# Test that all the dependencies of a loop are updated before any of the
# dependents.
t.write("test1.cpp", '#include "header1.h"\n')
t.write("test2.cpp", """\
#include "header2.h"
int main() {}
""")
t.write("header1.h", """\
#ifndef HEADER1_H
#define HEADER1_H
#include "header2.h"
#endif
""")
t.write("header2.h", """\
#ifndef HEADER2_H
#define HEADER2_H
#include "header1.h"
#include "header3.h"
#endif
""")
t.write("header3.in", "\n")
t.write("sleep.bat", """\
::@timeout /T %1 /NOBREAK >nul
@ping 127.0.0.1 -n 2 -w 1000 >nul
@ping 127.0.0.1 -n %1 -w 1000 >nul
@exit /B 0
""")
t.write("jamroot.jam", """\
import common ;
import os ;
if [ os.name ] = NT
{
SLEEP = call sleep.bat ;
}
else
{
SLEEP = sleep ;
}
rule copy { common.copy $(<) : $(>) ; }
actions copy { $(SLEEP) 1 }
make header3.h : header3.in : @copy ;
exe test : test2.cpp test1.cpp : <implicit-dependency>header3.h ;
""")
t.run_build_system(["-j2", "test"])
t.expect_addition("bin/header3.h")
t.expect_addition("bin/$toolset/debug*/test1.obj")
t.expect_addition("bin/$toolset/debug*/test2.obj")
t.expect_addition("bin/$toolset/debug*/test.exe")
t.expect_nothing_more()
t.touch("header3.in")
t.run_build_system(["-j2", "test"])
t.expect_touch("bin/header3.h")
t.expect_touch("bin/$toolset/debug*/test1.obj")
t.expect_touch("bin/$toolset/debug*/test2.obj")
t.expect_touch("bin/$toolset/debug*/test.exe")
t.expect_nothing_more()
t.rm(".")
# Test a loop that includes a generated header
t.write("test1.cpp", '#include "header1.h"\n')
t.write("test2.cpp", """\
#include "header2.h"
int main() {}
""")
t.write("header1.h", """\
#ifndef HEADER1_H
#define HEADER1_H
#include "header2.h"
#endif
""")
t.write("header2.in", """\
#ifndef HEADER2_H
#define HEADER2_H
#include "header3.h"
#endif
""")
t.write("header3.h", """\
#ifndef HEADER3_H
#define HEADER3_H
#include "header1.h"
#endif
""")
t.write("sleep.bat", """\
::@timeout /T %1 /NOBREAK >nul
@ping 127.0.0.1 -n 2 -w 1000 >nul
@ping 127.0.0.1 -n %1 -w 1000 >nul
@exit /B 0
""")
t.write("jamroot.jam", """\
import common ;
import os ;
if [ os.name ] = NT
{
SLEEP = call sleep.bat ;
}
else
{
SLEEP = sleep ;
}
rule copy { common.copy $(<) : $(>) ; }
actions copy { $(SLEEP) 1 }
make header2.h : header2.in : @copy ;
exe test : test2.cpp test1.cpp : <implicit-dependency>header2.h <include>. ;
""")
t.run_build_system(["-j2", "test"])
t.expect_addition("bin/header2.h")
t.expect_addition("bin/$toolset/debug*/test1.obj")
t.expect_addition("bin/$toolset/debug*/test2.obj")
t.expect_addition("bin/$toolset/debug*/test.exe")
t.expect_nothing_more()
t.cleanup()
| [
"james.pack@stardog.com"
] | james.pack@stardog.com |
8d2db0e03577849c03ffa9b296d5a266ea0fb0d7 | f576f0ea3725d54bd2551883901b25b863fe6688 | /sdk/rdbms/azure-mgmt-rdbms/azure/mgmt/rdbms/mysql/aio/operations/_replicas_operations.py | ed2a170e523af5c4bd570d0b9b817e6a9a04d6ce | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] | permissive | Azure/azure-sdk-for-python | 02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c | c2ca191e736bb06bfbbbc9493e8325763ba990bb | refs/heads/main | 2023-09-06T09:30:13.135012 | 2023-09-06T01:08:06 | 2023-09-06T01:08:06 | 4,127,088 | 4,046 | 2,755 | MIT | 2023-09-14T21:48:49 | 2012-04-24T16:46:12 | Python | UTF-8 | Python | false | false | 5,650 | py | # pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._replicas_operations import build_list_by_server_request
from .._vendor import MySQLManagementClientMixinABC
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ReplicasOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.rdbms.mysql.aio.MySQLManagementClient`'s
:attr:`replicas` attribute.
"""
models = _models
def __init__(self, *args, **kwargs) -> None:
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace
def list_by_server(
self, resource_group_name: str, server_name: str, **kwargs: Any
) -> AsyncIterable["_models.Server"]:
"""List all the replicas for a given server.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param server_name: The name of the server. Required.
:type server_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either Server or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.rdbms.mysql.models.Server]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2017-12-01"))
cls: ClsType[_models.ServerListResult] = kwargs.pop("cls", None)
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_by_server_request(
resource_group_name=resource_group_name,
server_name=server_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list_by_server.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = HttpRequest("GET", next_link)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ServerListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
list_by_server.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DBforMySQL/servers/{serverName}/replicas"
}
| [
"noreply@github.com"
] | noreply@github.com |
fe613641ae32fcb110bcbb399b2d465ebe57228e | 1ab053667d87389f673c7689b68dbb5c003f85b9 | /dict.py | 7713a2e202b415c6d2a18294a7232deae71528ec | [
"Apache-2.0"
] | permissive | acharyaparag/Compiler | dcf1d3aaf793b4fced7a1672f769caa995747dea | 046f99389d4ce1af46bc4a96a7ac58479986810d | refs/heads/master | 2016-09-05T16:37:20.371806 | 2014-05-14T03:26:27 | 2014-05-14T03:26:27 | 19,764,899 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,324 | py | reservedt = {
'start': 1,
'prog': 2,
'body': 3,
'declpart': 4,
'decllist': 5,
'decllist-': 6,
'declstat': 7,
'declstat-': 8,
'type': 9,
'procpart': 10,
'proclist': 11,
'proc': 12,
'prochead': 13,
'procname': 14,
'null-list': 15,
'fparmlist': 16,
'fparmlist-': 17,
'callby': 18,
'execpart': 19,
'exechead': 20,
'statlist': 21,
'statlist-': 22,
'stat': 23,
'instat': 24,
'instat-': 25,
'instathd': 26,
'outstat': 27,
'outstat-': 28,
'outstathd': 29,
'callstat': 30,
'callname': 31,
'aparmlist': 32,
'aparmlist-': 33,
'ifstat': 34,
'ifthen': 35,
'ifhead': 36,
'forinit': 37,
'forby': 38,
'forto': 39,
'forstat': 40,
'assignstat': 41,
'astat-': 42,
'bexpr': 43,
'orexpr': 44,
'andexpr': 45,
'andexpr-': 46,
'notexpr': 47,
'relexpr': 48,
'aexpr': 49,
'aexpr-': 50,
'term': 51,
'term-': 52,
'primary': 53,
'constant': 54,
'END': 55,
'PROGRAM': 56,
'var': 57,
'DECLARE': 58,
';': 59,
',': 60,
'SCALAR': 61,
'VECTOR': 62,
'integer': 63,
'MATRIX': 64,
'::': 65,
'INTEGER': 66,
'REAL': 67,
'PROCEDURE': 68,
'{': 69,
'}': 70,
'VALUE': 71,
'REFERENCE': 72,
'EXECUTE': 73,
'[': 74,
']': 75,
':': 76,
'STDIN': 77,
'string': 78,
'STOUT': 79,
'CALL': 80,
'ELSE': 81,
'IF': 82,
'THEN': 83,
'FOR': 84,
'<-': 85,
'BY': 86,
'UNTIL': 87,
'DO': 88,
'|': 89,
'&': 90,
'!': 91,
'<': 92,
'<=': 93,
'>': 94,
'>=': 95,
'==': 96,
'!=': 97,
'+': 98,
'-': 99,
'*': 100,
'/': 101,
'(': 102,
')': 103,
'real': 104,
}
asciidt ={ ';':23,
':':24,
',':25,
'[':26,
']':27,
'(':28,
')':29,
'<':30,
'>':31,
'!':32,
'+':33,
'-':34,
'*':35,
'/':36,
'{':37,
'}':38,
'&':39,
'|':40,
'==':41,
'!=':42,
'<=':43,
'>=':44,
'<-':45,
'::':46}
relation_dict = { 'Equal to':1,
'Greater than':2,
'Less than':3
}
def get_value(reservedt,key):
for k in reservedt:
if reservedt[k] == key:
return k
def get_list(reservedt,list_input):
inp_str = " "
for i in list_input:
for k in reservedt:
if reservedt[k] == int(i):
inp_str = inp_str +" " + k
return inp_str
| [
"acharyaparag@gmail.com"
] | acharyaparag@gmail.com |
21258aa7598c5f930fe4eaed3af4d0a499b648d9 | 98efe1aee73bd9fbec640132e6fb2e54ff444904 | /loldib/getratings/models/NA/na_ornn/na_ornn_top.py | 2ca0d07a4824d85d8de49a6105daf5c1b67f4de7 | [
"Apache-2.0"
] | permissive | koliupy/loldib | be4a1702c26546d6ae1b4a14943a416f73171718 | c9ab94deb07213cdc42b5a7c26467cdafaf81b7f | refs/heads/master | 2021-07-04T03:34:43.615423 | 2017-09-21T15:44:10 | 2017-09-21T15:44:10 | 104,359,388 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,269 | py | from getratings.models.ratings import Ratings
class NA_Ornn_Top_Aatrox(Ratings):
pass
class NA_Ornn_Top_Ahri(Ratings):
pass
class NA_Ornn_Top_Akali(Ratings):
pass
class NA_Ornn_Top_Alistar(Ratings):
pass
class NA_Ornn_Top_Amumu(Ratings):
pass
class NA_Ornn_Top_Anivia(Ratings):
pass
class NA_Ornn_Top_Annie(Ratings):
pass
class NA_Ornn_Top_Ashe(Ratings):
pass
class NA_Ornn_Top_AurelionSol(Ratings):
pass
class NA_Ornn_Top_Azir(Ratings):
pass
class NA_Ornn_Top_Bard(Ratings):
pass
class NA_Ornn_Top_Blitzcrank(Ratings):
pass
class NA_Ornn_Top_Brand(Ratings):
pass
class NA_Ornn_Top_Braum(Ratings):
pass
class NA_Ornn_Top_Caitlyn(Ratings):
pass
class NA_Ornn_Top_Camille(Ratings):
pass
class NA_Ornn_Top_Cassiopeia(Ratings):
pass
class NA_Ornn_Top_Chogath(Ratings):
pass
class NA_Ornn_Top_Corki(Ratings):
pass
class NA_Ornn_Top_Darius(Ratings):
pass
class NA_Ornn_Top_Diana(Ratings):
pass
class NA_Ornn_Top_Draven(Ratings):
pass
class NA_Ornn_Top_DrMundo(Ratings):
pass
class NA_Ornn_Top_Ekko(Ratings):
pass
class NA_Ornn_Top_Elise(Ratings):
pass
class NA_Ornn_Top_Evelynn(Ratings):
pass
class NA_Ornn_Top_Ezreal(Ratings):
pass
class NA_Ornn_Top_Fiddlesticks(Ratings):
pass
class NA_Ornn_Top_Fiora(Ratings):
pass
class NA_Ornn_Top_Fizz(Ratings):
pass
class NA_Ornn_Top_Galio(Ratings):
pass
class NA_Ornn_Top_Gangplank(Ratings):
pass
class NA_Ornn_Top_Garen(Ratings):
pass
class NA_Ornn_Top_Gnar(Ratings):
pass
class NA_Ornn_Top_Gragas(Ratings):
pass
class NA_Ornn_Top_Graves(Ratings):
pass
class NA_Ornn_Top_Hecarim(Ratings):
pass
class NA_Ornn_Top_Heimerdinger(Ratings):
pass
class NA_Ornn_Top_Illaoi(Ratings):
pass
class NA_Ornn_Top_Irelia(Ratings):
pass
class NA_Ornn_Top_Ivern(Ratings):
pass
class NA_Ornn_Top_Janna(Ratings):
pass
class NA_Ornn_Top_JarvanIV(Ratings):
pass
class NA_Ornn_Top_Jax(Ratings):
pass
class NA_Ornn_Top_Jayce(Ratings):
pass
class NA_Ornn_Top_Jhin(Ratings):
pass
class NA_Ornn_Top_Jinx(Ratings):
pass
class NA_Ornn_Top_Kalista(Ratings):
pass
class NA_Ornn_Top_Karma(Ratings):
pass
class NA_Ornn_Top_Karthus(Ratings):
pass
class NA_Ornn_Top_Kassadin(Ratings):
pass
class NA_Ornn_Top_Katarina(Ratings):
pass
class NA_Ornn_Top_Kayle(Ratings):
pass
class NA_Ornn_Top_Kayn(Ratings):
pass
class NA_Ornn_Top_Kennen(Ratings):
pass
class NA_Ornn_Top_Khazix(Ratings):
pass
class NA_Ornn_Top_Kindred(Ratings):
pass
class NA_Ornn_Top_Kled(Ratings):
pass
class NA_Ornn_Top_KogMaw(Ratings):
pass
class NA_Ornn_Top_Leblanc(Ratings):
pass
class NA_Ornn_Top_LeeSin(Ratings):
pass
class NA_Ornn_Top_Leona(Ratings):
pass
class NA_Ornn_Top_Lissandra(Ratings):
pass
class NA_Ornn_Top_Lucian(Ratings):
pass
class NA_Ornn_Top_Lulu(Ratings):
pass
class NA_Ornn_Top_Lux(Ratings):
pass
class NA_Ornn_Top_Malphite(Ratings):
pass
class NA_Ornn_Top_Malzahar(Ratings):
pass
class NA_Ornn_Top_Maokai(Ratings):
pass
class NA_Ornn_Top_MasterYi(Ratings):
pass
class NA_Ornn_Top_MissFortune(Ratings):
pass
class NA_Ornn_Top_MonkeyKing(Ratings):
pass
class NA_Ornn_Top_Mordekaiser(Ratings):
pass
class NA_Ornn_Top_Morgana(Ratings):
pass
class NA_Ornn_Top_Nami(Ratings):
pass
class NA_Ornn_Top_Nasus(Ratings):
pass
class NA_Ornn_Top_Nautilus(Ratings):
pass
class NA_Ornn_Top_Nidalee(Ratings):
pass
class NA_Ornn_Top_Nocturne(Ratings):
pass
class NA_Ornn_Top_Nunu(Ratings):
pass
class NA_Ornn_Top_Olaf(Ratings):
pass
class NA_Ornn_Top_Orianna(Ratings):
pass
class NA_Ornn_Top_Ornn(Ratings):
pass
class NA_Ornn_Top_Pantheon(Ratings):
pass
class NA_Ornn_Top_Poppy(Ratings):
pass
class NA_Ornn_Top_Quinn(Ratings):
pass
class NA_Ornn_Top_Rakan(Ratings):
pass
class NA_Ornn_Top_Rammus(Ratings):
pass
class NA_Ornn_Top_RekSai(Ratings):
pass
class NA_Ornn_Top_Renekton(Ratings):
pass
class NA_Ornn_Top_Rengar(Ratings):
pass
class NA_Ornn_Top_Riven(Ratings):
pass
class NA_Ornn_Top_Rumble(Ratings):
pass
class NA_Ornn_Top_Ryze(Ratings):
pass
class NA_Ornn_Top_Sejuani(Ratings):
pass
class NA_Ornn_Top_Shaco(Ratings):
pass
class NA_Ornn_Top_Shen(Ratings):
pass
class NA_Ornn_Top_Shyvana(Ratings):
pass
class NA_Ornn_Top_Singed(Ratings):
pass
class NA_Ornn_Top_Sion(Ratings):
pass
class NA_Ornn_Top_Sivir(Ratings):
pass
class NA_Ornn_Top_Skarner(Ratings):
pass
class NA_Ornn_Top_Sona(Ratings):
pass
class NA_Ornn_Top_Soraka(Ratings):
pass
class NA_Ornn_Top_Swain(Ratings):
pass
class NA_Ornn_Top_Syndra(Ratings):
pass
class NA_Ornn_Top_TahmKench(Ratings):
pass
class NA_Ornn_Top_Taliyah(Ratings):
pass
class NA_Ornn_Top_Talon(Ratings):
pass
class NA_Ornn_Top_Taric(Ratings):
pass
class NA_Ornn_Top_Teemo(Ratings):
pass
class NA_Ornn_Top_Thresh(Ratings):
pass
class NA_Ornn_Top_Tristana(Ratings):
pass
class NA_Ornn_Top_Trundle(Ratings):
pass
class NA_Ornn_Top_Tryndamere(Ratings):
pass
class NA_Ornn_Top_TwistedFate(Ratings):
pass
class NA_Ornn_Top_Twitch(Ratings):
pass
class NA_Ornn_Top_Udyr(Ratings):
pass
class NA_Ornn_Top_Urgot(Ratings):
pass
class NA_Ornn_Top_Varus(Ratings):
pass
class NA_Ornn_Top_Vayne(Ratings):
pass
class NA_Ornn_Top_Veigar(Ratings):
pass
class NA_Ornn_Top_Velkoz(Ratings):
pass
class NA_Ornn_Top_Vi(Ratings):
pass
class NA_Ornn_Top_Viktor(Ratings):
pass
class NA_Ornn_Top_Vladimir(Ratings):
pass
class NA_Ornn_Top_Volibear(Ratings):
pass
class NA_Ornn_Top_Warwick(Ratings):
pass
class NA_Ornn_Top_Xayah(Ratings):
pass
class NA_Ornn_Top_Xerath(Ratings):
pass
class NA_Ornn_Top_XinZhao(Ratings):
pass
class NA_Ornn_Top_Yasuo(Ratings):
pass
class NA_Ornn_Top_Yorick(Ratings):
pass
class NA_Ornn_Top_Zac(Ratings):
pass
class NA_Ornn_Top_Zed(Ratings):
pass
class NA_Ornn_Top_Ziggs(Ratings):
pass
class NA_Ornn_Top_Zilean(Ratings):
pass
class NA_Ornn_Top_Zyra(Ratings):
pass
| [
"noreply@github.com"
] | noreply@github.com |
87329ac75e0a03161d9c4ec7e50671e1a8c5b0d0 | 22299195d67f887d8de9f8764e8a85680cd3416c | /class7 (Color Filtering - OpenCV with Python for Image and Video Analysis 7)/main.py | e4430a318df1dc716db227d2a786414f7b6eb3ff | [] | no_license | EnggQasim/PythonOpenCV | 71268cb9bfa603b9aec1e239756f515f9693f74c | 2f1cd61df0fd520dbdc0e41a52ebfc4da410c771 | refs/heads/master | 2021-01-01T15:29:14.768477 | 2017-07-18T18:11:19 | 2017-07-18T18:11:19 | 97,629,494 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 500 | py | import cv2
import numpy as np
cap = cv2.VideoCapture(1)
while True:
_, frame = cap.read()
hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
#hsv hue sat value
lower_red = np.array([150,150,50])
upper_red = np.array([180, 255, 150])
mask = cv2.inRange(hsv, lower_red, upper_red)
res = cv2.bitwise_and(frame, frame, mask=mask)
cv2.imshow('Frame', frame)
cv2.imshow('Mask', mask)
cv2.imshow('Result', res)
k = cv2.waitKey(5) & 0xFF
if k == 27:
break
cv2.destroyAllWindows()
cv2.release() | [
"m.qasim077@gmail.com"
] | m.qasim077@gmail.com |
ab25192c92848ca0ed44893a946b6fd0e050f0dd | 23976d5085e2c0b91057a9a4009d4a4bca2f111b | /prod/ttbarSingleLeptonAnalyzer_TTBar.py | 3614d58004994599ac5c37da89d91a6fffa5d590 | [] | no_license | dygyun/TopSemiLepton | 2f7202b1b93e055060911cb222a258e0e462be15 | 9b84415e12fd8146bb4523a1d01b3ab53cae8ca8 | refs/heads/master | 2021-01-24T03:48:35.915247 | 2017-09-20T12:30:04 | 2017-09-20T12:30:04 | 45,440,845 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,912 | py | #------------------------------------------------------------------
# Data or MC Sample (False = running on real data)
runOnMC = False
# runOnTTbarMC == 0, No ttbar
# runOnTTbarMC == 1, ttbar Signal
# runOnTTbarMC == 2, ttbar Background
runOnTTbarMC = 0
#------------------------------------------------------------------

import FWCore.ParameterSet.Config as cms

# CMSSW process for the single-lepton ttbar analysis.
process = cms.Process("ttbarSingleLepton")

# initialize MessageLogger and output report
process.load("FWCore.MessageLogger.MessageLogger_cfi")
process.MessageLogger.cerr.threshold = 'INFO'
process.MessageLogger.categories.append('ttbarljets')
process.MessageLogger.cerr.INFO = cms.untracked.PSet(
    # -1 = do not limit the number of INFO messages
    limit = cms.untracked.int32(-1)
)

process.options = cms.untracked.PSet( wantSummary = cms.untracked.bool(True) )

# Process every event in the input files (-1 = no limit).
process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )

# Input source: fileNames is intentionally left empty here; sample paths are
# kept below as commented-out examples (MC, data, xrootd access).
process.source = cms.Source("PoolSource",
    fileNames = cms.untracked.vstring(
       #'file:/afs/cern.ch/user/b/brochero/CATTuples_August/v7-3-6/cat74/src/CATTools/CatProducer/prod/catTuple-PUPPI-v7-3-6.root' # -- MC PUPPI (v7-3-6)
       #'file:/cms/scratch/CAT/TT_TuneCUETP8M1_13TeV-powheg-pythia8/v7-3-2_RunIISpring15DR74-Asympt50ns_MCRUN2_74_V9A-v4/150805_203807/0000/catTuple_245.root' # -- MC
       #'root://cms-xrdr.sdfarm.kr///xrd/store/group/CAT/TT_TuneCUETP8M1_13TeV-powheg-pythia8/v7-3-4_RunIISpring15DR74-Asympt50ns_MCRUN2_74_V9A-v4/150810_215031/0000/catTuple_276.root' # -- MC
       #'file:/cms/home/brochero/CATTuples_August/v7-3-4/cat74/src/CATTools/CatAnalyzer/prod/SingleMu-PromptReco_catTuple_44.root' # -- DATA
       #'root://cms-xrdr.sdfarm.kr///xrd/store/group/CAT/TT_TuneCUETP8M1_13TeV-powheg-pythia8/v7-3-6_RunIISpring15DR74-Asympt50ns_MCRUN2_74_V9A-v4/150820_215807/0000/catTuple_108.root' # -- XROOT test
       ### 'file:/cms/home/brochero/CATTuples_August/Central-v7-3-6/cat74/src/CATTools/CatAnalyzer/prod/catTuple_108.root' # -- MC
       #'file:/cms/home/brochero/CATTuples_July/cat74/src/CATTools/CatAnalyzer/catTuple_83.root' # -- Data
    )
)

# json file (Only Data): certified-lumi filtering, currently disabled.
#import FWCore.PythonUtilities.LumiList as LumiList
#process.source.lumisToProcess = LumiList.LumiList(filename = 'Cert_246908-251883_13TeV_PromptReco_Collisions15_JSON_v2.txt').getVLuminosityBlockRange()

# The analyzer module; input tags refer to CAT (CMS Analysis Toolkit) collections.
process.ttbarSingleLepton = cms.EDAnalyzer('TtbarSingleLeptonAnalyzer_ttbar',
                                           sampleLabel       = cms.untracked.bool(runOnMC),
                                           TTbarSampleLabel  = cms.untracked.int32(runOnTTbarMC),
                                           genLabel          = cms.InputTag("prunedGenParticles"),
                                           muonLabel         = cms.InputTag("catMuons"),
                                           electronLabel     = cms.InputTag("catElectrons"),
                                           jetLabel          = cms.InputTag("catJets"),
                                           metLabel          = cms.InputTag("catMETs"),
                                           metnoHFLabel      = cms.InputTag("catMETsNoHF"),
                                           metPuppiLabel     = cms.InputTag("catMETsPuppi"),
                                           #metLabel          = cms.InputTag("catMETsNoHF"),
                                           pvLabel           = cms.InputTag("catVertex:nGoodPV"),
                                           puWeight          = cms.InputTag("pileupWeight"),
                                           trigLabel         = cms.InputTag("catTrigger"), # Not working yet
                                           )

# Output ROOT file for the analyzer's histograms/trees.
process.TFileService = cms.Service("TFileService",
                                   fileName = cms.string('vallot.root')
                                   )

#process.Tracer = cms.Service("Tracer")
#process.dump=cms.EDAnalyzer('EventContentAnalyzer')
#process.p = cms.Path(process.demo*process.dump)
process.p = cms.Path(process.ttbarSingleLepton)
"dygyun@Dooyeonui-MacBook-Air.local"
] | dygyun@Dooyeonui-MacBook-Air.local |
5e0c3d672b3b5efb05aa6b2d2e55bd0e758f27e2 | 7c3ccfe8fdcbe05d04444da071b9b3469b11f351 | /.github/scripts/filter_test_configs.py | 96dff128572d4ee665a4731138827d928f152d84 | [
"BSD-2-Clause",
"LicenseRef-scancode-secret-labs-2011",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"BSL-1.0",
"Apache-2.0"
] | permissive | nihui/pytorch | ad4df723672300d5bd4290d6bc4e2e66a5ff2f0f | 999bae0f54108ffc5b7cf2524a02a83901554b16 | refs/heads/master | 2023-07-07T06:35:37.886490 | 2023-05-30T05:07:59 | 2023-05-30T05:07:59 | 177,545,801 | 3 | 2 | NOASSERTION | 2019-03-25T08:33:23 | 2019-03-25T08:33:23 | null | UTF-8 | Python | false | false | 14,809 | py | #!/usr/bin/env python3
import json
import os
import re
import sys
import warnings
from typing import Any, Callable, Dict, List, Optional, Set
from urllib.request import Request, urlopen
import yaml
# Prefix shared by all test-config PR labels, e.g. "test-config/default".
PREFIX = "test-config/"

# Same as shard names: one label per recognized test configuration.
# A PR carrying any of these labels restricts the test matrix to just
# those configs (see filter()).
VALID_TEST_CONFIG_LABELS = {
    f"{PREFIX}{label}"
    for label in {
        "backwards_compat",
        "crossref",
        "default",
        "deploy",
        "distributed",
        "docs_tests",
        "dynamo",
        "force_on_cpu",
        "functorch",
        "inductor",
        "inductor_distributed",
        "inductor_huggingface",
        "inductor_timm",
        "inductor_torchbench",
        "jit_legacy",
        "multigpu",
        "nogpu_AVX512",
        "nogpu_NO_AVX2",
        "slow",
        "tsan",
        "xla",
    }
}
def is_cuda_or_rocm_job(job_name: Optional[str]) -> bool:
    """Return True when the job name mentions a CUDA or ROCm platform.

    A missing/empty job name counts as not a GPU job.
    """
    return bool(job_name) and any(accel in job_name for accel in ("cuda", "rocm"))
# Supported modes when running periodically. A mode is applied to a config
# only when its lambda condition returns true for the current job name.
SUPPORTED_PERIODICAL_MODES: Dict[str, Callable[[Optional[str]], bool]] = {
    # Memory leak check is only needed for CUDA and ROCm jobs which utilize GPU memory
    "mem_leak_check": is_cuda_or_rocm_job,
    # Rerunning disabled tests applies to every job unconditionally.
    "rerun_disabled_tests": lambda job_name: True,
}

# The link to the published list of disabled jobs
DISABLED_JOBS_URL = "https://ossci-metrics.s3.amazonaws.com/disabled-jobs.json"

# Some constants used to remove disabled jobs
JOB_NAME_SEP = "/"
BUILD_JOB_NAME = "build"
TEST_JOB_NAME = "test"
BUILD_AND_TEST_JOB_NAME = "build-and-test"
# Matches "test (dynamo)"-style names, capturing the job and the config.
JOB_NAME_CFG_REGEX = re.compile(r"(?P<job>[\w-]+)\s+\((?P<cfg>[\w-]+)\)")

# Branches on which disabled-job filtering is skipped (see main()).
EXCLUDED_BRANCHES = ["nightly"]
def parse_args() -> Any:
    """Parse the command-line flags that drive test-matrix filtering.

    Only --test-matrix is required; --branch defaults to "main"; every
    other flag is an optional string that defaults to None.
    """
    from argparse import ArgumentParser

    # NOTE: the positional argument here sets the parser's prog name
    # (kept as-is from the original invocation style).
    parser = ArgumentParser(
        "Filter all test configurations and keep only requested ones"
    )
    parser.add_argument(
        "--test-matrix", type=str, required=True, help="the original test matrix"
    )
    for flag, description in (
        ("--workflow", "the name of the current workflow, i.e. pull"),
        (
            "--job-name",
            "the name of the current job, i.e. linux-focal-py3.8-gcc7 / build",
        ),
        ("--pr-number", "the pull request number"),
        ("--tag", "the associated tag if it exists"),
        (
            "--event-name",
            "name of the event that triggered the job (pull, schedule, etc)",
        ),
        ("--schedule", "cron schedule that triggered the job"),
    ):
        parser.add_argument(flag, type=str, help=description)
    parser.add_argument(
        "--branch",
        type=str,
        default="main",
        help="the branch name",
    )
    return parser.parse_args()
def get_labels(pr_number: int) -> Set[str]:
    """Fetch the current set of label names attached to the pull request.

    Queries the GitHub issues API (requires GITHUB_TOKEN in the
    environment); returns an empty set when the download fails.
    """
    # From https://docs.github.com/en/actions/learn-github-actions/environment-variables
    repo = os.environ.get("GITHUB_REPOSITORY", "pytorch/pytorch")
    labels_url = f"https://api.github.com/repos/{repo}/issues/{pr_number}/labels"

    request_headers = {
        "Accept": "application/vnd.github.v3+json",
        "Authorization": f"token {os.environ['GITHUB_TOKEN']}",
    }

    json_response = download_json(url=labels_url, headers=request_headers)
    if not json_response:
        warnings.warn(f"Failed to get the labels for #{pr_number}")
        return set()

    return {entry.get("name") for entry in json_response if entry.get("name")}
def filter(test_matrix: Dict[str, List[Any]], labels: Set[str]) -> Dict[str, List[Any]]:
    """Select which test configs to run, based on the PR's labels.

    If the PR carries one or more test-config/* labels (including alongside
    ciflow labels), only the matching configs are kept. With no such label,
    the matrix is returned unchanged.

    NOTE: this intentionally shadows the builtin ``filter`` (kept for
    backward compatibility with existing callers).
    """
    selected: Dict[str, List[Any]] = {"include": []}

    for entry in test_matrix.get("include", []):
        config_name = entry.get("config", "")
        if not config_name:
            continue

        label = f"{PREFIX}{config_name.strip()}"
        if label not in labels:
            continue

        print(
            f"Select {config_name} because label {label} is presented in the pull request by the time the test starts"
        )
        selected["include"].append(entry)

    # Keep the filtered matrix when it matched something, or when the PR
    # explicitly asked for specific configs (even if none matched here);
    # otherwise fall back to running everything.
    if selected["include"] or labels & VALID_TEST_CONFIG_LABELS:
        return selected
    return test_matrix
def set_periodic_modes(
    test_matrix: Dict[str, List[Any]], job_name: Optional[str]
) -> Dict[str, List[Any]]:
    """Expand the matrix with every periodic mode applicable to this job.

    Each original config yields one copy per applicable mode, with the
    mode name recorded on the copy.
    """
    expanded: Dict[str, List[Any]] = {
        "include": [],
    }

    for config in test_matrix.get("include", []):
        for mode, applies_to in SUPPORTED_PERIODICAL_MODES.items():
            if not applies_to(job_name):
                continue
            variant = dict(config)
            variant[mode] = mode
            expanded["include"].append(variant)

    return expanded
def remove_disabled_jobs(
    workflow: str, job_name: str, test_matrix: Dict[str, List[Any]]
) -> Dict[str, List[Any]]:
    """
    Check the list of disabled jobs, remove the current job and all its dependents
    if it exists in the list. The list of disabled jobs is as follows:

    {
        "WORKFLOW / PLATFORM / JOB (CONFIG)": [
            AUTHOR,
            ISSUE_NUMBER,
            ISSUE_URL,
            WORKFLOW,
            PLATFORM,
            JOB (CONFIG),
        ],
        "pull / linux-bionic-py3.8-clang9 / test (dynamo)": [
            "pytorchbot",
            "94861",
            "https://github.com/pytorch/pytorch/issues/94861",
            "pull",
            "linux-bionic-py3.8-clang9",
            "test (dynamo)",
        ],
    }

    Returns an empty matrix when the whole job is disabled, a config-filtered
    copy when only a single test config is disabled, or the input matrix
    unchanged when no disable record matches.
    """
    try:
        # The job name from github is in the PLATFORM / JOB (CONFIG) format, so breaking
        # it into its two components first
        current_platform, _ = [n.strip() for n in job_name.split(JOB_NAME_SEP, 1) if n]
    except ValueError as error:
        # Raised when the job name has no "/" separator (the unpacking fails);
        # `error` itself is intentionally not included in the warning.
        warnings.warn(f"Invalid job name {job_name}, returning")
        return test_matrix

    # The result will be stored here; an empty "include" means skip everything
    filtered_test_matrix: Dict[str, List[Any]] = {"include": []}

    for _, record in download_json(url=DISABLED_JOBS_URL, headers={}).items():
        (
            author,
            _,
            disabled_url,
            disabled_workflow,
            disabled_platform,
            disabled_job_cfg,
        ) = record

        if disabled_workflow != workflow:
            # The current workflow is not disabled by this record
            continue

        cleanup_regex = rf"(-{BUILD_JOB_NAME}|-{TEST_JOB_NAME})$"
        # There is an exception here for binary build workflows in which the platform
        # names have the build and test suffix. For example, we have a build job called
        # manywheel-py3-cuda11_8-build / build and its subsequent test job called
        # manywheel-py3-cuda11_8-test / test. So they are linked, but their suffixes
        # are different
        disabled_platform_no_suffix = re.sub(cleanup_regex, "", disabled_platform)
        current_platform_no_suffix = re.sub(cleanup_regex, "", current_platform)

        if (
            disabled_platform != current_platform
            and disabled_platform_no_suffix != current_platform_no_suffix
        ):
            # The current platform is not disabled by this record
            continue

        # The logic after this is fairly complicated:
        #
        # - If the disabled record doesn't have the optional job (config) name,
        #   i.e. pull / linux-bionic-py3.8-clang9, all build and test jobs will
        #   be skipped
        #
        # - If the disabled record has the job name and it's a build job, i.e.
        #   pull / linux-bionic-py3.8-clang9 / build, all build and test jobs
        #   will be skipped, because the latter requires the former
        #
        # - If the disabled record has the job name and it's a test job without
        #   the config part, i.e. pull / linux-bionic-py3.8-clang9 / test, all
        #   test jobs will be skipped. TODO: At the moment, the script uses the
        #   short-circuiting logic to skip the build job automatically when there
        #   is no test job assuming that it would be a waste of effort building
        #   for nothing. This might not be the desirable behavior, and could be
        #   fixed later if needed
        #
        # - If the disabled record has the job (config) name, only that test config
        #   will be skipped, i.e. pull / linux-bionic-py3.8-clang9 / test (dynamo)
        if not disabled_job_cfg:
            print(
                f"Issue {disabled_url} created by {author} has disabled all CI jobs for {workflow} / {job_name}"
            )
            return filtered_test_matrix

        if disabled_job_cfg == BUILD_JOB_NAME:
            print(
                f"Issue {disabled_url} created by {author} has disabled the build job for {workflow} / {job_name}"
            )
            return filtered_test_matrix

        if disabled_job_cfg in (TEST_JOB_NAME, BUILD_AND_TEST_JOB_NAME):
            print(
                f"Issue {disabled_url} created by {author} has disabled all the test jobs for {workflow} / {job_name}"
            )
            return filtered_test_matrix

        # Otherwise the record names a specific "job (config)" pair
        m = JOB_NAME_CFG_REGEX.match(disabled_job_cfg)
        if m:
            disabled_job = m.group("job")
            # Make sure that the job name is a valid test job name first before checking the config
            if disabled_job in (TEST_JOB_NAME, BUILD_AND_TEST_JOB_NAME):
                disabled_cfg = m.group("cfg")
                # Remove the disabled config from the test matrix
                filtered_test_matrix["include"] = [
                    r
                    for r in test_matrix["include"]
                    if r.get("config", "") != disabled_cfg
                ]
                return filtered_test_matrix

        warnings.warn(
            f"Found a matching disabled issue {disabled_url} for {workflow} / {job_name}, "
            f"but the name {disabled_job_cfg} is invalid"
        )

    # Found no matching disabled issue, return the same input test matrix
    return test_matrix
def download_json(url: str, headers: Dict[str, str], num_retries: int = 3) -> Any:
    """Download and JSON-decode a URL, retrying on any failure.

    Each attempt uses a 5-second timeout; returns {} once all retries
    are exhausted.
    """
    for _ in range(num_retries):
        try:
            request = Request(url=url, headers=headers)
            raw = urlopen(request, timeout=5).read().decode("utf-8")
            return json.loads(raw)
        except Exception as e:
            warnings.warn(f"Could not download {url}: {e}")

    warnings.warn(f"All {num_retries} retries exhausted, downloading {url} failed")
    return {}
def set_output(name: str, val: Any) -> None:
    """Publish a step output.

    Appends "name=val" to $GITHUB_OUTPUT when that file is configured,
    otherwise falls back to the legacy ::set-output workflow command.
    """
    output_path = os.getenv("GITHUB_OUTPUT")
    if not output_path:
        print(f"::set-output name={name}::{val}")
        return
    with open(str(output_path), "a") as env:
        print(f"{name}={val}", file=env)
def main() -> None:
    """Entry point: load the test matrix, filter it, and publish outputs.

    Pipeline (order matters): label-based filtering -> periodic-mode
    expansion (only for the specific cron schedule) -> disabled-job
    removal -> emit test-matrix / is-test-matrix-empty / keep-going.
    """
    args = parse_args()
    # Load the original test matrix set by the workflow. Its format, however,
    # doesn't follow the strict JSON format, so we load it using yaml here for
    # its more relaxed syntax
    test_matrix = yaml.safe_load(args.test_matrix)

    if test_matrix is None:
        warnings.warn(f"Invalid test matrix input '{args.test_matrix}', exiting")
        # We handle invalid test matrix gracefully by marking it as empty
        set_output("is-test-matrix-empty", True)
        sys.exit(0)

    pr_number = args.pr_number
    tag = args.tag

    # If the tag matches, we can get the PR number from it, this is from ciflow
    # workflow dispatcher
    tag_regex = re.compile(r"^ciflow/\w+/(?P<pr_number>\d+)$")

    labels = set()
    if pr_number:
        # If a PR number is set, query all the labels from that PR
        labels = get_labels(int(pr_number))
        # Then filter the test matrix and keep only the selected ones
        filtered_test_matrix = filter(test_matrix, labels)

    elif tag:
        m = tag_regex.match(tag)

        if m:
            pr_number = m.group("pr_number")
            # The PR number can also come from the tag in ciflow tag event
            labels = get_labels(int(pr_number))
            # Filter the test matrix and keep only the selected ones
            filtered_test_matrix = filter(test_matrix, labels)
        else:
            # There is a tag but it isn't ciflow, so there is nothing left to do
            filtered_test_matrix = test_matrix

    else:
        # No PR number, no tag, we can just return the test matrix as it is
        filtered_test_matrix = test_matrix

    if args.event_name == "schedule" and args.schedule == "29 8 * * *":
        # we don't want to run the mem leak check or disabled tests on normal
        # periodically scheduled jobs, only the ones at this time
        filtered_test_matrix = set_periodic_modes(filtered_test_matrix, args.job_name)

    if args.workflow and args.job_name and args.branch not in EXCLUDED_BRANCHES:
        # If both workflow and job name are available, we will check if the current job
        # is disabled and remove it and all its dependants from the test matrix
        filtered_test_matrix = remove_disabled_jobs(
            args.workflow, args.job_name, filtered_test_matrix
        )

    # Set the filtered test matrix as the output
    set_output("test-matrix", json.dumps(filtered_test_matrix))

    filtered_test_matrix_len = len(filtered_test_matrix.get("include", []))
    # and also put a flag if the test matrix is empty, so subsequent jobs can
    # quickly check it without the need to parse the JSON string
    set_output("is-test-matrix-empty", filtered_test_matrix_len == 0)

    # keep-going: a plain PR label (not test-config/*) forwarded as-is
    set_output("keep-going", "keep-going" in labels)


if __name__ == "__main__":
    main()
| [
"pytorchmergebot@users.noreply.github.com"
] | pytorchmergebot@users.noreply.github.com |
2736f96c3498266dc89e81e233c7c4990877fda0 | 3408dbca136a301cdaece4f15bc69e6c3cab3caf | /ism_pkg/tools/rff_layer.py | 53dc0dd8c9479e6b895e2bcf7f794328a1d417c8 | [
"MIT"
] | permissive | fusion-research/stochastic_ISM | 7c832e41aa76853349f266ebcf2d29aef77ade54 | 6438eb17bd391e8698e989156acb9b786c8f7299 | refs/heads/master | 2023-04-05T10:52:13.611747 | 2021-04-15T16:18:58 | 2021-04-15T16:18:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 755 | py |
from ism_pkg.tools.RFF import *
# A layer is defined by Wₐ, ℱ = RFF mapping, and maybe Wᵦ
# ℓᴵᴺ:layer input, ℓᴼᵁᵀ : layer output from training only
# A layer is a function f = Wᵦ ∘ H ∘ ℱ ∘ Wₐ
class rff_layer():
    """A single random-Fourier-feature (RFF) layer: f = Wᵦ ∘ H ∘ ℱ ∘ Wₐ
    (see the module comments above).

    ℓᴵᴺ is the layer's training input, σ the bandwidth passed to the RFF
    mapping, and RFF_width the number of random features.
    """

    def __init__(self, ℓᴵᴺ, σ, RFF_width=400):
        self.σ = σ
        self.ℓᴵᴺ = ℓᴵᴺ
        # Weight matrices start empty. NOTE(review): Wₐ/W/Wᵦ are not set
        # anywhere in this class — presumably populated elsewhere in the
        # project; confirm before relying on them.
        self.Wₐ = self.W = np.array([])
        self.Wᵦ = np.array([])
        # Build the RFF mapping and featurize the training input.
        self.get_RFF_mapping(RFF_width)

    def get_RFF_mapping(self, RFF_width=400):
        """Create/initialize the RFF mapping ℱ and cache Φᵪ = ℱ(ℓᴵᴺ).

        Also stores the training-time layer output ℓᴼᵁᵀ (alias of Φᵪ).
        """
        self.ℱ = RFF(sample_num=RFF_width)
        self.ℱ.initialize_RFF(self.ℓᴵᴺ, self.σ)
        self.Φᵪ = self.ℱ.np_feature_map(self.ℓᴵᴺ)
        self.ℓᴼᵁᵀ = self.Φᵪ

    def apply_layer(self, X):
        """Map new data X through this layer's RFF feature map."""
        Φᵪ = self.ℱ.np_feature_map(X)
        return Φᵪ
| [
"juliusctw@gmail.com"
] | juliusctw@gmail.com |
b62cc4bb33d0bd00d4f6f417bd45a4df88c9bfad | be86e0055a3fc1a713ff6c530365cde78e81c2bf | /src/cardinal/db/newbie.py | 00d36673ff2cd09538a8b0e1f7d35833b741b80f | [
"MIT"
] | permissive | Maamue/cardinal.py | 228f3e422bb4884c87e53c6ef59373f772cf2b6d | 8e5a357c896ad0ff899e92a211380aab80c09b79 | refs/heads/master | 2020-08-31T23:46:57.841645 | 2019-04-30T15:40:13 | 2019-04-30T15:40:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,673 | py | from sqlalchemy import BigInteger, Column, DateTime, ForeignKey, Interval, UnicodeText
from sqlalchemy.orm import relationship
from .base import Base
class NewbieGuild(Base):
    """Per-guild configuration for the newbie gating feature.

    IDs are stored as BigInteger — presumably Discord snowflakes; confirm
    against the producers of these rows.
    """

    __tablename__ = 'newbie_guilds'

    # Guild ID doubles as the primary key (assigned externally, not autoincremented)
    guild_id = Column(BigInteger, primary_key=True, autoincrement=False)
    # Role granted/managed for confirmed members
    role_id = Column(BigInteger, nullable=False)
    # Message shown to newly joined users
    welcome_message = Column(UnicodeText, nullable=False)
    # Reply expected/sent upon confirmation
    response_message = Column(UnicodeText, nullable=False)
    # Optional grace period; NULL means no timeout
    timeout = Column(Interval, nullable=True)
class NewbieUser(Base):
    """A pending (unconfirmed) user within a guild.

    Composite primary key (user_id, guild_id): the same user can be
    pending in several guilds at once.
    """

    __tablename__ = 'newbie_users'

    user_id = Column(BigInteger, primary_key=True, autoincrement=False)
    guild_id = Column(BigInteger,
                      ForeignKey(NewbieGuild.guild_id),
                      primary_key=True,
                      autoincrement=False)
    # The welcome message sent to this user (unique per user)
    message_id = Column(BigInteger, unique=True, nullable=False)
    # When the user joined — used with NewbieGuild.timeout (presumably; confirm)
    joined_at = Column(DateTime, nullable=False)
class NewbieChannel(Base):
    """A channel tracked by the newbie feature, owned by one guild."""

    __tablename__ = 'newbie_channels'

    channel_id = Column(BigInteger, primary_key=True, autoincrement=False)
    guild_id = Column(BigInteger, ForeignKey(NewbieGuild.guild_id), nullable=False)
# Relationships are attached after all model classes exist so each side is
# already defined; backref='guild' adds the reverse attribute on the child.
# TODO: Decide on lazy (True) or eager (False) loading
NewbieGuild.users = relationship(NewbieUser,
                                 backref='guild',
                                 innerjoin=True,
                                 cascade='all, delete-orphan',
                                 lazy=True)
NewbieGuild.channels = relationship(NewbieChannel,
                                    backref='guild',
                                    cascade='all, delete-orphan',
                                    lazy=True)
| [
"simon.engmann@tu-dortmund.de"
] | simon.engmann@tu-dortmund.de |
5f1c1351f6599de5efa259519f67b0362771a49c | fc77f5083e4303f589b8e4b06a9cf898c8bffe3e | /users/forms.py | db49143b9091f51c957968a7cdd01bed1790a5e7 | [] | no_license | SaemChan/hotelreservation | fbe364153f2194669219881309c782df45fb6481 | c2df07fff5e0e6cf7d14e65147b774801c231e3f | refs/heads/master | 2023-01-02T11:53:31.098275 | 2020-10-26T17:34:55 | 2020-10-26T17:34:55 | 294,141,391 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,313 | py | from django import forms
from . import models
# from django.contrib.auth import password_validation
from . import models
class LoginForm(forms.Form):
email = forms.EmailField(widget=forms.EmailInput(attrs={"placeholder": "Email"}))
password = forms.CharField(
widget=forms.PasswordInput(attrs={"placeholder": "Password"})
)
def clean(self):
email = self.cleaned_data.get("email")
password = self.cleaned_data.get("password")
try:
user = models.User.objects.get(email=email)
if user.check_password(password):
return self.cleaned_data
else:
self.add_error("password", forms.ValidationError("Password is wrong"))
except models.User.DoesNotExist:
self.add_error("email", forms.ValidationError("User does not exist"))
class SignUpForm(forms.ModelForm):
    """Registration form backed by the custom User model.

    Collects first/last name, email and a double-entered password; the
    email doubles as the username on save.
    """

    class Meta:
        model = models.User
        fields = ("first_name", "last_name", "email")
        widgets = {
            "first_name": forms.TextInput(attrs={"placeholder": "First Name"}),
            "last_name": forms.TextInput(attrs={"placeholder": "Last Name"}),
            # FIX: placeholder previously read "Email Name"; use "Email" for
            # consistency with LoginForm's email field.
            "email": forms.EmailInput(attrs={"placeholder": "Email"}),
        }

    password = forms.CharField(
        widget=forms.PasswordInput(attrs={"placeholder": "Password"})
    )
    password1 = forms.CharField(
        widget=forms.PasswordInput(attrs={"placeholder": "Confirm Password"})
    )

    def clean_email(self):
        """Reject emails that already belong to an existing user."""
        email = self.cleaned_data.get("email")
        try:
            models.User.objects.get(email=email)
            raise forms.ValidationError(
                "That email is already taken", code="existing_user"
            )
        except models.User.DoesNotExist:
            return email

    def clean_password1(self):
        """Ensure both password entries match; return the validated password."""
        password = self.cleaned_data.get("password")
        password1 = self.cleaned_data.get("password1")
        if password != password1:
            raise forms.ValidationError("Password confirmation does not match")
        else:
            return password

    def save(self, *args, **kwargs):
        """Create the user with a hashed password and the email as username.

        Returns the saved user instance, matching Django's
        ModelForm.save() contract.
        """
        user = super().save(commit=False)
        email = self.cleaned_data.get("email")
        password = self.cleaned_data.get("password")
        user.username = email
        user.set_password(password)
        user.save()
        # FIX: ModelForm.save() is expected to return the instance; the
        # original returned None, breaking `user = form.save()` callers.
        return user
# NOTE(review): the two triple-quoted blocks below are dead code — earlier
# drafts of SignUpForm kept alive as module-level string literals. They have
# no runtime effect beyond constructing the strings; consider deleting them.
"""class SignUpForm(UserCreationForm):

    username = forms.EmailField(label="Email")
"""

"""class SignUpForm(forms.ModelForm):
    class Meta:
        model = models.User
        fields = ("first_name", "last_name", "email")

    password = forms.CharField(widget=forms.PasswordInput)
    password1 = forms.CharField(widget=forms.PasswordInput, label="Confirm Password")

    def clean_password1(self):
        password = self.cleaned_data.get("password")
        password1 = self.cleaned_data.get("password1")
        if password != password1:
            raise forms.ValidationError("Password confirmation does not match")
        else:
            return password

    def save(self, *args, **kwargs):
        user = super().save(commit=False)
        email = self.cleaned_data.get("email")
        password = self.cleaned_data.get("password")
        user.username = email
        user.set_password(password)
        user.save()
"""
"scha_@naver.com"
] | scha_@naver.com |
be93dedcd378f0edd7cec8080db95e8c1e31360e | 994ed74e909a59c3aae6428e13c4817865fc5636 | /ar/factory.py | fcbdd587f5ee693b6f1d1ecb28b89e7e259c2017 | [] | no_license | Lance0404/ampos_restaurant | e6bb0d66ac79d93902b30d13631cdb6fda4e8ea1 | 8c8c2039d704235c686cdd797d3962ee6d5cac8e | refs/heads/master | 2020-05-16T12:25:16.243764 | 2019-04-26T12:01:41 | 2019-04-26T12:01:41 | 183,044,940 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 812 | py | from flask import Flask, current_app
from ar import config
# import os
from ar import db
import logging
import logging.config
from ar import mylogging
def create_app(config_obj=None):
    """Application factory: build and configure the Flask app.

    Args:
        config_obj: optional configuration object applied on top of the
            default ``ar.config`` module. (Previously this parameter was
            accepted but silently ignored.)

    Returns:
        The configured Flask application with the v1 API registered.
    """
    app = Flask(__name__)
    app.logger.info('flask app is up by Lance!')
    app.config.from_object(config)
    # FIX: honor the config_obj override instead of silently ignoring it
    # (restores the commented-out intent of the original code).
    if config_obj:
        app.config.from_object(config_obj)
    with app.app_context():
        db.init_app(app)
        # Tables are created unconditionally; the (removed) CLEAN_TABLE flow
        # that dropped tables first remains disabled.
        db.create_all()
        db.session.commit()
    # Local import — presumably to avoid a circular import at module load
    # time; confirm before moving it to the top of the file.
    from ar.api.v1.endpoints import bp as endpoints_bp
    app.register_blueprint(endpoints_bp, url_prefix='/v1')
    return app
| [
"virtuouslycan@gmail.com"
] | virtuouslycan@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.