| blob_id (string, len 40) | directory_id (string, len 40) | path (string, len 2–616) | content_id (string, len 40) | detected_licenses (list, len 0–69) | license_type (string, 2 classes) | repo_name (string, len 5–118) | snapshot_id (string, len 40) | revision_id (string, len 40) | branch_name (string, len 4–63) | visit_date (timestamp[us]) | revision_date (timestamp[us]) | committer_date (timestamp[us]) | github_id (int64, 2.91k–686M, nullable) | star_events_count (int64, 0–209k) | fork_events_count (int64, 0–110k) | gha_license_id (string, 23 classes) | gha_event_created_at (timestamp[us]) | gha_created_at (timestamp[us]) | gha_language (string, 213 classes) | src_encoding (string, 30 classes) | language (string, 1 class) | is_vendor (bool) | is_generated (bool) | length_bytes (int64, 2–10.3M) | extension (string, 246 classes) | content (string, len 2–10.3M) | authors (list, len 1) | author_id (string, len 0–212) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
d24626872c96822900062bfe7a7743c5830295ef
|
152824609ab06a8b31b55f32510773137312064a
|
/launch.py
|
c33b588131bb2b970050976f307722c6fc843824
|
[] |
no_license
|
jakobbsm/NowPlaying
|
e8f338ceb934a1cd1751bd8c78226531e0167d62
|
a894bf1a7723b4271e22f5387f348f1b376c8b36
|
refs/heads/master
| 2023-01-02T05:58:18.087660
| 2020-10-24T20:56:32
| 2020-10-24T20:56:32
| 296,960,535
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 53
|
py
|
from NowPlaying import nowplaying
nowplaying.main()
|
[
"jakobbsm@gmail.com"
] |
jakobbsm@gmail.com
|
b44a5d408683b927f661cf1a8b164618f992a8ba
|
6f93573111d9c2f26966b9f72f65a55be372b1e6
|
/orami/spiders/orami_spider.py
|
d4e410d35d97a1fd5868e92319f56fa8dcf25ad9
|
[] |
no_license
|
ajikusbandono/jawaban_no_2
|
dfcedec006021b35818159e2978513e75daa1468
|
21fbfa752896e575062cdc53a7e555c19b3d73d0
|
refs/heads/master
| 2022-11-30T07:36:50.284679
| 2020-08-09T03:47:01
| 2020-08-09T03:47:01
| 286,096,855
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,482
|
py
|
# scrapy shell "https://www.orami.co.id/ap/takoyakids"
# to find the xpath or css patterns
# run with -> scrapy crawl orami
# cheats https://devhints.io/xpath
"""
--- Export as CSV Feed ---
FEED_FORMAT = "csv"
FEED_URI = "result.csv"
--- save in settings.py ---
"""
import scrapy


class OramiSpider(scrapy.Spider):
    name = "orami"

    def start_requests(self):
        urls = [
            'https://www.orami.co.id/',
        ]
        for url in urls:
            yield scrapy.Request(url=url, callback=self.getlink)

    def getlink(self, response):
        links = response.xpath("//div[contains(@class, 'oss-u-1-8 pl-8 pr-8')]/div/a/@href").extract()
        for link in links:
            try:
                yield scrapy.Request(url=link, callback=self.parse)
            except ValueError:
                # relative URL: prepend the domain and retry
                _link = 'https://www.orami.co.id' + link
                yield scrapy.Request(url=_link, callback=self.parse)

    def parse(self, response):
        _res = response.url
        _price = response.xpath("//div[contains(@class, 'widget-price mb-8')]//p/text()").extract()
        _name_product = response.xpath("//div[contains(@class, 'prod-name mb-8')]//a/text()").extract()
        for _item in zip(_price, _name_product):
            scraped_info = {
                'price': _item[0],
                'name_product': _item[1].strip(),
                'link': _res
            }
            yield scraped_info
|
[
"ajikusbandonoext@gmail.com"
] |
ajikusbandonoext@gmail.com
|
90250a964c27036a6b7c2f95665974cfc1d8db55
|
5e4a415e8053050254749a0cd9b408536adccca1
|
/lab3/example4.py
|
ffb92bccbcb7f45d5aa3e522acd79769374f701f
|
[] |
no_license
|
x59272796/280201065
|
c22a7ed7bc15e4f63aa9065db21298e9242c0ae3
|
51cf72bf3058bdd3ea42806cafd1ab4ec8a9f9d4
|
refs/heads/master
| 2023-06-18T03:41:49.321791
| 2021-06-20T21:43:45
| 2021-06-20T21:43:45
| 305,110,207
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 225
|
py
|
age = int(input("How old are you?"))
ticket = 3
discount = 50
if age < 6 or age > 60:
    ticket = 0
elif 6 <= age <= 18:
    ticket = ticket * ((100 - discount) / 100)
print("You have to pay " + str(ticket) + " Turkish Liras.")
|
[
"yigitdurmaz@std.iyte.edu.tr"
] |
yigitdurmaz@std.iyte.edu.tr
|
5305640658e29b3e4bd472bdcd804bc603de201f
|
b2fc598c68fa101eab6d37df2a23a38ceb9dbba6
|
/jingdong/jd_4_baogao.py
|
f98a7d156604d96b337c902409bb8351e2c078f0
|
[] |
no_license
|
chengchenf/project
|
4d501e6c8da6119f05f863862b45f49f49b7bca7
|
4fbbb4883b77a75e5bf84dce477cbd76d1283d6d
|
refs/heads/master
| 2020-04-14T03:15:35.386093
| 2019-07-12T07:21:27
| 2019-07-12T07:21:27
| 163,603,550
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 706
|
py
|
# coding=utf-8
import os
import sys
import time
import unittest
from HTMLTestRunner import HTMLTestRunner

reload(sys)
sys.setdefaultencoding('utf-8')

if __name__ == '__main__':
    baogaopath = r'C:\Users\chen\Desktop\linshi\\'
    shijian = time.strftime('%Y_%m_%d %H_%M_%S')
    baogaoming = baogaopath + shijian + u'report.html'
    # assemble the test suite
    dangqianmulu = os.path.abspath('')
    # print dangqianmulu
    testsuit = unittest.defaultTestLoader.discover(dangqianmulu, pattern='jd*.py')
    fp = open(baogaoming, 'wb')
    # write out the report
    runner = HTMLTestRunner(stream=fp, title=u'京东自动化测试报告', description=u'win10系统')
    runner.run(testsuit)
    fp.close()
|
[
"1083651727@qq.com"
] |
1083651727@qq.com
|
9dd4f6e5dd4f2a03b20cba0d9ac237169b77d085
|
f7e1671cd4eca4f6eff8a18eb634a7c8b1538840
|
/visreader/python/visreader/misc/imagetool.py
|
83e6937692c1b68ae17e74d2445ac06c346ee07a
|
[] |
no_license
|
PaddlePaddle/VisionTools
|
6f40eeedf23fa4c6c62d7e9b398f4d385c1bfa8e
|
bd2c33cb6a4ccfd2b8fc32139d59bc834cdc6c9d
|
refs/heads/master
| 2023-05-24T18:51:03.096781
| 2019-07-18T12:04:15
| 2019-07-18T12:04:15
| 75,912,778
| 21
| 12
| null | 2023-05-22T21:37:27
| 2016-12-08T07:11:09
|
Python
|
UTF-8
|
Python
| false
| false
| 1,249
|
py
|
"""
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
import io
from PIL import Image
import numpy as np
def save_jpeg(data, fname):
assert type(data) == np.ndarray, \
'invalid type of "data" when save it as jpeg'
im = Image.fromarray(data)
im.save(fname)
def load_jpeg(fname, as_rgb=True, to_np=True):
with open(fname, 'rb') as f:
data = f.read()
stream = io.BytesIO(data)
img = Image.open(stream)
if as_rgb and img.mode != 'RGB':
img = img.convert('RGB')
if to_np:
img = np.array(img)
return img
if __name__ == "__main__":
fname = 'test_img.jpg'
img = load_jpeg(fname)
save_jpeg(img, 'new.jpg')
|
[
"wanglong03@baidu.com"
] |
wanglong03@baidu.com
|
fcc9c8d4821b393f117e3af98dd209ea66bb1488
|
459250c6efd1267917d5973e76c6b765b7642a0b
|
/duplicate_encoder.py
|
274701bfb24fe9e7016eae28d31983ccc16071d0
|
[] |
no_license
|
msafari89/codewars
|
e9be3be6017fb1b941ebfd98937bf0a698180e2e
|
9852474d941d420d0d443f7faf8eb1a696390b24
|
refs/heads/master
| 2020-06-13T20:35:38.890737
| 2019-07-16T03:54:57
| 2019-07-16T03:54:57
| 194,780,534
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 780
|
py
|
# The goal of this exercise is to convert a string to a new string where each character in the new string
# is "(" if that character appears only once in the original string, or ")" if that
# character appears more than once in the original string. Ignore capitalization when determining if a character is a duplicate.


def duplicate_encoder(word):
    word = word.upper()
    word_list = list(word)
    result = word_list.copy()
    for i in range(0, len(word_list)):
        print(word_list[i])
        if word_list.count(word_list[i]) > 1:
            result[i] = ")"
        else:
            result[i] = "("
    return "".join(result)  # the original built result but never returned it


def duplicate_encode(word):
    return "".join(["(" if word.lower().count(c) == 1 else ")" for c in word.lower()])


duplicate_encoder("Mimossa")
|
[
"m.j.safari89@outlook.com"
] |
m.j.safari89@outlook.com
|
6bf2f0710a8085b220d88af14bb1642e6753272d
|
ac5e52a3fc52dde58d208746cddabef2e378119e
|
/exps-gsn-edf.0/gsn-edf_ut=3.5_rd=0.5_rw=0.04_rn=4_u=0.075-0.325_p=harmonic-2/sched=RUN_trial=72/params.py
|
8aaea0d268f078f8ca23bfedff6ab7009d3df1a7
|
[] |
no_license
|
ricardobtxr/experiment-scripts
|
1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1
|
7bcebff7ac2f2822423f211f1162cd017a18babb
|
refs/heads/master
| 2023-04-09T02:37:41.466794
| 2021-04-25T03:27:16
| 2021-04-25T03:27:16
| 358,926,457
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 254
|
py
|
{'cpus': 4,
'duration': 30,
'final_util': '3.516929',
'max_util': '3.5',
'periods': 'harmonic-2',
'release_master': False,
'res_distr': '0.5',
'res_nmb': '4',
'res_weight': '0.04',
'scheduler': 'GSN-EDF',
'trial': 72,
'utils': 'uni-medium-3'}
|
[
"ricardo.btxr@gmail.com"
] |
ricardo.btxr@gmail.com
|
3a3f6e39bb56c367fafcc9de13c312f241cc776c
|
b74be606bddc1485f980c63a4d1c0380387b2d68
|
/app/routes.py
|
d97bf96b3224f39d3cf89f8fdccf4b448251dbeb
|
[] |
no_license
|
mwilliams22/project_new
|
4c2ee3ef667bda36ace6443fcdb4e7828511d21b
|
74a5bc620ee0fa751908573e56cd38797b0b872a
|
refs/heads/master
| 2020-06-27T21:50:41.844317
| 2019-08-08T17:16:05
| 2019-08-08T17:16:05
| 200,059,203
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,464
|
py
|
import os
from app import app
from flask import render_template, request, redirect, session, url_for
from flask_pymongo import PyMongo

app.secret_key = b'\x1b\xe3)\x89Hz\x83X\xb6\xe2\xdb\x17\xa1J\x94J'
# name of database
app.config['MONGO_DBNAME'] = 'database'
# URI of database
app.config['MONGO_URI'] = 'mongodb+srv://admin:Purple123@cluster0-3obbt.mongodb.net/database?retryWrites=true&w=majority'
mongo = PyMongo(app)


@app.route('/')
@app.route('/index')
def index():
    return render_template('index.html')


@app.route('/signup', methods=['GET', 'POST'])
def signup():
    if request.method == 'POST':
        # take in info, check if username is taken; if it is available, put in database of users
        users = mongo.db.project_users
        existing_user = users.find_one({"username": request.form['username']})
        if existing_user is None:
            users.insert({"username": request.form['username'], "password": request.form['password']})
            return redirect(url_for('market'))
        else:
            message = "That username is taken. Try logging in or try a different username."
            return render_template('signup.html', message=message)
    else:
        return render_template('signup.html', message="")


@app.route('/market', methods=["POST", "GET"])
def market():
    if request.method == "GET":
        return render_template('market.html')
    else:
        message = "Your supermarket has been saved"
        return render_template('profile1.html', message=message)


@app.route('/login', methods=["POST", "GET"])
def login():
    if request.method == 'POST':
        users = mongo.db.project_users
        # use the username to find the account
        existing_user = users.find_one({"username": request.form["username"]})
        if existing_user:
            # check if the password is right
            if existing_user['password'] == request.form["password"]:
                session['username'] = request.form['username']
                return redirect(url_for('profile'))
            else:
                message = "Your password doesn't match your username. Try again."
                return render_template('login.html', message=message)
        else:
            message = "There is no user with that username. Try making an account."
            return render_template('signup.html', message=message)
    else:
        return render_template('login.html', message="")


# LOG OUT
@app.route('/logout')
def logout():
    session.clear()
    return redirect('/')


@app.route('/profile', methods=["POST", "GET"])
def profile():
    collection = mongo.db.meals
    meals = collection.find({})
    user_collection = mongo.db.user_items
    user_items = user_collection.find({})
    return render_template('profile.html', message="", meals=meals, user_items=user_items)


@app.route('/add')
def add():
    # connect to the database
    meals = mongo.db.meals
    # insert new data
    # events.insert({"event":"First Day of Classes", "date":"2019-08-21"})
    # events.insert({"event":"Winter Break", "date":"2019-12-20"})
    # events.insert({"event":"Finals Begin", "date":"2019-12-01"})
    # events.insert({"event": "Madison's Birthday", "date":"2004-07-07"})
    # return a message to the user
    return "Event added"


@app.route('/meals/new', methods=["GET", "POST"])
def meals_new():
    userdata = dict(request.form)
    meals = mongo.db.meals
    meals.insert(userdata)
    return redirect('/profile')


@app.route('/shopping', methods=["GET", "POST"])
def shopping():
    collection = mongo.db.meals
    meals = collection.find({})
    user_collection = mongo.db.user_items
    user_items = user_collection.find({})
    items = mongo.db.items
    message1 = ""
    message2 = ""
    # items.insert({"item":"potatoes"})
    # items.insert({"item":"rice"})
    # items.insert({"item":"milk"})
    # items.insert({"item":"popcorn"})
    userdata = dict(request.form)
    existing_item = items.find_one({"item": request.form["item"]})
    user_collection.insert(userdata)
    if existing_item is None:
        message1 = "No, we are very sorry."
        return render_template('profile.html', meals=meals, user_items=user_items, message1=message1)
    else:
        # find_one returns the matching document, so any truthy result means the item exists
        # (the original compared `existing_item is True`, which never matched)
        message2 = "Yes!"
        return render_template('profile.html', meals=meals, user_items=user_items, message2=message2)
|
[
"ec2-user@ip-172-31-36-7.us-east-2.compute.internal"
] |
ec2-user@ip-172-31-36-7.us-east-2.compute.internal
|
493c6a2097528ad256457b7c4dc0acb1e0a15a47
|
7363e1465f1f37c8d0a29196aeb26552d7aa7074
|
/landtemperature_mp.py
|
0348702d152a218d8af1c1e9db44ab28e0773e4e
|
[] |
no_license
|
soarwing52/Small-Geotools
|
d3e4289e8e22b0e01a7eca9f436a0361f3351679
|
6d2063299c84bfaf3ec78f3e1ca3c2954e52f46b
|
refs/heads/master
| 2021-07-14T22:53:54.344708
| 2021-07-01T01:36:52
| 2021-07-01T01:36:52
| 176,767,632
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,930
|
py
|
# -*- coding: UTF-8 -*-
import os
from multiprocessing import Pool

import arcpy
from arcpy.sa import Divide, Float, Raster, Ln

arcpy.env.overwriteOutput = True
arcpy.env.workspace = r'D:\Small-Geotools'
RESULT_FOLDER = 'D:\\surface'
TARGET_FOLDER = r'F:\\landsat 2020'
RADIANCE_MULT_BAND = 0.0003342
RADIANCE_ADD = 0.1
Oi = 0.29
K1_CONSTANT_BAND_10 = 774.8853
K2_CONSTANT_BAND_10 = 1321.0789


def mp_land_temperature(file):
    path_thermal = file + "\\" + file[-40:] + "_B10.tif"
    location = file[-30:-24]
    date = file[-24:-15]
    name = date + "_" + location
    print(name)
    arcpy.env.scratchWorkspace = os.path.join(arcpy.env.workspace, name)  # r'C:\Users\yourname\PSU_LiDAR\f'+raster.replace(".img","")
    if not os.path.exists(arcpy.env.scratchWorkspace):
        os.makedirs(arcpy.env.scratchWorkspace)
    thermal_band = Raster(path_thermal)
    print(thermal_band)
    print("band confirmed")
    Rfloat = Float(thermal_band)
    top_temperature = Rfloat * RADIANCE_MULT_BAND + RADIANCE_ADD - Oi
    temp_tif = "temp{}.tif".format(name)
    top_temperature.save(temp_tif)
    top_temperature = Raster(temp_tif)
    top_temperature = Float(top_temperature)
    divide1 = Divide(K1_CONSTANT_BAND_10, (top_temperature + 1))
    ln1 = Ln(divide1)
    surface_temp = Divide(K2_CONSTANT_BAND_10, ln1) - 273.15
    print(name)
    path = "surface_temp" + name + ".tif"
    print(path)
    surface_temp.save(path)


if __name__ == '__main__':
    print("GO")
    target_list = []
    for x in os.listdir(TARGET_FOLDER):
        file_path = os.path.join(TARGET_FOLDER, x)
        if os.path.isdir(file_path):
            print(file_path)
            target_list.append(file_path)
    pool = Pool()
    pool.map(mp_land_temperature, target_list)
|
[
"soarwing52hot@gmail.com"
] |
soarwing52hot@gmail.com
|
493c335996f7bb69ec7faead873dc4bbdbc63635
|
16102cc2b9de85d84e4a5d6032e101d6f65e3a5a
|
/evaluation_metrics/custom_data_precision_and_recall.py
|
2f5a775dadec2632d0342252d7792a12bce6e8e6
|
[] |
no_license
|
anandkarra/Evaluation_Metrics_Mini_Project
|
407fa4fda4ce76814dc27cdfc8b0e77754fd2725
|
f4778f443d60faeddd572b75eb12b681fd4dd753
|
refs/heads/master
| 2021-04-06T19:24:45.501132
| 2018-03-14T18:52:17
| 2018-03-14T18:52:17
| 125,259,143
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,328
|
py
|
#!/usr/bin/python
"""
Starter code for the validation mini-project.
The first step toward building your POI identifier!
Start by loading/formatting the data
After that, it's not our code anymore--it's yours!
"""
import pickle
import sys
sys.path.append("../tools/")
from feature_format import featureFormat, targetFeatureSplit
data_dict = pickle.load(open("../final_project/final_project_dataset.pkl", "r"))
### first element is our labels, any added elements are predictor
### features. Keep this the same for the mini-project, but you'll
### have a different feature list when you do the final project.
features_list = ["poi", "salary"]
data = featureFormat(data_dict, features_list)
labels, features = targetFeatureSplit(data)
### it's all yours from here forward!
from sklearn import cross_validation
features_train,features_test,labels_train,labels_test = cross_validation.train_test_split(features,labels,test_size=0.3,random_state=42)
from sklearn import tree
clf = tree.DecisionTreeClassifier()
clf.fit(features_train,labels_train)
predictions = [0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1]
true_labels = [0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 0]
from sklearn.metrics import classification_report
print(classification_report(true_labels, predictions))
|
[
"i7andgravity@live.com"
] |
i7andgravity@live.com
|
0c5093427ec1f71a7d4afabd93539d77a77c2658
|
7b27648f96a36a69458cf364262e6973a8dbe3fa
|
/ejercicio.py
|
e3a2a961874ba6ae5f99146bc53febcb9959bcd9
|
[] |
no_license
|
Daniel-code666/Programas
|
daa6943ccca8600f98cb2409f193cd7c06e0b43b
|
58bfcd9678e2e5b31db4edb350d9cb344a0fb937
|
refs/heads/master
| 2023-05-30T17:19:48.740526
| 2021-06-23T17:56:57
| 2021-06-23T17:56:57
| 376,101,958
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 636
|
py
|
numero_magico = []
num_mag = input("Escriba 12345679: ")
if num_mag == '12345679':
    print(num_mag)
else:
    print(num_mag.replace('8', ''))
    num_mag2 = num_mag.replace('8', '')
    print(num_mag2)
# for i in range(1, 10):
#     elem = int(input("Enter the numbers: "))
#     if elem == 8:
#         continue
#     else:
#         numero_magico.append(elem)
# numero_usuario = int(input("Enter a number from 1 to 9: "))
# if numero_usuario < 0 and numero_usuario > 10:
#     print("Enter a number within the range")
#     numero_usuario = int(input("Enter a number from 1 to 9: "))
# else:
#     print(numero_magico)
|
[
"dmartinezcifuentes180@gmail.com"
] |
dmartinezcifuentes180@gmail.com
|
67843ba639692fa421a192c51d03941971c3fc10
|
5c848a88f435dc2906228553522f403b6a1d5f15
|
/edd/utilities.py
|
2d8e2fd6e27ba3e3421458d346d521cb43500fc6
|
[
"BSD-3-Clause-LBNL",
"BSD-3-Clause"
] |
permissive
|
TeselaGen/jbei-edd
|
2ed6ad139483a5cb006d5f931b9c50c4851d976e
|
fb71e29219a7024042a8f4c42a5c88f0803b5895
|
refs/heads/master
| 2021-04-12T03:36:05.977542
| 2018-04-18T17:09:35
| 2018-04-18T17:09:35
| 125,923,562
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,799
|
py
|
# coding: utf-8
"""
General utility code for EDD, not tied to Django or Celery.
"""
import json
from datetime import datetime
from dateutil import parser
from decimal import Decimal
from django.conf import settings
from kombu.serialization import register
from uuid import UUID

DATETIME = '__datetime__'
TYPE = '__type__'
VALUE = 'value'


class JSONEncoder(json.JSONEncoder):
    """
    Enhancement of base JSONEncoder, also handling these objects:
    * datetime.datetime
    * decimal.Decimal
    * uuid.UUID
    """
    def default(self, o):
        if isinstance(o, Decimal):
            return float(o)
        elif isinstance(o, UUID):
            return str(o)
        elif isinstance(o, datetime):
            return {
                TYPE: DATETIME,
                VALUE: o.isoformat(),
            }
        return super(JSONEncoder, self).default(o)

    @staticmethod
    def dumps(obj):
        return json.dumps(obj, cls=JSONEncoder)


class JSONDecoder(json.JSONDecoder):
    """
    Complement of JSONEncoder, translates encoded datetime objects back to real datetime.
    """
    def __init__(self, *args, **kwargs):
        super(JSONDecoder, self).__init__(object_hook=self.object_hook, *args, **kwargs)

    def object_hook(self, o):
        if TYPE not in o:
            return o
        klass = o[TYPE]
        if klass == DATETIME:  # compare against the same marker the encoder writes
            return parser.parse(o[VALUE])
        return o

    @staticmethod
    def loads(text):
        return json.loads(text, cls=JSONDecoder)


# register serializers for JSON that handle UUIDs and datetime objects
register(
    name=getattr(settings, 'EDD_SERIALIZE_NAME', 'edd-json'),
    encoder=JSONEncoder.dumps,
    decoder=JSONDecoder.loads,
    content_type='application/x-edd-json',
    content_encoding='UTF-8',
)
|
[
"wcmorrell@lbl.gov"
] |
wcmorrell@lbl.gov
|
aa947531ab2ef5c8948fd5e805175af3b7a5ebf6
|
547242530dc1bd3a4f4556eb6870a3f7a40efae2
|
/Spy-Games/code.py
|
2f202ef4c179cece2732a4220bd9a491c2ba6600
|
[
"MIT"
] |
permissive
|
OmkarAsukar/dsmp-pre-work
|
7ecec1d724549e1bf40e6e38bae1f394391e0e9e
|
b99f541758f0806d59d89bd5c5d63f926dc52892
|
refs/heads/master
| 2020-06-25T13:21:21.938425
| 2019-08-17T05:39:21
| 2019-08-17T05:39:21
| 199,320,372
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,354
|
py
|
# --------------
# File path for the file
file_path


def read_file(path):
    file = open(path, 'r')
    sentence = file.readline()
    file.close()
    return (sentence)


sample_message = read_file(file_path)
print(sample_message)
# Code starts here

# --------------
# Code starts here
message_1 = read_file(file_path_1)
message_2 = read_file(file_path_2)
print(message_1)
print(message_2)


def fuse_msg(message_a, message_b):
    quotient = int(message_b) // int(message_a)
    return (str(quotient))


secret_msg_1 = fuse_msg(message_1, message_2)
print(secret_msg_1)

# --------------
# Code starts here
message_3 = read_file(file_path_3)
print(message_3)


def substitute_msg(message_c):
    if (message_c == 'Red'):
        sub = 'Army General'
    if (message_c == 'Green'):
        sub = 'Data Scientist'
    if (message_c == 'Blue'):
        sub = 'Marine Biologist'
    return (sub)


secret_msg_2 = substitute_msg(message_3)
print(secret_msg_2)

# --------------
# File path for message 4 and message 5
file_path_4
file_path_5
# Code starts here
message_4 = read_file(file_path_4)
message_5 = read_file(file_path_5)
print(message_4)
print(message_5)


def compare_msg(message_d, message_e):
    alist = message_d.split()
    blist = message_e.split()
    c_list = [i for i in alist if i not in blist]
    final_msg = " ".join(c_list)
    return (final_msg)


secret_msg_3 = compare_msg(message_4, message_5)
print(secret_msg_3)

# --------------
# Code starts here
message_6 = read_file(file_path_6)
print(message_6)


def extract_msg(message_f):
    a_list = message_f.split()
    print(a_list)
    even_word = lambda x: (len(x)) % 2 == 0
    b_list = list(filter(even_word, a_list))
    final_msg = ' '.join(b_list)
    return (final_msg)


secret_msg_4 = extract_msg(message_6)
print(secret_msg_4)

# --------------
# Secret message parts in the correct order
message_parts = [secret_msg_3, secret_msg_1, secret_msg_4, secret_msg_2]
final_path = user_data_dir + '/secret_message.txt'
# Code starts here
secret_msg = ' '.join(message_parts)


def write_file(secret_msg, path):
    file = open(path, mode='a+')
    file.write(secret_msg)
    file.close()
    return ()


write_file(secret_msg, final_path)
print(secret_msg)
|
[
"OmkarAsukar@users.noreply.github.com"
] |
OmkarAsukar@users.noreply.github.com
|
a1bafd62c641cba22f0bbeabdfb86c76ef03bd5f
|
1c1961f8ed56c6852fd2a43a5c47283eb930767c
|
/Python/Sandhya/task2/store_in_sqlite.py
|
f96a58332ef948952c242a6e57ec1f28912a1173
|
[] |
no_license
|
leapfrogtechnology/lf-training
|
4a2c22d06519c4b49638e5f7519268c9e8f2347a
|
6d18c6d152a1dddfa6e5711c91321a4d7a745517
|
refs/heads/master
| 2023-01-28T16:00:52.805472
| 2020-09-14T11:40:02
| 2020-09-14T11:40:02
| 283,126,079
| 1
| 15
| null | 2020-11-30T07:55:02
| 2020-07-28T06:48:44
|
Python
|
UTF-8
|
Python
| false
| false
| 598
|
py
|
import sqlite3
import pandas
from pandas import DataFrame
con = sqlite3.connect('ScrapeData.db')
# Saves database in same location as this file
c = con.cursor()
c.execute('''CREATE TABLE MOVIE_RATINGS ([generated_id] INTEGER PRIMARY KEY, [TITLE] text, [RATING] float)''')
# Read data from the csv file
read_data = pandas.read_csv(r'output/topmovies.csv')
# Insert movie titles from csv file to the table
read_data.to_sql('MOVIE_RATINGS', con, if_exists = 'append', index = False)
for movies in con.execute("SELECT TITLE, RATING FROM MOVIE_RATINGS"):
    print(movies)
con.commit()
con.close()
|
[
"intrinsic@pop-os.localdomain"
] |
intrinsic@pop-os.localdomain
|
eb6553627c71a02e4c29b6cc978eef31a510583f
|
d2ed747705b20cee8113f71b7f0807a18323bbcb
|
/events/signals.py
|
5cc3c81654335d935180aa6dbde964bb0ee34ed9
|
[] |
no_license
|
Lils2013/technotrack-web2-spring-2017
|
240e03c90e890cdb5eebad4bcd7273d23e5a72e6
|
e1e7f75835206e68c5e9bdeca08620b420916980
|
refs/heads/master
| 2021-08-30T12:21:23.786930
| 2017-12-17T23:02:57
| 2017-12-17T23:02:57
| 103,847,772
| 0
| 0
| null | 2017-09-17T17:25:13
| 2017-09-17T17:25:13
| null |
UTF-8
|
Python
| false
| false
| 655
|
py
|
from django.contrib.contenttypes.models import ContentType
from django.db.models.signals import post_save
from core.models import ModelWithAuthor
from events.models import WatchableModel, Event
def watchable_postsave(instance, created=False, *args, **kwargs):
    if (not isinstance(instance, WatchableModel)) or not (isinstance(instance, ModelWithAuthor)) or not created:
        return
    event = Event()
    event.title = instance.get_title_for_event(instance)
    event.author = instance.author
    event.object = instance
    event.content_type = ContentType.objects.get_for_model(instance)
    event.save()


post_save.connect(watchable_postsave)
|
[
"Lils2010@gmail.com"
] |
Lils2010@gmail.com
|
172c6a142cfa8dce849d4836ff7a2c27faafb6e2
|
fe5c543b3070000fba4820e9b7a26c7c46529ee8
|
/mapclientplugins/scaffoldparameterfitterstep/model/scaffoldmodel.py
|
04daaf490be4a81a920ecde9a1bbb2ada9638051
|
[
"Apache-2.0"
] |
permissive
|
haribalankumar/mapclientplugins.scaffoldparameterfitterstep
|
a5097abb8979d3caf03a12db1dc8fbe0c37dbc59
|
130ba280cb4d4b91ed48eb80868599c8eae28f73
|
refs/heads/master
| 2022-01-31T17:34:13.314870
| 2019-07-23T20:12:31
| 2019-07-23T20:12:31
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 16,457
|
py
|
from opencmiss.zinc.field import Field
from opencmiss.zinc.graphics import Graphics
from opencmiss.zinc.glyph import Glyph
from opencmiss.zinc.material import Material
from opencmiss.zinc.node import Node
from opencmiss.zinc.streamregion import StreaminformationRegion
from opencmiss.utils.zinc import create_finite_element_field
from scaffoldmaker.scaffolds import Scaffolds
from scaffoldmaker.scaffoldpackage import ScaffoldPackage

from ..utils import maths


class ScaffoldModel(object):

    def __init__(self, context, region, generator_model, parameters, material_module, scaffold_package,
                 scaffold_package_class):
        self._context = context
        self._region = region
        # self._region = self._context.createRegion()
        # self._region.setName('custom_scaffold')
        # self._region.readFile('D:\\sparc\\tmp\\pig_scaffold.exf')
        self._generator_model = generator_model
        self._material_module = material_module
        self._parameters = parameters.keys()
        self._coordinate_field = None
        _scaffold_package = scaffold_package
        _scaffold_package_class = scaffold_package_class
        scaffolds = Scaffolds()
        self._all_scaffold_types = scaffolds.getScaffoldTypes()
        scaffold_type = None
        for scaffold in self._all_scaffold_types:
            if scaffold == _scaffold_package[-1].getScaffoldType():
                scaffold_type = scaffold
        if scaffold_type is None:
            raise TypeError('Scaffold Type was not found.')
        scaffold_package = ScaffoldPackage(scaffold_type)
        self._parameterSetName = scaffold_type.getParameterSetNames()[0]
        self._scaffold_package = scaffold_package
        self._scaffold = None
        self._scaffold_options = None
        self._temp_region = None
        self._annotation_groups = None
        self._scene = None
        self._scaffold_is_time_aware = None
        self._scaffold_fit_parameters = None
        self._initialise_surface_material()
        # self._timekeeper = self._scene.getTimekeepermodule().getDefaultTimekeeper()
        # self._current_time = None
        # self._maximum_time = None
        # self._time_sequence = None

    def get_region(self):
        return self._region

    # def set_time(self, time):
    #     self._current_time = time
    #     self._timekeeper.setTime(time)

    def _create_surface_graphics(self):
        self._scene.beginChange()
        surface = self._scene.createGraphicsSurfaces()
        surface.setCoordinateField(self._coordinate_field)
        surface.setRenderPolygonMode(Graphics.RENDER_POLYGON_MODE_SHADED)
        surface_material = self._material_module.findMaterialByName('trans_blue')
        surface.setMaterial(surface_material)
        surface.setName('display_surfaces')
        self._scene.endChange()
        return surface

    def _create_node_graphics(self):
        self._scene.beginChange()
        self._node_derivative_labels = ['D1', 'D2', 'D3', 'D12', 'D13', 'D23', 'D123']
        fm = self._region.getFieldmodule()
        fm.beginChange()
        cmiss_number = fm.findFieldByName('cmiss_number')
        node_points = self._scene.createGraphicsPoints()
        node_points.setFieldDomainType(Field.DOMAIN_TYPE_NODES)
        node_points.setCoordinateField(self._coordinate_field)
        point_attr = node_points.getGraphicspointattributes()
        point_attr.setBaseSize([500, 500, 500])
        point_attr.setGlyphShapeType(Glyph.SHAPE_TYPE_SPHERE)
        node_points.setMaterial(self._material_module.findMaterialByName('white'))
        node_points.setName('display_node_points')
        node_numbers = self._scene.createGraphicsPoints()
        node_numbers.setFieldDomainType(Field.DOMAIN_TYPE_NODES)
        node_numbers.setCoordinateField(self._coordinate_field)
        point_attr = node_numbers.getGraphicspointattributes()
        point_attr.setLabelField(cmiss_number)
        point_attr.setGlyphShapeType(Glyph.SHAPE_TYPE_NONE)
        node_numbers.setMaterial(self._material_module.findMaterialByName('green'))
        node_numbers.setName('display_node_numbers')
        node_derivative_fields = [
            fm.createFieldNodeValue(self._coordinate_field, Node.VALUE_LABEL_D_DS1, 1),
            fm.createFieldNodeValue(self._coordinate_field, Node.VALUE_LABEL_D_DS2, 1),
            fm.createFieldNodeValue(self._coordinate_field, Node.VALUE_LABEL_D_DS3, 1),
            fm.createFieldNodeValue(self._coordinate_field, Node.VALUE_LABEL_D2_DS1DS2, 1),
            fm.createFieldNodeValue(self._coordinate_field, Node.VALUE_LABEL_D2_DS1DS3, 1),
            fm.createFieldNodeValue(self._coordinate_field, Node.VALUE_LABEL_D2_DS2DS3, 1),
            fm.createFieldNodeValue(self._coordinate_field, Node.VALUE_LABEL_D3_DS1DS2DS3, 1)
        ]
        node_derivative_material_names = ['gold', 'silver', 'green', 'cyan', 'magenta', 'yellow', 'blue']
        derivative_scales = [1.0, 1.0, 1.0, 0.5, 0.5, 0.5, 0.25]
        for i in range(len(self._node_derivative_labels)):
            node_derivative_label = self._node_derivative_labels[i]
            node_derivatives = self._scene.createGraphicsPoints()
            node_derivatives.setFieldDomainType(Field.DOMAIN_TYPE_NODES)
            node_derivatives.setCoordinateField(self._coordinate_field)
            point_attr = node_derivatives.getGraphicspointattributes()
            point_attr.setGlyphShapeType(Glyph.SHAPE_TYPE_ARROW_SOLID)
            point_attr.setOrientationScaleField(node_derivative_fields[i])
            point_attr.setBaseSize([0.0, 50, 50])
            point_attr.setScaleFactors([derivative_scales[i], 0.0, 0.0])
            material = self._material_module.findMaterialByName(node_derivative_material_names[i])
            node_derivatives.setMaterial(material)
            node_derivatives.setSelectedMaterial(material)
            node_derivatives.setName('display_node_derivatives' + node_derivative_label)
        fm.endChange()
        self._scene.endChange()
        return

    def _create_line_graphics(self):
        self._scene.beginChange()
        lines = self._scene.createGraphicsLines()
        fieldmodule = self._context.getMaterialmodule()
        lines.setCoordinateField(self._coordinate_field)
        lines.setName('display_lines')
        black = fieldmodule.findMaterialByName('white')
        lines.setMaterial(black)
        self._scene.endChange()
        return lines

    def create_scaffold_graphics(self):
        # self._create_node_graphics()
        self._create_line_graphics()
        self._create_surface_graphics()

    def _get_mesh(self):
        parent_region = self._region
        fm = parent_region.getFieldmodule()
        for dimension in range(3, 0, -1):
            mesh = fm.findMeshByDimension(dimension)
            if mesh.getSize() > 0:
                return mesh
        raise ValueError('Model contains no mesh')

    def get_model_coordinate_field(self):
        mesh = self._get_mesh()
        element = mesh.createElementiterator().next()
        if not element.isValid():
            raise ValueError('Model contains no elements')
        fm = self._region.getFieldmodule()
        cache = fm.createFieldcache()
        cache.setElement(element)
        field_iter = fm.createFielditerator()
        field = field_iter.next()
        while field.isValid():
            if field.isTypeCoordinate() and (field.getNumberOfComponents() <= 3):
                if field.isDefinedAtLocation(cache):
                    return field
            field = field_iter.next()
        raise ValueError('Could not determine model coordinate field')

    def _get_node_coordinates_range(self, time=0):
        fm = self._coordinate_field.getFieldmodule()
        fm.beginChange()
        nodes = fm.findNodesetByFieldDomainType(Field.DOMAIN_TYPE_NODES)
        min_coordinates = fm.createFieldNodesetMinimum(self._coordinate_field, nodes)
        max_coordinates = fm.createFieldNodesetMaximum(self._coordinate_field, nodes)
        components_count = self._coordinate_field.getNumberOfComponents()
        cache = fm.createFieldcache()
        cache.setTime(time)
        result, min_x = min_coordinates.evaluateReal(cache, components_count)
        result, max_x = max_coordinates.evaluateReal(cache, components_count)
        fm.endChange()
        return min_x, max_x

    def get_range(self, time=0):
        return self._get_node_coordinates_range(time=time)

    def get_scale(self):
        minimums, maximums = self._get_node_coordinates_range()
        return maths.sub(minimums, maximums)

    def get_coordinate_field(self):
        return self._coordinate_field

    def get_scaffold_options(self):
        return self._scaffold_options

    def initialise_scaffold(self):
        # self._coordinate_field = self.get_model_coordinate_field()
        self._coordinate_field = self._region.getFieldmodule().findFieldByName('coordinates')
        print('Coordinate = ', self._coordinate_field.isValid())

    def _update(self):
        self._scene.beginChange()
        for name in ['display_lines', 'display_surfaces']:
            graphics = self._scene.findGraphicsByName(name)
            graphics.setCoordinateField(self._coordinate_field)
        self._scene.endChange()

    def get_scaffold_package(self):
        return self._scaffold_package

    def _get_scaffold_package_settings(self):
        return self._scaffold_package.getScaffoldSettings()

    def _get_scaffold_package_type(self):
        return self._scaffold_package.getScaffoldType()

    def get_edit_scaffold_settings(self):
        return self._scaffold_package.getScaffoldSettings()

    def get_edit_scaffold_option(self, key):
        # print(self.get_edit_scaffold_settings()[key])
        return self.get_edit_scaffold_settings()[key]

    def generate_mesh_for_fitting(self):
        scaffold_package = self._scaffold_package
        # if self._region:
        #     self._region.removeChild(self._region)
        # self._region = self._region.createChild('fitting_region')
        scaffold_package.getScaffoldType().generateMesh(self._region, self.get_edit_scaffold_settings())
        self._update()

    def _initialise_surface_material(self):
        self._material_module = self._context.getMaterialmodule()
        self._material_module.beginChange()
        solid_blue = self._material_module.createMaterial()
        solid_blue.setName('solid_blue')
        solid_blue.setManaged(True)
        solid_blue.setAttributeReal3(Material.ATTRIBUTE_AMBIENT, [0.0, 0.2, 0.6])
        solid_blue.setAttributeReal3(Material.ATTRIBUTE_DIFFUSE, [0.0, 0.7, 1.0])
        solid_blue.setAttributeReal3(Material.ATTRIBUTE_EMISSION, [0.0, 0.0, 0.0])
        solid_blue.setAttributeReal3(Material.ATTRIBUTE_SPECULAR, [0.1, 0.1, 0.1])
        solid_blue.setAttributeReal(Material.ATTRIBUTE_SHININESS, 0.2)
        trans_blue = self._material_module.createMaterial()
        trans_blue.setName('trans_blue')
        trans_blue.setManaged(True)
        trans_blue.setAttributeReal3(Material.ATTRIBUTE_AMBIENT, [0.0, 0.2, 0.6])
        trans_blue.setAttributeReal3(Material.ATTRIBUTE_DIFFUSE, [0.0, 0.7, 1.0])
        trans_blue.setAttributeReal3(Material.ATTRIBUTE_EMISSION, [0.0, 0.0, 0.0])
        trans_blue.setAttributeReal3(Material.ATTRIBUTE_SPECULAR, [0.1, 0.1, 0.1])
        trans_blue.setAttributeReal(Material.ATTRIBUTE_ALPHA, 0.3)
        trans_blue.setAttributeReal(Material.ATTRIBUTE_SHININESS, 0.2)
        glyph_module = self._context.getGlyphmodule()
        glyph_module.defineStandardGlyphs()
        self._material_module.defineStandardMaterials()
        solid_tissue = self._material_module.createMaterial()
        solid_tissue.setName('heart_tissue')
        solid_tissue.setManaged(True)
        solid_tissue.setAttributeReal3(Material.ATTRIBUTE_AMBIENT, [0.913, 0.541, 0.33])
        solid_tissue.setAttributeReal3(Material.ATTRIBUTE_EMISSION, [0.0, 0.0, 0.0])
        solid_tissue.setAttributeReal3(Material.ATTRIBUTE_SPECULAR, [0.2, 0.2, 0.3])
        solid_tissue.setAttributeReal(Material.ATTRIBUTE_ALPHA, 1.0)
        solid_tissue.setAttributeReal(Material.ATTRIBUTE_SHININESS, 0.6)
        self._material_module.endChange()

    def set_coordinate_field(self, field):
        if self._coordinate_field is not None:
            self._coordinate_field = None
        self._coordinate_field = field

    def set_scaffold_graphics_post_rotate(self, field):
        self._scene.beginChange()
        for name in ['display_lines', 'display_surfaces']:
            graphics = self._scene.findGraphicsByName(name)
            graphics.setCoordinateField(field)
        self._scene.endChange()
        self.set_coordinate_field(field)

    def transfer_temp_into_main(self, time):
        node_descriptions = _extract_node_descriptions(self._temp_region)
        if not self._scaffold_is_time_aware:
            self._undefine_scaffold_nodes()
            self._scaffold_is_time_aware = True
        _read_node_descriptions(self._region, node_descriptions, time)

    def generate_temp_mesh(self, fit_options_array=None):
        fit_options = {}
        if fit_options_array is not None:
            for index in range(len(self._parameters)):
                fit_options[self._parameters[index]] = fit_options_array[index]
        temp_options = self.get_scaffold_options().copy()
        temp_options.update(fit_options)
        self._temp_region = self._region.createRegion()
        self._scaffold.generateMesh(self._temp_region, temp_options)

    def set_scaffold_options(self, options):
        self._scaffold_options = options
        parameters = []
        for option in self._parameters:
            parameters.append(self._scaffold_options[option])
        self._scaffold_fit_parameters = parameters

    def initialise_scene(self):
        self._scene = self._region.getScene()

    def set_scaffold(self, scaffold):
        self._scaffold = scaffold

    def _undefine_scaffold_nodes(self):
        field_module = self._region.getFieldmodule()
        field_module.beginChange()
        node_set = field_module.findNodesetByName('nodes')
        node_template = node_set.createNodetemplate()
        node_template.undefineField(self._coordinate_field)
        node_iterator = node_set.createNodeiterator()
        node = node_iterator.next()
        while node.isValid():
            node.merge(node_template)
            node = node_iterator.next()
        field_module.endChange()

    def write_model(self, filename):
        self._region.writeFile(filename)


def _extract_node_descriptions(region):
    stream_information = region.createStreaminformationRegion()
    memory_resource = stream_information.createStreamresourceMemory()
    stream_information.setResourceDomainTypes(memory_resource, Field.DOMAIN_TYPE_NODES)
    region.write(stream_information)
    _, buffer_contents = memory_resource.getBuffer()
    return buffer_contents


def _read_node_descriptions(region, buffer, time):
    stream_information = region.createStreaminformationRegion()
    memory_resource = stream_information.createStreamresourceMemoryBuffer(buffer)
    stream_information.setResourceDomainTypes(memory_resource, Field.DOMAIN_TYPE_NODES)
    stream_information.setResourceAttributeReal(memory_resource, StreaminformationRegion.ATTRIBUTE_TIME, time)
    region.read(stream_information)


def _read_aligner_description(scaffold_region, scaffold_description):
    scaffold_stream_information = scaffold_region.createStreaminformationRegion()
    memory_resource = scaffold_stream_information.createStreamresourceMemoryBuffer(scaffold_description['elements3D'])
    scaffold_stream_information.setResourceDomainTypes(memory_resource, Field.DOMAIN_TYPE_MESH3D)
    memory_resource = scaffold_stream_information.createStreamresourceMemoryBuffer(scaffold_description['elements2D'])
    scaffold_stream_information.setResourceDomainTypes(memory_resource, Field.DOMAIN_TYPE_MESH2D)
    memory_resource = scaffold_stream_information.createStreamresourceMemoryBuffer(scaffold_description['elements1D'])
    scaffold_stream_information.setResourceDomainTypes(memory_resource, Field.DOMAIN_TYPE_MESH1D)
    memory_resource = scaffold_stream_information.createStreamresourceMemoryBuffer(scaffold_description['nodes'])
    scaffold_stream_information.setResourceDomainTypes(memory_resource, Field.DOMAIN_TYPE_NODES)
    return scaffold_stream_information
|
[
"m.osanlouy@auckland.ac.nz"
] |
m.osanlouy@auckland.ac.nz
|
c098a950df38c2582d381fce2375fb8666b2ac93
|
51927d34dde2c3df30e7e0e77ab67ae3a5323a6e
|
/bifrost_dashboard/bifrost_dashboard/reporter.py
|
76983c2658ac2314f717811d3b40b085d813f476
|
[
"MIT"
] |
permissive
|
Jinjie-Duan/bifrost
|
88dc96ade4d2159362d664b424796abf2077e57c
|
e17f35235ecb07f0031557a6e8698c7279525f6d
|
refs/heads/master
| 2022-11-28T16:07:47.218696
| 2020-07-29T12:37:10
| 2020-07-29T12:37:10
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 25,610
|
py
|
# -*- coding: utf-8 -*-
import os
import sys
import re
import urllib.parse as urlparse
import datetime
from io import StringIO
from flask_caching import Cache
import dash
import dash_core_components as dcc
import dash_html_components as html
import dash_auth
import pandas as pd
import numpy as np
from bson import json_util
import plotly.graph_objs as go
import dash_bootstrap_components as dbc
from plotly import tools
from dash.dependencies import Input, Output, State
from flask import request  # To get client IP for pass/fail stamp
import bifrostapi

import bifrost_dashboard.components.import_data as import_data
from bifrost_dashboard.components.table import html_table, html_td_percentage
from bifrost_dashboard.components.filter import html_div_filter, generate_table, filter_update_run_options, filter_update_filter_values, html_filter_drawer, html_collection_selector, update_collection_button
from bifrost_dashboard.components.sample_report import SAMPLE_PAGESIZE, sample_report, children_sample_list_report, samples_next_page
from bifrost_dashboard.components.images import list_of_images, static_image_route, image_directory
import bifrost_dashboard.components.global_vars as global_vars
import bifrost_dashboard.components.admin as admin
from bifrost_dashboard.run_checker import pipeline_report, rerun_components_button, update_rerun_table, pipeline_report_data
from bifrost_dashboard.components.aggregate_report import aggregate_report, update_aggregate_fig, aggregate_species_dropdown
from bifrost_dashboard.components.resequence_report import resequence_report
from bifrost_dashboard.components.link_to_files import link_to_files, link_to_files_div

import yaml

config = yaml.safe_load(open(os.environ["BIFROST_DASH_CONFIG"]))
bifrostapi.add_URI(config["mongodb_key"])

external_scripts = [
    'https://kit.fontawesome.com/24170a81ff.js',
]
external_stylesheets = [
    "https://fonts.googleapis.com/css?family=Lato",
    dbc.themes.BOOTSTRAP
]
assets = os.path.dirname(os.path.abspath(__file__)) + "/data/assets"

app = dash.Dash("bifrost_dashboard",
                assets_folder=assets,
                external_stylesheets=external_stylesheets,
                external_scripts=external_scripts
                )
app.title = "bifrost"
app.config["suppress_callback_exceptions"] = True
cache = Cache(app.server, config={
    'CACHE_TYPE': 'filesystem',
    'CACHE_DIR': config["cache_location"]
})
cache_timeout = 60

if config.get("pass_protected"):
    dash_auth.BasicAuth(
        app,
        config.USERNAME_PASSWORD
    )

# Temp css to make it look nice
# Lato font


def samples_list(active, collection_name=None):
    links = [
        {
            "icon": "fa-list",
            "href": ""
        },
        {
            "icon": "fa-money-check",
            "href": "sample-report"
        },
        {
            "icon": "fa-chart-pie",
            "href": "aggregate"
        },
        {
            "icon": "fa-traffic-light",
            "href": "pipeline-report"
        },
        {
            "icon": "fa-link",
            "href": "link-to-files"
        }
    ]
    link_list = []
    for item in links:
        href = "/" + item["href"]
        if collection_name is not None:
            href = "/collection/{}/{}".format(collection_name, item["href"])
        if active == item['href']:
            link_list.append(dcc.Link(
                html.I(className="fas {} fa-fw".format(item['icon'])),
                className="btn btn-outline-secondary active",
                href=href
            ))
        else:
            link_list.append(dcc.Link(
                html.I(className="fas {} fa-fw".format(item['icon'])),
                className="btn btn-outline-secondary",
                href=href
            ))
    return link_list


app.layout = html.Div([
    dcc.Location(id="url", refresh=False),
    # To store url param values
    dcc.Store(id="sample-store", data=[], storage_type='session'),
    dcc.Store(id="param-store", data={}),
    dcc.Store(id="removed-samples-store", data=None),
    dcc.Store(id="selected-collection", data=None),
    html.Ul(
        [
            html.A(
                [
                    html.Img(src="/assets/img/bifrost-logo-white@2x.png",
                             className="navbar-logo ")
                    # html.Div("bifrost", className="sidebar-brand-text mx-3")
                ],
                className="sidebar-brand d-flex align-items-center justify-content-center",
                href="/"
            ),
            html.Hr(className="sidebar-divider"),
            html.Div("Browse data", className="sidebar-heading"),
            html.Li(dcc.Link(
                [
                    html.I(className="fas fa-vial fa-fw"),
                    html.Span("Samples")
                ], className="nav-link", href="/"),
                className="nav-item",
                id="samples-nav"),
            html.Li(dcc.Link([
                html.I(className="fas fa-vials fa-fw"),
                html.Span("Collections")
            ], className="nav-link", href="/collection"),
                className="nav-item", id="collections-nav"),
            html.Hr(className="sidebar-divider"),
            html.Div("Reports", className="sidebar-heading"),
            html.Li(dcc.Link(
                [
                    html.I(className="fas fa-chart-line fa-fw"),
                    html.Span("Resequence report")
                ], className="nav-link", href="/resequence-report"),
                className="nav-item", id="resequence-nav"),
            html.Hr(className="sidebar-divider"),
            html.Div(
                html.Button(className="rounded-circle border-0",
                            id="sidebarToggle"),
                className="text-center d-none d-md-inline"
            ),
        ],
        className="navbar-nav bg-gradient-primary sidebar sidebar-dark accordion",
        id="sidebar"
    ),
    html.Div([
        html.Div(id="content", children=[
            html.Nav(
                [
                    html.Ul([
                        html.Li(
                            html.Span("This view is in Beta. Please report any feedback/bugs to mbas@ssi.dk :)"), className="nav-item mx-1"
                        ),
                    ], className="navbar-nav"),
                    html.Ul([
                        html.Li(
                            html.A("Documentation", href="https://ssi-dk.github.io/bifrost/"), className="nav-item dropdown no-arrow mx-1"
                        ),
                        html.Li(
                            html.A("Github", href="https://github.com/ssi-dk/bifrost"), className="nav-item dropdown no-arrow mx-1"
                        )
                    ], className="navbar-nav ml-auto")
                ],
                className="navbar navbar-expand navbar-light bg-white topbar mb-4 static-top shadow"),
            html.Main([
                html_collection_selector(),
                html.Div([
                    dbc.Collapse(
                        [
                            html_filter_drawer()
                        ], id="filter_panel"
                    ),
                    html.Div([
                        html.Div([
                            html.Div(
                                samples_list('/'),
                                className="btn-group shadow-sm",
                                id="selected-view-buttons"
                            ),
                        ], className="col-4"),
                        html.Div([
                            html.Button(
                                html.I(className="fas fa-filter fa-sm"),
                                className="btn btn-outline-secondary shadow-sm mx-auto d-block",
                                id="filter_toggle"
                            ),
                        ], className="col-4"),
                    ], className="row mb-4"),
                ], id="samples-panel", className="d-none"),
                html.Div(id="selected-view"),
                html.Footer([
                    "Created with 🔬 at SSI. Bacteria icons from ",
                    html.A("Flaticon", href="https://www.flaticon.com/"),
                    "."], className="footer container")
            ], className="container-fluid",
                role="main"),
        ]),
    ], id="content-wrapper", className="d-flex flex-column")
], id="wrapper")


# Callbacks
# We could make this one much faster by hiding the unused species with CSS
# by adding a new hidden class.


@app.callback(
    [Output("filter_panel", "is_open"),
     Output("filter_toggle", "className")],
    [Input("filter_toggle", "n_clicks")],
    [State("filter_panel", "is_open")]
)
def sidebar_toggle(n_clicks, is_open):
    if n_clicks:
        if is_open:
            return [False, "btn btn-outline-secondary shadow-sm mx-auto d-block"]
        else:
            return [True, "btn btn-outline-secondary shadow-sm mx-auto d-block active"]
    return [is_open, "btn btn-outline-secondary shadow-sm mx-auto d-block"]


@app.callback(
    Output("param-store", "data"),
    [Input("url", "search")],
    [State("param-store", "data")]
)
def update_run_name(params, prev_params):
    if params is None or params == "":
        raise dash.exceptions.PreventUpdate("Initial repeated params call")
    pparse = urlparse.urlparse(params)
    params = urlparse.parse_qs(pparse.query)
    if params == prev_params:
        raise dash.exceptions.PreventUpdate("No param change")
    return params


@app.callback(
    [Output("selected-view", "children"),
     Output("selected-view-buttons", "children"),
     Output("samples-panel", "className"),
     Output("samples-nav", "className"),
     Output("resequence-nav", "className"),
     Output("collections-nav", "className"),
     Output("selected-collection", "data"),
     Output("collection-selector-div", "className"),
     Output("run-list-div", "className")],
    [Input("url", "pathname")],
    [State("sample-store", "data")]
)
def update_view(pathname, sample_store):
    if pathname is None or pathname == "/":
        pathname = "/"
    path = pathname.split("/")
    view = None
    samples_panel = ""
    samples_nav = "nav-item"
    resequence_nav = "nav-item"
    collections_nav = "nav-item"
    collection_view = False
    collection_name = None
    if path[1] == "collection":
        collection_view = True
        if len(path) > 2:  # /collection/collectionname
            collection_name = path[2]
            if len(path) > 3:  # /collection/collectionname/section
                section = path[3]
            else:  # /collection/collectionname
                section = ""
        else:  # /collection
            section = ""
    else:  # /section
        section = path[1]
    if section == "resequence-report":
        if len(path) == 3:  # /resequence-report/collectionname
            collection_name = path[2]
        resequence_nav += " active"
    else:
        samples_nav += " active"
    if section == "":
        view = html_div_filter()
    elif section == "sample-report":
        view = sample_report(sample_store)
    elif section == "aggregate":
        view = aggregate_report()  # Doesn't need data
    elif section == "pipeline-report":
        view = pipeline_report(sample_store)
    elif section == "resequence-report":
        samples_panel = "d-none"
        view = resequence_report(collection_name)
    elif section == "link-to-files":
        view = link_to_files_div()
    else:
        samples_panel = "d-none"
        view = "Not found"
    if collection_view:
        collection_selector_list = "row"
        run_list = "d-none"
        collections_nav += " active"
    elif section == "resequence-report":
        collection_selector_list = "row"
        run_list = "d-none"
    else:
        collection_selector_list = "row d-none"
        run_list = ""
    return [view, samples_list(section, collection_name), samples_panel,
            samples_nav, resequence_nav, collections_nav, collection_name,
            collection_selector_list, run_list]


@app.callback(
    [Output("run-list", "options"),
     Output("collection-selector", "options"),
     Output("group-list", "options"),
     Output("species-list", "options")],
    [Input("form-species-source", "value"),
     Input("selected-collection", "data")]
)
@cache.memoize(timeout=cache_timeout)  # in seconds
def update_run_options(form_species, selected_collection):
    return filter_update_run_options(form_species, selected_collection)


@app.callback(
    Output("collection-selector", "value"),
    [Input("selected-collection", "data")]
)
def update_selected_collection(selected_collection):
    return selected_collection


@app.callback(
    [Output("run-list", "value"),
     Output("group-list", "value"),
     Output("species-list", "value"),
     Output("qc-list", "value"),
     Output("samples-form", "value")],
    [Input("param-store", "data")]
)
def update_filter_values(param_store):
    return filter_update_filter_values(param_store)


@app.callback(
    Output("collection-link", "href"),
    [Input("collection-selector", "value")],
    [State("url", "pathname")]
)
def update_collection_button_f(collection, pathname):
    return update_collection_button(collection, pathname)


@app.callback(
    Output("page-n", "children"),
    [Input("prevpage", "n_clicks_timestamp"),
     Input("prevpage2", "n_clicks_timestamp"),
     Input("nextpage", "n_clicks_timestamp"),
     Input("nextpage2", "n_clicks_timestamp")],
    [State("page-n", "children"),
     State("max-page", "children")]
)
def next_page(prev_ts, prev_ts2, next_ts, next_ts2, page_n, max_page):
    return samples_next_page(prev_ts, prev_ts2, next_ts, next_ts2, page_n, max_page)


@app.callback(
    Output("sample-report", "children"),
    [Input("page-n", "children"),
     Input("sample-store", "data")]
)
def fill_sample_report(page_n, sample_store):
    page_n = int(page_n)
    sample_ids = list(
        map(lambda x: x["_id"], sample_store))
    if len(sample_ids) == 0:
        return None
    data_table = import_data.filter_all(
        sample_ids=sample_ids,
        pagination={"page_size": SAMPLE_PAGESIZE, "current_page": page_n})
    max_page = len(sample_store) // SAMPLE_PAGESIZE
    # We need to have fake radio buttons with the same ids to account for times
    # when not all SAMPLE_PAGESIZE samples are shown and are not taking the ids required by the callback
    html_fake_radio_buttons = html.Div([dcc.RadioItems(
        options=[
            {'label': '', 'value': 'nosample'}
        ],
        value='noaction',
        id="sample-radio-{}".format(n_sample)
    ) for n_sample in range(len(data_table), SAMPLE_PAGESIZE)], style={"display": "none"})
    return [
        html.H4("Page {} of {}".format(page_n + 1, max_page + 1)),
        html.Div(children_sample_list_report(data_table)),
        html_fake_radio_buttons,
        admin.html_qc_expert_form(),
        html.H4("Page {} of {}".format(page_n + 1, max_page + 1)),
        dcc.ConfirmDialog(
            id='qc-confirm',
            message='Are you sure you want to send sample feedback?',
        )
    ]


@app.callback(
    Output("sample-store", "data"),
    [Input("apply-filter-button", "n_clicks"),
     Input("param-store", "data"),
     Input("selected-collection", "data")],
    [State("run-list", "value"),
     State("species-list", "value"),
     State("form-species-source", "value"),
     State("group-list", "value"),
     State("qc-list", "value"),
     State("samples-form", "value"),
     State("sample-store", "data"),
     State("date-sequenced", "start_date"),
     State("date-sequenced", "end_date"),
     ]
)
def update_selected_samples(n_clicks, param_store, collection_name,
                            run_names, species_list,
                            species_source, group_list, qc_list,
                            sample_names, prev_sample_store,
                            date_seq_start, date_seq_end):
    date_range = [date_seq_start, date_seq_end]
    for i in range(2):
        if date_range[i] is not None:
            date_range[i] = datetime.datetime.strptime(re.split('T| ', date_range[i])[0], '%Y-%m-%d')
    if sample_names is not None and sample_names != "":
        sample_names = sample_names.split("\n")
    else:
        sample_names = param_store.get("sample_names", [])
    if not run_names:
        run_names = param_store.get("run", [])
    if not group_list:
        group_list = param_store.get("group", [])
    if not species_list:
        species_list = param_store.get("species", [])
    if not qc_list:
        qc_list = param_store.get("qc", [])
    if not date_range[0]:
        date_range[0] = param_store.get("date_seq_start", None)
    if not date_range[1]:
        date_range[1] = param_store.get("date_seq_end", None)
    # override if selected collection
    if collection_name is not None:
        run_names = [collection_name]
    if (date_range[0] is None and
            date_range[1] is None):
        date_range = None
    if (n_clicks == 0 and
            sample_names == [] and
            run_names == [] and
            group_list == [] and
            species_list == [] and
            qc_list == [] and
            date_range is None):
        samples = prev_sample_store
    else:
        samples = import_data.filter_all(
            species=species_list, species_source=species_source,
            group=group_list, qc_list=qc_list,
            run_names=run_names,
            sample_names=sample_names,
            date_range=date_range,
            projection={"name": 1})
        if "_id" in samples:
            samples["_id"] = samples["_id"].astype(str)
        samples = samples.to_dict('records')
    # if deleted_samples:
    #     samples = [s for s in samples if s["_id"] not in deleted_samples]
    return samples


@app.callback(
    [
        Output("filter-sample-count", "children"),
        Output("datatable-ssi_stamper", "data"),
        Output("datatable-ssi_stamper", "virtualization")
    ],
    [
        Input("placeholder0", "children"),
        Input("sample-store", "data")
    ],
)
def update_filter_table(_, sample_store):
    if len(sample_store) == 0:
        return ["0", [{}], False]
    print('s', sample_store)
    sample_ids = list(
        map(lambda x: x["_id"], sample_store))
    samples = import_data.filter_all(
        sample_ids=sample_ids)
    samples = generate_table(samples)
    if len(sample_store) > 500:
        virtualization = True
    else:
        virtualization = False
    return [len(sample_store), samples.to_dict("rows"), virtualization]


@app.callback(
    Output("tsv-download", "children"),
    [Input("generate-download-button", "n_clicks")],
    [State("run-list", "value"),
     State("species-list", "value"),
     State("form-species-source", "value"),
     State("group-list", "value"),
     State("qc-list", "value"),
     State("samples-form", "value")],
    prevent_initial_call=True
)
def generate_download_button(download_button,
                             run_names, species_list,
                             species_source, group_list, qc_list,
                             sample_names):
    if download_button == 0:
        return None
    else:
        if sample_names is not None and sample_names != "":
            sample_names = sample_names.split("\n")
        tests_df = import_data.filter_all(species=species_list, species_source=species_source,
                                          group=group_list, qc_list=qc_list,
                                          run_names=run_names,
                                          sample_names=sample_names,
                                          pagination=None)
        # return samples.to_dict()
        if not len(tests_df):
            return None
        tests_df = generate_table(tests_df)
        rename_dict = {item["id"]: item["name"]
                       for item in global_vars.COLUMNS}
        renamed = tests_df.rename(rename_dict, axis='columns')
        missing_columns = [a for a in list(
            rename_dict.values()) if not a in list(renamed.columns)]
        # add missing columns
        for column in missing_columns:
            renamed[column] = np.nan
        # reorder columns
        renamed = renamed[list(rename_dict.values())]
        csv_string_eur = renamed.to_csv(
            index=False, encoding="utf-8", sep=";", decimal=",")
        tsv_string_us = renamed.to_csv(index=False, encoding="utf-8", sep="\t")
        full_csv_string_eur = 'data:text/csv;charset=utf-8,' + \
            urlparse.quote(csv_string_eur)
        full_tsv_string_us = 'data:text/tab-separated-values;charset=utf-8,' + \
            urlparse.quote(tsv_string_us)
        return [
            html.A("(tsv, US format)",
                   href=full_tsv_string_us,
                   download='report.tsv'),
            " - ",
            html.A("(csv, EUR Excel format)",
                   href=full_csv_string_eur,
                   download='report.csv')
        ]


@app.callback(
    [Output("plot-species", "value"),
     Output("plot-species", "options")],
    [Input("sample-store", "data"),
     Input("plot-species-source", "value")],
    [State("plot-species", "value")]
)
def aggregate_species_dropdown_f(sample_store, plot_species, selected_species):
    return aggregate_species_dropdown(sample_store, plot_species, selected_species)


@app.callback(
    [Output("pipeline-table", "data"),
     Output("pipeline-table", "columns"),
     Output("pipeline-table", "style_data_conditional"),
     Output("rerun-samples", "options"),
     Output("rerun-components", "options")],
    [Input("sample-store", "data"),
     Input("table-interval", "n_intervals")]
)
def pipeline_report_data_f(sample_store, _):
    return pipeline_report_data(sample_store)


@app.callback(
    [Output("summary-plot", "figure"),
     Output("mlst-plot", "figure")],
    [Input("plot-species", "value")],
    [State("sample-store", "data"),
     State("plot-species-source", "value")]
)
@cache.memoize(timeout=cache_timeout)  # in seconds
def update_aggregate_fig_f(selected_species, samples, plot_species_source):
    return update_aggregate_fig(selected_species, samples, plot_species_source)


@app.callback(Output("pipeline-rerun", "data"),
              [Input("pipeline-table", "active_cell"),
               Input("pipeline-table", "derived_viewport_data"),
               Input("rerun-add-components", "n_clicks"),
               Input("rerun-add-samples", "n_clicks"),
               Input("rerun-add-failed", "n_clicks")],
              [State("pipeline-table", "columns"),
               State("pipeline-rerun", "derived_viewport_data"),
               State("rerun-components", "value"),
               State("rerun-samples", "value")])
def update_rerun_table_f(active, table_data, n_click_comp, n_click_samp,
                         n_click_fail, columns, prev_data, rerun_comp,
                         rerun_samp):
    return update_rerun_table(active, table_data, n_click_comp, n_click_samp,
                              n_click_fail, columns, prev_data, rerun_comp,
                              rerun_samp)


@app.callback(
    [Output("rerun-output", "children"),
     Output("rerun-output", "is_open")],
    [Input("rerun-button", "n_clicks")],
    [State("pipeline-rerun", "derived_viewport_data")],
    prevent_initial_call=True
)
def rerun_components_button_f(n_clicks, data):
    return rerun_components_button(n_clicks, data, config["rerun"])


@app.callback(Output('qc-confirm', 'displayed'),
              [Input('feedback-button', 'n_clicks_timestamp')],
              prevent_initial_call=True)
def display_confirm_feedback(button):
    if button is not None:
        return True
    return False


@app.callback(
    Output("qc-feedback", "children"),
    [Input("qc-confirm", "submit_n_clicks")],
    [State("qc-user-1", "value")] + [State("sample-radio-{}".format(n), "value")
                                     for n in range(SAMPLE_PAGESIZE)] +
    [State("sample_reason-{}".format(n), "value")
     for n in range(SAMPLE_PAGESIZE)],
    prevent_initial_call=True
)
def submit_user_feedback(_, user, *args):
    if (config["feedback_enabled"]):
        feedback_pairs = []
        for i in range(int(len(args) / 2)):
            val = args[i]
            reason = args[int(len(args) / 2) + i]
            if val != "noaction":
                if val.startswith("OK_"):
                    feedback_pairs.append((val[3:], "OK", reason))
                elif val.startswith("CF_"):
                    feedback_pairs.append((val[3:], "resequence", reason))
elif val.startswith("OT_"):
feedback_pairs.append((val[3:], "other", reason))
if len(feedback_pairs) > 0:
email_config = {
"email_from": config["email_from"],
"email_to": config["email_to"]
}
import_data.add_batch_user_feedback_and_mail(feedback_pairs, user, email_config)
return "Feedback saved"
return []
@app.callback(
Output("link-to-files-div", "children"),
[Input("sample-store", "data")],
)
def link_to_files_f(data):
return link_to_files(data)
server = app.server # Required for gunicorn
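# Hedged example (the module name "reporter" is an assumption, not part of this
# project): serve in production with
#   gunicorn -w 4 "reporter:server"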
def main_debug():
app.run_server(debug=True, host="0.0.0.0", dev_tools_hot_reload=True)
if __name__ == '__main__':
# 0.0.0.0 exposes the app to the network.
main_debug()
|
[
"martinbaste@gmail.com"
] |
martinbaste@gmail.com
|
ca72598468c90650f461ccf10d9b867094d708a8
|
95b583b03f1865daa700e9992dbae2b1cd3cfad3
|
/makefile/160822_assignment8/speed.py
|
36d8b747c9ba4812413f69408be590d2cc8fafc6
|
[] |
no_license
|
ymahajan98/CS-251
|
46d49bc4b79a6033b2dd0870378d7f7cb049e9d0
|
d40a1f0a289ae3fddbed81f38830957f015d2f8b
|
refs/heads/master
| 2020-03-13T22:20:19.288444
| 2019-06-20T13:43:10
| 2019-06-20T13:43:10
| 131,314,229
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,145
|
py
|
with open("analyse") as f:
content = f.readlines()
content = [x.strip() for x in content]
T2 = [x.split(" ") for x in content]
t = [map(float, x) for x in T2]
print ("#Threads 1 2 4 8 16")
print ("100"),t[0][1]/t[0][1],t[0][1]/t[4][1],t[0][1]/t[8][1],t[0][1]/t[12][1],t[0][1]/t[16][1],
print (t[20][1])*t[0][1]*t[0][1],(t[24][1])*t[0][1]*t[0][1],(t[28][1])*t[0][1]*t[0][1],(t[32][1])*t[0][1]*t[0][1],(t[36][1])*t[0][1]*t[0][1]
print ("1000"),t[1][1]/t[1][1],t[1][1]/t[5][1],t[1][1]/t[9][1],t[1][1]/t[13][1],t[1][1]/t[17][1],
print (t[21][1])*t[1][1]*t[1][1],(t[25][1])*t[1][1]*t[1][1],(t[29][1])*t[1][1]*t[1][1],(t[33][1])*t[1][1]*t[1][1],(t[37][1])*t[1][1]*t[1][1]
print ("10000"),t[2][1]/t[2][1],t[2][1]/t[6][1],t[2][1]/t[10][1],t[2][1]/t[14][1],t[2][1]/t[18][1],
print (t[22][1])*t[2][1]*t[2][1],(t[26][1])*t[2][1]*t[2][1],(t[30][1])*t[2][1]*t[2][1],(t[34][1])*t[2][1]*t[2][1],(t[38][1])*t[2][1]*t[2][1]
print ("100000"),t[3][1]/t[3][1],t[3][1]/t[7][1],t[3][1]/t[11][1],t[3][1]/t[15][1],t[3][1]/t[19][1],
print(t[23][1])*t[3][1]*t[3][1],(t[27][1])*t[3][1]*t[3][1],(t[31][1])*t[3][1]*t[3][1],(t[35][1])*t[3][1],(t[39][1])*t[3][1]*t[3][1]
|
[
"ymahajan98@gmail.com"
] |
ymahajan98@gmail.com
|
48dbe9c8798421fd1f2317bfdd8b6d68b1e56714
|
6bf49be2f4e3830025f897373a07d983e2e060cc
|
/chordchart_notation/parser/parsers/string.py
|
c22ca295d74f96510baccaef64702918afaa5f32
|
[] |
no_license
|
AntoineCezar/chordchart-notation
|
057947964f3c5268d13f969443635ded3d5810d1
|
cabd897a445996017817cca14a72f8cb20f536eb
|
refs/heads/master
| 2021-01-20T21:59:57.293664
| 2017-12-02T02:09:55
| 2017-12-02T02:09:55
| 101,794,125
| 2
| 1
| null | 2017-10-02T12:07:54
| 2017-08-29T18:36:27
|
Python
|
UTF-8
|
Python
| false
| false
| 983
|
py
|
import typing
from ..errors import ParseError
from ..text import Text
from ..ast import TerminalNode
from ..results import Results
from .parser import Parser
ResultsORTerminalNode = typing.Union[Results, TerminalNode]
class String(Parser):
def __init__(self, string: str) -> None:
self._string = string
def __eq__(self, other):
return isinstance(other, String) \
and other._string == self._string
def __add__(self, other: 'Parser') -> 'Parser':
if isinstance(other, String):
return String(self._string + other._string)
return super().__add__(other)
def parse(self, text: Text, results: ResultsORTerminalNode) -> None:
lookup_length = len(self._string)
fragment = text.lookup(lookup_length)
if fragment == self._string:
text.forward(lookup_length)
results.append(fragment)
return
raise ParseError(f'"{self._string}"', text.position)
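
# Illustrative composition using only the operators defined above:
# String("C") + String("maj7") == String("Cmaj7")   # __add__ merges the literals,
#                                                   # __eq__ compares them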
|
[
"antoine@cezar.fr"
] |
antoine@cezar.fr
|
eae8199fad50515ff7bb5860514b1723c7d7666d
|
0877e0a3ca2b55a5dc686bfba4a8abe76a2b5de7
|
/coop/members/mixins.py
|
07670f3da78fc208e4e43af9e55afa2567bafa8e
|
[
"Apache-2.0"
] |
permissive
|
jalibras/coop
|
e45b483d6fa4857946f044f486cc106ae53b465e
|
cb94560eb4a25eca3e241551e01eea6e3d4e3b6b
|
refs/heads/master
| 2020-06-18T01:27:16.098343
| 2018-09-10T15:14:23
| 2018-09-10T15:14:23
| 74,960,105
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 645
|
py
|
from django.db import models
class PermissionMixin(object):
# this is a class method because we create instances of a class
@classmethod
def can_create(cls,user):
return True
# OTOH it is conceivable that we have an instance already created and
# we need to check if the user can save that particular instance to the db
# something like
# if user.groups intersection object.add_groups is not empty
# then return True
def can_save(self,user):
return True
def can_read(self,user):
return True
def can_update(self,user):
return True
def can_delete(self,user):
return True
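
# A hedged sketch (not part of the original mixin) of the group-intersection
# idea described in the comments above can_save(); ``add_groups`` is an assumed
# many-to-many field on the model, not an existing attribute.
class GroupPermissionMixin(PermissionMixin):
    def can_save(self, user):
        # allow saving only when the user shares at least one group with the object
        user_groups = set(user.groups.values_list("pk", flat=True))
        allowed_groups = set(self.add_groups.values_list("pk", flat=True))
        return bool(user_groups & allowed_groups)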
|
[
"jamescruickshank71@gmail.com"
] |
jamescruickshank71@gmail.com
|
9163a3b4364c67dae435f69030415d3e9199177a
|
a086f654cb2ff7af46865983ba49cb7d1a1b6bac
|
/tutorial/items.py
|
c36cdc7629aebf1956fff4a8f5e771c45839c1d2
|
[] |
no_license
|
kcaaaxing/scrapy
|
5140954c047288090dbb79e1c579f2fd415e8592
|
90e48895a862131f50c7e19d69694378cce89f56
|
refs/heads/master
| 2020-04-13T11:13:58.398972
| 2018-12-26T10:39:08
| 2018-12-26T10:39:08
| 163,168,124
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 861
|
py
|
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/items.html
import scrapy
from scrapy.loader import ItemLoader
from scrapy.loader.processors import TakeFirst
class TutorialItemLoader(ItemLoader):
default_output_processor = TakeFirst()
class TutorialItem(scrapy.Item):
    # define the fields for your item here (custom fields that hold what the
    # spider scrapes), e.g.:
    # name = scrapy.Field()
    code = scrapy.Field()            # stock code
    abbr = scrapy.Field()            # stock name (abbreviation)
    last_trade = scrapy.Field()      # latest price
    chg_ratio = scrapy.Field()       # change ratio (%)
    chg_amt = scrapy.Field()         # change amount
    chg_ratio_5min = scrapy.Field()  # 5-minute change ratio
    volumn = scrapy.Field()          # trading volume
    turn_over = scrapy.Field()       # turnover
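
# Hedged usage sketch (the XPaths and the `response` object are hypothetical):
# loader = TutorialItemLoader(item=TutorialItem(), response=response)
# loader.add_xpath("code", "//td[1]/text()")
# loader.add_xpath("last_trade", "//td[4]/text()")
# item = loader.load_item()   # TakeFirst() keeps the first extracted value per field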
|
[
"yongxingliu60@gmail.com"
] |
yongxingliu60@gmail.com
|
3ed8e3a1a00a96af9e9bf185cc4a8bf0b4694b6b
|
c8d7cf53cf757e0f43007d47195c9746bb20c8d4
|
/scripts/runtest.py
|
0c24c5eee0050d3303bec12328139b36fd87a6ba
|
[] |
no_license
|
jonmay/lexicon
|
068059bf3a8d1b5d4f702b158c9c80505758b74f
|
4b962967c5b4c0dc8db55712a227eb251da6cdb0
|
refs/heads/master
| 2021-01-12T16:07:15.081029
| 2017-01-27T22:04:44
| 2017-01-27T22:04:44
| 71,940,027
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,404
|
py
|
#!/usr/bin/env python
import argparse
import sys
import codecs
if sys.version_info[0] == 2:
from itertools import izip
else:
izip = zip
from collections import defaultdict as dd
import re
import os.path
import gzip
import tempfile
import shutil
import atexit
import shlex
from jmutil import shchain, mkdir_p
scriptdir = os.path.dirname(os.path.abspath(__file__))
reader = codecs.getreader('utf8')
writer = codecs.getwriter('utf8')
def prepfile(fh, code):
if type(fh) is str:
fh = open(fh, code)
ret = gzip.open(fh.name, code if code.endswith("t") else code+"t") if fh.name.endswith(".gz") else fh
if sys.version_info[0] == 2:
if code.startswith('r'):
ret = reader(fh)
elif code.startswith('w'):
ret = writer(fh)
else:
sys.stderr.write("I didn't understand code "+code+"\n")
sys.exit(1)
return ret
def addonoffarg(parser, arg, dest=None, default=True, help="TODO"):
''' add the switches --arg and --no-arg that set parser.arg to true/false, respectively'''
group = parser.add_mutually_exclusive_group()
dest = arg if dest is None else dest
group.add_argument('--%s' % arg, dest=dest, action='store_true', default=default, help=help)
group.add_argument('--no-%s' % arg, dest=dest, action='store_false', default=default, help="See --%s" % arg)
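# Illustrative call (hypothetical flag): addonoffarg(parser, "verbose",
# default=False, help="chatty output") creates both --verbose and
# --no-verbose, each writing to args.verbose.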
def main():
parser = argparse.ArgumentParser(description="apply patterns to data, get in and out results, sample them.",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
addonoffarg(parser, 'debug', help="debug mode", default=False)
parser.add_argument("--lexiconfile", "-l", nargs='?', type=argparse.FileType('r'), default=sys.stdin, help="input lexicon file")
parser.add_argument("--toklcfile", "-t", nargs='?', type=argparse.FileType('r'), default=sys.stdin, help="toklc english file")
parser.add_argument("--patternfile", "-p", nargs='?', type=argparse.FileType('r'), default=sys.stdin, help="pattern file")
parser.add_argument("--sample", "-s", type=int, default=20, help="number of samples to catch")
parser.add_argument("--threshhold", "-d", type=float, default=5.0, help="minimum score for patterns")
parser.add_argument("--applyprog", default=os.path.join(scriptdir, 'applymatches.py'), help='apply matches program')
parser.add_argument("--sampleprog", default=os.path.join(scriptdir, 'sample.py'), help='sample program')
parser.add_argument("--maskngram", default=os.path.join(scriptdir, 'maskngram.py'), help='maskngram file')
parser.add_argument("--outdir", "-o", default=".", help="output directory")
try:
args = parser.parse_args()
except IOError as msg:
parser.error(str(msg))
workdir = tempfile.mkdtemp(prefix=os.path.basename(__file__), dir=os.getenv('TMPDIR', '/tmp'))
def cleanwork():
shutil.rmtree(workdir, ignore_errors=True)
if args.debug:
print(workdir)
else:
atexit.register(cleanwork)
lexiconfile = prepfile(args.lexiconfile, 'r')
toklcfile = prepfile(args.toklcfile, 'r')
patternfile = prepfile(args.patternfile, 'r')
mkdir_p(args.outdir)
changefile=prepfile(os.path.join(args.outdir, "changes"), 'w')
samefile=prepfile(os.path.join(args.outdir, "sames"), 'w')
changesamplefile=prepfile(os.path.join(args.outdir, "changesamples"), 'w')
samesamplefile=prepfile(os.path.join(args.outdir, "samesamples"), 'w')
_, tmpfile = tempfile.mkstemp(dir=workdir, text=True)
tmpfile = prepfile(tmpfile, 'w')
for l, t in izip(lexiconfile, toklcfile):
tmpfile.write("%s\t%s" % (l.strip(), t))
tmpfile.close()
shchain(["%s -i %s -t %f --no-passthrough --scoremode" % (args.applyprog, tmpfile.name, args.threshhold),], input=patternfile, output=changefile)
shchain(["%s -i %s -t %f --no-mods" % (args.applyprog, tmpfile.name, args.threshhold),], input=patternfile, output=samefile)
changefile.close()
samefile.close()
changefile = prepfile(changefile.name, 'r')
_, tmpfile = tempfile.mkstemp(dir=workdir, text=True)
tmpfile = prepfile(tmpfile, 'w')
shchain(["%s -s %d" % (args.sampleprog, args.sample),], input=changefile, output=tmpfile)
tmpfile.close()
tmpfile=prepfile(tmpfile.name, 'r')
for line in tmpfile:
toks = line.strip().split('\t')
changesamplefile.write('\t'.join(toks[:-1])+"\n")
for tok in toks[-1].split('//'):
changesamplefile.write("\t%s\n" % tok.strip())
if __name__ == '__main__':
main()
|
[
"jonmay@isi.edu"
] |
jonmay@isi.edu
|
3029a9c75f647c0fd90e55f2045bf5fdb294d656
|
030724b60fb4f8b63953b7401702a98072993e94
|
/python/50.pow_x_n.py
|
61bf1ffb664c83a0ba1410577e1a4b9ea064f1a1
|
[] |
no_license
|
MtTsai/Leetcode
|
5f51a892b78cf6427ce2b4891a10bc2d4ed4d972
|
21e83294aee779a16a8c1b96089da4a40eb03035
|
refs/heads/master
| 2021-01-24T17:17:52.909429
| 2019-08-04T06:53:53
| 2019-08-04T06:54:23
| 123,228,705
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 695
|
py
|
class Solution(object):
def myPow(self, x, n):
"""
:type x: float
:type n: int
:rtype: float
"""
        d = 0        # bit position currently examined in n
        out = 1.0
        if n >= 0:
            while n:
                x2d = x
                if n & 1:
                    # square x d times to obtain x**(2**d), then fold it in
                    for _ in xrange(d):
                        x2d *= x2d
                    out *= x2d
                n >>= 1
                d += 1
        else:
            # negative exponent: accumulate the same powers, but divide
            n *= -1
            while n:
                x2d = x
                if n & 1:
                    for _ in xrange(d):
                        x2d *= x2d
                    out /= x2d
                n >>= 1
                d += 1
        return out
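
# Minimal check of the bit-by-bit squaring above (the file targets Python 2,
# hence xrange); this harness is an assumed addition, not part of the solution:
if __name__ == "__main__":
    s = Solution()
    print(s.myPow(2.0, 10))   # 1024.0
    print(s.myPow(2.0, -2))   # 0.25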
|
[
"mttsai@gmail.com"
] |
mttsai@gmail.com
|
756a57b9f0260e05bc6780be8bb3d45a12d0f3d8
|
c5712fa2c5470c97b0c0b534ff64d4121c758977
|
/combined_model/all_stats/dynamic_t_non_obs_stats/newStats/weights_model.py
|
4902e6d14f47277e72c7d446b718da50873f0c9b
|
[
"MIT"
] |
permissive
|
nibraaska/Working-Memory-Temporal-Difference
|
a5dde9a09ae6b19a0ea363ca7f4406158649ee19
|
543cd91e87ebd478e79d821fa8708885df5899c5
|
refs/heads/master
| 2020-04-08T00:29:51.351421
| 2020-03-13T00:43:54
| 2020-03-13T00:43:54
| 158,854,165
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 23,968
|
py
|
#!/usr/bin/env python
# coding: utf-8
# In[ ]:
import matplotlib as mpl
import matplotlib.animation as animation  # used by live_graphs()/animate() below
import matplotlib.pyplot as plt
import numpy as np
np.set_printoptions(threshold=np.inf)
import time, sys, random, pylab
from math import fabs
from random import randrange
from random import choice
from hrr import *
from IPython.display import clear_output
from sys import argv
seed_val = int(argv[1])
# In[ ]:
def seed(seed):
random.seed(seed)
np.random.seed(seed)
# In[ ]:
seed(seed_val)
# In[ ]:
def plot_all_graphs():
get_ipython().run_line_magic('matplotlib', 'inline')
fig, axes = plt.subplots(nrows=num_of_atrs, ncols=num_obs_tasks+1)
fig.set_figwidth(15)
fig.set_figheight(15)
plt.rcParams.update({'font.size': 14})
if num_of_atrs > 1:
for x in range(num_of_atrs):
x_ind = x
y_for_rwd = 0
y_for_no_rwd = 0
for wm in list(dict.fromkeys([signal + "In" if signal != "I" else signal for signal in signals] + ["I"])):
position = np.arange(size_of_maze)
value = np.zeros(size_of_maze)
for signal in signals + ["I"]:
lab = "WM:" + wm + "*Signal:" + signal + reward_tkn() + "*Atr:" + str(x)
for state in range(size_of_maze):
encode_str = build_hrr_string(wm, signal, str(state) + "*rewardTkn", x)
value[state] = np.dot(weights, ltm.encode(encode_str)) + bias
axes[x_ind,y_for_rwd].title.set_text(wm + " with rewardTkn " + "Atr: " + str(x))
axes[x_ind,y_for_rwd].plot(position, value, label=lab)
axes[x_ind,y_for_no_rwd].tick_params(direction='out', length=6, width=2,
grid_color='r', grid_alpha=0.5)
axes[x_ind,y_for_rwd].legend(loc='upper center', bbox_to_anchor=(0.5, -0.1),
fancybox=True, shadow=True, ncol=1, prop={'size': 10})
y_for_rwd += 1
y = x + 1
value = np.zeros(size_of_maze)
for signal in list(dict.fromkeys(signals + ["I"])):
lab = "WM:" + wm + "*Signal:" + signal + "*Atr:" + str(x)
for state in range(size_of_maze):
encode_str = build_hrr_string(wm, signal, str(state), x)
value[state] = np.dot(weights, ltm.encode(encode_str)) + bias
axes[x_ind,y_for_no_rwd].title.set_text(wm + " Atr: " + str(x))
axes[x_ind,y_for_no_rwd].plot(position, value, label=lab)
axes[x_ind,y_for_no_rwd].tick_params(direction='out', length=6, width=2,
grid_color='r', grid_alpha=0.5)
axes[x_ind,y_for_no_rwd].legend(loc='upper center', bbox_to_anchor=(0.5, -0.1),
fancybox=True, shadow=True, ncol=1, prop={'size': 10})
y_for_no_rwd += 1
else:
for x in range(num_of_atrs):
x_ind = x
y_for_rwd = 0
y_for_no_rwd = 0
for wm in list(dict.fromkeys([signal + "In" if signal != "I" else signal for signal in signals] + ["I"])):
position = np.arange(size_of_maze)
value = np.zeros(size_of_maze)
for signal in signals + ["I"]:
lab = "WM:" + wm + "*Signal:" + signal + reward_tkn() + "*Atr:" + str(x)
for state in range(size_of_maze):
encode_str = build_hrr_string(wm, signal, str(state) + "*rewardTkn", x)
value[state] = np.dot(weights, ltm.encode(encode_str)) + bias
axes[y_for_no_rwd].title.set_text(wm + " with rewardTkn " + "Atr: " + str(x))
axes[y_for_no_rwd].plot(position, value, label=lab)
axes[y_for_no_rwd].tick_params(direction='out', length=6, width=2,
grid_color='r', grid_alpha=0.5)
axes[y_for_no_rwd].legend(loc='upper center', bbox_to_anchor=(0.5, -0.1),
fancybox=True, shadow=True, ncol=1, prop={'size': 10})
y_for_rwd += 1
y = x + 1
value = np.zeros(size_of_maze)
for signal in list(dict.fromkeys(signals + ["I"])):
lab = "WM:" + wm + "*Signal:" + signal + "*Atr:" + str(x)
for state in range(size_of_maze):
encode_str = build_hrr_string(wm, signal, str(state), x)
value[state] = np.dot(weights, ltm.encode(encode_str)) + bias
axes[y_for_no_rwd].title.set_text(wm + " Atr: " + str(x))
axes[y_for_no_rwd].plot(position, value, label=lab)
axes[y_for_no_rwd].tick_params(direction='out', length=6, width=2,
grid_color='r', grid_alpha=0.5)
axes[y_for_no_rwd].legend(loc='upper center', bbox_to_anchor=(0.5, -0.1),
fancybox=True, shadow=True, ncol=1, prop={'size': 10})
y_for_no_rwd += 1
plt.tight_layout(rect=[0, 0.03, 1, 0.95])
plt.show()
def plot_graph(data):
get_ipython().run_line_magic('matplotlib', 'inline')
plt.plot(data)
plt.show()
def live_graphs():
get_ipython().run_line_magic('matplotlib', 'qt')
mpl.rcParams['axes.prop_cycle'] = mpl.cycler(color=["r", "g", "b", "y"])
fig, axes = plt.subplots(nrows=num_of_atrs, ncols=num_obs_tasks+1)
if num_of_atrs > 1:
for x in range(num_of_atrs):
x_ind = x
y_for_no_rwd = 0
for wm in list(dict.fromkeys([signal + "In" if signal != "I" else signal for signal in signals] + ["I"])):
position = np.arange(size_of_maze)
value = np.zeros(size_of_maze)
for signal in list(dict.fromkeys(signals + ["I"])):
lab = "WM:" + wm + "*Signal:" + signal + "*Atr:" + str(x)
for state in range(size_of_maze):
encode_str = build_hrr_string(wm, signal, str(state), x)
value[state] = np.dot(weights, ltm.encode(encode_str)) + bias
axes[x_ind,y_for_no_rwd].title.set_text(wm + " Atr: " + str(x))
axes[x_ind,y_for_no_rwd].plot(position, value, label=lab)
axes[x_ind,y_for_no_rwd].tick_params(direction='out', length=6, width=2,
grid_color='r', grid_alpha=0.5)
axes[x_ind,y_for_no_rwd].legend(loc='upper center', bbox_to_anchor=(0.5, -0.1),
fancybox=True, shadow=True, ncol=1, prop={'size': 10})
y_for_no_rwd += 1
else:
for x in range(num_of_atrs):
x_ind = x
y_for_no_rwd = 0
for wm in list(dict.fromkeys([signal + "In" if signal != "I" else signal for signal in signals] + ["I"])):
position = np.arange(size_of_maze)
value = np.zeros(size_of_maze)
for signal in list(dict.fromkeys(signals + ["I"])):
lab = "WM:" + wm + "*Signal:" + signal + "*Atr:" + str(x)
for state in range(size_of_maze):
encode_str = build_hrr_string(wm, signal, str(state), x)
value[state] = np.dot(weights, ltm.encode(encode_str)) + bias
axes[y_for_no_rwd].title.set_text(wm + " Atr: " + str(x))
axes[y_for_no_rwd].plot(position, value, label=lab)
axes[y_for_no_rwd].tick_params(direction='out', length=6, width=2,
grid_color='r', grid_alpha=0.5)
axes[y_for_no_rwd].legend(loc='upper center', bbox_to_anchor=(0.5, -0.1),
fancybox=True, shadow=True, ncol=1, prop={'size': 10})
y_for_no_rwd += 1
plt.tight_layout(rect=[0, 0.03, 1, 0.95])
ani = animation.FuncAnimation(fig, animate, interval=60000)
plt.show()
plt.suptitle("{0} Non-Observable tasks and {1} Observable tasks with goals: {2}".format(num_non_obs_tasks, num_obs_tasks, goals), fontsize=30)
def animate(i):
if num_of_atrs > 1:
for x in range(num_of_atrs):
x_ind = x
y_for_no_rwd = 0
for wm in list(dict.fromkeys([signal + "In" if signal != "I" else signal for signal in signals] + ["I"])):
position = np.arange(size_of_maze)
value = np.zeros(size_of_maze)
for signal in list(dict.fromkeys(signals + ["I"])):
lab = "WM:" + wm + "*Signal:" + signal + "*Atr:" + str(x)
for state in range(size_of_maze):
encode_str = build_hrr_string(wm, signal, str(state), x)
value[state] = np.dot(weights, ltm.encode(encode_str)) + bias
axes[x_ind,y_for_no_rwd].title.set_text(wm + " Atr: " + str(x))
axes[x_ind,y_for_no_rwd].plot(position, value, label=lab)
axes[x_ind,y_for_no_rwd].tick_params(direction='out', length=6, width=2,
grid_color='r', grid_alpha=0.5)
axes[x_ind,y_for_no_rwd].legend(loc='upper center', bbox_to_anchor=(0.5, -0.1),
fancybox=True, shadow=True, ncol=1, prop={'size': 10})
y_for_no_rwd += 1
else:
for x in range(num_of_atrs):
x_ind = x
y_for_no_rwd = 0
for wm in list(dict.fromkeys([signal + "In" if signal != "I" else signal for signal in signals] + ["I"])):
position = np.arange(size_of_maze)
value = np.zeros(size_of_maze)
for signal in list(dict.fromkeys(signals + ["I"])):
lab = "WM:" + wm + "*Signal:" + signal + "*Atr:" + str(x)
for state in range(size_of_maze):
encode_str = build_hrr_string(wm, signal, str(state), x)
value[state] = np.dot(weights, ltm.encode(encode_str)) + bias
axes[y_for_no_rwd].title.set_text(wm + " Atr: " + str(x))
axes[y_for_no_rwd].plot(position, value, label=lab)
axes[y_for_no_rwd].tick_params(direction='out', length=6, width=2,
grid_color='r', grid_alpha=0.5)
axes[y_for_no_rwd].legend(loc='upper center', bbox_to_anchor=(0.5, -0.1),
fancybox=True, shadow=True, ncol=1, prop={'size': 10})
y_for_no_rwd += 1
plt.tight_layout(rect=[0, 0.03, 1, 0.95])
# In[ ]:
def update_progress(progress, episode):
bar_length = 50
if isinstance(progress, int):
progress = float(progress)
if not isinstance(progress, float):
progress = 0
if progress < 0:
progress = 0
if progress >= 1:
progress = 1
block = int(round(bar_length * progress))
clear_output(wait = True)
text = "Episode {0}, Progress: [{1}] {2:.1f}%".format(episode, "=" * block + "." * (bar_length - block), progress * 100)
print(text)
# In[ ]:
def get_moves(state, size_of_maze):
if(state == 0):
return size_of_maze - 1, 1
elif(state == size_of_maze - 1):
return size_of_maze - 2, 0
else:
return state - 1, state + 1
# In[ ]:
def build_hrr_string(wm, signal, state, atr):
if wm == "I" and signal == "I":
return "State:" + str(state) + "*" + "Atr:" + str(atr)
elif wm == "I":
return "Signal:" + str(signal) + "*" + "State:" + str(state) + "*" + "Atr:" + str(atr)
elif signal == "I":
return "WM:" + str(wm) + "*" + "State:" + str(state) + "*" + "Atr:" + str(atr)
else:
return "WM:" + str(wm) + "*" + "Signal:" + str(signal) + "*" + "State:" + str(state) + "*" + "Atr:" + str(atr)
# In[ ]:
def context_policy_negative(atr):
return (atr + 1)%num_of_atrs
def context_policy_positive(wm, signal, state, atr):
val = -9999
for atr in range(0, num_of_atrs):
encode_str = build_hrr_string(wm, signal, state, atr)
temp = np.dot(weights, ltm.encode(encode_str)) + bias
if temp > val:
val = temp
s_atr = atr
return s_atr
# In[ ]:
def reward_tkn():
return "*rewardTkn"
# In[ ]:
def move_policy(goal, moves, wms, signals, atr, rand_on):
val = -9999
for move in moves:
for wm in list(dict.fromkeys(wms + ["I"])):
for signal in list(dict.fromkeys(signals + ["I"])):
if move == goal:
encode_str = build_hrr_string(wm, signal, str(move) + reward_tkn(), atr)
else:
encode_str = build_hrr_string(wm, signal, move, atr)
if (debug):
print(encode_str)
temp = np.dot(weights, ltm.encode(encode_str)) + bias
if debug:
if signal != "I":
print("Move: {0}, WM: {1}, Signal: {2}In, Atr: {3}, Value: {4}".format(move, wm, signal, atr, temp))
else:
print("Move: {0}, WM: {1}, Signal: {2}, Atr: {3}, Value: {4}".format(move, wm, signal, atr, temp))
if temp > val:
val = temp
s_move = move
if signal != "I":
s_wm = signal + "In"
else:
s_wm = wm
if(np.random.random_sample() < e_soft) and rand_on:
if(debug):
print("RANDOM MOVE")
return (np.random.choice(moves), wm, atr, True)
return (s_move, s_wm, atr, False)
# In[ ]:
def logmod(x):
    # sign-preserving log compression: logmod(0) == 0 and logmod(-x) == -logmod(x)
    return np.sign(x)*np.log(abs(x)+1)
# In[ ]:
def get_opt_steps(start, goal, size_of_maze):
opt = abs(goal - start)
if opt > size_of_maze / 2:
opt = size_of_maze - opt
return opt
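# circular distance on the ring maze, e.g. with size_of_maze=20:
# get_opt_steps(2, 18, 20) -> 4 (wrapping around is shorter than 16 steps)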
# In[ ]:
def start_testing(testing, rand_on, alpha, threshold_alpha, atr_alpha):
testing = True
rand_on = 0
alpha = 0.01
threshold_alpha = 0
atr_alpha = 0
return testing, rand_on, alpha, threshold_alpha, atr_alpha
# In[ ]:
def reset(num_of_atrs, atr_values, threshold, hrr_length, ltm, weights, eligibility):
num_of_atrs += 1
atr_values = [1 * reward_good] * num_of_atrs
if dynamic_threshold:
threshold = 1
hrr_length = (num_of_atrs * hrr_length) / (num_of_atrs - 1)
store_old = ltm.getStore()
weights_new = hrr(int(hrr_length), normalized)
ltm_new = LTM(int(hrr_length), normalized)
inv = np.linalg.pinv(np.atleast_2d(weights_new))
for key in store_old.keys():
key_val = store_old[key]
val = np.dot(weights, key_val)
guess = np.dot(inv, val).ravel()
ltm_new.encode_val(key, guess)
ltm = ltm_new
weights = weights_new
eligibility = np.zeros(int(hrr_length))
return num_of_atrs, atr_values, threshold, hrr_length, ltm, weights, eligibility
# In[ ]:
# Number of training cycles
episodes = 100000
# Hrr parameters
hrr_length = 6144
normalized = True
# How many steps to take before quiting
steps_till_quit = 100
# Task
signals = ["I"]
goals = [[0], [4], [7], [10], [13]]
# Maze parameters
size_of_maze = 20
non_obs_task_switch_rate = 500
num_non_obs_tasks = len(goals)
num_obs_tasks = len(signals)
# Arguments for neural network
input_size = hrr_length
output_size = 1
discount = 0.7
alpha = 0.1
# Reward for temporal difference learning
reward_bad = -1
reward_good = 0
# Dynamic atrs hyperparameters
num_of_atrs = 1
atr_alpha = 0.00063
atr_values = (np.ones(num_of_atrs) * reward_good).tolist()
atr_threshold = -0.5
threshold_vals = []
# Threshold for non observable task switching
# threshold = 0.3
threshold = 1
threshold_alpha = 0.0001
dynamic_threshold = True
# Expolration rate
e_soft = 0.00001
rand_on = 1
# Eligibility trace rate
eli_lambda = 0.0
# Neural network
weights = hrr(hrr_length, normalized)
bias = 1
# Eligibility trace
eligibility = np.zeros(hrr_length)
# Accurcay test percentage
percent_check = 9
# Start values for the agent
non_obs = 0
current_atr = 0
current_wm = "I"
changed = False
# Flag for printing values
debug = False
testing = False
create_plots = False
episodic_memory = False
step_store = []
if create_plots:
pos_err_store = []
neg_err_store = []
total_error = []
total_goal_error = []
switch_error = []
norm_error = []
# Live graph flag
live_graph = False
# Ltm is created
ltm = LTM(hrr_length, normalized)
# In[ ]:
# start_time = time.time()
# In[ ]:
for x in range(episodes):
# Initial state
current_state = random.randint(0, size_of_maze - 1)
start = current_state
current_signal = np.random.choice(signals)
eligibility *= 0.0
if episodic_memory:
episode_memory = []
changed = False
# Set the goal for the tast
if x%non_obs_task_switch_rate == 0:
non_obs = choice([i for i in range(len(goals)) if i not in [non_obs]])
changed = True
if num_obs_tasks == 1:
goal = goals[non_obs][0]
else:
goal = goals[non_obs][signals.index(current_signal)]
steps = 0
opt_steps = get_opt_steps(current_state, goal, size_of_maze)
# Start testing phase
if testing == False and x > ((episodes*percent_check) / 10):
testing, rand_on, alpha, threshold_alpha, atr_alpha = start_testing(testing, rand_on, alpha, threshold_alpha, atr_alpha)
for y in range(steps_till_quit):
if create_plots:
threshold_vals += [threshold]
if (current_state == goal):
encode_str = build_hrr_string(current_wm, current_signal, str(current_state) + reward_tkn(), current_atr)
goal_hrr = ltm.encode(encode_str)
goal_value = np.dot(weights, goal_hrr) + bias
if episodic_memory:
episode_memory += [[current_state, goal_value, goal]]
error = reward_good - goal_value
eligibility *= eli_lambda
eligibility = eligibility + goal_hrr
weights = np.add(weights, (alpha * logmod(error) * eligibility))
if dynamic_threshold:
threshold += threshold_alpha * logmod(error)
atr_values[current_atr] += atr_alpha * logmod(error)
if create_plots:
total_goal_error += [error]
if(debug):
print("In goal with value {0}".format(goal_value))
break
# Store info about previous state
previous_wm = current_wm
previous_signal = current_signal
previous_state = current_state
previous_atr = current_atr
if debug:
print("Previous WM:, {0}, Signal:, {1}, State, {2}, ATR:, {3}".format(previous_wm, previous_signal, previous_state, previous_atr))
encode_str = build_hrr_string(previous_wm, previous_signal, previous_state, previous_atr)
previous_state_hrr = ltm.encode(encode_str)
previous_value = np.dot(weights, previous_state_hrr) + bias
if debug:
print("Started with state: {0}, State Value: {1}, WM: {2}, Atr: {3}".format(previous_state, previous_value, previous_wm, previous_atr))
current_signal = "I"
left, right = get_moves(previous_state, size_of_maze)
if previous_signal != "I":
previous_signal += "In"
# Make the move
move, wm, current_atr, random_move = move_policy(goal, [left, right], [previous_wm, previous_signal], [current_signal], previous_atr, rand_on)
steps += 1
current_wm = wm
current_state = move
if random_move:
eligibility *= 0.0
if(debug):
print("Moves {0}, taken {1}".format([left, right], move))
if debug:
print("Current WM {0}, Current Signal {1}, Current state {2}, Current ATR {3}".format(current_wm, current_signal, current_state, current_atr))
if current_state == goal:
encode_str = build_hrr_string(current_wm, current_signal, str(current_state) + reward_tkn(), current_atr)
if debug:
print("In goal: WM: {1}, ATR: {2}".format(current_wm, current_atr))
else:
encode_str = build_hrr_string(current_wm, current_signal, current_state, current_atr)
current_state_hrr = ltm.encode(encode_str)
current_value = np.dot(weights, current_state_hrr) + bias
sarsa_error = (reward_bad + discount * current_value) - previous_value
eligibility *= eli_lambda
eligibility = eligibility + previous_state_hrr
weights = np.add(weights, (alpha * logmod(sarsa_error) * eligibility))
atr_values[current_atr] += atr_alpha * logmod(sarsa_error)
if dynamic_threshold:
threshold += threshold_alpha * logmod(sarsa_error)
if create_plots:
total_error += [sarsa_error]
norm_error += [sarsa_error]
if sarsa_error > fabs(threshold) or sarsa_error < -fabs(threshold):
if np.mean(atr_values) < atr_threshold:
num_of_atrs, atr_values, threshold, hrr_length, ltm, weights, eligibility = reset(num_of_atrs, atr_values, threshold, hrr_length, ltm, weights, eligibility)
if create_plots:
switch_error += [sarsa_error]
if create_plots:
if testing and sarsa_error > fabs(threshold):
pos_err_store += [sarsa_error]
elif testing and sarsa_error < -fabs(threshold):
neg_err_store += [sarsa_error]
if sarsa_error > fabs(threshold):
current_atr = context_policy_positive(current_wm, current_signal, current_state, current_atr)
elif sarsa_error < -fabs(threshold):
current_atr = context_policy_negative(previous_atr)
eligibility *= 0.0
if changed:
steps = 0
start = current_state
opt_steps = get_opt_steps(current_state, goal, size_of_maze)
if(debug):
print("Changed atr from {0} to {1}".format(previous_atr, current_atr))
if debug:
input("")
if testing:
if current_state == goal:
step_store += [steps - opt_steps]
else:
step_store += [steps_till_quit]
# update_progress(x / episodes, x)
if live_graph:
plt.pause(0.001)
#update_progress(1, episodes)
# In[ ]:
# end_time = time.time()
# print("Total time: {0} minutes".format((end_time - start_time)/60))
# In[ ]:
# plot_graph(step_store)
accuracy = (len(step_store)-np.count_nonzero(step_store))*100.0 / len(step_store)
print(accuracy)
# In[ ]:
# plot_all_graphs()
# In[ ]:
if create_plots:
plot_graph(pos_err_store)
# In[ ]:
if create_plots:
plot_graph(neg_err_store)
# In[ ]:
if create_plots:
plot_graph(total_error)
# In[ ]:
if create_plots:
plot_graph(total_goal_error)
# In[ ]:
if create_plots:
plt.plot(switch_error)
# In[ ]:
if create_plots:
plot_graph(norm_error)
# In[ ]:
# threshold
# In[ ]:
# print(atr_values)
# In[ ]:
# plot_graph(threshold_vals)
# In[ ]:
# hrr_length
# In[ ]:
# ltm.count()
# In[ ]:
# seed_val
# In[ ]:
# In[ ]:
# In[ ]:
# In[ ]:
# In[ ]:
# In[ ]:
# In[ ]:
# In[ ]:
# In[ ]:
# In[ ]:
|
[
"nnibraas@gmail.com"
] |
nnibraas@gmail.com
|
7bd10c7e26dfc3ac4983378e19d0182b65ee9f16
|
05f1f11f7cf6b06156ffdf4e53cda02a112340fc
|
/ConnectionClass.py
|
ac30d6ca2054315f2393b737e80df50e20f8d3f2
|
[] |
no_license
|
Poscowekh/akinator_server
|
4019bd97594f4951ad8578ea45b5511645575e9c
|
6f5780441ad04d82bb2b60a5c708dbb728cb07a0
|
refs/heads/main
| 2023-08-31T21:28:29.919794
| 2021-09-19T21:14:51
| 2021-09-19T21:14:51
| 407,912,569
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 23,856
|
py
|
from sqlite3 import Connection as sqlConnection, OperationalError, IntegrityError, \
Cursor, Error as SQLError, connect as sql_connect, Row
from VersionClass import Version
from Stats import StatsManager
from MyError import MyError, MyErrorType
from enum import IntEnum
from file_management import PathCreator, FileManager, dirname
from file_skeleton import Layouts
from pandas import read_sql_query
class Connection(sqlConnection):
class Type(IntEnum):
server = 0
client = 1
game = 2
@staticmethod
def CT_to_ST(connection_type: Type) -> StatsManager.Type:
if connection_type == Connection.Type.server:
return StatsManager.Type.version_server
elif connection_type == Connection.Type.client:
return StatsManager.Type.version_server
class Error(MyError):
class Type(MyErrorType):
sql_error = "SQLite3 error"
stats_error = "Stats file manager error"
table_error = "SQLite table error"
arguments_error = "SQLite3 could not execute query with this set of arguments"
missing_args_error = "Missing required arguments to execute query"
def __init__(self, error_type: Type, stats_error: StatsManager.Error = None,
sql_error = None, sql_query: str = None, values: list = None,
add_info: dict = None):
message = str()
info = dict()
if stats_error is not None:
message = stats_error.message
info = stats_error.info
elif sql_error is not None:
message = sql_error.__cause__ or sql_error.__context__
info = sql_error.__dict__
# additional info
if sql_query is not None:
info["sql_query"] = sql_query
if values is not None:
info["values"] = values
if add_info is not None:
info = dict(add_info, **info)
MyError.__init__(self, error_type, message, info)
        def __str__(self) -> str:
            return MyError.__str__(self)
max_id: int = 1000000
latest_id: int = 0
def __create_table__(self, table: str):
self.execute(f"CREATE TABLE IF NOT EXISTS {table}")
def __create_tables__(self):
self.begin_transaction()
if self.connection_type is self.Type.server or self.connection_type is self.Type.client:
if self.connection_type is self.Type.server:
self.__create_table__(Layouts.Table.entities_server.value)
else:
self.__create_table__(Layouts.Table.entities_client.value)
self.__create_table__(Layouts.Table.questions.value)
self.__create_table__(Layouts.Table.answers.value)
else:
self.__create_table__(Layouts.Table.entities_game(self.id))
self.__create_table__(Layouts.Table.questions_game(self.id))
self.__create_table__(Layouts.Table.answers_game(self.id))
self.commit()
def __init__(self, connection_type: Type, theme: str, version: Version, path: str = None):
self.connection_type = connection_type
if self.connection_type is self.Type.game:
if self.latest_id >= self.max_id:
self.latest_id = 0
self.id = self.latest_id
self.latest_id += 1
else:
# server and client db connections do not have ids
self.id = None
if path is None:
if connection_type != self.Type.game:
path = PathCreator.db(theme, version)
FileManager.makedir(dirname(path))
else:
path = ":memory:"
self.path = path
sqlConnection.__init__(self, self.path, timeout=30)
self.theme = theme
self.version = version
self.connection_type = connection_type
self.parent_db = None
self.stats = StatsManager(self.CT_to_ST(connection_type), theme, version, self.id)
if not self.stats.data:
self.stats.data = {}
self.stats.write_data()
self.__create_tables__()
if self.connection_type == self.Type.game:
self.parent_db = sql_connect(PathCreator.db(theme, version))
self.__create_game_tables__()
#self.row_factory = Row
def __create_game_tables__(self):
entities = self.parent_db.execute("SELECT id, base_rating from entities").fetchall()
questions = self.parent_db.execute("SELECT id FROM questions").fetchall()
answers = self.parent_db.execute("SELECT entity_id, question_id, answer_value FROM answers").fetchall()
self.executemany(f"INSERT INTO entities_{self.id} (id, rating, used) VALUES(?, ?, 0)", entities)
self.executemany(f"INSERT INTO questions_{self.id} (id, rating, used) VALUES(?, 0.0, 0)", questions)
self.executemany(f"INSERT INTO answers_{self.id} (entity_id, question_id, answer_value) "
"VALUES(?, ?, ?)", answers)
def begin_transaction(self):
self.execute("BEGIN TRANSACTION")
def __select__(self, table: str, columns: list = None, condition: str = None, raw_string: str = None) -> Cursor:
query = str()
if columns is None:
query = "SELECT * FROM "
else:
query = f"SELECT {', '.join(columns)} FROM "
if self.connection_type != self.Type.game:
query += table
else:
query += f"{table}_{self.id}"
if condition is not None:
query += f" WHERE {condition}"
cursor = self.cursor()
try:
if raw_string is not None:
cursor = self.execute(query, (raw_string,))
else:
cursor = self.execute(query)
except OperationalError as e:
raise self.Error(self.Error.Type.arguments_error, sql_error=e, sql_query=query)
except SQLError as e:
raise self.Error(self.Error.Type.sql_error, sql_error=e, sql_query=query)
else:
return cursor
def get_entities(self, columns: list = None, condition: str = None) -> Cursor:
return self.__select__("entities", columns, condition)
def get_questions(self, columns: list = None, condition: str = None) -> Cursor:
return self.__select__("questions", columns, condition)
def get_answers(self, columns: list = None, condition: str = None) -> Cursor:
return self.__select__("answers", columns, condition)
def update_whole_column(self, table: str, columns: list, values: list):
row_count = self.stats.data[f"{table}_count"]
        if not isinstance(columns[0], str) or len(columns) != len(values[0]) or len(values) != row_count:
raise self.Error(self.Error.Type.missing_args_error,
add_info={"table": table, "columns": columns, "values_example": values[0]})
query = f"UPDATE {table} SET {', '.join(columns)} WHERE id=?"
values_ = list()
for value, id in zip(values, range(1, row_count + 1)):
values_.append(value + (id,))
self.begin_transaction()
try:
self.executemany(query, values_)
except OperationalError as e:
raise self.Error(self.Error.Type.arguments_error, sql_error=e, sql_query=query, values=values)
except IntegrityError as e:
raise self.Error(self.Error.Type.arguments_error, sql_error=e, sql_query=query, values=values)
except SQLError as e:
raise self.Error(self.Error.Type.sql_error, sql_error=e, sql_query=query, values=values)
else:
self.commit()
def __insert__(self, table: str, columns: list, values: tuple, auto_commit: bool = True):
if len(columns) != len(values):
raise self.Error(self.Error.Type.arguments_error)
if not isinstance(values, tuple):
values = ((value,) for value in values)
query = str()
if self.connection_type != self.Type.game:
query = f"INSERT INTO {table}"
else:
query = f"INSERT INTO {table}_{self.id}"
query += f"({', '.join(columns)}) VALUES({'?, ' * (len(columns) - 1)}?)"
try:
self.execute(query, values)
except IntegrityError as e:
raise self.Error(self.Error.Type.arguments_error, sql_error=e, sql_query=query, values=[values])
except OperationalError as e:
raise self.Error(self.Error.Type.arguments_error, sql_error=e, sql_query=query, values=[values])
except SQLError as e:
raise self.Error(self.Error.Type.sql_error, sql_error=e, sql_query=query, values=[values])
else:
if auto_commit:
self.commit()
def insert_entity(self,
# game table values:
rating: float = None, used: bool = None,
# server and client table values:
name: str = None, base_rating: float = None, description: str = None,
# server table value:
popularity: int = None,
# True to commit immediately
auto_commit: bool = True
):
if self.connection_type.value <= 1: # server or client connection
if base_rating is None or description is None or name is None:
raise TypeError()
            if self.connection_type == self.Type.server:
if popularity is None:
raise TypeError()
self.__insert__("entities",
["name", "base_rating", "description", "popularity"],
(name, base_rating, description, popularity))
else:
self.__insert__("entities",
["name", "base_rating", "description"],
(name, base_rating, description))
else:
if rating is None or used is None:
raise TypeError()
self.__insert__("entities",
["rating", "used"],
(rating, used))
self.stats.data["entity_count"] += 1
self.stats.write_data()
if auto_commit:
self.commit()
def insert_question(self, text: str = None, rating: float = None, used: bool = None, auto_commit: bool = True):
if self.connection_type == self.Type.game:
if rating is None or used is None:
raise TypeError()
self.__insert__("questions", ["rating", "used"], (rating, used))
else:
if text is None:
raise TypeError()
self.__insert__("questions", ["text"], (text,))
self.stats.data["question_count"] += 1
self.stats.write_data()
if auto_commit:
self.commit()
def insert_answer(self, entity_id: int, question_id: int, answer_value: float, auto_commit: bool = True):
self.__insert__("answers",
["entity_id", "question_id", "answer_value"],
(entity_id, question_id, answer_value))
#self.stats.data["answer_count"] += 1
#self.stats.write_data()
if auto_commit:
self.commit()
def __insertmany__(self, table: str, columns: list, values: list):
if len(columns) != len(values[0]):
raise self.Error(self.Error.Type.missing_args_error,
add_info={"table": table, "columns": columns, "values_example": values[0]})
if not isinstance(values[0], tuple):
values = ((value,) for value in values)
query = str()
if self.connection_type != self.Type.game:
query = f"INSERT INTO {table}"
else:
query = f"INSERT INTO {table}_{self.id}"
query += f"({', '.join(columns)}) VALUES({'?, ' * (len(columns) - 1)}?)"
self.begin_transaction()
try:
self.executemany(query, values)
except IntegrityError as e:
raise self.Error(self.Error.Type.arguments_error, sql_error=e, sql_query=query, values=values)
except OperationalError as e:
raise self.Error(self.Error.Type.arguments_error, sql_error=e, sql_query=query, values=values)
except SQLError as e:
raise self.Error(self.Error.Type.sql_error, sql_error=e, sql_query=query, values=values)
self.commit()
def insertmany_entities(self, values: list):
if not values:
return
if (self.connection_type is self.Type.server and len(values[0]) != 4) or \
(self.connection_type is self.Type.client and len(values[0]) != 3) or \
(self.connection_type is self.Type.game and len(values[0]) != 3):
raise self.Error(self.Error.Type.missing_args_error, values=values)
if self.connection_type == self.Type.server:
if not isinstance(values[0][0], str) or not isinstance(values[0][1], float) or \
not isinstance(values[0][2], str) or not isinstance(values[0][3], int):
raise TypeError()
self.__insertmany__("entities", ["name", "base_rating", "description", "popularity"], values)
elif self.connection_type == self.Type.client:
if not isinstance(values[0][0], str) or not isinstance(values[0][1], float) or \
not isinstance(values[0][2], str):
raise TypeError()
self.__insertmany__("entities", ["name", "base_rating", "description"], values)
else:
if not isinstance(values[0][0], str) or not isinstance(values[0][1], float) or \
not isinstance(values[0][2], str):
raise TypeError()
self.__insertmany__("entities", ["name", "base_rating", "description"], values)
self.stats.data["entities_count"] += len(values)
self.stats.write_data()
def insertmany_questions(self, values: list):
if not values:
return
if (self.connection_type.value <= 1 and len(values[0]) != 1) or \
(self.connection_type == self.Type.game and len(values[0]) != 2):
raise self.Error(self.Error.Type.missing_args_error, values=values)
if self.connection_type.value <= 1:
if isinstance(values[0][0], str):
self.__insertmany__("questions", ["text"], values)
else:
raise TypeError()
else:
            if not isinstance(values[0][0], (int, float)) or not isinstance(values[0][1], bool):
raise TypeError()
self.__insertmany__("questions", ["rating", "used"], values)
self.stats.data["questions_count"] += len(values)
self.stats.write_data()
def insertmany_answers(self, values: list):
if not values:
return
if len(values[0]) != 3:
raise self.Error(self.Error.Type.missing_args_error, values=values)
        if not isinstance(values[0][0], int) or not isinstance(values[0][1], int) or not isinstance(values[0][2], float):
raise TypeError()
self.__insertmany__("answers", ["entity_id", "question_id", "answer_value"], values)
self.stats.data["answers_count"] += len(values)
self.stats.write_data()
def entities_answering_question(self, question_id: int) -> Cursor:
if self.connection_type == self.Type.game:
return self.execute("SELECT tmp.entity_id, tmp.answer_value, e.rating "
f"FROM (SELECT entity_id, answer_value FROM answers_{self.id} WHERE question_id={question_id}) tmp "
f"JOIN entities_{self.id} e ON tmp.entity_id == e.id")
else:
return self.execute("SELECT tmp.entity_id, tmp.answer_value "
f"FROM (SELECT entity_id, answer_value FROM answers WHERE question_id={question_id}) tmp "
f"JOIN entities ON tmp.entity_id == entities.id")
def entities_answering_many_questions(self, question_ids: list) -> list:
answers_subquery = "SELECT entity_id, question_id, answer_value " \
f"FROM answers_{self.id} " \
f"WHERE question_id IN ({', '.join(question_ids)}) "# \
#"ORDER BY entity_id"
query = "SELECT e.id AS entity_id, e.rating AS rating, " \
"a.question_id AS question_id, a.answer_value AS answer_value "\
f"FROM ({answers_subquery}) a "\
f"INNER JOIN entities_{self.id} e ON a.entity_id == e.id "\
"WHERE e.used==0 ORDER BY a.entity_id"
cursor: Cursor = self.execute(query)
cursor.row_factory = Row
# list[tuple[int, float, list[tuple[int, float]]]]
# as list of entities with lists of their answers
result = list()
# tuple[int, float, list[tuple[int, float]]
result_item = tuple()
current_entity_id = -1
for row in cursor:
if current_entity_id == -1:
current_entity_id = row["entity_id"]
result_item = (current_entity_id, row["rating"], list())
elif current_entity_id != row["entity_id"]:
result.append(result_item)
current_entity_id = row["entity_id"]
result_item = (current_entity_id, row["rating"], list())
# tuple[int, float]
answer_item = (row["question_id"], row["answer_value"])
result_item[2].append(answer_item)
return result
def question_ratings(self, threshold: float) -> Cursor:
used_entities = f"SELECT id AS entity_id FROM entities_{self.id} WHERE used=1 AND rating<{threshold}"
question_answers_count = f"SELECT question_id, COUNT(question_id) AS count FROM answers_{self.id} " \
f"WHERE entity_id NOT IN ({used_entities}) " \
"GROUP BY question_id HAVING count>=1"
unused_questions = f"SELECT id AS question_id FROM questions_{self.id} WHERE used=0"
return self.execute(f"SELECT q.question_id AS question_id FROM ({unused_questions}) q "
f"JOIN ({question_answers_count}) c "
"ON q.question_id==c.question_id "
"ORDER BY c.count DESC")
def entity_ratings(self, threshold: float) -> Cursor:
return self.execute(f"SELECT id, rating FROM entities_{self.id} WHERE used=0 AND rating>={threshold} "
f"ORDER BY rating DESC")
def entity_get_name(self, id: int) -> str:
return self.parent_db.execute(f"SELECT name FROM entities WHERE id={id}").fetchone()[0]
def question_get_text(self, id: int) -> str:
return self.parent_db.execute(f"SELECT text FROM questions WHERE id={id}").fetchone()[0]
def entity_set_used(self, id: int):
self.execute(f"UPDATE entities_{self.id} SET used=1 WHERE id=={id}")
def question_set_used(self, id: int):
self.execute(f"UPDATE questions_{self.id} SET used=1 WHERE id=={id}")
def __updatemany__(self, table: str, id_name: str, columns: list, values: list):
table_ = table
if self.connection_type == Connection.Type.game:
table_ = f"{table}_{self.id}"
query = f"UPDATE {table_} SET {'=?, '.join(columns) + '=?'} WHERE {id_name}=?"
self.executemany(query, values)
def update_entity_ratings(self, values: list): # values = list[..., int]
self.__updatemany__("entities", "id", ["rating"], values)
def entity_min_max_rating(self) -> tuple: #tuple[float, float]
return self.get_entities(["MAX(rating)", "MIN(rating)"], "used=0 AND rating>-10000.0").fetchone()
def clear(self):
if self.connection_type == Connection.Type.game:
self.execute(f"DROP TABLE entities_{self.id}")
self.execute(f"DROP TABLE questions_{self.id}")
self.execute(f"DROP TABLE answers_{self.id}")
self.stats.data = Layouts.Stats.Game.template_record.value
else:
self.execute(f"DROP TABLE entities")
self.execute(f"DROP TABLE questions")
self.execute(f"DROP TABLE answers")
if self.connection_type == Connection.Type.server:
self.stats.data = Layouts.Stats.Version.template_server.value
else:
self.stats.data = Layouts.Stats.Version.template_client.value
self.stats.write_data()
def __search_string__(self, table: str, column: str, string: str = None) -> Cursor:
return self.__select__(table, [column], f"{column} LIKE (?)", f'%{string}%')
def search_name(self, name: str) -> list:
return self.__search_string__("entities", "name", name).fetchall()
def search_text(self, text: str) -> list:
return self.__search_string__("questions", "text", text).fetchall()
def __str__(self) -> str:
if self.connection_type == self.Type.game:
return f"ENTITIES:\n{read_sql_query(f'SELECT * FROM entities_{self.id}', self)}\n\n" \
f"QUESTIONS:\n{read_sql_query(f'SELECT * FROM questions_{self.id}', self)}\n\n" \
f"ANSWERS:\n{read_sql_query(f'SELECT * FROM answers_{self.id}', self)}\n"
else:
return f"ENTITIES:\n{read_sql_query('SELECT * FROM entities', self)}\n\n" \
f"QUESTIONS:\n{read_sql_query('SELECT * FROM questions', self)}\n\n" \
f"ANSWERS:\n{read_sql_query('SELECT * FROM answers', self)}\n"
    def entity_count(self) -> int:
        # sqlite3 cursors report rowcount == -1 for SELECT, so count the rows
        return len(self.get_entities(["id"]).fetchall())
    def question_count(self) -> int:
        return len(self.get_questions(["id"]).fetchall())
    def answer_count(self) -> int:
        return len(self.get_answers(["entity_id"]).fetchall())
def update_base_ratings(self, ids_and_popularity_changes: list):
query = "UPDATE entities " \
f"SET popularity=popularity+?, base_rating=popularity/{self.entity_count()} " \
"WHERE id==?"
self.executemany(query, ids_and_popularity_changes)
def update_answers(self, ids_and_values: list):
self.create_function("ANSWER_VALUE_CHANGE", 2, Connection.answer_change)
query = "UPDATE answers " \
"SET answer_value=ANSWER_VALUE_CHANGE(answer_value, ?) " \
"WHERE entity_id==? AND question_id==?"
self.executemany(query, ids_and_values)
@staticmethod
def answer_change(old_value: float, average_given_value: float) -> float:
"""
def sign(a: float) -> float:
return 1.0 if a >= 0.0 else -1.0
return sign(old_value - average_given_value) * abs(abs(old_value)- abs(average_given_value))
"""
# reverted to just mean of those two
return (old_value + average_given_value) / 2
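    # e.g. answer_change(1.0, 0.0) == 0.5 and answer_change(-1.0, 1.0) == 0.0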
def insert_new_entity(self, name: str, desc: str) -> int:
cur = self.cursor()
cur.execute("INSERT INTO entities(name, description, base_rating, popularity) "
"VALUES(?, ?, 0.0, 0)", (name, desc))
id = cur.lastrowid + 1
cur.close()
return id
def insert_new_question(self, text: tuple) -> int:
cur = self.cursor()
cur.execute("INSERT INTO questions(text) VALUES(?)", text)
id = cur.lastrowid
cur.close()
return id
def connect(connection_type: Connection.Type, theme: str, version: Version, path: str = None) -> Connection :
return Connection(connection_type, theme, version, path)
|
[
"56505043+Poscowekh@users.noreply.github.com"
] |
56505043+Poscowekh@users.noreply.github.com
|
114dd608958e7d62cf6e09134d63e1b8fe9d7cb4
|
0ae94ec1eabb14287ba91ba503ecb33162bb2431
|
/Source Code/diffie_hellman.py
|
d17acf9cdbe16ae8a398ab4e92530861b7366632
|
[] |
no_license
|
Lavanya3095/Chat-Application
|
7b46850fd9dd33908332086697def79d3816b78c
|
f7b31af5005295531f42b655f896a5e7449d73f2
|
refs/heads/master
| 2020-04-17T13:10:35.643310
| 2019-01-19T23:38:11
| 2019-01-19T23:38:11
| 166,604,413
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,271
|
py
|
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import dh
from binascii import hexlify
from Crypto.PublicKey import DSA
from Crypto.Signature import DSS
from Crypto.Hash import SHA256
import random

# Both parties must derive their keys from the *same* DH parameters, so the
# parameters are generated once at module level rather than once per party.
parameters = dh.generate_parameters(generator=2, key_size=512, backend=default_backend())
a_private_key = ""
b_private_key = ""
a_peer_public_key = ""
b_peer_public_key = ""
public_keys = {}   # DSA verification keys, one per party ("a"/"b")
perm = []

def rand(start, end, num):
    # sample `num` positions; shared_key() picks these hex digits of the secret
    for j in range(num):
        perm.append(random.randint(start, end))

def public_key_bytes(peer_public_key):
    # serialize a DH public key to bytes so it can be hashed and signed
    return str(peer_public_key.public_numbers().y).encode()

def DSS_sign(c):
    key = DSA.generate(2048)
    public_keys[c] = key.publickey()
    if c == "a":
        hash_obj = SHA256.new(public_key_bytes(a_peer_public_key))
    else:
        hash_obj = SHA256.new(public_key_bytes(b_peer_public_key))
    signer = DSS.new(key, 'fips-186-3')
    signature = signer.sign(hash_obj)
    return hash_obj.hexdigest(), signature

def DSS_verify(c, h, signature):
    if c == "a":
        hash_obj = SHA256.new(public_key_bytes(a_peer_public_key))
    else:
        hash_obj = SHA256.new(public_key_bytes(b_peer_public_key))
    pkey = DSS.new(public_keys[c], 'fips-186-3')
    valid = False
    if h == hash_obj.hexdigest():
        try:
            pkey.verify(hash_obj, signature)
            valid = True
        except ValueError:
            valid = False
    return valid

def diffie_alice_public():
    global a_private_key, a_peer_public_key, ap, a
    a_private_key = parameters.generate_private_key()
    ap = a_private_key
    a_peer_public_key = a_private_key.public_key()
    a = a_peer_public_key
    hash_value, alice_sign = DSS_sign("a")
    return hash_value, alice_sign

def shared_key():
    a_shared_key = ap.exchange(b)
    key = hexlify(a_shared_key).decode()
    final_key = ""
    for i in perm:
        final_key += key[i]
    return final_key

def diffie_bob_public():
    global b_private_key, b_peer_public_key, bp, b
    b_private_key = parameters.generate_private_key()
    bp = b_private_key
    b_peer_public_key = b_private_key.public_key()
    b = b_peer_public_key
    hash_value, bob_sign = DSS_sign("b")
    return hash_value, bob_sign
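
# Hedged end-to-end sketch (this __main__ harness is an assumed addition):
if __name__ == "__main__":
    rand(0, 127, 16)   # 512-bit secret -> 128 hex digits; sample 16 positions
    h_a, sig_a = diffie_alice_public()
    h_b, sig_b = diffie_bob_public()
    print("alice signature valid:", DSS_verify("a", h_a, sig_a))
    print("bob signature valid:", DSS_verify("b", h_b, sig_b))
    print("session key:", shared_key())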
|
[
"lavanya.saravanan-1@ou.edu"
] |
lavanya.saravanan-1@ou.edu
|
2e48a34ab2fbfe035ed2d2a14b9a8cbd208a58ff
|
b423d7b4b2a16c450ae54dce1bc39feccb13ea1e
|
/util.py
|
a15c9ee0292a170bff40b723a43e76d703b5bb8b
|
[
"MIT"
] |
permissive
|
abangadji/supreme-goggles
|
dfede4fc207508e0791cd226c9602aa7d7b25322
|
f1f1e8e1c4c98740ed336bcb0b88de851e3e04f6
|
refs/heads/master
| 2020-04-09T22:22:48.318854
| 2018-04-19T17:57:26
| 2018-04-19T17:57:26
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 695
|
py
|
import io
import datetime
import csv
def destringify(field):
return datetime.datetime.strptime(field, "%Y-%m-%d %H:%M:%S")
FIELDS = ['entry_time',
'exit_time',
]
FIELD_PARSER = {
'entry_time': destringify,
"exit_time": destringify,
}
def recordsFromStream(istrm, fields=FIELDS, parsers=FIELD_PARSER):
reader = csv.DictReader(istrm)
for raw in reader:
tmp = {}
for fld in fields:
p = parsers[fld]
tmp[fld] = p(raw[fld])
yield tmp
def recordsfromfile(path, fields=FIELDS, parsers=FIELD_PARSER):
    # keep the file open while the generator is consumed; returning the
    # generator directly would close the file before any row is read
    with open(path, newline='') as ifil:
        yield from recordsFromStream(ifil, fields, parsers)
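if __name__ == "__main__":
    # hypothetical smoke test; assumes a "sessions.csv" whose header includes
    # the entry_time/exit_time columns declared in FIELDS
    for rec in recordsfromfile("sessions.csv"):
        print(rec["exit_time"] - rec["entry_time"])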
|
[
"smith.h.mark@gmail.com"
] |
smith.h.mark@gmail.com
|
0b810b91af1b5b078f4cd4ef2ba490a904eef4d5
|
3c1a308cff6716e16c4356c48d5610185f3dc4f3
|
/media.py
|
8449cc6dfff788916d928a3db7007658bd22bed4
|
[] |
no_license
|
colecode-ph/movie-website
|
451a1612efe225f974c30e09473ce47ca39748fb
|
238666c6fd7cc40c6b7e6704df139df792c07e28
|
refs/heads/master
| 2021-01-20T16:51:10.721133
| 2017-05-13T05:57:02
| 2017-05-13T05:57:02
| 90,853,092
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 539
|
py
|
class Movie():
    """This class provides a way to store movie-related information."""

    def __init__(self, movie_title, movie_year, movie_storyline, poster_image,
                 trailer_youtube):
        """Initializes the class instance and defines the variables that
        hold the movie-related information."""
self.title = movie_title
self.year = movie_year
self.storyline = movie_storyline
self.poster_image_url = poster_image
self.trailer_youtube_url = trailer_youtube
|
[
"colecode.ph@gmail.com"
] |
colecode.ph@gmail.com
|
6743774b471e2ba7263b7e873aa1520a2b2e5ade
|
a9d7b1469a8e4190867202c259b7ee229634ce2f
|
/Project1.py
|
fe9df5509891f3089d6373436e5f3aa9949139ff
|
[
"MIT"
] |
permissive
|
varnaugj/Python-Early-Codes
|
e1642a2ed5b56324308845f2ec843393e075cde1
|
3b659529c65dc608eaf41ec5d5ffaa4c18704946
|
refs/heads/main
| 2023-05-25T18:06:20.923259
| 2021-06-10T13:43:32
| 2021-06-10T13:43:32
| 363,928,127
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 622
|
py
|
import tkinter as tk

window = tk.Tk()

greeting = tk.Label(text="Test 123")
greeting.pack()

def returnEntry(arg=None):
    # copy the entry's text into the result label, then clear the entry
    name = myEntry.get()
    resultLabel.config(text=name)
    myEntry.delete(0, tk.END)

myEntry = tk.Entry(width=20)
myEntry.focus()
myEntry.bind("<Return>", returnEntry)
myEntry.pack()

button = tk.Button(text="Yeet me!", command=returnEntry, width=25, height=5)
button.pack(fill=tk.X)

resultLabel = tk.Label(text="")
resultLabel.pack(fill=tk.X)

window.mainloop()

#val = input("Enter Text here: ")
#print(val)
#entry = tk.Entry(fg="black",bg="white", width=50)
#entry.pack()
|
[
"varnaugj@yahoo.com"
] |
varnaugj@yahoo.com
|
3d4646d023c36f73f1d72beadfd4780bb00456fb
|
d03b64f4b87438fc6205671bcb43dca462fdf8d1
|
/1.Behavior-FitModels/Functions/costFunctions.py
|
3c5144c8efb959c3c54d0e972502fb19038fd731
|
[
"MIT"
] |
permissive
|
jeroenvanbaar/MoralStrategiesFMRI
|
c0489ec45fa2f829c00951a07f5f647446bf0b17
|
a96ebe7e65162ab488977a93f5a0ef23d94eb34a
|
refs/heads/master
| 2022-10-29T14:18:44.777700
| 2022-09-29T15:06:42
| 2022-09-29T15:06:42
| 170,903,915
| 22
| 12
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,630
|
py
|
import numpy as np
import pandas as pd
import choiceModels
def MP_costfun(param,subDat,printStep=False,printPredictions=False,resid_share=False):
theta = param[0]
phi = param[1]
for trial in range(subDat.shape[0]):
subDat.loc[trial,'prediction'] = choiceModels.MP_model(
subDat.loc[trial,'inv'],
subDat.loc[trial,'mult'],
subDat.loc[trial,'exp'],
theta, phi)
if resid_share == True:
residuals = (subDat.loc[:,'ret'] - subDat.loc[:,'prediction'])/(subDat.loc[:,'inv'] * subDat.loc[:,'mult'])
else:
residuals = subDat.loc[:,'ret'] - subDat.loc[:,'prediction']
residuals = residuals.astype('float')
SSE = np.sum(np.square(residuals))
    if printStep:
        print('theta = %.2f, phi = %.2f, SSE = %.2f' % (theta, phi, SSE))
    if printPredictions:
        print(subDat)
return residuals
def MP_ppSOE_costfun(param,subDat,printStep=False,printPredictions=False,resid_share=False):
theta = param[0]
phi = param[1]
for trial in range(subDat.shape[0]):
subDat.loc[trial,'prediction'] = choiceModels.MP_model_ppSOE(
subDat.loc[trial,'inv'],
subDat.loc[trial,'mult'],
subDat.loc[trial,'exp'],
theta, phi)
if resid_share == True:
residuals = (subDat.loc[:,'ret'] - subDat.loc[:,'prediction'])/(subDat.loc[:,'inv'] * subDat.loc[:,'mult'])
else:
residuals = subDat.loc[:,'ret'] - subDat.loc[:,'prediction']
residuals = residuals.astype('float')
SSE = np.sum(np.square(residuals))
    if printStep:
        print('theta = %.2f, phi = %.2f, SSE = %.2f' % (theta, phi, SSE))
    if printPredictions:
        print(subDat)
return residuals
def IA_costfun(theta,subDat,printStep=False,printPredictions=False,resid_share=False):
for trial in range(subDat.shape[0]):
subDat.loc[trial,'prediction'] = choiceModels.IA_model(
subDat.loc[trial,'inv'],
subDat.loc[trial,'mult'],
theta)
if resid_share == True:
residuals = (subDat.loc[:,'ret'] - subDat.loc[:,'prediction'])/(subDat.loc[:,'inv'] * subDat.loc[:,'mult'])
else:
residuals = subDat.loc[:,'ret'] - subDat.loc[:,'prediction']
residuals = residuals.astype('float')
SSE = np.sum(np.square(residuals))
    if printStep:
        print('theta = %.2f, SSE = %.2f' % (theta, SSE))
    if printPredictions:
        print(subDat)
return residuals
def GA_costfun(theta,subDat,printStep=False,printPredictions=False,resid_share=False):
for trial in range(subDat.shape[0]):
subDat.loc[trial,'prediction'] = choiceModels.GA_model(
subDat.loc[trial,'inv'],
subDat.loc[trial,'mult'],
subDat.loc[trial,'exp'],
theta)
if resid_share == True:
residuals = (subDat.loc[:,'ret'] - subDat.loc[:,'prediction'])/(subDat.loc[:,'inv'] * subDat.loc[:,'mult'])
else:
residuals = subDat.loc[:,'ret'] - subDat.loc[:,'prediction']
residuals = residuals.astype('float')
SSE = np.sum(np.square(residuals))
    if printStep:
        print('theta = %.2f, SSE = %.2f' % (theta, SSE))
    if printPredictions:
        print(subDat)
return residuals
def GA_ppSOE_costfun(theta,subDat,printStep=False,printPredictions=False,resid_share=False):
for trial in range(subDat.shape[0]):
subDat.loc[trial,'prediction'] = choiceModels.GA_model_ppSOE(
subDat.loc[trial,'inv'],
subDat.loc[trial,'mult'],
subDat.loc[trial,'exp'],
theta)
if resid_share == True:
residuals = (subDat.loc[:,'ret'] - subDat.loc[:,'prediction'])/(subDat.loc[:,'inv'] * subDat.loc[:,'mult'])
else:
residuals = subDat.loc[:,'ret'] - subDat.loc[:,'prediction']
residuals = residuals.astype('float')
SSE = np.sum(np.square(residuals))
    if printStep:
        print('theta = %.2f, SSE = %.2f' % (theta, SSE))
    if printPredictions:
        print(subDat)
return residuals
def GR_costfun(subDat,printPredictions=False,resid_share=False):
for trial in range(subDat.shape[0]):
subDat.loc[trial,'prediction'] = choiceModels.GR_model()
if resid_share == True:
residuals = (subDat.loc[:,'ret'] - subDat.loc[:,'prediction'])/(subDat.loc[:,'inv'] * subDat.loc[:,'mult'])
else:
residuals = subDat.loc[:,'ret'] - subDat.loc[:,'prediction']
residuals = residuals.astype('float')
SSE = np.sum(np.square(residuals))
    if printPredictions:
        print(subDat)
return residuals
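# Hypothetical fitting call, assuming a per-subject DataFrame `subDat` with
# the inv/mult/exp/ret columns used above; scipy's least_squares minimizes
# the residual vector these cost functions return.
#
#   import numpy as np
#   from scipy.optimize import least_squares
#   fit = least_squares(MP_costfun, x0=np.array([0.5, 0.5]), args=(subDat,))
#   theta_hat, phi_hat = fit.x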
|
[
"noreply@github.com"
] |
jeroenvanbaar.noreply@github.com
|
2befbe1e545810905b628aa8c59246e1055cad44
|
10465a8e4771b221fb1f4a92b804f8f2cf031edf
|
/server/restore_hardware_db.py
|
f3aa65a03df630794080f982d50e15ce61dd6ef8
|
[
"MIT"
] |
permissive
|
raspihats/EvePnP
|
fd51ba35ebef2838e87108b617b873254863adc3
|
2961d759554c38609f0f431c012ea8d38016f347
|
refs/heads/master
| 2023-01-24T21:17:42.789363
| 2019-06-18T11:46:57
| 2019-06-18T11:46:57
| 164,684,352
| 0
| 0
|
MIT
| 2023-01-14T00:16:23
| 2019-01-08T15:59:56
|
Python
|
UTF-8
|
Python
| false
| false
| 23,827
|
py
|
from tinydb import TinyDB, Query
from tinydb.storages import JSONStorage
from tinydb.middlewares import CachingMiddleware
db = TinyDB("db_hardware.json", storage=CachingMiddleware(JSONStorage))
db.purge()
axis_table = db.table("axis")
axis_table.purge()
axis_table.insert_multiple([
{
"id": "x",
"limit": 450,
"feed_rate": 50000,
"acc": 1500,
"park": 5
},
{
"id": "y",
"limit": 400,
"feed_rate": 50000,
"acc": 1500,
"park": 5
},
{
"id": "z",
"limit": 120,
"feed_rate": 50000,
"acc": 1500,
"park": 59
},
{
"id": "a",
"limit": 360,
"feed_rate": 50000,
"acc": 1500,
"park": 0
},
{
"id": "b",
"limit": 360,
"feed_rate": 50000,
"acc": 1500,
"park": 0
}
])
controllers_table = db.table("controllers")
controllers_table.purge()
controllers_table.insert_multiple([
{
"id": "Mc1",
"driver": "grbl",
"type": "motion",
"port": {
"name": "/dev/ttyAMA0",
"baudrate": 115200,
"bytesize": 8,
"parity": "N",
"stopbits": 1
},
},
{
"id": "Io1",
"driver": "raspihats.i2c_hats.DQ10rly",
"type": "input_output",
"adr": 0x50
}
])
actuators_table = db.table("actuators")
actuators_table.purge()
actuators_table.insert_multiple([
{
"id": "VacuumPump",
"type": "ToggleActuator",
"initial_value": 0,
"code": """
def set(value):
controllers['Mc1'].spindle_duty = value * 100
def get():
return 1 if controllers['Mc1'].spindle_duty else 0
"""
},
{
"id": "Valve1",
"type": "ToggleActuator",
"initial_value": 0,
"code": """
def set(value):
controllers['Mc1'].coolant_flood = value
def get():
controllers['Mc1'].coolant_flood
"""
},
{
"id": "Valve2",
"type": "ToggleActuator",
"initial_value": 0,
"code": """
def set(value):
controllers['Mc1'].coolant_mist = value
def get():
controllers['Mc1'].coolant_mist
"""
}
])
cameras_table = db.table("cameras")
cameras_table.purge()
cameras_table.insert_multiple([
{
"id": "Cam1",
"description": "Up looking camera"
},
{
"id": "Cam2",
"description": "Down looking camera",
}
])
phead_one_code = """
def move(point, speed_factor=1):
feed_rate = 50000 * speed_factor
axis_xy_config = controllers[motion_controller_xy_id].config['axis']
axis_zr_config = controllers[motion_controller_zr_id].config['axis']
# park pick n place axis before moving x or y
if 'x' in point or 'y' in point:
controllers[motion_controller_zr_id].move({
z_axis_id: axis_zr_config[z_axis_id]['park']
}, feed_rate)
# move x,y
position = {}
if 'x' in point:
position[x_axis_id] = point['x'] + offset['x']
if 'y' in point:
position[y_axis_id] = point['y'] + offset['y']
if position:
controllers[motion_controller_xy_id].move(position, feed_rate)
# raise/descent and rotate nozzle
position = {}
if 'z' in point:
position[z_axis_id] = point['z']
if 'r' in point:
position[r_axis_id] = point['r']
if position:
controllers[motion_controller_zr_id].move(position, feed_rate)
def get_position():
position_xy = controllers[motion_controller_xy_id].position
if motion_controller_xy_id == motion_controller_zr_id:
position_zr = position_xy
else:
position_zr = controllers[motion_controller_zr_id].position
return {
'x' : position_xy[x_axis_id],
'y' : position_xy[y_axis_id],
'z' : position_zr[z_axis_id],
'r' : position_zr[r_axis_id]
}
def jog(axis, step, speed_factor=1):
feed_rate = 50000 * speed_factor
axis_map = {
'x' : x_axis_id,
'y' : y_axis_id,
'z' : z_axis_id,
'r' : r_axis_id
}
if axis == 'x' or axis == 'y':
controllers[motion_controller_xy_id].jog(axis_map[axis], step, feed_rate)
if axis == 'z' or axis == 'r':
controllers[motion_controller_zr_id].jog(axis_map[axis], step, feed_rate)
def park(axis_list, speed_factor=1):
feed_rate = 50000 * speed_factor
axis_xy_config = controllers[motion_controller_xy_id].config['axis']
axis_zr_config = controllers[motion_controller_zr_id].config['axis']
# park z and r axis
park_position = {}
# park z even if x, y parking is desired
if 'x' in axis_list or 'y' in axis_list or 'z' in axis_list:
park_position[z_axis_id] = axis_zr_config[z_axis_id]['park']
if 'r' in axis_list:
park_position[r_axis_id] = axis_zr_config[r_axis_id]['park']
if park_position:
controllers[motion_controller_zr_id].move(park_position, feed_rate)
# park x and y axis
park_position = {}
if 'x' in axis_list:
park_position[x_axis_id] = axis_xy_config[x_axis_id]['park']
if 'y' in axis_list:
park_position[y_axis_id] = axis_xy_config[y_axis_id]['park']
if park_position:
controllers[motion_controller_xy_id].move(park_position, feed_rate)
def pick(point, speed_factor=1):
feed_rate = 50000 * speed_factor
axis_zr_config = controllers[motion_controller_zr_id].config['axis']
# park z and axis
controllers[motion_controller_zr_id].move({
z_axis_id: axis_zr_config[z_axis_id]['park'],
}, feed_rate)
# move to pick point on x and y axis and park rotation axis
point_xy = {
x_axis_id: point['x'],
y_axis_id: point['y']
}
if motion_controller_xy_id == motion_controller_zr_id:
# also park raxis if z and r axis use same motion controller as x and y
point_xy[r_axis_id] = axis_zr_config[r_axis_id]['park']
controllers[motion_controller_xy_id].move(point_xy, feed_rate)
else:
controllers[motion_controller_xy_id].move(point_xy, feed_rate)
# rotate nozzle
controllers[motion_controller_zr_id].move({
r_axis_id: axis_zr_config[r_axis_id]['park']
}, feed_rate)
# lower nozzle
controllers[motion_controller_zr_id].move({
z_axis_id: point['z']
}, feed_rate)
# enable vacuum
actuators[vacuum_actuator_id].set(True)
# raise nozzle
controllers[motion_controller_zr_id].move({
z_axis_id: axis_zr_config[z_axis_id]['park']
}, feed_rate)
def place(point, rotation, speed_factor=1):
feed_rate = 50000 * speed_factor
axis_zr_config = controllers[motion_controller_zr_id].config['axis']
# move to place point on x and y axis
point_xy = {
x_axis_id: point['x'],
y_axis_id: point['y']
}
if motion_controller_xy_id == motion_controller_zr_id:
# also rotate if z and r axis use same motion controller
point_xy[r_axis_id] = rotation
controllers[motion_controller_xy_id].move(point_xy, feed_rate)
else:
controllers[motion_controller_xy_id].move(point_xy, feed_rate)
# rotate nozzle
controllers[motion_controller_zr_id].move({
r_axis_id: rotation
}, feed_rate)
# lower nozzle
controllers[motion_controller_zr_id].move({
z_axis_id: point['z']
}, feed_rate)
# disable vacuum
actuators[vacuum_actuator_id].set(False)
# raise nozzle, park rotation axis
controllers[motion_controller_zr_id].move({
z_axis_id: axis_zr_config[z_axis_id]['park']
}, feed_rate)
"""
phead_two_code = """
def move(point, speed_factor=1):
feed_rate = 50000 * speed_factor
axis_xy_config = controllers[motion_controller_xy_id].config['axis']
axis_zr_config = controllers[motion_controller_zr_id].config['axis']
# park pick n place axis before moving x or y
if 'x' in point or 'y' in point:
controllers[motion_controller_zr_id].move({
z_axis_id: axis_zr_config[z_axis_id]['park']
}, feed_rate)
# move x,y
position = {}
if 'x' in point:
position[x_axis_id] = point['x'] + offset['x']
if 'y' in point:
position[y_axis_id] = point['y'] + offset['y']
if position:
controllers[motion_controller_xy_id].move(position, feed_rate)
# raise/descent and rotate nozzle
position = {}
if 'z' in point:
position[z_axis_id] = 118 - point['z']
if 'r' in point:
position[r_axis_id] = point['r']
if position:
controllers[motion_controller_zr_id].move(position, feed_rate)
def get_position():
position_xy = controllers[motion_controller_xy_id].position
if motion_controller_xy_id == motion_controller_zr_id:
position_zr = position_xy
else:
position_zr = controllers[motion_controller_zr_id].position
return {
'x' : position_xy[x_axis_id],
'y' : position_xy[y_axis_id],
'z' : 118 - position_zr[z_axis_id],
'r' : position_zr[r_axis_id]
}
def jog(axis, step, speed_factor=1):
feed_rate = 50000 * speed_factor
axis_map = {
'x' : x_axis_id,
'y' : y_axis_id,
'z' : z_axis_id,
'r' : r_axis_id
}
if axis == 'z':
step = -step
if axis == 'x' or axis == 'y':
controllers[motion_controller_xy_id].jog(axis_map[axis], step, feed_rate)
if axis == 'z' or axis == 'r':
controllers[motion_controller_zr_id].jog(axis_map[axis], step, feed_rate)
def park(axis_list, speed_factor=1):
feed_rate = 50000 * speed_factor
axis_xy_config = controllers[motion_controller_xy_id].config['axis']
axis_zr_config = controllers[motion_controller_zr_id].config['axis']
# park z and r axis
park_position = {}
# park z even if x, y parking is desired
if 'x' in axis_list or 'y' in axis_list or 'z' in axis_list:
park_position[z_axis_id] = axis_zr_config[z_axis_id]['park']
if 'r' in axis_list:
park_position[r_axis_id] = axis_zr_config[r_axis_id]['park']
if park_position:
controllers[motion_controller_zr_id].move(park_position, feed_rate)
# park x and y axis
park_position = {}
if 'x' in axis_list:
park_position[x_axis_id] = axis_xy_config[x_axis_id]['park']
if 'y' in axis_list:
park_position[y_axis_id] = axis_xy_config[y_axis_id]['park']
if park_position:
controllers[motion_controller_xy_id].move(park_position, feed_rate)
def pick(point, speed_factor=1):
feed_rate = 50000 * speed_factor
axis_zr_config = controllers[motion_controller_zr_id].config['axis']
# park z axis
controllers[motion_controller_zr_id].move({
z_axis_id: axis_zr_config[z_axis_id]['park']
}, feed_rate)
# move to pick point on x and y axis and park rotation axis
point_xy = {
x_axis_id: point['x'],
y_axis_id: point['y']
}
if motion_controller_xy_id == motion_controller_zr_id:
# also park raxis if z and r axis use same motion controller as x and y
point_xy[r_axis_id] = axis_zr_config[r_axis_id]['park']
controllers[motion_controller_xy_id].move(point_xy, feed_rate)
else:
controllers[motion_controller_xy_id].move(point_xy, feed_rate)
# rotate nozzle
controllers[motion_controller_zr_id].move({
r_axis_id: axis_zr_config[r_axis_id]['park']
}, feed_rate)
# position z axis, lower nozzle
controllers[motion_controller_zr_id].move({
z_axis_id: 118 - point['z']
}, feed_rate)
# enable vacuum
actuators[vacuum_actuator_id].set(True)
# raise nozzle
controllers[motion_controller_zr_id].move({
z_axis_id: axis_zr_config[z_axis_id]['park']
}, feed_rate)
def place(point, rotation, speed_factor=1):
feed_rate = 50000 * speed_factor
axis_zr_config = controllers[motion_controller_zr_id].config['axis']
# move to place point on x and y axis
point_xy = {
x_axis_id: point['x'],
y_axis_id: point['y']
}
if motion_controller_xy_id == motion_controller_zr_id:
# also rotate if z and r axis use same motion controller as x and y axis
point_xy[r_axis_id] = rotation
controllers[motion_controller_xy_id].move(point_xy, feed_rate)
else:
controllers[motion_controller_xy_id].move(point_xy, feed_rate)
# rotate nozzle
controllers[motion_controller_zr_id].move({
r_axis_id: rotation
}, feed_rate)
# lower nozzle
controllers[motion_controller_zr_id].move({
z_axis_id: 118 - point['z']
}, feed_rate)
# disable vacuum
actuators[vacuum_actuator_id].set(False)
# raise nozzle
controllers[motion_controller_zr_id].move({
z_axis_id: axis_zr_config[z_axis_id]['park']
}, feed_rate)
"""
cam_code = """
def move(point, speed_factor=1):
feed_rate = 50000 * speed_factor
axis_xy_config = controllers[motion_controller_xy_id].config['axis']
# move x,y
position = {}
if 'x' in point:
position[x_axis_id] = point['x'] + offset['x']
if 'y' in point:
position[y_axis_id] = point['y'] + offset['y']
controllers[motion_controller_xy_id].move(position, feed_rate)
def get_position():
return {
'x' : 200,
'y' : 201,
'z' : 202,
'r' : 204
}
"""
head_table = db.table("head")
head_table.purge()
head_table.insert(
{
"x_axis_id": "x",
"y_axis_id": "y",
"motion_controller_xy_id": "Mc1", # for x/y head movement
"placement_heads": [
{
"id": "PlaceHead1",
"z_axis_id": "z",
"r_axis_id": "a",
"motion_controller_zr_id": "Mc1", # for z/rot placement head movement
"offset": {"x": 0.0, "y": 0.0},
"vacuum_actuator_id": "Valve1",
"code": phead_one_code
},
{
"id": "PlaceHead2",
"z_axis_id": "z",
"r_axis_id": "b",
"motion_controller_zr_id": "Mc1", # for z/rot placement head movement
"offset": {"x": -43.9, "y": -0.2},
"vacuum_actuator_id": "Valve2",
"code": phead_two_code
}
],
"cameras": [
{
"id": "Cam2",
"offset": {"x": -21.9, "y": -20.0},
"code": cam_code
}
]
}
)
# /api/heads/Head1/p_heads/PlaceHead1/position
# /api/p_heads/PlaceHead1/position
# heads_table = db.table("heads")
# heads_table.purge()
# heads_table.insert_multiple([
# {
# "id": "Head1",
# "motion_controller_id": "Mc1", # for x/y movement
# "x_axis_id": "x",
# "y_axis_id": "y",
# "placement_heads": [
# {
# "id": "PlaceHead1",
# "offset": {"x": 0.0, "y": 0.0},
# "motion_controller_id": "Mc1", # for z/rot movement
# "pnp_axis_id": "z",
# "rotation_axis_id": "a",
# "vacuum_actuator_id": "Valve1",
# "code": phead_one_code
# },
# {
# "id": "PlaceHead2",
# "offset": {"x": -43.8, "y": 0.0},
# "motion_controller_id": "Mc1", # for z/rot movement
# "pnp_axis_id": "z",
# "rotation_axis_id": "b",
# "vacuum_actuator_id": "Valve2",
# "code": phead_two_code
# }
# ],
# "cameras": [
# {
# "id": "Cam2",
# "offset": {"x": -21.9, "y": -20.0}
# }
# ]
# }
# ])
feeders_code_xn = """# get_point gets called before pick operation
# and should return the next pick point
def get_point(point, count, size):
point['x'] -= (size-count) * 3.98
return point"""
feeders_code_xp = """# get_point gets called before pick operation
# and should return the next pick point
def get_point(point, count, size):
point['x'] += (size-count) * 3.98
return point"""
feeders_table = db.table("feeders")
feeders_table.purge()
feeders_table.insert_multiple([
{
"count": 47,
"type": "StripFeeder",
"id": "StripFeeder_1",
"size": 47,
"component": {
"value": "5.6K 1%",
"package": "RES-1206"
},
"point": {"x": 225.6, "y": 130.5, "z": 31},
"code": feeders_code_xn
},
{
"count": 47,
"type": "StripFeeder",
"id": "StripFeeder_2",
"size": 47,
"component": {
"value": "10nF 50V 10%",
"package": "CAP-0603"
},
"point": {"x": 225.6, "y": 142.5, "z": 31},
"code": feeders_code_xn
},
{
"count": 47,
"type": "StripFeeder",
"id": "StripFeeder_3",
"size": 47,
"component": {
"value": "24K 1%",
"package": "RES-0603"
},
"point": {"x": 225.6, "y": 154.5, "z": 31},
"code": feeders_code_xn
},
{
"count": 47,
"type": "StripFeeder",
"id": "StripFeeder_4",
"size": 47,
"component": {
"value": "18K 1%",
"package": "RES-0603"
},
"point": {"x": 225.6, "y": 166.6, "z": 31},
"code": feeders_code_xn
},
{
"count": 47,
"type": "StripFeeder",
"id": "StripFeeder_5",
"size": 47,
"component": {
"value": "47K 1%",
"package": "RES-0603"
},
"point": {"x": 225.6, "y": 178.7, "z": 31},
"code": feeders_code_xn
},
{
"count": 47,
"type": "StripFeeder",
"id": "StripFeeder_6",
"size": 47,
"component": {
"value": "12pF 50V 5%",
"package": "CAP-0603"
},
"point": {"x": 225.6, "y": 190.7, "z": 31},
"code": feeders_code_xn
},
{
"count": 47,
"type": "StripFeeder",
"id": "StripFeeder_7",
"size": 47,
"component": {
"value": "1uF 50V 5%",
"package": "CAP-0603"
},
"point": {"x": 225.6, "y": 202.8, "z": 31},
"code": feeders_code_xn
},
{
"count": 47,
"type": "StripFeeder",
"id": "StripFeeder_8",
"size": 47,
"component": {
"value": "100nF 10V 5%",
"package": "CAP-0603"
},
"point": {"x": 225.6, "y": 214.8, "z": 31},
"code": feeders_code_xn
},
{
"count": 47,
"type": "StripFeeder",
"id": "StripFeeder_9",
"size": 47,
"component": {"value": "BSS84", "package": "SOT-23"},
"point": {"x": 251.6, "y": 133.2, "z": 31},
"code": feeders_code_xp
},
{
"count": 23,
"type": "StripFeeder",
"id": "StripFeeder_10",
"size": 23,
"component": {
"value": "680R 5%",
"package": "RES-0603"
},
"point": {"x": 347.2, "y": 146, "z": 31},
"code": feeders_code_xp
},
{
"count": 23,
"type": "StripFeeder",
"id": "StripFeeder_11",
"size": 23,
"component": {
"value": "10K 1%",
"package": "RES-0603"
},
"point": {"x": 347.2, "y": 158, "z": 31},
"code": feeders_code_xp
},
{
"count": 47,
"type": "StripFeeder",
"id": "StripFeeder_12",
"size": 47,
"component": {"value": "OSG50603C1E", "package": "LED-0603"},
"point": {"x": 251.6, "y": 169.4, "z": 31},
"code": feeders_code_xp
},
{
"count": 47,
"type": "StripFeeder",
"id": "StripFeeder_13",
"size": 47,
"component": {
"value": "PDTC114ET",
"package": "SOT-23",
},
"point": {"x": 251.6, "y": 181.4, "z": 31},
"code": feeders_code_xp
},
{
"count": 23,
"type": "StripFeeder",
"id": "StripFeeder_14",
"size": 23,
"component": {
"value": "0R 1%",
"package": "RES-0603"
},
"point": {"x": 347.2, "y": 194, "z": 31},
"code": feeders_code_xp
},
{
"count": 23,
"type": "StripFeeder",
"id": "StripFeeder_15",
"size": 23,
"component": {
"value": "150R 5%",
"package": "RES-0603"
},
"point": {"x": 347.2, "y": 206, "z": 31},
"code": feeders_code_xp
},
{
"count": 47,
"type": "StripFeeder",
"id": "StripFeeder_16",
"size": 47,
"component": {
"value": "LL4148",
"package": "SOD-80"
},
"point": {"x": 251.6, "y": 217.4, "z": 31},
"code": feeders_code_xp
}
])
packages_table = db.table("packages")
packages_table.purge()
# fiducials
packages_table.insert_multiple([
{
"id": "FID-1x3",
"length": 3.00,
"width": 3.00,
"height": 0.01
}
])
# resistors
packages_table.insert_multiple([
{
"id": "RES-0201",
"length": 0.60,
"width": 0.30,
"height": 0.25
},
{
"id": "RES-0402",
"length": 1.00,
"width": 0.50,
"height": 0.35
},
{
"id": "RES-0603",
"length": 1.55,
"width": 0.85,
"height": 0.45
},
{
"id": "RES-0805",
"length": 2.00,
"width": 1.20,
"height": 0.45
},
{
"id": "RES-1206",
"length": 3.20,
"width": 1.60,
"height": 0.55
},
{
"id": "RES-1210",
"length": 3.20,
"width": 2.20,
"height": 0.55
},
{
"id": "RES-1218",
"length": 3.20,
"width": 4.60,
"height": 0.55
},
{
"id": "RES-2010",
"length": 5.00,
"width": 2.50,
"height": 0.60
},
{
"id": "RES-2512",
"length": 6.30,
"width": 3.20,
"height": 0.60
}
])
# capacitors
packages_table.insert_multiple([
{
"id": "CAP-0201",
"length": 0.60,
"width": 0.30,
"height": 0.50
},
{
"id": "CAP-0402",
"length": 1.00,
"width": 0.50,
"height": 0.50
},
{
"id": "CAP-0603",
"length": 1.55,
"width": 0.85,
"height": 1.00
},
{
"id": "CAP-0805",
"length": 2.00,
"width": 1.20,
"height": 1.00
},
{
"id": "CAP-1206",
"length": 3.20,
"width": 1.60,
"height": 1.00
},
{
"id": "CAP-1210",
"length": 3.20,
"width": 2.20,
"height": 1.00
}
])
# leds
packages_table.insert_multiple([
{
"id": "LED-0603",
"length": 1.55,
"width": 0.85,
"height": 1.00
}
])
# diodes, transistors
packages_table.insert_multiple([
{
"id": "SOD-80",
"length": 3.70,
"width": 1.7,
"height": 1.7
},
{
"id": "SOT-23",
"length": 3.10,
"width": 2.60,
"height": 1.20
},
{
"id": "SOT-23-5",
"length": 3.10,
"width": 2.60,
"height": 1.20
},
{
"id": "SOT-23-6",
"length": 3.10,
"width": 2.60,
"height": 1.20
}
])
# make sure that all data is safely written when using Caching
db.close()
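# Hypothetical read-back check against the database written above (a sketch):
#   from tinydb import TinyDB, Query
#   db = TinyDB("db_hardware.json")
#   Feeder = Query()
#   print(db.table("feeders").search(Feeder.component.package == "RES-0603"))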
|
[
"florin.costa83@gmail.com"
] |
florin.costa83@gmail.com
|
069b52697a2f61796ded5b06c27982de54545d6e
|
663d518e295f5898418f7dc6a0d3b15181329412
|
/toy_example.py
|
3a030cb03e83cd1e8008e3f5a8bed3ac0b785ee4
|
[] |
no_license
|
bgyori/paths_graph_paper
|
981ee7ecf3595473db316e407ee9a783df946ea3
|
aa613a0979431f15face47e8e06738575f673a66
|
refs/heads/master
| 2020-03-27T18:38:42.164736
| 2018-10-04T19:16:27
| 2018-10-04T19:16:27
| 146,934,273
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,013
|
py
|
import sys
from os.path import join
import itertools
import networkx as nx
from paths_graph import PathsGraph, CombinedPathsGraph, CFPG, \
get_reachable_sets
"""
g_edges = [
('S', 'B'), ('S', 'E'),
('B', 'C'), ('B', 'S'), ('B', 'E'), ('B', 'T'),
('C', 'S'), ('C', 'T'), ('D', 'B'), ('D', 'S'), ('D', 'F'),
('E', 'F'), ('E', 'T'),
('F', 'E'), ('F', 'B'),
('T', 'C'), ('T', 'E'), ('T', 'D'),
]
g_edges = [
('A', 'B'), ('A', 'E'),
('B', 'C'), ('B', 'A'), ('B', 'E'),
('C', 'A'), ('D', 'B'), ('D', 'A'), ('D', 'F'),
('E', 'F'),
('F', 'E'), ('F', 'B'),
]
"""
g_edges = [
('S', 'A'), ('S', 'B'),
('A', 'C'), ('A', 'T'), ('A', 'D'),
('B', 'C'), ('B', 'T'),
('C', 'A'), ('C', 'B'),
('D', 'C'),
]
g = nx.DiGraph()
g.add_edges_from(g_edges)
def draw(g, filename):
fixed_edges = []
for u, v in g.edges():
u_fix = str(u).replace("'", "").replace('"', '')
v_fix = str(v).replace("'", "").replace('"', '')
fixed_edges.append((u_fix, v_fix))
g_fixed = nx.DiGraph()
g_fixed.add_edges_from(fixed_edges)
ag = nx.nx_agraph.to_agraph(g_fixed)
ag.draw(filename, prog='dot')
def draw_reachset(g, level, direction, depth, output_dir):
if direction not in ('forward', 'backward'):
raise ValueError("direction must be 'forward' or 'backward'")
edges = []
for level_ix in range(1, depth+1):
if direction == 'forward':
prev_nodes = [(level_ix - 1, n) for n in level[level_ix - 1]]
cur_nodes = [(level_ix, n) for n in level[level_ix]]
elif direction == 'backward':
prev_nodes = [(depth - (level_ix - 1), n)
for n in level[level_ix - 1]]
cur_nodes = [(depth - level_ix, n) for n in level[level_ix]]
for prev_node, cur_node in itertools.product(prev_nodes, cur_nodes):
if direction == 'forward' and (cur_node[1] in g[prev_node[1]]):
edges.append((prev_node, cur_node))
elif direction == 'backward' and (prev_node[1] in g[cur_node[1]]):
edges.append((prev_node, cur_node))
graph = nx.DiGraph()
graph.add_edges_from(edges)
draw(graph, join(output_dir, 'toy_%s_graph.pdf' % direction))
if __name__ == '__main__':
output_dir = sys.argv[1]
# Draw G
draw(g, join(output_dir, 'toy_g.pdf'))
depth = 4
source = 'S'
target = 'T'
f_level, b_level = get_reachable_sets(g, source, target, depth)
draw_reachset(g, f_level, 'forward', depth, output_dir)
draw_reachset(g, b_level, 'backward', depth, output_dir)
print("f_level", f_level)
print("b_level", b_level)
pg = PathsGraph.from_graph(g, source, target, depth)
draw(pg.graph, join(output_dir, 'toy_pg_%d.pdf' % depth))
# Combined paths graph
pg_list = []
    for i in range(1, depth+1):
pg_list.append(PathsGraph.from_graph(g, source, target, i))
cpg = CombinedPathsGraph(pg_list)
draw(cpg.graph, join(output_dir, 'toy_combined_pg.pdf'))
# Cycle-free paths graph
cfpg = CFPG.from_pg(pg)
# Remove the frozensets for drawing
cfpg_edges_fixed = []
for u, v in cfpg.graph.edges():
u_set = '{}' if u[2] == 0 else str(set(u[2]))
v_set = '{}' if v[2] == 0 else str(set(v[2]))
u_fixed = str((u[0], u[1], u_set))
v_fixed = str((v[0], v[1], v_set))
cfpg_edges_fixed.append((u_fixed, v_fixed))
cfpg_fixed = nx.DiGraph()
cfpg_fixed.add_edges_from(cfpg_edges_fixed)
draw(cfpg_fixed, join(output_dir, 'toy_cfpg_%d.pdf' % depth))
# Non-uniform sampling
# Graph for testing sampling uniformly vs. non-uniformly
g_samp = nx.DiGraph()
g_samp.add_edges_from([
('S', 'A1'), ('S', 'A2'),
('A1', 'B1'),
('A2', 'B2'), ('A2', 'B3'), ('A2', 'B4'), ('A2', 'B5'),
('B1', 'T'),
('B2', 'T'), ('B3', 'T'), ('B4', 'T'), ('B5', 'T')])
draw(g_samp, join(output_dir, 'toy_g_samp.pdf'))
|
[
"bachmanjohn@gmail.com"
] |
bachmanjohn@gmail.com
|
cc25488a356eea6631beecf1a989641aef813926
|
8698757521458c2061494258886e5d3cdfa6ff11
|
/argo/core/network/BernoulliPlusMinusOne.py
|
76ad7fbf9c559a0f3805086659ee05c98f3fd103
|
[
"MIT"
] |
permissive
|
ricvo/argo
|
546c91e84d618c4bc1bb79a6bc7cba01dca56d57
|
a10c33346803239db8a64c104db7f22ec4e05bef
|
refs/heads/master
| 2023-02-25T01:45:26.412280
| 2020-07-05T22:55:35
| 2020-07-05T22:55:35
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,524
|
py
|
import tensorflow as tf
import sonnet as snt
import numpy as np
from operator import xor
import types
import tensorflow_probability as tfp

from .PlusMinusOneMapping import PlusMinusOneMapping
from .AbstractModule import AbstractModule
class BernoulliPlusMinusOne(AbstractModule):
def __init__(self, output_size=-1, output_shape=-1, initializers={}, regularizers={}, clip_value=0, dtype=None,
name='Bernoulli'):
        super().__init__(name=name)
        assert xor(output_size == -1, output_shape == -1), "Either output_size or output_shape must be specified, not both"
if output_size!=-1:
self._output_shape = [output_size]
else:
self._output_shape = output_shape
self._initializers = initializers
self._regularizers = regularizers
self._clip_value = clip_value
self._dtype = dtype
def _build(self, inputs):
        # linear layer producing the Bernoulli logits, reshaped to the output shape
output_shape = [-1] + self._output_shape
logits = tf.reshape(snt.Linear(np.prod(self._output_shape), initializers=self._initializers, regularizers=self._regularizers)(inputs),output_shape)
dtype = tf.float32 #inputs.dtype
if self._dtype is not None:
dtype = self._dtype
if self._clip_value > 0:
probs = tf.nn.sigmoid(logits)
probs = tf.clip_by_value(probs, self._clip_value, 1-self._clip_value)
bernoulli = tfp.distributions.Bernoulli(probs=probs, dtype=dtype)
else:
bernoulli = tfp.distributions.Bernoulli(logits=logits, dtype=dtype)
affine_transform = PlusMinusOneMapping(scale=2., shift=-1.)
bernoulli_plus_minus_one = tfp.distributions.TransformedDistribution(distribution = bernoulli, bijector = affine_transform, name="BernoulliPlusMinusOne")
def reconstruction_node(self):
return self.mean()
bernoulli_plus_minus_one.reconstruction_node = types.MethodType(reconstruction_node, bernoulli_plus_minus_one)
def distribution_parameters(self):
return [self.mean()]
bernoulli_plus_minus_one.distribution_parameters = types.MethodType(distribution_parameters, bernoulli_plus_minus_one)
def get_probs(self):
return self.distribution.probs
bernoulli_plus_minus_one.get_probs = types.MethodType(get_probs, bernoulli_plus_minus_one)
return bernoulli_plus_minus_one
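# Hypothetical usage of the module above (a sketch; tensor shapes and the
# clip value are made up):
#
#   layer = BernoulliPlusMinusOne(output_size=10, clip_value=1e-4)
#   dist = layer(inputs)                  # inputs: [batch, features] float32
#   samples = dist.sample()               # values in {-1.0, +1.0}
#   recon = dist.reconstruction_node()    # mean of the transformed Bernoulli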
|
[
"volpi@rist.ro"
] |
volpi@rist.ro
|
97cceea6940dc0fa8bde3ae795dc678e5f5deeb8
|
d2c92cfe95a60a12660f1a10c0b952f0df3f0e8e
|
/zz91cp/zz91cp/news.py
|
2139b8049b8fff68de63a330fe6b56bb90017710
|
[] |
no_license
|
snamper/zzpython
|
71bf70ec3762289bda4bba80525c15a63156a3ae
|
20415249fa930ccf66849abb5edca8ae41c81de6
|
refs/heads/master
| 2021-12-21T16:12:22.190085
| 2017-09-30T06:26:05
| 2017-09-30T06:26:05
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,523
|
py
|
#-*- coding:utf-8 -*-
import os,MySQLdb,settings,codecs,time,sys,datetime,random,shutil,requests
from django.utils.http import urlquote
from django.shortcuts import render_to_response
from django.template.loader import get_template
from django.http import HttpResponse,HttpResponseRedirect,HttpResponseNotFound,HttpResponsePermanentRedirect
from django.core.paginator import Paginator,InvalidPage,EmptyPage,PageNotAnInteger
from datetime import timedelta, date
from zz91settings import SPHINXCONFIG
from django.core.cache import cache
try:
import cPickle as pickle
except ImportError:
import pickle
from math import ceil
from sphinxapi import *
from zz91page import *
reload(sys)
sys.setdefaultencoding('utf-8')
from zz91db_ast import companydb
dbc=companydb()
nowpath=os.path.dirname(__file__)
execfile(nowpath+"/conn.py")
execfile(nowpath+"/commfunction.py")
execfile(nowpath+"/function.py")
def newsdetail(request, newsid):
    host = request.META['HTTP_HOST']
    navlistt = getlbhex()
    listall = getnewsdetail(newsid=newsid, newszd="old_news_id")
    list = listall['list']
    if not list:
        return render_to_response('404.html', locals())
        # t = get_template('404.html')
        # html = t.render(Context())
        # return HttpResponseNotFound(html)
    tags = listall['tags']
    newslist = getindexbbslist(limitcount=10)
    prolist = getindexofferlist_pic(kname='废', limitcount=8)
    xgnewslist = getoldnewslist()
    xgnewslist1 = getindexbbslist(kname=tags, limitcount=10)
    if tags and tags != '':
        cplist = getcplist(tags, 50)
    else:
        cplist = getcplist('', 50)
    # render first, then release database connections before returning
    response = render_to_response('news/newsdetail.html', locals())
    closeconn()
    return response
def bbsdetail(request, newsid):
    host = request.META['HTTP_HOST']
    navlistt = getlbhex()
    listall = getnewsdetail(newsid=newsid, newszd="old_forum_id")
    list = listall['list']
    tags = listall['tags']
    newslist = getindexbbslist(limitcount=10)
    prolist = getindexofferlist_pic(kname='废', limitcount=8)
    xgnewslist = getoldnewslist()
    xgnewslist1 = getindexbbslist(kname=tags, limitcount=10)
    if tags and tags != '':
        cplist = getcplist(tags, 50)
    else:
        cplist = getcplist('', 50)
    response = render_to_response('news/newsdetail.html', locals())
    closeconn()
    return response

def guanzhudetail(request, newsid):
    navlistt = getlbhex()
    host = request.META['HTTP_HOST']
    listall = getnewsdetail(newsid=newsid, newszd="old_guanzhu_id")
    list = listall['list']
    tags = listall['tags']
    newslist = getindexbbslist(limitcount=10)
    prolist = getindexofferlist_pic(kname='废', limitcount=8)
    xgnewslist = getoldnewslist()
    xgnewslist1 = getindexbbslist(kname=tags, limitcount=10)
    if tags and tags != '':
        cplist = getcplist(tags, 50)
    else:
        cplist = getcplist('', 50)
    response = render_to_response('news/newsdetail.html', locals())
    closeconn()
    return response
def newslist(request, keywords, page):
    host = request.META['HTTP_HOST']
    #keywords=request.GET.get('keywords')
    navlistt = getlbhex()
    keywords_hex = keywords
    keywords = getjiemi(keywords)
    #page=request.GET.get('page')
    if page is None:
        page = 1
    funpage = zz91page()
    limitNum = funpage.limitNum(60)
    nowpage = funpage.nowpage(int(page))
    frompageCount = funpage.frompageCount()
    after_range_num = funpage.after_range_num(5)
    before_range_num = funpage.before_range_num(9)
    newslist = getbbslist(keywords, frompageCount, limitNum, None)
    listcount = newslist['count']
    if int(listcount) > 1000000:
        listcount = 1000000 - 1
    listcount = funpage.listcount(listcount)
    page_listcount = funpage.page_listcount()
    firstpage = funpage.firstpage()
    lastpage = funpage.lastpage()
    page_range = funpage.page_range()
    nextpage = funpage.nextpage()
    prvpage = funpage.prvpage()
    cplist = getcplist('', 50)
    # release database connections before returning the rendered page
    response = render_to_response('news/list.html', locals())
    closeconn()
    return response

def newsindex(request):
    #return HttpResponsePermanentRedirect('http://news.zz91.com/')
    host = request.META['HTTP_HOST']
    offerlist1 = getindexofferlist(None, 1, 10)
    offerlist2 = getindexofferlist(None, 0, 10)
    navlistt = getlbhex()
    response = render_to_response('news/newsindex.html', locals())
    closeconn()
    return response
def newssearchfirst(request):
keywords=request.GET.get('keywords')
keywords_hex=getjiami(keywords)
nowurl="/news/newslist-"+keywords_hex+"-1.html"
return HttpResponseRedirect(nowurl)
|
[
"2496256902@qq.com"
] |
2496256902@qq.com
|
af42c1bf5d7e8dfc842e61148776f07f9e98398d
|
648a5d24157885336999d9eb4a6b5098d14a3cfb
|
/pyoutline/setup.py
|
a18b50d4fa669e4dbd6a8e73964dbd2688a3bca9
|
[
"Apache-2.0"
] |
permissive
|
sqlboy/OpenCue
|
26c82f2be1b69fddff5bc330a951cbbcbd16434f
|
b0faee1d49f52dda076fe03467446f0a0c6ef327
|
refs/heads/master
| 2022-02-15T16:36:46.898562
| 2022-02-01T17:29:03
| 2022-02-01T17:29:03
| 175,265,207
| 0
| 0
|
Apache-2.0
| 2019-06-01T00:28:33
| 2019-03-12T17:34:28
|
Java
|
UTF-8
|
Python
| false
| false
| 2,180
|
py
|
# Copyright Contributors to the OpenCue Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from setuptools import find_packages
from setuptools import setup
pyoutline_dir = os.path.abspath(os.path.dirname(__file__))
version = 'unknown'
possible_version_paths = [
os.path.join(pyoutline_dir, 'VERSION'),
os.path.join(os.path.dirname(pyoutline_dir), 'VERSION'),
]
for possible_version_path in possible_version_paths:
if os.path.exists(possible_version_path):
with open(possible_version_path) as fp:
version = fp.read().strip()
with open(os.path.join(pyoutline_dir, 'README.md')) as fp:
long_description = fp.read()
setup(
name='pyoutline',
version=version,
description='The OpenCue PyOutline library',
long_description=long_description,
long_description_content_type='text/markdown',
url='https://github.com/imageworks/OpenCue',
classifiers=[
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
],
packages=find_packages(exclude=['tests']),
data_files=[
('bin', ['bin/cuerunbase.py', 'bin/pycuerun', 'bin/util_qc_job_layer.py']),
('etc', ['etc/outline.cfg']),
('wrappers', ['wrappers/opencue_wrap_frame', 'wrappers/opencue_wrap_frame_no_ss', 'wrappers/local_wrap_frame']),
],
test_suite='tests',
)
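# Hypothetical local install from the pyoutline directory (a sketch):
#   pip install .
#   python setup.py test   # runs the suite declared in test_suite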
|
[
"noreply@github.com"
] |
sqlboy.noreply@github.com
|
1a84aac2a33453183ae6e6a9144490665a7ddf91
|
809773aa3a7c0cb19fae421289711a26162cbb90
|
/main/migrations/0004_book_is_favorite.py
|
6a6c802ae51312a76aea5c505e209cc212722249
|
[] |
no_license
|
Altynai29/todo
|
237fe76ea32967bdc9afd2e13fd2340a6e9206ce
|
3232d052a776f7765528abdc1e86ae59133d68f6
|
refs/heads/main
| 2023-02-24T20:41:42.903200
| 2021-01-29T05:27:09
| 2021-01-29T05:27:09
| 329,367,897
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 387
|
py
|
# Generated by Django 3.1.3 on 2021-01-24 15:55
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0003_auto_20210124_1211'),
]
operations = [
migrations.AddField(
model_name='book',
name='is_favorite',
field=models.BooleanField(default=False),
),
]
|
[
"altynay-2013@mail.ru git config --global user.name Altynai29"
] |
altynay-2013@mail.ru git config --global user.name Altynai29
|
811be7b606ce611153cf8665ee300c9d58a527ce
|
77bc4b05385fe0374432a6078443da974b22eaa8
|
/blogs/migrations/0001_initial.py
|
23718c82563407084b0546cd8fdc552ba8e29256
|
[] |
no_license
|
Victorjuma/blog
|
85db3a1d5f2d7329bbf64666a284ff49b977d2df
|
bdc28404a08ad3f735bd9756734716350a5aa475
|
refs/heads/main
| 2022-12-30T13:06:42.139922
| 2020-10-21T12:31:51
| 2020-10-21T12:31:51
| 306,014,050
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 935
|
py
|
# Generated by Django 2.2.4 on 2019-12-12 08:40
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='post',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=200)),
('content', models.TextField()),
('date_posted', models.DateTimeField(default=django.utils.timezone.now)),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
|
[
"noreply@github.com"
] |
Victorjuma.noreply@github.com
|
967694d2f9922f26f36d569c22cfc551cafca956
|
ee76b01ff4d8f59873b6a0ff15bbf485d8c6dcd8
|
/snake/speech_rec.py
|
638d3018fa1884662b23faf63f04860f39625488
|
[] |
no_license
|
DurandBastien/robotics2018
|
250bc0ee9f81ae72db245b0a58932e12d64943c4
|
5324fafbbf3a2e5e7f3a7a24780b05435453eaae
|
refs/heads/master
| 2020-04-02T06:57:15.113631
| 2018-12-11T20:55:31
| 2018-12-11T20:55:31
| 153,422,034
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,675
|
py
|
#!/usr/bin/env python
import speech_recognition as sperec
import rospy
from std_msgs.msg import String
def wordlist():
return ["begin", "stop"]
def micListener():
pub = rospy.Publisher('voice_commands', String, queue_size=10)
rospy.init_node('voice_recogniser', anonymous=True)
rospy.loginfo("Node created")
rate = rospy.Rate(10)
while not rospy.is_shutdown():
result = listen()
accepted = False
if (result == "REQUERROR"):
rospy.loginfo("Connection error. Speech recognition will not work until connection reestablished.")
else:
if(result != "NOVD"):
rospy.loginfo(result) #Very much for debugging purposes. Comment out if you don't want the code to report everything it hears.
for word in wordlist():
if (result.find(word) != -1) and (not accepted):
rospy.loginfo(word)
pub.publish(word)
accepted = True
def listen():
rec = sperec.Recognizer()
rospy.loginfo("rec setup")
mic = sperec.Microphone()
rospy.loginfo("mic setup")
with mic as audiosource:
rec.adjust_for_ambient_noise(audiosource, duration=0.7)
rospy.loginfo("ambient noise adjusted")
audio = rec.listen(audiosource)
rospy.loginfo("listened")
    try:
        # return the transcript directly (avoids shadowing the built-in input)
        return rec.recognize_google(audio)
    except sperec.RequestError:
        return "REQUERROR"
    except sperec.UnknownValueError:
        return "NOVD"
if __name__ == '__main__':
try:
micListener()
except rospy.ROSInterruptException:
pass
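# Hypothetical consumer node on the other end of the topic (a sketch; the
# node name is made up); it reacts to the published "begin"/"stop" commands:
#
#   import rospy
#   from std_msgs.msg import String
#
#   def on_command(msg):
#       rospy.loginfo("received command: %s", msg.data)
#
#   rospy.init_node('command_consumer')
#   rospy.Subscriber('voice_commands', String, on_command)
#   rospy.spin()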
|
[
"maija.fil@gmail.com"
] |
maija.fil@gmail.com
|
026e469ebd0249e937555db84273acba3bf99dc7
|
e303a6b7ca952060ebbd54b47db4dc74310e5a9b
|
/robotics/views.py
|
32b0d436c296febe5440a1d6b9e7f7510442f1cf
|
[] |
no_license
|
RobotGyal/Personal-Site
|
17677e848f0b241e5c8383e01d86fab60ef770a4
|
1396f51065723c913e19a512c65c3252fecbe546
|
refs/heads/master
| 2022-12-11T01:36:10.664839
| 2020-06-26T22:02:14
| 2020-06-26T22:02:14
| 228,452,593
| 0
| 0
| null | 2022-12-08T04:22:08
| 2019-12-16T18:45:35
|
HTML
|
UTF-8
|
Python
| false
| false
| 143
|
py
|
from django.shortcuts import render
# Create your views here.
def index(request):
    return render(request, "robotics/robotics_index.html")
|
[
"aleiaknight@gmail.com"
] |
aleiaknight@gmail.com
|
3dff92daba9619acfcd2c57ceb691e806e04bc3d
|
e8a703977d788646d70e807bbfd8fbdb21223209
|
/openrepairplatform/event/migrations/0029_auto_20200208_1231.py
|
075fb3a5966340b6fabfe828dd79b1e0bb4774e0
|
[] |
no_license
|
AtelierSoude/OpenRepairPlatform
|
10f8d8ef8ddfba07b4d84a5eac9f56b3871920ae
|
bc90d25a5879b1c7ef0ed170ad428dffdbc8405f
|
refs/heads/master
| 2023-06-13T23:49:51.136082
| 2021-05-12T12:54:15
| 2021-05-12T12:54:15
| 127,430,187
| 33
| 14
| null | 2023-04-17T08:41:10
| 2018-03-30T13:16:01
|
Python
|
UTF-8
|
Python
| false
| false
| 1,029
|
py
|
# Generated by Django 2.2.3 on 2020-02-08 11:31
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('event', '0028_auto_20200208_1133'),
]
operations = [
migrations.AddField(
model_name='event',
name='booking',
field=models.BooleanField(default=True, verbose_name='This event demands booking ?'),
),
migrations.AddField(
model_name='historicalevent',
name='booking',
field=models.BooleanField(default=True, verbose_name='This event demands booking ?'),
),
migrations.AlterField(
model_name='event',
name='is_free',
field=models.BooleanField(default=False, verbose_name='No booking limit ?'),
),
migrations.AlterField(
model_name='historicalevent',
name='is_free',
field=models.BooleanField(default=False, verbose_name='No booking limit ?'),
),
]
|
[
"clement.poudret@laposte.net"
] |
clement.poudret@laposte.net
|
790a463870e509cc9188f08192725211371987e1
|
54f95342dde22c2efe5e44e2edfecd13bfd98787
|
/ejercicios_obligatorios/ejercicio_1.py
|
5210f43b53042d7ba91c95e0f9b61b56fde17dd8
|
[] |
no_license
|
LucianoBartomioli/-EDU-POO_IRESM_2021
|
3293e98aada2ae8bd3221991a9ac95200f0541bd
|
dfaca205c31b95cf784cd8c04ae8060c9c3950da
|
refs/heads/main
| 2023-04-14T20:44:10.449823
| 2021-05-01T22:13:44
| 2021-05-01T22:13:44
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,070
|
py
|
def cargar_funcion():
    cantidad_espectadores = int(input("Ingrese la cantidad de espectadores: "))
    # normalize the answer so "SI", "Si" and "si" all count as a discount
    descuento = input("Ingrese si la funcion sera con descuento (SI/NO): ").strip().lower()
    funcion = [cantidad_espectadores, descuento]
    return funcion

def calcular_recaudacion_total(funciones):
    total_recaudado = 0
    cantidad_funciones_descuento = 0
    for funcion in funciones:
        cantidad_espectadores = funcion[0]
        descuento_aplicado = funcion[1]
        if descuento_aplicado == "si":
            cantidad_funciones_descuento = cantidad_funciones_descuento + 1
            precio_funcion_descuento = 50
            total_recaudado_por_funcion = cantidad_espectadores * precio_funcion_descuento
        else:
            total_recaudado_por_funcion = cantidad_espectadores * 75
        total_recaudado = total_recaudado + total_recaudado_por_funcion
    return [cantidad_funciones_descuento, total_recaudado]
def opcion_menu():
opcion = int(input("""
---------------MENU-----------
(1) CARGAR FUNCION
(2) CALCULAR RECAUDACION TOTAL
(3) FUNCIONES CON DESCUENTO
(0) PARA SALIR
Ingrese una opcion: """))
return opcion
# Loading ends when a different menu option is selected.
funciones = []
opcion_menu_ = opcion_menu()
while opcion_menu_ != 0:
if opcion_menu_ == 1:
nueva_funcion = cargar_funcion()
funciones.append(nueva_funcion)
elif opcion_menu_ == 2:
total_recaudado = calcular_recaudacion_total(funciones)[1]
print(f"El total recaudado es ${total_recaudado}")
    elif opcion_menu_ == 3:
        # guard against dividing by zero when no shows have been loaded yet
        if funciones:
            cantidad_funciones_descuento = calcular_recaudacion_total(funciones)[0]
            porcentaje_funciones_descuento = cantidad_funciones_descuento * 100 / len(funciones)
            print(f"Se realizaron {cantidad_funciones_descuento} funciones con descuento, que representan el {porcentaje_funciones_descuento}% del total de funciones")
opcion_menu_ = opcion_menu()
|
[
"69654179+LucianoBartomioli@users.noreply.github.com"
] |
69654179+LucianoBartomioli@users.noreply.github.com
|
4f1327a351299a253e5e0205cfb5a447f79abe04
|
564d6a4d305a8ac6a7e01c761831fb2081c02d0f
|
/sdk/servicebus/azure-mgmt-servicebus/azure/mgmt/servicebus/operations/_disaster_recovery_configs_operations.py
|
240dba7e99543e526cd89585ee8329274218e911
|
[
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later",
"MIT"
] |
permissive
|
paultaiton/azure-sdk-for-python
|
69af4d889bac8012b38f5b7e8108707be679b472
|
d435a1a25fd6097454b7fdfbbdefd53e05029160
|
refs/heads/master
| 2023-01-30T16:15:10.647335
| 2020-11-14T01:09:50
| 2020-11-14T01:09:50
| 283,343,691
| 0
| 0
|
MIT
| 2020-07-28T22:43:43
| 2020-07-28T22:43:43
| null |
UTF-8
|
Python
| false
| false
| 36,935
|
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class DisasterRecoveryConfigsOperations(object):
"""DisasterRecoveryConfigsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.servicebus.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def check_name_availability(
self,
resource_group_name, # type: str
namespace_name, # type: str
parameters, # type: "models.CheckNameAvailability"
**kwargs # type: Any
):
# type: (...) -> "models.CheckNameAvailabilityResult"
"""Check the give namespace name availability.
:param resource_group_name: Name of the Resource group within the Azure subscription.
:type resource_group_name: str
:param namespace_name: The namespace name.
:type namespace_name: str
:param parameters: Parameters to check availability of the given namespace name.
:type parameters: ~azure.mgmt.servicebus.models.CheckNameAvailability
:keyword callable cls: A custom type or function that will be passed the direct response
:return: CheckNameAvailabilityResult, or the result of cls(response)
:rtype: ~azure.mgmt.servicebus.models.CheckNameAvailabilityResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.CheckNameAvailabilityResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-04-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.check_name_availability.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'CheckNameAvailability')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('CheckNameAvailabilityResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
check_name_availability.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ServiceBus/namespaces/{namespaceName}/disasterRecoveryConfigs/CheckNameAvailability'} # type: ignore
def list(
self,
resource_group_name, # type: str
namespace_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["models.ArmDisasterRecoveryListResult"]
"""Gets all Alias(Disaster Recovery configurations).
:param resource_group_name: Name of the Resource group within the Azure subscription.
:type resource_group_name: str
:param namespace_name: The namespace name.
:type namespace_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ArmDisasterRecoveryListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.servicebus.models.ArmDisasterRecoveryListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ArmDisasterRecoveryListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-04-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('ArmDisasterRecoveryListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ServiceBus/namespaces/{namespaceName}/disasterRecoveryConfigs'} # type: ignore
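    # A minimal usage sketch (hypothetical names): list() returns an ItemPaged, so the
    # aliases can be consumed with a plain for-loop; page requests happen lazily.
    #
    #   for alias in client.disaster_recovery_configs.list("my-rg", "my-namespace"):
    #       print(alias.name, alias.provisioning_state)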
def create_or_update(
self,
resource_group_name, # type: str
namespace_name, # type: str
alias, # type: str
parameters, # type: "models.ArmDisasterRecovery"
**kwargs # type: Any
):
# type: (...) -> Optional["models.ArmDisasterRecovery"]
"""Creates or updates a new Alias(Disaster Recovery configuration).
:param resource_group_name: Name of the Resource group within the Azure subscription.
:type resource_group_name: str
:param namespace_name: The namespace name.
:type namespace_name: str
:param alias: The Disaster Recovery configuration name.
:type alias: str
:param parameters: Parameters required to create an Alias(Disaster Recovery configuration).
:type parameters: ~azure.mgmt.servicebus.models.ArmDisasterRecovery
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ArmDisasterRecovery, or the result of cls(response)
:rtype: ~azure.mgmt.servicebus.models.ArmDisasterRecovery or None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.ArmDisasterRecovery"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-04-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_or_update.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6),
'alias': self._serialize.url("alias", alias, 'str', max_length=50, min_length=1),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'ArmDisasterRecovery')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ArmDisasterRecovery', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ServiceBus/namespaces/{namespaceName}/disasterRecoveryConfigs/{alias}'} # type: ignore
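    # A minimal usage sketch (hypothetical names and ARM id): pairing is created by
    # pointing the alias at the secondary namespace via partner_namespace.
    #
    #   alias = client.disaster_recovery_configs.create_or_update(
    #       "my-rg", "primary-ns", "my-alias",
    #       models.ArmDisasterRecovery(partner_namespace=secondary_namespace_id))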
def delete(
self,
resource_group_name, # type: str
namespace_name, # type: str
alias, # type: str
**kwargs # type: Any
):
# type: (...) -> None
"""Deletes an Alias(Disaster Recovery configuration).
:param resource_group_name: Name of the Resource group within the Azure subscription.
:type resource_group_name: str
:param namespace_name: The namespace name.
:type namespace_name: str
:param alias: The Disaster Recovery configuration name.
:type alias: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-04-01"
accept = "application/json"
# Construct URL
url = self.delete.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6),
'alias': self._serialize.url("alias", alias, 'str', max_length=50, min_length=1),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ServiceBus/namespaces/{namespaceName}/disasterRecoveryConfigs/{alias}'} # type: ignore
def get(
self,
resource_group_name, # type: str
namespace_name, # type: str
alias, # type: str
**kwargs # type: Any
):
# type: (...) -> "models.ArmDisasterRecovery"
"""Retrieves Alias(Disaster Recovery configuration) for primary or secondary namespace.
:param resource_group_name: Name of the Resource group within the Azure subscription.
:type resource_group_name: str
:param namespace_name: The namespace name.
:type namespace_name: str
:param alias: The Disaster Recovery configuration name.
:type alias: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ArmDisasterRecovery, or the result of cls(response)
:rtype: ~azure.mgmt.servicebus.models.ArmDisasterRecovery
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.ArmDisasterRecovery"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-04-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6),
'alias': self._serialize.url("alias", alias, 'str', max_length=50, min_length=1),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('ArmDisasterRecovery', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ServiceBus/namespaces/{namespaceName}/disasterRecoveryConfigs/{alias}'} # type: ignore
def break_pairing(
self,
resource_group_name, # type: str
namespace_name, # type: str
alias, # type: str
**kwargs # type: Any
):
# type: (...) -> None
"""This operation disables the Disaster Recovery and stops replicating changes from primary to
secondary namespaces.
:param resource_group_name: Name of the Resource group within the Azure subscription.
:type resource_group_name: str
:param namespace_name: The namespace name.
:type namespace_name: str
:param alias: The Disaster Recovery configuration name.
:type alias: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-04-01"
accept = "application/json"
# Construct URL
url = self.break_pairing.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6),
'alias': self._serialize.url("alias", alias, 'str', max_length=50, min_length=1),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
break_pairing.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ServiceBus/namespaces/{namespaceName}/disasterRecoveryConfigs/{alias}/breakPairing'} # type: ignore
def fail_over(
self,
resource_group_name, # type: str
namespace_name, # type: str
alias, # type: str
**kwargs # type: Any
):
# type: (...) -> None
"""Invokes GEO DR failover and reconfigure the alias to point to the secondary namespace.
:param resource_group_name: Name of the Resource group within the Azure subscription.
:type resource_group_name: str
:param namespace_name: The namespace name.
:type namespace_name: str
:param alias: The Disaster Recovery configuration name.
:type alias: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-04-01"
accept = "application/json"
# Construct URL
url = self.fail_over.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6),
'alias': self._serialize.url("alias", alias, 'str', max_length=50, min_length=1),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
fail_over.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ServiceBus/namespaces/{namespaceName}/disasterRecoveryConfigs/{alias}/failover'} # type: ignore
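    # A minimal usage sketch (hypothetical names): break_pairing is typically issued
    # against the primary namespace to stop replication, while fail_over is issued
    # against the secondary namespace to re-point the alias; both return None on 200.
    #
    #   client.disaster_recovery_configs.break_pairing("my-rg", "primary-ns", "my-alias")
    #   client.disaster_recovery_configs.fail_over("my-rg", "secondary-ns", "my-alias")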
def list_authorization_rules(
self,
resource_group_name, # type: str
namespace_name, # type: str
alias, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["models.SBAuthorizationRuleListResult"]
"""Gets the authorization rules for a namespace.
:param resource_group_name: Name of the Resource group within the Azure subscription.
:type resource_group_name: str
:param namespace_name: The namespace name.
:type namespace_name: str
:param alias: The Disaster Recovery configuration name.
:type alias: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either SBAuthorizationRuleListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.servicebus.models.SBAuthorizationRuleListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.SBAuthorizationRuleListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-04-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_authorization_rules.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6),
'alias': self._serialize.url("alias", alias, 'str', max_length=50, min_length=1),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('SBAuthorizationRuleListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_authorization_rules.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ServiceBus/namespaces/{namespaceName}/disasterRecoveryConfigs/{alias}/AuthorizationRules'} # type: ignore
def get_authorization_rule(
self,
resource_group_name, # type: str
namespace_name, # type: str
alias, # type: str
authorization_rule_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "models.SBAuthorizationRule"
"""Gets an authorization rule for a namespace by rule name.
:param resource_group_name: Name of the Resource group within the Azure subscription.
:type resource_group_name: str
:param namespace_name: The namespace name.
:type namespace_name: str
:param alias: The Disaster Recovery configuration name.
:type alias: str
:param authorization_rule_name: The authorization rule name.
:type authorization_rule_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: SBAuthorizationRule, or the result of cls(response)
:rtype: ~azure.mgmt.servicebus.models.SBAuthorizationRule
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.SBAuthorizationRule"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-04-01"
accept = "application/json"
# Construct URL
url = self.get_authorization_rule.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6),
'alias': self._serialize.url("alias", alias, 'str', max_length=50, min_length=1),
'authorizationRuleName': self._serialize.url("authorization_rule_name", authorization_rule_name, 'str', max_length=50, min_length=1),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('SBAuthorizationRule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_authorization_rule.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ServiceBus/namespaces/{namespaceName}/disasterRecoveryConfigs/{alias}/AuthorizationRules/{authorizationRuleName}'} # type: ignore
def list_keys(
self,
resource_group_name, # type: str
namespace_name, # type: str
alias, # type: str
authorization_rule_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "models.AccessKeys"
"""Gets the primary and secondary connection strings for the namespace.
:param resource_group_name: Name of the Resource group within the Azure subscription.
:type resource_group_name: str
:param namespace_name: The namespace name.
:type namespace_name: str
:param alias: The Disaster Recovery configuration name.
:type alias: str
:param authorization_rule_name: The authorization rule name.
:type authorization_rule_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: AccessKeys, or the result of cls(response)
:rtype: ~azure.mgmt.servicebus.models.AccessKeys
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.AccessKeys"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-04-01"
accept = "application/json"
# Construct URL
url = self.list_keys.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'namespaceName': self._serialize.url("namespace_name", namespace_name, 'str', max_length=50, min_length=6),
'alias': self._serialize.url("alias", alias, 'str', max_length=50, min_length=1),
'authorizationRuleName': self._serialize.url("authorization_rule_name", authorization_rule_name, 'str', max_length=50, min_length=1),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('AccessKeys', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ServiceBus/namespaces/{namespaceName}/disasterRecoveryConfigs/{alias}/AuthorizationRules/{authorizationRuleName}/listKeys'} # type: ignore
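    # A minimal usage sketch (hypothetical names): list_keys returns the alias-scoped
    # connection strings, which are meant to keep working across a failover.
    #
    #   keys = client.disaster_recovery_configs.list_keys(
    #       "my-rg", "my-namespace", "my-alias", "RootManageSharedAccessKey")
    #   print(keys.alias_primary_connection_string)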
|
[
"noreply@github.com"
] |
paultaiton.noreply@github.com
|
a8da613a5f7d5bab340289fbc640dee73653ff26
|
0728513cfd064b8f6c130d42ad8ef79f49b6b9b2
|
/xepmts/models/muveto_install.py
|
2c080e258a6e0f1edd9a20bca533ca55fb47df57
|
[] |
no_license
|
XENONnT/pmts-api-client
|
7e70574e45c3e1e639b066513c7f07047ac4dd30
|
2b1025fc6cec01726e2d555f609c148891c6d879
|
refs/heads/master
| 2022-12-10T02:04:12.942994
| 2020-09-27T15:39:09
| 2020-09-27T15:39:09
| 276,297,656
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 27,900
|
py
|
# coding: utf-8
"""
XENON PMT API
API for the XENON PMT database # noqa: E501
The version of the OpenAPI document: 1.0
Contact: joe.mosbacher@gmail.com
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from xepmts.configuration import Configuration
class MuvetoInstall(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'uid': 'str',
'array': 'str',
'detector': 'str',
'experiment': 'str',
'pmt_index': 'int',
'sector': 'int',
'position_x': 'float',
'position_y': 'float',
'position_z': 'float',
'position_r': 'float',
'amplifier_crate': 'int',
'amplifier_fan': 'int',
'amplifier_plug': 'int',
'amplifier_serial': 'int',
'amplifier_slot': 'int',
'amplifier_channel': 'int',
'digitizer_channel': 'int',
'digitizer_crate': 'int',
'digitizer_module': 'int',
'digitizer_slot': 'int',
'high_voltage_crate': 'int',
'high_voltage_board': 'int',
'high_voltage_channel': 'int',
'high_voltage_connector': 'int',
'high_voltage_feedthrough': 'str',
'high_voltage_return': 'int',
'serial_number': 'str',
'signal_channel': 'int',
'signal_connector': 'int',
'signal_feedthrough': 'str',
'id': 'str'
}
attribute_map = {
'uid': 'uid',
'array': 'array',
'detector': 'detector',
'experiment': 'experiment',
'pmt_index': 'pmt_index',
'sector': 'sector',
'position_x': 'position_x',
'position_y': 'position_y',
'position_z': 'position_z',
'position_r': 'position_r',
'amplifier_crate': 'amplifier_crate',
'amplifier_fan': 'amplifier_fan',
'amplifier_plug': 'amplifier_plug',
'amplifier_serial': 'amplifier_serial',
'amplifier_slot': 'amplifier_slot',
'amplifier_channel': 'amplifier_channel',
'digitizer_channel': 'digitizer_channel',
'digitizer_crate': 'digitizer_crate',
'digitizer_module': 'digitizer_module',
'digitizer_slot': 'digitizer_slot',
'high_voltage_crate': 'high_voltage_crate',
'high_voltage_board': 'high_voltage_board',
'high_voltage_channel': 'high_voltage_channel',
'high_voltage_connector': 'high_voltage_connector',
'high_voltage_feedthrough': 'high_voltage_feedthrough',
'high_voltage_return': 'high_voltage_return',
'serial_number': 'serial_number',
'signal_channel': 'signal_channel',
'signal_connector': 'signal_connector',
'signal_feedthrough': 'signal_feedthrough',
'id': '_id'
}
def __init__(self, uid=None, array=None, detector='muveto', experiment='xenonnt', pmt_index=None, sector=None, position_x=None, position_y=None, position_z=None, position_r=None, amplifier_crate=None, amplifier_fan=None, amplifier_plug=None, amplifier_serial=None, amplifier_slot=None, amplifier_channel=None, digitizer_channel=None, digitizer_crate=None, digitizer_module=None, digitizer_slot=None, high_voltage_crate=None, high_voltage_board=None, high_voltage_channel=None, high_voltage_connector=None, high_voltage_feedthrough=None, high_voltage_return=None, serial_number=None, signal_channel=None, signal_connector=None, signal_feedthrough=None, id=None, local_vars_configuration=None): # noqa: E501
"""MuvetoInstall - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._uid = None
self._array = None
self._detector = None
self._experiment = None
self._pmt_index = None
self._sector = None
self._position_x = None
self._position_y = None
self._position_z = None
self._position_r = None
self._amplifier_crate = None
self._amplifier_fan = None
self._amplifier_plug = None
self._amplifier_serial = None
self._amplifier_slot = None
self._amplifier_channel = None
self._digitizer_channel = None
self._digitizer_crate = None
self._digitizer_module = None
self._digitizer_slot = None
self._high_voltage_crate = None
self._high_voltage_board = None
self._high_voltage_channel = None
self._high_voltage_connector = None
self._high_voltage_feedthrough = None
self._high_voltage_return = None
self._serial_number = None
self._signal_channel = None
self._signal_connector = None
self._signal_feedthrough = None
self._id = None
self.discriminator = None
self.uid = uid
if array is not None:
self.array = array
self.detector = detector
self.experiment = experiment
self.pmt_index = pmt_index
if sector is not None:
self.sector = sector
if position_x is not None:
self.position_x = position_x
if position_y is not None:
self.position_y = position_y
if position_z is not None:
self.position_z = position_z
if position_r is not None:
self.position_r = position_r
if amplifier_crate is not None:
self.amplifier_crate = amplifier_crate
if amplifier_fan is not None:
self.amplifier_fan = amplifier_fan
if amplifier_plug is not None:
self.amplifier_plug = amplifier_plug
if amplifier_serial is not None:
self.amplifier_serial = amplifier_serial
if amplifier_slot is not None:
self.amplifier_slot = amplifier_slot
if amplifier_channel is not None:
self.amplifier_channel = amplifier_channel
if digitizer_channel is not None:
self.digitizer_channel = digitizer_channel
if digitizer_crate is not None:
self.digitizer_crate = digitizer_crate
if digitizer_module is not None:
self.digitizer_module = digitizer_module
if digitizer_slot is not None:
self.digitizer_slot = digitizer_slot
if high_voltage_crate is not None:
self.high_voltage_crate = high_voltage_crate
if high_voltage_board is not None:
self.high_voltage_board = high_voltage_board
if high_voltage_channel is not None:
self.high_voltage_channel = high_voltage_channel
if high_voltage_connector is not None:
self.high_voltage_connector = high_voltage_connector
if high_voltage_feedthrough is not None:
self.high_voltage_feedthrough = high_voltage_feedthrough
if high_voltage_return is not None:
self.high_voltage_return = high_voltage_return
if serial_number is not None:
self.serial_number = serial_number
if signal_channel is not None:
self.signal_channel = signal_channel
if signal_connector is not None:
self.signal_connector = signal_connector
if signal_feedthrough is not None:
self.signal_feedthrough = signal_feedthrough
if id is not None:
self.id = id
@property
def uid(self):
"""Gets the uid of this MuvetoInstall. # noqa: E501
:return: The uid of this MuvetoInstall. # noqa: E501
:rtype: str
"""
return self._uid
@uid.setter
def uid(self, uid):
"""Sets the uid of this MuvetoInstall.
:param uid: The uid of this MuvetoInstall. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and uid is None: # noqa: E501
raise ValueError("Invalid value for `uid`, must not be `None`") # noqa: E501
self._uid = uid
@property
def array(self):
"""Gets the array of this MuvetoInstall. # noqa: E501
:return: The array of this MuvetoInstall. # noqa: E501
:rtype: str
"""
return self._array
@array.setter
def array(self, array):
"""Sets the array of this MuvetoInstall.
:param array: The array of this MuvetoInstall. # noqa: E501
:type: str
"""
self._array = array
@property
def detector(self):
"""Gets the detector of this MuvetoInstall. # noqa: E501
:return: The detector of this MuvetoInstall. # noqa: E501
:rtype: str
"""
return self._detector
@detector.setter
def detector(self, detector):
"""Sets the detector of this MuvetoInstall.
:param detector: The detector of this MuvetoInstall. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and detector is None: # noqa: E501
raise ValueError("Invalid value for `detector`, must not be `None`") # noqa: E501
allowed_values = ["tpc", "nveto", "muveto", "unknown"] # noqa: E501
if self.local_vars_configuration.client_side_validation and detector not in allowed_values: # noqa: E501
raise ValueError(
"Invalid value for `detector` ({0}), must be one of {1}" # noqa: E501
.format(detector, allowed_values)
)
self._detector = detector
@property
def experiment(self):
"""Gets the experiment of this MuvetoInstall. # noqa: E501
:return: The experiment of this MuvetoInstall. # noqa: E501
:rtype: str
"""
return self._experiment
@experiment.setter
def experiment(self, experiment):
"""Sets the experiment of this MuvetoInstall.
:param experiment: The experiment of this MuvetoInstall. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and experiment is None: # noqa: E501
raise ValueError("Invalid value for `experiment`, must not be `None`") # noqa: E501
allowed_values = ["xenon1t", "xenonnt", "unknown"] # noqa: E501
if self.local_vars_configuration.client_side_validation and experiment not in allowed_values: # noqa: E501
raise ValueError(
"Invalid value for `experiment` ({0}), must be one of {1}" # noqa: E501
.format(experiment, allowed_values)
)
self._experiment = experiment
@property
def pmt_index(self):
"""Gets the pmt_index of this MuvetoInstall. # noqa: E501
:return: The pmt_index of this MuvetoInstall. # noqa: E501
:rtype: int
"""
return self._pmt_index
@pmt_index.setter
def pmt_index(self, pmt_index):
"""Sets the pmt_index of this MuvetoInstall.
:param pmt_index: The pmt_index of this MuvetoInstall. # noqa: E501
:type: int
"""
if self.local_vars_configuration.client_side_validation and pmt_index is None: # noqa: E501
raise ValueError("Invalid value for `pmt_index`, must not be `None`") # noqa: E501
self._pmt_index = pmt_index
@property
def sector(self):
"""Gets the sector of this MuvetoInstall. # noqa: E501
:return: The sector of this MuvetoInstall. # noqa: E501
:rtype: int
"""
return self._sector
@sector.setter
def sector(self, sector):
"""Sets the sector of this MuvetoInstall.
:param sector: The sector of this MuvetoInstall. # noqa: E501
:type: int
"""
self._sector = sector
@property
def position_x(self):
"""Gets the position_x of this MuvetoInstall. # noqa: E501
:return: The position_x of this MuvetoInstall. # noqa: E501
:rtype: float
"""
return self._position_x
@position_x.setter
def position_x(self, position_x):
"""Sets the position_x of this MuvetoInstall.
:param position_x: The position_x of this MuvetoInstall. # noqa: E501
:type: float
"""
self._position_x = position_x
@property
def position_y(self):
"""Gets the position_y of this MuvetoInstall. # noqa: E501
:return: The position_y of this MuvetoInstall. # noqa: E501
:rtype: float
"""
return self._position_y
@position_y.setter
def position_y(self, position_y):
"""Sets the position_y of this MuvetoInstall.
:param position_y: The position_y of this MuvetoInstall. # noqa: E501
:type: float
"""
self._position_y = position_y
@property
def position_z(self):
"""Gets the position_z of this MuvetoInstall. # noqa: E501
:return: The position_z of this MuvetoInstall. # noqa: E501
:rtype: float
"""
return self._position_z
@position_z.setter
def position_z(self, position_z):
"""Sets the position_z of this MuvetoInstall.
:param position_z: The position_z of this MuvetoInstall. # noqa: E501
:type: float
"""
self._position_z = position_z
@property
def position_r(self):
"""Gets the position_r of this MuvetoInstall. # noqa: E501
:return: The position_r of this MuvetoInstall. # noqa: E501
:rtype: float
"""
return self._position_r
@position_r.setter
def position_r(self, position_r):
"""Sets the position_r of this MuvetoInstall.
:param position_r: The position_r of this MuvetoInstall. # noqa: E501
:type: float
"""
self._position_r = position_r
@property
def amplifier_crate(self):
"""Gets the amplifier_crate of this MuvetoInstall. # noqa: E501
:return: The amplifier_crate of this MuvetoInstall. # noqa: E501
:rtype: int
"""
return self._amplifier_crate
@amplifier_crate.setter
def amplifier_crate(self, amplifier_crate):
"""Sets the amplifier_crate of this MuvetoInstall.
:param amplifier_crate: The amplifier_crate of this MuvetoInstall. # noqa: E501
:type: int
"""
self._amplifier_crate = amplifier_crate
@property
def amplifier_fan(self):
"""Gets the amplifier_fan of this MuvetoInstall. # noqa: E501
:return: The amplifier_fan of this MuvetoInstall. # noqa: E501
:rtype: int
"""
return self._amplifier_fan
@amplifier_fan.setter
def amplifier_fan(self, amplifier_fan):
"""Sets the amplifier_fan of this MuvetoInstall.
:param amplifier_fan: The amplifier_fan of this MuvetoInstall. # noqa: E501
:type: int
"""
self._amplifier_fan = amplifier_fan
@property
def amplifier_plug(self):
"""Gets the amplifier_plug of this MuvetoInstall. # noqa: E501
:return: The amplifier_plug of this MuvetoInstall. # noqa: E501
:rtype: int
"""
return self._amplifier_plug
@amplifier_plug.setter
def amplifier_plug(self, amplifier_plug):
"""Sets the amplifier_plug of this MuvetoInstall.
:param amplifier_plug: The amplifier_plug of this MuvetoInstall. # noqa: E501
:type: int
"""
self._amplifier_plug = amplifier_plug
@property
def amplifier_serial(self):
"""Gets the amplifier_serial of this MuvetoInstall. # noqa: E501
:return: The amplifier_serial of this MuvetoInstall. # noqa: E501
:rtype: int
"""
return self._amplifier_serial
@amplifier_serial.setter
def amplifier_serial(self, amplifier_serial):
"""Sets the amplifier_serial of this MuvetoInstall.
:param amplifier_serial: The amplifier_serial of this MuvetoInstall. # noqa: E501
:type: int
"""
self._amplifier_serial = amplifier_serial
@property
def amplifier_slot(self):
"""Gets the amplifier_slot of this MuvetoInstall. # noqa: E501
:return: The amplifier_slot of this MuvetoInstall. # noqa: E501
:rtype: int
"""
return self._amplifier_slot
@amplifier_slot.setter
def amplifier_slot(self, amplifier_slot):
"""Sets the amplifier_slot of this MuvetoInstall.
:param amplifier_slot: The amplifier_slot of this MuvetoInstall. # noqa: E501
:type: int
"""
self._amplifier_slot = amplifier_slot
@property
def amplifier_channel(self):
"""Gets the amplifier_channel of this MuvetoInstall. # noqa: E501
:return: The amplifier_channel of this MuvetoInstall. # noqa: E501
:rtype: int
"""
return self._amplifier_channel
@amplifier_channel.setter
def amplifier_channel(self, amplifier_channel):
"""Sets the amplifier_channel of this MuvetoInstall.
:param amplifier_channel: The amplifier_channel of this MuvetoInstall. # noqa: E501
:type: int
"""
self._amplifier_channel = amplifier_channel
@property
def digitizer_channel(self):
"""Gets the digitizer_channel of this MuvetoInstall. # noqa: E501
:return: The digitizer_channel of this MuvetoInstall. # noqa: E501
:rtype: int
"""
return self._digitizer_channel
@digitizer_channel.setter
def digitizer_channel(self, digitizer_channel):
"""Sets the digitizer_channel of this MuvetoInstall.
:param digitizer_channel: The digitizer_channel of this MuvetoInstall. # noqa: E501
:type: int
"""
self._digitizer_channel = digitizer_channel
@property
def digitizer_crate(self):
"""Gets the digitizer_crate of this MuvetoInstall. # noqa: E501
:return: The digitizer_crate of this MuvetoInstall. # noqa: E501
:rtype: int
"""
return self._digitizer_crate
@digitizer_crate.setter
def digitizer_crate(self, digitizer_crate):
"""Sets the digitizer_crate of this MuvetoInstall.
:param digitizer_crate: The digitizer_crate of this MuvetoInstall. # noqa: E501
:type: int
"""
self._digitizer_crate = digitizer_crate
@property
def digitizer_module(self):
"""Gets the digitizer_module of this MuvetoInstall. # noqa: E501
:return: The digitizer_module of this MuvetoInstall. # noqa: E501
:rtype: int
"""
return self._digitizer_module
@digitizer_module.setter
def digitizer_module(self, digitizer_module):
"""Sets the digitizer_module of this MuvetoInstall.
:param digitizer_module: The digitizer_module of this MuvetoInstall. # noqa: E501
:type: int
"""
self._digitizer_module = digitizer_module
@property
def digitizer_slot(self):
"""Gets the digitizer_slot of this MuvetoInstall. # noqa: E501
:return: The digitizer_slot of this MuvetoInstall. # noqa: E501
:rtype: int
"""
return self._digitizer_slot
@digitizer_slot.setter
def digitizer_slot(self, digitizer_slot):
"""Sets the digitizer_slot of this MuvetoInstall.
:param digitizer_slot: The digitizer_slot of this MuvetoInstall. # noqa: E501
:type: int
"""
self._digitizer_slot = digitizer_slot
@property
def high_voltage_crate(self):
"""Gets the high_voltage_crate of this MuvetoInstall. # noqa: E501
:return: The high_voltage_crate of this MuvetoInstall. # noqa: E501
:rtype: int
"""
return self._high_voltage_crate
@high_voltage_crate.setter
def high_voltage_crate(self, high_voltage_crate):
"""Sets the high_voltage_crate of this MuvetoInstall.
:param high_voltage_crate: The high_voltage_crate of this MuvetoInstall. # noqa: E501
:type: int
"""
self._high_voltage_crate = high_voltage_crate
@property
def high_voltage_board(self):
"""Gets the high_voltage_board of this MuvetoInstall. # noqa: E501
:return: The high_voltage_board of this MuvetoInstall. # noqa: E501
:rtype: int
"""
return self._high_voltage_board
@high_voltage_board.setter
def high_voltage_board(self, high_voltage_board):
"""Sets the high_voltage_board of this MuvetoInstall.
:param high_voltage_board: The high_voltage_board of this MuvetoInstall. # noqa: E501
:type: int
"""
self._high_voltage_board = high_voltage_board
@property
def high_voltage_channel(self):
"""Gets the high_voltage_channel of this MuvetoInstall. # noqa: E501
:return: The high_voltage_channel of this MuvetoInstall. # noqa: E501
:rtype: int
"""
return self._high_voltage_channel
@high_voltage_channel.setter
def high_voltage_channel(self, high_voltage_channel):
"""Sets the high_voltage_channel of this MuvetoInstall.
:param high_voltage_channel: The high_voltage_channel of this MuvetoInstall. # noqa: E501
:type: int
"""
self._high_voltage_channel = high_voltage_channel
@property
def high_voltage_connector(self):
"""Gets the high_voltage_connector of this MuvetoInstall. # noqa: E501
:return: The high_voltage_connector of this MuvetoInstall. # noqa: E501
:rtype: int
"""
return self._high_voltage_connector
@high_voltage_connector.setter
def high_voltage_connector(self, high_voltage_connector):
"""Sets the high_voltage_connector of this MuvetoInstall.
:param high_voltage_connector: The high_voltage_connector of this MuvetoInstall. # noqa: E501
:type: int
"""
self._high_voltage_connector = high_voltage_connector
@property
def high_voltage_feedthrough(self):
"""Gets the high_voltage_feedthrough of this MuvetoInstall. # noqa: E501
:return: The high_voltage_feedthrough of this MuvetoInstall. # noqa: E501
:rtype: str
"""
return self._high_voltage_feedthrough
@high_voltage_feedthrough.setter
def high_voltage_feedthrough(self, high_voltage_feedthrough):
"""Sets the high_voltage_feedthrough of this MuvetoInstall.
:param high_voltage_feedthrough: The high_voltage_feedthrough of this MuvetoInstall. # noqa: E501
:type: str
"""
self._high_voltage_feedthrough = high_voltage_feedthrough
@property
def high_voltage_return(self):
"""Gets the high_voltage_return of this MuvetoInstall. # noqa: E501
:return: The high_voltage_return of this MuvetoInstall. # noqa: E501
:rtype: int
"""
return self._high_voltage_return
@high_voltage_return.setter
def high_voltage_return(self, high_voltage_return):
"""Sets the high_voltage_return of this MuvetoInstall.
:param high_voltage_return: The high_voltage_return of this MuvetoInstall. # noqa: E501
:type: int
"""
self._high_voltage_return = high_voltage_return
@property
def serial_number(self):
"""Gets the serial_number of this MuvetoInstall. # noqa: E501
:return: The serial_number of this MuvetoInstall. # noqa: E501
:rtype: str
"""
return self._serial_number
@serial_number.setter
def serial_number(self, serial_number):
"""Sets the serial_number of this MuvetoInstall.
:param serial_number: The serial_number of this MuvetoInstall. # noqa: E501
:type: str
"""
self._serial_number = serial_number
@property
def signal_channel(self):
"""Gets the signal_channel of this MuvetoInstall. # noqa: E501
:return: The signal_channel of this MuvetoInstall. # noqa: E501
:rtype: int
"""
return self._signal_channel
@signal_channel.setter
def signal_channel(self, signal_channel):
"""Sets the signal_channel of this MuvetoInstall.
:param signal_channel: The signal_channel of this MuvetoInstall. # noqa: E501
:type: int
"""
self._signal_channel = signal_channel
@property
def signal_connector(self):
"""Gets the signal_connector of this MuvetoInstall. # noqa: E501
:return: The signal_connector of this MuvetoInstall. # noqa: E501
:rtype: int
"""
return self._signal_connector
@signal_connector.setter
def signal_connector(self, signal_connector):
"""Sets the signal_connector of this MuvetoInstall.
:param signal_connector: The signal_connector of this MuvetoInstall. # noqa: E501
:type: int
"""
self._signal_connector = signal_connector
@property
def signal_feedthrough(self):
"""Gets the signal_feedthrough of this MuvetoInstall. # noqa: E501
:return: The signal_feedthrough of this MuvetoInstall. # noqa: E501
:rtype: str
"""
return self._signal_feedthrough
@signal_feedthrough.setter
def signal_feedthrough(self, signal_feedthrough):
"""Sets the signal_feedthrough of this MuvetoInstall.
:param signal_feedthrough: The signal_feedthrough of this MuvetoInstall. # noqa: E501
:type: str
"""
self._signal_feedthrough = signal_feedthrough
@property
def id(self):
"""Gets the id of this MuvetoInstall. # noqa: E501
:return: The id of this MuvetoInstall. # noqa: E501
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this MuvetoInstall.
:param id: The id of this MuvetoInstall. # noqa: E501
:type: str
"""
self._id = id
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, MuvetoInstall):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, MuvetoInstall):
return True
return self.to_dict() != other.to_dict()
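# A minimal usage sketch, assuming this module is importable as
# xepmts.models.muveto_install; only the required fields are set here, and the
# detector/experiment defaults come from the constructor above:
#
#   m = MuvetoInstall(uid="mv0", pmt_index=0, sector=1)
#   print(m.detector, m.experiment)    # 'muveto' 'xenonnt'
#   print(m.to_dict()["pmt_index"])    # 0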
|
[
"joe.mosbacher@gmail.com"
] |
joe.mosbacher@gmail.com
|
5f460f92e70ed6bfc681d172052de35abcd84187
|
cf5b2850dc9794eb0fc11826da4fd3ea6c22e9b1
|
/xlsxwriter/test/comparison/test_background07.py
|
fca888e205ddcc9f1423cc81a0c2e895dc901b59
|
[
"BSD-2-Clause"
] |
permissive
|
glasah/XlsxWriter
|
bcf74b43b9c114e45e1a3dd679b5ab49ee20a0ec
|
1e8aaeb03000dc2f294ccb89b33806ac40dabc13
|
refs/heads/main
| 2023-09-05T03:03:53.857387
| 2021-11-01T07:35:46
| 2021-11-01T07:35:46
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,229
|
py
|
###############################################################################
#
# Tests for XlsxWriter.
#
# SPDX-License-Identifier: BSD-2-Clause
# Copyright (c), 2013-2021, John McNamara, jmcnamara@cpan.org
#
from ..excel_comparison_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.set_filename('background07.xlsx')
self.ignore_elements = {'xl/worksheets/sheet1.xml': ['<pageSetup']}
def test_create_file(self):
"""Test the creation of an XlsxWriter file with a background image."""
workbook = Workbook(self.got_filename)
worksheet = workbook.add_worksheet()
worksheet.insert_image('E9', self.image_dir + 'logo.jpg')
worksheet.set_background(self.image_dir + 'logo.jpg')
worksheet.set_header('&C&G',
{'image_center': self.image_dir + 'blue.jpg'})
worksheet.write('A1', 'Foo')
worksheet.write_comment('B2', 'Some text')
worksheet.set_comments_author('John')
workbook.close()
self.assertExcelEqual()
|
[
"jmcnamara@cpan.org"
] |
jmcnamara@cpan.org
|
9e10939f1b20275173127d0fb1b3bf20a2eb28e3
|
1da4f005a7e54b99c25a74a327c8ee633870823b
|
/exercises/lambdas/fallout/solution.py
|
aae1e23ff241d1a5a21af0bd8826f8c0564740cc
|
[] |
no_license
|
r0588667/scripting
|
2b4492adadd978be4e981049ffcec78f6dce96e9
|
9936e27ce42ce301a3b6711c80fcb92973864248
|
refs/heads/master
| 2021-01-17T14:50:40.624994
| 2017-02-14T16:00:02
| 2017-02-14T16:00:02
| 84,099,474
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 404
|
py
|
def count_common_chars(s1, s2):
return sum( 1 for c1, c2 in zip(s1, s2) if c1 == c2 )
def hack(candidates, attempt):
while len(candidates) > 1:
candidate = candidates[0]
n = attempt(candidate)
if n == len(candidate):
return candidate
candidates = [ c for c in candidates if count_common_chars(c, candidate) == n ]
return candidates[0]
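# A minimal demo (hypothetical word list) mimicking the Fallout terminal puzzle:
# the attempt callback reports how many positions a guess shares with the secret.
#
#   secret = 'BRAIN'
#   words = ['TRAIN', 'BRAIN', 'GRAIN', 'CHAIR', 'BREAD']
#   hack(words, lambda guess: count_common_chars(guess, secret))  # -> 'BRAIN'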
|
[
"frederic.vogels@gmail.com"
] |
frederic.vogels@gmail.com
|
20b68f21b4023ad55842c33799923546c44734dc
|
6e8f2e28479566dbaa338300b2d61f784ff83f97
|
/.history/code/datasetup_20210418122549.py
|
3c4a70465143ba39751486a36c9df470bf528691
|
[] |
no_license
|
eeng5/CV-final-project
|
55a7d736f75602858233ebc380c4e1d67ab2b866
|
580e28819560b86f6974959efb1d31ef138198fc
|
refs/heads/main
| 2023-04-09T21:28:21.531293
| 2021-04-21T19:57:22
| 2021-04-21T19:57:22
| 352,703,734
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,462
|
py
|
from PIL import Image
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import pandas as pd
import cv2
import os
import glob
from pathlib import Path
def cleanTestDirs():
emotions = ['angry', 'happy', 'disgust', 'sad', 'neutral', 'surprise', 'fear']
for e in emotions:
pathy = '/Users/Natalie/Desktop/cs1430/CV-final-project/data/test/'+e
for f in Path(pathy).glob('*.jpg'):
try:
#f.unlink()
os.remove(f)
            except OSError as err:  # renamed to avoid shadowing the emotion loop variable
                print("Error: %s : %s" % (f, err.strerror))
def cleanTrainDirs():
emotions = ['angry', 'happy', 'disgust', 'sad', 'neutral', 'surprise', 'fear']
for e in emotions:
pathy = '/Users/Natalie/Desktop/cs1430/CV-final-project/data/train/'+e
for f in Path(pathy).glob('*.jpg'):
try:
#f.unlink()
os.remove(f)
            except OSError as err:  # renamed to avoid shadowing the emotion loop variable
                print("Error: %s : %s" % (f, err.strerror))
def cleanAll():
cleanTestDirs()
cleanTrainDirs()
def createPixelArray(arr):
arr = list(map(int, arr.split()))
array = np.array(arr, dtype=np.uint8)
array = array.reshape((48, 48))
return array
def equalize_hist(img):
img = cv2.equalizeHist(img)
return img
def showImages(imgs):
_, axs = plt.subplots(1, len(imgs), figsize=(20, 20))
axs = axs.flatten()
for img, ax in zip(imgs, axs):
ax.imshow(img,cmap=plt.get_cmap('gray'))
plt.show()
def augmentIMG(img, task):
imgs = [img]
img1 = equalize_hist(img)
imgs.append(img1)
    if task == 1:  # callers pass an int task id, so compare against 1 (not the string '1')
        img2 = cv2.bilateralFilter(img1, d=9, sigmaColor=75, sigmaSpace=75)
        imgs.append(img2)
        # sharpening kernel
        kernel = np.array([[-1.0, -1.0, -1.0],
                           [-1.0, 9, -1.0],
                           [-1.0, -1.0, -1.0]])
        img3 = cv2.filter2D(img2, -1, kernel)
        imgs.append(img3)
        img4 = equalize_hist(img3)
        imgs.append(img4)
        img5 = cv2.bilateralFilter(img4, d=9, sigmaColor=100, sigmaSpace=100)
        imgs.append(img5)
    img6 = cv2.flip(img, 1)  # flip horizontally
    imgs.append(img6)
    return imgs
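# A quick sanity check of the augmentation above (hypothetical 48x48 input): task 1
# should yield 7 images (original, five filtered variants, horizontal flip); any
# other task yields 3.
#
#   dummy = np.zeros((48, 48), dtype=np.uint8)
#   assert len(augmentIMG(dummy, 1)) == 7
#   assert len(augmentIMG(dummy, 3)) == 3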
def saveIMG(arr, num, folderLoc):
im = Image.fromarray(arr)
    filename = folderLoc + "image_" + num + ".jpg"
im.save(filename)
def createTrain(emotion_dict, task):
df = pd.read_csv('/Users/Natalie/Desktop/cs1430/CV-final-project/data/train.csv') # CHANGE ME
base_filename = "/Users/Natalie/Desktop/cs1430/CV-final-project/data/train/" # CHANGE ME
for index, row in df.iterrows():
px = row['pixels']
emot = int(row['emotion'])
emot_loc = emotion_dict[emot]
filename = base_filename + emot_loc
img = createPixelArray(px)
img_arr = augmentIMG(img, task)
idx = 0
for i in img_arr:
num = str(index) + "_" + str(idx)
idx +=1
saveIMG(i, num, filename)
def createTest(emotion_dict , task):
df = pd.read_csv('/Users/Natalie/Desktop/cs1430/CV-final-project/data/icml_face_data.csv') # CHANGE ME
base_filename = "/Users/Natalie/Desktop/cs1430/CV-final-project/data/test/" # CHANGE ME
for index, row in df.iterrows():
if (row[' Usage'] == "PublicTest"):
px = row[' pixels']
emot = int(row['emotion'])
emot_loc = emotion_dict[emot]
filename = base_filename + emot_loc
img = createPixelArray(px)
img_arr = augmentIMG(img, task)
idx = 0
for i in img_arr:
num = str(index) + "_" + str(idx)
idx +=1
saveIMG(i, num, filename)
def createEmotionDict():
emotionDict = {}
emotionDict[0]="angry/"
emotionDict[1]="disgust/"
emotionDict[2]="fear/"
emotionDict[3]="happy/"
emotionDict[4]="sad/"
emotionDict[5]="surprise/"
emotionDict[6] = "neutral/"
return emotionDict
def createSimpleData():
cleanAll()
print("Cleaning done")
emot_dict = createEmotionDict()
createTrain(emot_dict, 1)
print("Training done")
createTest(emot_dict, 1)
print("Testing done")
def createComplexData():
cleanAll()
emot_dict = createEmotionDict()
createTrain(emot_dict, 3)
createTest(emot_dict, 3)
def main():
# cleanAll()
# print("Cleaning done")
emot_dict = createEmotionDict()
createTrain(emot_dict, 1)
print("Training done")
createTest(emot_dict, 1)
print("Testing done")
if __name__ == '__main__':
main()
|
[
"natalie_rshaidat@brown.edu"
] |
natalie_rshaidat@brown.edu
|
0f4d6064aa4fdf28f521cb742739d1ff19af1469
|
38422c3edeb269926502fed31a0761aff8dd3d3b
|
/Scatterometer/Old_files/Scatterometer_funksjoner.py
|
b5f43231a8460d713e41c5f3738e7ab38799670a
|
[] |
no_license
|
vfurtula/Alle-projekter
|
2dab3ccbf7ddb6be3ee09f9f5e87085f354dd84a
|
da3d7c9611088043e2aea5d844f1ae6056215e04
|
refs/heads/master
| 2022-06-07T05:17:35.327228
| 2020-04-30T10:28:48
| 2020-04-30T10:28:48
| 260,180,957
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 22,581
|
py
|
#import matplotlib.pyplot as plt
#from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
#from matplotlib.backends.backend_qt4agg import NavigationToolbar2QT as NavigationToolbar
import numpy
import scipy.optimize as scop
class Scatterometer_funksjoner:
def __init__(self):
pass
def Mueller_Matrix_polarizer_error(self,px,py):
M_pol=0.5*numpy.array([[px**2+py**2,px**2-py**2,0,0],
[px**2-py**2,px**2+py**2,0,0],
[0,0,2*px*py,0],
[0,0,0,2*px*py]])
return M_pol
def Changebasis3(self,m,c):
#Rewritten by LM
#H matrix calculated with Maple
H=numpy.array([[m[0][0]-c[0][0], -c[1][0], -c[2][0], -c[3][0], m[0][1], 0, 0, 0, m[0][2], 0, 0, 0, m[0][3], 0, 0, 0],
[-c[0][1], m[0][0]-c[1][1], -c[2][1], -c[3][1], 0, m[0][1], 0, 0, 0, m[0][2], 0, 0, 0, m[0][3], 0, 0],
[-c[0][2], -c[1][2], m[0][0]-c[2][2], -c[3][2], 0, 0, m[0][1], 0, 0, 0, m[0][2], 0, 0, 0, m[0][3], 0],
[-c[0][3], -c[1][3], -c[2][3], m[0][0]-c[3][3], 0, 0, 0, m[0][1], 0, 0, 0, m[0][2], 0, 0, 0, m[0][3]],
[m[1][0], 0, 0, 0, m[1][1]-c[0][0], -c[1][0], -c[2][0], -c[3][0], m[1][2], 0, 0, 0, m[1][3], 0, 0, 0],
[0, m[1][0], 0, 0, -c[0][1], m[1][1]-c[1][1], -c[2][1], -c[3][1], 0, m[1][2], 0, 0, 0, m[1][3], 0, 0],
[0, 0, m[1][0], 0, -c[0][2], -c[1][2], m[1][1]-c[2][2], -c[3][2], 0, 0, m[1][2], 0, 0, 0, m[1][3], 0],
[0, 0, 0, m[1][0], -c[0][3], -c[1][3], -c[2][3], m[1][1]-c[3][3], 0, 0, 0, m[1][2], 0, 0, 0, m[1][3]],
[m[2][0], 0, 0, 0, m[2][1], 0, 0, 0, m[2][2]-c[0][0], -c[1][0], -c[2][0], -c[3][0], m[2][3], 0, 0, 0],
[0, m[2][0], 0, 0, 0, m[2][1], 0, 0, -c[0][1], m[2][2]-c[1][1], -c[2][1], -c[3][1], 0, m[2][3], 0, 0],
[0, 0, m[2][0], 0, 0, 0, m[2][1], 0, -c[0][2], -c[1][2], m[2][2]-c[2][2], -c[3][2], 0, 0, m[2][3], 0],
[0, 0, 0, m[2][0], 0, 0, 0, m[2][1], -c[0][3], -c[1][3], -c[2][3], m[2][2]-c[3][3], 0, 0, 0, m[2][3]],
[m[3][0], 0, 0, 0, m[3][1], 0, 0, 0, m[3][2], 0, 0, 0, m[3][3]-c[0][0], -c[1][0], -c[2][0], -c[3][0]],
[0, m[3][0], 0, 0, 0, m[3][1], 0, 0, 0, m[3][2], 0, 0, -c[0][1], m[3][3]-c[1][1], -c[2][1], -c[3][1]],
[0, 0, m[3][0], 0, 0, 0, m[3][1], 0, 0, 0, m[3][2], 0, -c[0][2], -c[1][2], m[3][3]-c[2][2], -c[3][2]],
[0, 0, 0, m[3][0], 0, 0, 0, m[3][1], 0, 0, 0, m[3][2], -c[0][3], -c[1][3], -c[2][3], m[3][3]-c[3][3]]])
return H
def changebasis3A(self,m,c):
#Rewritten by LM
#H matrix calculated with Maple
H=numpy.array([[m[0][0]-c[0][0], m[1][0], m[2][0], m[3][0], -c[0][1], 0, 0, 0, -c[0][2], 0, 0, 0, -c[0][3], 0, 0, 0],
[m[0][1], m[1][1]-c[0][0], m[2][1], m[3][1], 0, -c[0][1], 0, 0, 0, -c[0][2], 0, 0, 0, -c[0][3], 0, 0],
[m[0][2], m[1][2], m[2][2]-c[0][0], m[3][2], 0, 0, -c[0][1], 0, 0, 0, -c[0][2], 0, 0, 0, -c[0][3], 0],
[m[0][3], m[1][3], m[2][3], m[3][3]-c[0][0], 0, 0, 0, -c[0][1], 0, 0, 0, -c[0][2], 0, 0, 0, -c[0][3]],
[-c[1][0], 0, 0, 0, m[0][0]-c[1][1], m[1][0], m[2][0], m[3][0], -c[1][2], 0, 0, 0, -c[1][3], 0, 0, 0],
[0, -c[1][0], 0, 0, m[0][1], m[1][1]-c[1][1], m[2][1], m[3][1], 0, -c[1][2], 0, 0, 0, -c[1][3], 0, 0],
[0, 0, -c[1][0], 0, m[0][2], m[1][2], m[2][2]-c[1][1], m[3][2], 0, 0, -c[1][2], 0, 0, 0, -c[1][3], 0],
[0, 0, 0, -c[1][0], m[0][3], m[1][3], m[2][3], m[3][3]-c[1][1], 0, 0, 0, -c[1][2], 0, 0, 0, -c[1][3]],
[-c[2][0], 0, 0, 0, -c[2][1], 0, 0, 0, m[0][0]-c[2][2], m[1][0], m[2][0], m[3][0], -c[2][3], 0, 0, 0],
[0, -c[2][0], 0, 0, 0, -c[2][1], 0, 0, m[0][1], m[1][1]-c[2][2], m[2][1], m[3][1], 0, -c[2][3], 0, 0],
[0, 0, -c[2][0], 0, 0, 0, -c[2][1], 0, m[0][2], m[1][2], m[2][2]-c[2][2], m[3][2], 0, 0, -c[2][3], 0],
[0, 0, 0, -c[2][0], 0, 0, 0, -c[2][1], m[0][3], m[1][3], m[2][3], m[3][3]-c[2][2], 0, 0, 0, -c[2][3]],
[-c[3][0], 0, 0, 0, -c[3][1], 0, 0, 0, -c[3][2], 0, 0, 0, m[0][0]-c[3][3], m[1][0], m[2][0], m[3][0]],
[0, -c[3][0], 0, 0, 0, -c[3][1], 0, 0, 0, -c[3][2], 0, 0, m[0][1], m[1][1]-c[3][3], m[2][1], m[3][1]],
[0, 0, -c[3][0], 0, 0, 0, -c[3][1], 0, 0, 0, -c[3][2], 0, m[0][2], m[1][2], m[2][2]-c[3][3], m[3][2]],
[0, 0, 0, -c[3][0], 0, 0, 0, -c[3][1], 0, 0, 0, -c[3][2], m[0][3], m[1][3], m[2][3], m[3][3]-c[3][3]]])
return H
def ConvertNxMby1To4byMMatrix(self,vec,m):
if len(vec)==4*m:
M=numpy.zeros((4,m))
for i in range(4):
M[i][:] = vec[i*m:(i+1)*m]
return M
else:
return None
def FindEigenvalueRatio4forAandW2(self,y,InMatrisesW,InMatrisesA,theta1):
errorKAW=self.FindEigenvalueRatio4forA2(InMatrisesA,theta1,y)+self.FindEigenvalueRatio5(InMatrisesW,theta1,y)
return errorKAW
def FindEigenvalueRatio4forA2(self,InMatrises,theta1,y):
MC1=InMatrises[0:4][0:4]
C1=InMatrises[4:8][0:4]
MC2=InMatrises[8:12][0:4]
C2=InMatrises[12:16][0:4]
MC3=InMatrises[16:20][0:4]
C3=InMatrises[20:24][0:4]
H1=self.changebasis3A(self.MuellerRotation_inline(MC1,theta1),C1)
H2=self.changebasis3A(self.MuellerRotation_inline(MC2,y[0]),C2)
H3=self.changebasis3A(self.MuellerRotation_inline(MC3,y[1]),C3)
K=numpy.dot(H1.T,H1)+numpy.dot(H2.T,H2)+numpy.dot(H3.T,H3)
u_,s_,v_=numpy.linalg.svd(K)
EIGK=numpy.sort(s_)
EigenvalueRatioV=numpy.zeros(16)
for h in numpy.arange(1,len(EIGK)):
EigenvalueRatioV[h] = EIGK[0]/EIGK[h]
EigenvalueRatio2=numpy.sum(EigenvalueRatioV)
return EigenvalueRatio2
def FindEigenvalueRatio5(self,InMatrises,theta1,y):
MC1=InMatrises[0:4][0:4]
C1=InMatrises[4:8][0:4]
MC2=InMatrises[8:12][0:4]
C2=InMatrises[12:16][0:4]
MC3=InMatrises[16:20][0:4]
C3=InMatrises[20:24][0:4]
H1=self.Changebasis3(self.MuellerRotation_inline(MC1,theta1),C1)
H2=self.Changebasis3(self.MuellerRotation_inline(MC2,y[0]),C2)
H3=self.Changebasis3(self.MuellerRotation_inline(MC3,y[1]),C3)
K=numpy.dot(H1.T,H1)+numpy.dot(H2.T,H2)+numpy.dot(H3.T,H3)
u_,s_,v_=numpy.linalg.svd(K)
EIGK=numpy.sort(s_)
EigenvalueRatioV=numpy.zeros(16)
for h in numpy.arange(1,len(EIGK)):
EigenvalueRatioV[h] = EIGK[0]/EIGK[h]
EigenvalueRatio2=numpy.sum(EigenvalueRatioV)
return EigenvalueRatio2
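# Illustrative sketch (not part of the original code): the merit function above
# rewards a K whose smallest eigenvalue is well separated from the rest, since a
# correctly calibrated system leaves exactly one near-null direction. For a toy
# symmetric K:
#   K = numpy.diag([1e-8, 1.0, 2.0, 3.0])
#   s = numpy.sort(numpy.linalg.svd(K)[1])       # [1e-8, 1.0, 2.0, 3.0]
#   sum(s[0]/s[h] for h in range(1, 4))          # ~1.8e-8, i.e. close to zero
# Poorly chosen rotation angles mix the null space and drive this sum up.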
def EVCSamplePropertiesForAandW_MK2(self,C1W,C2W,C3W,C1A,C2A,C3A,THETA0,opt,mode,MC1Rold,MC2Rold,MC3Rold,thetaold):
# Eigenvalue calibration method for Mueller matrix ellipsometry
# Written after Applied Optics vol. 38, no. 16, 1999
#eml.extrinsic('fminegen');
#if nargin==8:
# mode='r' #added 230407,JL,311007 tB
##################### FINDS EIGENVALUES, OK as of 10.10.2006 ###########################
# Eigenvalue of the polarizer, along with the r and p values.
tauPol=(numpy.trace(C1W)+numpy.trace(C2W))/2
MC1=tauPol*self.Mueller_Matrix_polarizer_error(1,0)
MC2=MC1
#################################################################
# Eigenvalue and phase of the retarder.
eig_vals, eig_vecs = numpy.linalg.eig(C3W)
sortedEg = self.sortEig(eig_vals)
# PSIM=[numpy.arctan(numpy.sqrt(numpy.real(sortedEg[0])),numpy.sqrt(numpy.real(sortedEg[1]))) numpy.arctan(numpy.sqrt(numpy.real(sortedEg[1])),numpy.sqrt(numpy.real(sortedEg[0])))];
PSIM=numpy.array([numpy.arctan2(numpy.real(numpy.sqrt(sortedEg[0])),numpy.real(numpy.sqrt(sortedEg[1]))),
numpy.arctan2(numpy.real(numpy.sqrt(sortedEg[1])),numpy.real(numpy.sqrt(sortedEg[0])))])
DELTAM=0.5*numpy.array([numpy.angle(sortedEg[2]/sortedEg[3]),numpy.angle(sortedEg[3]/sortedEg[2])])
# TAU=numpy.trace(C3)  # bug? can give tau>1
TAU = (sortedEg[0]+sortedEg[1])/2
MC3 = self.Mueller_Matrix_ppret(TAU,PSIM[1],DELTAM[1]+numpy.pi,mode)
# DELTA must be checked here, since for retardances above 90 deg the eigenvalues give the wrong value...
# if opt==1,
############################################################################
# Determine the rotation of the polarizer
MC1R=MC1Rold
MC2R=MC2Rold
MC3R=MC3Rold
THETA=thetaold
# if rot==1:
############################################################################
# Determine the rotation of the polarizer
InMatrisesW=numpy.vstack([MC1,C1W,MC2,C2W,MC3,C3W])
InMatrisesA=numpy.vstack([MC1,C1A,MC2,C2A,MC3,C3A])
#options = optimset('Display','off','LargeScale','on','MaxFunEvals',8000,'TolX',1e-15)
#options = optimset('Display','off','LevenbergMarquardt','on','LargeScale','off','MaxFunEvals',8000,'TolX',1e-15);
#options = optimset('Display','off','lsqnonneg','on','LargeScale','off','MaxFunEvals',8000,'TolX',1e-15);
if opt==0:
THETA=THETA0
elif opt==1:
theta1=THETA0[0]
y0=numpy.array([THETA0[1], THETA0[2]])
# [y1,y2,FinalEigenvalueRatio]=fminegen(InMatrisesW,InMatrisesA,theta1,y0);
# Lars's fminegen has not been found. Perhaps it is on the NIR MMI machine?
# THETA=[theta1,y1,y2];
y = scop.fmin(self.FindEigenvalueRatio4forAandW2,y0,args=(InMatrisesW,InMatrisesA,theta1),xtol=1e-15)
THETA=[theta1,y[0],y[1]]
else:
pass
MC1R=self.MuellerRotation_inline(MC1,THETA[0])
MC2R=self.MuellerRotation_inline(MC2,THETA[1])
MC3R=self.MuellerRotation_inline(MC3,THETA[2])
# end
return MC1R,MC2R,THETA,MC3R,TAU,PSIM,DELTAM,tauPol
def sortEig(self,Ein):
def is_real(val):
if numpy.isreal(val)==True:
return 1
else:
return 0
Ein=numpy.sort(Ein)[::-1]
# Sort the eigenvalues
antallimag=is_real(Ein[0])+is_real(Ein[1])+is_real(Ein[2])+is_real(Ein[3])
# NumReal=sum(imag(Ein)==0);
Es=numpy.zeros(4)+1j*numpy.zeros(4)
if antallimag==2:
j=0
k=2
for i in range(4):
if numpy.isreal(Ein[i]):
Es[j]=Ein[i]
j=j+1
else:
Es[k]=Ein[i]
k=k+1
else:
Ein_imag_abs=numpy.abs(numpy.imag(Ein))
pos = numpy.argsort(Ein_imag_abs)
Es[0]=Ein[pos[0]]
Es[1]=Ein[pos[1]]
Es[2]=Ein[pos[2]]
Es[3]=Ein[pos[3]]
Es[2:4]=numpy.sort(Es[2:4])
# [V2 pos]=sort(real(Es(3:4)));
# Es2(3)=Es(pos(1));
# Es2(4)=Es(pos(2));
Es[0:2]=numpy.sort(Es[0:2])[::-1]
return Es
def Mueller_Matrix_ppret(self,tau,psi,delta,mode):
# M=Mueller_Matrix_ppret(tau,psi,delta,mode)
# Returns the Mueller matrix for a retarder with a phase delta between the fast and slow axes
# Frantz Stabo-Eeg
# M=tau*0.5*[1,-cos(2*psi),0,0;-cos(2*psi),1,0,0;0,0,sin(2*psi)*cos(delta),sin(2*psi)*sin(delta);0,0,-sin(2*psi)*sin(delta),sin(2*psi)*cos(delta)];
#Jarle 200407, factor 0.5 removed
#230407, mode added.
#if nargin==3:
# mode = 'r'
if mode=='r':
tr=-1
elif mode=='t':
tr=1
else:
tr=-1
M=tau*numpy.array([[1,tr*numpy.cos(2*psi),0,0],[tr*numpy.cos(2*psi),1,0,0],
[0,0,numpy.sin(2*psi)*numpy.cos(delta),numpy.sin(2*psi)*numpy.sin(delta)],
[0,0,-numpy.sin(2*psi)*numpy.sin(delta),numpy.sin(2*psi)*numpy.cos(delta)]])
return M
def MuellerRotation_inline(self,M,theta):
M = numpy.dot(numpy.dot(self.Mueller_rotator(-theta),M),self.Mueller_rotator(theta))
return M
def Mueller_rotator(self,theta):
M = numpy.array([[1,0,0,0], [0, numpy.cos(2*theta), numpy.sin(2*theta), 0],
[0, -numpy.sin(2*theta), numpy.cos(2*theta), 0], [0,0,0,1]])
return M
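# Quick sanity sketch (an added example, not original code): rotating an ideal
# horizontal polarizer by 45 degrees should give the +45 deg polarizer matrix:
#   sf = Scatterometer_funksjoner()
#   M_h = 0.5*numpy.array([[1,1,0,0],[1,1,0,0],[0,0,0,0],[0,0,0,0]])
#   sf.MuellerRotation_inline(M_h, numpy.pi/4)
#   # -> 0.5*[[1,0,1,0],[0,0,0,0],[1,0,1,0],[0,0,0,0]]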
def Mueller_Matrix_Rotate(self,M,theta):
# Mueller_Matrix_Rotate(M,theta)
# Calculates the rotated Mueller matrix M, rotated by an angle theta
# Frantz Stabo-Eeg
M_P=numpy.array([[1,0,0,0],[0,numpy.cos(2*theta),-numpy.sin(2*theta),0],
[0,numpy.sin(2*theta),numpy.cos(2*theta),0],[0,0,0,1]])
M_N=numpy.array([[1,0,0,0],[0,numpy.cos(2*theta),numpy.sin(2*theta),0],
[0,-numpy.sin(2*theta),numpy.cos(2*theta),0],[0,0,0,1]])
M_theta=numpy.dot(numpy.dot(M_P,M),M_N)
return M_theta
def MuellerMatrix_depolIndex(self,M):
# function [Pd]=MuellerMatrix_depolIndex(M)
# Calculates the depolarization index Pd of a Mueller matrix M.
# J. J. Gil and E. Bernabeu, Journal of Modern Optics 33, 185 (1986).
# Note that the depolarization index is not always equivalent to the
# geometrical average of the degree of polarization
# R. A. Chipman, Appl. Opt. 44, 2490 (2005).
#Pd=sqrt((sum(sum(M.^2))-M(1,1)^2)/(3*M(1,1)^2));
Pd=numpy.sqrt(numpy.sum(numpy.sum(numpy.square(M)))-M[0][0]**2)/(numpy.sqrt(3)*M[0][0]) #Corrected 2007.11.07
# Alternativley can the polindex be calculated from the eigenvalues
# H=MullerCoherencyMatrixConversion(M);
# # sorting the eigenvalues
# [W D]=eig(H);
# [lambda]=[D(1,1), D(2,2), D(3,3),D(4,4)];
# [lambda2 sortI]=sort(abs(lambda),'descend');
# D2=zeros(4,4);
# for k=1:4
# W2(:,k)=W(:,sortI(k));
# D2(k,k)=lambda(sortI(k));
# P(k)=lambda2(k)/sum(lambda2);
# end
#
# Pd2=sqrt(1/3*(4*sum(P.^2)-1));
return Pd
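# Worked sketch of the two limiting cases of the Gil-Bernabeu index (added example):
#   sf = Scatterometer_funksjoner()
#   sf.MuellerMatrix_depolIndex(numpy.diag([1.,0.,0.,0.]))  # ideal depolarizer -> 0.0
#   sf.MuellerMatrix_depolIndex(numpy.eye(4))               # non-depolarizing  -> 1.0
# since sum(M**2) = 4*M[0][0]**2 holds for any pure (Mueller-Jones) matrix.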
def matrixFilteringCloude2(self,M):
# function [Mf dBNoise DeltaM DeltaH]=matrixFilteringCloude2(M)
# By Frantz Stabo-Eeg last updated 2007.09.03
# Mf physically realizable filtered matrix
# dBNoise eigenvalue ratio of positive over negative eigenvalues of the
# coherency matrix
# DeltaM=norm(M-Mf,'fro');
# References
# Opt Eng vol 34 no 6 p1599 (1995), by Shane R Cloude
# Cloude SPIE vol. 1166 (1989)
DeltaM=0
###########################################################################
# Calculation of the Systems Coherency matrix H
###########################################################################
# H=MullerCoherencyMatrixConversion(M);
H=self.coherencyConversion(M)
W2=numpy.zeros((4,4))+1j*numpy.zeros((4,4))
# sorting the eigen values
D,W = numpy.linalg.eig(H)
sortI=numpy.argsort(numpy.abs(D))[::-1]
D2=numpy.zeros((4,4))+1j*numpy.zeros((4,4))
for k in range(4):
W2[:,k]=W[:,sortI[k]]  # column assignment, matching the MATLAB W2(:,k)=W(:,sortI(k)); W2[:][k] would index rows
D2[k][k]=D[sortI[k]]
###########################################################################
# Noise filtering of the coherency matrix and entropy from Cloude SPIE vol. 1166
# (1989)
# Tested with values from from Cloude SPIE vol. 1166 (1989)
# ok
lambdap=0+1j*0
lambdan=0+1j*0
for k in range(4):
if D2[k][k]<0:
lambdan=D2[k][k]+lambdan
D2[k][k]=0
else:
lambdap=D2[k][k]+lambdap
dBfidelity=-10*numpy.log10(lambdap/numpy.abs(lambdan)) # in dB, added 6 March
H2=numpy.dot(numpy.dot(W2,D2),numpy.conj(W2.T))
DeltaH=numpy.linalg.norm(H-H2) # Evaluation of measurements uncertainties
###########################################################################
###########################################################################
# Transforming back to filtered Mueller-Jones Matrix
# J.Phys Appl. Phys 29 p34-38 (1996)
Mf=numpy.real(self.coherencyConversion(H2))
# Mf=real(MullerCoherencyMatrixConversion(H2));
Mf=Mf/Mf[0][0]
# DeltaM=norm(M-Mf,'fro');
return Mf, dBfidelity, DeltaM, DeltaH
def coherencyConversion(self,M):
# pauli=zeros(2,2,4);
# pauli(:,:,1)=[1,0;0,1];
# pauli(:,:,2)=[1,0;0,-1];
# pauli(:,:,3)=[0,1;1,0];
# pauli(:,:,4)=[0,-1i;1i,0];
#
# H=zeros(4,4);
#
# for k=1:4
# for j=1:4
# H=H+(0.5*M(k,j)*kron(squeeze(pauli(:,:,k)),conj(squeeze(pauli(:,:,j)))));
# end
# end
# for k=1:4
# for j=1:4
# kron(squeeze(pauli(:,:,k)),conj(squeeze(pauli(:,:,j))))
# end
# end
##
H=numpy.zeros((4,4))+1j*numpy.zeros((4,4))
eta=numpy.zeros((16,4,4))+1j*numpy.zeros((16,4,4))
eta[0][:][:]=numpy.array([[1,0,0,0],[0,1,0,0],[0,0,1,0],[0,0,0,1]])
eta[1][:][:]=numpy.array([[0,1,0,0],[1,0,0,0],[0,0,0,1j],[0,0,-1j,0]])
eta[2][:][:]=numpy.array([[0,0,1,0],[0,0,0,-1j],[1,0,0,0,],[0,1j,0,0]])
eta[3][:][:]=numpy.array([[0,0,0,1],[0,0,1j,0],[0,-1j,0,0],[1,0,0,0]])
eta[4][:][:]=numpy.array([[0,1,0,0,],[1,0,0,0,],[0,0,0,-1j],[0,0,1j,0,]])
eta[5][:][:]=numpy.array([[1,0,0,0],[0,1,0,0],[0,0,-1,0],[0,0,0,-1]])
eta[6][:][:]=numpy.array([[0,0,0,-1j],[0,0,1,0],[0,1,0,0],[1j,0,0,0]])
eta[7][:][:]=numpy.array([[0,0,1j,0],[0,0,0,1],[-1j,0,0,0],[0,1,0,0]])
eta[8][:][:]=numpy.array([[0,0,1,0],[0,0,0,1j],[1,0,0,0],[0,-1j,0,0]])
eta[9][:][:]=numpy.array([[0,0,0,1j],[0,0,1,0],[0,1,0,0],[-1j,0,0,0]])
eta[10][:][:]=numpy.array([[1,0,0,0],[0,-1,0,0],[0,0,1,0],[0,0,0,-1]])
eta[11][:][:]=numpy.array([[0,-1j,0,0],[1j,0,0,0],[0,0,0,1],[0,0,1,0]])
eta[12][:][:]=numpy.array([[0,0,0,1],[0,0,-1j,0],[0,1j,0,0],[1,0,0,0]])
eta[13][:][:]=numpy.array([[0,0,-1j,0],[0,0,0,1],[1j,0,0,0],[0,1,0,0]])
eta[14][:][:]=numpy.array([[0,1j,0,0],[-1j,0,0,0],[0,0,0,1],[0,0,1,0]])
eta[15][:][:]=numpy.array([[1,0,0,0],[0,-1,0,0],[0,0,-1,0],[0,0,0,1]])
for i in range(4):
for j in range(4):
H=H+0.5*M[i][j]*eta[4*i+j][:][:]
return H
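# Sanity sketch (illustrative, not original code): for the identity Mueller matrix the
# coherency matrix comes out rank one, as expected for a non-depolarizing pure Jones system:
#   sf = Scatterometer_funksjoner()
#   H = sf.coherencyConversion(numpy.eye(4))   # -> diag(2, 0, 0, 0)
#   numpy.linalg.matrix_rank(H)                # -> 1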
def return_calib_params(self,B0,B1,B2,B3,THETA0):
#addpath(genpath('C:\MatlabMM-hg'));
condB0 = numpy.linalg.cond(B0) # checks the condition number directly on B0?
MC1R=numpy.zeros((4,4))
MC2R=numpy.zeros((4,4))
MC3R=numpy.zeros((4,4))
VEC_THETAM = numpy.zeros(3)
C1W = numpy.linalg.solve(B0,B1)
C2W = numpy.linalg.solve(B0,B2)
C3W = numpy.linalg.solve(B0,B3)
C1A = numpy.linalg.solve(B0.T,B1.T).T
C2A = numpy.linalg.solve(B0.T,B2.T).T
C3A = numpy.linalg.solve(B0.T,B3.T).T
opt=1
MC1R,MC2R,THETA,MC3R,TAU,PSIM,DELTAM,tauPol=self.EVCSamplePropertiesForAandW_MK2(C1W,C2W,C3W,C1A,C2A,C3A,THETA0*numpy.pi/180,opt,'t',MC1R,MC2R,MC3R,VEC_THETAM)
#[MC1Rold,MC2Rold,THETAMold,MC3Rold,TAUold,PSIMold,DELTAMold,tauPolold]=EVCSamplePropertiesForAandW_LM(C1W,C2W,C3W,C1A,C2A,C3A,THETA0,opt,'t');
# Final Calculation
H1W=self.Changebasis3(MC1R,C1W) # Independent of A
H2W=self.Changebasis3(MC2R,C2W)
H3W=self.Changebasis3(MC3R,C3W)
KW = numpy.dot(H1W.T,H1W)+numpy.dot(H2W.T,H2W)+numpy.dot(H3W.T,H3W)
EIGKW, VectorEig = numpy.linalg.eig(KW)
pos = EIGKW.argmin()
EIGKW=numpy.sort(EIGKW)
errorKW=numpy.sqrt(EIGKW[0]/EIGKW[1])
n,m = B0.shape
W = self.ConvertNxMby1To4byMMatrix(VectorEig[:,pos],m)
condW = numpy.linalg.cond(W)
# Check the condition number. If it is 1000 or larger, the matrix is treated
# as singular for practical purposes. The user will be notified later
# about the issue. tb.
if condW<1000:
W=numpy.real(W/W[0][0])
AI=numpy.linalg.solve(W.T,B0.T).T
# doing the same for A; 311007 tB
H1A=self.changebasis3A(MC1R,C1A) # Independent of W
H2A=self.changebasis3A(MC2R,C2A)
H3A=self.changebasis3A(MC3R,C3A)
KA = numpy.dot(H1A.T,H1A)+numpy.dot(H2A.T,H2A)+numpy.dot(H3A.T,H3A)
EIGKA, VectorEig = numpy.linalg.eig(KA)
pos = EIGKA.argmin()
EIGKA=numpy.sort(EIGKA)
errorKA=numpy.sqrt(EIGKA[0]/EIGKA[1])
n,m = B0.shape
A=self.ConvertNxMby1To4byMMatrix(VectorEig[:,pos],m)
condA = numpy.linalg.cond(A)
# Check the condition number. If it is 1000 or larger, the matrix is treated
# as singular for practical purposes. The user will be notified later
# about the issue. tb.
if condA<1000:
A=numpy.real(A/A[0][0])
WI=numpy.linalg.solve(A,B0)
scaling=numpy.linalg.norm(B0,2)/numpy.linalg.norm(numpy.dot(A,W),2) # New scaling, FSE
#print "scaling", scaling
A=A*numpy.sqrt(scaling)
W=W*numpy.sqrt(scaling)
###############################################################################
errorK = (errorKW + errorKA)/2 # (use some mean error)
#THETAM*180/pi
#condB0=condnr(B0)
#condW=condnr(W);
#condA=condnr(A);
# These are now in principle converting to old variable names
M0 = numpy.linalg.solve(W.T,numpy.linalg.solve(A,B0).T).T
M1 = numpy.linalg.solve(W.T,numpy.linalg.solve(A,B1).T).T
M2 = numpy.linalg.solve(W.T,numpy.linalg.solve(A,B2).T).T
M3 = numpy.linalg.solve(W.T,numpy.linalg.solve(A,B3).T).T
THETAM=THETA
#THETAM=pi*THETA/180;
Mf0, Entrophy0, DeltaM, DeltaH = self.matrixFilteringCloude2(M0)
Mf1, Entrophy1, DeltaM, DeltaH = self.matrixFilteringCloude2(M1)
Mf2, Entrophy2, DeltaM, DeltaH = self.matrixFilteringCloude2(M2)
Mf3, Entrophy3, DeltaM, DeltaH = self.matrixFilteringCloude2(M3)
M2ut=self.Mueller_Matrix_Rotate(M2,-THETAM[1])
M3ut=self.Mueller_Matrix_Rotate(M3,-THETAM[2])
Pd0 = self.MuellerMatrix_depolIndex(Mf0)
Pd1 = self.MuellerMatrix_depolIndex(Mf1)
Pd2 = self.MuellerMatrix_depolIndex(Mf2)
Pd3 = self.MuellerMatrix_depolIndex(Mf3)
return A, W, condB0, [M0,M1,M2,M3]
def measure_single_Matrix(self,Bsingle,A,W):
#addpath('Y:\Phd\Lab maalinger\EVC Mueller\#MullerMatrixMaalinger\Analyse av #Maaleoppsett');
#addpath('Y:\Phd\Matlab\polardecompostion');
Msingle = numpy.linalg.solve(W.T,numpy.linalg.solve(A,Bsingle).T).T
Mf, Entrophy, DeltaM, DeltaH = self.matrixFilteringCloude2(Msingle)
Pd = self.MuellerMatrix_depolIndex(Mf)
return Msingle, Mf, Entrophy, DeltaM, DeltaH, Pd
def test():
sf=Scatterometer_funksjoner()
B0=numpy.array([[4.1255,0.6310,6.7022,6.7677],[0.8191,7.5681,4.3100,7.0470],[3.7190,4.1148,7.6192,0.5092],[7.3032,3.5817,0.5545,4.0887]])
B1=numpy.array([[0.2010,0.1997,0.8529,0.5011],[1.0802,6.5608,4.4944,2.6172],[0.8551,5.4221,3.7119,2.1429],[0.2653,1.5821,1.0219,0.6140]])
B2=numpy.array([[5.0157,1.0076,1.7487,5.0128],[1.6775,0.3586,0.5522,1.6555],[1.1441,0.2244,0.3533,1.0905],[4.7943,1.0330,1.6344,4.8927]])
B3=numpy.array([[3.1867,2.6811,5.9539,0.5649],[3.0612,5.9736,1.4207,3.2600],[6.3661,1.2321,1.6104,3.5465],[0.8754,2.0146,3.7022,5.5458]])
THETA0=numpy.array([0,85,55])
A, W, condB0, M = sf.return_calib_params(B0,B1,B2,B3,THETA0)
print "A:", A
print "W:", W
print "condB0:", condB0
print "M0:", M[0]
print "M1:", M[1]
print "M2:", M[2]
print "M3:", M[3]
Bsingle=numpy.random.rand(4,4)
Msingle, Mf, Entrophy, DeltaM, DeltaH, Pd = sf.measure_single_Matrix(Bsingle,A,W)
print "Msingle:", Msingle
print "Entrophy:", Entrophy
print "Pd:", Pd
if __name__=="__main__":
test()
|
[
"root@localhost.localdomain"
] |
root@localhost.localdomain
|
b090bcaad8e91125686471f5311f4a26beb58fc7
|
4e66ac1db7baf1c7b01f20140478615e39526035
|
/basic.py
|
271e4b0793ad3019156a298780b1b9e83f1b81ed
|
[
"MIT"
] |
permissive
|
sid230798/Doom_Reinforcement_Learning
|
1c33ff738e95898b4f52af5cb32d8184b0251083
|
eb4972a54389e0f9d07a34c302ec6e6256c6084d
|
refs/heads/master
| 2020-05-22T15:11:40.284019
| 2020-05-03T07:38:25
| 2020-05-03T07:38:25
| 186,402,515
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 18,503
|
py
|
from include import *
def create_environment():
game = DoomGame()
# Load the correct configuration
game.load_config("basic.cfg")
# Load the correct scenario (in our case basic scenario)
game.set_doom_scenario_path("basic.wad")
game.init()
# Here our possible actions
left = [1, 0, 0]
right = [0, 1, 0]
shoot = [0, 0, 1]
possible_actions = [left, right, shoot]
return game, possible_actions
"""
Here we perform random actions to test the environment
"""
def test_environment():
game = DoomGame()
game.load_config("basic.cfg")
game.set_doom_scenario_path("basic.wad")
game.init()
shoot = [0, 0, 1]
left = [1, 0, 0]
right = [0, 1, 0]
actions = [shoot, left, right]
episodes = 10
for i in range(episodes):
game.new_episode()
while not game.is_episode_finished():
state = game.get_state()
img = state.screen_buffer
misc = state.game_variables
action = random.choice(actions)
print(action)
reward = game.make_action(action)
print ("\treward:", reward)
time.sleep(0.02)
print ("Result:", game.get_total_reward())
time.sleep(2)
game.close()
game, possible_actions = create_environment()
def preprocess_frame(frame):
# Greyscale frame already done in our vizdoom config
# x = np.mean(frame,-1)
# Crop the screen (remove the roof because it contains no information)
cropped_frame = frame[30:-10,30:-30]
# Normalize Pixel Values
normalized_frame = cropped_frame/255.0
# Resize
preprocessed_frame = transform.resize(normalized_frame, [84,84])
return preprocessed_frame
stack_size = 4 # We stack 4 frames
# Initialize deque with zero-images one array for each image
stacked_frames = deque([np.zeros((84,84), dtype=np.int) for i in range(stack_size)], maxlen=4)
def stack_frames(stacked_frames, state, is_new_episode):
# Preprocess frame
frame = preprocess_frame(state)
if is_new_episode:
# Clear our stacked_frames
stacked_frames = deque([np.zeros((84,84), dtype=np.int) for i in range(stack_size)], maxlen=4)
# Because we're in a new episode, copy the same frame 4x
stacked_frames.append(frame)
stacked_frames.append(frame)
stacked_frames.append(frame)
stacked_frames.append(frame)
# Stack the frames
stacked_state = np.stack(stacked_frames, axis=2)
else:
# Append frame to deque, automatically removes the oldest frame
stacked_frames.append(frame)
# Build the stacked state (first dimension specifies different frames)
stacked_state = np.stack(stacked_frames, axis=2)
return stacked_state, stacked_frames
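# Shape sketch (illustrative; assumes a dummy 240x320 grayscale buffer like vizdoom's):
# frame = np.random.randint(0, 255, (240, 320))
# state, stacked_frames = stack_frames(stacked_frames, frame, True)
# state.shape -> (84, 84, 4): the same preprocessed frame copied four times, so the
# very first stacked state carries no motion information yet.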
### MODEL HYPERPARAMETERS
state_size = [84,84,4] # Our input is a stack of 4 frames hence 84x84x4 (Width, height, channels)
action_size = game.get_available_buttons_size() # 3 possible actions: left, right, shoot
learning_rate = 0.0002 # Alpha (aka learning rate)
### TRAINING HYPERPARAMETERS
total_episodes = 20 # Total episodes for training
max_steps = 100 # Max possible steps in an episode
batch_size = 64
# Exploration parameters for epsilon greedy strategy
explore_start = 1.0 # exploration probability at start
explore_stop = 0.01 # minimum exploration probability
decay_rate = 0.0001 # exponential decay rate for exploration prob
# Q learning hyperparameters
gamma = 0.95 # Discounting rate
### MEMORY HYPERPARAMETERS
pretrain_length = batch_size # Number of experiences stored in the Memory when initialized for the first time
memory_size = 1000000 # Number of experiences the Memory can keep
### MODIFY THIS TO FALSE IF YOU JUST WANT TO SEE THE TRAINED AGENT
training = True
## TURN THIS TO TRUE IF YOU WANT TO RENDER THE ENVIRONMENT
episode_render = False
class DQNetwork:
def __init__(self, state_size, action_size, learning_rate, name='DQNetwork'):
self.state_size = state_size
self.action_size = action_size
self.learning_rate = learning_rate
with tf.variable_scope(name):
# We create the placeholders
# *state_size means that we take each element of state_size in the tuple, as if we wrote
# [None, 84, 84, 4]
self.inputs_ = tf.placeholder(tf.float32, [None, *state_size], name="inputs")
self.actions_ = tf.placeholder(tf.float32, [None, 3], name="actions_")
# Remember that target_Q is R(s,a) + gamma * max_a' Qhat(s', a')
self.target_Q = tf.placeholder(tf.float32, [None], name="target")
"""
First convnet:
CNN
BatchNormalization
ELU
"""
# Input is 84x84x4
self.conv1 = tf.layers.conv2d(inputs = self.inputs_,
filters = 32,
kernel_size = [8,8],
strides = [4,4],
padding = "VALID",
kernel_initializer=tf.contrib.layers.xavier_initializer_conv2d(),
name = "conv1")
self.conv1_batchnorm = tf.layers.batch_normalization(self.conv1,
training = True,
epsilon = 1e-5,
name = 'batch_norm1')
self.conv1_out = tf.nn.elu(self.conv1_batchnorm, name="conv1_out")
## --> [20, 20, 32]
"""
Second convnet:
CNN
BatchNormalization
ELU
"""
self.conv2 = tf.layers.conv2d(inputs = self.conv1_out,
filters = 64,
kernel_size = [4,4],
strides = [2,2],
padding = "VALID",
kernel_initializer=tf.contrib.layers.xavier_initializer_conv2d(),
name = "conv2")
self.conv2_batchnorm = tf.layers.batch_normalization(self.conv2,
training = True,
epsilon = 1e-5,
name = 'batch_norm2')
self.conv2_out = tf.nn.elu(self.conv2_batchnorm, name="conv2_out")
## --> [9, 9, 64]
"""
Third convnet:
CNN
BatchNormalization
ELU
"""
self.conv3 = tf.layers.conv2d(inputs = self.conv2_out,
filters = 128,
kernel_size = [4,4],
strides = [2,2],
padding = "VALID",
kernel_initializer=tf.contrib.layers.xavier_initializer_conv2d(),
name = "conv3")
self.conv3_batchnorm = tf.layers.batch_normalization(self.conv3,
training = True,
epsilon = 1e-5,
name = 'batch_norm3')
self.conv3_out = tf.nn.elu(self.conv3_batchnorm, name="conv3_out")
## --> [3, 3, 128]
self.flatten = tf.layers.flatten(self.conv3_out)
## --> [1152]
self.fc = tf.layers.dense(inputs = self.flatten,
units = 512,
activation = tf.nn.elu,
kernel_initializer=tf.contrib.layers.xavier_initializer(),
name="fc1")
self.output = tf.layers.dense(inputs = self.fc,
kernel_initializer=tf.contrib.layers.xavier_initializer(),
units = 3,
activation=None)
# Q is our predicted Q value.
self.Q = tf.reduce_sum(tf.multiply(self.output, self.actions_), axis=1)
# The loss is the difference between our predicted Q_values and the Q_target
# Sum(Qtarget - Q)^2
self.loss = tf.reduce_mean(tf.square(self.target_Q - self.Q))
self.optimizer = tf.train.RMSPropOptimizer(self.learning_rate).minimize(self.loss)
# Reset the graph
tf.reset_default_graph()
# Instantiate the DQNetwork
DQNetwork = DQNetwork(state_size, action_size, learning_rate)
class Memory():
def __init__(self, max_size):
self.buffer = deque(maxlen = max_size)
def add(self, experience):
self.buffer.append(experience)
def sample(self, batch_size):
buffer_size = len(self.buffer)
index = np.random.choice(np.arange(buffer_size),
size = batch_size,
replace = False)
return [self.buffer[i] for i in index]
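# Usage sketch (illustrative): the buffer stores whole transitions, and sampling is
# uniform without replacement, which breaks the temporal correlation between frames:
# m = Memory(max_size=100)
# m.add((state, action, reward, next_state, done))
# batch = m.sample(batch_size=1)   # -> list of (s, a, r, s', done) tuples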
# Instantiate memory
memory = Memory(max_size = memory_size)
# Render the environment
game.new_episode()
for i in range(pretrain_length):
# If it's the first step
if i == 0:
# First we need a state
state = game.get_state().screen_buffer
state, stacked_frames = stack_frames(stacked_frames, state, True)
# Random action
action = random.choice(possible_actions)
# Get the rewards
reward = game.make_action(action)
# Look if the episode is finished
done = game.is_episode_finished()
# If we're dead
if done:
# We finished the episode
next_state = np.zeros(state.shape)
# Add experience to memory
memory.add((state, action, reward, next_state, done))
# Start a new episode
game.new_episode()
# First we need a state
state = game.get_state().screen_buffer
# Stack the frames
state, stacked_frames = stack_frames(stacked_frames, state, True)
else:
# Get the next state
next_state = game.get_state().screen_buffer
next_state, stacked_frames = stack_frames(stacked_frames, next_state, False)
# Add experience to memory
memory.add((state, action, reward, next_state, done))
# Our state is now the next_state
state = next_state
# Setup TensorBoard Writer
writer = tf.summary.FileWriter("./tensorboard/dqn/1")
## Losses
tf.summary.scalar("Loss", DQNetwork.loss)
write_op = tf.summary.merge_all()
"""
This function implements the epsilon-greedy step:
with probability epsilon select a random action a_t, otherwise select a_t = argmax_a Q(s_t, a)
"""
def predict_action(explore_start, explore_stop, decay_rate, decay_step, state, actions):
## EPSILON GREEDY STRATEGY
# Choose action a from state s using epsilon greedy.
## First we randomize a number
exp_exp_tradeoff = np.random.rand()
# Here we'll use an improved version of our epsilon greedy strategy used in Q-learning notebook
explore_probability = explore_stop + (explore_start - explore_stop) * np.exp(-decay_rate * decay_step)
if (explore_probability > exp_exp_tradeoff):
# Make a random action (exploration)
action = random.choice(possible_actions)
else:
# Get action from Q-network (exploitation)
# Estimate the Qs values state
Qs = sess.run(DQNetwork.output, feed_dict = {DQNetwork.inputs_: state.reshape((1, *state.shape))})
# Take the biggest Q value (= the best action)
choice = np.argmax(Qs)
action = possible_actions[int(choice)]
return action, explore_probability
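# Worked numbers for the schedule above (sketch) with explore_start=1.0,
# explore_stop=0.01, decay_rate=0.0001:
#   decay_step = 0      -> explore_probability = 1.000
#   decay_step = 10000  -> 0.01 + 0.99*exp(-1) ~ 0.374
#   decay_step = 50000  -> 0.01 + 0.99*exp(-5) ~ 0.017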
# Saver will help us to save our model
saver = tf.train.Saver()
if training == True:
with tf.Session() as sess:
# Initialize the variables
sess.run(tf.global_variables_initializer())
# Initialize the decay rate (that will be used to reduce epsilon)
decay_step = 0
loss = 0.0  # so the episode summary can print before the first training update has run
# Init the game
game.init()
for episode in range(total_episodes):
# Set step to 0
step = 0
# Initialize the rewards of the episode
episode_rewards = []
# Make a new episode and observe the first state
game.new_episode()
state = game.get_state().screen_buffer
# Remember that the stack_frames function also calls our preprocess function.
state, stacked_frames = stack_frames(stacked_frames, state, True)
while step < max_steps:
step += 1
# Increase decay_step
decay_step +=1
# Predict the action to take and take it
action, explore_probability = predict_action(explore_start, explore_stop, decay_rate, decay_step, state, possible_actions)
# Do the action
reward = game.make_action(action)
# Look if the episode is finished
done = game.is_episode_finished()
# Add the reward to total reward
episode_rewards.append(reward)
# If the game is finished
if done:
# the episode ends so no next state
next_state = np.zeros((84,84), dtype=np.int)
next_state, stacked_frames = stack_frames(stacked_frames, next_state, False)
# Set step = max_steps to end the episode
step = max_steps
# Get the total reward of the episode
total_reward = np.sum(episode_rewards)
print('Episode: {}'.format(episode),
'Total reward: {}'.format(total_reward),
'Training loss: {:.4f}'.format(loss),
'Explore P: {:.4f}'.format(explore_probability))
memory.add((state, action, reward, next_state, done))
else:
# Get the next state
next_state = game.get_state().screen_buffer
# Stack the frame of the next_state
next_state, stacked_frames = stack_frames(stacked_frames, next_state, False)
# Add experience to memory
memory.add((state, action, reward, next_state, done))
# st+1 is now our current state
state = next_state
### LEARNING PART
# Obtain random mini-batch from memory
batch = memory.sample(batch_size)
states_mb = np.array([each[0] for each in batch], ndmin=3)
actions_mb = np.array([each[1] for each in batch])
rewards_mb = np.array([each[2] for each in batch])
next_states_mb = np.array([each[3] for each in batch], ndmin=3)
dones_mb = np.array([each[4] for each in batch])
target_Qs_batch = []
# Get Q values for next_state
Qs_next_state = sess.run(DQNetwork.output, feed_dict = {DQNetwork.inputs_: next_states_mb})
# Set Q_target = r if the episode ends at s+1, otherwise set Q_target = r + gamma*maxQ(s', a')
for i in range(0, len(batch)):
terminal = dones_mb[i]
# If we are in a terminal state, the target only equals the reward
if terminal:
target_Qs_batch.append(rewards_mb[i])
else:
target = rewards_mb[i] + gamma * np.max(Qs_next_state[i])
target_Qs_batch.append(target)
targets_mb = np.array([each for each in target_Qs_batch])
loss, _ = sess.run([DQNetwork.loss, DQNetwork.optimizer],
feed_dict={DQNetwork.inputs_: states_mb,
DQNetwork.target_Q: targets_mb,
DQNetwork.actions_: actions_mb})
# Write TF Summaries
summary = sess.run(write_op, feed_dict={DQNetwork.inputs_: states_mb,
DQNetwork.target_Q: targets_mb,
DQNetwork.actions_: actions_mb})
writer.add_summary(summary, episode)
writer.flush()
# Save model every 5 episodes
if episode % 5 == 0:
save_path = saver.save(sess, "./models/model.ckpt")
print("Model Saved")
with tf.Session() as sess:
game, possible_actions = create_environment()
totalScore = 0
# Load the model
saver.restore(sess, "./models/model.ckpt")
game.init()
for i in range(1):
game.new_episode()
while not game.is_episode_finished():
frame = game.get_state().screen_buffer
state, stacked_frames = stack_frames(stacked_frames, frame, True)  # stack_frames returns (state, frames)
# Take the biggest Q value (= the best action)
Qs = sess.run(DQNetwork.output, feed_dict = {DQNetwork.inputs_: state.reshape((1, *state.shape))})
action = np.argmax(Qs)
action = possible_actions[int(action)]
game.make_action(action)
score = game.get_total_reward()
print("Score: ", score)
totalScore += score
print("TOTAL_SCORE", totalScore/100.0)
game.close()
|
[
"noreply@github.com"
] |
sid230798.noreply@github.com
|
aa53f6246723dc53711b5a6a1effb0d247e695f4
|
28e8a7db08e074864d60f7dcc24bba6d3e323a11
|
/gradient_optimizer.py
|
8edfd20275a30680a129d72abb87e73d66a0cf77
|
[
"BSD-3-Clause"
] |
permissive
|
RainZhang94/super-engine
|
ebc86d503b805538b5d346b6e5dd477e2440823d
|
630472933713dc2ac034f99e77edee8db335b5f0
|
refs/heads/master
| 2020-12-24T12:20:37.063895
| 2016-11-07T07:48:33
| 2016-11-07T07:48:33
| 73,053,043
| 0
| 0
| null | 2016-11-07T07:31:06
| 2016-11-07T07:31:05
| null |
UTF-8
|
Python
| false
| false
| 1,006
|
py
|
import numpy as np
import theano
import theano.tensor as T
class GradientOptimizer:
def __init__(self, lr):
self.lr = lr
def __call__(self, cost, params):
pass
@property
def learningRate(self):
return self.lr
@learningRate.setter
def learningRate(self, i):
self.lr = i
class RMSprop(GradientOptimizer):
def __init__(self, lr=0.01, rho=0.9, epsilon=1e-6):
super(RMSprop, self).__init__(lr)
self.rho = rho
self.epsilon = epsilon
def __call__(self, cost, params):
grads = T.grad(cost=cost, wrt=params)
updates = []
for p, g in zip(params, grads):
acc = theano.shared(p.get_value() * 0.)
acc_new = self.rho * acc + (1 - self.rho) * g ** 2
gradient_scaling = T.sqrt(acc_new + self.epsilon)
g = g / gradient_scaling
updates.append((acc, acc_new))
updates.append((p, p - self.lr * g))
return updates
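# Usage sketch (illustrative; x, y and w are hypothetical names, not part of this module):
# x = T.vector('x'); y = T.scalar('y')
# w = theano.shared(np.zeros(3), name='w')
# cost = T.sqr(T.dot(x, w) - y)
# train = theano.function([x, y], cost, updates=RMSprop(lr=0.01)(cost, [w]))
# Each train(...) call then applies the scaled update p <- p - lr * g / sqrt(acc_new + eps).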
|
[
"yguan@guanshankou.cn"
] |
yguan@guanshankou.cn
|
ea7a23bbc054e9ab84731ae0e79acb1b16afe02d
|
3b1efdd0aacc98738f3b8b9ee09c6ff59cccc14e
|
/ietf/group/dot.py
|
0efc4054594d92924ddecffcdc653a5714efd626
|
[
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
unofficial-mirror/ietfdb
|
15beb6bf17b1d4abb257ee656ac6b7488339d331
|
ce54adb30dc7299c6eb4d42b9aa9d2c2929c1a81
|
refs/heads/master
| 2020-08-06T17:24:13.966746
| 2019-10-04T20:54:05
| 2019-10-04T20:54:05
| 213,088,920
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,679
|
py
|
# Copyright The IETF Trust 2017-2019, All Rights Reserved
# -*- coding: utf-8 -*-
# -*- check-flake8 -*-
from __future__ import absolute_import, print_function, unicode_literals
from django.db.models import Q
from django.template.loader import render_to_string
from ietf.doc.models import RelatedDocument
class Edge(object):
def __init__(self, relateddocument):
self.relateddocument = relateddocument
def __hash__(self):
return hash("|".join([str(hash(nodename(self.relateddocument.source.name))),
str(hash(nodename(self.relateddocument.target.document.name))),
self.relateddocument.relationship.slug]))
def __eq__(self, other):
return self.__hash__() == other.__hash__()
def sourcename(self):
return nodename(self.relateddocument.source.name)
def targetname(self):
return nodename(self.relateddocument.target.document.name)
def styles(self):
# Note that the old style=dotted, color=red styling is never used
if self.relateddocument.is_downref():
return { 'color': 'red', 'arrowhead': 'normalnormal' }
else:
styles = { 'refnorm' : { 'color': 'blue' },
'refinfo' : { 'color': 'green' },
'refold' : { 'color': 'orange' },
'refunk' : { 'style': 'dashed' },
'replaces': { 'color': 'pink', 'style': 'dashed', 'arrowhead': 'diamond' },
}
return styles[self.relateddocument.relationship.slug]
def nodename(name):
return name.replace('-', '_')
def get_node_styles(node, group):
styles = dict()
# Shape and style (note that the old diamond shape is never used)
styles['style'] = 'filled'
if node.get_state('draft').slug == 'rfc':
styles['shape'] = 'box'
elif not node.get_state('draft-iesg').slug in ['idexists', 'watching', 'dead']:
styles['shape'] = 'parallelogram'
elif node.get_state('draft').slug == 'expired':
styles['shape'] = 'house'
styles['style'] = 'solid'
styles['peripheries'] = 3
elif node.get_state('draft').slug == 'repl':
styles['shape'] = 'ellipse'
styles['style'] = 'solid'
styles['peripheries'] = 3
else:
pass # quieter form of styles['shape'] = 'ellipse'
# Color (note that the old 'Flat out red' is never used)
if node.group.acronym == 'none':
styles['color'] = '"#FF800D"' # orangeish
elif node.group == group:
styles['color'] = '"#0AFE47"' # greenish
else:
styles['color'] = '"#9999FF"' # blueish
# Label
label = node.name
if label.startswith('draft-'):
if label.startswith('draft-ietf-'):
label = label[11:]
else:
label = label[6:]
try:
t = label.index('-')
label = r"%s\n%s" % (label[:t], label[t+1:])
except:
pass
if node.group.acronym != 'none' and node.group != group:
label = "(%s) %s" % (node.group.acronym, label)
if node.get_state('draft').slug == 'rfc':
label = "%s\\n(%s)" % (label, node.canonical_name())
styles['label'] = '"%s"' % label
return styles
def make_dot(group):
references = Q(source__group=group, source__type='draft', relationship__slug__startswith='ref')
both_rfcs = Q(source__states__slug='rfc', target__docs__states__slug='rfc')
inactive = Q(source__states__slug__in=['expired', 'repl'])
attractor = Q(target__name__in=['rfc5000', 'rfc5741'])
removed = Q(source__states__slug__in=['auth-rm', 'ietf-rm'])
relations = ( RelatedDocument.objects.filter(references).exclude(both_rfcs)
.exclude(inactive).exclude(attractor).exclude(removed) )
edges = set()
for x in relations:
target_state = x.target.document.get_state_slug('draft')
if target_state != 'rfc' or x.is_downref():
edges.add(Edge(x))
replacements = RelatedDocument.objects.filter(relationship__slug='replaces',
target__docs__in=[x.relateddocument.target.document for x in edges])
for x in replacements:
edges.add(Edge(x))
nodes = set([x.relateddocument.source for x in edges]).union([x.relateddocument.target.document for x in edges])
for node in nodes:
node.nodename = nodename(node.name)
node.styles = get_node_styles(node, group)
return render_to_string('group/dot.txt',
dict( nodes=nodes, edges=edges )
)
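# Rendering sketch (assumption: Graphviz is installed and 'group' is a Group instance):
#   with open('/tmp/group.dot', 'w') as f:
#       f.write(make_dot(group))
# then, from a shell: dot -Tpdf /tmp/group.dot -o /tmp/group.pdf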
|
[
"henrik@levkowetz.com"
] |
henrik@levkowetz.com
|
35b2288bc3a4034b6d7b89a4ff1431c77c526d82
|
43020adf20596c1824088a28a8761869c68d651f
|
/sarvmusicals/asgi.py
|
620cb74abfb811a0ee387403f102c3f5f51b4732
|
[] |
no_license
|
AmitVermaDce/Sarvmusicals
|
5ff531097a3c3a24bb83c22c05c0f6dde688f0eb
|
386270462122c6796739e9dcfa174e14b02f242d
|
refs/heads/master
| 2023-04-05T08:06:06.968659
| 2021-04-06T18:48:18
| 2021-04-06T18:48:18
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 401
|
py
|
"""
ASGI config for sarvmusicals project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'sarvmusicals.settings')
application = get_asgi_application()
|
[
"rfmd1350@orange.com"
] |
rfmd1350@orange.com
|
ed87d7af7e74cf52fe1eb8f12e6fcb786fc247b8
|
15b12d69ac3123d1562986970ce01d7a47d171de
|
/BeginningZeros.py
|
db99c8fcff71f0dcbca090ae9ab47ede6bfb3c2b
|
[
"Apache-2.0"
] |
permissive
|
simplymanas/python-learning
|
9b67b5a7acfb3a7c2455a7d1fc66203a2b419c37
|
75bc99c0dce211fd1bce5f6ce1155e0f4c71d7d0
|
refs/heads/master
| 2021-07-11T06:40:24.803589
| 2021-06-20T12:06:02
| 2021-06-20T12:06:02
| 241,769,614
| 5
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,442
|
py
|
# You have a string that consists only of digits. You need to find how many zero digits ("0") there are at the beginning of the given string.
# Input: A string, that consist of digits.
# Output: An Int.
# Example:
# beginning_zeros('100') == 0
# beginning_zeros('001') == 2
# beginning_zeros('100100') == 0
# beginning_zeros('001001') == 2
# beginning_zeros('012345679') == 1
# beginning_zeros('0000') == 4
from functools import reduce
def beginning_zeros(number: str) -> int:
y = 0
for x in str(number):
if int(x) > 0:
break
else:
y += 1
return y
#
# numofzero = lambda x,y : (y:y+1 if x > 0) ,(for y in str(number))
# return numofzero
# return reduce(lambda x, y: x * int(y),(y for y in str(number) if int(y) != 0),1)
# result= lambda x,y: y+1 if (int(x)==0) else exit
#
# print(result)
#
# numofzero = list(filter(lambda x: x > 10 and x < 20, listofNum))
# print('Filtered List : ', listofNum)
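# A compact working alternative (a sketch, not part of the original exercise):
# strip the leading zeros and compare lengths.
def beginning_zeros_alt(number: str) -> int:
    return len(number) - len(number.lstrip('0'))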
if __name__ == '__main__':
print("Example:")
print(beginning_zeros('00100'))
# These "asserts" are used for self-checking and not for an auto-testing
assert beginning_zeros('100') == 0
assert beginning_zeros('001') == 2
assert beginning_zeros('100100') == 0
assert beginning_zeros('001001') == 2
assert beginning_zeros('012345679') == 1
assert beginning_zeros('0000') == 4
# print("Coding complete? Click 'Check' to earn cool rewards!")
|
[
"manas.dash@tesco.com"
] |
manas.dash@tesco.com
|
7748235c7ec28880a6a0878162e89debcbd21931
|
b34c86385635b3817c15a6ca55686c5d143f4174
|
/api/resources/index.py
|
7ba605a86f9f2540e9eaf56b16b2ac40e094611e
|
[] |
no_license
|
ortaman/my-flask-restful-skeleton
|
15e07bf7d3c84d35f0a3277ef23a60ec83dfd1d1
|
88346e791f08990f95a2f49d137c2c24f2c6de72
|
refs/heads/master
| 2020-04-27T23:37:01.376369
| 2019-03-10T05:39:33
| 2019-03-10T05:39:33
| 174,782,938
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 131
|
py
|
from flask_restful import Resource
class IndexResource(Resource):
def get(self):
return {'data': 'Hello Test'}, 200
|
[
"ente011@gmail.com"
] |
ente011@gmail.com
|
a3b8cd90c04981ea193c294ef1d7a1f3da1f321f
|
2e2d8d2b2db67dedb309d8a93a5e306508edb3b9
|
/pages/views.py
|
09eef42f6646a4bcecc501d3b8604b4b4b16ed7e
|
[] |
no_license
|
tuhin47/django2.0.7
|
d10bac1c55ac22b1c36375ab786c911920950c0a
|
fb169aa795aac2be2d4b2ded167396dd21f9a54e
|
refs/heads/master
| 2020-05-14T16:09:20.682275
| 2019-04-17T12:15:47
| 2019-04-17T12:15:47
| 181,867,000
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 359
|
py
|
from django.shortcuts import render
from django.http import HttpResponse
# Create your views here.
def home_view(request, *args, **kwargs):
print(request.user)
user = {"name": request.user}
return render(request, "home.html", user)
def about_view(request, *args, **kwargs):
print(request.user)
return render(request, "about.html", {})
|
[
"tuhintowhidul9@gmail.com"
] |
tuhintowhidul9@gmail.com
|
925a7ead050177ad270b0e907629e5e59deec76a
|
a7a1772df0ae3a84dfa989ba76b69f4586c31269
|
/MSGG/GRU_channel.py
|
1f1e44bb3b0908d69ea21e6898e10707760ec2c0
|
[] |
no_license
|
ShuangWangCN/MSGG
|
cdfd640779aff0d05dec88642fc37b637d911925
|
c8d6a19a1b00252f507a80d722e25c118190fc45
|
refs/heads/master
| 2021-07-10T18:20:51.290068
| 2021-03-24T02:28:19
| 2021-03-24T02:28:19
| 233,840,444
| 5
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 11,394
|
py
|
from MSGG.chemical_rdkit import mol2node, edgeVec, ATOM_FDIM, BOND_FDIM, EDGE_FDIM, len_ELEM_LIST
from collections import deque
from torch.nn import init
import torch.nn as nn
import torch
def RNN_weights_init(m):
if isinstance(m, nn.GRU):
ALL_weights=m.all_weights
for weight in ALL_weights:
init.orthogonal_(weight[0])
init.orthogonal_(weight[1])
class GRU_channel(nn.Module):
def __init__(self, vocabLabel, hidden_size, embedding=None):
super(GRU_channel, self).__init__()
self.hidden_size = hidden_size
self.vocabLabel_size = vocabLabel.size()
self.vocabLabel = vocabLabel
self.atom_in_dim = ATOM_FDIM
self.bond_in_dim = 2 * ATOM_FDIM + BOND_FDIM
self.link_in_dim = len_ELEM_LIST
self.atom_out_dim = int(hidden_size / 4)
self.bond_out_dim = int(hidden_size / 4)
self.link_out_dim = int(hidden_size / 4)
self.edge_out_dim = int(hidden_size / 4)
self.node_in_dim = self.atom_out_dim + self.bond_out_dim + self.link_out_dim
self.node_out_dim =self.atom_out_dim+self.bond_out_dim+self.link_out_dim
#node embedding
self.node_embedding_out = int(hidden_size / 4)
#three channels' GRU
self.nodeRnn_in_dim = self.node_out_dim
self.nodeRnn_out_dim = int(hidden_size / 4)
self.edgeRnn_in_dim = 2 * self.node_embedding_out+ self.edge_out_dim
self.edgeRnn_out_dim = int(hidden_size / 4)
self.treeRnn_in_dim = self.node_out_dim + self.node_out_dim + self.edge_out_dim
self.treeRnn_out_dim = int(hidden_size / 4)
#attention
self.attention_node_in_dim=2*self.nodeRnn_out_dim
self.attention_node_out_dim=int(hidden_size / 2)
self.attention_tree_in_dim=2*self.treeRnn_out_dim
self.attention_tree_out_dim=int(hidden_size / 2)
self.attention_edge_in_dim=2*self.edgeRnn_out_dim
self.attention_edge_out_dim=int(hidden_size / 2)
if embedding is None:
self.embedding = nn.Embedding(vocabLabel.size(), self.node_embedding_out)
init.uniform_(self.embedding.weight)
else:
self.embedding = embedding
#node-level
self.atomW_i = nn.Linear(self.atom_in_dim, self.atom_out_dim, bias=True)
self.bondW_i = nn.Linear(self.bond_in_dim, self.bond_out_dim, bias=True)
self.linkPositionW_i = nn.Linear(self.link_in_dim, self.link_out_dim, bias=True)
self.edgeW_i = nn.Linear(EDGE_FDIM, self.edge_out_dim, bias=True)
#molecule-level
self.edgeRnn=nn.GRU(self.edgeRnn_in_dim,self.edgeRnn_out_dim,num_layers =2,bidirectional=True, bias=True)
self.nodeRnn = nn.GRU(self.nodeRnn_in_dim, self.nodeRnn_out_dim, num_layers=2, bidirectional=True, bias=True)
self.treeVecRnn = nn.GRU(self.treeRnn_in_dim, self.treeRnn_out_dim, num_layers=2, bidirectional=True, bias=True)
RNN_weights_init(self.nodeRnn)
RNN_weights_init(self.edgeRnn)
RNN_weights_init(self.treeVecRnn)
#attention network
self.att_W_node = nn.Linear(self.attention_node_in_dim, self.attention_node_out_dim)
self.att_W_tree = nn.Linear(self.attention_tree_in_dim, self.attention_tree_out_dim)
self.att_W_edge = nn.Linear(self.attention_edge_in_dim, self.attention_edge_out_dim)
self.attention_a_node = nn.Linear(2 * self.attention_node_out_dim, 1)
self.attention_a_tree = nn.Linear(2 * self.attention_tree_out_dim, 1)
self.attention_a_edge = nn.Linear(2 * self.attention_tree_out_dim, 1)
self.ac_Fun = nn.ReLU()
def forward(self, SGraph_batch):
TreeVec_batch = []
node_vec_tree_batch = []
root_vec_batch = []
edgeList_batch = []
label_batch=[]
root_batch = []
for sgraph in SGraph_batch:
root_batch.append(sgraph.nodes[0])
label_batch.append([sgraph.label])
NodeOrders = []
for root in root_batch:
NodeOrder = get_bfs_order(root) #bfs node order
NodeOrders.append(NodeOrder)
root_vec = Node2Vec(self, root)
root_vec_batch.append(root_vec)
for batch_id in range(len(NodeOrders)):
node_order = NodeOrders[batch_id]
sgraph = SGraph_batch[batch_id]
cur_root = torch.tensor([sgraph.nodes[0].widFlatten]).cuda()
cur_root_embedding = self.embedding(cur_root)
edgeList = sgraph.edgeList
edgeList_vec_Tree = [] # edge channel
node_vec_tree = [] # node channel
tree_vec = [] # neighbor channel
for node in node_order:
cur_node_vec = Node2Vec(self, node)
node_vec_tree.append(cur_node_vec)
cur_node_id = node.idx
nei_mess = [] # calculate the sum of each neighbor edge and corresponding node for the node
if not node.neighbors:
nei_mess.append(torch.zeros(1, (self.node_out_dim + self.edge_out_dim)).cuda())
for node_neighbor in node.neighbors:
node_nei_vec = Node2Vec(self, node_neighbor)
node_nei_id = node_neighbor.idx
edgeType = FindEdge(cur_node_id, node_nei_id, sgraph.edgeNoseq)[0]
edgecode = edgeVec(edgeType).cuda()
edgeFeature = self.ac_Fun(self.edgeW_i(edgecode))
edge_mess = torch.cat([node_nei_vec, edgeFeature], 1)
nei_mess.append(edge_mess)
nei_mess = torch.stack(nei_mess)
nei_mess = nei_mess.sum(0)
node_edge_node_vec = torch.cat([cur_node_vec, nei_mess], 1)
tree_vec.append(node_edge_node_vec)
tree_vec = torch.stack(tree_vec, 0)
tree_vec_out = self.treeVecRnn(tree_vec)
tree_vec_out = tree_vec_out[0].squeeze(1)
tree_vec_out = mol_attention(self,tree_vec_out,'tree')
TreeVec_batch.append(tree_vec_out)
node_vec_tree = torch.stack(node_vec_tree, 0)
node_vec_out = self.nodeRnn(node_vec_tree)
node_vec_out = node_vec_out[0].squeeze(1)
node_vec_out = mol_attention(self,node_vec_out,'node')
node_vec_tree_batch.append(node_vec_out)
if not edgeList:
edgeList_vec_Tree.append(torch.cat([cur_root_embedding,cur_root_embedding,torch.zeros(1,self.edge_out_dim).cuda()],1))
for edge in edgeList:
edgeNo = edge[2][0]
nodeStart=torch.tensor([edge[0]]).cuda()
nodeEnd=torch.tensor([edge[1]]).cuda()
nodeStartVec=self.embedding(nodeStart)
nodeEndVec=self.embedding(nodeEnd)
edgecode=edgeVec(edgeNo).cuda()
edgeFeature=self.ac_Fun(self.edgeW_i(edgecode))
edgeListFeature=torch.cat([nodeStartVec,nodeEndVec,edgeFeature],dim=1)
edgeList_vec_Tree.append(edgeListFeature)
edgeList_vec_Tree = torch.stack(edgeList_vec_Tree, 0)
edgeList_vec_out = self.edgeRnn(edgeList_vec_Tree)
edgeList_vec_out = edgeList_vec_out[0].squeeze(1)
edgeList_vec_out = mol_attention(self, edgeList_vec_out, 'edge')
edgeList_batch.append(edgeList_vec_out)
node_vec_tree_batch = torch.stack(node_vec_tree_batch, dim=0)
node_vec_tree_batch = torch.squeeze(node_vec_tree_batch)
edgeList_batch = torch.stack(edgeList_batch, dim=0)
edgeList_batch = torch.squeeze(edgeList_batch)
TreeVec_batch = torch.stack(TreeVec_batch, dim=0)
TreeVec_batch = torch.squeeze(TreeVec_batch)
label_batch = torch.FloatTensor(label_batch).squeeze(dim=1).cuda()
return label_batch,TreeVec_batch,node_vec_tree_batch, edgeList_batch
"""
Helper functions
"""
def mol_attention(self,vec_tensor,state):
super_ave=torch.mean(vec_tensor,0).unsqueeze(0)
if state=='node':
W_node_vec=self.att_W_node(vec_tensor)
W_super_ave=self.att_W_node(super_ave).expand(vec_tensor.shape[0],-1)
W_cat_vec=torch.cat([W_super_ave,W_node_vec],dim=1)
alpha_ac=nn.LeakyReLU()(self.attention_a_node(W_cat_vec))
alpha=torch.exp(alpha_ac)
alpha_sum=torch.sum(alpha)
alpha_final=alpha/alpha_sum
alpha_final=torch.transpose(alpha_final,1,0)
node_att_vec=self.ac_Fun(alpha_final.mm(W_node_vec))
return node_att_vec
if state=='tree':
W_tree_vec=self.att_W_tree(vec_tensor)
W_super_ave=self.att_W_tree(super_ave).expand(vec_tensor.shape[0],-1)
W_cat_vec=torch.cat([W_super_ave,W_tree_vec],dim=1)
alpha_ac=nn.LeakyReLU()(self.attention_a_tree(W_cat_vec))
alpha=torch.exp(alpha_ac)
alpha_sum=torch.sum(alpha)
alpha_final=alpha/alpha_sum
alpha_final=torch.transpose(alpha_final,1,0)
tree_att_vec=self.ac_Fun(alpha_final.mm(W_tree_vec))
return tree_att_vec
if state == 'edge':
W_edge_vec = self.att_W_edge(vec_tensor)
W_super_ave = self.att_W_edge(super_ave).expand(vec_tensor.shape[0],-1)
W_cat_vec = torch.cat([W_super_ave, W_edge_vec], dim=1)
alpha_ac = nn.LeakyReLU()(self.attention_a_edge(W_cat_vec))
alpha = torch.exp(alpha_ac)
alpha_sum = torch.sum(alpha)
alpha_final = alpha / alpha_sum
alpha_final = torch.transpose(alpha_final, 1, 0)
tree_att_vec = self.ac_Fun(alpha_final.mm(W_edge_vec))
return tree_att_vec
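# What mol_attention computes, in formula form (descriptive note added for clarity):
# given per-step vectors h_i and their mean h_bar,
#   e_i     = LeakyReLU( a^T [ W h_bar ; W h_i ] )
#   alpha_i = exp(e_i) / sum_j exp(e_j)            (a softmax over steps)
#   out     = ReLU( sum_i alpha_i * (W h_i) )
# i.e. a graph-attention-style readout that pools one channel's RNN outputs into a
# fixed-size molecule vector.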
def FindEdge(atomId, neiId, edgeNoseq):
edge_atom_seq = []
for edge in edgeNoseq:
edge_atom_seq.append([edge[0], edge[1]])
if [atomId, neiId] in edge_atom_seq:
edgeNo = edge_atom_seq.index([atomId, neiId])
else:
edgeNo = edge_atom_seq.index([neiId, atomId])
edgeType = edgeNoseq[edgeNo][2]
return edgeType
def Node2Vec(self, node):
fatoms, fbonds, linkpositions = mol2node(node)
fatomsVec = self.atomW_i(fatoms)
fatomsVec = fatomsVec.sum(dim=0)
fatomsVec = torch.unsqueeze(fatomsVec, 0)
fatomsVec = self.ac_Fun(fatomsVec)
fbondsVec = self.bondW_i(fbonds)
fbondsVec = fbondsVec.sum(dim=0)
fbondsVec = torch.unsqueeze(fbondsVec, 0)
fbondsVec=self.ac_Fun(fbondsVec)
linkpositionsVec = self.linkPositionW_i(linkpositions)
linkpositionsVec = linkpositionsVec.sum(dim=0)
linkpositionsVec = torch.unsqueeze(linkpositionsVec, 0)
linkpositionsVec = self.ac_Fun(linkpositionsVec)
nodevec = torch.cat([fatomsVec, fbondsVec, linkpositionsVec], dim=1)
return nodevec
def by_scort(t):
return t[1]
def get_bfs_order(root):
queue = deque([root])
visited = set([root.idx])
visitedAtoms = []
visitedAtoms.append(root)
root.depth = 0
while len(queue) > 0:
x = queue.popleft()
x_nei = []
for y in x.neighbors:
x_nei_item=(y,y.widFlatten)
x_nei.append(x_nei_item)
x_nei=sorted(x_nei,key=by_scort)
for x_nei_sort_item in x_nei:
y_nei,_=x_nei_sort_item
if y_nei.idx not in visited:
queue.append(y_nei)
visited.add(y_nei.idx)
visitedAtoms.append(y_nei)
y_nei.depth = x.depth + 1
return visitedAtoms
|
[
"noreply@github.com"
] |
ShuangWangCN.noreply@github.com
|
6d2d77e617f92545b4f2deb72355e75519a1e5cb
|
91ef5a418b17283087c48f6270086a7f1652a053
|
/utils/tasks.py
|
442b0bc4eb4402ee172d6241d775503bdad6a6b1
|
[
"MIT"
] |
permissive
|
badcw-OnlineJudge/OnlineJudge
|
b9f5c012386e04fb2722662b9537eab19f63a0b9
|
75436d845a772a1d59e6db4efc19188aa4606a86
|
refs/heads/master
| 2020-04-10T23:43:49.926142
| 2020-04-06T16:33:13
| 2020-04-06T16:33:13
| 161,362,745
| 0
| 1
|
MIT
| 2018-12-15T17:17:36
| 2018-12-11T16:30:14
|
Python
|
UTF-8
|
Python
| false
| false
| 187
|
py
|
import os
from celery import shared_task
@shared_task
def delete_files(*args):
for item in args:
try:
os.remove(item)
except Exception:
pass
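# Usage sketch (illustrative): enqueue the deletion on a Celery worker instead of
# blocking the request, e.g. delete_files.delay('/tmp/upload_1.zip', '/tmp/upload_2.zip')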
|
[
"virusdefender@outlook.com"
] |
virusdefender@outlook.com
|
fdf8cc4cfd23aba4b1b5920e8d42644b2ddba809
|
2104f2ca2caa2e86a5bde37f8971ef778a123d6b
|
/bookorbooks/book/migrations/0004_auto_20210812_0751.py
|
22c8e20f5f405d73895261ee8a8a7bbad1b59e4e
|
[
"MIT"
] |
permissive
|
talhakoylu/SummerInternshipBackend
|
34ea412b77189f4945dc10cdfdc35d16902a5534
|
4ecedf5c97f73e3d32d5a534769e86aac3e4b6d3
|
refs/heads/main
| 2023-07-26T02:50:35.815293
| 2021-09-09T18:58:39
| 2021-09-09T18:58:39
| 393,022,807
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,188
|
py
|
# Generated by Django 3.2.5 on 2021-08-12 07:51
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('book', '0003_readinghistory_counter'),
]
operations = [
migrations.AddField(
model_name='booklevel',
name='title_english',
field=models.CharField(default='English title', max_length=50, verbose_name='İngilizce Başlık'),
preserve_default=False,
),
migrations.AddField(
model_name='category',
name='description_english',
field=models.CharField(default='English description', max_length=256, verbose_name='İngilizce Açıklama'),
preserve_default=False,
),
migrations.AddField(
model_name='category',
name='title_english',
field=models.CharField(default='English title', max_length=50, verbose_name='İngilizce Başlık'),
preserve_default=False,
),
migrations.AlterField(
model_name='bookpage',
name='content_position',
field=models.SmallIntegerField(blank=True, choices=[(0, 'Orta'), (1, 'Üst'), (2, 'Alt')], default=0, null=True, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(2)], verbose_name='İçeriğin Konumu'),
),
migrations.AlterField(
model_name='bookpage',
name='image_position',
field=models.SmallIntegerField(blank=True, choices=[(0, 'Orta'), (1, 'Üst'), (2, 'Alt')], default=0, null=True, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(2)], verbose_name='Görselin Konumu'),
),
migrations.AlterField(
model_name='category',
name='description',
field=models.CharField(max_length=256, verbose_name='Türkçe Açıklama'),
),
migrations.AlterField(
model_name='category',
name='title',
field=models.CharField(max_length=50, verbose_name='Türkçe Başlık'),
),
]
|
[
"ahmettalha.net@gmail.com"
] |
ahmettalha.net@gmail.com
|
d8b7447ce2ddaaacb628d05fc3a62538a298e930
|
35f4354e293952378637854a2684c34cd5a90816
|
/documentation/sportswebapp/util/sessions.py
|
b66711b19629f66d34fe1c5d26efc967de42c858
|
[] |
no_license
|
Cosmo65/nihlapp
|
fff663368e95ac10b2fc97db6ef8b52ef642f557
|
cee6db21dab4be7ef96dd76f8a057fe8a303b9b0
|
refs/heads/master
| 2022-02-07T08:44:50.450899
| 2010-05-13T17:05:21
| 2010-05-13T17:05:21
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,915
|
py
|
import os
import time
import datetime
import random
import Cookie
import logging
from google.appengine.api import memcache
# Note - please do not use this for production applications
# see: http://code.google.com/p/appengine-utitlies/
COOKIE_NAME = 'appengine-simple-session-sid'
DEFAULT_COOKIE_PATH = '/'
SESSION_EXPIRE_TIME = 7200 # sessions are valid for 7200 seconds (2 hours)
class Session(object):
def __init__(self):
self.sid = None
self.key = None
self.session = None
string_cookie = os.environ.get('HTTP_COOKIE', '')
self.cookie = Cookie.SimpleCookie()
self.cookie.load(string_cookie)
# check for existing cookie
if self.cookie.get(COOKIE_NAME):
self.sid = self.cookie[COOKIE_NAME].value
self.key = 'session-' + self.sid
try:
self.session = memcache.get(self.key)
except:
self.session = None
if self.session is None:
logging.info('Invalidating session '+self.sid)
self.sid = None
self.key = None
if self.session is None:
self.sid = str(random.random())[5:]+str(random.random())[5:]
self.key = 'session-' + self.sid
logging.info('Creating session '+self.key);
self.session = dict()
memcache.add(self.key, self.session, 3600)
self.cookie[COOKIE_NAME] = self.sid
self.cookie[COOKIE_NAME]['path'] = DEFAULT_COOKIE_PATH
# Send the Cookie header to the browser
print self.cookie
# Private method to update the cache on modification
def _update_cache(self):
memcache.replace(self.key, self.session, 3600)
# Convenient delete with no error method
def delete(self, keyname):
if keyname in self.session:
del self.session[keyname]
self._update_cache()
# Support the dictionary get() method
def get(self, keyname, default=None):
if keyname in self.session:
return self.session[keyname]
return default
# session[keyname] = value
def __setitem__(self, keyname, value):
self.session[keyname] = value
self._update_cache()
# x = session[keyname]
def __getitem__(self, keyname):
if keyname in self.session:
return self.session[keyname]
raise KeyError(str(keyname))
# del session[keyname]
def __delitem__(self, keyname):
if keyname in self.session:
del self.session[keyname]
self._update_cache()
return
raise KeyError(str(keyname))
# if keyname in session :
def __contains__(self, keyname):
try:
r = self.__getitem__(keyname)
except KeyError:
return False
return True
# x = len(session)
def __len__(self):
return len(self.session)
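# Usage sketch (illustrative) inside a request handler:
#   session = Session()          # loads or creates the memcache-backed session
#   session['user'] = 'alice'    # __setitem__ writes through to memcache
#   if 'user' in session:
#       name = session.get('user', 'anonymous')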
|
[
"lothix@gmail.com"
] |
lothix@gmail.com
|
30613f0fcbce9af70ea44f5117927ad5b9c8a6ad
|
d83118503614bb83ad8edb72dda7f449a1226f8b
|
/src/dprj/platinumegg/app/cabaret/management/commands/migrate_redisdata.py
|
b68508cd176e30b78310f989c74305db0aa91dc5
|
[] |
no_license
|
hitandaway100/caba
|
686fe4390e182e158cd9714c90024a082deb8c69
|
492bf477ac00c380f2b2758c86b46aa7e58bbad9
|
refs/heads/master
| 2021-08-23T05:59:28.910129
| 2017-12-03T19:03:15
| 2017-12-03T19:03:15
| 112,512,044
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,476
|
py
|
# -*- coding: utf-8 -*-
from django.core.management.base import BaseCommand
import settings_sub
import redis
from platinumegg.app.cabaret.util.db_util import ModelRequestMgr
from platinumegg.app.cabaret.util.api import BackendApi
class Command(BaseCommand):
"""Redisのデータを移行する.
"""
def handle(self, *args, **options):
print '================================'
print 'migrate_redisdata'
print '================================'
model_mgr = ModelRequestMgr()
        # Check that maintenance mode is on.
appconfig = BackendApi.get_appconfig(model_mgr)
if not appconfig.is_maintenance():
            print u'Please switch to maintenance mode'
return
print 'check maintenance...OK'
        # Source.
if settings_sub.IS_LOCAL:
from_host = '127.0.0.1'
from_db = 0
else:
from_host = '10.116.41.122'
from_db = 0
        # Destination.
to_host = settings_sub.REDIS_KPI_HOST
to_db = settings_sub.REDIS_KPI_NUMBER
        # Redis clients.
client_from = redis.Redis(host=from_host, port=6379, db=from_db)
client_to = redis.Redis(host=to_host, port=6379, db=to_db)
        # Fetch every key from the source.
key_list = client_from.keys()
errors = []
for key in key_list:
            # Get the data type of the key.
data_type = client_from.type(key)
print '{}...{}'.format(key, data_type)
            # Run the handler for this type.
f = getattr(self, 'migrate_{}'.format(data_type), None)
if f is None:
continue
if not f(client_from, client_to, key):
errors.append('{}...{}'.format(key, data_type))
continue
            # Get the TTL of the source key.
ttl = client_from.ttl(key)
if 0 < ttl:
                # Apply the TTL to the destination key.
client_to.expire(key, ttl)
print '================================'
print 'all done..'
print 'error : {}'.format(len(errors))
if errors:
for err in errors:
print ' {}'.format(err)
def migrate_string(self, client_from, client_to, key):
        """Migrate string-type data.

        @type client_from: redis.Redis
        @type client_to: redis.Redis
        @type key: string
        """
v = client_from.get(key)
client_to.set(key, v)
return client_to.get(key) == v
def migrate_list(self, client_from, client_to, key):
        """Migrate list-type data.

        @type client_from: redis.Redis
        @type client_to: redis.Redis
        @type key: string
        """
length = client_from.llen(key)
values = client_from.lrange(key, 0, length - 1)
client_to.rpush(key, *values)
return client_to.llen(key) == length and client_to.lrange(key, 0, length - 1) == values
def migrate_set(self, client_from, client_to, key):
        """Migrate set-type data.

        @type client_from: redis.Redis
        @type client_to: redis.Redis
        @type key: string
        """
members = client_from.smembers(key)
client_to.sadd(key, *members)
return client_to.smembers(key) == members
def migrate_zset(self, client_from, client_to, key):
        """Migrate sorted-set-type data.

        @type client_from: redis.Redis
        @type client_to: redis.Redis
        @type key: string
        """
cnt = client_from.zcard(key)
start = 0
while start < cnt:
end = min(cnt, start + 10000)
scores = dict(client_from.zrange(key, start, end, withscores=True))
client_to.zadd(key, **scores)
start = end + 1
if client_to.zcard(key) == cnt:
return client_from.zrange(key, 0, cnt, withscores=True) == client_to.zrange(key, 0, cnt, withscores=True)
return False
def migrate_hash(self, client_from, client_to, key):
        """Migrate hash-type data.

        @type client_from: redis.Redis
        @type client_to: redis.Redis
        @type key: string
        """
mapping = client_from.hgetall(key)
client_to.hmset(key, mapping)
return mapping == client_to.hgetall(key)
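# --- Note (added): invocation sketch, assuming a standard manage.py layout ---
#   python manage.py migrate_redisdata
# Each key is copied with a type-specific handler (string/list/set/zset/hash);
# sorted sets are copied in chunks of roughly 10000 members, and any TTL on
# the source key is re-applied to the destination afterwards.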
|
[
"shangye@mail.com"
] |
shangye@mail.com
|
1c38781fc34b334ce732582ffe3d759dc17810bd
|
41e6db1a2079f4ecc27abd9b6168320688f6bbf1
|
/B04505004_HW02.py
|
36d278cef9ac7ef5f9f15357c71c57efbf215908
|
[] |
no_license
|
huaiche94/ESOE-CS101-2015
|
dfd574e0c07a7f7fb48031c5f1586ecb140c20f9
|
56693d08841d93ae2cfa5f27bc02ec5b74a1ed3c
|
refs/heads/master
| 2021-05-29T23:00:36.141278
| 2015-10-11T14:10:49
| 2015-10-11T14:10:49
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,137
|
py
|
# !/usr/bin/env python3
# -*- coding:utf-8 -*-
# Input is restricted to positive or negative integers written in binary.
# If the input is not a valid binary number, a value will still be produced.
decimal = 0
value = 1
k = 10
binary = int(input("Enter Binary Number:"))
if(binary > 0):
x = binary
while(x > 0):
remainder = (x % k)
if(remainder > 0):
decimal += value
k *= 10
x -= remainder
value *= 2
print (decimal)
else:
x = - binary
while(x > 0):
remainder = (x % k)
if(remainder > 0):
decimal += value
k *= 10
x -= remainder
value *= 2
print (0 - decimal)
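# --- Cross-check (added): Python's built-in int() performs the same conversion,
# assuming the input is a well-formed binary literal, e.g.:
#   int('1010', 2)   == 10
#   -int('1010', 2)  == -10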
# p2-19a= 10
# p2-19b= 17
# p2-19c= 6
# p2-19d= 8
# p2-20a= 14
# p2-20b= 8
# p2-20c= 13
# p2-20d= 4
# p2-22a= 00010001 11101010 00100010 00001110
# p2-22b= 00001110 00111000 11101010 00111000
# p2-22c= 01101110 00001110 00111000 01001110
# p2-22d= 00011000 00111000 00001101 00001011
# p3-28a= 234
# p3-28b= 560
# p3-28c= 874
# p3-28d= 888
# p3-30a= 234
# p3-30b= 560
# p3-30c= 875
# p3-30d= 889
|
[
"kkiill2525@gmail.com"
] |
kkiill2525@gmail.com
|
e81bcabd90f2bc88002a2a4b0241c5c56bbec20d
|
85d4007b9c7f540c81526a538db01c826d64e429
|
/todo_project/todo_app/views.py
|
dac38953b57d7b82fed472bf732cfb6f2cfe18b2
|
[] |
no_license
|
AjayKodoth/todo_app
|
23c7c6f2d52cc76ae6a376308c54dac1b84fee54
|
14cf1e42c7f3cceaa5eadc995051155df01b3e6b
|
refs/heads/master
| 2023-05-09T15:52:29.870637
| 2021-06-03T15:51:55
| 2021-06-03T15:51:55
| 373,558,532
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,012
|
py
|
from django.shortcuts import render,redirect
from . models import Task
from . forms import Todoforms
from django.views.generic import ListView
from django.views.generic.detail import DetailView
from django.views.generic.edit import UpdateView,DeleteView
from django.urls import reverse_lazy
# Create your views here.
class TasklistView(ListView):
model = Task
template_name = 'taskview.html'
context_object_name = 'obj1'
class TaskDetailView(DetailView):
model = Task
template_name = 'detail.html'
context_object_name = 'i'
class TaskUpdateView(UpdateView):
model = Task
template_name = 'update.html'
context_object_name = 'task'
fields = ('name','priority','date')
def get_success_url(self):
return reverse_lazy('cbvdetail',kwargs={'pk':self.object.id})
class TaskDeleteView(DeleteView):
model = Task
template_name = 'delete.html'
success_url = reverse_lazy('cbvtask')
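# Note (added): reverse_lazy is used instead of reverse because class attributes
# are evaluated at import time, before the URLconf is loaded; reverse_lazy defers
# the URL resolution until the value is actually needed.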
def taskview(request):
obj1=Task.objects.all()
if request.method == 'POST':
name = request.POST.get('name')
priority = request.POST.get('priority')
date = request.POST.get('date')
obj = Task(name=name, priority=priority,date=date)
obj.save()
return render(request,'taskview.html',{'obj1':obj1})
def delete(request,taskid):
task=Task.objects.get(id=taskid)
if request.method=='POST':
task.delete()
return redirect('/')
return render(request,'delete.html',{'task':task})
def update(request,id):
task=Task.objects.get(id=id)
form=Todoforms(request.POST or None,instance=task)
if form.is_valid():
form.save()
return redirect('/')
return render(request,'edit.html',{'task':task,'form':form})
# def task(request):
# if request.method == 'POST':
# name = request.POST.get('name')
# priority = request.POST.get('priority')
# obj = Task(name=name, priority=priority)
# obj.save()
#
# return render(request,'task.html')
|
[
"sujithvengayil8432@gmail.com"
] |
sujithvengayil8432@gmail.com
|
26feb465de8a9602b49002a06ede218156b065c1
|
5f5006fcaffd8f7dd50f67c7063d52b983ab3e86
|
/Comparison of Two Data Frames.py
|
6479cec469cbdb5989d726bd37a9cde6bbacbd09
|
[] |
no_license
|
wasseypro/ipnyb-files
|
6c430220132d04b3f44e6bdac61f9e920250e3d3
|
d920f318908a70f2d468fd6f99335e0e6c1002c9
|
refs/heads/master
| 2023-03-14T13:27:06.737672
| 2021-02-12T14:15:55
| 2021-02-12T14:15:55
| 296,563,925
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 431
|
py
|
#!/usr/bin/env python
# coding: utf-8
# In[1]:
# Comparing features of Dataframe 1 and Dataframe 2
# In[2]:
import numpy as np
import pandas as pd
import filecmp
# In[13]:
# Creating Dataframe
df1=pd.read_csv('e:/Datasets/train.csv')
df2=pd.read_csv('e:/Datasets/test.csv')
# In[14]:
df1.info(),df2.info()
# In[5]:
df2.info()
# In[6]:
df3=pd.concat([df1,df2], axis=1)
# In[7]:
df3.info()
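# Note (added): axis=1 concatenates column-wise, so df3 holds the columns of
# both frames side by side, aligned on the row index.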
# In[ ]:
|
[
"noreply@github.com"
] |
wasseypro.noreply@github.com
|
99586924182b6d7ef60ff1e2c69c81da21ff9886
|
24f1d5e6e9bd463b461613c0f5fe6b7dad5e8567
|
/search.py
|
85f44f1f88730388a3e66404ef7e98fea1ada6a6
|
[] |
no_license
|
jokedewinter/phonebook_app
|
78d291f224f3316b40a2cd1153f2cadefebfaa3b
|
ed2e44474071cf1ae2366903563f471489a72fd6
|
refs/heads/master
| 2020-04-22T04:28:01.261342
| 2019-02-11T12:24:07
| 2019-02-11T12:24:07
| 170,124,594
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,162
|
py
|
import sqlite3, requests
from math import sin, cos, sqrt, atan2, radians
def get_database():
try:
conn = sqlite3.connect('static/db/phonebook.db')
cursor = conn.cursor()
return conn, cursor
except FileNotFoundError:
return False
def close_database(cursor, conn):
cursor.close()
conn.close()
return
def query_database(query, value):
conn, cursor = get_database()
results = cursor.execute(query, value).fetchall()
close_database(cursor, conn)
return results
def show_business_types():
select_query = "SELECT DISTINCT business_category FROM business ORDER BY business_category"
results = query_database(select_query, "")
types = []
for item in results:
types.append(item[0])
return types
def get_lat_lon(location):
endpoint = "https://api.opencagedata.com/geocode/v1/json"
payload = {"q": location, "key": "054ed13663c94c4791d1806b7b14fd71"}
response = requests.get(endpoint, params=payload)
data = response.json()
lat = ""
lon = ""
if response.status_code == 200:
if data['total_results'] > 0:
if data['results'][0]['components']['country_code'] == 'gb':
lat = data['results'][0]['geometry']['lat']
lon = data['results'][0]['geometry']['lng']
return lat, lon
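# Note (added): the function below implements the haversine formula:
#   a = sin^2(dlat/2) + cos(lat1) * cos(lat2) * sin^2(dlon/2)
#   d = 2 * R * atan2(sqrt(a), sqrt(1 - a)),  with R = 6371 km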
def distance_haversine(lat1, lon1, lat2, lon2):
radius = 6371 # km
lat = radians(lat2 - lat1)
lon = radians(lon2 - lon1)
sins_lat = sin(lat/2) * sin(lat/2)
sins_lon = sin(lon/2) * sin(lon/2)
cosinus = cos(radians(lat1)) * cos(radians(lat2))
a = sins_lat + cosinus * sins_lon
c = 2 * atan2(sqrt(a), sqrt(1-a))
distance = radius * c
return round(distance)
def get_locations(results, location):
"""
    If a location was given, find the lat/lon for it.
"""
lat, lon = get_lat_lon(location)
i = 0
for item in results:
if ("" != lat) and ("" != lon):
# lat = 53.817675
# lon = -1.575675
# item[-1] = distance_haversine(item[8], item[9], lat, lon)
item['distance'] = distance_haversine(item['latitude'], item['longitude'], lat, lon)
i += 1
return sorted(results, key=lambda k: k['distance'])
def tuple_to_dictionary(results, table):
"""
Convert the tuple list elements into dictionaries.
That way you can append a distance value to update if applicable.
And it will be easier to display with Flask too.
"""
converted = []
for item in results:
dictionary = {}
if "business" == table:
dictionary['name'] = item[0]
dictionary['type'] = item[1]
elif "people" == table:
dictionary['first_name'] = item[0]
dictionary['last_name'] = item[1]
dictionary['street'] = item[2]
dictionary['town'] = item[3]
dictionary['region'] = item[4]
dictionary['postcode'] = item[5]
dictionary['telephone'] = item[6]
dictionary['latitude'] = item[8]
dictionary['longitude'] = item[9]
dictionary['distance'] = ""
converted.append(dictionary)
return converted
def process_results(results, location, table):
if len(results) > 0:
results = tuple_to_dictionary(results, table)
if location:
return get_locations(results, location)
return results
def search_business_type(biz_type, location=None):
"""
    Results is a list of businesses with their lat/lon.
    For each item, decide how far it is from the given location.
    Append each result with its distance.
    Sort the result list by distance.
"""
select_query = "SELECT * FROM business INNER JOIN postcodes ON (business.postcode = postcodes.postcode) WHERE business.business_category = ?"
value_query = (biz_type, )
results = query_database(select_query, value_query)
return process_results(results, location, "business")
def search_business_name(biz_name, location=None):
"""
    Results is a list of businesses with their lat/lon.
    For each item, decide how far it is from the given location.
    Append each result with its distance.
    Sort the result list by distance.
"""
select_query = "SELECT * FROM business INNER JOIN postcodes ON (business.postcode = postcodes.postcode) WHERE business.business_name LIKE ?"
value_query = ("%"+biz_name+"%", )
results = query_database(select_query, value_query)
return process_results(results, location, "business")
def search_people(person_name, location=None):
"""
    Results is a list of people with their lat/lon.
    For each item, decide how far it is from the given location.
    Append each result with its distance.
    Sort the result list by distance.
"""
select_query = "SELECT * FROM people INNER JOIN postcodes ON (people.postcode = postcodes.postcode) WHERE people.last_name LIKE ?"
value_query = ("%"+person_name+"%", )
results = query_database(select_query, value_query)
return process_results(results, location, "people")
def sort_results(results, column):
return sorted(results, key=lambda k: k[column])
|
[
"joke@jokedewinter.co.uk"
] |
joke@jokedewinter.co.uk
|
9c93b9a5871e85359d182c2f0f6c73f4be68cf6d
|
f378dc4374377d0ded7b0d66ad79aeb439754073
|
/NCTU/7.reverse-integer.py
|
b68f7edf12cefb97b198d456ef933a43560b7140
|
[] |
no_license
|
DabaoHuang/LeetCodePractice
|
f5ea8a730a2912642e07c44890719f3247cab921
|
9766ba55e1c6a8581fcd0c3d11d2ac55fa008ce1
|
refs/heads/master
| 2020-07-23T23:34:43.049504
| 2020-04-30T02:42:19
| 2020-04-30T02:42:19
| 207,739,648
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 437
|
py
|
#
# @lc app=leetcode id=7 lang=python
#
# [7] Reverse Integer
#
# @lc code=start
import math
class Solution(object):
def reverse(self, x):
"""
:type x: int
:rtype: int
"""
if x == 0:
return 0
ret = int(('-' if x < 0 else '') + str(abs(x))[::-1].lstrip("0"))
if abs(ret) > (2 ** 31 - 1):
return 0
return ret
# @lc code=end
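# Worked examples (added): reverse(123) -> 321, reverse(-120) -> -21,
# reverse(1534236469) -> 0 (the reversal 9646324351 overflows 2**31 - 1).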
|
[
"cn41408@gmail.com"
] |
cn41408@gmail.com
|
eca955f9e01d8d0faad118f84bf0a415ff47ffbb
|
9190478862e71a475933c5d995eeade70a085415
|
/cybele_advanced_query_builder/settings/settings_base.py
|
e54f4ddd1917f2953083f2192ca98bd9a398c041
|
[] |
no_license
|
gtsapelas/cybele_test
|
aa4d2c0059108a8f0f90d7a0114c3217e8802463
|
b44508893b9b6556bd076a24272c29893b4e202d
|
refs/heads/master
| 2021-04-05T15:34:39.389014
| 2020-03-19T18:17:57
| 2020-03-19T18:17:57
| 248,572,559
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,429
|
py
|
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import uuid
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.environ.get("SECRET_KEY")
# SECURITY WARNING: don't run with debug turned on in production!
# DEBUG = int(os.environ.get("DEBUG", default=0))
DEBUG = True
ALLOWED_HOSTS = os.environ.get("DJANGO_ALLOWED_HOSTS").split(" ")
# Application definition
INSTALLED_APPS = [
# django
'rest_framework',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.humanize',
'django.contrib.sites',
# visualisation
'leaflet',
# authentication
'allauth',
'allauth.account',
'allauth.socialaccount',
# apps
'aggregator',
'query_designer',
]
STORAGE_TARGET = 'CYBELE_LXS'
DATABASES = {
'default': {
'ENGINE': os.environ.get("SQL_ENGINE"),
'NAME': os.environ.get("SQL_DATABASE"),
'USER': os.environ.get("SQL_USER"),
'PASSWORD': os.environ.get("SQL_PASSWORD"),
'HOST': os.environ.get("SQL_HOST"),
'PORT': os.environ.get("SQL_PORT"),
},
'CYBELE_LXS': {
'HOST': os.environ.get("LXS_HOST"),
'PORT': os.environ.get("LXS_PORT"),
'USER': os.environ.get("LXS_USER"),
'PASSWORD': os.environ.get("LXS_PASSWORD"),
'NAME': os.environ.get("LXS_NAME"),
}
}
AUTHENTICATION_BACKENDS = (
# Needed to login by username in Django admin, regardless of `allauth`
"django.contrib.auth.backends.ModelBackend",
# `allauth` specific authentication methods, such as login by e-mail
"allauth.account.auth_backends.AuthenticationBackend",
)
SITE_ID = 1
LOGIN_REDIRECT_URL = '/queries/'
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'whitenoise.middleware.WhiteNoiseMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.cache.UpdateCacheMiddleware',
'django.middleware.cache.FetchFromCacheMiddleware',
]
LOGIN_EXEMPT_URLS = (
r'^$',
r'^terms/$',
r'^about$',
r'^register$',
r'^accounts/',
)
ROOT_URLCONF = 'cybele_advanced_query_builder.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates'), os.path.join(BASE_DIR, 'templates', 'allauth')]
,
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'django_settings_export.settings_export',
],
},
},
]
WSGI_APPLICATION = 'cybele_advanced_query_builder.wsgi.application'
PLATFORM_VERSION = '0.1.0'
SETTINGS_EXPORT = [
'PLATFORM_VERSION',
]
# AUTH_PASSWORD_VALIDATORS = [
# # {
# # 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
# # },
# {
# 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
# },
# # {
# # 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
# # },
# {
# 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
# },
# ]
# ACCOUNT_EMAIL_REQUIRED = True
# ACCOUNT_UNIQUE_EMAIL = True
# ACCOUNT_EMAIL_VERIFICATION = 'mandatory'
# Internationalization
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
DATETIME_FORMAT = 'Y-m-d H:i:s'
DATE_FORMAT = 'Y-m-d'
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, "static"),
]
|
[
"gtsapelas@epu.ntua.gr"
] |
gtsapelas@epu.ntua.gr
|
e4af5e770fecba4f78342d1be2f34f8368fd576e
|
0f5772dbdb2ada66fe5b44a83047d75ea88fcd7f
|
/2.3_mapper.py
|
2bc39e0b6edbec258321428e9412f9791ee3556a
|
[] |
no_license
|
gtumuluri/W261-HW2
|
95728d6a0eaeb94e7da9f0b2d1085ef2574d678a
|
49e8faf434e0716dbd0f72cd5a07ed009f1ddce7
|
refs/heads/master
| 2021-01-22T09:47:22.280828
| 2015-09-15T23:11:51
| 2015-09-15T23:11:51
| 42,551,685
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,433
|
py
|
#!/usr/bin/python
import sys
import string
transtable = string.maketrans("","")
# Read input from the standard input
for line in sys.stdin:
line = line.strip()
items = line.split('\t')
# If there is no content (as in subject/body in the data), skip
if len(items) < 3:
continue
if items[1] != '0' and items[1] != '1':
continue
# Output a special word/keyword to allow reducer
# to count the number of times a given class occurs.
# Class is the second field in the data, so output
# that by appending it to the 'class_' keyword string
# and a count of 1 for each occurrence.
print '%s\t%s' % ('class_' + items[1], 1)
# If the line read has just subject, use that, otherwise
# catenate with body also and use the entire content.
if len(items) == 3:
content = items[2]
if len(items) == 4:
content = items[2] + ' ' + items[3]
# For each word in content, see if the word is same as user
# chosen word, and then output the word and class to which
# the document the word occurred in belongs to. This way, the
# reducer can compute class frequencies for a given word.
content = content.split()
for word in content:
# Remove punctuation
word = word.translate(transtable, string.punctuation)
if word.find(sys.argv[1]) == 0:
print '%s\t%s' % (word, items[1])
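# Illustrative I/O (added sketch, hypothetical input): given sys.argv[1] = 'spam'
# and the tab-separated input line "123\t1\tspam offer inside", the mapper emits:
#   class_1 1
#   spam    1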
|
[
"gtumuluri@yahoo.com"
] |
gtumuluri@yahoo.com
|
2f1525dfb6e993538b599b1cae1fb1f9e15b01cf
|
facbc61e446670b9e00c4981f569da7a41dfd6af
|
/Project_5/Project_5.py
|
76d55f304a549d42839f531efdbccc15ac666770
|
[] |
no_license
|
abeldavid/Intro_to_Data_Science
|
19855a6cb0fbada2c9125bfdfb6203ae02114063
|
baae9e66eac4891f0595c5a9e60e80a8d3995c17
|
refs/heads/master
| 2021-08-08T02:34:22.244403
| 2017-11-09T11:02:10
| 2017-11-09T11:02:10
| 110,094,614
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,399
|
py
|
# import files
import sys
import pandas as pd
import numpy as np
import seaborn as sns
from sklearn.cross_validation import train_test_split
from sklearn.linear_model import LinearRegression
from sklearn import metrics
def compute_cost(features, values, theta):
"""
Compute the cost of a list of parameters, theta, given a list of features
(input data points) and values (output data points).
"""
m = len(values)
    sum_of_square_errors = np.square(np.dot(features, theta) - values).sum()
cost = sum_of_square_errors / (2*m)
return cost
def Linear_Regression_FN(feature, values,sel):
X = baseball_data[feature]
#X.head()
#print type(X)
print X.shape
y = baseball_data[values]
print y.shape
# Spliting X & y into Training and Testing Set
X_train, X_test, y_train, y_test = train_test_split(X,y,random_state=1)
print("Data split into Training Set and Testing Set")
print("Training Data")
#print X_train.shape
#print y_train.shape
linreg = LinearRegression()
linreg.fit(X_train,y_train)
print linreg.intercept_
print linreg.coef_
zip(feature,linreg.coef_)
print("Testing Data")
y_pred = linreg.predict(X_test)
print("Evaluate")
print("A")
print metrics.mean_absolute_error(y_test,y_pred)
print("B")
print metrics.mean_squared_error(y_test,y_pred)
print("C")
print np.sqrt(metrics.mean_squared_error(y_test,y_pred))
print("D")
compute_r_squared(y_test,y_pred)
def compute_r_squared(data, predictions):
# Write a function that, given two input numpy arrays, 'data', and 'predictions,'
# returns the coefficient of determination, R^2, for the model that produced
# predictions.
#
# Numpy has a couple of functions -- np.mean() and np.sum() --
# that you might find useful, but you don't have to use them.
# YOUR CODE GOES HERE
    SST = ((data - np.mean(data))**2).sum()
    SSRes = ((predictions - data)**2).sum()
    r_squared = 1 - SSRes / SST
print r_squared
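# Worked example (added): for data = [1, 2, 3] and predictions = [1, 2, 3],
# the residual sum of squares is 0 and SST = 2, so r_squared = 1 - 0/2 = 1
# (a perfect fit).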
if __name__ == "__main__":
# Print the version of python used
print (sys.version)
# Path to csv file point to baseball_stats-csv.csv
path_to_csv = "C:/Users/David/Documents/Data/Research/2017/4_Sept_16_Data_Science/Scource_Code/Intro_to_Data_Science/Project_5/Data_Set/baseball_stats-csv.csv"
# Read the csv file as a panda dataframe
baseball_data = pd.read_csv(path_to_csv)
#Other attributes can be added to the .read_csv given below
#header=0, engine = 'python', converters = {'name': str,'handedness': str,'height': np.int64,'weight': np.int64, 'avg': np.float64, 'HR':np.int64})
#baseball_data['height'] = baseball_data['height'].astype(np.int64)
#baseball_data['weight'] = baseball_data['weight'].astype(int64)
#print baseball_data.dtypes
# Plot data
#sns.pairplot(baseball_data, x_vars = ['height','weight'],y_vars = ['HR'], size= 10, aspect = 1, kind= 'reg')
#print type(baseball_data['avg'][0])
# Seperating Features
feature = ['height','weight']
#print type(feature)
# Assigning values
values = ['HR']
Linear_Regression_FN(feature , values ,1)
|
[
"abeldavid.89@gmail.com"
] |
abeldavid.89@gmail.com
|
2fdb8ed35342f5700d9be03da1b463bf6807f021
|
88996c9a964805651d8944e1d04a3183c7d48a67
|
/canvasapi/grading_standard.py
|
31f5d0d2bf0a070d02dccab03a64a26a4b9bdec8
|
[
"MIT"
] |
permissive
|
aileenpongnon/canvasapi
|
535ed7103873c00028d6b2fb79e9804fc855c642
|
c6b825c0f541eeb650a86a0dee3268092273cbbe
|
refs/heads/master
| 2022-11-27T01:52:50.010858
| 2020-07-06T20:21:08
| 2020-07-06T20:21:08
| 277,850,050
| 0
| 0
|
MIT
| 2020-07-07T15:11:18
| 2020-07-07T15:11:17
| null |
UTF-8
|
Python
| false
| false
| 322
|
py
|
from __future__ import absolute_import, division, print_function, unicode_literals
from six import python_2_unicode_compatible
from canvasapi.canvas_object import CanvasObject
@python_2_unicode_compatible
class GradingStandard(CanvasObject):
def __str__(self):
return "{} ({})".format(self.title, self.id)
|
[
"Jonathan.Guilbe@ucf.edu"
] |
Jonathan.Guilbe@ucf.edu
|
d19c20e21632cdafae4ce7033d36fc6e5226085c
|
e1859b43a4f4b0ff3dc23bf038c0b0172b386754
|
/zipdl/algos/multi_factor_training.py
|
9534e2afed1f9e99ab63df6c52aa6f9bc8c6ce41
|
[] |
no_license
|
FrankPSch/Deep-Reinforcement-Learning-in-Zipline
|
811d213a4dc88e3edb119ea9e4fe83675dcec843
|
c41bc77808e1e32f092b410d49669a6fd92fb00e
|
refs/heads/master
| 2020-03-24T04:56:44.866500
| 2018-05-01T22:13:37
| 2018-05-01T22:13:37
| 142,469,788
| 1
| 0
| null | 2018-07-26T16:58:35
| 2018-07-26T16:58:35
| null |
UTF-8
|
Python
| false
| false
| 10,028
|
py
|
from zipdl.envs import dynamic_beta_env as dbenv
ENV = dbenv.Dynamic_beta_env(dbenv.TRADING_START)
'''
ALGO BELOW
'''
from collections import deque
import numpy as np
import pandas as pd
from numexpr import evaluate
import zipline
from zipline.pipeline.factors.technical import RSI
from zipline.finance import commission, slippage
from zipline.pipeline import Pipeline, CustomFactor
from zipline.pipeline.filters import StaticAssets
from zipline.data import bundles
from zipline.api import *
from zipline.pipeline.data import USEquityPricing
from zipline.utils.math_utils import nanmean, nanstd
from zipline.finance.slippage import FixedSlippage
import empyrical
from zipdl.utils import utils
import datetime as dt
from sklearn.preprocessing import minmax_scale
# Weeks between a rebalance
REBALANCE_PERIOD = 4
# Lookback window, in days, for Momentum (Bollinger Bands and RSI) factor
MOMENTUM_LOOKBACK = 30
# If true, will switch from mean-reverting to trend-following momentum
TREND_FOLLOW = True
# Upper/lower SD's required for Bollinger Band signal
BBUPPER = 1.5
BBLOWER = 1.5
BBSTD = 1.5
NORMALIZE_VALUE_SCORES = False
# Upper/lower scores required for RSI signal
RSI_LOWER = 30
RSI_UPPER = 70
# Percentile in range [0, 1] of stocks that are shorted/bought
SHORTS_PERCENTILE = 0.01
LONGS_PERCENTILE = 0.01
# Constraint Parameters
MAX_GROSS_LEVERAGE = 1.0
DOLLAR_NEUTRAL = False
# If True, will screen out companies that have earnings releases between rebalance periods
AVOID_EARNINGS = True
# If False, shorts won't be ordered
ALLOW_SHORTS = True
# If True, will cut positions causing losses of LOSS_THRESHOLD or more
CUT_LOSSES = False
# Positions losing this much or more as a fraction of the investment will be cut if CUT_LOSSES is True
LOSS_THRESHOLD = 0.03
# Whether or not to print pipeline output stats. For backtest speed, turn off.
PRINT_PIPE = False
BATCH_SIZE = 16
ACTION = 0
#=================util=============================
#Throws out tickers not found in quandl-quantopian data (mostly tickers with no vol)
def safe_symbol(ticker):
try:
x = symbol(ticker)
return x
except:
return None
def safe_symbol_convert(tickers):
filtered_list = list(filter(None.__ne__, [safe_symbol(ticker) for ticker in tickers]))
return filtered_list
def universe_transform(date):
universe = utils.get_current_universe(date)
tradable_assets = safe_symbol_convert(universe)
return tradable_assets
#==================PRIMARY==========================
def initialize_environment(agent, trading_start, trading_day=2,):
assert 1 <= trading_day <= 5
def initialize(context):
context.start_date = trading_start
context.agent = agent
context.num_trials = 0
context.action = ACTION
context.values = deque(maxlen=21)
set_commission(commission.PerShare(cost=0.005, min_trade_cost=1.00))
set_slippage(slippage.FixedSlippage(0.00))
context.universe = universe_transform('2018-01-01')
schedule_function(rebalance_portfolio, date_rules.week_start(days_offset=trading_day), time_rules.market_open(hours=1))
#schedule_function(cancel_open_orders, date_rules.every_day(), time_rules.market_open())
schedule_function(prime_pipeline, date_rules.week_start(days_offset=trading_day-1), time_rules.market_close())
context.Factor_weights = ENV.current_node.weights
context.weights = None
context.run_pipeline = True #We want to run stock selector immediately
context.weeks_since_rebalance = -1
attach_pipeline(make_pipeline(context), 'my_pipeline')
return initialize
#schedule trading monthly
#schedule stop loss/take gain daily
def handle_data(context, data):
#Daily function
#context.universe = universe_transform(get_datetime())
pass
def rebalance_portfolio(context, data):
# rebalance portfolio
close_old_positions(context, data)
total_weight = np.sum(context.weights.abs())
weights = context.weights / total_weight
for stock, weight in weights.items():
if not np.isnan(weight):
result = order_target_percent(stock, weight)
def before_trading_start(context, data):
context.values.append(context.portfolio.portfolio_value)
if not context.run_pipeline:
return
date = get_datetime().replace(tzinfo=None)
if (date - context.start_date.replace(tzinfo=None)).days > 12:
print('training on {}'.format(date))
returns = pd.Series(list(context.values)).pct_change()
sortino_reward = empyrical.sortino_ratio(returns, period='monthly')
context.agent.last_score = sortino_reward
ENV.update_state(date)
print(ENV.state, ENV.state.shape, ENV.prev_state, ENV.prev_state.shape)
#print(context.num_trials, sortino_reward)
context.num_trials += 1
if ENV.prev_state.shape[0] == 4:
context.agent.remember(ENV.prev_state, context.action, sortino_reward, ENV.state, False)
new_action = context.agent.act(ENV.state)
context.Factor_weights = ENV.step(new_action)
context.action = new_action
if context.num_trials > BATCH_SIZE:
print('replaying')
context.agent.replay(BATCH_SIZE)
context.num_trials = 0
context.run_pipeline = False
def zero_one_scale(array):
return (array - np.nanmin(array))/(np.nanmax(array) - np.nanmin(array))
def compute(today, symbols):
#for verification
tickers = [symbol.symbol for symbol in symbols]
values = np.array(utils.get_fundamentals(today, 'VALUE', tickers))
#print(values)
#Scaling
values = zero_one_scale(values)
out = pd.Series(values, symbols)
return out
value_factor = compute(date, context.universe).to_frame()
context.output = pipeline_output('my_pipeline')
context.output['momentum_score'] = zero_one_scale(context.output['momentum_score'])
context.output = context.output.join(value_factor).dropna()
# Rank each column of pipeline output (higher rank is better). Then create composite score based on weighted average of factor ranks
individual_ranks = context.output.rank()
individual_ranks *= context.Factor_weights
ranks = individual_ranks.sum(axis=1).dropna().sort_values() + 1
ranks = ranks.dropna()
number_shorts = int(SHORTS_PERCENTILE*len(ranks))
number_longs = int(LONGS_PERCENTILE*len(ranks))
if number_shorts == 1 or number_longs == 1:
number_shorts = number_longs = 0
if (number_shorts + number_longs) > len(ranks):
ratio = float(number_longs)/number_shorts
number_longs = int(ratio*len(ranks)) - 1
number_shorts = len(ranks) - number_longs
shorts = 1.0 / ranks.head(number_shorts)
shorts /= sum(shorts)
shorts *= -1
longs = ranks.tail(number_longs)
longs /= sum(longs)
if ALLOW_SHORTS:
context.weights = shorts.append(longs)
else:
context.weights = longs
context.weights = context.weights.dropna()
# log.info(context.weights)
#==================UTILS==========================
def cancel_open_orders(context, data):
for stock in get_open_orders():
for order in get_open_orders(stock):
cancel_order(order)
def close_old_positions(context, data):
to_be_closed = pd.Series()
for stock in context.portfolio.positions:
if stock not in context.weights:
to_be_closed.set_value(stock, 0.0)
context.weights = to_be_closed.append(context.weights)
#===================FACTORS=========================
DB_FACTORS_USED = ['Shares_Outstanding', 'EBITDA', 'Short term debt', 'Long Term Debt', 'Cash and Cash Equivalents',
'Total Assets', 'Total Liabilities', 'Free Cash Flow']
class MomentumFactor(CustomFactor):
"""
Uses Bollinger Bands and RSI measures to determine whether or not a stock
should be bought (return 1), sold (return -1), or if there is no signal
(return 0). For a signal, both metrics have to indicate the same signal
(e.g., price below lower Bollinger Band and RSI below RSI_LOWER)
"""
inputs = [USEquityPricing.close]
window_length = MOMENTUM_LOOKBACK+10
def compute(self, today, asset_ids, out, close):
diffs = np.diff(close, axis=0)
ups = nanmean(np.clip(diffs, 0, np.inf), axis=0)
downs = abs(nanmean(np.clip(diffs, -np.inf, 0), axis=0))
rsi = np.zeros(len(out))
evaluate(
"100 - (100 / (1 + (ups / downs)))",
local_dict={'ups': ups, 'downs': downs},
global_dict={},
out=rsi,
)
difference = BBSTD * nanstd(close, axis=0)
middle = nanmean(close, axis=0)
upper = middle + difference
lower = middle - difference
for i in range(len(out)):
out[i] = 0
if rsi[i] < RSI_LOWER:
out[i] += RSI_LOWER / rsi[i]
elif rsi[i] > RSI_UPPER:
out[i] -= rsi[i] / RSI_UPPER
prices = close[:, i]
if prices[-1] < lower[i]:
out[i] += lower[i] / prices[-1]
elif prices[-1] > upper[i]:
out[i] -= prices[-1] / upper[i]
if TREND_FOLLOW:
out[i] *= -1
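# Worked example (added): with RSI_LOWER = 30, a stock with rsi = 20 whose last
# price sits 10% below the lower Bollinger band scores roughly
# 30/20 + lower/price = 1.5 + 1.11 = 2.61 (a buy signal), which is then
# negated when TREND_FOLLOW is True.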
#============================Pipeline Stuff=============================
def make_pipeline(context):
screen = StaticAssets(context.universe)
momentum_factor = MomentumFactor()
pipe = Pipeline(
columns = {
'momentum_score': momentum_factor,
}
)
pipe.set_screen(screen)
return pipe
def prime_pipeline(context, data):
context.weeks_since_rebalance += 1
if context.weeks_since_rebalance >= REBALANCE_PERIOD:
context.run_pipeline = True
context.weeks_since_rebalance = 0
|
[
"austenzhu@berkeley.edu"
] |
austenzhu@berkeley.edu
|
f3f440672d73ebb13d5e82898fe0999869f4f736
|
c9cffaf983e1ba30e5b1520c1d75d9b08bd018d5
|
/bcidTsFixer.py
|
8fd7142cd26bdb251382a245851a076fd6f2981d
|
[
"MIT"
] |
permissive
|
biocodellc/geome-db
|
39713a274cf16c8140a9eb849e282caf0060d1df
|
62d9f994f2f80823dd8b8b2afd2e6ab2f57f54ac
|
refs/heads/master
| 2023-01-22T18:14:26.456380
| 2023-01-16T20:21:30
| 2023-01-16T20:21:30
| 82,691,229
| 3
| 0
| null | 2021-07-08T17:40:28
| 2017-02-21T14:49:15
|
Java
|
UTF-8
|
Python
| false
| false
| 1,705
|
py
|
#!/usr/bin/python
import sys, getopt, csv
def createBcidTsUpdateStatements(inputFile, outputFile, resourceType):
f = open(inputFile, 'r')
out_f = open(outputFile, 'w')
reader = csv.reader(f)
headers = next(reader, None)
idIndex = headers.index('_id')
tsIndex = headers.index('_created')
ercWhatIndex = headers.index('erc.what')
dcTypeIndex = headers.index('dc.type')
for row in reader:
stmt = ''
if resourceType:
if row[ercWhatIndex] == resourceType or row[dcTypeIndex] == resourceType:
stmt = "update bcids set ts = FROM_UNIXTIME('" + row[tsIndex] + "') where binary identifier = '" + row[idIndex] + "' and resourceType = '" + resourceType + "';\n"
else:
stmt = "update bcids set ts = '" + row[tsIndex] + "' where binary identifier = '" + row[idIndex] + "';\n"
if stmt:
out_f.write(stmt)
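# Example output (added sketch, hypothetical values): for a row with
# _created = 1487687355, _id = 'ark:/21547/R2', and resourceType 'Event',
# the statement written is:
#   update bcids set ts = FROM_UNIXTIME('1487687355') where binary identifier = 'ark:/21547/R2' and resourceType = 'Event';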
def main(argv):
inputfile = ''
outputfile = ''
resourceType = None
try:
        opts, args = getopt.getopt(argv, "hi:o:r:", ["ifile=", "ofile=", "resourceType="])
except getopt.GetoptError:
print('test.py -i <inputfile> -o <outputfile> -r <resourceType>')
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
print('test.py -i <inputfile> -o <outputfile> -r <resourceType>')
sys.exit()
elif opt in ("-i", "--ifile"):
inputfile = arg
elif opt in ("-o", "--ofile"):
outputfile = arg
elif opt in ("-r", "--resourceType"):
resourceType = arg
createBcidTsUpdateStatements(inputfile, outputfile, resourceType)
if __name__ == "__main__":
main(sys.argv[1:])
|
[
"rj@rjewing.com"
] |
rj@rjewing.com
|
7b7afb4c5fcd5aacdd37975f825646cab4d19e80
|
ed722df964bb2dd7bba2c021486769e4984a2486
|
/work/Clashes/test_collecting_overlap_data.py
|
ebfb8dd8b91c4d097e84c1c7fbf4746efe460fe5
|
[
"MIT"
] |
permissive
|
youdar/work
|
a89e2b75c14e11266378873d6a39d60536573e0a
|
d5f32015e7be1d58ede255eeadbd1d12acb3f270
|
refs/heads/master
| 2021-01-02T08:51:11.142504
| 2016-01-09T08:30:49
| 2016-01-09T08:30:49
| 15,275,201
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,061
|
py
|
from __future__ import division
from collecting_overlap_data import Collecting_overlaps_data
import unittest
import sys
import os
csv_data = '''\
PDB ID,Macro mol. overlaps,Sym. overlaps,All overlaps,Macro mol. overlaps per 1000 atoms,Sym. overlaps per 1000 atoms,All overlaps per 1000 atoms,year.,experiment type
# some remarks
1a18,29,5,35,13.9,2.4,16.5,1997,X-RAY DIFFRACTION
xxxx,9,15,85,17.22,0,0,2001,NMR
yyyy,-1,-1,-1,-1,2.4,16.5,2010,X-RAY DIFFRACTION
'''
class Collect_data(unittest.TestCase):
def setUp(self):
""" set up paths """
        self.current_path = os.getcwd()
self.file_to_delete = []
if sys.platform.startswith('win'):
self.test_folder = r'C:\Phenix\Dev\Work\work\Clashes\junk'
else:
self.test_folder = '/net/cci-filer2/raid1/home/youval/Work/work/Clashes/junk'
os.chdir(self.test_folder)
def test_read_csv_file(self):
print sys._getframe().f_code.co_name
fn = 'test_file_csv_1.txt'
self.file_to_delete.append(fn)
open(fn,'w').write(csv_data)
obj = Collecting_overlaps_data()
obj.data_file_name = os.path.join(self.test_folder,fn)
self.assertEqual(len(obj.read_csv_data()),3)
def test_writing_data_files(self):
print sys._getframe().f_code.co_name
# create log files
data = ['1a18,29,5,35,13.9,2.4,16.5,1997,X-RAY DIFFRACTION',
'xxxx,9,15,85,17.22,0,0,2001,NMR',
'yyyy,-1,-1,-1,-1,2.4,16.5,2010,X-RAY DIFFRACTION']
for d in data:
fn = 'log_' + d[:4] + '.pdb'
open(fn,'w').write(d)
self.file_to_delete.append(fn)
#
obj = Collecting_overlaps_data()
self.assertFalse(os.path.isfile(obj.clean_dict_file_name))
self.assertFalse(os.path.isfile(obj.clean_data_file_name))
obj.queue_data_path = self.test_folder
obj.get_test_data()
self.file_to_delete.append('test_data.txt')
self.file_to_delete.append('test_clean_dict')
self.file_to_delete.append('test_clean_data')
self.assertTrue(os.path.isfile(obj.clean_dict_file_name))
self.assertTrue(os.path.isfile(obj.clean_data_file_name))
#
obj2 = Collecting_overlaps_data()
obj2.queue_data_path = self.test_folder
obj2.get_test_data()
        self.assertEqual(len(obj2.data), 3)
        self.assertEqual(len(obj2.clean_data), 2)
def tearDown(self):
""" delete files created in during testing"""
if self.file_to_delete:
for fn in self.file_to_delete:
if os.path.isfile(fn): os.remove(fn)
        os.chdir(self.current_path)
def run_selected_tests():
""" Run selected tests
1) List in "tests" the names of the particular test you want to run
2) Comment out unittest.main()
3) Un-comment unittest.TextTestRunner().run(run_selected_tests())
"""
tests = ['test_writing_data_files']
suite = unittest.TestSuite(map(Collect_data, tests))
return suite
if __name__ == '__main__':
try:
pdb_dir = os.environ["PDB_MIRROR_PDB"]
except KeyError:
pdb_dir = ''
# use for individual tests
# unittest.TextTestRunner().run(run_selected_tests())
# Use to run all tests
unittest.main()
|
[
"youval@gmail.com"
] |
youval@gmail.com
|
c77ec8fe4c44783410602adc3c02d0a84db8aa19
|
3798b3d5dc3c2e4f5d7456b7e46af17e1a371741
|
/largest_subarray_sum.py
|
0a86bb2863deda96424f32a65b15c05fe0f36ec0
|
[] |
no_license
|
divya814/Hackerrank
|
f677a42a5c8faf8f1641db8a6266dfe0c3909afb
|
8283e2d0a65063956ceac41eb1e665dd7ab9393f
|
refs/heads/main
| 2023-04-22T07:44:04.785564
| 2021-05-08T16:54:26
| 2021-05-08T16:54:26
| 316,585,367
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 843
|
py
|
def maxSubArraySum(a,size):
max_so_far = a[0]
max_ending_here = 0
for i in range(0, size):
max_ending_here = max_ending_here + a[i]
if max_ending_here < 0:
max_ending_here = 0
# Do not compare for all elements. Compare only
# when max_ending_here > 0
elif (max_so_far < max_ending_here):
max_so_far = max_ending_here
return max_so_far
# OR
def maxSubArraySum(a,size):
max_so_far =a[0]
curr_max = a[0]
for i in range(1,size):
curr_max = max(a[i], curr_max + a[i])
max_so_far = max(max_so_far,curr_max)
return max_so_far
# Driver function to check the above function
a = [-2, -3, 4, -1, -2, 1, 5, -3]
print"Maximum contiguous sum is" , maxSubArraySum(a,len(a))
|
[
"noreply@github.com"
] |
divya814.noreply@github.com
|
c1884d4ac7ddf4bc39cd8cb34d8e3a929e0d66cc
|
5d570acc907ca6275b841f1d86de46829b1bdebd
|
/duobei_sdk/exceptions.py
|
c4af77f27c1b3b439962699777136abc1971dad3
|
[
"MIT"
] |
permissive
|
duoduo369/duobei_sdk
|
d5fbc27540116c85e17c25b2d5c98571ed3e604b
|
cdffdae1a7248669a06891474e129c2555ce3ea3
|
refs/heads/master
| 2022-07-24T11:44:57.801809
| 2019-03-02T03:00:35
| 2019-03-02T03:00:35
| 172,877,619
| 0
| 0
|
MIT
| 2019-10-22T00:03:20
| 2019-02-27T08:44:51
|
Python
|
UTF-8
|
Python
| false
| false
| 202
|
py
|
# -*- coding: utf-8 -*-
class DuobeiSDKException(Exception):
pass
class DuobeiSDKInvalidParamException(DuobeiSDKException):
pass
class DuobeiSDKServerException(DuobeiSDKException):
pass
|
[
"duoduo3369@gmail.com"
] |
duoduo3369@gmail.com
|
b1e35cff67071273274158b67e34969a2796b4b1
|
86934d22619aabb41da309a63638781ec4b31512
|
/module2/fork_demo.py
|
95acfa9042b22d7bee5c8bc42ff9781c2d3376f1
|
[] |
no_license
|
yanzv/python-pentesters
|
3986ed87aea72580b201c98c4a13c5f21d6ca585
|
1fda3bb1298aaa540face932b9a0a0e93f970e75
|
refs/heads/master
| 2022-12-30T11:52:25.290322
| 2020-09-23T11:40:57
| 2020-09-23T11:40:57
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 574
|
py
|
#!/usr/bin/env python3
import os
def child_process():
print(f'I am the child process and PID is {os.getpid()}')
print('child is exiting')
def parent_process():
print(f'I am the parent process with PID : {os.getpid()}')
child_pid = os.fork()
print('here')
if child_pid == 0:
# we are inside the child process
child_process()
else:
# we are inside the parent process
print('we are inside the parent process')
print(f'Our child has the PID: {child_pid}')
while True:
pass
parent_process()
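# Note (added): os.fork() returns 0 in the child and the child's PID in the
# parent, so the if-branch above runs child_process() only in the forked copy
# while the parent falls into the busy-wait loop.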
|
[
"yan.zverev@twosixlabs.com"
] |
yan.zverev@twosixlabs.com
|
d42e0583769db834a0be290c9d0fb655a1d72e33
|
831a7ab57d30f41677e5b6333ed9422f8e418056
|
/stats_recorder.py
|
6557c442f45a28311d710b96b4d8ba53036b8eab
|
[] |
no_license
|
Eric-D-Stevens/multi_GPU_DataParallel_Tasks
|
9e1302ca5bf037940fe2f993ffde9f853ebe28cc
|
0e4807dda350c2c4f0936491c2f556b1b0680dd5
|
refs/heads/master
| 2021-07-25T14:53:22.485333
| 2021-05-08T03:33:21
| 2021-05-08T03:33:21
| 248,920,958
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,281
|
py
|
from train_conv import train_conv
from train_linear import train_linear
import json
def get_stats(training_function, epochs:int, dev_ids:list, batch_sizes:list):
stats = {}
print(training_function, dev_ids, batch_sizes)
for i in range(len(dev_ids)):
cur_dev_ids = dev_ids[:i+1]
print('\tCur Devs:', cur_dev_ids)
stats[len(cur_dev_ids)] = {}
for batch_size in batch_sizes:
print('\t\tBatch Size:', batch_size)
stats[len(cur_dev_ids)][batch_size] = training_function(epochs, batch_size, cur_dev_ids)
print(stats[len(cur_dev_ids)][batch_size])
return stats
epochs = 20
dev_ids = [0,1,2,3]
batch_sizes = [10,20,30,50,70,100,150,200,300,400,500,600]
if __name__ == '__main__':
linear_stats = get_stats(train_linear, epochs=epochs, dev_ids=dev_ids, batch_sizes=batch_sizes)
conv_stats = get_stats(train_conv, epochs=epochs, dev_ids=dev_ids, batch_sizes=batch_sizes)
print(linear_stats)
linear_json = json.dumps(linear_stats, indent=4)
with open('linear.json', 'w') as f:
f.write(linear_json)
print(conv_stats)
conv_json = json.dumps(conv_stats, indent=4)
with open('conv.json', 'w') as f:
f.write(conv_json)
|
[
"noreply@github.com"
] |
Eric-D-Stevens.noreply@github.com
|
775b6d1787b44c591555b4920d8378aa3bbe8a60
|
1fea2f6cdb70f07070cb12108bc44d64ee194287
|
/sistema/pdf/PyPDF2/__init__.py
|
106550e90f02332eb09ce4a55cd37cc271a36c83
|
[] |
no_license
|
JovannyRch/sistema_inventario
|
dcd5c84100ddee1fa9d71c41f69246f31e9186d7
|
0b28285e55f4a0e3a8c828573d179f9cb87dfd3c
|
refs/heads/main
| 2023-06-22T21:28:52.725753
| 2021-07-24T14:22:05
| 2021-07-24T14:22:05
| 388,941,580
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 215
|
py
|
from .pdf import PdfFileReader, PdfFileWriter
from .merger import PdfFileMerger
from .pagerange import PageRange, parse_filename_page_ranges
from ._version import __version__
__all__ = ["pdf", "PdfFileMerger"]
|
[
"jovannyrch@gmail.com"
] |
jovannyrch@gmail.com
|
4e9fa6e4cf9984c883a29ade772f7dcf70428533
|
9b9e854539452755e6860e524fa08b74ec0c7212
|
/venv/bin/pip
|
7a118d6e981afb6be8b9d0d1c62bc3d832f3dcbf
|
[] |
no_license
|
IshaGupta18/Eventers
|
420e9b20088fa721780f662c322e95ba1b9eee2e
|
99e691a73588c7ca42c2144fb7656e79a745f1c2
|
refs/heads/master
| 2022-05-19T18:42:07.772156
| 2020-04-28T20:06:50
| 2020-04-28T20:06:50
| 257,850,773
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 233
|
#!/home/isha/precog/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from pip._internal import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
|
[
"ishagupta1828@gmail.com"
] |
ishagupta1828@gmail.com
|
|
656130858e679d0fa9223d240ac10fab10590443
|
b424a13f032d5a607e6df4dd78bc47ad1d06a147
|
/scipy/optimize/tests/test_hungarian.py
|
339e10cc3800845d9fae5ef8673b4d3f970bcd73
|
[] |
no_license
|
EnjoyLifeFund/macSierra-py36-pkgs
|
1e7eeb9b55415da6eb12465d67730d76e9cc619a
|
0ac677fbbfa7b6d8c527fe2c759ba05117b07fd2
|
refs/heads/master
| 2021-01-20T10:23:50.044019
| 2017-09-05T02:53:26
| 2017-09-05T02:53:26
| 90,333,987
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 106
|
py
|
../../../../../../Cellar/scipy/0.19.1_1/lib/python3.6/site-packages/scipy/optimize/tests/test_hungarian.py
|
[
"raliclo@gmail.com"
] |
raliclo@gmail.com
|
ff3053a7a947771b9febb17d1aab1d88e658f525
|
f96a33a826e085d367bead1473a2077294be5c00
|
/Quicksort.py
|
8ad03c0554ef4a651514281fc030d8d9aac28e39
|
[] |
no_license
|
hao6699/sort_python
|
56c5a4df5f3e0cf4f47dfd1ec93363ca873c3265
|
aef363020a7b30f340f2ce8d80728b2bba54afbd
|
refs/heads/master
| 2020-03-31T04:21:58.980353
| 2018-10-19T11:26:11
| 2018-10-19T11:26:11
| 151,902,404
| 0
| 0
| null | 2018-10-07T03:34:24
| 2018-10-07T03:22:46
| null |
UTF-8
|
Python
| false
| false
| 419
|
py
|
A = [1,4,10,14,7,9,3,2,8,16]
def Quicksort(a, i, j):
    if i < j:
        dp = Partition(a, i, j)    # pivot lands at its final index dp
        Quicksort(a, i, dp-1)      # sort the left part
        Quicksort(a, dp+1, j)      # sort the right part

def Partition(a, left, right):
    temp = a[right]                # pivot: the rightmost element
    i = left
    j = left - 1                   # boundary of the "less than pivot" region
    while i < right:
        if a[i] < temp:
            j += 1
            a[i], a[j] = a[j], a[i]
        i += 1
    a[j+1], a[right] = a[right], a[j+1]   # put the pivot in its final place
    return j + 1
Quicksort(A,0,len(A)-1)
print(A)
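# Expected output (added): [1, 2, 3, 4, 7, 8, 9, 10, 14, 16]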
|
[
"noreply@github.com"
] |
hao6699.noreply@github.com
|
7e9fc3608f232ee7c7bf4508343f95abc6d50391
|
f2992a03507a04117d21c3a5c77f75408e1e69ff
|
/app/models/googlecse.py
|
c1507327ddde2f06786443d6d9152c1571a5580e
|
[] |
no_license
|
rmartind/Okhrana
|
bac816530e43389c5661c7363ca0fb112a096f06
|
22dd19f745fe34820df14b655ecda214d3e5cfde
|
refs/heads/master
| 2020-03-25T16:23:16.510612
| 2018-08-09T23:31:40
| 2018-08-09T23:31:40
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 416
|
py
|
import requests
class googlecse(object):
def __init__(self, key):
self.base = 'https://www.googleapis.com/customsearch/v1/'
self.key = '#AIzaSyB-FwHME13Ss5LWTMmJ5Dq6PJo9ZRpRAAU'
def email(self, email):
print('Google email search')
def domain(self, domain):
"""Discover subdomains and sensitive info
        using dorks (e.g. inurl:, site:) via seclists
"""
|
[
"rmdavis@buffalo.edu"
] |
rmdavis@buffalo.edu
|
82b440f437db59514435074fd465555ff50caca4
|
043e26c11691e0c471eeefabb6ab8819e9acdca1
|
/Unit_1/transmittingImages.py
|
38808896aac6002621a6e547e331cc78b1ac3533
|
[] |
no_license
|
DrInnovatio/Data-Analysis-and-Visualisation-with-Python
|
5a6a6838e73c734e09be6d9080698735aa89fa2e
|
53fb20b7d92aa9539885fe9232e83254e06894fc
|
refs/heads/master
| 2023-03-11T12:11:45.697770
| 2021-03-03T13:44:09
| 2021-03-03T13:44:09
| 320,941,732
| 0
| 0
| null | 2021-03-03T13:44:09
| 2020-12-12T23:03:30
|
Python
|
UTF-8
|
Python
| false
| false
| 844
|
py
|
import cv2
import numpy as np
import matplotlib.pyplot as plt
# Interpolation
image = cv2.imread('hair.jpg')  # load in color; cvtColor(BGR2RGB) below requires a 3-channel image
plt.imshow(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))
plt.show()
expand = cv2.resize(image, None, fx=2.0, fy=2.0, interpolation=cv2.INTER_CUBIC)
plt.imshow(cv2.cvtColor(expand, cv2.COLOR_BGR2RGB))
plt.show()
shrink = cv2.resize(image, None, fx=0.8, fy=0.8, interpolation=cv2.INTER_AREA)
plt.imshow(cv2.cvtColor(shrink, cv2.COLOR_BGR2RGB))
plt.show()
height, width = image.shape[:2]
M = np.float32([[1, 0, 50], [0, 1, 10]])
dst = cv2.warpAffine(image, M, (width, height))
plt.imshow(cv2.cvtColor(dst, cv2.COLOR_BGR2RGB))
plt.show()
# Image Rotation
M_2 = cv2.getRotationMatrix2D((width / 2, height / 2), 90, 0.5)
dst = cv2.warpAffine(image, M_2, (width, height))
plt.imshow(cv2.cvtColor(dst, cv2.COLOR_BGR2RGB))
plt.show()
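# Note (added): the affine matrix M = [[1, 0, 50], [0, 1, 10]] above translates
# the image 50 pixels right and 10 pixels down; getRotationMatrix2D builds a
# combined rotation (90 degrees) and scaling (0.5) about the image centre.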
|
[
"turing1723@gmail.com"
] |
turing1723@gmail.com
|
47639437eaf598e0f0571e7a1279625f12065282
|
7a894308db9182cc07687fceacd4f82dcacb8691
|
/agregationspatiale.py
|
b79ebcbb742cbc4c538e939ac971e8e074cacba1
|
[] |
no_license
|
Julie-Bouchard/Projet
|
539dc91be0d641e87c25fe90c136e8bcb01783fa
|
59bdc55d7b0afc0127c6b16fdf898649fc97b45a
|
refs/heads/master
| 2023-05-03T22:43:11.801503
| 2021-05-20T20:27:47
| 2021-05-20T20:27:47
| 356,306,325
| 0
| 0
| null | 2021-04-14T20:17:58
| 2021-04-09T14:53:25
| null |
UTF-8
|
Python
| false
| false
| 4,510
|
py
|
from operation import Operation
from transformation import Transformation
from donnees import Donnees
from chargementdedonneesjson import ChargementDeDonneesjson
from selectionvariablecsv import SelectionVariablecsv
class AgregationSpatiale_reg_dep(Transformation):
def __init__(self,echelle, Liste_agregat_bis, emplacement_json):
self.echelle=echelle
self.Liste_agregat_bis=Liste_agregat_bis
self.emplacement_json=emplacement_json
def transforme(self,table):
        #Preliminary step: build a dictionary that maps each departement to its region
Tableau1=ChargementDeDonneesjson(self.emplacement_json)
Tableau2=ChargementDeDonneesjson.transforme(Tableau1)
res={}
for i in Tableau2["Academie"]:
res[i["Code_Dpt"]] = i["Region"]
        #Before aggregating, build an intermediate table where the departement
        #column is replaced by the name of the associated region
#departement1=SelectionVariablecsv(self.table,self.echelle)
#departement2=SelectionVariablecsv.transforme(departement1)
departement=[]
s=table.noms_colonnes.index(self.echelle)
for i in range (0, len(table.lignes)):
departement.append(table.lignes[i][s])
region=[]
for depp in departement:
region.append(res['{}'.format(depp)])
'''
new=[[]*len(self.table.noms_colonnes)]*(len(self.table.lignes)+1)
for i in range (len(self.table.lignes)+1):
print(new)
for j in range(len(self.table.noms_colonnes)):
new[i].append(self.table.lignes[i-1][j])
'''
new=[table.noms_colonnes]
        #to rework: copy element by element rather than by list reference
new+=[table.lignes[i] for i in range (0, len(table.lignes))]
'''
new=[[None]*len(self.table.noms_colonnes)]*(len(self.table.lignes)+1)
print(new)
for i in range (0,len(self.table.lignes)):
for j in range (len(self.table.noms_colonnes)):
new[i+1][j]=self.table.lignes[i][j]
#for j in range(len(self.table.noms_colonnes)):
# new[0][j]=self.table.noms_colonnes[j]
print(new)
'''
j=new[0].index(self.echelle)
new[0][j]="region"
for i in range(1,len(table.lignes)+1):
new[i][j]=region[i-1]
data=Donnees(new[0],new[1:])
        #Now build the final table where the aggregation is performed
Table_agre=Donnees([],[[]])
Table_agre.noms_colonnes=data.noms_colonnes
Table_agre.enlever_ligne(0)
        #Two lists that will be useful:
        #Complement is the list of indices of the variables that are not aggregation keys
        #Present records whether a region (plus the other aggregation keys) is already in the final table
        #If it is, we just apply the aggregation function to the other variables
Present=[]
Complement=[]
for k in range(len(data.noms_colonnes)):
if k!=data.noms_colonnes.index("region") and k not in self.Liste_agregat_bis:
Complement.append(k)
for k in range(len(data.lignes)):
L=[data.lignes[k][0]]
for h in range(len(self.Liste_agregat_bis)):
L.append(data.lignes[k][self.Liste_agregat_bis[h]])
if L not in Present :
Table_agre.ajouter_ligne(data.lignes[k])
Present.append(L)
else:
indice=None
                #First, find which row of Table_agre contains L
for f in range(len(Table_agre.lignes)):
Verif=[Table_agre.lignes[f][0]]
for q in self.Liste_agregat_bis:
Verif.append(Table_agre.lignes[f][q])
if L==Verif:
indice=f
for i in Complement:
Table_agre.lignes[indice][i]=str(int(Table_agre.lignes[indice][i])+int(data.lignes[k][i]))
return Table_agre
|
[
"noreply@github.com"
] |
Julie-Bouchard.noreply@github.com
|
4e71f4da2940fa657308122b69f4e0dc37b4ab01
|
3e5ecad4d2f681f2f4f749109cc99deea1209ea4
|
/ttffttff/Category2/starter2_answer.py
|
d203f6247ef241b35499fe03df058b488d2ddaa9
|
[] |
no_license
|
SunghoonSeok/Study
|
f41ede390079037b2090e6df20e5fb38f2e59b8f
|
50f02b9c9bac904cd4f6923b41efabe524ff3d8a
|
refs/heads/master
| 2023-06-18T06:47:55.545323
| 2021-07-05T00:47:55
| 2021-07-05T00:47:55
| 324,866,762
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,078
|
py
|
# ======================================================================
# There are 5 questions in this exam with increasing difficulty from 1-5.
# Please note that the weight of the grade for the question is relative
# to its difficulty. So your Category 1 question will score significantly
# less than your Category 5 question.
#
# Don't use lambda layers in your model.
# You do not need them to solve the question.
# Lambda layers are not supported by the grading infrastructure.
#
# You must use the Submit and Test button to submit your model
# at least once in this category before you finally submit your exam,
# otherwise you will score zero for this category.
# ======================================================================
#
# Basic Datasets Question
#
# Create a classifier for the Fashion MNIST dataset
# Note that the test will expect it to classify 10 classes and that the
# input shape should be the native size of the Fashion MNIST dataset which is
# 28x28 monochrome. Do not resize the data. Your input layer should accept
# (28,28) as the input shape only. If you amend this, the tests will fail.
#
import tensorflow as tf
def solution_model():
fashion_mnist = tf.keras.datasets.fashion_mnist
# YOUR CODE HERE
(x_train, y_train), (x_test, y_test) = fashion_mnist.load_data()
x_train = x_train.reshape(x_train.shape[0],x_train.shape[1],x_train.shape[2])/255.
x_test = x_test.reshape(x_test.shape[0],x_test.shape[1],x_test.shape[2])/255.
y_train = y_train.reshape(y_train.shape[0],)
y_test = y_test.reshape(y_test.shape[0],)
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv1D, MaxPooling1D, BatchNormalization, Flatten, Dense
model = Sequential()
model.add(Conv1D(64,3,padding='same',activation='relu'))
model.add(BatchNormalization())
model.add(Conv1D(32,3,padding='same',activation='relu'))
model.add(MaxPooling1D(2))
model.add(BatchNormalization())
model.add(Flatten())
model.add(Dense(64,activation='relu'))
model.add(Dense(32,activation='relu'))
model.add(Dense(16,activation='relu'))
model.add(Dense(10,activation='softmax'))
model.compile(loss='sparse_categorical_crossentropy',optimizer='adam',metrics=['acc'])
from tensorflow.keras.callbacks import EarlyStopping, ReduceLROnPlateau
es = EarlyStopping(monitor='val_loss', patience=20, mode='min')
lr = ReduceLROnPlateau(monitor='val_loss', patience=10, mode='min')
model.fit(x_train, y_train, batch_size=32, epochs=200, validation_split=0.2, callbacks=[es,lr])
loss, acc = model.evaluate(x_test,y_test, batch_size=32)
print(loss, acc)
return model
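# Note (added): with input shape (28, 28), Conv1D treats the 28 rows as
# timesteps and the 28 columns as channels, so no extra channel axis is needed.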
# Note that you'll need to save your model as a .h5 like this.
# When you press the Submit and Test button, your saved .h5 model will
# be sent to the testing infrastructure for scoring
# and the score will be returned to you.
if __name__ == '__main__':
model = solution_model()
model.save("c:/study/ttffttff/category2/mymodel.h5")
# 0.45170438289642334 0.9003000259399414
|
[
"76455292+SunghoonSeok@users.noreply.github.com"
] |
76455292+SunghoonSeok@users.noreply.github.com
|
183cf3676dd77ae775779446bac409e5e863fc80
|
5241af5d03d08578949fe5746b222b1635130341
|
/forms/server.py
|
fc77c0a39563be26265391eae3ee8c2fc3518260
|
[] |
no_license
|
LisCoding/Learning-Flask
|
84975d8368eeabcf9d6d8224d2c02a0171bb638f
|
36a90af4200284da422aeff00cfbb74467b778ee
|
refs/heads/master
| 2021-06-23T21:44:50.142142
| 2017-08-04T20:09:13
| 2017-08-04T20:09:13
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 312
|
py
|
from flask import Flask, render_template, request, redirect
app = Flask(__name__)
@app.route('/')
def index():
return render_template("index.html")
@app.route("/process", methods=["POST"])
def process():
name = request.form["name"]
print "hello " + name
return redirect("/")
app.run(debug=True)
|
[
"cardozoliseth@gmail.com"
] |
cardozoliseth@gmail.com
|
e0d8b67b989ef724ac22cd07382eb1aacf3727a7
|
bc90d29138d8a1ca73ff45c7617c41fd1b355e9f
|
/institutepro/institutepro/urls.py
|
0f8ce5c5ce803eb1aa12dea4aa91b01640472bd2
|
[] |
no_license
|
RavulaKartheek/RemoteRepository
|
46a1d698cb00fd8074aae393c27b68450000d320
|
e474978f1cd1398e6de05d0bda9b3c2a73cc4ff6
|
refs/heads/main
| 2023-02-23T03:52:01.812135
| 2021-01-24T07:40:41
| 2021-01-24T07:50:48
| 332,165,993
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 944
|
py
|
"""institutepro URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from instituteapp import views
urlpatterns = [
path('admin/', admin.site.urls),
path('', views.home_page, name='home'),
path('contact/', views.contact_page, name='contact'),
path('gallery/', views.gallery_page, name='gallery')
]
|
[
"interest.repetition@gmail.com"
] |
interest.repetition@gmail.com
|
d2cf16c8751a9eeea9e69af61794985f478ba576
|
b36d5380cffab55f1226531f5836458cf37b4601
|
/attic/wpfrontman/wp_frontman/management/commands/wpf_fixdb.py
|
99488a48ab2b9778d5db73114bae6a943a9790b4
|
[
"BSD-3-Clause"
] |
permissive
|
ludoo/wpkit
|
f859d1c22ffbbc9450dbd0861e08394e6aa97fc6
|
0447d941a438e143b0e51b5e73418a0206832823
|
refs/heads/master
| 2020-05-18T13:50:16.434623
| 2015-02-15T17:18:31
| 2015-02-15T17:18:31
| 30,834,235
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 17,328
|
py
|
from optparse import make_option
from MySQLdb import OperationalError
from django.conf import settings
from django.db import connection, DatabaseError
from django.core.management.base import BaseCommand, CommandError
from wp_frontman.lib.introspection import MySQLIntrospection
from wp_frontman.blog import Blog, DB_PREFIX
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option(
"--force", action="store_true", dest="force", default=False,
help="apply or revert changes instead of simply showing what would be done"
),
make_option(
"--revert", action="store_true", dest="revert", default=False,
help="revert any change previously applied"
),
make_option(
"--myisam", action="store_true", dest="myisam", default=False,
help="convert tables to MyISAM when reverting"
),
)
def handle(self, *args, **opts):
db = MySQLIntrospection(connection.settings_dict['NAME'], connection.cursor())
self.cursor = db.cursor
Blog.default_active()
self.dummy = not opts['force']
self.revert = opts['revert']
self._message("%s changes to tables in database %s (dummy %s)" % ('Applying' if not self.revert else 'Reverting', db.db, self.dummy), True, '=')
self.justify = 50 #max(len(t) for t in db.tables.keys()) + 1
if not self.revert:
self._innodb(db)
self._user_tables(db)
for b in Blog.site.blogs:
self._message("Blog id %s" % b)
print
Blog.factory(b)
self._posts_tables(db)
self._terms_table(db)
self._term_taxonomy_tables(db)
self._comments_tables(db)
else:
# reverse order
for b in Blog.site.blogs:
self._message("Blog id %s" % b)
print
Blog.factory(b)
self._comments_tables(db)
self._term_taxonomy_tables(db)
self._terms_table(db)
self._posts_tables(db)
self._user_tables(db)
if opts['myisam']:
self._innodb(db)
connection._commit()
def _user_tables(self, db):
u = db.tables['%susers' % DB_PREFIX]
um = db.tables['%susermeta' % DB_PREFIX]
self._message("Checking %s table" % um.name)
if not self.revert:
self._check_default_null(um, 'user_id')
self._check_foreign_key(um, 'user_id', '%s_ibfk1' % um.name, u.name, 'ID', fix_values='delete')
else:
self._drop_foreign_key(um, 'user_id', '%s_ibfk1' % um.name, u.name, 'ID', fix_values='delete')
self._remove_default_null(um, 'user_id')
print
def _comments_tables(self, db):
u = db.tables['%susers' % DB_PREFIX]
p = db.tables['%sposts' % Blog.get_active().db_prefix]
comments = db.tables['%scomments' % Blog.get_active().db_prefix]
cm = db.tables['%scommentmeta' % Blog.get_active().db_prefix]
if not self.revert:
self._message("Checking %s table" % comments.name)
self._check_default_null(comments, 'user_id', nullable=True)
self._check_foreign_key(comments, 'user_id', '%s_ibfk1' % comments.name, u.name, 'ID', 'cascade', 'set null', fix_values='update')
self._check_default_null(comments, 'comment_parent', nullable=True)
self._check_foreign_key(comments, 'comment_parent', '%s_ibfk2' % comments.name, comments.name, 'comment_ID', 'cascade', 'set null', fix_values='update')
self._check_trigger(
comments, "%s_bi" % comments.name,
"create trigger %s_bi before insert on %s for each row begin if new.comment_parent = '0' then set new.comment_parent = NULL; end if; if new.user_id = '0' then set new.user_id = NULL; end if; end" % (comments.name, comments.name)
)
self._check_default_null(comments, 'comment_post_ID')
self._check_orphaned_fk(comments, 'comment_post_ID', p, 'ID')
self._check_foreign_key(comments, 'comment_post_ID', '%s_ibfk3' % comments.name, p.name, 'ID', fix_values='delete')
print
self._message("Checking %s table" % cm.name)
self._check_default_null(cm, 'comment_id')
self._check_orphaned_fk(cm, 'comment_id', comments, 'comment_ID')
self._check_foreign_key(cm, 'comment_id', '%s_ibfk1' % cm.name, comments.name, 'comment_ID', fix_values='delete')
else:
self._message("Checking %s table" % cm.name)
self._drop_foreign_key(cm, 'comment_id', '%s_ibfk1' % cm.name, comments.name, 'comment_ID', fix_values='delete')
self._remove_default_null(cm, 'comment_id')
print
self._message("Checking %s table" % comments.name)
self._drop_foreign_key(comments, 'comment_post_ID', '%s_ibfk3' % comments.name, p.name, 'ID', fix_values='delete')
self._drop_foreign_key(comments, 'comment_parent', '%s_ibfk2' % comments.name, comments.name, 'comment_ID', 'cascade', 'set null', fix_values='update')
self._drop_foreign_key(comments, 'user_id', '%s_ibfk1' % comments.name, u.name, 'ID', 'cascade', 'set null', fix_values='update')
self._drop_trigger(comments, "%s_bi" % comments.name)
self._remove_default_null(comments, 'comment_post_ID')
self._remove_default_null(comments, 'comment_parent', nullable=True)
self._remove_default_null(comments, 'user_id', nullable=True)
print
def _term_taxonomy_tables(self, db):
terms = db.tables['%sterms' % Blog.get_active().db_prefix]
tt = db.tables['%sterm_taxonomy' % Blog.get_active().db_prefix]
self._message("Checking %s table" % tt.name)
if not self.revert:
self._check_default_null(tt, 'term_id')
fk = tt.field_foreign_keys.get('term_id')
if not fk:
print ("%s.%s orphaned" % (tt.name, 'term_id')).ljust(self.justify),
self._execute(
"delete from %s where term_id not in (select term_id from %s)" % (tt.name, terms.name),
"%(num)s removed"
)
self._check_foreign_key(tt, 'term_id', '%s_ibfk1' % tt.name, terms.name, 'term_id')
self._check_default_null(tt, 'parent', nullable=True)
self._check_foreign_key(tt, 'parent', '%s_ibfk2' % tt.name, terms.name, 'term_id', fix_values='update')
self._check_trigger(
tt, "%s_bi" % tt.name,
"create trigger %s_bi before insert on %s for each row begin if new.parent = '0' then set new.parent = NULL; end if; end" % (
tt.name, tt.name
)
)
else:
self._drop_foreign_key(tt, 'parent', '%s_ibfk2' % tt.name, terms.name, 'term_id', fix_values='update')
self._drop_foreign_key(tt, 'term_id', '%s_ibfk1' % tt.name, terms.name, 'term_id')
self._drop_trigger(tt, "%s_bi" % tt.name)
self._remove_default_null(tt, 'parent', nullable=True)
self._remove_default_null(tt, 'term_id')
print
def _terms_table(self, db):
terms = db.tables['%sterms' % Blog.get_active().db_prefix]
self._message("Checking %s table" % terms.name)
print ("%s.%s index" % (terms.name, 'term_order')).ljust(self.justify),
if not self.revert:
if 'term_order' in terms.fields:
idx = terms.field_indexes.get('term_order')
if not idx:
self._execute(
"alter table %s add index term_order (term_order)" % terms.name,
"added"
)
else:
print "not found, nothing to do"
print
else:
idx = terms.field_indexes.get('term_order')
if idx:
print "found, will not remove"
else:
print "not found, will not add"
print
def _posts_tables(self, db):
p = db.tables['%sposts' % Blog.get_active().db_prefix]
pm = db.tables['%spostmeta' % Blog.get_active().db_prefix]
users_name = '%susers' % DB_PREFIX
if not self.revert:
self._message("Checking %s table" % p.name)
self._check_default_null(p, 'post_author')
self._check_default_null(p, 'post_parent', nullable=True)
self._check_foreign_key(p, 'post_author', '%s_ibfk1' % p.name, users_name, 'ID', fix_values='update')
self._check_foreign_key(p, 'post_parent', '%s_ibfk2' % p.name, p.name, 'ID', 'set NULL', 'set NULL', fix_values='update')
self._check_trigger(
p, "%s_bi" % p.name,
"create trigger %s_bi before insert on %s for each row begin if new.post_parent = '0' then set new.post_parent = NULL; end if; end" % (
p.name, p.name
)
)
self._check_trigger(
p, "%s_bu" % p.name,
"create trigger %s_bu before update on %s for each row begin if new.post_parent = '0' then set new.post_parent = NULL; end if; end" % (
p.name, p.name
)
)
print
self._message("Checking %s table" % pm.name)
self._check_default_null(pm, 'post_id')
self._check_foreign_key(pm, 'post_id', '%s_ibfk1' % pm.name, p.name, 'ID', fix_values='delete')
print
else:
self._message("Checking %s table" % pm.name)
self._drop_foreign_key(pm, 'post_id', '%s_ibfk1' % pm.name, p.name, 'ID', fix_values='delete')
self._remove_default_null(pm, 'post_id')
print
self._message("Checking %s table" % p.name)
self._drop_foreign_key(p, 'post_author', '%s_ibfk1' % p.name, users_name, 'ID', fix_values='update')
self._drop_foreign_key(p, 'post_parent', '%s_ibfk2' % p.name, p.name, 'ID', 'set NULL', 'set NULL', fix_values='update')
self._drop_trigger(p, "%s_bi" % p.name)
self._drop_trigger(p, "%s_bu" % p.name)
self._remove_default_null(p, 'post_author')
self._remove_default_null(p, 'post_parent', nullable=True)
print
def _drop_trigger(self, table, trigger_name):
print ("%s trigger" % trigger_name).ljust(self.justify),
trigger = table.triggers.get(trigger_name)
if trigger is None:
print "ok (no trigger %s)" % trigger_name
else:
self._execute("drop trigger %s" % trigger_name, "dropped")
def _check_trigger(self, table, trigger_name, q):
print ("%s trigger" % trigger_name).ljust(self.justify),
trigger = table.triggers.get(trigger_name)
if trigger is None:
self._execute(q, "created")
else:
print "already defined (%s %s)" % (trigger['action_timing'].lower(), trigger['event_manipulation'].lower())
def _remove_default_null(self, table, column_name, datatype="bigint unsigned", nullable=False):
print ("%s.%s default" % (table.name, column_name)).ljust(self.justify),
field = table.fields[column_name]
if field['column_default'] is not None or field['is_nullable'] != nullable:
print "ok (%s)" % table.fields[column_name]['column_default']
else:
self._execute(
"alter table %s change %s %s %s default '0'" % (
table.name, column_name, column_name, datatype
),
"restore default value"
)
def _check_orphaned_fk(self, source_table, source_field, dest_table, dest_field, delete=True):
if self.revert:
return
print ("%s.%s orphaned FKs" % (source_table.name, source_field)).ljust(self.justify),
if delete:
self._execute(
"delete from %s where %s not in (select distinct %s from %s)" % (
source_table.name, source_field, dest_field, dest_table.name
),
"removed %(num)s orphaned fk values"
)
else:
self._execute(
"update %s set %s=NULL where %s not in (select distinct %s from %s)" % (
source_table.name, source_field, source_field, dest_field, dest_table.name
),
"changed %(num)s orphaned fk values to NULL"
)
def _check_default_null(self, table, column_name, datatype="bigint unsigned", nullable=False):
print ("%s.%s default" % (table.name, column_name)).ljust(self.justify),
field = table.fields[column_name]
if field['column_default'] is not None or field['is_nullable'] != nullable:
self._execute(
"alter table %s change %s %s %s %s" % (
table.name, column_name, column_name, datatype,
"not null" if not nullable else "default NULL"
),
"remove default value"
)
else:
print "ok (%s)" % table.fields[column_name]['column_default']
def _drop_foreign_key(self, table, column_name, fk_name, dest_tn, dest_cn, update='cascade', delete='cascade', fix_values=None):
fk = table.field_foreign_keys.get(column_name)
if fk and fix_values:
self._fix_old_defaults(table, column_name, old='NULL', new='0', update=fix_values=='update')
print ("%s.%s fk" % (table.name, column_name)).ljust(self.justify),
if fk:
self._execute(
"alter table %s drop foreign key %s" % (table.name, fk_name),
"dropped (%s)" % fk_name
)
else:
print "ok (no foreign key %s)" % fk_name
def _check_foreign_key(self, table, column_name, fk_name, dest_tn, dest_cn, update='cascade', delete='cascade', fix_values=None):
fk = table.field_foreign_keys.get(column_name)
if not fk and fix_values:
self._fix_old_defaults(table, column_name, update=fix_values=='update')
print ("%s.%s fk" % (table.name, column_name)).ljust(self.justify),
if fk:
print "ok (%s)" % ", ".join(v['constraint_name'] for v in fk)
else:
self._execute(
"alter table %s add constraint %s foreign key (%s) references %s (%s) on update %s on delete %s" % (
table.name, fk_name, column_name,
dest_tn, dest_cn,
update, delete
),
"added (%s)" % fk_name
)
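            # Example statement (assuming the default "wp_" table prefix):
            #   alter table wp_postmeta add constraint wp_postmeta_ibfk1
            #   foreign key (post_id) references wp_posts (ID)
            #   on update cascade on delete cascade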
def _fix_old_defaults(self, table, column_name, old=0, new='NULL', update=True):
print ("%s.%s default" % (table.name, column_name)).ljust(self.justify),
if update:
self._execute(
"update %s set %s=%s where %s=%s" % (
table.name, column_name, new, column_name, old
),
"changed %%(num)s old default values to %s" % new
)
else:
self._execute(
"delete from %s where %s=%s" % (
table.name, column_name, old
),
"removed %(num)s old default values"
)
def _innodb(self, db):
self._message("Checking for InnoDB tables")
for t in db.tables.values():
if not t.name.startswith(DB_PREFIX):
continue
print t.name.ljust(self.justify),
innodb = (t.engine.lower() == 'innodb')
if self.revert:
if innodb:
self._execute(
"alter table %s engine MyISAM" % t.name,
"change from %s to MyISAM" % t.engine
)
else:
print 'ok (%s)' % t.engine
elif innodb:
print 'ok (%s)' % t.engine
else:
self._execute(
"alter table %s engine innodb" % t.name,
"change from %s to InnoDB" % t.engine
)
if not self.dummy:
db.reset()
print
def _message(self, m, double=False, line_char='-'):
if double:
print
print m
print len(m) * line_char
if double:
print
def _execute(self, q, message):
if self.dummy:
print "/* start */ %s /* end */" % q
return
try:
res = self.cursor.execute(q)
except (DatabaseError, OperationalError), e:
connection._rollback()
raise CommandError("Cannot execute statement %s error %s" % (q, e))
if message is not None:
print message % dict(num=res)
#print " " * self.justify,
#print "/* start */ %s /* end */" % q
return res
|
[
"ludo@qix.it"
] |
ludo@qix.it
|
ba23764aca34e8d36e34ddfc4a9a2783a0ce0e69
|
ff6f60d02ed8d024f7b2db5c9eb4b1196ebf166b
|
/mysite/blog2/views.py
|
1ac554cf7587771bcbecbe82b9610dffa2c47921
|
[] |
no_license
|
cekong/learnit
|
43b707e347ff552754b6592e01dd106c98cd0cc5
|
b4111d6fee95960f7b7ca5421b7159cb6122ad2a
|
refs/heads/master
| 2020-03-25T13:53:37.848843
| 2019-08-29T06:46:48
| 2019-08-29T06:46:48
| 143,848,485
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,364
|
py
|
from django.shortcuts import render
from django.http import HttpResponse
from . import models
# Create your views here.
def hello(request):
# return HttpResponse('<html>hello wb</html>')
articles=models.Article.objects.all()
return render(request,'blog2/index1.html',{'articles': articles})
def article_page(request,article_id):
article_id=int(article_id)
article=models.Article.objects.get(pk=article_id)
return render(request,'blog2/article_page.html',{'article': article})
def edit_page(request,article_id):
if str(article_id)=='0':
return render(request, 'blog2/edit_page.html')
article = models.Article.objects.get(pk=article_id)
return render(request, 'blog2/edit_page.html',{'article': article})
def edit_action(request):
title=request.POST.get('title','TITLE')
content=request.POST.get('content','CONTENT')
article_id=request.POST.get('article_id','0')
if article_id=='0':
models.Article.objects.create(title=title,content=content)
articles=models.Article.objects.all()
return render(request,'blog2/index1.html',{'articles': articles})
else:
article = models.Article.objects.get(pk=article_id)
article.title=title
article.content=content
article.save()
return render(request, 'blog2/article_page.html', {'article': article})
|
[
"noreply@github.com"
] |
cekong.noreply@github.com
|
7b2104662ff47a3d681d40dcbd56236af3110c9a
|
04ffa72c844d7cfd67ee812689590d4baf99545f
|
/TimeSeries/main.py
|
b2795333b3b67a3a820d16378f18284849c638fb
|
[] |
no_license
|
ericdfournier/bokehviz
|
63580056f102fbef1cdbc7dbb490de7d4b3c22c0
|
ed4f3d330e78c0324e59eaf59d2e6a7ee59bac6e
|
refs/heads/master
| 2021-06-14T02:30:09.829807
| 2017-02-13T19:05:20
| 2017-02-13T19:05:20
| 74,779,502
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,546
|
py
|
''' Create a simple stocks correlation dashboard.
Choose stocks to compare in the drop down widgets, and make selections
on the plots to update the summary and histograms accordingly.
.. note::
Running this example requires downloading sample data. See
the included `README`_ for more information.
Use the ``bokeh serve`` command to run the example by executing:
bokeh serve stocks
at your command prompt. Then navigate to the URL
http://localhost:5006/stocks
.. _README: https://github.com/bokeh/bokeh/blob/master/examples/app/stocks/README.md
'''
try:
from functools import lru_cache
except ImportError:
    # Python 2's stdlib does not have lru_cache so let's just
# create a dummy decorator to avoid crashing
print ("WARNING: Cache for this example is available on Python 3 only.")
def lru_cache():
def dec(f):
def _(*args, **kws):
return f(*args, **kws)
return _
return dec
from os.path import dirname, join
import pandas as pd
from bokeh.io import curdoc
from bokeh.layouts import row, column
from bokeh.models import ColumnDataSource
from bokeh.models.widgets import PreText, Select
from bokeh.plotting import figure
DATA_DIR = join(dirname(__file__), 'daily')
DEFAULT_TICKERS = ['AAPL', 'GOOG', 'INTC', 'BRCM', 'YHOO']
def nix(val, lst):
return [x for x in lst if x != val]
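# e.g. nix('GOOG', ['AAPL', 'GOOG', 'INTC']) -> ['AAPL', 'INTC']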
@lru_cache()
def load_ticker(ticker):
fname = join(DATA_DIR, 'table_%s.csv' % ticker.lower())
data = pd.read_csv(fname, header=None, parse_dates=['date'],
names=['date', 'foo', 'o', 'h', 'l', 'c', 'v'])
data = data.set_index('date')
return pd.DataFrame({ticker: data.c, ticker+'_returns': data.c.diff()})
@lru_cache()
def get_data(t1, t2):
df1 = load_ticker(t1)
df2 = load_ticker(t2)
data = pd.concat([df1, df2], axis=1)
data = data.dropna()
data['t1'] = data[t1]
data['t2'] = data[t2]
data['t1_returns'] = data[t1+'_returns']
data['t2_returns'] = data[t2+'_returns']
return data
# set up widgets
stats = PreText(text='', width=500)
ticker1 = Select(value='AAPL', options=nix('GOOG', DEFAULT_TICKERS))
ticker2 = Select(value='GOOG', options=nix('AAPL', DEFAULT_TICKERS))
# set up plots
source = ColumnDataSource(data=dict(date=[], t1=[], t2=[], t1_returns=[], t2_returns=[]))
source_static = ColumnDataSource(data=dict(date=[], t1=[], t2=[], t1_returns=[], t2_returns=[]))
tools = 'pan,wheel_zoom,xbox_select,reset'
corr = figure(plot_width=350, plot_height=350,
tools='pan,wheel_zoom,box_select,reset')
corr.circle('t1_returns', 't2_returns', size=2, source=source,
selection_color="orange", alpha=0.6, nonselection_alpha=0.1, selection_alpha=0.4)
ts1 = figure(plot_width=900, plot_height=200, tools=tools, x_axis_type='datetime', active_drag="xbox_select")
ts1.line('date', 't1', source=source_static)
ts1.circle('date', 't1', size=1, source=source, color=None, selection_color="orange")
ts2 = figure(plot_width=900, plot_height=200, tools=tools, x_axis_type='datetime', active_drag="xbox_select")
ts2.x_range = ts1.x_range
ts2.line('date', 't2', source=source_static)
ts2.circle('date', 't2', size=1, source=source, color=None, selection_color="orange")
# set up callbacks
def ticker1_change(attrname, old, new):
ticker2.options = nix(new, DEFAULT_TICKERS)
update()
def ticker2_change(attrname, old, new):
ticker1.options = nix(new, DEFAULT_TICKERS)
update()
def update(selected=None):
t1, t2 = ticker1.value, ticker2.value
data = get_data(t1, t2)
source.data = source.from_df(data[['t1', 't2', 't1_returns', 't2_returns']])
source_static.data = source.data
update_stats(data, t1, t2)
corr.title.text = '%s returns vs. %s returns' % (t1, t2)
ts1.title.text, ts2.title.text = t1, t2
def update_stats(data, t1, t2):
stats.text = str(data[[t1, t2, t1+'_returns', t2+'_returns']].describe())
ticker1.on_change('value', ticker1_change)
ticker2.on_change('value', ticker2_change)
def selection_change(attrname, old, new):
t1, t2 = ticker1.value, ticker2.value
data = get_data(t1, t2)
selected = source.selected['1d']['indices']
if selected:
data = data.iloc[selected, :]
update_stats(data, t1, t2)
source.on_change('selected', selection_change)
# set up layout
widgets = column(ticker1, ticker2, stats)
main_row = row(corr, widgets)
series = column(ts1, ts2)
layout = column(main_row, series)
# initialize
update()
curdoc().add_root(layout)
curdoc().title = "Stocks"
|
[
"me@ericdfournier.com"
] |
me@ericdfournier.com
|
ec4ddab77e779d3e3484437c20276a64c43f7995
|
172063f6830d5a3b4db8df433f8041648c19e1b6
|
/7. Reverse Integer.py
|
198a4a02f71be2aad93e7fa11c4ead80e93771e4
|
[] |
no_license
|
HangZhongZH/LeetCodeAlgorithms
|
a1f4e9b48d5e1cc2e63b5fa4d4f48c80702ae847
|
bf12ca629868f053658c8ed4cf6fe8bf79806cf0
|
refs/heads/master
| 2022-01-24T12:05:13.920190
| 2019-08-20T23:55:29
| 2019-08-20T23:55:29
| 197,986,883
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 880
|
py
|
# 7. Reverse Integer
class Solution(object):
def reverse(self, x):
"""
:type x: int
:rtype: int
"""
        # Reverse the digits by repeatedly popping the last character.
        x_str = str(x)
        x_list = list(x_str)
        reverse_list = []
        for i in range(len(x_list)):
            reverse_list.append(x_list.pop())
        # int() below discards any leading zeros (e.g. '001' -> 1), so no
        # explicit zero-stripping pass is needed.
if reverse_list[-1] == '-':
reverse_list = list('-') + reverse_list[: -1]
reverse_num = int(''.join(reverse_list))
if reverse_num < -2**31 or reverse_num > 2**31 - 1:
reverse_num = 0
return reverse_num
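# Quick check: Solution().reverse(-120) == -21, and
# Solution().reverse(1534236469) == 0 because 9646324351 overflows 32 bits.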
|
[
"noreply@github.com"
] |
HangZhongZH.noreply@github.com
|
601ef03bcb0dc802900e3cff5546aaaa9f4225d6
|
6fde5ad1212df526e84e8dfd85d3c54a7e48c843
|
/general/views.py
|
6b0e218e5655cad75c59c5d54d492ca8ebade39f
|
[] |
no_license
|
aynulislam/SNS_API
|
6ed73e93726c2de4509be82741f568e3104f4042
|
cd7a608c90593b029d2c98d61a7c70205d3727b6
|
refs/heads/master
| 2022-03-05T04:54:45.863387
| 2019-11-27T06:49:29
| 2019-11-27T06:49:29
| 223,138,312
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 701
|
py
|
from django.contrib.auth.models import User
from django.http import JsonResponse
from rest_framework.views import APIView
from .models import SnAction
from .serializers import ReactSerializers
from rest_framework.response import Response
class ActionView(APIView):
@staticmethod
def get(request):
react = SnAction.objects.all().filter(action_type=1)
serializer = ReactSerializers(react, many=True)
return Response(serializer.data)
class SearchView(APIView):
@staticmethod
def get(request, key):
print("hello")
search = User.objects.filter(username__icontains=key).values('id', 'username')
return JsonResponse({"data": list(search)})
|
[
"noreply@github.com"
] |
aynulislam.noreply@github.com
|
dd97a8fb61d29ac6802bfb439f2ef333e9910f92
|
b1303152c3977a22ff9a0192c0c32310e65a6d77
|
/python/725.split-linked-list-in-parts.py
|
bc110b0a0434d89ff7f0ea300c178d465ed4835b
|
[
"Apache-2.0"
] |
permissive
|
stavanmehta/leetcode
|
1b8da1c2bfacaa76ddfb96b8dbce03bf08c54c27
|
1224e43ce29430c840e65daae3b343182e24709c
|
refs/heads/master
| 2021-07-15T16:02:16.107962
| 2021-06-24T05:39:14
| 2021-06-24T05:39:14
| 201,658,706
| 0
| 0
|
Apache-2.0
| 2021-06-24T05:39:15
| 2019-08-10T16:59:32
|
Java
|
UTF-8
|
Python
| false
| false
| 234
|
py
|
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
    def splitListToParts(self, root: ListNode, k: int) -> List[ListNode]:
        # Count the nodes, then give each part n // k nodes, with the
        # first n % k parts receiving one extra node.
        n, node = 0, root
        while node:
            n, node = n + 1, node.next
        size, extra = divmod(n, k)
        parts, node = [], root
        for i in range(k):
            head = node
            # Advance to the last node of this part, then detach it.
            for _ in range(size + (i < extra) - 1):
                node = node.next
            if node:
                node.next, node = None, node.next
            parts.append(head)
        return parts
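# e.g. for the list 1 -> 2 -> 3 and k = 5 the parts are [1], [2], [3], None, None.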
|
[
"noreply@github.com"
] |
stavanmehta.noreply@github.com
|
1b84f710500099ce648274233cb7fb1c83c80ed9
|
481510c904d3ad0027e54a064d1f8bcb7630018f
|
/asm.py
|
54b9f24899d58d451f3e9b0e869adbeeaba5ea48
|
[] |
no_license
|
corywalker/cpu-homebrew
|
4b6ad3eb24a19986781fbc78403ec93c363c63d9
|
0bcd14f0f37c6bc224d384a1729337497ca11c7a
|
refs/heads/master
| 2020-05-20T06:00:53.919965
| 2012-06-21T03:36:20
| 2012-06-21T03:36:20
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,184
|
py
|
#!/usr/bin/env python
ENV = globals().copy() # Clean user environment
COMMANDS = [] # Result buffer
OP_SHIFT = 12
SLOT1_SHIFT = OP_SHIFT - 4
SLOT2_SHIFT = SLOT1_SHIFT - 4
from inspect import getouterframes, currentframe
def here():
'''Get file and line number in source file'''
try:
return getouterframes(currentframe())[3][1:3]
except:
return "???", 0
def out(type, file, line, msg):
'''Output message'''
print "%s:%d: %s: %s" % (file, line, type, msg)
def error(file, line, msg):
'''Print error message'''
out("error", file, line, msg)
def warn(file, line, msg):
'''Print warning message'''
out("warning", file, line, msg)
class ASM:
'''Base ASM instruction'''
def __init__(self):
self.file, self.line = here()
COMMANDS.append(self)
class lri(ASM):
'''lri instruction'''
def __init__(self, dest, imm):
ASM.__init__(self)
self.code = 1
self.dest = dest
if imm >= (1 << 8): # Check that imm is valid
warn(self.file, self.line, "0x%X too big, will truncate" % imm)
            imm &= ((1 << 8) - 1) # Mask all bits above 8
self.imm = imm
def genbits(self):
return (self.code << OP_SHIFT) | \
(self.imm << SLOT2_SHIFT) | \
(self.dest)
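    # Example: lri(dest=2, imm=0x2A) encodes as
    # (1 << 12) | (0x2A << SLOT2_SHIFT) | 2 == 0x12A2.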
class ALU3(ASM):
'''ALU instruction with 3 operands'''
def __init__(self, src1, src2, dest):
ASM.__init__(self)
self.src1 = src1
self.src2 = src2
self.dest = dest
def genbits(self):
return (self.code << OP_SHIFT) | \
(self.src1 << SLOT1_SHIFT) | \
(self.src2 << SLOT2_SHIFT) | \
(self.dest)
class add(ALU3):
'''`add' instruction'''
code = 4
class sub(ALU3):
    '''`sub' instruction'''
    code = 5
class andd(ALU3):
    '''`and' instruction (named andd to avoid the Python keyword)'''
    code = 6
class orr(ALU3):
    '''`or' instruction (named orr to avoid the Python keyword)'''
    code = 7
class xor(ALU3):
    '''`xor' instruction'''
    code = 8
class nor(ALU3):
    '''`nor' instruction'''
    code = 9
class sll(ALU3):
    '''`sll' (shift left logical) instruction'''
    code = 10
class srl(ALU3):
    '''`srl' (shift right logical) instruction'''
    code = 11
'''
class sub(ALU3):
code = 1
class move(ASM):
def __init__(self, src, dest):
ASM.__init__(self)
self.src = src
self.dest = dest
def genbits(self):
return (2 << OP_SHIFT) | \
(self.src << SLOT1_SHIFT) | \
(self.dest << SLOT2_SHIFT)
class MemOp(ASM):
def __init__(self, reg, addr):
ASM.__init__(self)
self.reg = reg
if addr >= (1 << 16): # Check that address is valid
warn(self.file, self.line, "0x%X too big, will truncate" % addr)
addr &= ((1 << 16) - 1) # Mask all bits above 16
self.addr = addr
def genbits(self):
return (self.code << OP_SHIFT) | \
(self.reg << SLOT1_SHIFT) | \
self.addr
class load(MemOp):
code = 3
class store(MemOp):
code = 4
class jmp(ASM):
def __init__(self, dest):
ASM.__init__(self)
self.dest = dest
def genbits(self):
return (5 << OP_SHIFT) | self.dest
'''
def label(name):
'''Setting a label'''
ENV[name] = len(COMMANDS)
# Setup user environment
# Add registers
for i in range(16):
ENV["r%d" % i] = i
# Add operators
#for op in (add, sub, move, load, store, label, jmp):
for op in (add, lri, label):
ENV[op.__name__] = op
def parse(fname):
'''Parse file '''
global COMMANDS
COMMANDS = []
execfile(fname, ENV, {})
return COMMANDS
from os.path import splitext, isfile
from array import array
from sys import exc_info, byteorder
from optparse import OptionParser
parser = OptionParser(usage="usage: %prog [options] FILE", version="0.1")
parser.add_option("-o", "--output", help="output file", dest="outfile",
default="")
parser.add_option("-g", help="create debug file", dest="debug",
action="store_true", default=0)
opts, args = parser.parse_args()
if len(args) != 1:
parser.error("wrong number of arguments") # Will exit
infile = args[0]
if not isfile(infile):
raise SystemExit("can't find %s" % infile)
try:
commands = parse(infile)
except SyntaxError, e:
error(e.filename, e.lineno, e.msg)
raise SystemExit(1)
except Exception, e:
# Get last traceback and print it
# Most of this code is taken from traceback.py:print_exception
etype, value, tb = exc_info()
while tb: # Find last traceback
last = tb
tb = tb.tb_next
lineno = last.tb_lineno # Line number
f = last.tb_frame
co = f.f_code
error(co.co_filename, lineno, e)
etype = value = tb = None # Release objects (not sure this is required ...)
raise SystemExit(1)
a = array("H")
for cmd in commands:
a.append(cmd.genbits())
if byteorder == "little":
a.byteswap()
if not opts.outfile:
opts.outfile = splitext(infile)[0] + ".o"
open(opts.outfile, "wb").write(a.tostring())
if opts.debug: # Emit debug information
dbg = open(splitext(infile)[0] + ".dbg", "w")
for cmd in commands:
print >> dbg, "%s:%s" % (cmd.file, cmd.line)
|
[
"cory@cory-VirtualBox.(none)"
] |
cory@cory-VirtualBox.(none)
|
06312eaa4fcce65f02c55cba06bee1bb4e07a6f1
|
f13acd0d707ea9ab0d2f2f010717b35adcee142f
|
/ABC/abc151-abc200/abc159/c.py
|
ff7a9181d2d4bd0618d47a01e26cfe67aed60412
|
[
"CC0-1.0",
"LicenseRef-scancode-public-domain"
] |
permissive
|
KATO-Hiro/AtCoder
|
126b9fe89fa3a7cffcbd1c29d42394e7d02fa7c7
|
bf43320bc1af606bfbd23c610b3432cddd1806b9
|
refs/heads/master
| 2023-08-18T20:06:42.876863
| 2023-08-17T23:45:21
| 2023-08-17T23:45:21
| 121,067,516
| 4
| 0
|
CC0-1.0
| 2023-09-14T21:59:38
| 2018-02-11T00:32:45
|
Python
|
UTF-8
|
Python
| false
| false
| 183
|
py
|
# -*- coding: utf-8 -*-
def main():
import sys
input = sys.stdin.readline
l = int(input())
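    # By the AM-GM inequality, a*b*c with a + b + c = l is maximized at a = b = c = l/3.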
print((l / 3) ** 3)
if __name__ == '__main__':
main()
|
[
"k.hiro1818@gmail.com"
] |
k.hiro1818@gmail.com
|
95156244bec32278de1ed247ee32efde2d250668
|
12b5d9bd39cd364d8fd9f0af68610b8cbfbb434b
|
/trajopt/varunani/benchmark_time.py
|
c12c250311861e02405e47b3182f4042d432838c
|
[
"MIT"
] |
permissive
|
grossmanlev/trajopt
|
a8a2856948fc9460001ac59ccd175ea5f6d9ca0b
|
3a01b26e24efe8f365a323292007e72f7ab147c2
|
refs/heads/master
| 2021-07-09T02:46:19.205237
| 2021-01-07T05:30:57
| 2021-01-07T05:30:57
| 223,448,806
| 0
| 0
|
MIT
| 2019-11-22T17:02:17
| 2019-11-22T17:02:16
| null |
UTF-8
|
Python
| false
| false
| 2,698
|
py
|
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import time as timer
import numpy as np
import pickle
import argparse
import os
from datetime import datetime
class Model(nn.Module):
def __init__(self,layers,nonlin=nn.Tanh()):
super(Model, self).__init__()
layerslist = []
for i in range(len(layers)-1):
layerslist.append(nn.Linear(layers[i],layers[i+1]))
layerslist.append(nonlin)
self.netwk = nn.Sequential(*layerslist)
def forward(self,x):
return self.netwk(x)
def bmark_time(model,x,y,fwd=True,ups=False):
if fwd: # for forward
rn = timer.time()
outp = model(x)
runtime = timer.time()-rn
else: # for backward
model.zero_grad()
outp = model(x)
loss = (outp-y)**2 # assuming least squares loss
loss = loss.sum()
if ups:
optimizer = optim.SGD(model.netwk.parameters(),lr=0.001)
rn = timer.time()
loss.backward()
if ups:
optimizer.step()
runtime = timer.time()-rn
return runtime
parser = argparse.ArgumentParser(description='Benchmark network speed.')
parser.add_argument('-l','--layers', nargs='+', type=int, required=True) # layer sizes (including input and output)
parser.add_argument('-bs','--batch_size', type=int, required=True) # size of batch in forward pass
parser.add_argument('-i','--iterations', type=int, required=True) # number of iterations to average out over
parser.add_argument('-p','--passtype', type=str, required=True) # options: fwd, bwd, both
# Note: type=bool is a trap here (bool('False') is True), so take the value as a
# string and compare it explicitly below.
parser.add_argument('-u','--update_bwd',type=str,required=False,default='False') # options: True, False
parser.add_argument('-a','--act_func',type=str,required=True) # options: tanh, ReLU, softmax
# Example command: python3 benchmark_time.py -l 14 128 128 1 -bs 32 -i 100 -p both -u False -a tanh
args = parser.parse_args()
layers = args.layers
bs = args.batch_size
iters = args.iterations
ups = (args.update_bwd == 'True')
if args.act_func == 'tanh':
act_func = nn.Tanh()
elif args.act_func == 'ReLU':
act_func = nn.ReLU()
elif args.act_func == 'softmax':
    act_func = nn.Softmax()
else:
    raise ValueError('unsupported activation function: %s' % args.act_func)
print("NEW RUN:", layers, bs, iters, ups, act_func)
model = Model(layers,nonlin=act_func)
runtimefwd = 0
runtimebwd = 0
for j in range(iters):
X = torch.randn(bs,layers[0])
Y = torch.randn(bs)
if (args.passtype == 'fwd') or (args.passtype == 'both'):
runtimefwd += bmark_time(model,X,Y,fwd=True,ups=ups)
if (args.passtype == 'bwd') or (args.passtype == 'both'):
runtimebwd += bmark_time(model,X,Y,fwd=False,ups=ups)
if (args.passtype == 'fwd') or (args.passtype == 'both'):
print('Forward Time:',runtimefwd/iters)
if (args.passtype == 'bwd') or (args.passtype == 'both'):
print('Backward Time:',runtimebwd/iters)
|
[
"anirudhtx@gmail.com"
] |
anirudhtx@gmail.com
|
9df4aaa8bc755703b78e83ad0c1f73ac55aef433
|
12f65a81036fa7a6aeaa5c742558f9b4a03b5fa1
|
/mmelemental/models/proc/base.py
|
0b142f0c34f4f72f2f692f30cfc788818c895003
|
[
"BSD-3-Clause"
] |
permissive
|
pk-organics/MMElemental
|
bef8b24d39c5d4d52f140eb53090f5270a1d4ea2
|
840e8fcafe2a07d4729cb0507ab1f30dc9c60541
|
refs/heads/main
| 2023-04-03T04:17:36.083582
| 2021-04-09T21:10:53
| 2021-04-09T21:10:53
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,329
|
py
|
from mmelemental.models.base import ProtoModel
from mmelemental.models.molecule.mm_mol import Molecule
from mmelemental.models.collect.sm_ensem import Ensemble
from mmelemental.models.collect.mm_traj import Trajectory
from mmelemental.models.forcefield import ForceField
from pydantic import Field, constr
from typing import List, Union, Dict, Optional, Any
__all__ = ["ProcInput", "ProcOutput"]
mmschema_proc_input_default = "mmschema_proc_input"
class ProcInput(ProtoModel):
""" Basic input model for procedures."""
# Generic fields
engine: Optional[str] = Field(
None,
description="Engine name to use in the procedure e.g. OpenMM.",
)
engine_version: Optional[str] = Field(
None, description="Supported engine version. e.g. >= 3.4.0."
)
component: Optional[str] = Field(
None,
description="Component name to use in the procedure e.g. mmic_openmm.",
)
schema_name: constr(
strip_whitespace=True, regex="^(mmschema_proc_input)$"
) = Field( # type: ignore
mmschema_proc_input_default,
description=(
f"The MMSchema specification to which this model conforms. Explicitly fixed as {mmschema_proc_input_default}."
),
)
schema_version: int = Field( # type: ignore
0,
description="The version number of ``schema_name`` to which this model conforms.",
)
kwargs: Optional[Dict[str, Any]] = Field(
None, description="Additional keyword arguments to pass to the constructors."
)
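# Construction sketch (illustrative values): all fields carry defaults or are
# optional, e.g.
#     inp = ProcInput(engine='OpenMM', engine_version='>= 3.4.0')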
class ProcOutput(ProtoModel):
""" Basic output model for procedures."""
component: str = Field(
None,
description="Component name used in the procedure e.g. mmic_openmm.",
)
engine: Optional[str] = Field(
None,
description="Engine name used in the procedure e.g. OpenMM.",
)
engine_version: Optional[str] = Field(
None, description="Engine version used in the procedure e.g. >= 3.4.0."
)
warnings: Optional[List[str]] = Field(
None, description="Warning messages generated from the conversion."
)
stdout: str = Field(None, description="Standard output.")
stderr: Optional[str] = Field(None, description="Standard error.")
log: Optional[str] = Field(None, description="Logging output.")
|
[
"andrew.gaam@gmail.com"
] |
andrew.gaam@gmail.com
|
74a230381b0da6b448c62ed8ef18f28429ebfb68
|
3083da01ff08588ba9b07a6c977767097baed469
|
/shell_finished.py
|
39e3dc08f90a1bf2ddea4f99b0975e043e0b72ea
|
[] |
no_license
|
denipash/Python-Intro
|
ee7f68bdefe24068ad858fc66d00c9dd9e959e80
|
2b938e70f64fbf46cea04bd8a8ed632e3f4404f3
|
refs/heads/main
| 2023-06-17T16:41:38.936265
| 2021-07-19T19:44:15
| 2021-07-19T19:44:15
| 387,561,599
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,048
|
py
|
#
# File for working with filesystem shell methods
#
import os
from os import path
import shutil
from shutil import make_archive
from zipfile import ZipFile
def main():
# make a duplicate of an existing file
if path.exists("textfile.txt"):
# get the path to the file in the current directory
        src = path.realpath("textfile.txt")
        # let's make a backup copy by appending "bak" to the name
        dst = src + ".bak"
        # now use the shell to make a copy of the file
        shutil.copy(src, dst)
        # copy over the permissions, modification times, and other info
        shutil.copystat(src, dst)
# # rename the original file
os.rename("textfile.txt", "newfile.txt")
# now put things into a ZIP archive
root_dir,tail = path.split(src)
shutil.make_archive("archive", "zip", root_dir)
# more fine-grained control over ZIP files
with ZipFile("testzip.zip","w") as newzip:
newzip.write("newfile.txt")
newzip.write("textfile.txt.bak")
if __name__ == "__main__":
main()
|
[
"noreply@github.com"
] |
denipash.noreply@github.com
|
4507a8da5fc803c57a35b67d8fbc507596406516
|
a1dc5354362a7357078b282bcf1f768bb0d297b4
|
/cut_optimizer/algorithms/euler_path.py
|
5e44102b7ae7382d5e31c2b8d878bebd1a373b38
|
[] |
no_license
|
marcinsulikowski/cut-optimizer
|
6fa33b54822c6555ea7c18ac294a21588db2606e
|
fa59ed8df69fc206d1e0080926d560199d9bbc4e
|
refs/heads/master
| 2022-10-20T17:16:22.381089
| 2020-06-15T22:29:54
| 2020-06-15T22:29:54
| 272,199,312
| 1
| 0
| null | 2020-06-15T19:16:15
| 2020-06-14T12:37:51
|
Python
|
UTF-8
|
Python
| false
| false
| 2,111
|
py
|
"""Euler path finding."""
import random
import sys
from typing import List, Tuple
from cut_optimizer.graph import Edge, Graph, Vertex
class NoEulerPathFound(Exception):
"""Raised when no Euler path can be found."""
def euler_path(graph: Graph, start: Vertex) -> List[Edge]:
"""Return an Euler path in the graph starting from `start`.
:raise: NoEulerPathFound if there's no Euler path starting at `start`.
:return: list of edges which form the path.
"""
assert start in graph.vertices
recursion_limit = sys.getrecursionlimit()
sys.setrecursionlimit(len(graph.edges) + recursion_limit)
try:
path, _end = _euler_path_for_connected_component(graph.clone(), start)
return path
finally:
sys.setrecursionlimit(recursion_limit)
def _euler_path_for_connected_component(
graph: Graph, start: Vertex
) -> Tuple[List[Edge], Vertex]:
# We'll maintain an invariant that `result` is a path from `start` to
# `result_end`. If we find a cycle from `start` to `start`, we prepend
# it to `result`. If we find a path from `start` to a dead end somewhere
# else, we'll append it to `result` and move `result_end` to a dead end.
result: List[Edge] = []
result_end = start
while graph.neighbors[start]:
edge = random.choice(list(graph.neighbors[start]))
graph.remove_edge(edge)
path, end = _euler_path_for_connected_component(
graph, edge.other_end(start)
)
if end == start:
# A cycle from start to start - prepend it to the result
result = [edge] + path + result
else:
# A path from start to somewhere else. If `result` already
# ends somewhere other than `start`, it means that the graph
# has more than one vertex of odd degree other than `start`.
                # In this case, there's no Euler path starting at `start`.
if result_end != start:
raise NoEulerPathFound(start)
result = result + [edge] + path
result_end = end
return result, result_end
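# Usage sketch (hypothetical construction helpers, for illustration only):
#     g = Graph()                 # assumes a default constructor
#     g.add_edge(Edge(a, b))      # assumes an add_edge helper on Graph
#     path = euler_path(g, a)     # -> list of Edge objects starting at `a`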
|
[
"marcin.k.sulikowski@gmail.com"
] |
marcin.k.sulikowski@gmail.com
|
758130829ae910e3e28c6f74fab294072d1e441a
|
9baa9f1bedf7bc973f26ab37c9b3046824b80ca7
|
/venv-bck/lib/python2.7/site-packages/click/testing.py
|
b4f9cf19475547e9017fae1f0034f8bb1fc8f5c2
|
[] |
no_license
|
shakthydoss/suriyan
|
58774fc5de1de0a9f9975c2ee3a98900e0a5dff4
|
8e39eb2e65cc6c6551fc165b422b46d598cc54b8
|
refs/heads/master
| 2020-04-12T05:36:59.957153
| 2017-01-08T06:12:13
| 2017-01-08T06:12:13
| 59,631,349
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 11,003
|
py
|
import contextlib
import shutil
import sys
import tempfile
import os
from ._compat import iteritems, PY2
# If someone wants to vendor click, we want to ensure the
# correct package is discovered. Ideally we could use a
# relative import here but unfortunately Python does not
# support that.
clickpkg = sys.modules[__name__.rsplit('.', 1)[0]]
if PY2:
from cStringIO import StringIO
else:
import io
from ._compat import _find_binary_reader
class EchoingStdin(object):
def __init__(self, input, output):
self._input = input
self._output = output
def __getattr__(self, x):
return getattr(self._input, x)
def _echo(self, rv):
self._output.write(rv)
return rv
def read(self, n=-1):
return self._echo(self._input.read(n))
def readline(self, n=-1):
return self._echo(self._input.readline(n))
def readlines(self):
return [self._echo(x) for x in self._input.readlines()]
def __iter__(self):
return iter(self._echo(x) for x in self._input)
def __repr__(self):
return repr(self._input)
def make_input_stream(input, charset):
# Is already an input stream.
if hasattr(input, 'read'):
if PY2:
return input
rv = _find_binary_reader(input)
if rv is not None:
return rv
raise TypeError('Could not find binary reader for input stream.')
if input is None:
input = b''
elif not isinstance(input, bytes):
input = input.encode(charset)
if PY2:
return StringIO(input)
return io.BytesIO(input)
class Result(object):
"""Holds the captured result of an invoked CLI script."""
def __init__(self, runner, output_bytes, exit_code, exception,
exc_info=None):
#: The runner that created the result
self.runner = runner
#: The output as bytes.
self.output_bytes = output_bytes
#: The exit code as integer.
self.exit_code = exit_code
        #: The exception that happened if one did.
self.exception = exception
#: The traceback
self.exc_info = exc_info
@property
def output(self):
"""The output as unicode string."""
return self.output_bytes.decode(self.runner.charset, 'replace') \
.replace('\r\n', '\n')
def __repr__(self):
return '<Result %s>' % (
self.exception and repr(self.exception) or 'okay',
)
class CliRunner(object):
"""The CLI runner provides functionality to invoke a Click command line
    script for unittesting purposes in an isolated environment.  This only
works in single-threaded systems without any concurrency as it changes the
global interpreter state.
:param charset: the character set for the input and output data. This is
UTF-8 by default and should not be changed currently as
the reporting to Click only works in Python 2 properly.
:param env: a dictionary with environment variables for overriding.
:param echo_stdin: if this is set to `True`, then reading from stdin writes
to stdout. This is useful for showing examples in
some circumstances. Note that regular prompts
will automatically echo the input.
"""
def __init__(self, charset=None, env=None, echo_stdin=False):
if charset is None:
charset = 'utf-8'
self.charset = charset
self.env = env or {}
self.echo_stdin = echo_stdin
def get_default_prog_name(self, cli):
"""Given a command object it will return the default program name
for it. The default is the `name` attribute or ``"root"`` if not
set.
"""
return cli.name or 'root'
def make_env(self, overrides=None):
"""Returns the environment overrides for invoking a script."""
rv = dict(self.env)
if overrides:
rv.update(overrides)
return rv
@contextlib.contextmanager
def isolation(self, input=None, env=None, color=False):
"""A context manager that sets up the isolation for invoking of a
command line tool. This sets up stdin with the given input data
and `os.environ` with the overrides from the given dictionary.
This also rebinds some internals in Click to be mocked (like the
prompt functionality).
This is automatically done in the :meth:`invoke` method.
.. versionadded:: 4.0
The ``color`` parameter was added.
:param input: the input stream to put into sys.stdin.
:param env: the environment overrides as dictionary.
:param color: whether the output should contain color codes. The
application can still override this explicitly.
"""
input = make_input_stream(input, self.charset)
old_stdin = sys.stdin
old_stdout = sys.stdout
old_stderr = sys.stderr
old_forced_width = clickpkg.formatting.FORCED_WIDTH
clickpkg.formatting.FORCED_WIDTH = 80
env = self.make_env(env)
if PY2:
sys.stdout = sys.stderr = bytes_output = StringIO()
if self.echo_stdin:
input = EchoingStdin(input, bytes_output)
else:
bytes_output = io.BytesIO()
if self.echo_stdin:
input = EchoingStdin(input, bytes_output)
input = io.TextIOWrapper(input, encoding=self.charset)
sys.stdout = sys.stderr = io.TextIOWrapper(
bytes_output, encoding=self.charset)
sys.stdin = input
def visible_input(prompt=None):
sys.stdout.write(prompt or '')
val = input.readline().rstrip('\r\n')
sys.stdout.write(val + '\n')
sys.stdout.flush()
return val
def hidden_input(prompt=None):
sys.stdout.write((prompt or '') + '\n')
sys.stdout.flush()
return input.readline().rstrip('\r\n')
def _getchar(echo):
char = sys.stdin.read(1)
if echo:
sys.stdout.write(char)
sys.stdout.flush()
return char
default_color = color
def should_strip_ansi(stream=None, color=None):
if color is None:
return not default_color
return not color
old_visible_prompt_func = clickpkg.termui.visible_prompt_func
old_hidden_prompt_func = clickpkg.termui.hidden_prompt_func
old__getchar_func = clickpkg.termui._getchar
old_should_strip_ansi = clickpkg.utils.should_strip_ansi
clickpkg.termui.visible_prompt_func = visible_input
clickpkg.termui.hidden_prompt_func = hidden_input
clickpkg.termui._getchar = _getchar
clickpkg.utils.should_strip_ansi = should_strip_ansi
old_env = {}
try:
for key, value in iteritems(env):
old_env[key] = os.environ.get(value)
if value is None:
try:
del os.environ[key]
except Exception:
pass
else:
os.environ[key] = value
yield bytes_output
finally:
for key, value in iteritems(old_env):
if value is None:
try:
del os.environ[key]
except Exception:
pass
else:
os.environ[key] = value
sys.stdout = old_stdout
sys.stderr = old_stderr
sys.stdin = old_stdin
clickpkg.termui.visible_prompt_func = old_visible_prompt_func
clickpkg.termui.hidden_prompt_func = old_hidden_prompt_func
clickpkg.termui._getchar = old__getchar_func
clickpkg.utils.should_strip_ansi = old_should_strip_ansi
clickpkg.formatting.FORCED_WIDTH = old_forced_width
def invoke(self, cli, args=None, input=None, env=None,
catch_exceptions=True, color=False, **extra):
"""Invokes a command in an isolated environment. The arguments are
forwarded directly to the command line script, the `extra` keyword
arguments are passed to the :meth:`~clickpkg.Command.main` function of
the command.
This returns a :class:`Result` object.
.. versionadded:: 3.0
The ``catch_exceptions`` parameter was added.
.. versionchanged:: 3.0
The result object now has an `exc_info` attribute with the
traceback if available.
.. versionadded:: 4.0
The ``color`` parameter was added.
:param cli: the command to invoke
:param args: the arguments to invoke
:param input: the input data for `sys.stdin`.
:param env: the environment overrides.
:param catch_exceptions: Whether to catch any other exceptions than
``SystemExit``.
:param extra: the keyword arguments to pass to :meth:`main`.
:param color: whether the output should contain color codes. The
application can still override this explicitly.
"""
exc_info = None
with self.isolation(input=input, env=env, color=color) as out:
exception = None
exit_code = 0
try:
cli.main(args=args or (),
prog_name=self.get_default_prog_name(cli), **extra)
except SystemExit as e:
if e.code != 0:
exception = e
exc_info = sys.exc_info()
exit_code = e.code
if not isinstance(exit_code, int):
sys.stdout.write(str(exit_code))
sys.stdout.write('\n')
exit_code = 1
except Exception as e:
if not catch_exceptions:
raise
exception = e
exit_code = -1
exc_info = sys.exc_info()
finally:
sys.stdout.flush()
output = out.getvalue()
return Result(runner=self,
output_bytes=output,
exit_code=exit_code,
exception=exception,
exc_info=exc_info)
@contextlib.contextmanager
def isolated_filesystem(self):
"""A context manager that creates a temporary folder and changes
the current working directory to it for isolated filesystem tests.
"""
cwd = os.getcwd()
t = tempfile.mkdtemp()
os.chdir(t)
try:
yield t
finally:
os.chdir(cwd)
try:
shutil.rmtree(t)
except (OSError, IOError):
pass
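# Typical usage (standard Click testing API; `hello` stands in for some Command):
#     runner = CliRunner()
#     result = runner.invoke(hello, ['Peter'], input='y\n')
#     assert result.exit_code == 0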
|
[
"shakthydoss@gmail.com"
] |
shakthydoss@gmail.com
|
4b93b4750607a1a5feb12f0a41bfe9c767679e8b
|
411a34b1ccab79f040c84b6d8b325c38267c5df2
|
/models/users.py
|
a045ff9a11cb8272ad25cd0d5fbaa996e556f349
|
[] |
no_license
|
thecattest/live-market-back
|
02dda49ac9b81cc31d9c3f9f2701686f8556a40c
|
261f7113577761f041c214abb0ca8de547a2d39b
|
refs/heads/master
| 2023-03-24T05:31:53.976926
| 2021-03-21T08:37:07
| 2021-03-21T08:37:07
| 349,664,753
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,151
|
py
|
import sqlalchemy
from sqlalchemy import orm
from .db_session import SqlAlchemyBase
from sqlalchemy_serializer import SerializerMixin
from flask_login import UserMixin
from werkzeug.security import check_password_hash, generate_password_hash
class User(SqlAlchemyBase, SerializerMixin, UserMixin):
__tablename__ = 'users'
id = sqlalchemy.Column(sqlalchemy.Integer,
primary_key=True, autoincrement=True)
login = sqlalchemy.Column(sqlalchemy.String(100), unique=True, nullable=False)
hashed_password = sqlalchemy.Column(sqlalchemy.String, nullable=True)
twitch_nickname = sqlalchemy.Column(sqlalchemy.String, nullable=False)
products = sqlalchemy.Column(sqlalchemy.JSON, unique=False, nullable=True)
stream_started = sqlalchemy.Column(sqlalchemy.Boolean, default=False, nullable=False)
def set_password(self, password):
self.hashed_password = generate_password_hash(password)
def check_password(self, password):
return check_password_hash(self.hashed_password, password)
def __repr__(self):
return f"<User {self.id} {self.login}>"
|
[
"vodopyanov999@gmail.com"
] |
vodopyanov999@gmail.com
|
b7cb638e72a0315ea5b16e4cb6f18ecdcb1072f8
|
88fadcb1b8df4907a7bd7e0ad66908b824a8f724
|
/components/webapp/src/network.py
|
febe05c49281de9ec640e5e333be6047646c751e
|
[
"Apache-2.0"
] |
permissive
|
butuzov/kubeflow-pipline-pytorch-tacatron
|
fe65b9838dde4fe1130cae49d87ea0255029664b
|
2800a4b66852a2220ca957e63e91aa7a19ea05df
|
refs/heads/master
| 2020-07-22T14:54:45.257922
| 2019-09-09T07:30:34
| 2019-09-09T07:33:11
| 207,239,109
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,462
|
py
|
#-*- coding: utf-8 -*-
from module import *
from text import symbols
import hyperparams as hp
import random
class Encoder(nn.Module):
"""
Encoder
"""
def __init__(self, embedding_size):
"""
:param embedding_size: dimension of embedding
"""
super(Encoder, self).__init__()
self.embedding_size = embedding_size
self.embed = nn.Embedding(len(symbols), embedding_size)
self.prenet = Prenet(embedding_size, hp.hidden_size * 2, hp.hidden_size)
self.cbhg = CBHG(hp.hidden_size)
def forward(self, input_):
input_ = torch.transpose(self.embed(input_),1,2)
prenet = self.prenet.forward(input_)
memory = self.cbhg.forward(prenet)
return memory
class MelDecoder(nn.Module):
"""
Decoder
"""
def __init__(self):
super(MelDecoder, self).__init__()
self.prenet = Prenet(hp.num_mels, hp.hidden_size * 2, hp.hidden_size)
self.attn_decoder = AttentionDecoder(hp.hidden_size * 2)
def forward(self, decoder_input, memory):
# Initialize hidden state of GRUcells
attn_hidden, gru1_hidden, gru2_hidden = self.attn_decoder.inithidden(decoder_input.size()[0])
outputs = list()
# Training phase
if self.training:
# Prenet
dec_input = self.prenet.forward(decoder_input)
timesteps = dec_input.size()[2] // hp.outputs_per_step
# [GO] Frame
prev_output = dec_input[:, :, 0]
for i in range(timesteps):
prev_output, attn_hidden, gru1_hidden, gru2_hidden = self.attn_decoder.forward(prev_output, memory,
attn_hidden=attn_hidden,
gru1_hidden=gru1_hidden,
gru2_hidden=gru2_hidden)
outputs.append(prev_output)
if random.random() < hp.teacher_forcing_ratio:
# Get spectrum at rth position
prev_output = dec_input[:, :, i * hp.outputs_per_step]
else:
# Get last output
prev_output = prev_output[:, :, -1]
# Concatenate all mel spectrogram
outputs = torch.cat(outputs, 2)
else:
# [GO] Frame
prev_output = decoder_input
for i in range(hp.max_iters):
prev_output = self.prenet.forward(prev_output)
prev_output = prev_output[:,:,0]
prev_output, attn_hidden, gru1_hidden, gru2_hidden = self.attn_decoder.forward(prev_output, memory,
attn_hidden=attn_hidden,
gru1_hidden=gru1_hidden,
gru2_hidden=gru2_hidden)
outputs.append(prev_output)
prev_output = prev_output[:, :, -1].unsqueeze(2)
outputs = torch.cat(outputs, 2)
return outputs
class PostProcessingNet(nn.Module):
"""
Post-processing Network
"""
def __init__(self):
super(PostProcessingNet, self).__init__()
self.postcbhg = CBHG(hp.hidden_size,
K=8,
projection_size=hp.num_mels,
is_post=True)
self.linear = SeqLinear(hp.hidden_size * 2,
hp.num_freq)
def forward(self, input_):
out = self.postcbhg.forward(input_)
out = self.linear.forward(torch.transpose(out,1,2))
return out
class Tacotron(nn.Module):
"""
End-to-end Tacotron Network
"""
def __init__(self):
super(Tacotron, self).__init__()
self.encoder = Encoder(hp.embedding_size)
self.decoder1 = MelDecoder()
self.decoder2 = PostProcessingNet()
def forward(self, characters, mel_input):
memory = self.encoder.forward(characters)
mel_output = self.decoder1.forward(mel_input, memory)
linear_output = self.decoder2.forward(mel_output)
return mel_output, linear_output
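# Shape sketch (illustrative): `characters` is a (batch, text_len) LongTensor of
# symbol indices, `mel_input` is (batch, num_mels, mel_len); forward() returns the
# predicted mel spectrogram and the post-processed linear-scale spectrogram.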
|
[
"butuzov@made.ua"
] |
butuzov@made.ua
|