blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 2
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
69
| license_type
stringclasses 2
values | repo_name
stringlengths 5
118
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
63
| visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 2.91k
686M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 23
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 213
values | src_encoding
stringclasses 30
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 2
10.3M
| extension
stringclasses 246
values | content
stringlengths 2
10.3M
| authors
listlengths 1
1
| author_id
stringlengths 0
212
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
b6d33955bebbc64147d3979731aa39d330a4225c
|
5b3d819753ad2b2b2ec1e7a47a1e8e15ad9f191d
|
/src/classifiers.py
|
810dfde902fa1f2f5817e3c1fc90df978af8cab2
|
[] |
no_license
|
dmelis/interpretwoe
|
517d48f959fb1a9a54619c681a4762fc9282f3be
|
3debdf583ca2771627d390fec6fcfc1b547d4de0
|
refs/heads/master
| 2023-08-31T04:45:04.119795
| 2021-10-20T16:01:51
| 2021-10-20T16:01:51
| 144,336,557
| 5
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 664
|
py
|
import sklearn.naive_bayes
import sklearn.ensemble
from sklearn.metrics import accuracy_score,precision_score,recall_score,f1_score
import pickle as pkl
def factory(dataset, model_type, load_trained=False):
    """Return a classifier for the given dataset/model_type combination.

    Args:
        dataset (str): Dataset name ('boston' or 'online_news').
        model_type (int): Model identifier (1: GaussianNB, 2: RandomForest).
        load_trained (bool): If True, load a pickled pre-trained model
            instead of constructing a fresh one.

    Returns:
        A scikit-learn classifier instance.

    Raises:
        ValueError: If the combination is not supported.  (Previously an
            unmatched combination crashed with UnboundLocalError on the
            return statement.)
    """
    if not load_trained:
        if dataset == 'boston' and model_type == 1:
            return sklearn.naive_bayes.GaussianNB()
        if dataset == 'online_news' and model_type == 2:
            return sklearn.ensemble.RandomForestClassifier(n_estimators=100)
    else:
        if dataset == 'online_news' and model_type == 2:
            # NOTE(review): relative path — assumes cwd is the src directory.
            # Use a context manager so the file handle is not leaked.
            with open('../models/userstudy/news_classifier.pkl', 'rb') as f:
                return pkl.load(f)
    raise ValueError(
        'Unsupported combination: dataset=%r, model_type=%r, load_trained=%r'
        % (dataset, model_type, load_trained))
|
[
"dalvmel@mit.edu"
] |
dalvmel@mit.edu
|
49240bb2697840e83be185607f3e0edbe71c7580
|
d40343711cd7af3e466d89785cb0f43f687f58d8
|
/Leetcode/Minimum_Numbers_of_Function_Calls_to_Make.py
|
eb8c37f5d941da5ded624f1cb7e34cbe9336ecda
|
[] |
no_license
|
Ashoksugu7/DSA
|
d0cd8708afa5f259646a028dc907a673ef694acf
|
b3963727d0cbc13e9b642e0a1a141d92d2a485df
|
refs/heads/master
| 2023-02-25T20:39:50.458716
| 2021-01-24T18:01:38
| 2021-01-24T18:01:38
| 281,011,744
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,033
|
py
|
"""
Minimum Numbers of Function Calls to Make Target Array
User Accepted:0
User Tried:0
Total Accepted:0
Total Submissions:0
Difficulty:Medium
Your task is to form an integer array nums from an initial array of zeros arr that is the same size as nums.
Return the minimum number of function calls to make nums from arr.
The answer is guaranteed to fit in a 32-bit signed integer.
Example 1:
Input: nums = [1,5]
Output: 5
Explanation: Increment by 1 (second element): [0, 0] to get [0, 1] (1 operation).
Double all the elements: [0, 1] -> [0, 2] -> [0, 4] (2 operations).
Increment by 1 (both elements) [0, 4] -> [1, 4] -> [1, 5] (2 operations).
Total of operations: 1 + 2 + 2 = 5.
Example 2:
Input: nums = [2,2]
Output: 3
Explanation: Increment by 1 (both elements) [0, 0] -> [0, 1] -> [1, 1] (2 operations).
Double all the elements: [1, 1] -> [2, 2] (1 operation).
Total of operations: 2 + 1 = 3.
Example 3:
Input: nums = [4,2,5]
Output: 6
Explanation: (initial)[0,0,0] -> [1,0,0] -> [1,0,1] -> [2,0,2] -> [2,1,2] -> [4,2,4] -> [4,2,5](nums).
Example 4:
Input: nums = [3,2,2,4]
Output: 7
Example 5:
Input: nums = [2,4,8,16]
Output: 8
Constraints:
1 <= nums.length <= 10^5
0 <= nums[i] <= 10^9
"""
class Solution:
    def alleven(self, nums):
        """Return True if every value in *nums* is even (vacuously True for [])."""
        return all(v % 2 == 0 for v in nums)

    def minOperations(self, nums):
        """Return the minimum number of operations to build *nums* from zeros.

        Allowed operations: increment one element by 1, or double every
        element.  Strategy: run the process backwards — strip the low bit of
        every odd element (one decrement each), then halve everything (one
        doubling undone) until all elements are zero.

        Fixes over the original: debug ``print`` calls removed, the caller's
        list is no longer mutated, and the redundant ``temp`` flag in the
        even-check is gone.

        Args:
            nums (list[int]): Target array, 0 <= nums[i].

        Returns:
            int: Minimum number of operations.
        """
        work = list(nums)  # work on a copy so the caller's list is untouched
        count = 0
        zeros = [0] * len(work)
        while work != zeros:
            # One increment per odd element (strip the low bit).
            for i, v in enumerate(work):
                if v % 2 == 1:
                    work[i] = v - 1
                    count += 1
            # Everything is even now; undo one doubling step if not done.
            if work != zeros:
                work = [v // 2 for v in work]
                count += 1
        return count
if __name__ == "__main__":
    # Guarded so importing this module no longer runs the demo as a side
    # effect.  Example 5 from the problem statement: expected output is 8.
    obj = Solution()
    print(obj.minOperations([2, 4, 8, 16]))
|
[
"realsugu@gmail.com"
] |
realsugu@gmail.com
|
32a3ad79591182c9ba50204847a810b156de646f
|
87914d69a6e13f83cae06e96567b5b99499fd407
|
/gamse/pipelines/common.py
|
dce153f4876ac9e00dfa4e8b65babfe1ea17d05f
|
[
"Apache-2.0"
] |
permissive
|
wangleon/gamse
|
10c38c0bbd44c2342e54e789d5eb7568f7c0c920
|
eda4c82fc304485b08fdb931b3acdbbb75a67e85
|
refs/heads/master
| 2023-06-15T23:05:20.990993
| 2023-06-08T02:46:47
| 2023-06-08T02:46:47
| 113,046,520
| 10
| 2
|
Apache-2.0
| 2023-06-08T02:46:48
| 2017-12-04T13:32:14
|
Python
|
UTF-8
|
Python
| false
| false
| 5,556
|
py
|
import os
import re
import time
import configparser
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.ticker as tck
from ..utils.obslog import read_obslog
def load_config(pattern, verbose=True):
    """Load the first local config file whose name matches *pattern*.

    Args:
        pattern (str): Regular expression matched against the filenames in
            the current working directory.
        verbose (bool): If True, print the name of the file that was loaded.

    Returns:
        configparser.ConfigParser: The parsed configuration.  If no
        filename matches, the returned parser is simply empty.
    """
    # load config files
    config = configparser.ConfigParser(
                inline_comment_prefixes = (';','#'),
                interpolation        = configparser.ExtendedInterpolation(),
                )
    # find local config file; only the first match is used (directory
    # iteration order is OS-dependent).
    for fname in os.listdir(os.curdir):
        if re.match(pattern, fname):
            config.read(fname)
            if verbose:
                # BUG FIX: message said "congfig".
                message = 'Load config file: "{}"'.format(fname)
                print(message)
            break
    return config
def load_obslog(pattern, fmt='obslog', verbose=True):
    """Find and read the observing log file.

    Args:
        pattern (str): Regex pattern matched against filenames in the
            current working directory.
        fmt (str): Format name passed through to :func:`read_obslog`.
        verbose (bool): If True, print the name of the chosen log file.

    Returns:
        :class:`astropy.io.Table`: Observing log table, or ``None`` if no
        matching log file is found.
    """
    # find observing log in the current working directory
    logname_lst = [fname for fname in os.listdir(os.curdir)
                   if re.match(pattern, fname)]
    if len(logname_lst)==0:
        print('No observation log found')
        return None
    elif len(logname_lst)==1:
        select_logname = logname_lst[0]
    elif len(logname_lst)>1:
        # multiple candidates: list them sorted by modification time and let
        # the user pick one interactively
        nlog = len(logname_lst)
        # maximum length of log filename
        maxlen = max([len(logname) for logname in logname_lst])
        # maximum length of log number
        maxdgt = len(str(nlog))
        fmt_string = (' - [{{:{:d}d}}] {{:{:d}s}} '
                      'Last modified in {{:s}}').format(maxdgt, maxlen)
        # build a list of (filename, modified time)
        nametime_lst = [(logname, os.path.getmtime(logname))
                        for logname in logname_lst]
        # sort with last modified time
        nametime_lst = sorted(nametime_lst, key=lambda v:v[1])
        # print lognames one by one
        for i, (logname, mtime) in enumerate(nametime_lst):
            t = time.localtime(mtime)
            time_str = '{0:02d}-{1:02d}-{2:02d} {3:02d}:{4:02d}:{5:02d}'.format(
                        *t)
            print(fmt_string.format(i, logname, time_str))
        # repeat the loop until user give a valid logname ID
        while(True):
            string = input('Select an observing log: ')
            if string.isdigit() and int(string) < nlog:
                select_logname = nametime_lst[int(string)][0]
                break
            elif len(string.strip())==0:
                # NOTE(review): empty input only warns and loops again —
                # there is no way to abort the selection without choosing.
                print('Warning: no logfile selected')
            else:
                print('Warning: {} is not a valid log ID'.format(string))
    else:
        pass
    if verbose:
        message = 'Load obslog file: "{}"'.format(select_logname)
        print(message)
    logtable = read_obslog(select_logname, fmt=fmt)
    return logtable
def plot_spectra1d():
    """Plot 1d spectra of all science frames, ten orders per figure page.

    NOTE(review): this function references several names that are neither
    defined nor imported in this module (``read_config``, ``find_log``,
    ``read_log``, ``fits``) and will raise NameError if called as-is — it
    looks like legacy code predating ``load_config``/``load_obslog`` above.
    Confirm before use.
    """
    config = read_config('')
    obslog_file = find_log(os.curdir)
    log = read_log(obslog_file)
    # paths for intermediate products and reports come from the config file
    section = config['data']
    midproc = section['midproc']
    report  = section['report']
    # suffix of the last reduction step determines which FITS files to read
    steps_string = config['reduction']['steps']
    step_lst = steps_string.split(',')
    suffix = config[step_lst[-1].strip()]['suffix']
    image_path = 'images'
    if not os.path.exists(image_path):
        os.mkdir(image_path)
    # one color per channel, cycling through 7 colors
    color_lst = 'rgbcmyk'
    for item in log:
        if item.imagetype == 'sci':
            filename = os.path.join(midproc, '%s%s.fits'%(item.fileid, suffix))
            if not os.path.exists(filename):
                continue
            # NOTE(review): `fits` is not imported here — presumably
            # astropy.io.fits; verify against the rest of the package.
            data = fits.getdata(filename)
            omin = data['order'].min()
            omax = data['order'].max()
            order_lst = np.arange(omin, omax+1)
            for io, order in enumerate(order_lst):
                # start a new 2x5 figure page every 10 orders
                if io%10 == 0:
                    fig = plt.figure(figsize=(14.14,10), dpi=150)
                # 2 columns x 5 rows of axes, filled left-to-right,
                # top-to-bottom
                ax = fig.add_axes([0.055+(io%2)*0.50,
                                   0.06 + (4-int((io%10)/2.))*0.188, 0.43, 0.16])
                wavemin, wavemax = 1e9, 0
                channels = sorted(np.unique(data['channel']))
                for ich, channel in enumerate(channels):
                    mask1 = (data['channel']==channel)
                    mask2 = (data['order']==order)
                    mask = mask1*mask2
                    if mask.sum()==0:
                        continue
                    row = data[mask][0]
                    wave = row['wavelength']
                    flux = row['flux']
                    color = color_lst[ich%7]
                    ax.plot(wave, flux, color+'-', lw=0.7, alpha=0.7)
                    # track the common wavelength range across channels
                    wavemin = min(wavemin, wave.min())
                    wavemax = max(wavemax, wave.max())
                ax.set_xlabel(u'Wavelength (\xc5)')
                x1, x2 = wavemin, wavemax
                y1, y2 = ax.get_ylim()
                ax.text(0.97*x1+0.03*x2, 0.8*y2, 'Order %d'%order)
                ax.set_xlim(x1, x2)
                ax.set_ylim(0, y2)
                # save the page once its tenth panel is drawn
                if io%10 == 9:
                    fig.savefig(os.path.join(image_path, 'spec_%s_%02d.png'%(item.fileid, int(io/10.))))
                    plt.close(fig)
            # save the last, possibly partial, page
            fig.savefig(os.path.join(image_path, 'spec_%s_%02d.png'%(item.fileid, int(io/10.))))
            plt.close(fig)
|
[
"wang.leon@gmail.com"
] |
wang.leon@gmail.com
|
162ac1a38bcb646310eea02176cfec098ea658c5
|
31beaa6ffbfa80b1dfcc6e863a127d30366c716d
|
/online_trajectory_process/scripts/downsampling_interpolation_process.py
|
e192cc7af9e86ec03fb849e0777ce643de078852
|
[] |
no_license
|
ThanasisTs/trajectory_process_utils
|
26492b3050839cc4968c413e82ea63acaa2efe18
|
6f7c538f75bfac71f74c2e01afb3b082646b7e07
|
refs/heads/master
| 2023-03-11T23:14:41.341566
| 2021-03-03T16:18:13
| 2021-03-03T16:18:13
| 258,867,312
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,776
|
py
|
#!/usr/bin/env python
import rospy
from geometry_msgs.msg import Point
from geometry_msgs.msg import PointStamped
from keypoint_3d_matching_msgs.msg import Keypoint3d_list
import math
import numpy as np
from scipy.spatial import distance
# --- module-wide mutable state shared between the ROS callback and helpers ---
x, y, z = [], [], []                 # processed (downsampled/interpolated) trajectory
xRaw, yRaw, zRaw = [], [], []        # raw accepted RWrist samples
xV_tmp, yV_tmp, zV_tmp = [], [], []  # sliding window used for motion-onset detection
pub, pub_stamp = None, None          # publishers; `pub` is created in movement_detection_node
ds_thres = 0.012                     # downsampling distance threshold — presumably meters, TODO confirm
ip_thres = 0.024                     # interpolation distance threshold — presumably meters, TODO confirm
start_flag = False                   # True once movement onset has been detected
end_flag = False
init_point = True                    # True until the averaged initial point is published
start_threshold = 24                 # max size of the std-dev sliding window
count = 0                            # total accepted samples seen
sum_time = 0                         # accumulated callback processing time
num_points = 0                       # samples skipped (downsampled) since last publish
times = []                           # timestamps of the points kept in x/y/z
init_point = True                    # NOTE(review): duplicate of the assignment above
count_points = []
count_inter = 0
time_debug = {}
timestamp = None                     # timestamp of the most recent RWrist sample
x_init, y_init, z_init = [], [], []  # first samples, averaged for the initial point
num_points_std, std_threshold = None, None  # filled from ROS params at startup
outlier_dis = None                   # max allowed jump between samples; ROS param
# Interpolate points in the line segment between p1 and p2
def interpolation(p1, p2, dis, dur):
    """Publish evenly spaced intermediate points on the segment p1 -> p2.

    Called when two consecutive samples are farther apart than ``ip_thres``:
    inserts ``dis // ds_thres`` points, spreads them over the elapsed
    duration ``dur``, and publishes each on the global ``pub``.
    """
    global timestamp, x, y, z, ds_thres, pub, num_points, count_inter, outlier_dis
    # NOTE(review): float floor-division — num_inter_points is a float here;
    # newer numpy versions require an integer `num` for linspace. Verify.
    num_inter_points = dis//ds_thres
    pub_rate = dur/(num_inter_points+1)
    # rospy.loginfo("Num of points %d"%num_inter_points)
    rospy.loginfo("Duration and number of points and pub rate: %f %d %f"%(dur, num_points, pub_rate))
    # rospy.loginfo("Duration and fake Duration %f %f"%(dur, num_points*0.047))
    # pub_rate = (num_points+1)*0.047/(num_inter_points)
    try:
        time_inter = np.linspace(times[-1], timestamp, num_inter_points)
    except Exception as e:
        # NOTE(review): if this fails, time_inter stays undefined and the
        # loop below raises NameError — consider returning early here.
        rospy.loginfo(e)
    for index, i in enumerate(np.linspace(0,1,num_inter_points + 1)):
        # skip both endpoints; the caller handles them
        if i==0 or i==1:
            continue
        # linear interpolation between p1 and p2 at fraction i
        x.append((1-i)*p1[0] + i*p2[0])
        y.append((1-i)*p1[1] + i*p2[1])
        z.append((1-i)*p1[2] + i*p2[2])
        times.append(time_inter[index])
        point = PointStamped()
        point.point.x = (1-i)*p1[0] + i*p2[0]
        point.point.y = (1-i)*p1[1] + i*p2[1]
        point.point.z = (1-i)*p1[2] + i*p2[2]
        # pace the publications across the elapsed duration
        rospy.sleep(pub_rate)
        pub.publish(point)
def callback(data):
    """Process one incoming Keypoint3d_list message.

    Tracks the RWrist keypoint, averages the first 15 accepted samples into
    a robust initial point, detects motion onset from the rolling standard
    deviation of a sliding window, then downsamples dense points and
    interpolates sparse ones, republishing each retained point on the
    ``trajectory_points`` topic.
    """
    global timestamp, init_point, times, num_points, sum_time, x, y, z, xRaw, yRaw, zRaw, xV_tmp, yV_tmp, zV_tmp, start_threshold, ds_thres, ip_thres, init_point, end_flag, start_flag, count
    global x_init, y_init, z_init, num_points_std, std_threshold
    start_time = rospy.get_time()
    # Get RWrist keypoint
    for i in range(len(data.keypoints)):
        if (data.keypoints[i].name == "RWrist"):
            x_tmp = data.keypoints[i].points.point.x
            y_tmp = data.keypoints[i].points.point.y
            z_tmp = data.keypoints[i].points.point.z
            timestamp = data.keypoints[i].points.header.stamp.to_sec()
    # Accept the sample only if it is close enough to the previous one
    # (outlier rejection); the very first sample is always accepted.
    if len(x_init) >= 1 and abs(x_init[-1]-x_tmp) < outlier_dis and abs(y_init[-1]-y_tmp) < outlier_dis and abs(z_init[-1]-z_tmp) < outlier_dis:
        x_init.append(x_tmp)
        y_init.append(y_tmp)
        z_init.append(z_tmp)
    elif len(x_init) == 0:
        x_init.append(x_tmp)
        y_init.append(y_tmp)
        z_init.append(z_tmp)
    count_points.append(count)
    count += 1
    # Average the 15 first points to get the first point
    # in order to avoid the case where the first point is outlier
    if init_point and len(x_init) == 15:
        point = PointStamped()
        point.point.x = np.mean(x_init)
        point.point.y = np.mean(y_init)
        # BUG FIX: z was computed from y_init, so the initial point's z
        # coordinate was wrong.
        point.point.z = np.mean(z_init)
        pub.publish(point)
        init_point = False
        rospy.loginfo("Published initial point")
    if not init_point:
        if not end_flag:
            # Check for outliers or zeros (invalid trajectory points)
            if x_tmp != 0 and y_tmp != 0 and z_tmp != 0:
                if len(xRaw) == 0 or (len(xRaw) >= 1 and abs(xRaw[-1] - x_tmp) < outlier_dis and abs(yRaw[-1] - y_tmp) < outlier_dis and abs(zRaw[-1] - z_tmp) < outlier_dis):
                    xRaw.append(x_tmp)
                    yRaw.append(y_tmp)
                    zRaw.append(z_tmp)
                    # keep the std-dev window at a fixed size
                    if len(xV_tmp) == start_threshold:
                        del xV_tmp[0]
                        del yV_tmp[0]
                        del zV_tmp[0]
                    xV_tmp.append(x_tmp)
                    yV_tmp.append(y_tmp)
                    zV_tmp.append(z_tmp)
                    if len(xV_tmp) >= 2:
                        std_x = np.std(xV_tmp)
                        std_y = np.std(yV_tmp)
                        std_z = np.std(zV_tmp)
                        # movement onset: the window's spread exceeds the
                        # configured threshold on any axis
                        if (not start_flag) and (std_x > std_threshold or std_y > std_threshold or std_z > std_threshold):
                            print("Start movement at sample %d" %count)
                            start_flag = True
                        # If motion has started, check if there is a need to downsample
                        # the points (high points density) or interpolate points (sparse points)
                        if start_flag:
                            if len(x) == 0:
                                # first retained point: publish it directly
                                x.append(x_tmp)
                                y.append(y_tmp)
                                z.append(z_tmp)
                                times.append(timestamp)
                                time_debug[count] = timestamp
                                point = PointStamped()
                                point.point.x = x_tmp
                                point.point.y = y_tmp
                                point.point.z = z_tmp
                                pub.publish(point)
                            else:
                                dis = distance.euclidean(list(zip(x, y, z))[-1], [x_tmp, y_tmp, z_tmp])
                                if dis > ds_thres:
                                    if dis < ip_thres:
                                        # normal spacing: keep and publish
                                        x.append(x_tmp)
                                        y.append(y_tmp)
                                        z.append(z_tmp)
                                        times.append(timestamp)
                                        point = PointStamped()
                                        point.point.x = x_tmp
                                        point.point.y = y_tmp
                                        point.point.z = z_tmp
                                        time_debug[count] = timestamp
                                        pub.publish(point)
                                    else:
                                        # gap too large: fill it with
                                        # interpolated points
                                        interpolation(list(zip(x, y, z))[-1], [x_tmp, y_tmp, z_tmp], dis, timestamp-times[-1])
                                        num_points = 0
                                    end_time = rospy.get_time()
                                    sum_time += end_time - start_time
                                else:
                                    # too close to the previous point:
                                    # downsample (skip) it
                                    num_points += 1
                                    # print (num_points)
def movement_detection_node():
    """Initialise the ROS node, read its parameters, and wire pub/sub."""
    global pub, std_threshold, num_points_std, outlier_dis
    rospy.init_node("movement_detection_downsampling_node")
    # NOTE(review): num_points_std is read but never used elsewhere in this
    # module — confirm whether it is dead configuration.
    num_points_std = rospy.get_param('trajectory_process/num_points_std', 25)
    std_threshold = rospy.get_param('trajectory_process/std_threshold', 0.01)
    # NOTE(review): 'raw_poitns' looks like a typo for 'raw_points', but the
    # key must match whatever the launch file sets — confirm before renaming.
    outlier_dis = rospy.get_param("raw_poitns/outlier_dis", 0.1)
    # latch=True so late subscribers still receive the last published point
    pub = rospy.Publisher("trajectory_points", PointStamped, queue_size=10, latch=True)
    sub = rospy.Subscriber("transform_topic", Keypoint3d_list, callback)
    rospy.spin()

if __name__ == "__main__":
    movement_detection_node()
|
[
"thtsitos@gmail.com"
] |
thtsitos@gmail.com
|
55bd083fb1dc1c3a0325dd006dbf44eb622be8ef
|
c6f260cc6aa09647fae97d344a375ddf28bfe839
|
/orgs/views.py
|
54ea027b0465550810960cc318b238412295de86
|
[
"MIT"
] |
permissive
|
jpaav/comm
|
573f36bea67297ab0b1d060947d939acede1c894
|
514d62b8c3ed301dee559538825ad8f253e25fc8
|
refs/heads/master
| 2020-03-19T12:59:48.104353
| 2018-06-28T22:57:42
| 2018-06-28T22:57:42
| 136,554,741
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 11,114
|
py
|
from django.contrib.auth.models import User
from django.contrib.sites.shortcuts import get_current_site
from django.http import HttpResponse
from django.shortcuts import render, redirect
from django.utils.encoding import force_text, force_bytes
from django.utils.http import urlsafe_base64_decode, urlsafe_base64_encode
from comm import settings
from orgs.forms import CreateOrgForm, CreateTagForm, CreateResidentForm, UpdateTagForm, UpdateResidentForm
from orgs.models import Org
from orgs.tokens import join_org_token
from patientlog.models import Log, Tag, Resident
def org(request, org_id):
    """Display a single organization page.

    The owner additionally gets an invite uid/token pair (plus the site
    domain) so the template can render a join link; plain members see the
    page without it.
    """
    if not request.user.is_authenticated():
        return redirect('/login/')
    try:
        org = Org.objects.get(pk=org_id)
    except Org.DoesNotExist:
        return render(request, 'patientlogs/object_does_not_exist.html', {'obj_type': 'org'})
    if request.user not in org.members.all():
        return render(request, 'accounts/not_authorized.html')
    context = {'org': org}
    if request.user == org.owner:
        # owners get the pieces needed to build an invitation link
        context['uid'] = urlsafe_base64_encode(force_bytes(org.pk))
        context['token'] = join_org_token.make_token(org)
        context['domain'] = get_current_site(request).domain
    return render(request, 'orgs/org.html', context)
def create_org(request):
    """Create a new organization owned by the requesting user."""
    if not request.user.is_authenticated():
        return redirect('/login/')
    if request.method == 'GET':
        form = CreateOrgForm()
    else:
        form = CreateOrgForm(request.POST)
        if form.is_valid():
            # attach the owner before the first save
            new_org = form.save(commit=False)
            new_org.owner = request.user
            new_org.save()
            return redirect('/orgs/')
    # GET, or POST with validation errors: (re-)render the form
    return render(request, 'orgs/create_org.html', {'form': form})
def org_dash(request):
    """Dashboard listing the user's approved and pending organizations."""
    if not request.user.is_authenticated():
        return redirect('/login/')
    context = {
        'approved_orgs': Org.objects.filter(members=request.user),
        'unapproved_orgs': Org.objects.filter(unapproved=request.user),
    }
    return render(request, 'orgs/org_dash.html', context)
def get_form_kwargs(self):
    # NOTE(review): this looks like a stray class-based-view method left at
    # module level — `super(self)` is not valid usage (super() takes a class
    # and then an instance) and will raise TypeError if this is ever called.
    # It also appears unused by the views in this module; confirm and remove.
    kwargs = super(self).get_form_kwargs()
    kwargs.update({'org': Org()})
    return kwargs
def logs(request, org_id):
    """List the logs of an organization (members only).

    With SIMPLE_UI enabled, a lone log short-circuits straight to its page.
    """
    if not request.user.is_authenticated():
        return redirect('/login/')
    try:
        org = Org.objects.get(pk=org_id)
    except Org.DoesNotExist:
        return render(request, 'patientlogs/object_does_not_exist.html', {'obj_type': 'org'})
    if request.user not in org.members.all():
        return render(request, 'accounts/not_authorized.html')
    org_logs = Log.objects.filter(org=org)
    if settings.SIMPLE_UI and org_logs.count() == 1:
        return redirect('/logs/' + str(org_logs.first().id))
    return render(request, 'orgs/logs.html', {'logs': org_logs})
def tags(request, org_id):
    """List the org's tags and handle tag creation via POST (owner only)."""
    if not request.user.is_authenticated():
        return redirect('/login/')
    # NOTE(review): unlike the sibling views, there is no Org.DoesNotExist
    # guard here — a bad org_id raises an uncaught exception.
    org = Org.objects.get(pk=org_id)
    if not request.user == org.owner:
        return render(request, 'accounts/not_authorized.html')
    tags = Tag.objects.filter(org=org)
    if request.method == 'GET':
        create_form = CreateTagForm()
    else:
        create_form = CreateTagForm(request.POST)
        if create_form.is_valid():
            # attach the org before saving the new tag
            tag = create_form.save(commit=False)
            tag.org = org
            tag.save()
    # invalid submissions fall through and re-render with form errors
    return render(request, 'orgs/tags.html', {'create_form': create_form, 'tags': tags})
def residents(request, org_id):
    """List the org's residents and handle resident creation via POST."""
    if not request.user.is_authenticated():
        return redirect('/login/')
    try:
        org = Org.objects.get(pk=org_id)
    except Org.DoesNotExist:
        return render(request, 'patientlogs/object_does_not_exist.html', {'obj_type': 'org'})
    residents = Resident.objects.filter(org=org)
    if request.user not in org.members.all():
        return render(request, 'accounts/not_authorized.html')
    if request.method == 'GET':
        create_form = CreateResidentForm(org=org)
    else:
        create_form = CreateResidentForm(request.POST, org=org)
        if create_form.is_valid():
            resident = create_form.save(commit=False)
            resident.org = org
            resident.save()
            # M2M relations can only be populated once the instance has a pk
            for advocate in create_form.cleaned_data['advocates']:
                resident.advocates.add(advocate)
            return redirect('/orgs/' + str(org_id) + '/residents/' + str(resident.id))
    return render(request, 'orgs/residents.html', {'create_form': create_form, 'residents': residents})
def join(request, uidb64, token):
    """Handle an org invite link: add the user to the org's pending set.

    The link carries the org pk (base64) and a token produced by
    ``join_org_token``; a valid pair puts the user in ``org.unapproved``
    pending owner approval.
    """
    if not request.user.is_authenticated():
        return redirect('/login/')
    # Tries to decode the uid and use it as a key to find an org
    try:
        uid = force_text(urlsafe_base64_decode(uidb64))
        org = Org.objects.get(pk=uid)
    # Catches if the activation link is bad
    except(TypeError, ValueError, OverflowError, Org.DoesNotExist):
        org = None
    # BUG FIX: previously a bad link (org=None) hit org.unapproved below and
    # raised AttributeError (a 500) before the None check could run.
    if org is not None:
        if org.unapproved.filter(pk=request.user.id).exists():
            return HttpResponse('You are already unapproved for this organization.')
        if org.members.filter(pk=request.user.id).exists():
            return HttpResponse('You are already a member for this organization.')
    if org is not None and join_org_token.check_token(org, token):
        # Adds current user to org
        org.unapproved.add(request.user)
        return render(request, 'orgs/join_confirmed.html', {'org_name': org.name})
    else:
        return HttpResponse('Activation link is invalid!')
def approve(request, org_id, user_id):
    """Owner-only: move *user_id* from the org's unapproved set to members.

    BUG FIX: the original used ``org.unapproved.get(pk=user_id)`` expecting
    ``None`` for a missing user, but ``get`` raises ``DoesNotExist`` — an
    uncaught 500 whenever the user was not pending.  ``filter(...).exists()``
    makes the check safe; the redirect now happens either way (idempotent).
    """
    if not request.user.is_authenticated():
        return redirect('/login/')
    try:
        org = Org.objects.get(pk=org_id)
    except Org.DoesNotExist:
        return render(request, 'patientlogs/object_does_not_exist.html', {'obj_type': 'org'})
    try:
        user = User.objects.get(pk=user_id)
    except User.DoesNotExist:
        return render(request, 'patientlogs/object_does_not_exist.html', {'obj_type': 'user'})
    if request.user == org.owner:
        if org.unapproved.filter(pk=user_id).exists():
            org.unapproved.remove(user)
            org.members.add(user)
        return redirect('/orgs/' + str(org_id))
    else:
        return render(request, 'accounts/not_authorized.html')
def residents_detail(request, org_id, res_id):
    """Show one resident with an update form, plus the create form.

    Fixes over the original:
    - ``update['name']`` etc. indexed a model instance (TypeError at
      runtime); model instances use attribute access (cf. tags_detail).
    - ``detail.residents.clear()`` cleared a relation that does not match
      the loop that follows; the advocates M2M is what gets rebuilt.
    - Added the same ``Org.DoesNotExist`` guard the sibling views use.
    """
    if not request.user.is_authenticated():
        return redirect('/login/')
    try:
        org = Org.objects.get(pk=org_id)
    except Org.DoesNotExist:
        return render(request, 'patientlogs/object_does_not_exist.html', {'obj_type': 'org'})
    try:
        detail = Resident.objects.get(pk=res_id)
    except Resident.DoesNotExist:
        return render(request, 'patientlogs/object_does_not_exist.html', {'obj_type': 'resident'})
    residents = Resident.objects.filter(org=org)
    if request.method == 'GET':
        create_form = CreateResidentForm(org=org)
        update_form = UpdateResidentForm(
            initial={
                'name': detail.name,
                'room': detail.room,
                'timestamp_admitted': detail.timestamp_admitted,
                'timestamp_left': detail.timestamp_left,
                'advocates': detail.advocates.all(),
            },
            org=org
        )
    else:
        create_form = CreateResidentForm(request.POST, org=org)
        update_form = UpdateResidentForm(request.POST, org=org)
        if update_form.is_valid():
            update = update_form.save(commit=False)
            # BUG FIX: attribute access, not item access, on the unsaved
            # model instance.
            detail.name = update.name
            detail.room = update.room
            detail.timestamp_admitted = update.timestamp_admitted
            detail.timestamp_left = update.timestamp_left
            detail.save()
            # BUG FIX: rebuild the advocates relation (was residents.clear()).
            detail.advocates.clear()
            for advocate in update_form.cleaned_data['advocates']:
                detail.advocates.add(advocate)
            return redirect('/orgs/' + str(org_id) + '/residents/' + str(res_id))
        if create_form.is_valid():
            resident = create_form.save(commit=False)
            resident.org = org
            resident.save()
            for advocate in create_form.cleaned_data['advocates']:
                resident.advocates.add(advocate)
            return redirect('/orgs/' + str(org_id) + '/residents/' + str(res_id))
    return render(request, 'orgs/residents.html',
                  {'create_form': create_form, 'update_form': update_form, 'residents': residents, 'detail': detail})
def residents_delete(request, org_id, res_id):
    """Delete a resident; restricted to the organization's owner."""
    if not request.user.is_authenticated():
        return redirect('/login/')
    org = Org.objects.get(pk=org_id)
    if request.user != org.owner:
        return render(request, 'accounts/not_authorized.html')
    try:
        Resident.objects.get(pk=res_id).delete()
    except Resident.DoesNotExist:
        return render(request, 'patientlogs/object_does_not_exist.html', {'obj_type': 'resident'})
    return redirect('/orgs/' + str(org_id) + '/residents/')
def tags_delete(request, org_id, tag_id):
    """Delete a tag; restricted to the organization's owner."""
    if not request.user.is_authenticated():
        return redirect('/login/')
    org = Org.objects.get(pk=org_id)
    if request.user != org.owner:
        return render(request, 'accounts/not_authorized.html')
    try:
        Tag.objects.get(pk=tag_id).delete()
    except Tag.DoesNotExist:
        return render(request, 'patientlogs/object_does_not_exist.html', {'obj_type': 'tag'})
    return redirect('/orgs/' + str(org_id) + '/tags/')
def tags_detail(request, org_id, tag_id):
    """Show one tag with an update form, plus the create form (owner only).

    BUG FIX: the missing-tag branch rendered ``obj_type: 'resident'`` —
    copy/paste from residents_detail; it now says 'tag', consistent with
    tags_delete above.
    """
    if not request.user.is_authenticated():
        return redirect('/login/')
    org = Org.objects.get(pk=org_id)
    if not request.user == org.owner:
        return render(request, 'accounts/not_authorized.html')
    try:
        detail = Tag.objects.get(pk=tag_id)
    except Tag.DoesNotExist:
        return render(request, 'patientlogs/object_does_not_exist.html', {'obj_type': 'tag'})
    tags = Tag.objects.filter(org=org)
    if request.method == 'GET':
        create_form = CreateTagForm()
        update_form = UpdateTagForm(
            initial={
                'title': detail.title,
                'color': detail.color,
                'importance': detail.importance,
                'should_email': detail.should_email
            }
        )
    else:
        create_form = CreateTagForm(request.POST)
        update_form = UpdateTagForm(request.POST)
        if update_form.is_valid():
            # copy the validated fields onto the existing tag
            update = update_form.save(commit=False)
            detail.title = update.title
            detail.color = update.color
            detail.importance = update.importance
            detail.should_email = update.should_email
            detail.save()
            return redirect('/orgs/' + str(org_id) + '/tags/' + str(tag_id))
        if create_form.is_valid():
            tag = create_form.save(commit=False)
            tag.org = org
            tag.save()
            return redirect('/orgs/' + str(org_id) + '/tags/' + str(tag_id))
    return render(request, 'orgs/tags.html',
                  {'create_form': create_form, 'update_form': update_form, 'tags': tags, 'detail': detail})
def unapprove(request, org_id, user_id):
    """Owner-only: move *user_id* from the org's members back to unapproved.

    BUG FIX: ``org.members.get(pk=user_id)`` raises ``DoesNotExist`` instead
    of returning ``None`` when the user is not a member — an uncaught 500.
    ``filter(...).exists()`` makes the check safe; the redirect now happens
    either way (idempotent).
    """
    if not request.user.is_authenticated():
        return redirect('/login/')
    try:
        org = Org.objects.get(pk=org_id)
    except Org.DoesNotExist:
        return render(request, 'patientlogs/object_does_not_exist.html', {'obj_type': 'org'})
    try:
        user = User.objects.get(pk=user_id)
    except User.DoesNotExist:
        return render(request, 'patientlogs/object_does_not_exist.html', {'obj_type': 'user'})
    if request.user == org.owner:
        if org.members.filter(pk=user_id).exists():
            org.members.remove(user)
            org.unapproved.add(user)
        return redirect('/orgs/' + str(org_id))
    else:
        return render(request, 'accounts/not_authorized.html')
def remove_unapproved(request, org_id, user_id):
    """Owner-only: drop *user_id* from the org's pending (unapproved) set.

    BUG FIX: ``org.unapproved.get(pk=user_id)`` raises ``DoesNotExist``
    instead of returning ``None`` when the user is not pending — an
    uncaught 500.  ``filter(...).exists()`` makes the check safe; the
    redirect now happens either way (idempotent).
    """
    if not request.user.is_authenticated():
        return redirect('/login/')
    try:
        org = Org.objects.get(pk=org_id)
    except Org.DoesNotExist:
        return render(request, 'patientlogs/object_does_not_exist.html', {'obj_type': 'org'})
    try:
        user = User.objects.get(pk=user_id)
    except User.DoesNotExist:
        return render(request, 'patientlogs/object_does_not_exist.html', {'obj_type': 'user'})
    if request.user == org.owner:
        if org.unapproved.filter(pk=user_id).exists():
            org.unapproved.remove(user)
        return redirect('/orgs/' + str(org_id))
    else:
        return render(request, 'accounts/not_authorized.html')
|
[
"joepaavola@gmail.com"
] |
joepaavola@gmail.com
|
423ad321a753ab4c0c03d697b64e240e9e2d244e
|
d898284595e462f0f06e9439b9cead382497be06
|
/dumpcomputer/demo.py
|
527fd50a1a4dbbb231ab0e8072aa15c98bcec766
|
[] |
no_license
|
yingjianjian/learner
|
d41cb2af76b5ab6973b652421c08f831dfa8431f
|
fc023e0adbabeaff952e521d5e87f46f2e080549
|
refs/heads/master
| 2020-12-30T10:49:28.197998
| 2017-08-01T03:08:11
| 2017-08-01T03:08:11
| 98,859,630
| 0
| 0
| null | 2017-08-01T03:08:11
| 2017-07-31T07:15:50
|
Python
|
UTF-8
|
Python
| false
| false
| 5,209
|
py
|
#!/usr/bin/env python
#1 Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
#1 Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
import base64
from binascii import hexlify
import getpass
import os
import select
import socket
import sys
import time
import traceback
from paramiko.py3compat import input
import paramiko
import interactive
def agent_auth(transport, username):
    """
    Attempt to authenticate to the given transport using any of the private
    keys available from an SSH agent.  Returns after the first key that
    succeeds, or after all keys have been tried.
    """
    keys = paramiko.Agent().get_keys()
    if not keys:
        return
    for key in keys:
        print('Trying ssh-agent key %s' % hexlify(key.get_fingerprint()))
        try:
            transport.auth_publickey(username, key)
        except paramiko.SSHException:
            print('... nope.')
        else:
            print('... success!')
            return
def manual_auth(username, hostname,pw):
    # NOTE(review): the triple-quoted block below is the original interactive
    # key/password selection logic, parked as an inert docstring — it never
    # runs.  Only the two lines after it execute.
    '''default_auth = 'p'
    auth = input('Auth by (p)assword, (r)sa key, or (d)ss key? [%s] ' % default_auth)
    if len(auth) == 0:
        auth = default_auth
    if auth == 'r':
        default_path = os.path.join(os.environ['HOME'], '.ssh', 'id_rsa')
        path = input('RSA key [%s]: ' % default_path)
        if len(path) == 0:
            path = default_path
        try:
            key = paramiko.RSAKey.from_private_key_file(path)
        except paramiko.PasswordRequiredException:
            password = getpass.getpass('RSA key password: ')
            key = paramiko.RSAKey.from_private_key_file(path, password)
        t.auth_publickey(username, key)
    elif auth == 'd':
        default_path = os.path.join(os.environ['HOME'], '.ssh', 'id_dsa')
        path = input('DSS key [%s]: ' % default_path)
        if len(path) == 0:
            path = default_path
        try:
            key = paramiko.DSSKey.from_private_key_file(path)
        except paramiko.PasswordRequiredException:
            password = getpass.getpass('DSS key password: ')
            key = paramiko.DSSKey.from_private_key_file(path, password)
        t.auth_publickey(username, key)
    else:
        pw = getpass.getpass('Password for %s@%s: ' % (username, hostname))'''
    # `pw` arrives base64-encoded; b64decode returns bytes on Python 3 —
    # presumably paramiko accepts bytes passwords here, TODO confirm.
    pw=base64.b64decode(pw)
    # Relies on the module-global transport `t` created in the script body.
    t.auth_password(username, pw)
# setup logging
paramiko.util.log_to_file('demo.log')

# Parse [user@]host[:port] from argv[1], or prompt if absent.
username = ''
if len(sys.argv) > 1:
    hostname = sys.argv[1]
    if hostname.find('@') >= 0:
        username, hostname = hostname.split('@')
else:
    # BUG FIX: raw_input is Python 2 only; this script already imports the
    # py3-compatible `input` from paramiko.py3compat, so use it consistently.
    hostname = input('Hostname: ')
if len(hostname) == 0:
    print('*** Hostname required.')
    sys.exit(1)
port = 22
if hostname.find(':') >= 0:
    hostname, portstr = hostname.split(':')
    port = int(portstr)
# now connect
try:
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect((hostname, port))
except Exception as e:
    print('*** Connect failed: ' + str(e))
    traceback.print_exc()
    sys.exit(1)
try:
    t = paramiko.Transport(sock)
    try:
        t.start_client()
    except paramiko.SSHException:
        print('*** SSH negotiation failed.')
        sys.exit(1)
    # load known host keys, trying the standard and a legacy location
    try:
        keys = paramiko.util.load_host_keys(os.path.expanduser('~/.ssh/known_hosts'))
    except IOError:
        try:
            keys = paramiko.util.load_host_keys(os.path.expanduser('~/ssh/known_hosts'))
        except IOError:
            print('*** Unable to open host keys file')
            keys = {}
    # check server's host key -- this is important.
    # NOTE(review): only a *changed* key aborts; an unknown host merely warns
    # and proceeds, which is weaker than ssh's default behavior.
    key = t.get_remote_server_key()
    if hostname not in keys:
        print('*** WARNING: Unknown host key!')
    elif key.get_name() not in keys[hostname]:
        print('*** WARNING: Unknown host key!')
    elif keys[hostname][key.get_name()] != key:
        print('*** WARNING: Host key has changed!!!')
        sys.exit(1)
    else:
        print('*** Host key OK.')
    # get username
    ''' if username == '':
        default_username = getpass.getuser()
        username = input('Username [%s]: ' % default_username)
        if len(username) == 0:
            username = default_username'''
    # Credentials come from the command line: argv[2]=user, argv[3]=base64
    # password, argv[4]=author tag for the interactive shell.
    # SECURITY NOTE(review): command-line passwords are visible in `ps`.
    username = sys.argv[2]
    password = sys.argv[3]
    author=sys.argv[4]
    # try agent keys first, fall back to the supplied password
    agent_auth(t, username)
    if not t.is_authenticated():
        manual_auth(username, hostname,password)
    if not t.is_authenticated():
        print('*** Authentication failed. :(')
        t.close()
        sys.exit(1)
    chan = t.open_session()
    chan.get_pty()
    chan.invoke_shell()
    print('*** Here we go!\n')
    interactive.interactive_shell(chan,hostname,username,author)
    chan.close()
    t.close()
except Exception as e:
    print('*** Caught exception: ' + str(e.__class__) + ': ' + str(e))
    traceback.print_exc()
    try:
        t.close()
    except:
        pass
    sys.exit(1)
|
[
"root@vm33-ysj.(none)"
] |
root@vm33-ysj.(none)
|
3c8caf718092e757093f24383c72c9fef8b37672
|
4d1437ba20e513c56aa3fc8b84946f15133370b9
|
/src/lcd_maker.py
|
6dd5aa6e23b93300dcae7b9b0e73b97fd832114e
|
[] |
no_license
|
lvlarco/name_sign
|
ceba9df255915ecf02d53c01015a79a2edfa7eae
|
7814d784a06045267b20b66f516938458bd29c56
|
refs/heads/master
| 2021-06-27T20:12:08.059582
| 2020-10-07T20:18:49
| 2020-10-07T20:18:49
| 166,124,370
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,263
|
py
|
import RPi.GPIO as GPIO
import time
from RPLCD.gpio import CharLCD
import requests, json
from pprint import pprint
import harvesine as hv
import fetch_weather as fw
GPIO.setwarnings(False)
#configure LCD and define GPIO mode
cols = 16
rows = 2
gpio_mode = "BOARD"
if gpio_mode == "BOARD":
    lcd = CharLCD(pin_rs = 19, pin_rw = None, pin_e = 16, pins_data = [21,18,23,24],
                  numbering_mode = GPIO.BOARD, cols=cols, rows=rows, dotsize=8)
else:
    # BCM-numbered wiring alternative (different physical pins)
    lcd = CharLCD(pin_rs = 10, pin_rw = None, pin_e = 23, pins_data = [9,24,11,8], numbering_mode = GPIO.BCM)
#define weather API access
# SECURITY NOTE(review): the API key is hard-coded in source — move it to an
# environment variable or config file before publishing this code.
api_key = "19c2e8d2c714c0c7423d8126fe94224f"
base_url = "https://api.openweathermap.org/data/2.5/weather?q="
city_name = "boston"
units = "imperial"
complete_url = base_url + city_name + "&units=" + units + "&appid=" + api_key
response = requests.get(complete_url)
x = response.json()
wait_time = 3
# temperature = int(round(fw.fetch_weather(x)))
# day_status = hv.day_status(temperature)
# NOTE(review): the live values above are commented out; hard-coded test
# values are used instead, so the fetched response `x` is currently unused.
temperature = "56"
day_status = "hot"
if units == "imperial":
    temp_units = " degrees F"
else:
    temp_units = " degrees"
#first screen
message01 = "Weather forecast"
message11 = ""
pos01 = hv.center_cursor(message01, cols)
pos11 = hv.center_cursor(message11, cols)
#second screen
message02 = str("It is so " + day_status)
message12 = str("in " + city_name.capitalize() + " today")
pos02 = hv.center_cursor(message02, cols)
pos12 = hv.center_cursor(message12, cols)
#third screen
message03 = "We are at"
message13 = "{0}{1}".format(str(temperature), temp_units)
pos03 = hv.center_cursor(message03, cols)
pos13 = hv.center_cursor(message13, cols)
while True:
lcd.clear()
lcd.cursor_pos = (0, pos01)
lcd.write_string(message01)
lcd.cursor_pos = (1, pos11)
lcd.write_string(message11)
time.sleep(wait_time)
lcd.clear()
lcd.cursor_pos = (0, pos02)
lcd.write_string("It is so " + day_status)
lcd.cursor_pos = (1, pos12)
lcd.write_string("in " + city_name.capitalize() + " today")
time.sleep(wait_time)
lcd.clear()
lcd.cursor_pos = (0, pos03)
lcd.write_string(message03)
lcd.cursor_pos = (1, pos13)
lcd.write_string(str(temperature) + temp_units)
time.sleep(wait_time)
lcd.close()
GPIO.cleanup()
|
[
"marco.campos001@gmail.com"
] |
marco.campos001@gmail.com
|
a18e9a1b42952ec22c0953b9bebd3e5d9d1bb746
|
cef31bc305699e6f9ac0afc318cf90b26d3f48c8
|
/venv/bin/pip3
|
737eff885dfa5acf709874cc64ba6931761bb05d
|
[] |
no_license
|
nail1021734/apriori_algorithm
|
18bd31106b2210af2f180385f4eb74a3476d2457
|
8adbd11ed63ab205efff8ceaa4317184851dc52e
|
refs/heads/master
| 2023-01-04T10:39:51.651370
| 2020-10-24T13:11:05
| 2020-10-24T13:11:05
| 302,863,165
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 257
|
#!/home/neil/Desktop/aprioi_algorithm/venv/bin/python3
# -*- coding: utf-8 -*-
# Auto-generated console-script shim that launches pip from this virtualenv.
# Do not edit by hand; regenerate by reinstalling pip.
import re
import sys
from pip._internal.cli.main import main
if __name__ == '__main__':
    # Strip Windows launcher suffixes ("-script.pyw"/".exe") from argv[0] so
    # pip reports a clean program name, then exit with pip's status code.
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
|
[
"nail1021734@gmail.com"
] |
nail1021734@gmail.com
|
|
0312f021af7a059e4433dda7db189643c71a65fc
|
5d72b2c36e2f5ef5aa53cb017bf93f53164d1cbd
|
/PCA.py
|
4d4bc52f1e3403944fca25fb4f7020545014e2ba
|
[] |
no_license
|
artvive/Machine-Learning-basics
|
c87747a55391b263d86e506576b9556da6319a65
|
aa808a051232568e97a9060d97964069f4a3c44f
|
refs/heads/master
| 2021-01-11T17:19:45.970747
| 2017-04-22T17:00:04
| 2017-04-22T17:00:04
| 79,749,750
| 0
| 0
| null | 2017-04-22T17:00:05
| 2017-01-22T22:17:39
|
Python
|
UTF-8
|
Python
| false
| false
| 476
|
py
|
import numpy as np
from scipy.linalg import eigh
class PCA(object):
    """Principal component analysis via an eigendecomposition of the
    sample covariance matrix.

    Note: components are returned in *ascending* eigenvalue order (the
    order `scipy.linalg.eigh` produces), matching the original behaviour.
    """

    def __init__(self, n_components=2):
        super(PCA, self).__init__()
        # number of principal components to keep
        self.n_components = n_components

    def fit_transform(self, x):
        """Center `x` (n_samples, n_features) and project it onto its top
        `n_components` principal directions.  Returns an
        (n_samples, n_components) array."""
        dim = x.shape[1]
        x = np.copy(x)
        x = x - np.mean(x, axis=0)
        cov = np.cov(x.transpose())
        # Bug fix: the `eigvals` keyword was deprecated in SciPy 1.5 and
        # removed in 1.9; `subset_by_index` selects the same inclusive
        # index range of eigenpairs (still sorted ascending).
        val, vect = eigh(cov, subset_by_index=[dim - self.n_components, dim - 1])
        return np.matmul(x, vect)
|
[
"arthur.vive@student.ecp.fr"
] |
arthur.vive@student.ecp.fr
|
9a831b0de04f242a14d0827843a05ec645d4709f
|
e7e497b20442a4220296dea1550091a457df5a38
|
/main_project/AdHot/feed_tpl_admin/feedtpladmin/model/applypublish.py
|
f8438691ee2721113ee4bb79595f504a3ae8cd6a
|
[] |
no_license
|
gunner14/old_rr_code
|
cf17a2dedf8dfcdcf441d49139adaadc770c0eea
|
bb047dc88fa7243ded61d840af0f8bad22d68dee
|
refs/heads/master
| 2021-01-17T18:23:28.154228
| 2013-12-02T23:45:33
| 2013-12-02T23:45:33
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,159
|
py
|
from sqlalchemy import Column
from sqlalchemy import types
from sqlalchemy.types import Integer, String
from feedtpladmin.model.meta import Base
from sqlalchemy.sql.expression import text
class ApplyPublish(Base):
    """ORM mapping for the `apply_publish` table.

    One row records a template publish request: which template/version it
    targets, the PMs and developers involved (names and emails), a free-text
    description, plus the request's status and timestamp.
    """
    __tablename__ = "apply_publish"
    apply_id = Column(Integer, primary_key=True)
    stype_id = Column(Integer)
    version = Column(Integer)
    tpl_id = Column(Integer)
    pm_names = Column(String(128))
    pm_emails = Column(String(128))
    dev_names = Column(String(128))
    dev_emails = Column(String(128))
    publish_desc = Column(String(512))
    apply_time = Column(types.TIMESTAMP())
    status = Column(Integer)

    def __init__(self, apply_id, stype_id, version, tpl_id, pm_names, pm_emails, dev_names, dev_emails, publish_desc, status, apply_time):
        self.apply_id = apply_id
        self.stype_id = stype_id
        self.version = version
        self.tpl_id = tpl_id
        self.pm_names = pm_names
        self.pm_emails = pm_emails
        self.dev_names = dev_names
        self.dev_emails = dev_emails
        self.publish_desc = publish_desc
        self.status = status
        self.apply_time = apply_time

    def __repr__(self):
        # Bug fix: previously printed "<FeedKeys %s>", a copy/paste leftover
        # from another model, which made logs and debugger output misleading.
        return "<ApplyPublish %s>" % (self.apply_id)
|
[
"liyong19861014@gmail.com"
] |
liyong19861014@gmail.com
|
b3eef9bf297a294647d6016f103c422c4600ba40
|
07dfe52200c70ac0c8e86da1344a1e3284bf2164
|
/animeScript.py
|
939612ec3380e47e8480546fbc5f7b86cbf8a331
|
[] |
no_license
|
kearnie/horriblesubs_book
|
934d7f3b6a31103f0f3496a09d88bbad2c00a347
|
b565649066b6a683321f907a0ae32034217667f9
|
refs/heads/master
| 2021-06-10T05:12:50.560320
| 2017-02-14T11:03:30
| 2017-02-14T11:03:30
| 72,269,805
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,794
|
py
|
import subprocess
import shutil
import shlex
import re
import ass
import os
def takeSnapshots(fileName, ep):
    """Grab `amountPerEp` evenly spaced frames from an episode using mpv.

    Frames are written to kearnie/screencaps/<global_index>.png where the
    global index is ep * amountPerEp + i, so episodes do not overwrite each
    other.  The first and last `ignoreTime` seconds (intro/outro) are skipped.
    """
    amountPerEp = 10
    episodeTime = 20 * 60          # assume a ~20-minute episode
    ignoreTime = 60 * 3            # skip intro/credits at both ends
    # the -0.01 nudges the division down so we never produce extra frames
    interval = int((episodeTime - 2 * ignoreTime) / amountPerEp - 0.01)
    base = "kearnie/screencaps/"
    extractTimes = [t for t in range(ignoreTime, episodeTime - ignoreTime, interval)]
    for i, seek in enumerate(extractTimes):
        # mpv seeks to `seek`, dumps one frame as 00000001.png, no audio/subs
        args = ["mpv", "-ao", "null", "-sid", "no", "-ss", str(int(seek)),
                "-frames", "1", "-vo", "image", "--vo-image-format=png",
                fileName]
        try:
            subprocess.run(args)
            shutil.move("00000001.png", base + "%d.png" % (ep * amountPerEp + i))
        except (OSError, subprocess.SubprocessError, shutil.Error) as err:
            # Bug fix: was a bare `except` printing the cryptic "fal"; keep the
            # best-effort behaviour (skip the frame) but catch only plausible
            # failures and report what actually broke.
            print("snapshot at %ds failed: %s" % (seek, err))
# Pulls the mkv track number out of mkvinfo lines like "... mkvextract: 3 ...".
trackRegex = re.compile("mkvextract:\s(\d)")
# NOTE(review): `removeBrackets` (aliasing `regex`) is never used below, and
# the pattern pairs a literal '(' with '}' — looks like an abandoned draft.
removeBrackets = regex = re.compile(".*?\((.*?)\}")
def getSubtitleTracks(fileName):
    """Return the mkv track numbers (as strings) whose codec is S_TEXT/ASS."""
    info = subprocess.check_output(["mkvinfo", fileName],
                                   universal_newlines=True)
    ass_tracks = []
    current = None
    for row in info.splitlines():
        if "Track number:" in row:
            # remember the most recently announced track number
            current = trackRegex.search(row).group(1)
        if "S_TEXT/ASS" in row:
            ass_tracks.append(current)
    return ass_tracks
def exportSRT(fileName, track):
    """Dump subtitle `track` of the mkv to "<fileName>-<track>.srt" and
    return that file name."""
    out_name = "%s-%s.srt" % (fileName, track)
    subprocess.run(["mkvextract", "tracks", fileName,
                    "%s:%s" % (track, out_name)])
    return out_name
def cleanLine(line):
    """Strip ASS override tags ({...}) and "\\N" hard line breaks from one
    subtitle line, returning the plain dialogue text."""
    newLine = ""
    inBracket = False
    lastBackSlash = False
    for c in line:
        if c == "{":
            inBracket = True
        elif c == "}":
            # Bug fix: this was `inBracket == False` — a comparison, not an
            # assignment — so the flag never reset and everything after the
            # first tag was silently dropped.
            inBracket = False
        elif not inBracket:
            if c == "\\":
                lastBackSlash = True
            elif lastBackSlash and c == "N":
                # consume the "\N" line-break marker; reset the flag so a
                # following plain 'N' is kept (the old code left it set).
                lastBackSlash = False
            else:
                newLine += c
                lastBackSlash = False
    return newLine
def extractTextFromSubtitles(fileName):
    """Concatenate the cleaned dialogue of every ASS subtitle track that
    passes a crude English check, and return it as one string."""
    output = ""
    for track in getSubtitleTracks(fileName):
        srtName = exportSRT(fileName, track)
        with open(srtName, "r") as f:
            doc = ass.parse(f)
        cleaned = [cleanLine(event.text) for event in doc.events]
        combined = "\n".join(cleaned)
        # heuristic language filter: common English stop words present?
        if "in" in combined or "to" in combined or "for" in combined:
            output += combined
    return output
def extractFromFile(fileName, ep):
    """Process one episode: append its subtitle text to kearnie/subs.txt and
    capture its screenshots."""
    os.makedirs("kearnie/screencaps/", exist_ok=True)
    subtitle_text = extractTextFromSubtitles(fileName)
    with open("kearnie/subs.txt", "a") as out:
        out.write(subtitle_text)
    takeSnapshots(fileName, ep)
def extractSeries():
    """Run extraction over every .mkv in the current directory, numbering
    episodes in os.listdir order."""
    mkvs = (name for name in os.listdir(".") if name.endswith(".mkv"))
    for ep, filename in enumerate(mkvs):
        extractFromFile(filename, ep)
# Only run the full extraction when executed as a script, not on import.
if __name__ == "__main__":
    extractSeries()
|
[
"Kearnie Lin"
] |
Kearnie Lin
|
7d5200dc07c4ab8e35337e63713f0a2e2b852c85
|
8778a5a8188e238d83264b65d04ec727147d9440
|
/auto_log/autolog.py
|
a31c30c25e94b080339d3c891cccb1f33e7e61b0
|
[] |
no_license
|
RainFrost1/AutoLog
|
21bcb2e0a6b5e08225fd095f55c9d5986b12c65f
|
a1d1cf9b0e519601a48d047218c7a34525989fef
|
refs/heads/main
| 2023-06-20T08:57:51.051738
| 2021-07-20T03:30:40
| 2021-07-20T03:30:40
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,674
|
py
|
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import time
import pynvml
import psutil
import GPUtil
import os
import paddle
from pathlib import Path
import logging
from .env import get_env_info
from .utils import Times
from .device import MemInfo, SubprocessGetMem
class RunConfig:
    """Inference runtime configuration (device, graph optimisations,
    CPU-thread settings).

    Bug fixes: the constructor was named ``__init`` (missing trailing
    underscores) so it never ran as ``__init__``, and it read
    ``self.cpu_threads`` before any such attribute existed, which would
    have raised AttributeError.
    """

    def __init__(self,
                 run_devices="cpu",
                 ir_optim=False,
                 enable_tensorrt=False,
                 enable_mkldnn=False,
                 cpu_threads=0,
                 enable_mem_optim=True):
        # device the predictor runs on, e.g. "cpu" or "gpu"
        self.run_devices = run_devices
        # whether IR graph optimisation is enabled
        self.ir_optim = ir_optim
        self.enable_mkldnn = enable_mkldnn
        self.enable_tensorrt = enable_tensorrt
        # was: self.cpu_math_library_num_threads = self.cpu_threads  (AttributeError)
        self.cpu_threads = cpu_threads
        self.cpu_math_library_num_threads = cpu_threads
        self.enable_mem_optim = enable_mem_optim
class AutoLogger(RunConfig):
    """Benchmark logger for Paddle inference runs.

    Gathers per-stage timings (via ``Times``), CPU/GPU memory samples (via a
    ``SubprocessGetMem`` background sampler), the predictor configuration and
    environment info, then prints one consolidated report via ``report()``.
    """

    def __init__(self,
                 model_name,
                 model_precision,
                 batch_size,
                 data_shape,
                 save_path,
                 inference_config=None,
                 pids=None,
                 process_name=None,
                 gpu_ids=None,
                 time_keys=['preprocess_time', 'inference_time', 'postprocess_time'],
                 warmup=0,
                 logger=None,
                 **kwargs):
        # NOTE(review): `time_keys` is a mutable default argument (shared across
        # calls), and `process_name`/`kwargs` are accepted but never used.
        super(AutoLogger, self).__init__()
        self.autolog_version = 1.0
        self.save_path = save_path
        self.model_name = model_name
        self.precision = model_precision
        self.batch_size = batch_size
        self.data_shape = data_shape
        self.paddle_infer_config = inference_config
        # snapshot of the predictor config as a plain dict (see parse_config)
        self.config_status = self.parse_config(self.paddle_infer_config)
        self.time_keys = time_keys
        self.times = Times(keys=time_keys,warmup=warmup)
        self.get_paddle_info()
        self.logger = self.init_logger() if logger is None else logger
        # background sampler of CPU/GPU memory for the given pids/gpu ids
        self.get_mem = SubprocessGetMem(pid=pids, gpu_id=gpu_ids)
        self.start_subprocess_get_mem()
        self.pids = pids
        self.gpu_ids = gpu_ids

    def start_subprocess_get_mem(self):
        # Start sampling memory in a child process every 0.2 s.
        self.get_mem.get_mem_subprocess_run(0.2)

    def end_subprocess_get_mem(self):
        """Stop the sampler and return (cpu_infos, gpu_infos) for our pid/gpu."""
        self.get_mem.get_mem_subprocess_end()
        cpu_infos = self.get_mem.cpu_infos
        gpu_infos = self.get_mem.gpu_infos
        self.cpu_infos = cpu_infos[str(self.pids)]
        if self.gpu_ids is None:
            # CPU-only run: nothing to report for the GPU
            self.gpu_infos = {}
        else:
            self.gpu_infos = gpu_infos[str(self.gpu_ids)]
        return self.cpu_infos, self.gpu_infos

    def init_logger(self):
        """
        Create the benchmark logger, writing both to ``self.save_path``
        and to the console.
        """
        # Init logger
        FORMAT = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
        log_output = f"{self.save_path}"
        if not os.path.exists(os.path.dirname(log_output)):
            os.makedirs(os.path.dirname(log_output))
        logging.basicConfig(
            level=logging.INFO,
            format=FORMAT,
            handlers=[
                logging.FileHandler(
                    filename=log_output, mode='w'),
                logging.StreamHandler(),
            ])
        logger = logging.getLogger(__name__)
        logger.info(
            f"Paddle Inference benchmark log will be saved to {log_output}")
        return logger

    def parse_config(self, config) -> dict:
        """
        parse paddle predictor config
        args:
            config(paddle.inference.Config): paddle inference config
        return:
            config_status(dict): dict style config info
        """
        config_status = {}
        config_status['runtime_device'] = "gpu" if config.use_gpu() else "cpu"
        config_status['ir_optim'] = config.ir_optim()
        config_status['enable_tensorrt'] = config.tensorrt_engine_enabled()
        config_status['precision'] = self.precision
        config_status['enable_mkldnn'] = config.mkldnn_enabled()
        config_status[
            'cpu_math_library_num_threads'] = config.cpu_math_library_num_threads(
            )
        return config_status

    def get_paddle_info(self):
        # Record the Paddle build identity so reports are reproducible.
        self.paddle_version = paddle.__version__
        self.paddle_commit = paddle.__git_commit__

    def report(self, identifier=None):
        #TODO: support multi-model report
        """
        print log report
        args:
            identifier(string): identify log
        """
        if identifier:
            identifier = f"[{identifier}]"
        else:
            identifier = ""
        # report time
        _times_value = self.times.value(key=self.time_keys, mode='mean')
        preprocess_time_ms = round(_times_value['preprocess_time'] * 1000, 4)
        inference_time_ms = round(_times_value['inference_time'] * 1000, 4)
        postprocess_time_ms = round(_times_value['postprocess_time'] * 1000, 4)
        data_num = self.times._num_counts()
        total_time_s = round(self.times._report_total_time(mode='sum'), 4)
        # report memory
        cpu_infos, gpu_infos = self.end_subprocess_get_mem()
        cpu_rss_mb = self.cpu_infos['cpu_rss']
        gpu_rss_mb = self.gpu_infos['used'] if self.gpu_ids is not None else None
        gpu_util = self.gpu_infos['util'] if self.gpu_ids is not None else None
        # report env
        envs = get_env_info()
        self.logger.info("\n")
        self.logger.info(
            "---------------------- Env info ----------------------")
        # envs['nvidia_driver_version'] envs['cudnn_version']envs['cuda_version'] envs['os_info']
        self.logger.info(f"{identifier} OS_version: {envs['os_info']}")
        self.logger.info(f"{identifier} CUDA_version: {envs['cuda_version']}")
        self.logger.info(f"{identifier} CUDNN_version: {envs['cudnn_version']}")
        self.logger.info(f"{identifier} drivier_version: {envs['nvidia_driver_version']}")
        self.logger.info(
            "---------------------- Paddle info ----------------------")
        self.logger.info(f"{identifier} paddle_version: {self.paddle_version}")
        self.logger.info(f"{identifier} paddle_commit: {self.paddle_commit}")
        self.logger.info(f"{identifier} log_api_version: {self.autolog_version}")
        self.logger.info(
            "----------------------- Conf info -----------------------")
        self.logger.info(
            f"{identifier} runtime_device: {self.config_status['runtime_device']}"
        )
        self.logger.info(
            f"{identifier} ir_optim: {self.config_status['ir_optim']}")
        self.logger.info(f"{identifier} enable_memory_optim: {True}")
        self.logger.info(
            f"{identifier} enable_tensorrt: {self.config_status['enable_tensorrt']}"
        )
        self.logger.info(
            f"{identifier} enable_mkldnn: {self.config_status['enable_mkldnn']}")
        self.logger.info(
            f"{identifier} cpu_math_library_num_threads: {self.config_status['cpu_math_library_num_threads']}"
        )
        self.logger.info(
            "----------------------- Model info ----------------------")
        self.logger.info(f"{identifier} model_name: {self.model_name}")
        self.logger.info(f"{identifier} precision: {self.precision}")
        self.logger.info(
            "----------------------- Data info -----------------------")
        self.logger.info(f"{identifier} batch_size: {self.batch_size}")
        self.logger.info(f"{identifier} input_shape: {self.data_shape}")
        self.logger.info(f"{identifier} data_num: {data_num}")
        self.logger.info(
            "----------------------- Perf info -----------------------")
        self.logger.info(
            f"{identifier} cpu_rss(MB): {cpu_rss_mb}, gpu_rss(MB): {gpu_rss_mb}, gpu_util: {gpu_util}%"
        )
        self.logger.info(
            f"{identifier} total time spent(s): {total_time_s}")
        self.logger.info(
            f"{identifier} preprocess_time(ms): {preprocess_time_ms}, inference_time(ms): {inference_time_ms}, postprocess_time(ms): {postprocess_time_ms}"
        )

    def print_help(self):
        """
        print function help
        """
        print("""Usage: 
            ==== Print inference benchmark logs. ====
            config = paddle.inference.Config()
            model_info = {'model_name': 'resnet50'
                          'precision': 'fp32'}
            data_info = {'batch_size': 1
                         'shape': '3,224,224'
                         'data_num': 1000}
            perf_info = {'preprocess_time_s': 1.0
                         'inference_time_s': 2.0
                         'postprocess_time_s': 1.0
                         'total_time_s': 4.0}
            resource_info = {'cpu_rss_mb': 100
                             'gpu_rss_mb': 100
                             'gpu_util': 60}
            log = PaddleInferBenchmark(config, model_info, data_info, perf_info, resource_info)
            log('Test')
            """)
# if __name__ == "__main__":
# get_os_info()
# print(envs['os_info'])
# get_cudnn_info()
# print(envs['cudnn_version'])
|
[
"liuvv0203@outlook.com"
] |
liuvv0203@outlook.com
|
9dc7b3340119e22850b84c68782778ccc8616e18
|
b6302c926fbcd65cd7300aca7af5ae6e05aee8dc
|
/02_reto_programas_ramificados.py
|
604be5301a9b7e11f2aae64719728f69f810751e
|
[] |
no_license
|
danhiel98/pensamento-computacional-python
|
0efc899ebc1abbf56f5dbbb794d254449adc4a83
|
1f2b9209a768738f90826372db595bb7f619af31
|
refs/heads/master
| 2022-12-21T15:35:30.388084
| 2020-09-23T19:47:35
| 2020-09-23T19:47:35
| 298,077,898
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 488
|
py
|
# Read name and age for two users, then report whose age is greater.
usuarios = []
for ordinal in ('primer', 'segundo'):
    nombre = input(f'Introduzca el nombre del {ordinal} usuario: ')
    edad = int(input(f'Introduzca la edad del {ordinal} usuario: '))
    usuarios.append((nombre, edad))
(nombre1, edad1), (nombre2, edad2) = usuarios
if edad1 > edad2:
    print(f'La edad de {nombre1} es mayor que la de {nombre2}')
elif edad2 > edad1:
    print(f'La edad de {nombre2} es mayor que la de {nombre1}')
else:
    print(f'La edad de {nombre1} es la misma que la de {nombre2}')
|
[
"danhiel.garcia98@gmail.com"
] |
danhiel.garcia98@gmail.com
|
13f183ad4d985b6f8f788da3e3cdd36f339659ea
|
e730d4ca31ab039ba1b6cef12feba7fee2b47415
|
/src/old/multimodal_code/train_tfidf.py
|
420d8c58c13e0679ddd3195bee514dad7d468fc3
|
[
"Apache-2.0"
] |
permissive
|
trungnt13/digisami_journal
|
f1f49ffbdba17cd864c4cf9fe32eb238db2924e5
|
671486d0fe7b65cad80daf8e8b96d475245c5fed
|
refs/heads/master
| 2021-03-24T12:14:20.139103
| 2018-11-17T18:09:49
| 2018-11-17T18:09:49
| 97,585,287
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,835
|
py
|
from __future__ import print_function, division, absolute_import
from odin.utils import ArgController, stdio, get_logpath, get_modelpath
# Command-line arguments: dataset selector plus training hyper-parameters.
args = ArgController(
).add('-ds', 'sami, estonia, finnish', 'estonia'
# for training
).add('-bs', 'batch size', 8
).add('-lr', 'learning rate', 0.0001
).add('-epoch', 'number of epoch', 8
# for features
).parse()
# Identical name for model
MODEL_NAME = (args['ds'][:3] + '_texts')
# store log
stdio(path=get_logpath(name=MODEL_NAME, override=True))
import os
# ODIN backend config must be set before importing odin.backend below.
os.environ['ODIN'] = 'float32,gpu,tensorflow,cnmem=0.2,seed=1208'
from six.moves import cPickle
import numpy as np
np.random.seed(1208)
from odin import backend as K, nnet as N, fuel as F
from odin import training, visual
from odin.utils import Progbar
from odin.basic import has_roles, WEIGHT, PARAMETER
from utils import get_data, laugh_labels, evaluate, CODE_PATH, SEED
# ===========================================================================
# Load data
# ===========================================================================
print('Model:', MODEL_NAME)
# NOTE(review): Python-2 era code — cPickle.load on a file opened in text
# mode ('r') and dict.iteritems() below; Python 3 needs 'rb' and items().
tokenizer = cPickle.load(
    open(os.path.join(CODE_PATH, 'nlp', '%s_tokenizer' % args['ds']), 'r'))
data_matrix = cPickle.load(
    open(os.path.join(CODE_PATH, 'nlp', '%s_matrix' % args['ds']), 'r'))
for i, j in tokenizer.summary.iteritems():
    print(i, ':', j)
# ===========================================================================
# Extract data
# ===========================================================================
X = []
y = []
longest_conversation = data_matrix['longest_conversation'][0]
for f, data in data_matrix.iteritems():
    if f == 'longest_conversation':
        continue
    for (topic, topic_seq, convs_seq,
         topic_tfidf, convs_tfidf,
         laugh, alllaugh, time) in data:
        shape = (longest_conversation, topic_tfidf.shape[1])
        x = np.zeros(shape=shape)
        # right-align the conversation so zero padding sits at the front
        x[-convs_tfidf.shape[0]:] = convs_tfidf
        # ====== store ====== #
        X.append(x.reshape((1,) + shape))
        # target: laughter-event count, min-max normalised to [0, 1] below
        y.append(len(alllaugh))
# ====== finalize data ====== #
X = np.concatenate(X, axis=0)
y = np.array(y, dtype='float32')
y = (y - np.min(y)) / (np.max(y) - np.min(y))
print('Data Shape:', X.shape, y.shape)
# ====== train test split ====== #
np.random.seed(SEED)
n = len(y)
idx = np.random.permutation(n)
X = X[idx]; y = y[idx]
SPLIT = 0.8
X_train = X[:int(SPLIT * n)]
y_train = y[:int(SPLIT * n)]
X_valid = X[int(SPLIT * n):]
y_valid = y[int(SPLIT * n):]
print('Training:', X_train.shape)
print('Validing:', X_valid.shape)
# ===========================================================================
# Different model
# ===========================================================================
def model1():
    """1-D conv stack + unidirectional LSTM regressor, sigmoid output in [0, 1]."""
    layers = [
        N.Dimshuffle(pattern=(0, 1, 'x', 2)),
        N.Conv(num_filters=512, filter_size=(5, 1), pad='valid',
               strides=(1, 1), activation=K.linear),
        N.BatchNorm(activation=K.relu),
        N.Conv(num_filters=256, filter_size=(5, 1), pad='valid',
               strides=(1, 1), activation=K.linear),
        N.BatchNorm(activation=K.relu),
        N.Flatten(outdim=3),
        N.CudnnRNN(num_units=128, rnn_mode='lstm', input_mode='linear',
                   num_layers=2,
                   direction_mode='unidirectional'),
        N.BatchNorm(axes='auto'),
        N.Flatten(outdim=2),
        N.Dense(1, activation=K.sigmoid),
    ]
    return N.Sequence(layers, debug=True, name=MODEL_NAME)
def model3():
    """Wide-kernel conv + max-pool + LSTM variant (not selected below)."""
    layers = [
        N.Dimshuffle(pattern=(0, 1, 2, 'x')),
        N.Conv(num_filters=32, filter_size=(5, 126), pad='valid',
               strides=(1, 1), activation=K.linear),
        N.BatchNorm(activation=K.relu),
        N.Pool(pool_size=(2, 5), mode='max'),
        N.Flatten(outdim=3),
        N.CudnnRNN(num_units=256, rnn_mode='lstm', input_mode='linear',
                   num_layers=2,
                   direction_mode='unidirectional'),
        N.BatchNorm(axes='auto'),
        N.Flatten(outdim=2),
        N.Dense(1, activation=K.sigmoid),
    ]
    return N.Sequence(layers, debug=True, name=MODEL_NAME)
def model2():
    """Bidirectional LSTM regressor — the architecture actually trained below."""
    layers = [
        N.CudnnRNN(num_units=256, rnn_mode='lstm', input_mode='linear',
                   num_layers=2,
                   direction_mode='bidirectional'),
        N.BatchNorm(axes=0),
        N.Flatten(outdim=2),
        N.Dense(1, activation=K.sigmoid),
    ]
    return N.Sequence(layers, debug=True, name=MODEL_NAME)
# ===========================================================================
# Create model
# ===========================================================================
X_ = K.placeholder(shape=(None,) + X_train.shape[1:], name='X')
y_ = K.placeholder(shape=(None,), name='y', dtype='float32')
f = model2()
# Build separate graphs for training (dropout/BN in train mode) and eval.
K.set_training(1); y_pred_train = f(X_)
K.set_training(0); y_pred_eval = f(X_)
# ====== weights and params ====== #
weights = [w for w in f.parameters if has_roles(w, WEIGHT)]
# NOTE(review): L1 and L2 are computed but never added to the cost below.
L1 = K.L1(weights)
L2 = K.L2(weights)
params = f.parameters
print('Params:', [p.name for p in params])
# ====== cost function ====== #
cost_train = K.mean(K.binary_crossentropy(y_pred_train, y_))
cost_pred_1 = K.mean(K.binary_crossentropy(y_pred_eval, y_))
cost_pred_2 = K.mean(K.squared_error(y_pred_eval, y_))
optimizer = K.optimizers.RMSProp(lr=args['lr'])
updates = optimizer.get_updates(cost_train, params)
print('Building train function ...')
f_train = K.function([X_, y_], cost_train, updates)
print('Building score function ...')
f_eval = K.function([X_, y_], [cost_pred_1, cost_pred_2])
print('Building pred function ...')
f_pred = K.function(X_, y_pred_eval)
# ===========================================================================
# Create traning
# ===========================================================================
print("Preparing main loop ...")
main = training.MainLoop(batch_size=args['bs'], seed=12082518, shuffle_level=2)
main.set_save(
    get_modelpath(name=MODEL_NAME, override=True),
    [f, args]
)
main.set_task(f_train, data=(X_train, y_train),
              epoch=args['epoch'], name='Train')
main.set_subtask(f_eval, data=(X_valid, y_valid),
                 freq=0.6, name='Valid')
main.set_callback([
    training.ProgressMonitor(name='Train', format='Results: {:.4f}'),
    training.ProgressMonitor(name='Valid', format='Results: {:.4f}, {:.4f}'),
    # training.NaNDetector(name='Train', patience=3, rollback=True),
    training.History(),
    training.EarlyStopGeneralizationLoss(name='Valid', threshold=5, patience=5),
])
main.run()
# ===========================================================================
# Visualization
# ===========================================================================
main['History'].print_batch('Train')
main['History'].print_epoch('Valid')
try:
    print('[Train] Benchmark batch:', main['History'].benchmark('Train', 'batch_end').mean)
    print('[Train] Benchmark epoch:', main['History'].benchmark('Train', 'epoch_end').mean)
    print('[Valid] Benchmark batch:', main['History'].benchmark('Valid', 'batch_end').mean)
    print('[Valid] Benchmark epoch:', main['History'].benchmark('Valid', 'epoch_end').mean)
# NOTE(review): bare except silently hides any benchmark failure — narrow it.
except:
    pass
# ===========================================================================
# Evaluate
# ===========================================================================
from sklearn.metrics import accuracy_score, f1_score, confusion_matrix
def report(ytrue, ypred):
    """Print accuracy, F1 score and the confusion matrix for binary labels."""
    print()
    for label, metric in (("Accuracy:", accuracy_score), ("F1:", f1_score)):
        print(label, metric(ytrue, ypred))
    print("Confustion:")  # (sic) original label preserved
    print(confusion_matrix(ytrue, ypred))
# Reload the best saved model and inspect validation predictions.
# NOTE(review): f_pred was compiled before this reload, so predictions still
# come from the in-memory graph, not the reloaded `f` — verify intent.
f = cPickle.load(open(get_modelpath(name=MODEL_NAME, override=False), 'r'))[0]
y_pred = f_pred(X_valid).ravel()
y_true = y_valid
for i, j in zip(y_pred, y_true):
    print(i, j)
# Binarise at two thresholds and report classification metrics for each.
report(y_true >= 0.1, y_pred >= 0.1)
report(y_true >= 0.5, y_pred >= 0.5)
|
[
"nickartin13@gmail.com"
] |
nickartin13@gmail.com
|
bc817a1f834cee5b11559787a13f9d98dbbc6e45
|
6dcb820a5bb558e794eede6c74c7bf0a92170a6b
|
/Player/astar.py
|
478bc76c7d82e92961b41d7ee0ff4fc5a925d43b
|
[
"MIT"
] |
permissive
|
xenofyxus/bc18-scaffold
|
d69644ffc2bdeeb11cd2e9f547551978f449de19
|
21cb58d470908aa739ff802f828dbb1ee9e0d3dc
|
refs/heads/master
| 2020-05-14T06:04:32.903759
| 2019-04-24T23:23:02
| 2019-04-24T23:23:02
| 181,704,175
| 0
| 0
| null | 2019-04-16T14:18:38
| 2019-04-16T14:18:37
| null |
UTF-8
|
Python
| false
| false
| 3,197
|
py
|
import battlecode as bc
import sys
import math
directions = [dir for dir in bc.Direction if dir is not bc.Direction.Center]
class Node():
    """A single search node for A*: a map position plus its parent link and
    the g (path cost), h (heuristic) and f (total) scores."""

    def __init__(self, parent=None, position=None):
        self.parent = parent
        self.position = position
        # all three scores start at zero until the search fills them in
        self.g = self.h = self.f = 0

    def __eq__(self, other):
        # Nodes are "equal" when they refer to the same map square.
        same_x = self.position.x == other.position.x
        same_y = self.position.y == other.position.y
        return same_x and same_y
def astar(maze,friendly_units, start, end, max_path_length=math.inf):
    """A* search from `start` to `end` over a 2-D grid.

    maze[x][y] is truthy where the square is walkable; friendly_units[x][y]
    truthy squares are treated as blocked.  `start`/`end` are positions with
    .x/.y attributes and an .add(direction) method (battlecode MapLocations).
    Returns the list of positions from start to end inclusive, the partial
    path found so far once its cost exceeds `max_path_length`, or [] if the
    goal is unreachable.

    NOTE(review): h is *squared* Euclidean distance, which can overestimate
    the true remaining cost, so the result is not guaranteed shortest.
    NOTE(review): the open list is scanned linearly each iteration; heapq
    would make extraction O(log n).
    """
    # Create start and end node
    start_node = Node(None, start)
    start_node.g = start_node.h = start_node.f = 0
    end_node = Node(None, end)
    end_node.g = end_node.h = end_node.f = 0
    # Initialize both open and closed list
    open_list = []
    closed_list = []
    # Add the start node
    open_list.append(start_node)
    # Loop until you find the end
    while len(open_list) > 0:
        # Get the current node: linear scan for the smallest f score
        current_node = open_list[0]
        current_index = 0
        for index, item in enumerate(open_list):
            if item.f < current_node.f:
                current_node = item
                current_index = index
        # Pop current off open list, add to closed list
        open_list.pop(current_index)
        closed_list.append(current_node)
        # Found the goal (or exceeded the path-length budget: return partial path)
        if current_node == end_node or current_node.g > max_path_length :
            path = []
            current = current_node
            while current is not None:
                path.append(current.position)
                current = current.parent
            return path[::-1] # Return reversed path
        # Generate children
        for dir in directions: # Adjacent squares
            # Get node position
            node_position = current_node.position.add(dir)
            # Make sure within range
            if node_position.x > (len(maze) - 1) or node_position.x < 0 or node_position.y > (len(maze[len(maze)-1]) -1) or node_position.y < 0:
                continue
            # Make sure walkable terrain (and not occupied by a friendly unit)
            if not maze[node_position.x][node_position.y] or friendly_units[node_position.x][node_position.y]:
                continue
            # Create new node
            new_node = Node(current_node, node_position)
            if new_node in closed_list:
                continue
            # uniform step cost of 1; h = squared straight-line distance
            new_node.g = current_node.g + 1
            new_node.h = ((new_node.position.x - end_node.position.x) ** 2) + ((new_node.position.y - end_node.position.y) ** 2)
            new_node.f = new_node.g + new_node.h
            # If the square is already open, keep whichever node has lower g.
            found_node = False
            for (index, node) in enumerate(open_list):
                if node == new_node:
                    found_node = True
                    if new_node.g < node.g:
                        open_list[index] = new_node
                    break
            if not found_node:
                open_list.append(new_node)
    return []
|
[
"alex.lindstrom92@hotmail.com"
] |
alex.lindstrom92@hotmail.com
|
aad379ec45612f9fc3a2c8c9759a8ae8f9c97972
|
a23d09676dcd3af65793ba1b71213e47f4bc06d7
|
/start.py
|
9a751701c32ce887ad946177dc7b74dbadcf27c5
|
[] |
no_license
|
macnaer/YoutubeDownloader
|
913d36cb5bd10825c55b999403fdc76aa577b50f
|
417cc5dc1c0fe3b88e9f460b9eb3226edf03381c
|
refs/heads/master
| 2020-12-23T20:26:29.627077
| 2020-02-02T20:30:49
| 2020-02-02T20:30:49
| 237,264,525
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 45
|
py
|
from lib.YouDownloader import start
start()
|
[
"master@rivne.itstep.org"
] |
master@rivne.itstep.org
|
a80686c1c0e866961160612e3415c4ec00d47d91
|
98c441f8e676690ff5edab22441bdca485e95b5d
|
/thesis/code/numpy_dace/scipy_dace.py
|
99dda541a9cf492e008ade2637601f21caba0875
|
[] |
no_license
|
drewblount/2014-2015
|
096cc62c162340b7e48a2405d4c9d1493ab09901
|
64b04a9b3e1b3e138e6589efa99f6129393004ad
|
refs/heads/master
| 2016-09-06T03:14:17.005613
| 2015-04-30T16:35:24
| 2015-04-30T16:35:24
| 24,079,375
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,859
|
py
|
## This is a copy of dace.py, but using scipy.linalg instead
## of numpy.matrix. Two reasons for the switch: scipy alwaY
## uses BLAS, numpy not alwaY, and scipy.linalg makes classes
## less ambiguous
## see: http://docs.scipy.org/doc/scipy-0.14.0/reference/tutorial/linalg.html
## The goal of this file is to build a minimum working model of the
## EGO algorithm as put forth in Jones, Schonlau, Welch 1998
from math import exp, pi
import numpy as np
from scipy import linalg as la
from random import random
import scipy.optimize as op
## X is a vector of the input values which have been evaluated already
## by the black box function, and Y is the vector of corresponding outputs.
## Q and P are regression terms--thetas and P from Jones Eq(1)
## returns the DACE predictor function as defined in Jones etc Eq (7)
## the type of each variable is assumed to
def dace_predictor(X, Y, P, Q, verbose=False):
    """Build the DACE/kriging predictor of Jones, Schonlau & Welch (1998), Eq. (7).

    X: evaluated input vectors; Y: corresponding outputs (column matrix);
    P, Q: correlation exponents and theta weights of Eq. (1).  Returns a
    function x_new -> predicted output (a float).
    """
    # include a length check? |X| = |Y|, and |Q| = |P| = dim(elt of X)
    # makes X, Y are numpy arraY
    X, Y = np.array(X), np.array(Y)
    R = corr_matrix(X, P, Q)
    # R is now a numpy array
    R_inv = la.inv(R)
    # naming vars so they aren't computed more than once -- Y is transposed
    # to change it from a row matrix to a vector
    R_inv_y = R_inv.dot(Y)
    ones = np.array( [[ 1 ]for i in range(len(X)) ] )
    # ones is a vector (column matrix)
    ones_T_R_inv = ones.T.dot(R_inv)
    # Regression term -- Jones Eq 5
    # NOTE(review): this local `mu_hat` shadows the module-level mu_hat()
    # function defined below (same formula, computed inline here).
    mu_hat = ones.T.dot(R_inv_y) / (ones_T_R_inv.dot(ones))
    ## 2d array -> float
    mu_hat = mu_hat[0][0]
    if verbose: print('mu_hat = %.4f' % mu_hat)
    corr = corr_func(P, Q)
    def pred_func(x_new):
        # vector of correlations between x_new and X
        r = np.array( [ [corr(x_new, x_old)] for x_old in X] )
        # a transcription of Eq 7
        temp = R_inv.dot( ones * mu_hat )
        temp2 = R_inv_y - temp
        temp3 = r.T.dot(temp2)
        t3val = temp3[0][0]
        return (mu_hat + t3val)
    return pred_func
## Jones Eq(1)--takes vectors of regressors Q (thetas in Jones) and P,
## and returns a dist_funcance function for input vectors
def dist_func(P, Q):
    """Weighted distance of Jones Eq. (1): sum_i Q[i] * |x1_i - x2_i| ** P[i],
    with Q the theta weights and P the exponents.  Returns a function of
    two input vectors."""
    def dist(x1, x2):
        total = 0
        for i in range(len(Q)):
            total += Q[i] * abs(x1[i] - x2[i]) ** P[i]
        return total
    return dist
## Jones Eq(2)--takes vectors of regression terms, returns a
## correlation function between input vectors
def corr_func(P, Q):
    """Correlation function of Jones Eq. (2): corr(x1, x2) = exp(-dist(x1, x2)).

    Bug fix: the regression terms were forwarded to dist_func swapped
    (``dist_func(Q, P)``), which exchanged the theta weights Q with the
    exponents P — contradicting dist_func's own signature and the P/Q
    convention documented at the top of this module.
    """
    dist = dist_func(P, Q)
    def corr(x1, x2):
        return np.exp(-dist(x1, x2))
    return corr
## Returns R, a matrix whose i,jth entry is the correlation between
## x_i and x_j. First makes a 2d list, then transforms it to a numpy matrix
## according to stackexchange: http://stackoverflow.com/questions/7133885/fastest-way-to-grow-a-numpy-numeric-array
## it's fastest to construct with python lists
def corr_matrix(X, P, Q):
    """Pairwise correlation matrix R with R[i][j] = corr(X[i], X[j]),
    unit diagonal, filled via symmetry for the lower triangle."""
    corr = corr_func(P, Q)
    n = len(X)
    rows = []
    for i in range(n):
        row = []
        for j in range(n):
            if i == j:
                row.append(1)
            elif i > j:
                # mirror the already-computed upper-triangle entry
                row.append(rows[j][i])
            else:
                row.append(corr(X[i], X[j]))
        rows.append(row)
    return np.array(rows)
## the best prediction of the mean mu, Jones Eq(5), given the output vect Y
## and the correlation matrix R
def mu_hat(Y, R_inv):
ones = np.array( [[ 1 ]for i in range(len(Y)) ] )
ones_T_R_inv = ones.T.dot(R_inv)
return ones_T_R_inv.dot(Y) / ones_T_R_inv.dot(ones)
## the best prediction of the stdev, jones Eq(6). Assumes Y is already a
## column matrix in numpy. R is the correlation matrix.
def stdev_hat(Y, R_inv, mu):
n = len(Y)
ones = np.array( [[ 1 ]for i in range(len(Y)) ] )
return ( (Y - ones*mu).T.dot( R_inv.dot(Y - ones*mu) ) ) / n
## The concentrated likelihood function, EQ 4-6 from Jones et al.
## takes args X, Y (evaluated inputs and outputs)
## ands P, Q (regression variables) and returns the
## likelihood of observing the X, Y given the
## P, Q. This is the function we wish to optimize when choosing P, Q
## to maximize likelihood.
def conc_likelihood(X, Y, P, Q):
R = corr_matrix(X, P, Q)
R_inv = la.inv(R)
mu = mu_hat(Y, R_inv)
stdev = stdev_hat(Y, R_inv, mu)
n = len(Y)
ones = np.array( [[ 1 ]for i in range(len(X)) ] )
# linear term
lin_term = 1 / ( (2 * pi * stdev)**(n/2.0) * la.det(R) ** (0.5) )
# combining the right half of 4 with 6 gives this simplified expression
exp_term = exp(n/2.0)
return lin_term*exp_term
## for a given X, Y, finds P and Q that optimize the above function
#def max_likelihood_params(X, Y):
|
[
"dblount@reed.edu"
] |
dblount@reed.edu
|
10218606bc41a31bc1a6be5c657e397952013755
|
7608a4c0e2252f53aa632e89b9dcab94ad195391
|
/crop/forms.py
|
43761cc37f1bf266d84a48af2f19d527217c209c
|
[] |
no_license
|
gokulacko/agri
|
505369099aab4098e8adcfee3ec2d9708be59d4e
|
604bde1f3583456fcd6a1d83122e77cd688a9cad
|
refs/heads/master
| 2020-04-30T16:29:14.225155
| 2019-04-03T13:53:17
| 2019-04-03T13:53:17
| 176,949,516
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,781
|
py
|
import crop.models as m
from django import forms
from django.forms import widgets
class LandForm(forms.ModelForm):
water = forms.CharField(widget=forms.TextInput(attrs={'class': 'water', 'placeholder':'Water(Rate 0 to 10)'}))
avg_temp = forms.CharField(widget=forms.TextInput(attrs={'class': 'avg_temp', 'placeholder':'Average Temperature(Celsius)'}))
pincode = forms.CharField(widget=forms.TextInput(attrs={'class': 'pincode', 'placeholder':'Pincode'}))
# water = forms.CharField(widget=forms.TextInput(attrs={'class': 'water', 'placeholder':'Water(Rate 0 to 10)'}))
class Meta:
model = m.Land
fields = [
# 'area',
'water',
'avg_temp',
'pincode',
'soil_type',
'ph',
# 'user'
]
class SoilForm(forms.ModelForm):
class Meta:
model = m.SoilTest
fields = [
'ph',
'phosporus',
'potassium',
'nitrogen',
'sulfate',
'boron',
'copper',
'iron',
'zinc',
'magnesium',
'land',
]
class CropForm(forms.ModelForm):
name = forms.CharField(widget=forms.TextInput(attrs={'class': 'name', 'placeholder':'Name:'}))
description = forms.CharField(widget=forms.Textarea(attrs={'class': 'description', 'placeholder':'Description:'}))
duration = forms.IntegerField(widget=forms.TextInput(attrs={'class': 'duration', 'placeholder':'Duration(In days):'}))
min_temp = forms.IntegerField(widget=forms.TextInput(attrs={'class': 'min_temp', 'placeholder':'Minimun temperature(Celcius):'}))
max_temp = forms.IntegerField(widget=forms.TextInput(attrs={'class': 'max_temp', 'placeholder':'Max Temperature(celcius):'}))
month_plant = forms.CharField(widget=forms.TextInput(attrs={'class': 'month_plant', 'placeholder':'Time to Plant:'}))
cultivation = forms.CharField(widget=forms.TextInput(attrs={'class': 'cultivation', 'placeholder':'Cultivation:'}))
harvest = forms.CharField(widget=forms.Textarea(attrs={'class': 'harvest', 'placeholder':'Harvest:'}))
water = forms.IntegerField(widget=forms.TextInput(attrs={'class': 'water', 'placeholder':'Water(Rate 0 to 10):'}))
ph_min = forms.DecimalField(widget=forms.TextInput(attrs={'class': 'ph_min', 'placeholder':'Minimum Ph:'}))
ph_min = forms.DecimalField(widget=forms.TextInput(attrs={'class': 'ph_min', 'placeholder':'Minimum Ph:'}))
class Meta:
model = m.Crop
fields = '__all__'
class VarityForm(forms.ModelForm):
name = models.CharField(max_length=30)
description = models.TextField(blank=True, null=True)
duration = models.IntegerField(blank=True, null=True)
min_temp = models.IntegerField(blank=True, null=True)
max_temp = models.IntegerField(blank=True, null=True)
month_plant = models.CharField(max_length=30, blank=True, null=True)
cultivation = models.TextField(blank=True, null=True)
water = models.IntegerField(blank=True, null=True)
harvest = models.TextField(blank=True, null=True)
name = forms.CharField(widget=forms.TextInput(attrs={'class': 'name', 'placeholder':'Name:'}))
description = forms.Textarea(widget=forms.Textarea(attrs={'class': 'description', 'placeholder':'Description:'}))
duration = forms.IntegerField(widget=forms.TextInput(attrs={'class': 'duration', 'placeholder':'Duration(In days):'}))
min_temp = forms.IntegerField(widget=forms.TextInput(attrs={'class': 'min_temp', 'placeholder':'Minimun temperature(Celcius):'}))
max_temp = forms.IntegerField(widget=forms.TextInput(attrs={'class': 'max_temp', 'placeholder':'Max Temperature(celcius):'}))
month_plant = forms.CharField(widget=forms.TextInput(attrs={'class': 'month_plant', 'placeholder':'Time to Plant:'}))
cultivation = forms.CharField(widget=forms.TextInput(attrs={'class': 'cultivation', 'placeholder':'Cultivation:'}))
harvest = forms.Textarea(widget=forms.Textarea(attrs={'class': 'harvest', 'placeholder':'Harvest:'}))
water = forms.IntegerField(widget=forms.TextInput(attrs={'class': 'water', 'placeholder':'Water(Rate 0 to 10):'}))
class Meta:
model = m.Varity
fields = '__all__'
class DiseaseForm(forms.ModelForm):
name = forms.CharField(widget=forms.TextInput(attrs={'class': 'name', 'placeholder':'Name:'}))
Symptoms = forms.Textarea(widget=forms.TextInput(attrs={'class': 'symptoms', 'placeholder':'Symptoms:'}))
effect = forms.Textarea(widget=forms.TextInput(attrs={'class': 'effect', 'placeholder':'Effect:'}))
prevention = forms.Textarea(widget=forms.TextInput(attrs={'class': 'prevention', 'placeholder':'Prevention:'}))
class Meta:
model = m.Disease
fields = '__all__'
class SolutionForm(forms.ModelForm):
name = forms.CharField(widget=forms.TextInput(attrs={'class': 'name', 'placeholder':'Name:'}))
Procedure = forms.Textarea(widget=forms.TextInput(attrs={'class': 'procedure', 'placeholder':'Procedure:'}))
quantity = forms.IntegerField(widget=forms.TextInput(attrs={'class': 'quantity', 'placeholder':'Quantity:'}))
duration = forms.IntegerField(widget=forms.TextInput(attrs={'class': 'duration', 'placeholder':'Duration:'}))
items = forms.Textarea(widget=forms.TextInput(attrs={'class': 'items', 'placeholder':'Items:'}))
class Meta:
model = m.Solution
fields = '__all__'
class ProfileForm(forms.ModelForm):
name = forms.CharField(widget=forms.TextInput(attrs={'class': 'name', 'placeholder':'Name:'}))
area = forms.CharField(widget=forms.TextInput(attrs={'class': 'area', 'placeholder':'Area:'}))
city = forms.CharField(widget=forms.TextInput(attrs={'class': 'city', 'placeholder':'City:'}))
phone = forms.CharField(widget=forms.TextInput(attrs={'class': 'phone', 'placeholder':'Phone:'}))
alt_phone = forms.CharField(widget=forms.TextInput(attrs={'class': 'alt_phone', 'placeholder':'Alternate Phone:'}))
pincode = forms.IntegerField(widget=forms.TextInput(attrs={'class': 'pincode', 'placeholder':'Pincode:'}))
address = forms.CharField(widget=forms.Textarea(attrs={'class': 'address', 'placeholder':'Address:'}))
class Meta:
model = m.Profile
fields = [
'name',
'area',
'city',
'pincode',
'address',
'phone',
'alt_phone',
'is_farmer',
'is_buyer',
]
class ProductForm(forms.ModelForm):
name = forms.CharField(widget=forms.TextInput(attrs={'class': 'name', 'placeholder':'Name:'}))
quantity = forms.IntegerField(widget=forms.TextInput(attrs={'class': 'quantity', 'placeholder':'Quantity:'}))
expected_price = forms.IntegerField(widget=forms.TextInput(attrs={'class': 'expected_price', 'placeholder':'Expected price:'}))
class Meta:
model = m.Product
fields = [
'name',
'quantity',
'expected_price',
]
class BuyerForm(forms.ModelForm):
expected_product = forms.CharField(widget=forms.TextInput(attrs={'class': 'expected_product', 'placeholder':'Expected product:'}))
quantity = forms.IntegerField(widget=forms.TextInput(attrs={'class': 'quantity', 'placeholder':'Quantity:'}))
expected_price = forms.IntegerField(widget=forms.TextInput(attrs={'class': 'expected_price', 'placeholder':'Expected price:'}))
class Meta:
model = m.Buyer
fields = [
'expected_product',
'quantity',
'expected_price',
]
class EventForm(forms.ModelForm):
event_name = forms.CharField(widget=forms.TextInput(attrs={'class': 'event_name', 'placeholder':'Event Name:'}))
description = forms.CharField(widget=forms.Textarea(attrs={'class': 'description', 'placeholder':'Description'}))
location = forms.CharField(widget=forms.Textarea(attrs={'class': 'location', 'placeholder':'Location'}))
# pincode = forms.IntegerField(widget=forms.TextInput(attrs={'class': 'pincode', 'placeholder':'Pincode'}))
class Meta:
model = m.Event
fields = [
'event_name',
'description',
'location',
'date',
'time',
]
|
[
"gokulakrishna@admins-MacBook-Pro-6.local"
] |
gokulakrishna@admins-MacBook-Pro-6.local
|
859f09c0d3a81ea9d6185389e14eba77f8111778
|
31780af7a5558523def1aae5f25df3e0b084be9b
|
/Ex8.py
|
83c3f99e15c117b70d491154a61cb1d530c30620
|
[] |
no_license
|
sevilaybayatli/PYTHS19
|
1796615ff939f2e98ce657feeaa3efd47a2e66c6
|
ae0607e215a0d8205475d124c0362c39881e5eda
|
refs/heads/master
| 2020-07-23T16:12:17.922548
| 2020-03-23T22:03:00
| 2020-03-23T22:03:00
| 207,624,169
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 442
|
py
|
#Exercise Question 8: Find all occurrences of “USA” in given string ignoring the case
#input_str = "Welcome to USA. usa awesome, isn't it?"
#The USA count is: 2
import sys
inputStr= "Welcome to USA USA. usa usa awesome, isn't it?"
words=inputStr.split()
count=0
for string in words:
if(string=='usa'):
count+=1
if (string=='USA'):
count+=1
print("the number of usa or USA in input string: ", count)
|
[
"sevilaybayatli@gmail.com"
] |
sevilaybayatli@gmail.com
|
792c101ee370e6e54da0bf9882f8979fe03c7a1f
|
113c576d565bb22da1c7d9b6a7e2d2ae41040d0c
|
/wsgi_dashboard.py
|
80e29ee034bf777f843654367e99643e90052c21
|
[] |
no_license
|
kaiXlee/itinerum-dashboard-api
|
e250d6cd730b2603fffe7110858867fd223d0d7f
|
9501f6ecf3b65558bb27df336c1f26e0e8e6a13e
|
refs/heads/master
| 2023-02-21T09:29:24.411765
| 2021-01-18T13:43:56
| 2021-01-18T13:43:56
| 330,678,909
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 451
|
py
|
#!/usr/bin/env python
# Kyle Fitzsimmons, 2016
'''WSGI entry script for allowing API to be managed by gunicorn'''
from dashboard.server import create_app
application = app = create_app()
if __name__ == "__main__":
if app.config.get('CONF') in ['development', 'testing']:
app.run(host=app.config['APP_HOST'],
port=app.config['APP_PORT'],
debug=True)
else:
app.run(port=app.config['APP_PORT'])
|
[
"kyle96921@hotmail.com"
] |
kyle96921@hotmail.com
|
559458267e3ecc40de1fd72a828827e2bf280126
|
04be75e9d1cca617216f0323e0521e83132f5a1d
|
/ex16_read.py
|
f989e948b3fedf5c22438376faccc763b06ca370
|
[] |
no_license
|
musalappa/python_exercises
|
38d1c791b668028d8a73fdebbd5332267999f30a
|
459c01547c962404785296d2cb8d75e7344454a8
|
refs/heads/master
| 2016-09-13T16:30:34.512697
| 2016-04-29T05:27:55
| 2016-04-29T05:27:55
| 57,356,202
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 199
|
py
|
from sys import argv
script, filename = argv
file = open(filename, 'r')
line1 = file.readline()
print line1
line2 = file.readline()
print line2
line3 = file.readline()
print line3
file.close()
|
[
"sai@localhost.localdomain"
] |
sai@localhost.localdomain
|
7e480e4dc6bdcdfadec1cb67702ec7c01682db41
|
0e8bca906671e3f00e7053ace4caedac4ac5170d
|
/server/routes.py
|
ac250bb43a5b53d9b773f4ba65c5e248bc6b2f3b
|
[
"MIT"
] |
permissive
|
akatama-ai/exp
|
f1be5439319fc54d0d41507f4cfca707a9d8a941
|
3853e1731621069c4e68c56830712e2a565c7381
|
refs/heads/master
| 2022-12-25T22:26:07.120431
| 2020-10-13T15:17:50
| 2020-10-13T15:17:50
| 303,739,324
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 488
|
py
|
from flask import jsonify, render_template
from server import stats
from server import utils
def init(app):
@app.route("/stats")
def app_stats():
return jsonify(stats.info())
@app.route("/")
def frontend():
return render_template("index.html")
@app.route("/explorer")
def explorer():
return render_template("chain.html")
@app.errorhandler(404)
def page_404(error):
return jsonify(utils.dead_response("Method not found"))
|
[
"support@quantex.live"
] |
support@quantex.live
|
d0048b864ae3aa6a2d0bcf6304faa67c5134e843
|
5e90faf04599d8ed1d051c7bbfe60da7b97ec465
|
/aprior/ap_exec.py
|
a9ebbba42b85e6d1397f5d9c727628b12493656f
|
[] |
no_license
|
sc1000y/dataminingLearn
|
beb2dbe225d35098e802a42f95ec5916b9880d17
|
078243a877ff4860be5f7c60781ff9b98e1ad6c1
|
refs/heads/master
| 2021-05-07T07:23:23.940589
| 2017-11-01T11:33:57
| 2017-11-01T11:33:57
| 109,125,296
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,206
|
py
|
# -*- coding:utf-8 -*-
import sys
import IDBKit
def getDataSet():
db=IDBKit.IDBKit();
RAWNAME=["BREAD","BUTTER","NOODLE","CHIP","COKE"]
sql="select "+",".join(RAWNAME)+" from DM_EXEC1";
list=db.find(sql)
return list,RAWNAME
def createC1( dataSet ):
'''''
构建初始候选项集的列表,即所有候选项集只包含一个元素,
C1是大小为1的所有候选项集的集合
'''
C1 = []
for transaction in dataSet:
for item in transaction:
if item is not None:
if [ item ] not in C1:
C1.append( [ item ] )
C1.sort()
#return map( frozenset, C1 )
#return [var for var in map(frozenset,C1)]
return [frozenset(var) for var in C1]
def scanD( D, Ck, minSupport ):
'''''
计算Ck中的项集在数据集合D(记录或者transactions)中的支持度,
返回满足最小支持度的项集的集合,和所有项集支持度信息的字典。
'''
ssCnt = {}
for tid in D: # 对于每一条transaction
for can in Ck: # 对于每一个候选项集can,检查是否是transaction的一部分 # 即该候选can是否得到transaction的支持
if can.issubset( tid ):
ssCnt[ can ] = ssCnt.get( can, 0) + 1
numItems = float( len( D ) )
retList = []
supportData = {}
for key in ssCnt:
support = ssCnt[ key ] / numItems # 每个项集的支持度
if support >= minSupport: # 将满足最小支持度的项集,加入retList
retList.insert( 0, key )
supportData[ key ] = support # 汇总支持度数据
return retList, supportData
def aprioriGen( Lk, k ): # Aprior算法
'''''
由初始候选项集的集合Lk生成新的生成候选项集,
k表示生成的新项集中所含有的元素个数
'''
retList = []
lenLk = len( Lk )
for i in range( lenLk ):
for j in range( i + 1, lenLk ):
L1 = list( Lk[ i ] )[ : k - 2 ];
L2 = list( Lk[ j ] )[ : k - 2 ];
L1.sort();L2.sort()
if L1 == L2:
retList.append( Lk[ i ] | Lk[ j ] )
return retList
def apriori( dataSet, minSupport = 0.5 ):
C1 = createC1( dataSet ) # 构建初始候选项集C1
#D = map( set, dataSet ) # 将dataSet集合化,以满足scanD的格式要求
#D=[var for var in map(set,dataSet)]
D=[set(var) for var in dataSet]
L1, suppData = scanD( D, C1, minSupport ) # 构建初始的频繁项集,即所有项集只有一个元素
L = [ L1 ] # 最初的L1中的每个项集含有一个元素,新生成的
k = 2 # 项集应该含有2个元素,所以 k=2
while ( len( L[ k - 2 ] ) > 0 ):
Ck = aprioriGen( L[ k - 2 ], k )
Lk, supK = scanD( D, Ck, minSupport )
suppData.update( supK ) # 将新的项集的支持度数据加入原来的总支持度字典中
L.append( Lk ) # 将符合最小支持度要求的项集加入L
k += 1 # 新生成的项集中的元素个数应不断增加
return L, suppData # 返回所有满足条件的频繁项集的列表,和所有候选项集的支持度信息
def calcConf( freqSet, H, supportData, brl, minConf=0.7 ): # 规则生成与评价
'''''
计算规则的可信度,返回满足最小可信度的规则。
freqSet(frozenset):频繁项集
H(frozenset):频繁项集中所有的元素
supportData(dic):频繁项集中所有元素的支持度
brl(tuple):满足可信度条件的关联规则
minConf(float):最小可信度
'''
prunedH = []
for conseq in H:
conf = supportData[ freqSet ] / supportData[ freqSet - conseq ]
if conf >= minConf:
print(freqSet - conseq, '-->', conseq, 'conf:', conf)
brl.append( ( freqSet - conseq, conseq, conf ) )
prunedH.append( conseq )
return prunedH
def rulesFromConseq( freqSet, H, supportData, brl, minConf=0.7 ):
'''''
对频繁项集中元素超过2的项集进行合并。
freqSet(frozenset):频繁项集
H(frozenset):频繁项集中的所有元素,即可以出现在规则右部的元素
supportData(dict):所有项集的支持度信息
brl(tuple):生成的规则
'''
m = len( H[ 0 ] )
if len( freqSet ) > m + 1: # 查看频繁项集是否足够大,以到于移除大小为 m的子集,否则继续生成m+1大小的频繁项集
Hmp1 = aprioriGen( H, m + 1 )
Hmp1 = calcConf( freqSet, Hmp1, supportData, brl, minConf ) #对于新生成的m+1大小的频繁项集,计算新生成的关联规则的右则的集合
if len( Hmp1 ) > 1: # 如果不止一条规则满足要求(新生成的关联规则的右则的集合的大小大于1),进一步递归合并,
#这样做的结果就是会有“[1|多]->多”(右边只会是“多”,因为合并的本质是频繁子项集变大,
#而calcConf函数的关联结果的右侧就是频繁子项集)的关联结果
rulesFromConseq( freqSet, Hmp1, supportData, brl, minConf )
def generateRules( L, supportData, minConf=0.7 ):
'''''
根据频繁项集和最小可信度生成规则。
L(list):存储频繁项集
supportData(dict):存储着所有项集(不仅仅是频繁项集)的支持度
minConf(float):最小可信度
'''
bigRuleList = []
for i in range( 1, len( L ) ):
for freqSet in L[ i ]: # 对于每一个频繁项集的集合freqSet
H1 = [ frozenset( [ item ] ) for item in freqSet ]
if i > 1:# 如果频繁项集中的元素个数大于2,需要进一步合并,这样做的结果就是会有“[1|多]->多”(右边只会是“多”,
#因为合并的本质是频繁子项集变大,而calcConf函数的关联结果的右侧就是频繁子项集),的关联结果
rulesFromConseq( freqSet, H1, supportData, bigRuleList, minConf )
else:
calcConf( freqSet, H1, supportData, bigRuleList, minConf )
return bigRuleList
def Apriori():
print("Hello World!")
if __name__=="__main__":
myDat,RA=getDataSet()
#print(myDat)
L, suppData = apriori( myDat, 0.2 ) # 选择频繁项集
print(u"频繁项集L:", L)
print(u"所有候选项集的支持度信息:", suppData)
rules = generateRules( L, suppData, minConf=0.5 )
|
[
"sc1000y@qq.com"
] |
sc1000y@qq.com
|
f8a59288deec752423b2c72090556006824d29a1
|
639b7ed3b8632bf5fa33329d2250587cfaad0c49
|
/bin/smartserver/smartserver.py
|
0b83a17d7267aa79a3fdf44bd4972fe3e3807565
|
[
"BSD-3-Clause"
] |
permissive
|
martiancrow/x12_engine
|
597597b5d880594ca4dd19e1d2bc8b265183bb26
|
b68d636880f07b65b2224eeebb2300d67a1f8c4b
|
refs/heads/master
| 2021-01-01T19:26:58.017351
| 2017-07-31T17:20:08
| 2017-07-31T17:20:08
| 98,589,260
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,776
|
py
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
'''
Filename : hello.py
author : Crow
creat time: 2017-06-03
updata time: 2017-06-03
'''
import sys
import datetime
#sys.path.append('/Volumes/DATA/virtualboxHost/ubuntu16.04_64/x12_engine/bin')
sys.path.append('./bin')
import x12_python
def smartserverinit():
#print("this is smartserverinit")
#path = '/Volumes/DATA/virtualboxHost/ubuntu16.04_64/x12_engine/bin/smartserver'
path = './bin/smartserver'
drvid = x12_python.x12_coap_udp_listen('ALL', 9095)
print("drvid = " + str(drvid))
routelist = [
{
'uri':'/time',
'path':path,
'module':'smartserver',
'func':'time'
},
{
'uri':'/login/:uid/:pwd',
'path':path,
'module':'smartserver',
'func':'login'
}
]
x12_python.x12_coap_route_add(drvid, routelist)
def time(send, msg):
print("this is time action")
#print(send)
#print(msg)
#print("")
#print(msg["HEAD"].keys())
#print(msg["HEAD"].items())
#print("")
#print('username = ' + msg['GET']['id'])
#print('password = ' + msg['GET']['pwd'])
res = {'code': 'Content', 'ver': 1, 'msgid': msg["HEAD"]["msgid"], 'tkl': 0, 't': 'ACK', 'uri-port': 9095, 'payload': str(datetime.datetime.now())}
#print(res)
sendlen = x12_python.x12_coap_msg_send(send, res)
#print('sendlen = ' + str(sendlen))
def login(send, msg):
print("this is login action")
print('username = ' + msg['GET']['uid'])
print('password = ' + msg['GET']['pwd'])
res = {'code': 'Content', 'ver': 1, 'msgid': msg["HEAD"]["msgid"], 'tkl': 0, 't': 'ACK', 'uri-port': 9095, 'payload': 'OK'}
#print(res)
sendlen = x12_python.x12_coap_msg_send(send, res)
#print('sendlen = ' + str(sendlen))
#print("----------------------login-------------------------")
if __name__ == '__main__':
smartinit()
|
[
"xiaolongliu@xiaolonutekiMBP.lan"
] |
xiaolongliu@xiaolonutekiMBP.lan
|
2a8492f318cc40ad1d0a3b439477ef6a062477ff
|
de706b4cd4c02ec71486fbd4a262aacf337e7a8c
|
/lesson_2/hw02.py
|
080ce0d466165b748b5c8c7f9cbfb6e621884b36
|
[] |
no_license
|
DuDaria/Homework_backend
|
c896a30af4db9c0e118f230b5f01c351d68ff695
|
fa9e06e79c7b61896e53417e8b91fd69e87d6061
|
refs/heads/master
| 2023-03-19T17:39:56.364082
| 2021-03-11T21:53:39
| 2021-03-11T21:53:39
| 319,733,707
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,250
|
py
|
__author__ = 'Дю Д.Н.'
# Задача-1: Запросите у пользователя его возраст.
# Если ему есть 18 лет, выведите: "Доступ разрешен",
# иначе "Извините, пользоваться данным ресурсом можно только с 18 лет"
age = int(input("Привет, Сколько Вам лет: "))
if age >= 18:
print ("Доступ разрешен!")
else:
print("Извините, пользоваться данным ресурсом можно только с 18 лет")
print("")
# Задача-2: Напишите программу, которая спрашивает "Четные или нечетные?",
# в зависимости от ответа, используя цикл с предусловием while или for in
# вывести в одну строку через пробел соотвествующие числа от 0 до 20
# Пример работы:
# $ "Четные или нечетные?"
# четные
# 0 2 4 6 8 10 12 14 16 18 20
# $ "Четные или нечетные?"
# нечетные
# 1 3 5 7 9 11 13 15 17 19
# $ "Четные или нечетные?"
# qwerty (или любая другая строка)
# Я не понимаю, что вы от меня хотите...
#1_Решение с while
even_or_odd = input ("Четные или нечетные? ")
a = 0
if even_or_odd == "четные":
while a < 21:
if a % 2 == 0:
print(a, end = " ")
a +=1
elif even_or_odd == "нечетные":
while a < 20:
if a % 2 == 1:
print(a, end = " ")
a +=1
else:
print("Я не понимаю, что вы от меня хотите...")
print("\n")
#2_Решение с for in
even_or_odd = input ("Четные или нечетные? ")
if even_or_odd == "четные":
for i in range(21):
if i%2 == 0:
print(i , end = " ")
elif even_or_odd == "нечетные":
for i in range(21):
if i%2 != 0:
print(i , end = " ")
else:
print("Я не понимаю, что вы от меня хотите...")
print("\n")
# Задача-3: Дано произвольное целое число, вывести самую большую цифру этого числа.
# Например, дается x = 58375.
# Нужно вывести максимальную цифру в данном числе, т.е. 8.
# Подразумевается, что мы не знаем это число заранее.
# Число приходит в виде целого беззнакового.
# Подсказки:
# * постарайтесь решить задачу с применением арифметики и цикла while;
# * при желании и понимании решите задачу с применением цикла for.
# арифметика и цикл while
x = 58375
m = x%10
x = x//10
while x > 0:
if x%10 > m:
m = x%10
x = x//10
print("max number = ", m)
print("")
#с применением цикла for
num = 58375
lst = list(str(num))
maximum = lst[0]
for number in lst:
if int(number) > int(maximum):
maximum = int(number)
print("max number = ", maximum)
|
[
"60156592+DuDaria@users.noreply.github.com"
] |
60156592+DuDaria@users.noreply.github.com
|
25475c8cd28fea1964462e2f3074d5c639fc00c0
|
9eb5c0e840d91b0752eadc3ca212886bd983619f
|
/andrewvoss.org/downloads/RoboPiLib.py
|
6e88efca825e081fd09eff77a63903724974844e
|
[
"MIT",
"LicenseRef-scancode-other-permissive"
] |
permissive
|
avoss19/Server
|
f5437a76a3ddbd029cd89ca512e97828f4458ebb
|
3e49ff2be6000d4488259d048f685aa24bb9b2bb
|
refs/heads/master
| 2021-05-06T20:39:21.205688
| 2018-04-09T03:54:12
| 2018-04-09T03:54:12
| 112,486,369
| 0
| 0
|
MIT
| 2017-12-11T04:28:08
| 2017-11-29T14:35:37
|
CSS
|
UTF-8
|
Python
| false
| false
| 8,406
|
py
|
#*****************************************************************************
#
# RoboPyLib_v0_98.py
#
# Copyright 2016 William Henning
#
# http://Mikronauts.com
#
# April 27, 2016: Initial Release
# May 20, 2016: First "Real" Release
# June 21, 2016: updated pulseGen
# March 3, 2017: added pwmWrite(pin,pulse,period)
#
# Pure Python support library for RoboPi firmware, for use with RoboPi
#
# This version does not rely on the C RoboPiLib.o library and swc, and as such
# is multi-platform and should work on any SBC
#
# Currently the resetRoboPi(pin) call is an ugly hack dependant on pigpiod
# it will be replaced by generic sysfs gpio handling for multi-platform use
#
# This is a work in progress, and subject to change. I used non-blocking I/O as
# I intend to add timeouts at a future date.
#
# The code is NOT publication quality, not enough comments, later versions will
# be nicer :)
#
#******************************************************************************
import serial # import the pyser library
#import pigpio # import the pigpio library
import time # import the time library
# control characters used to indicate start & end of packet
SOH = '\x01'
EOT = '\x04'
# packet identifiers
GETINFO = 0 # returns product string
READMODE = 11 # write pin mode
WRITEMODE = 12 # write pin mode
DIGREAD = 13 # digital read
DIGWRITE = 14 # digital write
ANREAD = 15 # analog read 10 bit result
ANREADRAW = 16 # analog read at raw resolution
ANWRITE = 17 # analog write 0..255 used for PWM
SERVOWRITE = 18 # write a servo 0..20000us, normally 500-2500us
SERVOREAD = 19 # read last value written to servo
READDIST = 20 # read distance sensor
PULSEGEN = 21 # start high frequency pulse train generator
PULSESTOP = 22 # stop pulse train generator
PWMWRITE = 23 # arbitrary PWM generator
# pin types
INPUT = 0
OUTPUT = 1
PWM = 16
SERVO = 32
# API
API_REVISION = 0.97
# Global variables
myaddr = 1 # default RoboPi address
ser = -1
#**********************************************************************************
#
# Low level packet handling routines
#
# buff = bytearray(100) # creates 0 filled 100 long array of bytes
# buff = bytearray([32] * 100) # creates 100 space (val 32) array
#
#**********************************************************************************
def putPacket(cmd, buffr, plen):
global myaddr
chk = int(myaddr)+int(cmd)+int(plen)
for i in range(0,plen):
chk += int(buffr[i])
packet = bytearray([SOH, myaddr, cmd, plen]) + buffr + bytearray([chk&255, EOT])
ser.write(packet)
def getPacket():
count=0
while (ser.read(1) != SOH):
count = count+1
# print "Received garbage chars", count
header = bytearray(ser.read(3))
addr = header[0]
cmd = header[1]
plen = header[2]
# print "Header ", addr, cmd, plen
checksum = addr + cmd + plen
# print "b4"
buf2 = bytearray(ser.read(plen))
# print "buf2 length is",len(buf2)
for i in range(0,plen):
# print "byte",i,"=",buf2[i]
checksum += buf2[i]
chk = bytearray(ser.read(1))[0]
if (checksum&255) != chk:
print "Checksum Error!"
# print "Waiting for EOT"
while (ser.read(1) != EOT):
count = count+1
# print "Dropped ", count
# 0 1 2 3 4
return [addr, cmd, plen, buf2, chk]
#**********************************************************************************
#
# Rest RoboPi - ugly hack version, next release will use sysfs to make it generic
#
# Does not seem to work when called right before / right after RoboPiInit
# suspect unusual interaction between python, pigpio, serial
# works fine stand-alone in resetRoboPi.py script
#
#**********************************************************************************
def RoboPiReset(pin): # pin is 17 on a Pi
# pi3 = pigpio.pi()
time.sleep(0.1)
# pi3.set_mode(pin, pigpio.OUTPUT)
# pi3.write(pin,0)
# time.sleep(0.01)
# pi3.write(pin,1)
# time.sleep(0.5)
# pi3.stop()
#**********************************************************************************
#
# RoboPi API - please see RoboPi User Manual for usage
#
#**********************************************************************************
def RoboPiExit():
global ser
if ser != -1:
ser.close()
#**********************************************************************************
def RoboPiInit(device, bps):
global ser
if device == '':
device = '/dev/ttyAMA0'
ser = serial.Serial(device,bps)
if ser == -1:
print "Error - Unable to open ", device
exit(1)
return ser
#**********************************************************************************
def Address(n):
global myaddr
myaddr = n & 255
#**********************************************************************************
def getProductID(): # max 255 bytes plus 0, TESTED OK
putPacket(GETINFO, bytearray(1), 1)
buff = getPacket()
return buff[3]
#**********************************************************************************
def getAPIRev():
return API_REVISION
#**********************************************************************************
def pinMode(pin, mode):
putPacket(WRITEMODE, bytearray([pin, mode]), 2);
getPacket()
#**********************************************************************************
def readMode(pin):
putPacket(READMODE, bytearray([pin]), 1)
buff = getPacket()
return buff[3][1]
#**********************************************************************************
def digitalWrite(pin, val):
putPacket(DIGWRITE, bytearray([pin, val]), 2)
getPacket()
#**********************************************************************************
def digitalRead(pin):
putPacket(DIGREAD, bytearray([pin]), 1)
buff = getPacket()
return buff[3][1]
#**********************************************************************************
def analogRead(pin):
putPacket(ANREAD, bytearray([pin]), 1)
buff = getPacket()
return int(buff[3][1]) | (int(buff[3][2]) << 8)
#**********************************************************************************
def analogReadRaw(pin):
putPacket(ANREADRAW, bytearray([pin]), 1)
buff = getPacket()
return int(buff[3][1]) | (int(buff[3][2]) << 8)
#**********************************************************************************
def analogWrite(pin, val):
putPacket(ANWRITE, bytearray([pin, val]), 2)
getPacket()
#**********************************************************************************
def servoWrite(pin, val):
putPacket(SERVOWRITE, bytearray([pin, val & 255, val >> 8]), 3)
getPacket()
#**********************************************************************************
def pwmWrite(pin, pulse, period):
if pulse < 0:
pulse = 0
if pulse >= period:
pulse = 0
digitalWrite(pin,1)
puls = pulse/5
perio = period/5
putPacket(PWMWRITE, bytearray([pin, puls & 255, puls >> 8, perio & 255, perio >> 8]), 5)
print getPacket()
#**********************************************************************************
def servoRead(pin):
putPacket(SERVOREAD, bytearray([pin]), 1)
buff = getPacket()
return int(buff[3][1]) | (int(buff[3][2]) << 8)
#**********************************************************************************
def readDistance(pin):
putPacket(READDIST, bytearray([pin]), 1)
buff = getPacket()
return int(buff[3][1]) | (int(buff[3][2]) << 8)
#**********************************************************************************
def w2ba(x):
return bytearray([x & 255, (x >> 8) & 255])
def wl2ba(lst):
cl = w2ba(lst[0])
for ix in range(1,len(lst)):
cl = cl + w2ba(lst[ix])
return cl
def pba(a):
for ix in range(0,len(a)):
print ix,a[ix]
#**********************************************************************************
def pulseGen(pin, dbg, stp, low_period, pls, pulse_list):
putPacket(PULSEGEN, bytearray([pin,dbg,stp,pls])+w2ba(low_period)+wl2ba(pulse_list),pls+pls+6)
buff = getPacket()
return buff[3][0]
#**********************************************************************************
def pulseList(pin, low_period, pulse_list):
return pulseGen(pin, 33, 33, low_period, len(pulse_list), pulse_list)
#**********************************************************************************
def pulseStop():
putPacket(PULSESTOP,bytearray([0]),1)
buff = getPacket()
return buff[3][0]
|
[
"voss1167@gmail.com"
] |
voss1167@gmail.com
|
dc0b3e92359841527eb725eab414c555165243b7
|
acecbde132f01594ba5917073f4e5c2e9c14baef
|
/basedata.py
|
5ed4918d12c683092ecf87eab65d57ef50c5adc9
|
[] |
no_license
|
Fandref/encyclopedia_api
|
f4f1dd1eebe92c3947183059166099cc896128f2
|
f848ec4ab616e6ef85bed73da14a461bdcb873da
|
refs/heads/master
| 2021-06-13T04:40:16.358545
| 2020-04-09T16:32:05
| 2020-04-09T16:32:05
| 254,424,621
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,885
|
py
|
import psycopg2
# from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
# Connect to the local encyclopedia database (the database itself is
# presumably created beforehand — see the commented CREATE DATABASE below).
conn = psycopg2.connect(dbname='encyclopedia', user='postgres',
                        password='', host='127.0.0.1')
cursor = conn.cursor()
# cursor.execute('CREATE DATABASE encyclopedia')
# Schema definitions, executed in declaration order so that every
# FOREIGN KEY target table exists before it is referenced.
_DDL_STATEMENTS = (
    'CREATE table IF NOT EXISTS world_areas (id serial PRIMARY KEY, name varchar(40) UNIQUE NOT NULL, description text)',
    'CREATE table IF NOT EXISTS countries (id serial PRIMARY KEY, name varchar(40) UNIQUE NOT NULL, world_area_id integer NOT NULL, description text, FOREIGN KEY (world_area_id) REFERENCES world_areas (id) ON DELETE CASCADE);',
    'CREATE table IF NOT EXISTS domains (id serial PRIMARY KEY, name varchar(40) UNIQUE NOT NULL, description text NOT NULL)',
    'CREATE table IF NOT EXISTS kinds (id serial PRIMARY KEY, name varchar(40) UNIQUE NOT NULL, domain_id integer NOT NULL, description text, FOREIGN KEY(domain_id) REFERENCES domains (id) ON DELETE CASCADE);',
    'CREATE table IF NOT EXISTS types (id serial PRIMARY KEY, name varchar(40) UNIQUE NOT NULL, kind_id integer NOT NULL, description text, FOREIGN KEY(kind_id) REFERENCES kinds (id) ON DELETE CASCADE);',
    'CREATE table IF NOT EXISTS sub_types (id serial PRIMARY KEY, name varchar(40) UNIQUE , description text);',
    'CREATE table IF NOT EXISTS types_sub_types (type_id integer NOT NULL, sub_type_id integer NOT NULL, FOREIGN KEY (type_id) REFERENCES types (id) ON DELETE CASCADE, FOREIGN KEY (sub_type_id) REFERENCES sub_types (id) ON DELETE CASCADE);',
    'CREATE table IF NOT EXISTS classes (id serial PRIMARY KEY, name varchar(40) UNIQUE , description text);',
    'CREATE table IF NOT EXISTS sub_types_classes (sub_type_id integer NOT NULL, class_id integer NOT NULL, FOREIGN KEY (sub_type_id) REFERENCES sub_types (id) ON DELETE CASCADE, FOREIGN KEY (class_id) REFERENCES classes (id) ON DELETE CASCADE);',
    'CREATE table IF NOT EXISTS sub_classes (id serial PRIMARY KEY, name varchar(40) UNIQUE , description text);',
    'CREATE table IF NOT EXISTS classes_sub_classes (class_id integer NOT NULL, sub_class_id integer NOT NULL, FOREIGN KEY (class_id) REFERENCES classes (id) ON DELETE CASCADE, FOREIGN KEY (sub_class_id) REFERENCES sub_classes (id) ON DELETE CASCADE);',
    'CREATE table IF NOT EXISTS ordos (id serial PRIMARY KEY, name varchar(40) UNIQUE , description text);',
    'CREATE table IF NOT EXISTS sub_classes_ordos (sub_class_id integer, ordo_id integer, FOREIGN KEY (sub_class_id) REFERENCES sub_classes (id) ON DELETE CASCADE, FOREIGN KEY (ordo_id) REFERENCES ordos (id));',
    'CREATE table IF NOT EXISTS familias (id serial PRIMARY KEY, name varchar(40) UNIQUE , description text);',
    'CREATE table IF NOT EXISTS ordos_familias (ordo_id integer, familia_id integer, FOREIGN KEY (ordo_id) REFERENCES ordos (id) ON DELETE CASCADE, FOREIGN KEY (familia_id) REFERENCES familias (id))',
    'CREATE table IF NOT EXISTS genuses (id serial PRIMARY KEY, name varchar(40) UNIQUE , description text);',
    'CREATE table IF NOT EXISTS familias_genuses (familia_id integer, genus_id integer, FOREIGN KEY (familia_id) REFERENCES familias (id) ON DELETE CASCADE, FOREIGN KEY (genus_id) REFERENCES genuses (id) ON DELETE CASCADE);',
    'CREATE table IF NOT EXISTS specieses (id serial PRIMARY KEY, name varchar(40) UNIQUE , image varchar(255) NOT NULL, description text);',
    'CREATE table IF NOT EXISTS genuses_specieses (genus_id integer, species_id integer, FOREIGN KEY (genus_id) REFERENCES genuses (id) ON DELETE CASCADE, FOREIGN KEY (species_id) REFERENCES specieses (id) ON DELETE CASCADE);',
    'CREATE table IF NOT EXISTS countries_specieses (country_id integer, species_id integer, FOREIGN KEY (country_id) REFERENCES countries (id) ON DELETE CASCADE, FOREIGN KEY (species_id) REFERENCES specieses (id) ON DELETE CASCADE);',
)
for _statement in _DDL_STATEMENTS:
    cursor.execute(_statement)
# FUNCTION (kept for reference; never executed in the original either)
# cursor.execute("""
# CREATE FUNCTION world_area_delete(int id) RETURNS integer
#     AS 'select $1 + $2;'
#     LANGUAGE SQL
#     RETURNS NULL ON NULL INPUT;
# """)
# TRIGGERS (kept for reference; never executed in the original either)
# cursor.execute(CREATE TRIGGER log_update
#     AFTER UPDATE ON accounts
#     FOR EACH ROW
#     WHEN (OLD.* IS DISTINCT FROM NEW.*)
#     EXECUTE PROCEDURE log_account_update();
# )
def check():
    """Print every table in the public schema, then the table count.

    Diagnostic helper; relies on the module-level ``cursor``. The query
    text is identical to the original version, which assembled it through
    repeated string concatenation.
    """
    # One adjacent-literal expression replaces the error-prone `s += ...`
    # chain; the resulting SQL string is byte-for-byte the same.
    query = (
        "SELECT table_schema, table_name"
        " FROM information_schema.tables"
        " WHERE ( table_schema = 'public' )"
        " ORDER BY table_schema, table_name;"
    )
    cursor.execute(query)
    list_tables = cursor.fetchall()
    for t_name_table in list_tables:
        print(t_name_table)
    # len() replaces the hand-rolled counter; same printed value.
    print(len(list_tables))
# Smoke-check: list the tables that now exist in the public schema.
check()
# Persist the DDL, then release the cursor and connection.
conn.commit()
cursor.close()
conn.close()
|
[
"fazanwin@gmail.com"
] |
fazanwin@gmail.com
|
78a5a63f542ae9feddb4fac15b4b7755a0acc35a
|
4e28bca1ee83f734698d1fe39a0bc976b090f3d6
|
/Pyiiko/__init__.py
|
86fe9e233fccd5ef20261a5ee82437045ed27c56
|
[
"Apache-2.0"
] |
permissive
|
rnd137/pyiiko
|
e3a4ad6b8d60781abf977b291542d3bf11adec8a
|
a702858c2ee597cbcc32317f45c2a79ec6c19035
|
refs/heads/master
| 2020-04-17T04:13:59.702107
| 2018-12-24T18:28:15
| 2018-12-24T18:28:15
| 166,220,810
| 1
| 0
|
Apache-2.0
| 2019-01-17T12:17:58
| 2019-01-17T12:17:58
| null |
UTF-8
|
Python
| false
| false
| 132
|
py
|
# Package version string.
__version__ = '0.2.1'
# Re-export each submodule's public names at the package top level.
from Pyiiko.server import *
from Pyiiko.biz import *
from Pyiiko.card5 import *
from Pyiiko.frontWeb import *
|
[
"gadzhi@yandex.ru"
] |
gadzhi@yandex.ru
|
4c0874ae8e3969c644c69f902e55c0eb10c0ee8a
|
51a48244d08b63501130b8c7091aa66587ac1413
|
/parse_data.py
|
60e1be0c7fbc2e9d94497672ed148eadcaa6e06e
|
[] |
no_license
|
amlight/events_visualizer
|
cfd0233ebda2e544b2c74707899a78c0e8b021e3
|
9fcbbac185939dbd3bb2f97f9d20fca9ac08495a
|
refs/heads/main
| 2023-04-18T14:47:41.338862
| 2021-04-23T22:15:32
| 2021-04-23T22:15:32
| 357,566,303
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,455
|
py
|
import pandas as pd
# parse_data takes the data.csv and dictionary.txt files and creates a dataframe displaying the counts
# of flap events for each link.
# custom split function for get_dictionary
def split(line, sep, pos):
    """Split *line* on *sep* and rejoin it into two halves at token *pos*.

    Returns a (head, tail) pair: the first *pos* tokens joined by *sep*,
    and the remaining tokens joined by *sep*.
    """
    tokens = line.split(sep)
    head = sep.join(tokens[:pos])
    tail = sep.join(tokens[pos:])
    return head, tail
def get_dictionary():
    """Load dictionary.txt into a {'Device,Port': 'Label'} mapping.

    Each line is split after its second comma-separated token: the first
    two tokens form the key, the remainder is the label. On duplicate
    keys the last line wins (plain dict semantics, as before).
    """
    with open("dictionary.txt", "r") as f:
        entries = f.read().splitlines()
    mapping = {}
    for entry in entries:
        key, label = split(entry, ',', 2)
        mapping[key] = label
    return mapping
def process_data():
    """Return a per-day DataFrame of flap-event counts per labelled link.

    Reads events from data.csv, counts duplicate (Timestamp, Device)
    pairs, pivots devices into columns, resamples to daily sums, then
    renames columns to the labels from get_dictionary() and drops any
    link not present in the dictionary.
    """
    # pandas options to display the dataframe in console, use print(df) to see the dataframe for testing
    pd.set_option('display.max_rows', None)
    pd.set_option('display.max_columns', None)
    pd.set_option('display.width', None)
    # None (not -1) is the supported "no limit" value in current pandas;
    # -1 is rejected by newer versions.
    pd.set_option('display.max_colwidth', None)
    df = pd.read_csv("data.csv", header=None, names=('Device', 'Port', 'Date', 'Time', 'Event'),
                     parse_dates={'Timestamp': ['Date', 'Time']})
    # merge device and port values together to later rename them according to dictionary.txt labels
    df.Device = df.Device.astype(str) + ',' + df.Port.astype(str)
    df.drop(['Port'], axis=1, inplace=True)
    # look for duplicates of (Timestamp, Device) values and put that summation in a new 'Count' column
    df = df.pivot_table(index=['Timestamp', 'Device'], aggfunc='size').to_frame('Count').reset_index()
    # this turns the 'Device' column values into column headers (and timestamps into the new index)
    df = df.pivot(index='Timestamp', columns='Device', values='Count')
    df.fillna(0, inplace=True)
    # group counts based on day
    df = df.resample('D').sum()
    res = get_dictionary()
    # Snapshot the columns first: the original iterated `df` directly while
    # dropping columns from it, which mutates the sequence being iterated.
    for column in list(df.columns):
        if column in res:
            df.rename(columns={column: res[column]}, inplace=True)
        else:
            df.drop(column, axis=1, inplace=True)
    # Clear the index name left over from the pivot. Assignment replaces the
    # original `del df.index.name`, which raises AttributeError in modern pandas.
    df.index.name = None
    return df
|
[
"33048372+mjt98@users.noreply.github.com"
] |
33048372+mjt98@users.noreply.github.com
|
b5267886e52d4f26f06f7b57765cf73595a070da
|
2254e3c5e17955d654994d388a5babe20eab2516
|
/api/loadenv.py
|
ac4226a590f228155b4bdb7de3c96b9ab7f41e43
|
[] |
no_license
|
webclinic017/bauskslashtrader-monorepo
|
5069b001e214198e62c99c588db91f7707753de4
|
0a42d0eed4fb90bbf3178caac3ff00f16d0f0d5b
|
refs/heads/master
| 2023-07-12T18:25:21.505747
| 2020-07-25T00:49:24
| 2020-07-25T00:49:24
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 238
|
py
|
from dotenv import load_dotenv
from pathlib import Path
import os
# Set home directory: make relative paths resolve against this file's
# folder regardless of where the interpreter was launched from.
project_dir = Path(__file__).parent
os.chdir(project_dir)
# Load development vars: read key=value pairs from ./.env into the
# process environment via python-dotenv.
env_path = Path('.') / '.env'
load_dotenv(dotenv_path=env_path)
|
[
"bauskas@gmail.com"
] |
bauskas@gmail.com
|
3c02dfa0a6825a233462d9bf9303390430c747a0
|
f529b5f3de211ecde43d094e85a062acfa612840
|
/framework/__init__.py
|
0e34f418c834cb5d3f20facf60ae96e1b5b625be
|
[] |
no_license
|
ashwins-code/neural-network-library
|
251453444d9391b2a285db01a13a186464ca87bb
|
85ca9a6399fa281dd091907835efb2a253f67f01
|
refs/heads/master
| 2023-01-03T00:22:26.309362
| 2020-10-31T19:08:24
| 2020-10-31T19:08:24
| 308,945,939
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 21
|
py
|
from .tensor import *
|
[
"ashwinbose6@gmail.com"
] |
ashwinbose6@gmail.com
|
94b849b44b714027b0e55f0fb64ab69f8c31279a
|
11730493cfd1aa88610894809a0b71128bae2c0f
|
/src/python_utilities/src/python_utilities/light_status.py
|
0566e61b597c7005fab32b56c71b6ceacc619dc8
|
[] |
no_license
|
Pencilvesterr/ros_robot_controller
|
dc0edb6a5a5f500599663cc72e6f897ad070fd1c
|
bd2a817452300f9cb3105f0a62415326fde3e7f2
|
refs/heads/master
| 2023-08-31T08:45:52.896557
| 2021-10-08T03:25:35
| 2021-10-08T03:25:35
| 407,418,826
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 93
|
py
|
from enum import Enum
class LightStatus(Enum):
    """Selection state of an indicator light."""
    unselected = 0  # no light selected
    red = 1
    yellow = 2
|
[
"m8crouch@gmail.com"
] |
m8crouch@gmail.com
|
a2fc6b67823b389fa5e113148a4c74bb481da9c5
|
142d2fc8080203174567a2ecb270f9ed2064e006
|
/08_list_manipulation/list_manipulation.py
|
1a485a1adddbc5d162359a2d7b8c17a3d616e111
|
[] |
no_license
|
bricruz/pythonpractice
|
e917b687063c2c5848e5d0c2e306578265a443b7
|
c05a85956ac7db7cf3e5d8838ae3769b50ff4621
|
refs/heads/main
| 2023-09-02T22:51:01.816662
| 2021-11-18T03:36:24
| 2021-11-18T03:36:24
| 428,284,583
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,621
|
py
|
def list_manipulation(lst, command, location, value=None):
    """Mutate lst to add/remove from beginning or end.
    - lst: list of values
    - command: command, either "remove" or "add"
    - location: location to remove/add, either "beginning" or "end"
    - value: when adding, value to add
    remove: remove item at beginning or end, and return item removed
    >>> lst = [1, 2, 3]
    >>> list_manipulation(lst, 'remove', 'end')
    3
    >>> list_manipulation(lst, 'remove', 'beginning')
    1
    >>> lst
    [2]
    add: add item at beginning/end, and return list
    >>> lst = [1, 2, 3]
    >>> list_manipulation(lst, 'add', 'beginning', 20)
    [20, 1, 2, 3]
    >>> list_manipulation(lst, 'add', 'end', 30)
    [20, 1, 2, 3, 30]
    >>> lst
    [20, 1, 2, 3, 30]
    Invalid commands or locations should return None:
    >>> list_manipulation(lst, 'foo', 'end') is None
    True
    >>> list_manipulation(lst, 'add', 'dunno') is None
    True
    """
    if command == 'remove':
        # BUGFIX: return the removed item itself, as the docstring and
        # doctests specify; the original discarded pop()'s return value
        # and fell through to `return lst`.
        if location == 'beginning':
            return lst.pop(0)
        elif location == 'end':
            return lst.pop()
        else:
            return None
    elif command == 'add':
        # `is not None` is the idiomatic None test (was `value != None`).
        if value is not None:
            if location == 'beginning':
                lst.insert(0, value)
            elif location == 'end':
                lst.append(value)
            else:
                return None
            return lst
        else:
            return None
    else:
        return None
# Demo/smoke-test call; the extra 'Hi' argument binds to `value`, which
# the 'remove' path ignores.
print(list_manipulation([1,2,3,4],'remove','beginning','Hi'))
|
[
"cruz.b.james@gmail.com"
] |
cruz.b.james@gmail.com
|
103f7be254649415a24450fe958b3718c8e1c050
|
95e3fb7930365261e0fe4055dacac47b5330b4aa
|
/src/predectorutils/analyses/tmbed.py
|
b71ee4f67e427c39231ddd2486990ca5b52e9fd7
|
[
"Apache-2.0"
] |
permissive
|
ccdmb/predector-utils
|
66af68fd07fa17556cbb9149d0be6b7eabf247cd
|
b394f101019151f07bf56242158c9e1345172c6c
|
refs/heads/master
| 2023-06-23T13:13:12.854325
| 2023-06-14T12:28:13
| 2023-06-14T12:28:13
| 237,380,937
| 0
| 0
| null | 2020-01-31T07:29:09
| 2020-01-31T07:27:52
|
Python
|
UTF-8
|
Python
| false
| false
| 4,821
|
py
|
#!/usr/bin/env python3
from typing import TypeVar
from typing import TextIO
from collections.abc import Iterator, Sequence
from ..gff import (
GFFRecord,
Strand
)
from ..parsers import (
FieldParseError,
BlockParseError,
parse_field,
raise_it,
parse_str,
parse_sequence,
)
from .base import Analysis, GFFAble
__all__ = ["TMBed"]
tm_name = raise_it(parse_field(parse_str, "name"))
tm_topology = raise_it(parse_field(
parse_sequence(["S", "B", "b", "H", "h", "."]),
"topology"
))
T = TypeVar("T")
def parse_topology(s: str):
    """Yield (state, start, end) runs of identical characters in *s*.

    Each yielded tuple covers the half-open index range [start, end) of
    one maximal run. An empty string yields nothing.
    """
    if not s:
        return s
    run_start = 0
    for idx in range(1, len(s)):
        if s[idx] != s[run_start]:
            yield (s[run_start], run_start, idx)
            run_start = idx
    yield (s[run_start], run_start, len(s))
    return
class TMBed(Analysis, GFFAble):
    """One TMBed transmembrane/signal-peptide prediction for a protein.

    Holds the sequence name, boolean signal-peptide and transmembrane
    flags, and the raw per-residue topology string (characters S, B, b,
    H, h, '.').
    """
    # Column names and python types used for tabular serialisation.
    columns = ["name", "has_sp", "has_tm", "topology"]
    types = [str, bool, bool, str]
    analysis = "tmbed"
    software = "TMBed"
    def __init__(
        self,
        name: str,
        has_sp: bool,
        has_tm: bool,
        topology: str,
    ) -> None:
        self.name = name
        self.has_sp = has_sp
        self.has_tm = has_tm
        self.topology = topology
        return
    @classmethod
    def from_block(cls, lines: Sequence[str]) -> "TMBed":
        """Parse one TMBed record block as an object.

        Line 0 must be the '>name' header and line 2 the topology string
        (line 1 — presumably the sequence — is not used). Field parse
        failures are re-raised as BlockParseError with the line offset.
        """
        ilines = list(lines)
        try:
            name = tm_name(ilines[0])
        except FieldParseError as e:
            raise e.as_block_error(0)
        name = name.lstrip(">")
        try:
            top = tm_topology(ilines[2].strip())
        except FieldParseError as e:
            raise e.as_block_error(2)
        # Slice is because model doesn't seem to have proper statemodel
        # so i think SPs could _potentially_ happen in middle of protein.
        # has_sp: more than 10 of the first 30 residues predicted as signal.
        has_sp = top.lower()[:30].count("s") > 10
        # Track the longest consecutive run of each topology state; a run
        # longer than 10 residues in any membrane state marks a TM segment.
        longest_run = {"B": 0, "b": 0, "H": 0, "h": 0, ".": 0, "S": 0}
        prev = top[:1]  # Slice prevents error if empty
        current_run = 1
        for this in top[1:]:
            if this == prev:
                current_run += 1
            elif (this != prev) and (current_run > longest_run[prev]):
                longest_run[prev] = current_run
                prev = this
                current_run = 1
            else:
                prev = this
                current_run = 1
        # Close out the final run.
        if current_run > longest_run[prev]:
            longest_run[prev] = current_run
        # '.' (non-membrane) and 'S' (signal) runs never count as TM.
        del longest_run["."]
        del longest_run["S"]
        has_tm = any([v > 10 for v in longest_run.values()])
        return cls(name, has_sp, has_tm, top)
    @classmethod
    def from_file(cls, handle: TextIO) -> Iterator["TMBed"]:
        """Yield TMBed records from a multi-record file handle.

        '#' comment lines and blank lines are skipped; a '>' line begins
        a new record. Parse errors are annotated with the approximate
        line number and the handle's filename.
        """
        block: list[str] = []
        # Avoid case where handle is empty and we raise BlockParseError
        i = 0
        for i, line in enumerate(handle):
            sline = line.strip()
            if sline.startswith("#"):
                continue
            elif sline == "":
                continue
            elif sline.startswith(">") and len(block) > 0:
                try:
                    yield cls.from_block(block)
                except BlockParseError as e:
                    raise (
                        e.as_parse_error(line=i - len(block))
                        .add_filename_from_handle(handle)
                    )
                block = [sline]
            else:
                block.append(sline)
        # Flush the final accumulated record, if any.
        if len(block) > 0:
            try:
                yield cls.from_block(block)
            except BlockParseError as e:
                raise (
                    e.as_parse_error(line=i - len(block))
                    .add_filename_from_handle(handle)
                )
        return
    def as_gff(
        self,
        software_version: str | None = None,
        database_version: str | None = None,
        keep_all: bool = False,
        id_index: int = 1
    ) -> Iterator[GFFRecord]:
        """Yield one unstranded GFFRecord per topology run.

        Membrane states (B/b/H/h) map to transmembrane_polypeptide_region
        and S to signal_peptide; '.' runs are skipped. keep_all and
        id_index are accepted for interface compatibility but unused here.
        """
        for (type_, start, end) in parse_topology(self.topology):
            mapp = {
                "B": "transmembrane_polypeptide_region",
                "b": "transmembrane_polypeptide_region",
                "H": "transmembrane_polypeptide_region",
                "h": "transmembrane_polypeptide_region",
                "S": "signal_peptide",
            }
            if type_ == ".":
                continue
            yield GFFRecord(
                seqid=self.name,
                source=self.gen_source(software_version, database_version),
                type=mapp[type_],
                start=start,
                end=end,
                strand=Strand.UNSTRANDED,
                attributes=None
            )
        return
|
[
"darcy.ab.jones@gmail.com"
] |
darcy.ab.jones@gmail.com
|
8129e2a0cd766c799cb6243ba4faa2d109475333
|
c9eda3c0342d8cf51ecddcceba555f3556af5ee5
|
/slicer_gui.py
|
ec838d9798b5ca7c6fd246cdceb9ff84472986b4
|
[] |
no_license
|
nerginer/3dlp-host-software
|
4b5c9902fa16059127ad6cef8db7ad84c75cf8f0
|
32c957989baef9a2a92d31b73891b0eeee37cfb2
|
refs/heads/master
| 2021-01-10T18:33:36.498509
| 2013-07-06T00:20:09
| 2013-07-06T00:20:09
| 40,202,402
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 34,240
|
py
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '3dlp_slicer.ui'
#
# Created: Sat Jun 01 20:47:44 2013
# by: PyQt4 UI code generator 4.8.5
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
# QString.fromUtf8 exists only with the legacy PyQt4 string API; when it
# is absent, fall back to the identity function so the generated
# _fromUtf8(...) wrappers below still work.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    _fromUtf8 = lambda s: s
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName(_fromUtf8("MainWindow"))
MainWindow.resize(1324, 768)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(MainWindow.sizePolicy().hasHeightForWidth())
MainWindow.setSizePolicy(sizePolicy)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(180, 180, 180))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(217, 217, 217))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(90, 90, 90))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(120, 120, 120))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(180, 180, 180))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(217, 217, 217))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(180, 180, 180))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(217, 217, 217))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(90, 90, 90))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(120, 120, 120))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(180, 180, 180))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(217, 217, 217))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(90, 90, 90))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(180, 180, 180))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(217, 217, 217))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(90, 90, 90))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(120, 120, 120))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(90, 90, 90))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(90, 90, 90))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(180, 180, 180))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(180, 180, 180))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(180, 180, 180))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
MainWindow.setPalette(palette)
MainWindow.setWindowTitle(QtGui.QApplication.translate("MainWindow", "3DLP Slicer", None, QtGui.QApplication.UnicodeUTF8))
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/Icons/icons/transform.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
MainWindow.setWindowIcon(icon)
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.horizontalLayout = QtGui.QHBoxLayout(self.centralwidget)
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.frame = QtGui.QFrame(self.centralwidget)
self.frame.setFrameShape(QtGui.QFrame.StyledPanel)
self.frame.setFrameShadow(QtGui.QFrame.Raised)
self.frame.setObjectName(_fromUtf8("frame"))
self.verticalLayout_2 = QtGui.QVBoxLayout(self.frame)
self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
self.verticalLayout = QtGui.QVBoxLayout()
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.groupBox = QtGui.QGroupBox(self.frame)
self.groupBox.setMinimumSize(QtCore.QSize(0, 200))
self.groupBox.setMaximumSize(QtCore.QSize(254, 16777215))
self.groupBox.setTitle(QtGui.QApplication.translate("MainWindow", "Model Information", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox.setObjectName(_fromUtf8("groupBox"))
self.verticalLayout.addWidget(self.groupBox)
self.groupBox_2 = QtGui.QGroupBox(self.frame)
self.groupBox_2.setMinimumSize(QtCore.QSize(0, 100))
self.groupBox_2.setMaximumSize(QtCore.QSize(254, 16777215))
self.groupBox_2.setTitle(QtGui.QApplication.translate("MainWindow", "Model List", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox_2.setObjectName(_fromUtf8("groupBox_2"))
self.horizontalLayout_7 = QtGui.QHBoxLayout(self.groupBox_2)
self.horizontalLayout_7.setObjectName(_fromUtf8("horizontalLayout_7"))
self.verticalLayout_4 = QtGui.QVBoxLayout()
self.verticalLayout_4.setObjectName(_fromUtf8("verticalLayout_4"))
self.horizontalLayout_6 = QtGui.QHBoxLayout()
self.horizontalLayout_6.setObjectName(_fromUtf8("horizontalLayout_6"))
self.addModel = QtGui.QPushButton(self.groupBox_2)
self.addModel.setText(QtGui.QApplication.translate("MainWindow", "Add", None, QtGui.QApplication.UnicodeUTF8))
self.addModel.setObjectName(_fromUtf8("addModel"))
self.horizontalLayout_6.addWidget(self.addModel)
spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_6.addItem(spacerItem)
self.removeModel = QtGui.QPushButton(self.groupBox_2)
self.removeModel.setText(QtGui.QApplication.translate("MainWindow", "Remove", None, QtGui.QApplication.UnicodeUTF8))
self.removeModel.setObjectName(_fromUtf8("removeModel"))
self.horizontalLayout_6.addWidget(self.removeModel)
self.verticalLayout_4.addLayout(self.horizontalLayout_6)
self.modelList = QtGui.QListWidget(self.groupBox_2)
self.modelList.setFrameShadow(QtGui.QFrame.Plain)
self.modelList.setAlternatingRowColors(False)
self.modelList.setSpacing(1)
self.modelList.setModelColumn(0)
self.modelList.setObjectName(_fromUtf8("modelList"))
self.verticalLayout_4.addWidget(self.modelList)
self.horizontalLayout_7.addLayout(self.verticalLayout_4)
self.verticalLayout.addWidget(self.groupBox_2)
self.Transform_groupbox = QtGui.QGroupBox(self.frame)
self.Transform_groupbox.setEnabled(False)
self.Transform_groupbox.setMaximumSize(QtCore.QSize(254, 16777215))
self.Transform_groupbox.setTitle(QtGui.QApplication.translate("MainWindow", "Transform Model", None, QtGui.QApplication.UnicodeUTF8))
self.Transform_groupbox.setObjectName(_fromUtf8("Transform_groupbox"))
self.horizontalLayout_5 = QtGui.QHBoxLayout(self.Transform_groupbox)
self.horizontalLayout_5.setObjectName(_fromUtf8("horizontalLayout_5"))
self.verticalLayout_3 = QtGui.QVBoxLayout()
self.verticalLayout_3.setObjectName(_fromUtf8("verticalLayout_3"))
self.label_12 = QtGui.QLabel(self.Transform_groupbox)
self.label_12.setText(QtGui.QApplication.translate("MainWindow", "Position:", None, QtGui.QApplication.UnicodeUTF8))
self.label_12.setObjectName(_fromUtf8("label_12"))
self.verticalLayout_3.addWidget(self.label_12)
self.horizontalLayout_4 = QtGui.QHBoxLayout()
self.horizontalLayout_4.setObjectName(_fromUtf8("horizontalLayout_4"))
self.label_9 = QtGui.QLabel(self.Transform_groupbox)
self.label_9.setText(QtGui.QApplication.translate("MainWindow", "X:", None, QtGui.QApplication.UnicodeUTF8))
self.label_9.setObjectName(_fromUtf8("label_9"))
self.horizontalLayout_4.addWidget(self.label_9)
self.positionX = QtGui.QDoubleSpinBox(self.Transform_groupbox)
self.positionX.setMinimumSize(QtCore.QSize(55, 0))
self.positionX.setMinimum(-99.99)
self.positionX.setObjectName(_fromUtf8("positionX"))
self.horizontalLayout_4.addWidget(self.positionX)
self.label_10 = QtGui.QLabel(self.Transform_groupbox)
self.label_10.setText(QtGui.QApplication.translate("MainWindow", "Y:", None, QtGui.QApplication.UnicodeUTF8))
self.label_10.setObjectName(_fromUtf8("label_10"))
self.horizontalLayout_4.addWidget(self.label_10)
self.positionY = QtGui.QDoubleSpinBox(self.Transform_groupbox)
self.positionY.setMinimumSize(QtCore.QSize(55, 0))
self.positionY.setMinimum(-99.99)
self.positionY.setObjectName(_fromUtf8("positionY"))
self.horizontalLayout_4.addWidget(self.positionY)
self.label_11 = QtGui.QLabel(self.Transform_groupbox)
self.label_11.setText(QtGui.QApplication.translate("MainWindow", "Z:", None, QtGui.QApplication.UnicodeUTF8))
self.label_11.setObjectName(_fromUtf8("label_11"))
self.horizontalLayout_4.addWidget(self.label_11)
self.positionZ = QtGui.QDoubleSpinBox(self.Transform_groupbox)
self.positionZ.setMinimumSize(QtCore.QSize(55, 0))
self.positionZ.setMinimum(-99.99)
self.positionZ.setObjectName(_fromUtf8("positionZ"))
self.horizontalLayout_4.addWidget(self.positionZ)
self.verticalLayout_3.addLayout(self.horizontalLayout_4)
self.label_4 = QtGui.QLabel(self.Transform_groupbox)
self.label_4.setText(QtGui.QApplication.translate("MainWindow", "Rotation:", None, QtGui.QApplication.UnicodeUTF8))
self.label_4.setObjectName(_fromUtf8("label_4"))
self.verticalLayout_3.addWidget(self.label_4)
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
self.label = QtGui.QLabel(self.Transform_groupbox)
self.label.setText(QtGui.QApplication.translate("MainWindow", "X:", None, QtGui.QApplication.UnicodeUTF8))
self.label.setObjectName(_fromUtf8("label"))
self.horizontalLayout_2.addWidget(self.label)
self.rotationX = QtGui.QDoubleSpinBox(self.Transform_groupbox)
self.rotationX.setMinimumSize(QtCore.QSize(55, 0))
self.rotationX.setDecimals(0)
self.rotationX.setMinimum(-360.0)
self.rotationX.setMaximum(360.0)
self.rotationX.setObjectName(_fromUtf8("rotationX"))
self.horizontalLayout_2.addWidget(self.rotationX)
self.label_2 = QtGui.QLabel(self.Transform_groupbox)
self.label_2.setText(QtGui.QApplication.translate("MainWindow", "Y:", None, QtGui.QApplication.UnicodeUTF8))
self.label_2.setObjectName(_fromUtf8("label_2"))
self.horizontalLayout_2.addWidget(self.label_2)
self.rotationY = QtGui.QDoubleSpinBox(self.Transform_groupbox)
self.rotationY.setMinimumSize(QtCore.QSize(55, 0))
self.rotationY.setDecimals(0)
self.rotationY.setMinimum(-360.0)
self.rotationY.setMaximum(360.0)
self.rotationY.setObjectName(_fromUtf8("rotationY"))
self.horizontalLayout_2.addWidget(self.rotationY)
self.label_3 = QtGui.QLabel(self.Transform_groupbox)
self.label_3.setText(QtGui.QApplication.translate("MainWindow", "Z:", None, QtGui.QApplication.UnicodeUTF8))
self.label_3.setObjectName(_fromUtf8("label_3"))
self.horizontalLayout_2.addWidget(self.label_3)
self.rotationZ = QtGui.QDoubleSpinBox(self.Transform_groupbox)
self.rotationZ.setMinimumSize(QtCore.QSize(55, 0))
self.rotationZ.setDecimals(0)
self.rotationZ.setMinimum(-360.0)
self.rotationZ.setMaximum(360.0)
self.rotationZ.setObjectName(_fromUtf8("rotationZ"))
self.horizontalLayout_2.addWidget(self.rotationZ)
self.verticalLayout_3.addLayout(self.horizontalLayout_2)
self.horizontalLayout_3 = QtGui.QHBoxLayout()
self.horizontalLayout_3.setObjectName(_fromUtf8("horizontalLayout_3"))
self.label_8 = QtGui.QLabel(self.Transform_groupbox)
self.label_8.setText(QtGui.QApplication.translate("MainWindow", "Scaling Factor:", None, QtGui.QApplication.UnicodeUTF8))
self.label_8.setObjectName(_fromUtf8("label_8"))
self.horizontalLayout_3.addWidget(self.label_8)
self.scale = QtGui.QDoubleSpinBox(self.Transform_groupbox)
self.scale.setMinimumSize(QtCore.QSize(55, 0))
self.scale.setMinimum(-99.99)
self.scale.setObjectName(_fromUtf8("scale"))
self.horizontalLayout_3.addWidget(self.scale)
spacerItem1 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_3.addItem(spacerItem1)
self.verticalLayout_3.addLayout(self.horizontalLayout_3)
self.horizontalLayout_5.addLayout(self.verticalLayout_3)
self.verticalLayout.addWidget(self.Transform_groupbox)
self.verticalLayout_2.addLayout(self.verticalLayout)
spacerItem2 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout_2.addItem(spacerItem2)
self.horizontalLayout.addWidget(self.frame)
self.ModelFrame = QtGui.QFrame(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.ModelFrame.sizePolicy().hasHeightForWidth())
self.ModelFrame.setSizePolicy(sizePolicy)
self.ModelFrame.setMinimumSize(QtCore.QSize(1024, 0))
self.ModelFrame.setFrameShape(QtGui.QFrame.Box)
self.ModelFrame.setFrameShadow(QtGui.QFrame.Plain)
self.ModelFrame.setLineWidth(1)
self.ModelFrame.setObjectName(_fromUtf8("ModelFrame"))
self.horizontalLayout.addWidget(self.ModelFrame)
MainWindow.setCentralWidget(self.centralwidget)
self.toolBar = QtGui.QToolBar(MainWindow)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(200, 200, 200))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(227, 227, 227))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(100, 100, 100))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(133, 133, 133))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(200, 200, 200))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(227, 227, 227))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(200, 200, 200))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(227, 227, 227))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(100, 100, 100))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(133, 133, 133))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(200, 200, 200))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(227, 227, 227))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(100, 100, 100))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(200, 200, 200))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(227, 227, 227))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(100, 100, 100))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(133, 133, 133))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(100, 100, 100))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(100, 100, 100))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(200, 200, 200))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(200, 200, 200))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(200, 200, 200))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
self.toolBar.setPalette(palette)
self.toolBar.setWindowTitle(QtGui.QApplication.translate("MainWindow", "toolBar", None, QtGui.QApplication.UnicodeUTF8))
self.toolBar.setAutoFillBackground(True)
self.toolBar.setMovable(False)
self.toolBar.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon)
self.toolBar.setFloatable(False)
self.toolBar.setObjectName(_fromUtf8("toolBar"))
MainWindow.addToolBar(QtCore.Qt.TopToolBarArea, self.toolBar)
self.actionQuit = QtGui.QAction(MainWindow)
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(_fromUtf8(":/Icons/icons/delete2.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.actionQuit.setIcon(icon1)
self.actionQuit.setText(QtGui.QApplication.translate("MainWindow", "Quit", None, QtGui.QApplication.UnicodeUTF8))
self.actionQuit.setObjectName(_fromUtf8("actionQuit"))
self.actionOpen_Model = QtGui.QAction(MainWindow)
icon2 = QtGui.QIcon()
icon2.addPixmap(QtGui.QPixmap(_fromUtf8(":/Icons/icons/import1.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.actionOpen_Model.setIcon(icon2)
self.actionOpen_Model.setText(QtGui.QApplication.translate("MainWindow", "Open Model", None, QtGui.QApplication.UnicodeUTF8))
self.actionOpen_Model.setObjectName(_fromUtf8("actionOpen_Model"))
self.actionSet_Model_Opacity = QtGui.QAction(MainWindow)
icon3 = QtGui.QIcon()
icon3.addPixmap(QtGui.QPixmap(_fromUtf8(":/Icons/icons/replace.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.actionSet_Model_Opacity.setIcon(icon3)
self.actionSet_Model_Opacity.setText(QtGui.QApplication.translate("MainWindow", "Set Model Opacity", None, QtGui.QApplication.UnicodeUTF8))
self.actionSet_Model_Opacity.setObjectName(_fromUtf8("actionSet_Model_Opacity"))
self.actionPreferences = QtGui.QAction(MainWindow)
icon4 = QtGui.QIcon()
icon4.addPixmap(QtGui.QPixmap(_fromUtf8(":/Icons/icons/gear.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.actionPreferences.setIcon(icon4)
self.actionPreferences.setText(QtGui.QApplication.translate("MainWindow", "Slicing Preferences", None, QtGui.QApplication.UnicodeUTF8))
self.actionPreferences.setObjectName(_fromUtf8("actionPreferences"))
self.actionSlice_Model = QtGui.QAction(MainWindow)
icon5 = QtGui.QIcon()
icon5.addPixmap(QtGui.QPixmap(_fromUtf8(":/Icons/icons/media_play_green.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.actionSlice_Model.setIcon(icon5)
self.actionSlice_Model.setText(QtGui.QApplication.translate("MainWindow", "Slice Model", None, QtGui.QApplication.UnicodeUTF8))
self.actionSlice_Model.setObjectName(_fromUtf8("actionSlice_Model"))
self.toolBar.addAction(self.actionOpen_Model)
self.toolBar.addAction(self.actionSlice_Model)
self.toolBar.addAction(self.actionSet_Model_Opacity)
self.toolBar.addAction(self.actionPreferences)
self.toolBar.addSeparator()
self.toolBar.addAction(self.actionQuit)
self.retranslateUi(MainWindow)
QtCore.QObject.connect(self.actionQuit, QtCore.SIGNAL(_fromUtf8("triggered()")), MainWindow.close)
QtCore.QObject.connect(self.actionOpen_Model, QtCore.SIGNAL(_fromUtf8("triggered()")), MainWindow.AddModel)
QtCore.QObject.connect(self.actionSlice_Model, QtCore.SIGNAL(_fromUtf8("triggered()")), MainWindow.SliceModel)
QtCore.QObject.connect(self.actionSet_Model_Opacity, QtCore.SIGNAL(_fromUtf8("triggered()")), MainWindow.UpdateModelOpacity)
QtCore.QObject.connect(self.actionPreferences, QtCore.SIGNAL(_fromUtf8("triggered()")), MainWindow.OpenSettingsDialog)
QtCore.QObject.connect(self.positionX, QtCore.SIGNAL(_fromUtf8("valueChanged(QString)")), MainWindow.Update_Position_X)
QtCore.QObject.connect(self.positionY, QtCore.SIGNAL(_fromUtf8("valueChanged(QString)")), MainWindow.Update_Position_Y)
QtCore.QObject.connect(self.positionZ, QtCore.SIGNAL(_fromUtf8("valueChanged(QString)")), MainWindow.Update_Position_Z)
QtCore.QObject.connect(self.rotationX, QtCore.SIGNAL(_fromUtf8("valueChanged(QString)")), MainWindow.Update_Rotation_X)
QtCore.QObject.connect(self.rotationY, QtCore.SIGNAL(_fromUtf8("valueChanged(QString)")), MainWindow.Update_Rotation_Y)
QtCore.QObject.connect(self.rotationZ, QtCore.SIGNAL(_fromUtf8("valueChanged(QString)")), MainWindow.Update_Rotation_Z)
QtCore.QObject.connect(self.scale, QtCore.SIGNAL(_fromUtf8("valueChanged(QString)")), MainWindow.Update_Scale)
QtCore.QObject.connect(self.modelList, QtCore.SIGNAL(_fromUtf8("currentItemChanged(QListWidgetItem*,QListWidgetItem*)")), MainWindow.ModelIndexChanged)
QtCore.QObject.connect(self.addModel, QtCore.SIGNAL(_fromUtf8("pressed()")), MainWindow.AddModel)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
    def retranslateUi(self, MainWindow):
        # Intentionally empty: all user-visible strings are assigned inline
        # in setupUi via QApplication.translate, so this generated
        # retranslation hook has nothing to do.
        pass
import resource_rc
|
[
"oswaldonfire@4cfbdd0b-1efc-a790-3b7a-4779d8d2eca3"
] |
oswaldonfire@4cfbdd0b-1efc-a790-3b7a-4779d8d2eca3
|
993fae64ed448bd384cd9874345f1c6c7a511337
|
2f3d66965dbec4021c3819dce093a8b40724af33
|
/mirage/libs/zigbee_utils/encoders.py
|
81e74803e0c94aed5ca55b2bc7c77a345f2d0585
|
[
"MIT"
] |
permissive
|
RCayre/mirage
|
92bfa2c2822c06238976dbba6df993b10f2dc25d
|
f73f6c4442e4bfd239eb5caf5e1283c125d37db9
|
refs/heads/master
| 2023-02-04T06:23:05.985200
| 2022-11-24T19:16:53
| 2022-11-24T19:16:53
| 203,883,338
| 199
| 35
|
MIT
| 2023-01-26T03:06:32
| 2019-08-22T22:36:00
|
Python
|
UTF-8
|
Python
| false
| false
| 684
|
py
|
from mirage.libs.common.sdr.encoders import SDREncoder
from mirage.libs.zigbee_utils.chip_tables import *
class ZigbeeEncoder(SDREncoder):
    '''
    Software Defined Radio encoder for Zigbee protocol.

    Splits each input byte into two 4-bit symbols (low nibble first, each
    nibble reversed) and maps every symbol to its chip sequence via
    SYMBOL_TO_CHIP_MAPPING.
    '''
    def _getChips(self, bits):
        # Look up the chip sequence for a 4-bit symbol string.
        # Returns None when the symbol is not present in the mapping table.
        for mapping in SYMBOL_TO_CHIP_MAPPING:
            if mapping["symbols"] == bits:
                return mapping["chip_values"]
        return None

    def encode(self, data):
        # Prepend a four-byte zero preamble (and, if missing, the 0xA7
        # marker -- presumably a start-of-frame delimiter; TODO confirm)
        # so the frame always begins with the expected header bytes.
        if data[0] == 0xA7:
            data = b"\x00\x00\x00\x00" + data
        elif data[0] != 0x00:
            data = b"\x00\x00\x00\x00\xA7" + data
        # Convert every byte into its two reversed nibbles, low nibble first.
        symbols = []
        for value in bytes(data):
            byte = "{:08b}".format(value)
            symbols.append(byte[4:8][::-1])
            symbols.append(byte[0:4][::-1])
        # Concatenate the chip sequences of all symbols into one string.
        return "".join(self._getChips(symbol) for symbol in symbols)
|
[
"rcayre@laas.fr"
] |
rcayre@laas.fr
|
4b50f06cfdc691ef3c0d26607ce65b2d26a4dfe8
|
2bb04faa91f1bd6dbda1c5fa1d77963456a89f85
|
/nombres/settings.py
|
2bfd2678c65335221b9c156ec22c65cb8c1cab97
|
[] |
no_license
|
jevelezse/blog
|
8d8a0a771431487b5648ad74d5e7a5af134ebd24
|
607522200b59f48793e2d6b7c2a6141f3e7eb3b7
|
refs/heads/master
| 2021-01-22T06:23:17.295328
| 2017-02-12T23:05:28
| 2017-02-12T23:05:28
| 81,754,672
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,215
|
py
|
"""
Django settings for nombres project.
Generated by 'django-admin startproject' using Django 1.10.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '2+y(_-(kg(qf3z=w_hhwngiws0!#_c=h*ux=h$aizq0^00w9-*'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['jenniferbio.pythonanywhere.com','127.0.0.1']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'blog'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'nombres.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'nombres.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'America/Bogota'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
|
[
"jevelezse@unal.edu.co"
] |
jevelezse@unal.edu.co
|
f679afceadef48ea4dfbb7be731d33ebf7c0048d
|
a61819ed872820725e6266faa4ebe5a9791dbe7d
|
/listogram.py
|
fb6607f28add7096fd7aeaa81baf20fb020388e4
|
[] |
no_license
|
NinjaAung/Tweeter-Gen
|
4ee2387956154e414c325da93b2282b7b945918d
|
0912acb08aaeecdf6f51cbd168feb5375eaccbe8
|
refs/heads/master
| 2021-07-12T09:51:31.643775
| 2020-03-05T17:46:53
| 2020-03-05T17:46:53
| 238,708,413
| 0
| 0
| null | 2021-03-20T02:58:21
| 2020-02-06T14:33:59
|
Python
|
UTF-8
|
Python
| false
| false
| 5,079
|
py
|
from __future__ import division, print_function # Python 2 and 3 compatibility
import random
class Listogram(list):
    """Listogram is a histogram implemented as a subclass of the list type.

    Each entry is a two-item list ``[word, count]``. Two counters are kept
    alongside the entries:
    - ``types``: number of distinct word types in this histogram
    - ``tokens``: total count of all word tokens added
    """

    def __init__(self, word_list=None):
        """Initialize this histogram as a new list and count given words."""
        super(Listogram, self).__init__()  # Initialize this as a new list
        self.types = 0   # Count of distinct word types in this histogram
        self.tokens = 0  # Total count of all word tokens in this histogram
        # Count words in given list, if any
        if word_list is not None:
            for word in word_list:
                self.add_count(word)

    def add_count(self, word, count=1):
        """Increase frequency count of given word by given count amount."""
        self.tokens += count
        # Reuse index_of instead of re-implementing the linear scan here.
        index = self.index_of(word)
        if index is not None:
            self[index][1] += count
        else:
            # First occurrence of this word: new entry, new type.
            self.types += 1
            self.append([word, count])

    def frequency(self, word):
        """Return frequency count of given word, or 0 if word is not found."""
        index = self.index_of(word)
        return self[index][1] if index is not None else 0

    def __contains__(self, word):
        """Return boolean indicating if given word is in this histogram."""
        return self.index_of(word) is not None

    def index_of(self, target):
        """Return the index of the entry containing given target word if
        found in this histogram, or None if target word is not found."""
        for index, (word, _count) in enumerate(self):
            if word == target:
                return index
        return None

    def sample(self):
        """Return a word from this histogram, randomly sampled by weighting
        each word's probability of being chosen by its observed frequency."""
        threshold = random.randint(1, self.tokens)
        cumulative = 0
        # Walk the cumulative counts until the random threshold is reached.
        for word, count in self:
            cumulative += count
            if cumulative >= threshold:
                return word
def print_histogram(word_list):
    """Build a Listogram from word_list, print its contents and counts,
    then print sampling statistics for it."""
    print()
    print('Histogram:')
    print(f'word list: {word_list}')
    # Build the listogram and show its raw entries plus the two counters.
    histogram = Listogram(word_list)
    print(f'listogram: {histogram}')
    print(f'{histogram.tokens} tokens, {histogram.types} types')
    # Report the observed frequency of the last two words in the input.
    for word in word_list[-2:]:
        print(f'{repr(word)} occurs {histogram.frequency(word)} times')
    print()
    print_histogram_samples(histogram)
def print_histogram_samples(histogram):
    """Sample the given Listogram 10,000 times and print a table comparing
    each word's sampled frequency to its observed frequency, with the
    relative error colour-coded (green < 5%, yellow < 10%, red otherwise)."""
    print('Histogram samples:')
    # Sample the histogram 10,000 times and count frequency of results
    samples_list = [histogram.sample() for _ in range(10000)]
    samples_hist = Listogram(samples_list)
    print(f'samples: {samples_hist}')
    print()
    print('Sampled frequency and error from observed frequency:')
    header = '| word type | observed freq | sampled freq | error |'
    divider = '-' * len(header)
    print(divider)
    print(header)
    print(divider)
    # Colors for error (ANSI escape codes)
    green = '\033[32m'
    yellow = '\033[33m'
    red = '\033[31m'
    reset = '\033[m'
    # Check each word in original histogram
    for word, count in histogram:
        # Calculate word's observed frequency
        observed_freq = count / histogram.tokens
        # Calculate word's sampled frequency
        samples = samples_hist.frequency(word)
        sampled_freq = samples / samples_hist.tokens
        # Calculate error between word's sampled and observed frequency
        error = (sampled_freq - observed_freq) / observed_freq
        color = green if abs(error) < 0.05 else yellow if abs(error) < 0.1 else red
        print('| {!r:<9} '.format(word)
            + '| {:>4} = {:>6.2%} '.format(count, observed_freq)
            + '| {:>4} = {:>6.2%} '.format(samples, sampled_freq)
            + '| {}{:>+7.2%}{} |'.format(color, error, reset))
    print(divider)
    print()
def main():
    """Run the histogram demo on CLI arguments, or on built-in sample texts
    when no arguments are given."""
    import sys
    arguments = sys.argv[1:]  # Exclude script name in first argument
    if len(arguments) >= 1:
        # Test histogram on given arguments
        print_histogram(arguments)
        return
    # No arguments: exercise the histogram on a few canned inputs.
    # Letters of a single word:
    word = 'abracadabra'
    print_histogram(list(word))
    # Words of a classic book title:
    fish_text = 'one fish two fish red fish blue fish'
    print_histogram(fish_text.split())
    # Words of a long repetitive sentence:
    woodchuck_text = ('how much wood would a wood chuck chuck'
                      ' if a wood chuck could chuck wood')
    print_histogram(woodchuck_text.split())
# Run the demo only when executed as a script, not when imported as a module.
if __name__ == '__main__':
    main()
|
[
"ninja.aung02@gmail.com"
] |
ninja.aung02@gmail.com
|
ca58378ef29f0e1780b97d43010cb3a9819c329d
|
8fba37f0b439c26d61673a01d5aae8c27384b0a6
|
/Final/pathfinding.py
|
665c3a2e18da775a5a9c0a712fd6222bf65080b8
|
[] |
no_license
|
jlicht27/cmps1500
|
c0ecc531d63e7c2a029c304b0ded73c739cc2f2c
|
7ec5ee445e913729eacf7d1554c4de4443fb6751
|
refs/heads/master
| 2023-02-09T20:17:59.097041
| 2021-01-05T19:41:07
| 2021-01-05T19:41:07
| 327,102,506
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 908
|
py
|
# Adjacency-list representation of an undirected graph: each key is a node
# and its value lists the directly connected neighbor nodes.
grid = {'a': ['b', 'f'],
        'b': ['a', 'c', 'g'],
        'c': ['b', 'd'],
        'd': ['c', 'e'],
        'e': ['d'],
        'f': ['a', 'g', 'h'],
        'g': ['b', 'f', 'i'],
        'h': ['f', 'i'],
        'i': ['g', 'h', 'j'],
        'j': ['i', 'k'],
        'k': ['j', 'l', 'm'],
        'l': ['k', 'n'],
        'm': ['k', 'n', 'q'],
        'n': ['l', 'm'],
        'o': ['p'],
        'p': ['o', 'q'],
        'q': ['m', 'p', 'r'],
        'r': ['n', 'q']
        }
def findPath(G, start, end):
    """Return a shortest path from start to end in graph G using BFS.

    G is an adjacency-list dict mapping each node to a list of neighbors.
    Returns the path as a list of nodes including both endpoints, or None
    if end is unreachable from start.

    Replaces the original recursive version, which kept no visited set and
    raised RecursionError on cyclic graphs (as the author's own note says).
    """
    from collections import deque

    if start == end:
        return [start]
    visited = {start}
    # Each queue entry is the whole path from start to its last node;
    # BFS order guarantees the first path reaching `end` is shortest.
    queue = deque([[start]])
    while queue:
        path = queue.popleft()
        for neighbor in G[path[-1]]:
            if neighbor == end:
                return path + [neighbor]
            if neighbor not in visited:
                visited.add(neighbor)
                queue.append(path + [neighbor])
    return None
# Demo: print a path from 'e' to 'r' on the module-level grid.
print(findPath(grid,'e','r'))
# NOTE(review): the string below is the original author's own note about
# findPath's recursion bug; kept verbatim as project history.
'''It throws up a recursion error, not sure how to change this
into finding only the shortest path. I know BFS has to be involved
in some way.'''
|
[
"jlicht@Jonathans-MBP-2.fios-router.home"
] |
jlicht@Jonathans-MBP-2.fios-router.home
|
21adb5fe26148007f1bdf9ddd5b7e1ea8a60d049
|
050bd514f7878190a61878d319e7bb6d44799f39
|
/app/shop/api_views.py
|
c48521c4748dc3fc7f6b7b574a51288eba142343
|
[] |
no_license
|
MikhailoVL/api_automation
|
a29cbcff3d1c3ef4c559d6ebbd1f63a6387b2c0f
|
bcfba9855c1106602edd90631d6d7c6073881227
|
refs/heads/main
| 2023-03-14T09:49:47.930090
| 2021-03-03T08:43:55
| 2021-03-03T08:43:55
| 342,175,022
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,725
|
py
|
from rest_framework.generics import ListAPIView, CreateAPIView,\
UpdateAPIView, RetrieveAPIView
from .serializers import OrderSerializer, ScoreSerializer
from .models import Order, Score
from .permissions import HasGroupPermission
from .util_s_filter import DateRangeFilter
from django_filters import rest_framework as filters
class OrderListAPIView(ListAPIView):
    """Read-only list of all Order rows, filterable by date range.

    GET access is limited to the Booker, Cashier and Sales_assistant groups
    via HasGroupPermission.
    """
    permission_classes = [HasGroupPermission]
    # set group that have permission
    required_groups = {
        'GET': ['Booker', 'Cashier', 'Sales_assistant'],
    }
    serializer_class = OrderSerializer
    queryset = Order.objects.all()
    filter_backends = [filters.DjangoFilterBackend]
    filterset_class = DateRangeFilter
class OrderCreateAPIView(CreateAPIView):
    """Create a new Order; restricted to the Cashier group.

    NOTE(review): GET and PUT entries in required_groups have no effect on a
    CreateAPIView (it only serves POST) -- presumably copied for symmetry.
    """
    permission_classes = [HasGroupPermission]
    # set group that have permission
    required_groups = {
        'GET': ['Cashier'],
        'POST': ['Cashier'],
        'PUT': ['Cashier'],
    }
    serializer_class = OrderSerializer
    queryset = Order.objects.all()
class OrderUpdateAPIView(UpdateAPIView, RetrieveAPIView):
    """Retrieve (GET) or update (PUT/PATCH) a single Order.

    Access limited to the Cashier and Sales_assistant groups.
    """
    permission_classes = [HasGroupPermission]
    # set group that have permission
    required_groups = {
        'GET': ['Cashier', 'Sales_assistant'],
        'POST': ['Cashier', 'Sales_assistant'],
        'PUT': ['Cashier', 'Sales_assistant'],
    }
    serializer_class = OrderSerializer
    queryset = Order.objects.all()
class ScoreCreateAPIView(CreateAPIView):
    """Create a new Score; restricted to the Cashier group.

    NOTE(review): as with OrderCreateAPIView, the GET/PUT entries in
    required_groups are unused by CreateAPIView.
    """
    permission_classes = [HasGroupPermission]
    # set group that have permission
    required_groups = {
        'GET': ['Cashier'],
        'POST': ['Cashier'],
        'PUT': ['Cashier'],
    }
    serializer_class = ScoreSerializer
    queryset = Score.objects.all()
|
[
"mykhailo.liutyi@introlab-systems.com"
] |
mykhailo.liutyi@introlab-systems.com
|
7a43ee47815996296f8211326aaaa908d8cd4e74
|
e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f
|
/indices/nndefunct.py
|
a6668d2bbc5efa937e3a225b5521fa0fc1b30293
|
[] |
no_license
|
psdh/WhatsintheVector
|
e8aabacc054a88b4cb25303548980af9a10c12a8
|
a24168d068d9c69dc7a0fd13f606c080ae82e2a6
|
refs/heads/master
| 2021-01-25T10:34:22.651619
| 2015-09-23T11:54:06
| 2015-09-23T11:54:06
| 42,749,205
| 2
| 3
| null | 2015-09-23T11:54:07
| 2015-09-18T22:06:38
|
Python
|
UTF-8
|
Python
| false
| false
| 232
|
py
|
# List of (filename, count) pairs; the meaning of the counts is not evident
# from this file -- presumably occurrences of some index/token per module.
# TODO confirm against the generator of this data.
ii = [('SadlMLP.py', 1), ('PettTHE.py', 8), ('CarlTFR.py', 5), ('CookGHP2.py', 1), ('ClarGE.py', 1), ('MedwTAI.py', 1), ('WadeJEB.py', 2), ('KirbWPW2.py', 1), ('SoutRD.py', 1), ('JacoWHI.py', 1), ('DibdTRL.py', 2), ('TaylIF.py', 1)]
|
[
"varunwachaspati@gmail.com"
] |
varunwachaspati@gmail.com
|
76f9aab1e2a377a6f833911580d749f2a1040ff4
|
9b40158304af99888bb473814151bacf8873c8b5
|
/lists/views.py
|
846475ffbc67bf27a1ae84accafd681d2fd2f26f
|
[] |
no_license
|
bjweiqm/ddtt
|
0503f16b5982f7d9fea5358ceecad85427f5d705
|
3fac475230da4e1a01d6ed60ba4736e7b9b6079d
|
refs/heads/master
| 2016-08-11T21:08:18.438065
| 2016-02-24T10:30:21
| 2016-02-24T10:30:21
| 52,411,063
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 229
|
py
|
#! /usr/bin/env python3
#encoding:utf-8
from django.shortcuts import render
from django.http import HttpResponse
# Create your views here.
#在这儿编写视图
def home_page(request):
    """Render the site home page from the 'home.html' template."""
    return render(request, 'home.html')
|
[
"weimegn@126.com"
] |
weimegn@126.com
|
f2dc4a4849474e33b6122176af5ac9b4ec1419f3
|
7328d5cdad3a201953c750aaa04f6c197e9eb858
|
/apps/result/views.py
|
42a320a488ad6e199b6210e822d71d11f9464235
|
[] |
no_license
|
asfaqahmed/School-management-system-Django
|
9a2bbae13fbbaa6333dde0a166e7501d488f84c1
|
56f8c419c8c41b8c73f660ccd5ce0185e00a5894
|
refs/heads/main
| 2023-08-23T02:12:59.912450
| 2021-10-13T16:52:23
| 2021-10-13T16:52:23
| 416,824,014
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,251
|
py
|
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.contrib.auth.mixins import LoginRequiredMixin
from django.shortcuts import redirect, render
from django.views.generic import DetailView, ListView, View
from apps.students.models import Student
from .forms import CreateResults, EditResults
from .models import Result
@login_required
def create_result(request):
    """Two-step wizard for creating Result rows.

    Step 1 (POST of selected student ids, no "finish" key): re-render page 2
    with the chosen ids joined into a comma-separated string and a
    CreateResults form pre-filled with the current session/term.
    Step 2 (POST containing "finish"): for each selected student with a
    current class, create one Result per chosen subject unless an identical
    Result already exists, then redirect to the edit page.
    A plain GET falls through to the student-selection page.
    """
    students = Student.objects.all()
    if request.method == "POST":
        # after visiting the second page
        if "finish" in request.POST:
            form = CreateResults(request.POST)
            if form.is_valid():
                subjects = form.cleaned_data["subjects"]
                session = form.cleaned_data["session"]
                term = form.cleaned_data["term"]
                # Comma-separated ids carried over from step 1 (rebinds the
                # `students` name from a queryset to a string).
                students = request.POST["students"]
                results = []
                for student in students.split(","):
                    stu = Student.objects.get(pk=student)
                    if stu.current_class:
                        for subject in subjects:
                            # Skip (student, subject) pairs that already have
                            # a Result for this session/term/class.
                            check = Result.objects.filter(
                                session=session,
                                term=term,
                                current_class=stu.current_class,
                                subject=subject,
                                student=stu,
                            ).first()
                            if not check:
                                results.append(
                                    Result(
                                        session=session,
                                        term=term,
                                        current_class=stu.current_class,
                                        subject=subject,
                                        student=stu,
                                    )
                                )
                # Single bulk insert for all newly required Result rows.
                Result.objects.bulk_create(results)
                return redirect("edit-results")
        # after choosing students
        id_list = request.POST.getlist("students")
        if id_list:
            form = CreateResults(
                initial={
                    "session": request.current_session,
                    "term": request.current_term,
                }
            )
            studentlist = ",".join(id_list)
            return render(
                request,
                "result/create_result_page2.html",
                {"students": studentlist, "form": form, "count": len(id_list)},
            )
        else:
            messages.warning(request, "You didnt select any student.")
    return render(request, "result/create_result.html", {"students": students})
@login_required
def edit_results(request):
    """Edit Result rows for the current session/term via an EditResults
    formset.

    A valid POST saves and redirects back here with a success message; a GET
    builds the formset from the current session/term queryset.
    NOTE(review): on an invalid POST the bound (invalid) formset falls
    through to the final render so its errors are shown -- confirm intended.
    """
    if request.method == "POST":
        form = EditResults(request.POST)
        if form.is_valid():
            form.save()
            messages.success(request, "Results successfully updated")
            return redirect("edit-results")
    else:
        results = Result.objects.filter(
            session=request.current_session, term=request.current_term
        )
        form = EditResults(queryset=results)
    return render(request, "result/edit_results.html", {"formset": form})
class ResultListView(LoginRequiredMixin, View):
    """Display every student's results for the current session and term.

    Groups the Result rows by student id and accumulates test/exam totals
    in a single pass. (The original implementation re-scanned the entire
    queryset once per row, rebuilding identical per-student entries --
    O(n^2) for n results -- with the same final `bulk` contents.)
    """

    def get(self, request, *args, **kwargs):
        """Render all results grouped per student with score totals."""
        results = Result.objects.filter(
            session=request.current_session, term=request.current_term
        )
        bulk = {}
        for result in results:
            # First occurrence of a student creates their summary entry;
            # later rows for the same student reuse and extend it.
            entry = bulk.setdefault(
                result.student.id,
                {
                    "student": result.student,
                    "subjects": [],
                    "test_total": 0,
                    "exam_total": 0,
                    "total_total": 0,
                },
            )
            entry["subjects"].append(result)
            entry["test_total"] += result.test_score
            entry["exam_total"] += result.exam_score
            entry["total_total"] = entry["test_total"] + entry["exam_total"]
        context = {"results": bulk}
        return render(request, "result/all_results.html", context)
|
[
"asfaqahmed356@gmail.com"
] |
asfaqahmed356@gmail.com
|
f44596aea4d37e1378e087af1f88953f0c088cf2
|
54d8a05e0238e96eb43e4893bacba024e490bf11
|
/python-projects/algo_and_ds/task_scheduler_leetcode621.py
|
4aeb5a5c1e5db5792764f6e1ed7d8104f82af46e
|
[] |
no_license
|
infinite-Joy/programming-languages
|
6ce05aa03afd7edeb0847c2cc952af72ad2db21e
|
0dd3fdb679a0052d6d274d19040eadd06ae69cf6
|
refs/heads/master
| 2023-05-29T10:34:44.075626
| 2022-07-18T13:53:02
| 2022-07-18T13:53:02
| 30,753,185
| 3
| 5
| null | 2023-05-22T21:54:46
| 2015-02-13T11:14:25
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 2,126
|
py
|
"""
# TODO this is not working
task scheduler
leetcode 621
Input: tasks = ["A","A","A","B","B","B"], n = 2
Output: 8
Explanation:
A -> B -> idle -> A -> B -> idle -> A -> B
There is at least 2 units of time between any two same tasks.
we can try this using a max heap
[(3, 0, a), (3, 0, b)]
a=0, b=0
i = 0 so a and [(2, 2, a), (3, 0, b)] and a=3
i=1 and nothing in b so b and [(2, a), (2, b)] and a=3, b=4
i = 2 and less than a so idle and [(2, a), (2, b)] and a=3, b=4
i = 3 and = a so a and [(1, a), (2, b)] and a=6, b=4
i = 4 and = b so b and [(1, a), (1, b)] and a = 6, b=7
null and
time complexity
so basically i can keep a max heap of the elements and a counter to see any of the items are still hot
time complexity: O(number of distinct items)
space complexity: O(number of distinct items)
"""
from collections import Counter, defaultdict, deque
from heapq import heappop, heappush, heapreplace, heapify
from typing import List
class Solution:
    """LeetCode 621 (Task Scheduler) solved by greedy simulation.

    Always run the most-frequent still-pending task; a task that has just
    run is parked in a FIFO cooldown queue for ``n`` further time units.
    The original availability-dict version (flagged "not working" by its
    author) advanced the availability clock even on skipped/idle slots;
    this rewrite keeps the same class interface but is correct.
    """

    def preprocess(self, tasks):
        """Build a min-heap of (-count, task) pairs (i.e. a max-heap on count)."""
        counts = Counter(tasks)
        heap = [(-c, t) for t, c in counts.items()]
        heapify(heap)
        return heap

    def units_count(self, heap, n):
        """Simulate the CPU one time unit at a time.

        ``cooldown`` holds (ready_time, heap_entry) pairs in FIFO order;
        an entry re-enters the heap when the clock reaches its ready_time.
        Returns the total number of time units, idle slots included.
        """
        time = 0
        cooldown = deque()
        while heap or cooldown:
            time += 1
            # A task whose cooldown has expired becomes runnable again.
            if cooldown and cooldown[0][0] == time:
                heappush(heap, cooldown.popleft()[1])
            if heap:  # otherwise this time unit is an idle slot
                neg_remaining, task = heappop(heap)
                if neg_remaining + 1 < 0:  # still has runs left
                    cooldown.append((time + n + 1, (neg_remaining + 1, task)))
        return time

    def leastInterval(self, tasks: List[str], n: int) -> int:
        """Return the minimum number of time units needed to run all tasks."""
        heap = self.preprocess(tasks)
        return self.units_count(heap, n)
# Sample cases from the LeetCode 621 problem statement; per the
# statement the expected outputs are 8, 6 and 16 respectively.
for tasks, n in (
    (["A", "A", "A", "B", "B", "B"], 2),
    (["A", "A", "A", "B", "B", "B"], 0),
    (["A", "A", "A", "A", "A", "A", "B", "C", "D", "E", "F", "G"], 2),
):
    sol = Solution()
    print(sol.leastInterval(tasks, n))
|
[
"joydeepubuntu@gmail.com"
] |
joydeepubuntu@gmail.com
|
3a6a5df5f8a628756abc9856951e29d2fcb67018
|
ebc31d1c4ef8d1a28688a3822353d0312f7805a6
|
/entree/host/profiles/admin.py
|
da5b4f67ccd86b0e08491bc57590270d8c3fb27b
|
[] |
no_license
|
yedpodtrzitko/django-entree
|
4cf63232a9c3a975e8a682bb3e7250d6bb543166
|
7bca1bd13dc29a0310b2047f3d9095c7b35e6947
|
refs/heads/master
| 2021-01-04T14:07:25.874683
| 2013-07-04T23:17:05
| 2013-07-04T23:17:05
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,517
|
py
|
from cache_tools.utils import get_cached_object
from django.conf import settings
from django.contrib import admin
from django.forms import BaseInlineFormSet
from django.utils.translation import ugettext_lazy as _
from entree.host.models import EntreeSite
from entree.host.profiles.models import SiteProperty
class ResidentBlahFormSet(BaseInlineFormSet):
    """Inline formset pinned to the sentinel "no site" EntreeSite.

    Forces ``instance`` to the cached EntreeSite identified by
    ``settings.ENTREE['NOSITE_ID']``; falls back to ``None`` when that
    row does not exist.
    """

    def __init__(self, *args, **kwargs):
        try:
            # Override whatever instance the admin passed in.
            kwargs['instance'] = get_cached_object(EntreeSite, pk=settings.ENTREE['NOSITE_ID'])
        except EntreeSite.DoesNotExist:
            kwargs['instance'] = None
        super(ResidentBlahFormSet, self).__init__(*args, **kwargs)
class SitePropertiesAdmin(admin.TabularInline):
    """Tabular inline for per-site SiteProperty rows (slug auto-filled from name)."""
    model = SiteProperty
    prepopulated_fields = {"slug": ("name",)}
    verbose_name_plural = _("Site properties (available only for current site)")
class ResidentSitePropertiesAdmin(admin.TabularInline):
    """Tabular inline for resident-wide SiteProperty rows.

    ``get_formset`` swaps the edited object for the sentinel "no site"
    EntreeSite so resident properties apply to all sites; if that row is
    missing, the original object is kept unchanged.
    """
    model = SiteProperty
    #formset = ResidentBlahFormSet
    prepopulated_fields = {"slug": ("name",)}
    verbose_name_plural = _("Resident properties (applied to all sites)")
    verbose_name = _("Resident property (applies to all sites)")

    def get_formset(self, request, obj=None, **kwargs):
        try:
            obj = get_cached_object(EntreeSite, pk=settings.ENTREE['NOSITE_ID'])
        except EntreeSite.DoesNotExist:
            pass
        return super(ResidentSitePropertiesAdmin, self).get_formset(request, obj, **kwargs)
admin.site.register(SiteProperty, SitePropertiesAdmin)
|
[
"yed@vanyli.net"
] |
yed@vanyli.net
|
c1a2bb3a2f2d23bb9480bbc734db437ea0e088b0
|
53c3f5e9bbaa52b80b2644a04e3ff336218ab4e8
|
/Year 2/Saul Burgess/Labs/2020-10-01/Lab6(3-B).py
|
089f8aabc4dac642caaf69010ddb640c39d3ef4e
|
[] |
no_license
|
DT211C-2019/programming
|
085011a709e0f8729df8184c3b71c82d06723398
|
dd15978e7b8059834e5a11f7036cb380ed52b202
|
refs/heads/master
| 2021-06-23T17:41:59.463000
| 2021-05-05T00:04:50
| 2021-05-05T00:04:50
| 216,625,686
| 10
| 3
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 99
|
py
|
# Print the 1-9 multiplication table, one product per line.
for left in range(1, 10):
    for right in range(1, 10):
        product = left * right
        print(left, "Multiplied by", right, "is", (product))
|
[
"C19349793@mytudublin.ie"
] |
C19349793@mytudublin.ie
|
ea2335cf507636acb309773f21249b1500c086a9
|
f3ac41b803545a7a272afaf25ed173598ce5b3bc
|
/aggressive_pipe_test.py
|
db766c521fc36c589272a9d36b13dffcab6f39b5
|
[] |
no_license
|
rynstewart/ECE_P4_Advanced_MIPS_Sim
|
4ec83eab3c19dbd504d1137d4e8a51d05ba6db9c
|
72f4cddc69d76b6075ac55045f659da97143a421
|
refs/heads/master
| 2020-09-16T14:05:04.161919
| 2019-12-09T03:50:01
| 2019-12-09T03:50:01
| 223,792,445
| 0
| 0
| null | 2019-12-09T03:50:02
| 2019-11-24T18:43:10
|
Python
|
UTF-8
|
Python
| false
| false
| 35,540
|
py
|
import time
class Statistics_Pipeine:
    """Collects and reports statistics for the pipelined MIPS simulation.

    Tracks the total cycle count, how many stalls of each kind the hazard
    unit inserted, and how often each forwarding path was exercised.
    (Class name spelling kept as-is: callers reference it.)
    """

    def __init__(self, debugMode):
        self.I = ""                     # current instruction being executed
        self.name = ""                  # name of the instruction
        self.cycle = 0                  # total cycles in simulation
        self.NOPcount = 0               # keeps track of NOPs
        self.branch_taken_stall = 0     # flushes caused by taken branches
        self.lw_use_stall = 0           # load-use hazard stalls
        self.compute_branch_stall = 0   # ALU-result-then-branch stalls
        self.lw_branch_stall = 0        # load-then-branch stalls
        # Forwarding-path usage counters (value source -> consumer).
        self.ALUoutM_ALUSrcB = 0
        self.ALUoutM_ALUSrcA = 0
        self.ALUoutM_EqualD = 0
        self.ALUoutM_WriteDataE = 0
        self.ResultW_ALUSrcA = 0
        self.ResultW_ALUSrcB = 0
        self.ResultW_EqualD = 0
        self.ResultW_WriteDataE = 0
        self.debugMode = debugMode

    def log_forward(self, cycle, ALUoutM_ALUSrcB, ALUoutM_ALUSrcA, ALUoutM_EqualD, ALUoutM_WriteDataE, ResultW_ALUSrcA, ResultW_ALUSrcB, ResultW_EqualD, ResultW_WriteDataE):
        """Accumulate a cycle-count delta and forwarding-path usage deltas."""
        self.cycle += cycle
        self.ALUoutM_ALUSrcB += ALUoutM_ALUSrcB
        self.ALUoutM_ALUSrcA += ALUoutM_ALUSrcA
        self.ALUoutM_EqualD += ALUoutM_EqualD
        self.ALUoutM_WriteDataE += ALUoutM_WriteDataE
        self.ResultW_ALUSrcA += ResultW_ALUSrcA
        self.ResultW_ALUSrcB += ResultW_ALUSrcB
        self.ResultW_EqualD += ResultW_EqualD
        self.ResultW_WriteDataE += ResultW_WriteDataE

    def log_stall(self, branch_taken_stall, lw_use_stall, compute_branch_stall, lw_branch_stall):
        """Accumulate stall-count deltas, one argument per stall kind."""
        self.branch_taken_stall += branch_taken_stall
        self.lw_use_stall += lw_use_stall
        self.compute_branch_stall += compute_branch_stall
        self.lw_branch_stall += lw_branch_stall

    def exitSim(self):
        """Print the end-of-simulation statistics report."""
        print("***Finished simulation***")
        print("Total # of cycles: " + str(self.cycle))
        print("\nDelay Statistics:")
        print(" " + str(self.branch_taken_stall) + " taken branch stalls" )
        print(" " + str(self.lw_use_stall) + " lw use stall" )
        print(" " + str(self.compute_branch_stall) + " compute branch stall" )
        print(" " + str(self.lw_branch_stall) + " lw then branch stall" )
        print("\n Forwarding Path Statistics:")
        print(" " + str(self.ALUoutM_ALUSrcA) + " ALUOutM ‐> SrcAE" )
        print(" " + str(self.ALUoutM_ALUSrcB) + " ALUOutM ‐> SrcBE" )
        print(" " + str(self.ALUoutM_EqualD) + " ALUOutM ‐> EqualD" )
        print(" " + str(self.ALUoutM_WriteDataE) + " ALUOutM ‐> WriteDataE" )
        print(" " + str(self.ResultW_ALUSrcA) + " ResultW ‐> SrcAE" )
        print(" " + str(self.ResultW_ALUSrcB) + " ResultW ‐> SrcBE" )
        # Fix: the last two labels were swapped relative to the counters
        # they printed (EqualD count carried the WriteDataE label and
        # vice versa).
        print(" " + str(self.ResultW_EqualD) + " ResultW ‐> EqualD" )
        print(" " + str(self.ResultW_WriteDataE) + " ResultW ‐> WriteDataE" )
def saveJumpLabel(asm, labelIndex, labelName, labelAddr):
    """Record every 'label:' line's name, line index, and byte address
    (index * 4) into the three output lists, then strip blank lines
    ('\\n') from *asm* in place."""
    for idx, raw in enumerate(asm):
        squeezed = raw.replace(" ", "")
        if ":" in squeezed:
            labelName.append(squeezed[:squeezed.index(":")])  # label name
            labelIndex.append(idx)                            # label's line index
            labelAddr.append(idx * 4)                         # byte address
    # Drop every remaining empty line.
    while "\n" in asm:
        asm.remove("\n")
def regNameInit(regName):
    """Fill *regName* in place with register names "0".."23" plus "lo" and "hi"."""
    regName.extend(str(reg) for reg in range(24))
    regName.extend(("lo", "hi"))
def final_print(regval, MEM, PC, DIC):
    """Dump the final machine state: all 26 registers (0-23, lo, hi),
    PC, the dynamic instruction count, and a hex table of the memory
    region 0x2000-0x2040 (8 words per row)."""
    print("REGISTERS:")
    print("-----------")
    for x in range(len(regval)):
        # Indices 24/25 are the lo/hi multiply registers.
        if(x == 24):
            print("lo: ", hex(regval[x]))
        elif(x == 25):
            print("hi: ", hex(regval[x]))
        else:
            print("$", x,": ", hex(regval[x]))
    print("PC: ", hex(PC))
    print("DIC: ", hex(DIC))
    print("\n*************************************** Used Memory values ***************************************\n")
    # Column header: word offsets 0x00..0x1c.
    print("offset: ", end="")
    for x in range(0,8,1):
        print("0x"+ format((x*4),"08x"), end=" ")
    print("\n")
    print("--------------------------------------------------------------------------------------------------",end="")
    count = 0
    print("\n")
    y=0
    for x in range(0x2000,0x2041,1):
        # Start a new row label every 0x20 bytes.
        if((x-0x2000) % 0x20 ==0):
            print("0x" +format(0x2000+ y,"08x") + '|', end=" ")
            y += 0x20
        # Wrap after 8 printed words.
        if count == 8:
            count = 0
            print(" ", end = "\n")
        # Only word-aligned addresses hold values to show.
        if((x-0x2000)%4==0):
            print('0x'+format(MEM[x], "08x"), end=" ")
            count += 1
def get_imm(instr, index):
    """Parse instr[index] as an immediate value: base-16 when the token
    contains '0x' (prefix stripped), otherwise base-10."""
    token = instr[index]
    if '0x' in token:
        return int(token[2:], 16)
    return int(token)
#############################################################################
# this is the main cycle information tracker, it looks at the list
# instr_cycles to see the first entry for which cycle an instruction is on
# then removes that cycle. It repeats this process for the next instructins
# as well. True is returned if an instruction still has cycles, false once
# the list has no more cycles.
#############################################################################
def cycle_tracker(instr_cycles, DIC):
    """Print which pipeline stage each in-flight instruction occupies and
    consume those stage entries.

    Each instr_cycles[i] is [name, ["Fetch",...], ["Decode",...], ...];
    index 1 always holds the instruction's *current* stage, which is
    deleted once reported.  The instruction at *DIC* is reported first,
    then the younger instructions behind it in earlier stages.  Returns
    True while the instruction at DIC still has stages left, False once
    a younger instruction is missing or the pipeline view is exhausted.

    Fix: in the Write Back branch the original had a bare no-op
    expression (``instr_cycles[index][1]``) where every other branch
    uses ``del``, so the Write Back stage entry was never consumed.
    """
    index = DIC
    print('\n************************ CYCLE INFORMATION ************************\n')
    if "Fetch" in instr_cycles[index][1]:
        print("\n"+instr_cycles[index][0] +" in Fetch Cycle")
        del instr_cycles[index][1]
        return True
    if "Decode" in instr_cycles[index][1]:
        print("\n" + instr_cycles[index][0] + " in Decode Cycle")
        del instr_cycles[index][1]
        try:
            if "Fetch" in instr_cycles[index+1][1]:
                print("\n"+instr_cycles[index+1][0] +" in Fetch Cycle")
                del instr_cycles[index+1][1]
        except:
            return False
        return True
    if "Execute" in instr_cycles[index][1]:
        print("\n" + instr_cycles[index][0] + " in Execute Cycle")
        # Extra entries after the stage name are forwarding-path labels.
        if(instr_cycles[index][1][1] != ""):
            print("\n\tForwarding Path(s):")
            for k in range(1,len(instr_cycles[index][1]),1):
                print("\n\t" + instr_cycles[index][1][k])
        del instr_cycles[index][1]
        try:
            if "Decode" in instr_cycles[index+1][1]:
                print("\n"+instr_cycles[index+1][0] +" in Decode Cycle")
                del instr_cycles[index+1][1]
        except:
            return False
        try:
            if "Fetch" in instr_cycles[index+2][1]:
                print("\n"+instr_cycles[index+2][0] +" in Fetch Cycle")
                del instr_cycles[index+2][1]
        except:
            return False
        return True
    if "Memory" in instr_cycles[index][1]:
        print("\n" + instr_cycles[index][0] + " in Memory Cycle")
        del instr_cycles[index][1]
        try:
            if "Execute" in instr_cycles[index+1][1]:
                print("\n"+instr_cycles[index+1][0] +" in Execute Cycle")
                if(instr_cycles[index+1][1][1] != ""):
                    for k in range(1,len(instr_cycles[index+1][1]),1):
                        print("\n\t" + instr_cycles[index+1][1][k])
                del instr_cycles[index+1][1]
        except:
            return False
        try:
            if "Decode" in instr_cycles[index+2][1]:
                print("\n"+instr_cycles[index+2][0] +" in Decode Cycle")
                del instr_cycles[index+2][1]
        except:
            return False
        try:
            if "Fetch" in instr_cycles[index+3][1]:
                print("\n"+instr_cycles[index+3][0] +" in Fetch Cycle")
                del instr_cycles[index+3][1]
        except:
            return False
        return True
    if "Write Back" in instr_cycles[index][1]:
        print("\n" + instr_cycles[index][0] + " in Write Back Cycle")
        if(instr_cycles[index][1][1] != ""):
            for k in range(1,len(instr_cycles[index][1]),1):
                print("\n\t" + instr_cycles[index][1][k])
        # Fix: consume the Write Back stage (original line was a no-op).
        del instr_cycles[index][1]
        try:
            if "Memory" in instr_cycles[index+1][1]:
                print("\n"+instr_cycles[index+1][0] +" in Memory Cycle")
                del instr_cycles[index+1][1]
        except:
            return False
        try:
            if "Execute" in instr_cycles[index+2][1]:
                print("\n"+instr_cycles[index+2][0] +" in Execute Cycle")
                if(instr_cycles[index+2][1][1] != ""):
                    for k in range(1,len(instr_cycles[index+2][1]),1):
                        print("\n\t" + instr_cycles[index+2][1][k])
                del instr_cycles[index+2][1]
        except:
            return False
        try:
            if "Decode" in instr_cycles[index+3][1]:
                print("\n"+instr_cycles[index+3][0] +" in Decode Cycle")
                del instr_cycles[index+3][1]
        except:
            return False
        try:
            if "Fetch" in instr_cycles[index+4][1]:
                print("\n"+instr_cycles[index+4][0] +" in Fetch Cycle")
                del instr_cycles[index+4][1]
        except:
            return False
    return False
#############################################################################
# This function handles hazard detection. If a hazard is detected it modifies
# the inst_cycles list to include a fowarding path in the cycle
# for the cases that a forwarding path is not enough to avoid a hazard this
# function inserts an NOP to stall in the asn list
#############################################################################
def hazards_handle(stats, instr_cycles, asm, instr):
    """Detect data hazards between the newest instruction and its
    predecessor.

    Compares the instruction just appended to *instr_cycles* (at index
    DIC, source line asm[instr]) with the previous real instruction
    (skipping up to two label/comment lines via *i*).  On a hazard it
    records a forwarding-path label in the predecessor's stage lists and
    logs forwarding/stall counts on *stats*; when forwarding alone is not
    enough (branch after ALU op, lw-use, lw-then-branch) it inserts one
    or two stall entries into *instr_cycles*.  Returns *instr* only on
    the early-exit paths; callers ignore the return value.
    """
    NOP = "STALL INSTRUCTION"
    # Template pipeline entry for an inserted stall.
    add_in = [NOP,["Fetch",""], ["Decode",""], ["Execute",""], ["Memory", ""], ["Write Back", ""]]
    DIC = len(instr_cycles) - 1
    try:
        # No predecessor yet -> nothing to compare against.
        instr_cycles[DIC-1]
    except:
        return instr
    # Walk back over up to two label/comment source lines.
    i = 1
    if ":" in asm[instr-i] or "#" in asm[instr-i]:
        i = 2
    if ":" in asm[instr-i] or "#" in asm[instr-i]:
        i = 3
    if instr - i < 0:
        return instr
    # Strip the current line down to its comma-separated operand fields.
    current = asm[instr]
    current = current.replace("\n","") # Removes extra chars
    current = current.replace("$","")
    current = current.replace(" ","")
    current_name = instr_cycles[DIC][0]
    current = current.replace(current_name,"")
    current = current.replace("(",",")
    current = current.replace(")","")
    current = current.split(",")
    # Same stripping for the previous instruction.
    prev = asm[instr-i]
    prev = prev.replace("\n","") # Removes extra chars
    prev = prev.replace("$","")
    prev = prev.replace(" ","")
    prev_name = instr_cycles[DIC-1][0]
    prev = prev.replace(prev_name,"")
    prev = prev.replace("(",",")
    prev = prev.replace(")","")
    prev = prev.split(",")
    norm_instri = ["addi", "ori", "sll"]   # I-type ALU ops
    norm_instrr = ["xor", "addu", "sltu", "slt", "sub"]  # R-type ALU ops
    branch = ["beq", "bne"]
    # ALU op followed by ALU op: forward ALUOutM into SrcA/SrcB.
    if prev_name in norm_instrr or prev_name in norm_instri:
        prd = int(prev[0])
        if current_name in norm_instri:
            rs = int(current[1])
            if rs == prd:
                stats.log_forward(0,0,1,0,0,0,0,0,0)
                instr_cycles[DIC-1][3][1] = "ALUoutM --> ALUSrcA"
        if current_name in norm_instrr:
            rt = int(current[2])
            rs = int(current[1])
            if rs == prd:
                stats.log_forward(0,0,1,0,0,0,0,0,0)
                if instr_cycles[DIC-1][3][1] == "":
                    instr_cycles[DIC-1][3].append("ALUoutM --> ALUSrcA")
                else:
                    instr_cycles[DIC-1][3][1] = "ALUoutM --> ALUSrcA"
            if rt == prd:
                stats.log_forward(0,1,0,0,0,0,0,0,0)
                if instr_cycles[DIC-1][3][1] == "":
                    instr_cycles[DIC-1][3].append("ALUoutM --> ALUSrcB")
                else:
                    instr_cycles[DIC-1][3][1] = "ALUoutM --> ALUSrcB"
    # ALU op followed by sw: forward the stored value.
    if prev_name in norm_instrr or prev_name in norm_instri:
        prd = int(prev[0])
        if current_name == "sw":
            rt = int(current[2])
            if rt == prd:
                stats.log_forward(0,1,0,0,0,0,0,0,0)
                instr_cycles[DIC-1][3][1] = "ALUoutM --> ALUSrcB"
    # ALU op followed by a branch: forward to EqualD plus one stall.
    # NOTE(review): operand indices here (current[0]/current[1]) differ
    # from the lw-then-branch block below (current[1]/current[2]) —
    # confirm which matches the stripped-line format.
    if prev_name in norm_instrr or prev_name in norm_instri:
        prd = int(prev[0])
        if current_name in branch:
            rt = int(current[1])
            rs = int(current[0])
            if rs == prd or rt == prd:
                stats.log_forward(0,0,0,1,0,0,0,0,0)
                instr_cycles[DIC-1][3][1] = "ALUoutM --> EqualD"
                stats.log_stall(0,0,1,0)
                # NOTE(review): instr_cycles[DIC-1] is a list and NOP a
                # string, so this comparison is always True — confirm
                # whether instr_cycles[DIC-1][0] was intended.
                if instr_cycles[DIC-1] != NOP:
                    instr_cycles.insert(DIC, add_in)
    # ALU op followed by lw: forward the base-address register.
    if prev_name in norm_instrr or prev_name in norm_instri:
        prd = int(prev[0])
        if current_name == "lw":
            rt = int(current[2])
            if rt == prd:
                stats.log_forward(0,1,0,0,0,0,0,0,0)
                instr_cycles[DIC-1][3][1] = "ALUoutM --> ALUSrcB"
    # lw followed by a branch: forward ResultW to EqualD, two stalls.
    if prev_name == "lw":
        prd = int(prev[0])
        if current_name in branch:
            rt = int(current[2])
            rs = int(current[1])
            if rs == prd or rt == prd:
                stats.log_forward(0,0,0,0,0,0,0,1,0)
                instr_cycles[DIC - 1][5][1] = "ResultW --> EqualD"
                stats.log_stall(0,0,0,2)
                if instr_cycles[DIC-1] != NOP:
                    instr_cycles.insert(DIC, add_in)
                    instr_cycles.insert(DIC, add_in)
    # lw followed by an ALU op (load-use): forward ResultW, one stall.
    # NOTE(review): ``prev_name in "lw"`` is a substring test (also true
    # for "l" or "w"), and the second clause reads ``prev_name in
    # norm_instri`` where ``current_name`` looks intended — confirm.
    if prev_name in "lw":
        prd = int(prev[0])
        if current_name in norm_instrr or prev_name in norm_instri:
            rt = int(current[2])
            rs = int(current[1])
            if rs == prd:
                stats.log_forward(0,0,0,0,0,1,0,0,0)
                if instr_cycles[DIC-1][3][1] == "":
                    instr_cycles[DIC-1][5].append("ResultW --> ALUSrcA")
                else:
                    instr_cycles[DIC-1][5][1] = "ResultW --> ALUSrcA"
            if rt == prd:
                stats.log_forward(0,0,0,0,0,0,1,0,0)
                if instr_cycles[DIC-1][5][1] == "":
                    instr_cycles[DIC-1][5].append("ResultW --> ALUSrcB")
                else:
                    instr_cycles[DIC-1][5][1] = "ResultW --> ALUSrcB"
            if rt == prd or rs == prd:
                stats.log_stall(0,1,0,0)
                if instr_cycles[DIC-1] != NOP:
                    instr_cycles.insert(DIC, add_in)
def simulate(Instructions, f, debugMode):
    """Run the pipelined MIPS simulation over *Instructions*.

    Two passes over the program: loop 0 executes the instructions,
    appends their pipeline entries to *instr_cycles*, and runs hazard
    detection; loop 1 re-executes them and, when ``debugMode == 1``,
    steps interactively through the recorded cycle information.
    Per-instruction traces are written to the output file *f* (a path,
    rebound to the open file object).  Supported opcodes: addi, xor,
    addu, sltu, slt, ori, bne, beq, lw, sw, sub, sll.
    """
    labelIndex = []
    labelName = []
    labelAddr = []
    regName = []
    regNameInit(regName)
    LO = 24
    HI = 25
    stats = Statistics_Pipeine(debugMode)
    NOP = "STALL INSTRUCTION"
    saveJumpLabel(Instructions,labelIndex,labelName, labelAddr)
    instr_cycles = []
    f = open(f,"w+")
    for loop in range(2):
        # Reset machine state for each pass.
        DIC = 0
        PC = 0
        MEM = [0]*12288 #intialize array to all 0s for 0x3000 indices
        regval = [0]*26 #0-23 and lo, hi
        lineCount = 0
        i = 0
        not_Done = True
        if loop == 1:
            # Total cycles = one per pipeline entry plus 4 fill cycles.
            tot_cycles = len(instr_cycles) + 4
            stats.log_forward(tot_cycles,0,0,0,0,0,0,0,0)
        while lineCount < len(Instructions):
            line = Instructions[lineCount]
            # Interactive single-step prompt (diagnosis mode, pass 2).
            if(debugMode == 1 and loop == 1):
                while(True):
                    if not_Done:
                        user_pause = input("Press enter to continue or q to quit diagnosis mode:\n\n")
                        if(user_pause == ""):
                            print('MIPS Instruction: ' + line + '\n')
                            break
                        if(user_pause == "q"):
                            print("Continuing in non-stop mode\n")
                            debugMode = 2
                            break
                        else:
                            continue
                    break
            f.write('------------------------------ \n')
            if(not(':' in line)):
                f.write('MIPS Instruction: ' + line + '\n')
            # Normalize the source line before opcode matching.
            line = line.replace("\n","") # Removes extra chars
            line = line.replace("$","")
            line = line.replace(" ","")
            line = line.replace("zero","0") # assembly can also use both $zero and $0
            if(line[0:4] == "addi"): # ADDI, $t = $s + imm; advance_pc (4); addi $t, $s, imm
                if(debugMode == 1 and loop == 1):
                    if not_Done:
                        not_Done = cycle_tracker(instr_cycles, DIC)
                        continue
                    DIC += 1
                    not_Done = True
                line = line.replace("addi","")
                line = line.split(",")
                imm = get_imm(line,2)
                regval[int(line[0])] = (regval[int(line[1])] + imm) & 0xFFFFFFFF
                f.write('Operation: $' + line[0] + ' = ' + '$' + line[1] + ' + ' + line[2] + '; ' + '\n')
                f.write('Registers that have changed: ' + '$' + line[0] + ' = ' + str(regval[int(line[0])]) + '\n')
                if(debugMode != 1):
                    DIC += 1
                PC += 4
                if(loop == 0):
                    # addi into $23 is treated as the NOP convention.
                    if(int(line[0])==23):
                        instr_cycles.append(["addi-NOP INSTRUCTION", ["Fetch",""], ["Decode",""], ["Execute",""], ["Memory", ""], ["Write Back", ""]])
                    else:
                        instr_cycles.append(["addi", ["Fetch",""], ["Decode",""], ["Execute",""], ["Memory", ""], ["Write Back", ""]])
                    hazards_handle(stats, instr_cycles, Instructions, lineCount)
            elif(line[0:3] == "xor"): #$d = $s ^ $t; advance_pc (4); xor $d, $s, $t
                if(debugMode == 1 and loop == 1):
                    if not_Done:
                        not_Done = cycle_tracker(instr_cycles, DIC)
                        continue
                    DIC += 1
                    not_Done = True
                line = line.replace("xor","")
                line = line.split(",")
                x = regval[int(line[1])]
                y = regval[int(line[2])]
                z = int(x)^int(y)
                regval[int(line[0])] = z & 0xFFFFFFFF
                f.write('Operation: $' + line[0] + ' = ' + '$' + line[1] + ' ^ $' + line[2] + '; ' + '\n')
                f.write('PC is now at ' + str(PC) + '\n')
                f.write('Registers that have changed: ' + '$' + line[0] + ' = ' + str(regval[int(line[0])]) + '\n')
                if(debugMode != 1):
                    DIC += 1
                if(loop == 0):
                    instr_cycles.append(["xor", ["Fetch",""], ["Decode",""], ["Execute",""], ["Memory", ""], ["Write Back", ""]])
                    hazards_handle(stats, instr_cycles, Instructions, lineCount)
            #addu
            elif(line[0:4] == "addu"):
                if(debugMode == 1 and loop == 1):
                    if not_Done:
                        not_Done = cycle_tracker(instr_cycles, DIC)
                        continue
                    DIC += 1
                    not_Done = True
                line = line.replace("addu","")
                line = line.split(",")
                if(debugMode != 1):
                    DIC += 1
                if(loop == 0):
                    instr_cycles.append(["addu", ["Fetch",""], ["Decode",""], ["Execute",""], ["Memory", ""], ["Write Back", ""]])
                    hazards_handle(stats, instr_cycles, Instructions, lineCount)
                # Unsigned add: absolute values summed, wrapped to 32 bits.
                regval[int(line[0])] = (abs(regval[int(line[1])]) + abs(regval[int(line[2])])) & 0xFFFFFFFF
                f.write('Operation: $' + line[0] + ' = ' + '$' + line[1] + ' + ' + '$' + line[2] + '; ' + '\n')
                f.write('PC is now at ' + str(PC) + '\n')
                f.write('Registers that have changed: ' + '$' + line[0] + ' = ' + str(regval[int(line[0])]) + '\n')
            elif(line[0:4] == "sltu"):
                if(debugMode == 1 and loop == 1):
                    if not_Done:
                        not_Done = cycle_tracker(instr_cycles, DIC)
                        continue
                    DIC += 1
                    not_Done = True
                line = line.replace("sltu","")
                line = line.split(",")
                # Set-on-less-than (unsigned via abs()).
                if(abs(regval[int(line[1])]) < abs(regval[int(line[2])])):
                    regval[int(line[0])] = 1
                else:
                    regval[int(line[0])] = 0
                PC = PC + 4
                if(debugMode != 1):
                    DIC += 1
                if(loop == 0):
                    instr_cycles.append(["sltu", ["Fetch",""], ["Decode",""], ["Execute",""], ["Memory", ""], ["Write Back", ""]])
                    hazards_handle(stats, instr_cycles, Instructions, lineCount)
                f.write('Operation: $' + line[0] + ' = ' + '$' + line[1] + ' < $' + line[2] + '? 1 : 0 ' + '\n')
                f.write('PC is now at ' + str(PC) + '\n')
                f.write('Registers that have changed: ' + '$' + line[0] + ' = ' + str(regval[ int(line[0]) ]) + '\n')
            elif(line[0:3] == "slt"):
                if(debugMode == 1 and loop == 1):
                    if not_Done:
                        not_Done = cycle_tracker(instr_cycles, DIC)
                        continue
                    DIC += 1
                    not_Done = True
                line = line.replace("slt","")
                line = line.split(",")
                # Signed set-on-less-than.
                if(regval[int(line[1])] < regval[int(line[2])]):
                    regval[int(line[0])] = 1
                else:
                    regval[int(line[0])] = 0
                PC = PC + 4
                if(debugMode != 1):
                    DIC += 1
                if(loop == 0):
                    # NOTE(review): pipeline entry is labelled "sltu"
                    # here although the opcode is slt — confirm.
                    instr_cycles.append(["sltu", ["Fetch",""], ["Decode",""], ["Execute",""], ["Memory", ""], ["Write Back", ""]])
                    hazards_handle(stats, instr_cycles, Instructions, lineCount)
                f.write('Operation: $' + line[0] + ' = ' + '$' + line[1] + ' < $' + line[2] + '? 1 : 0 ' + '\n')
                f.write('PC is now at ' + str(PC) + '\n')
                f.write('Registers that have changed: ' + '$' + line[0] + ' = ' + str(regval[ int(line[0]) ]) + '\n')
            elif(line[0:3] == "ori"):
                if(debugMode == 1 and loop == 1):
                    if not_Done:
                        not_Done = cycle_tracker(instr_cycles, DIC)
                        continue
                    DIC += 1
                    not_Done = True
                line = line.replace("ori", "")
                line = line.split(",")
                imm = get_imm(line,2)
                PC = PC + 4
                regval[int(line[0])] = (imm | regval[int(line[1])]) & 0xFFFFFFFF
                if(loop == 0):
                    instr_cycles.append(["ori", ["Fetch",""], ["Decode",""], ["Execute",""], ["Memory", ""], ["Write Back", ""]])
                    hazards_handle(stats, instr_cycles, Instructions, lineCount)
                if(debugMode != 1):
                    DIC += 1
                f.write('Operation: $' + line[0] + '= $' + line[1] + " | " + line[2] + '\n'),
                f.write('PC is now at ' + str(PC) + '\n')
                f.write('Registers that have changed: ' + '$' + line[0] + '=' + line[2] + '\n')
            #bne
            elif(line[0:3] == "bne"): # BNE
                if (debugMode == 1 and loop == 1):
                    if not_Done:
                        not_Done = cycle_tracker(instr_cycles, DIC)
                        continue
                    DIC += 1
                    not_Done = True
                line = line.replace("bne","")
                line = line.split(",")
                if(debugMode != 1):
                    DIC += 1
                if(loop == 0):
                    instr_cycles.append(["bne", ["Fetch",""], ["Decode",""], ["Execute",""], ["Memory", ""], ["Write Back", ""]])
                    hazards_handle(stats, instr_cycles, Instructions, lineCount)
                if(regval[int(line[0])]!=regval[int(line[1])]):
                    if(line[2].isdigit()): # First,test to see if it's a label or a integer
                        PC = int(line[2])*4
                        lineCount = int(line[2])
                        f.write('PC is now at ' + str(line[2]) + '\n')
                    else: # Jumping to label
                        for i in range(len(labelName)):
                            if(labelName[i] == line[2]):
                                PC = labelAddr[i]
                                lineCount = labelIndex[i]
                                f.write('PC is now at ' + str(labelAddr[i]) + '\n')
                                break
                    # Taken branch: insert a flush NOP (pass 0 only).
                    if Instructions[lineCount+1] != NOP and loop == 0:
                        instr_cycles.append([NOP,["Fetch",""], ["Decode",""], ["Execute",""], ["Memory", ""], ["Write Back", ""]])
                        stats.log_stall(1, 0, 0, 0)
                    f.write('No Registers have changed. \n')
                    continue
                f.write('No Registers have changed. \n')
            #beq
            elif(line[0:3] == "beq"): # Beq
                if (debugMode == 1 and loop == 1):
                    if not_Done:
                        not_Done = cycle_tracker(instr_cycles, DIC)
                        continue
                    DIC += 1
                    not_Done = True
                line = line.replace("beq","")
                line = line.split(",")
                if(debugMode != 1):
                    DIC += 1
                if(loop == 0):
                    instr_cycles.append(["beq", ["Fetch",""], ["Decode",""], ["Execute",""], ["Memory", ""], ["Write Back", ""]])
                    hazards_handle(stats, instr_cycles, Instructions, lineCount)
                if(regval[int(line[0])]==regval[int(line[1])]):
                    if(line[2].isdigit()): # First,test to see if it's a label or a integer
                        PC = int(line[2])*4
                        lineCount = int(line[2])
                        f.write('PC is now at ' + str(line[2]) + '\n')
                        # NOTE(review): the next write uses labelAddr[i]
                        # from a stale loop variable in the numeric-target
                        # path — confirm it is intentional.
                        f.write('PC is now at ' + str(labelAddr[i]) + '\n')
                        f.write('No Registers have changed. \n')
                    else: # Jumping to label
                        for i in range(len(labelName)):
                            if(labelName[i] == line[2]):
                                PC = labelAddr[i]
                                lineCount = labelIndex[i]
                                f.write('PC is now at ' + str(labelAddr[i]) + '\n')
                                f.write('No Registers have changed. \n')
                                break
                    # Taken branch: insert a flush NOP (pass 0 only).
                    if Instructions[lineCount+1] != NOP and loop == 0:
                        instr_cycles.append([NOP,["Fetch",""], ["Decode",""], ["Execute",""], ["Memory", ""], ["Write Back", ""]])
                        stats.log_stall(1, 0, 0, 0)
                    continue
                f.write('No Registers have changed. \n')
            elif(line[0:2] =="lw" and not('lw_loop' in line)):
                if (debugMode == 1 and loop == 1):
                    if not_Done:
                        not_Done = cycle_tracker(instr_cycles, DIC)
                        continue
                    DIC += 1
                    not_Done = True
                line= line.replace("lw","")
                line= line.replace("(",",")
                line= line.replace(")","")
                line= line.split(",")
                if(debugMode != 1):
                    DIC += 1
                if(loop == 0):
                    instr_cycles.append(["lw", ["Fetch",""], ["Decode",""], ["Execute",""], ["Memory", ""], ["Write Back", ""]])
                    hazards_handle(stats, instr_cycles, Instructions, lineCount)
                rs = regval[int(line[2])]
                imm = get_imm(line, 1)
                MEM_val = MEM[ rs + imm ] & 0xFFFFFFFF
                # Sign-extend: if bit 31 is set, convert from two's
                # complement to a negative Python int.
                bin_str = format(MEM_val, '32b')
                if bin_str[0] == '1':
                    MEM_val = MEM_val ^ 0xffffffff
                    MEM_val +=1
                    MEM_val = -MEM_val
                regval[int(line[0])]= MEM_val
                f.write('Operation: $' + line[0] + ' = ' + 'MEM[$' + line[2] + ' + ' + line[1] + ']; ' + '\n')
                f.write('PC is now at ' + str(PC) + '\n')
                f.write('Registers that have changed: ' + '$' + line[0] + ' = ' + str(regval[int(line[0])]) + '\n')
            elif(line[0:2] =="sw" and not('sw_' in line)):
                if (debugMode == 1 and loop == 1):
                    if not_Done:
                        not_Done = cycle_tracker(instr_cycles, DIC)
                        continue
                    DIC += 1
                    not_Done = True
                line= line.replace("sw","")
                line= line.replace("(",",")
                line= line.replace(")","")
                line= line.split(",")
                if(debugMode != 1):
                    DIC += 1
                if(loop == 0):
                    instr_cycles.append(["sw", ["Fetch",""], ["Decode",""], ["Execute",""], ["Memory", ""], ["Write Back", ""]])
                    hazards_handle(stats, instr_cycles, Instructions, lineCount)
                imm = get_imm(line, 1)
                MEM_val = regval[int(line[0])]
                MEM[ regval[int(line[2])] + imm ] = MEM_val
                f.write('Operation: MEM[ $' + line[2] + ' + ' + line[1] + ' ] = $' + line[0] + '; ' + '\n')
                f.write('PC is now at ' + str(PC) + '\n')
                f.write('Registers that have changed: None\n')
            elif(line[0:3] =="sub"):
                if(debugMode == 1 and loop == 1):
                    if not_Done:
                        not_Done = cycle_tracker(instr_cycles, DIC)
                        continue
                    DIC += 1
                    not_Done = True
                line = line.replace("sub","")
                line = line.split(",")
                if(debugMode != 1):
                    DIC += 1
                if(loop == 0):
                    instr_cycles.append(["sub", ["Fetch",""], ["Decode",""], ["Execute",""], ["Memory", ""], ["Write Back", ""]])
                    hazards_handle(stats, instr_cycles, Instructions, lineCount)
                regval[int(line[0])] = (regval[int(line[1])] - regval[int(line[2])]) & 0xFFFFFFFF
                f.write('Operation: $' + line[0] + ' = ' + '$' + line[1] + ' - ' + '$' + line[2] + '; ' + '\n')
                f.write('PC is now at ' + str(PC) + '\n')
                f.write('Registers that have changed: ' + '$' + line[0] + ' = ' + str(regval[int(line[0])]) + '\n')
            elif(line[0:3] == "sll"):
                if(debugMode == 1 and loop == 1):
                    if not_Done:
                        not_Done = cycle_tracker(instr_cycles, DIC)
                        continue
                    DIC += 1
                    not_Done = True
                line = line.replace("sll","")
                line = line.split(",")
                if(debugMode != 1):
                    DIC += 1
                if(loop == 0):
                    instr_cycles.append(["sll", ["Fetch",""], ["Decode",""], ["Execute",""], ["Memory", ""], ["Write Back", ""]])
                    hazards_handle(stats, instr_cycles, Instructions, lineCount)
                imm = get_imm(line,2)
                regval[int(line[0])] = (regval[int(line[1])] << imm) & 0xFFFFFFFF
                f.write('Operation: $' + line[0] + ' = ' + '$' + line[1] + ' << ' + line[2] + '; ' + '\n')
                f.write('PC is now at ' + str(PC) + '\n')
                f.write('Registers that have changed: ' + '$' + line[0] + ' = ' + str(regval[int(line[0])]) + '\n')
            lineCount += 1
        # End of one pass: report final machine state and statistics.
        PC = (len(Instructions)-len(labelName)) * 4
        final_print(regval,MEM, PC, DIC)
        print("\n\n**************************************** FINAL CYCLE INFO ****************************************\n")
        print("PIPLINED CYCLES: ", stats.cycle)
        stats.exitSim()
    f.close()
def splitText(text):
    """Return *text* broken into a list of lines at each newline character."""
    pieces = text.split("\n")
    return pieces
def readIn(s):
    """Read file *s* and return its contents with blank lines and
    full-line '#' comments removed."""
    kept = []
    with open(s, "r") as src:
        for raw in src:
            # Skip empty lines and comment lines.
            if raw != "\n" and raw[0] != '#':
                kept.append(raw)
    return "".join(kept)
def main():
    """Interactive entry point: prompt for input/output file names and a
    run mode, strip blank lines from the assembly source, then hand off
    to simulate().  Returns early (without simulating) on 'q'."""
    # Loop until an existing input file (or default) is chosen.
    while(True):
        file_Name = input("Please type input file name or enter for default (proj_A.asm), or q to quit:\n")
        if(file_Name == "q"):
            print("Bye!")
            return
        if(file_Name == ""):
            file_Name = "proj_A.asm"
        try:
            # Probe that the file exists before committing to it.
            f = open(file_Name)
            f.close()
            break
        except FileNotFoundError:
            print('File does not exist')
    # Output file name (not validated; created by simulate()).
    while(True):
        file_NameOut = input("Please type output file name or enter for default (mc.txt), or q to quit:\n")
        if(file_NameOut == "q"):
            print("Bye!")
            return
        if(file_NameOut == ""):
            file_NameOut = "mc.txt"
        break
    # Run mode: 1 = interactive diagnosis, 2 = non-stop.
    while(True):
        user_select = input("select one of the below or q to quit:\n" + \
        "\ta) Diagnosis mode\n" +\
        "\tb) Non-stop mode\n")
        if(user_select == "a"):
            select = 1
            break
        if(user_select == "b"):
            select = 2
            break
        if(user_select == "q"):
            return
        else:
            print("ERROR: Please type valid input\n")
            continue
    h = open(file_Name,"r")
    asm = h.readlines()
    for item in range(asm.count('\n')): # Remove all empty lines '\n'
        asm.remove('\n')
    simulate(asm, file_NameOut, select)
main()
|
[
"rynstewart10@gmail.com"
] |
rynstewart10@gmail.com
|
688973944544c6f2ad0c18a1f82d05ccfc259f1a
|
f82e67dd5f496d9e6d42b4fad4fb92b6bfb7bf3e
|
/scripts/client/gui/battle_control/dynsquadviewlistener.py
|
46819f433e680bed108cdf012b7912d40ccedadf
|
[] |
no_license
|
webiumsk/WOT0.10.0
|
4e4413ed4e7b00e22fb85d25fdae9400cbb4e76b
|
a84f536c73f86d9e8fab559e97f88f99f2ad7e95
|
refs/heads/master
| 2021-01-09T21:55:00.662437
| 2015-10-23T20:46:45
| 2015-10-23T20:46:45
| 44,835,654
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,046
|
py
|
# Embedded file name: scripts/client/gui/battle_control/DynSquadViewListener.py
import BigWorld
from constants import INVITATION_TYPE
from gui.battle_control import g_sessionProvider
from gui.battle_control.requests.context import SendInvitesCtx
from gui.prb_control.prb_helpers import prbInvitesProperty
from adisp import process
class DynSquadViewListener(object):
    """Bridges dynamic-squad actions from the battle Flash UI to the
    prebattle invite system.

    Registers external callbacks on *battleUI* for squad roster events
    (leave/exclude/send/withdraw/accept/reject) and translates them into
    invite requests via g_sessionProvider and self.prbInvites.
    """

    def __init__(self, battleUI):
        super(DynSquadViewListener, self).__init__()
        self.__battleUI = battleUI
        self.__battleUI.addExternalCallbacks({'Battle.UsersRoster.LeaveSquad': self.__onLeaveSquad,
         'Battle.UsersRoster.ExcludedFromSquad': self.__onExcludedFromSquad,
         'Battle.UsersRoster.SendInvitationToSquad': self.__onSentInviteToSquad,
         'Battle.UsersRoster.WithdrawInvitationToSquad': self.__onWithdrawInviteToSquad,
         'Battle.UsersRoster.AcceptInvitationToSquad': self.__onAcceptInviteToSquad,
         'Battle.UsersRoster.RejectInvitationToSquad': self.__onRejectInviteToSquad,
         'Battle.addToDynamicSquad': self.__onSentInviteToSquad,
         'Battle.acceptInviteToDynamicSquad': self.__onAcceptInviteToSquad})

    @prbInvitesProperty
    def prbInvites(self):
        # Backing value supplied by the prbInvitesProperty descriptor.
        return None

    def destroy(self):
        """Unregister all callbacks and drop the UI reference."""
        if self.__battleUI:
            self.__battleUI.removeExternalCallbacks(('Battle.UsersRoster.LeaveSquad', 'Battle.UsersRoster.ExcludedFromSquad', 'Battle.UsersRoster.SendInvitationToSquad', 'Battle.UsersRoster.WithdrawInvitationToSquad', 'Battle.UsersRoster.AcceptInvitationToSquad', 'Battle.UsersRoster.RejectInvitationToSquad', 'Battle.addToDynamicSquad', 'Battle.acceptInviteToDynamicSquad'))
            self.__battleUI = None
        return

    def __onLeaveSquad(self, _, userId):
        # Intentionally unhandled roster event.
        pass

    def __onExcludedFromSquad(self, _, userId):
        # Intentionally unhandled roster event.
        pass

    @process
    def __onSentInviteToSquad(self, _, userId):
        # Async (adisp) request to send a squad invite to userId.
        yield g_sessionProvider.sendRequest(SendInvitesCtx(databaseIDs=(userId,)))

    def __onAcceptInviteToSquad(self, _, userId):
        # Accept the active incoming squad invite created by userId.
        inviteID = self.__getInviteID(userId, True, True)
        if inviteID is not None:
            self.prbInvites.acceptInvite(inviteID)
        return

    def __onWithdrawInviteToSquad(self, _, userId):
        # Revoke our own outgoing squad invite addressed to userId.
        inviteID = self.__getInviteID(userId, False, False)
        if inviteID is not None:
            self.prbInvites.revokeInvite(inviteID)
        return

    def __onRejectInviteToSquad(self, _, userId):
        # Decline the active incoming squad invite created by userId.
        inviteID = self.__getInviteID(userId, True, True)
        if inviteID is not None:
            self.prbInvites.declineInvite(inviteID)
        return

    def __getInviteID(self, userId, isCreator, incomingInvites):
        """Return the clientID of the active SQUAD invite whose creator
        (or receiver, per *isCreator*) is userId, else None."""
        invites = self.prbInvites.getInvites(incoming=incomingInvites, onlyActive=True)
        if isCreator:
            idGetter = lambda i: i.creatorDBID
        else:
            idGetter = lambda i: i.receiverDBID
        for invite in invites:
            if invite.type == INVITATION_TYPE.SQUAD and idGetter(invite) == userId:
                return invite.clientID

        return None
|
[
"info@webium.sk"
] |
info@webium.sk
|
dab71c7c7ca10f4fa9d20b9077001ab35b0c0475
|
2fe41c6d3968a8acad6afb37333134c828462492
|
/binary_to_float-tqdm.py
|
3e38702b5dafad030ff886a8a45f5f2139f76fb2
|
[] |
no_license
|
alex-meadows/binaryparse
|
6d1f9bc65e2b24200c141ceb9d62682323893bd4
|
ecadc1cb64e433ac108b417f1edd47362f60fa72
|
refs/heads/master
| 2022-12-11T07:35:28.460321
| 2020-09-09T22:54:54
| 2020-09-09T22:54:54
| 294,243,144
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,671
|
py
|
import struct
import sys
from tqdm import tqdm

# Initialize 'fileName' with the binary input paths for xi, eta, Dg, and GH
# (in that order) before running.
fileName = "", "", "", ""

# Read each binary file fully; each value is a 4-byte float, so the count of
# floats per file is len(bytes) // 4.
fileContent = []
numbytes = []
for path in fileName:
    with open(path, mode='rb') as fh:
        raw = fh.read()
    fileContent.append(raw)
    numbytes.append(len(raw) // 4)  # 4 comes from the byte size of a float32


def _unpack(idx):
    """Unpack the idx-th file's bytes as a tuple of native-endian float32s."""
    count = numbytes[idx]
    return struct.unpack(str(count) + 'f', fileContent[idx][0:4 * count])


# The parsed float tuples, one per input file.
xi, eta, Dg, GH = (_unpack(i) for i in range(4))

# Optionally dump each series to a text file (one value per line, leading
# blank line, same as before). Writing can take a couple of minutes per file.
ans = input("Write data to txt file? (y/n)")
if ans == 'y':
    # FIX: output handles are now closed via 'with' even if a write fails;
    # the four duplicated write blocks are collapsed into one loop.
    for name, values in zip(fileName, (xi, eta, Dg, GH)):
        with open(name.replace('.', '') + '.txt', 'w') as out:
            out.write('\n')
            for v in tqdm(values):
                out.write(str(v))
                out.write('\n')
sys.exit()
|
[
"noreply@github.com"
] |
alex-meadows.noreply@github.com
|
f092071813dfa5e13e3b8190ae4b7e3c3fb549fd
|
815151e83df53382eba9fbd34b9fec84e11714f0
|
/AWS_IOT/aws_publisher.py
|
9af2f2aceaa341680b3e35e8425be931dca6a448
|
[] |
no_license
|
tadeuamaral/pfc
|
65d65642dcb8b4ab4a01c46c0ab29970af951de9
|
6118c24626fb992fce7bb87f029888a01ba98684
|
refs/heads/master
| 2020-04-22T02:14:35.247752
| 2019-01-30T02:30:34
| 2019-01-30T02:30:34
| 170,041,671
| 0
| 1
| null | 2019-02-10T23:56:13
| 2019-02-10T23:56:12
| null |
UTF-8
|
Python
| false
| false
| 2,397
|
py
|
# -*- coding: utf-8 -*-
#!/bin/bash
import os
import sys
from datetime import datetime
from AWSIoTPythonSDK.MQTTLib import AWSIoTMQTTClient
import time
import json
import argparse
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from configure import pfc_conf
from configure import pfc_mqtt_topic
class aws_publisher:
    """Thin wrapper around AWSIoTMQTTClient that publishes a single MQTT
    message over TLS (port 8883).

    Endpoint and certificate paths come from the project configuration
    modules pfc_conf / pfc_mqtt_topic.
    """

    # Class-level defaults, overwritten per instance in __init__.
    IOT_MQTT_CLIENT = None
    QOS_LEVEL = 1

    def __init__(self, QOS_LEVEL=1):
        """Create and configure the AWS IoT MQTT client.

        Args:
            QOS_LEVEL: MQTT quality-of-service level used for publishing.
        """
        self.QOS_LEVEL = QOS_LEVEL
        self.IOT_MQTT_CLIENT = AWSIoTMQTTClient(pfc_conf.PFC_AWS_IOT_CLIENT_ID)
        self.IOT_MQTT_CLIENT.configureEndpoint(pfc_mqtt_topic.AWS_ENDPOINT, 8883)
        self.IOT_MQTT_CLIENT.configureCredentials(pfc_conf.CA_PATH, pfc_conf.PRIVATE_KEY_PATH, pfc_conf.CERTIFICATE_PATH)
        self.IOT_MQTT_CLIENT.configureAutoReconnectBackoffTime(1, 32, 20)
        # -1 == queue messages without bound while offline.
        self.IOT_MQTT_CLIENT.configureOfflinePublishQueueing(-1)
        self.IOT_MQTT_CLIENT.configureDrainingFrequency(2)
        self.IOT_MQTT_CLIENT.configureConnectDisconnectTimeout(10)
        self.IOT_MQTT_CLIENT.configureMQTTOperationTimeout(20)

    def publish_mqtt_broker(self, topic, messageJson):
        """Connect, publish messageJson on topic, then disconnect.

        messageJson must be a JSON string containing the substrings
        "PFC_SERIAL" and "DEVICE_DT"; otherwise the process exits.
        """
        if messageJson is None:
            print("message is none.")
            sys.exit()
        elif "PFC_SERIAL" not in messageJson or "DEVICE_DT" not in messageJson:
            print("PFC_SERIAL, DEVICE_DT is a demandable.")
            sys.exit()
        self.IOT_MQTT_CLIENT.connect()
        self.IOT_MQTT_CLIENT.publish(topic, messageJson, self.QOS_LEVEL)
        self.IOT_MQTT_CLIENT.disconnect()
        # BUG FIX: corrected "Pbulished" -> "Published" in the log message.
        print("Published MQTT topic: " + str(topic))
if __name__ == '__main__':
    # CLI definition for topic / message / QoS.
    parser = argparse.ArgumentParser()
    parser.add_argument("-t","--topic",action="store",required=True,dest="topic", help="MQTT message `Topic` name")
    parser.add_argument("-m","--message",action="store",required=True,dest="message", help="MQTT message data")
    parser.add_argument("-q","--qos_level",action="store",dest="qos_level", help="MQTT QOS_LEVEL", default=1)
    # NOTE(review): parser.parse_args() is never called, so the arguments
    # declared above are ignored and the hard-coded topic/message below are
    # always used -- confirm whether CLI input was actually intended.
    qos_level = 1
    topic = 'EZFARM/PFC/V1/DEV/00000001/order_subscribe'
    # topic = 'EZFARM/PFC/V1/DEV'
    # Sample actuator order payload; commented entries show the alternative
    # S3-upload order shape.
    message = {
        "PFC_SERIAL": "00000000",
        "DEVICE_DT" : str(datetime.now()),
        # "ORDER" : "UPLOAD",
        # "TARGET" : "S3_UPLOAD",
        # "TYPE" : "DATA_LAKE",
        "ORDER" : "ON",
        "TARGET" : "LED",
        "TYPE" : "ACTUATOR",
        "ORDER_DT" : str(datetime.now())
    }
    # Refresh ORDER_DT immediately before serialization.
    message['ORDER_DT'] = str(datetime.now())
    messageJson = json.dumps(message)
    publisher_aws = aws_publisher(QOS_LEVEL = qos_level)
    publisher_aws.publish_mqtt_broker(topic,messageJson)
|
[
"house9737@gmail.com"
] |
house9737@gmail.com
|
c4140a667a35b2d8e8c828993ec52a8eca4ca50f
|
58fcbbde289b4c0575c06542986e5d1f3a95ff6d
|
/app/main/model/calificacion_producto.py
|
94c1eb261df26b8e1dc6a791943b2794ae2eff2f
|
[] |
no_license
|
Team-3-TCS/api-my-store
|
045691196aa019efe0188912ead4f7fe32c68a9a
|
e3e6d716102280e73932e5eba65b2ff27eec45e0
|
refs/heads/dev
| 2023-04-09T23:43:49.973791
| 2021-04-26T00:21:43
| 2021-04-26T00:21:43
| 351,312,190
| 1
| 0
| null | 2021-04-26T00:21:44
| 2021-03-25T04:49:31
|
Python
|
UTF-8
|
Python
| false
| false
| 570
|
py
|
from .. import db
class Calificacion_producto(db.Model):
    """Product rating: a client's score and comment for one product."""
    __tablename__ = "calificacion_producto"
    # Surrogate primary key.
    id_calificacion = db.Column(
        db.Integer, primary_key=True, autoincrement=True)
    # Rated product (FK to producto.id_producto).
    id_producto = db.Column(db.Integer, db.ForeignKey(
        'producto.id_producto'), nullable=False)
    # Client who submitted the rating (FK to cliente.id_cliente).
    id_cliente = db.Column(db.Integer, db.ForeignKey(
        'cliente.id_cliente'), nullable=False)
    # Numeric score; range not enforced here -- presumably 1-5, TODO confirm.
    puntuacion = db.Column(db.Integer)
    comentario = db.Column(db.String(200))

    def __repr__(self):
        # Repr shows the rated product's id, not this rating's own PK.
        return "<Calificacion_producto '{}'>".format(self.id_producto)
|
[
"eisten.flores@unmsm.edu.pe"
] |
eisten.flores@unmsm.edu.pe
|
10585dad5b6446bed09db2ec7320848d018b4d5e
|
a330f3ba0431b397206cdb02df8e0ac923be2dcb
|
/LyndaCourse7.2.py
|
b03e496140cc8898fb81402685d6b3bbe1038259
|
[] |
no_license
|
coreyderosa/Python-Tkinter
|
d53a5776346eead5e1ced6b688d8ec8c85b22cf8
|
3b7c642f90378db132ac6436fb4509cdde1fdf1d
|
refs/heads/master
| 2021-01-10T22:58:32.650462
| 2016-10-06T02:12:26
| 2016-10-06T02:12:26
| 69,717,974
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 972
|
py
|
# Demonstrates Tk event binding; the same pattern works for keyboard/mouse
# events such as ButtonPress, ButtonRelease, Enter, Leave, Motion, KeyPress,
# KeyRelease, FocusIn, FocusOut.
from tkinter import *
from tkinter import ttk

root = Tk()

def key_press(event):
    # Dump the interesting fields of a key event to the console.
    print('type:{}'.format(event.type))  # numeric event type; 2 is KeyPress
    print('widget:{}'.format(event.widget))  # widget that received the event
    print('char:{}'.format(event.char))  # actual printable character pressed, if any
    print('keysym:{}'.format(event.keysym))  # key symbol: Shift+3 gives '#', left shift gives Shift_L
    print('keycode:{}'.format(event.keycode))  # numeric code of the key, e.g. a = 65

def shortcut(action):
    # Shared handler for the keyboard shortcuts below.
    print(action)

#root.bind('<KeyPress>', key_press)
root.bind('<Control-c>', lambda e: shortcut('Copy'))  # lambda must accept the event arg 'e' for the binding to work
root.bind('<Control-v>', lambda e: shortcut('Paste'))  # lambda must accept the event arg 'e' for the binding to work
root.mainloop()
|
[
"coreyderosa@gmail.com"
] |
coreyderosa@gmail.com
|
b8dae0aba14861e40314b608502313c768f9ecb4
|
62bb149a207550c2ad6a6c319e24d23b08f0bb07
|
/page/admin.py
|
3deb51ee981b51f98cfe44821986dd8e95b522a0
|
[] |
no_license
|
onr20/kaft
|
4a1ab3b9454dd3cc05a24b609c98ef9e667ad235
|
2f52d180963d4a0e3f7c58352b3c88af5c600ec1
|
refs/heads/master
| 2023-04-19T06:23:58.564336
| 2021-05-08T21:50:00
| 2021-05-08T21:50:00
| 365,242,279
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 409
|
py
|
from django.contrib import admin
from .models import Page, Carousel
class PageModify(admin.ModelAdmin):
    """Admin configuration for Page: changelist columns, filtering,
    inline editing, and slug auto-fill."""
    # Auto-fill the slug from the title while typing in the admin form.
    prepopulated_fields = {"slug": ("title",)}
    list_display = (
        'pk',
        'title',
        'slug',
        'status',
        'updated_at',
    )
    list_filter = ('status',)
    # 'status' and 'title' are editable directly from the changelist.
    list_editable = ('status','title',)

# Page uses the customized admin above; Carousel uses the default ModelAdmin.
admin.site.register(Page, PageModify)
admin.site.register(Carousel)
|
[
"onrozcan20@gmail.com"
] |
onrozcan20@gmail.com
|
a115ec089052a5f20b7c8d7cc33f4f75f2e6b2b9
|
eb98ad1577f052f72dedc530191132f55bfbb6ce
|
/code_and_data/core/ppo.py
|
5b424a553c6c2ddc931d84d21f3febac63e671c0
|
[] |
no_license
|
zchaoking/VAKLIL-Supplementary-Files---AAAI-2020
|
9ca3ae540aeff8353e255e3699779848c5d50175
|
37b73a9030f56578de920d77bae7ba41403a3ba8
|
refs/heads/master
| 2022-03-10T07:52:47.451663
| 2019-11-21T15:38:01
| 2019-11-21T15:38:01
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,385
|
py
|
import torch
def ppo_step(policy_net, value_net, optimizer_policy, optimizer_value, optim_value_iternum, states, actions,
             returns, advantages, fixed_log_probs, clip_epsilon, l2_reg):
    """Perform one PPO update: critic regression then clipped policy step.

    Args:
        policy_net: module exposing get_log_prob(states, actions).
        value_net: critic module mapping states -> value predictions.
        optimizer_policy / optimizer_value: optimizers for the two networks.
        optim_value_iternum: number of critic update iterations.
        states, actions: batch tensors.
        returns: regression targets for the critic.
        advantages: advantage estimates weighting the surrogate objective.
        fixed_log_probs: log-probs from the behavior policy (detached).
        clip_epsilon: PPO clipping range for the probability ratio.
        l2_reg: L2 weight-decay coefficient applied to critic parameters.
    """
    # ---- update critic: MSE to returns plus explicit L2 weight decay ----
    for _ in range(optim_value_iternum):
        values_pred = value_net(states)
        value_loss = (values_pred - returns).pow(2).mean()
        # weight decay
        for param in value_net.parameters():
            value_loss += param.pow(2).sum() * l2_reg
        optimizer_value.zero_grad()
        value_loss.backward()
        optimizer_value.step()

    # ---- update policy: clipped surrogate objective ----
    log_probs = policy_net.get_log_prob(states, actions)
    ratio = torch.exp(log_probs - fixed_log_probs)
    surr1 = ratio * advantages
    surr2 = torch.clamp(ratio, 1.0 - clip_epsilon, 1.0 + clip_epsilon) * advantages
    policy_surr = -torch.min(surr1, surr2).mean()
    optimizer_policy.zero_grad()
    policy_surr.backward()
    # Gradient clipping guards against destabilizing policy updates.
    torch.nn.utils.clip_grad_norm_(policy_net.parameters(), 40)
    optimizer_policy.step()
|
[
"fanyang@dhcp-morris-3166.redrover.cornell.edu"
] |
fanyang@dhcp-morris-3166.redrover.cornell.edu
|
cc1e3ec045322da5e27aa08c83831635d126cfb1
|
2e92d3fec5046f0052c40c9b078fe27314b49af4
|
/rekog-working.py
|
0065e4c253222b8d89027b1f1140ec9fca65a69b
|
[] |
no_license
|
mondiman/rekog-ss
|
b2b3f3757bf63aa9dba67ea7f47bbdd10d3228ba
|
f2add14a2d34ab5bf74fad44231d2ce03bbeda71
|
refs/heads/master
| 2020-03-23T14:08:54.192860
| 2018-07-20T19:01:33
| 2018-07-20T19:01:33
| 141,659,371
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,310
|
py
|
from __future__ import print_function
import boto3
from decimal import Decimal
import json
import urllib
from copy import deepcopy
print('Loading function')
rekognition = boto3.client('rekognition')
client = boto3.client('sns')
rekog_max_labels = 10
rekog_min_conf = 80.0
label_watch_list = ["Human", "People", "Person", "Automobile", "Car"]
label_watch_min_conf = 80.0
# --------------- Helper Functions to call Rekognition APIs ------------------
def detect_labels(bucket, key):
    """Run Rekognition DetectLabels on s3://bucket/key using the module-level
    limits rekog_max_labels and rekog_min_conf."""
    s3_image = {"S3Object": {"Bucket": bucket, "Name": key}}
    return rekognition.detect_labels(
        Image=s3_image,
        MaxLabels=rekog_max_labels,
        MinConfidence=rekog_min_conf,
    )
# --------------- Main handler ------------------
def lambda_handler(event, context):
    '''Demonstrates S3 trigger that uses
    Rekognition APIs to detect faces, labels and index faces in S3 Object.
    '''
    #print("Received event: " + json.dumps(event, indent=2))

    # Get the object (bucket + key) that triggered this invocation.
    bucket = event['Records'][0]['s3']['bucket']['name']
    # NOTE(review): urllib.unquote_plus exists only on Python 2; a Python 3
    # runtime would need urllib.parse.unquote_plus -- confirm runtime version.
    key = urllib.unquote_plus(event['Records'][0]['s3']['object']['key'].encode('utf8'))
    try:
        # Calls rekognition DetectFaces API to detect faces in S3 object
        # response = detect_faces(bucket, key)

        # Calls rekognition DetectLabels API to detect labels in S3 object
        response = detect_labels(bucket, key)
        # Only the last record's filename/eventTime survive this loop.
        for record in event['Records']:
            filename = record['s3']['object']['key'];
            #filesize = record['s3']['object']['size'];
            #source = record['requestParameters']['sourceIPAddress'];
            eventTime = record['eventTime'];

        #Iterate on rekognition labels. Enrich and prep them for storage in DynamoDB
        labels_on_watch_list = []
        for label in response['Labels']:
            lbl = label['Name']
            conf = label['Confidence']
            label['OnWatchList'] = False

            #Print labels and confidence to lambda console
            print('{} .. conf %{:.2f}'.format(lbl, conf))

            #Check label watch list and trigger action
            if (lbl.upper() in (label.upper() for label in label_watch_list)
                and conf >= label_watch_min_conf):
                label['OnWatchList'] = True
                labels_on_watch_list.append(deepcopy(label))
        # Build the notification body: "Name - Conf%" per label, concatenated
        # (no separator between labels).
        tosend=""
        for Label in response["Labels"]:
            print ('{0} - {1}%'.format(Label["Name"], Label["Confidence"]))
            tosend+= '{0} - {1}%'.format(Label["Name"], round(Label["Confidence"], 2))

        # Calls rekognition IndexFaces API to detect faces in S3 object and index faces into specified collection
        #response = index_faces(bucket, key)

        # Print response to console.
        print(response)
        # Publish to SNS only when at least one watch-list label was detected.
        if len(labels_on_watch_list) > 0:
            message = client.publish(TargetArn='arn:aws:sns:us-west-2:126009388920:image-recognition-SNS', Message=filename+" "+tosend, Subject=filename)
        return response
    except Exception as e:
        print(e)
        print("Error processing object {} from bucket {}. ".format(key, bucket) +
              "Make sure your object and bucket exist and your bucket is in the same region as this function.")
        raise e
|
[
"noreply@github.com"
] |
mondiman.noreply@github.com
|
d8e0f174795778a50f9f731e86cd07665ffab752
|
4559498420ef7a3de7c8d49761bbb5a38ae6403f
|
/stackGAN/solver_stackGAN.py
|
72fc6a2dd1a9ae806480a9e3d4e219382842017d
|
[] |
no_license
|
Dylan199602/CS230_StackedGAN
|
02163be3c26c92f61602f5718c945d65e881b1c7
|
a335044b9a65c525922f03f908bdff376e7b2626
|
refs/heads/master
| 2021-03-23T23:21:19.031777
| 2019-12-08T17:28:23
| 2019-12-08T17:28:23
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 48,221
|
py
|
from model import Generator
from model import Discriminator
from generator import Generator as G2
from discriminator import Discriminator as D2
from torch.autograd import Variable
from torchvision.utils import save_image
import torch
import torch.nn.functional as F
import numpy as np
import os
import time
import datetime
import gc
from glob import glob
from itertools import product
from random import choice
from imageio import imwrite
import tensorflow as tf
from tqdm import tqdm
from logger_cartoonGAN import get_logger
@tf.function
def gram(x):
    """Normalized Gram matrix of a (batch, h, w, c) feature map: for each
    sample, X^T X over flattened spatial positions divided by element count."""
    shape = tf.shape(x)
    batch = shape[0]
    channels = shape[3]
    flat = tf.reshape(x, [batch, -1, channels])
    denom = tf.cast(tf.size(flat) // batch, tf.float32)
    return tf.matmul(flat, flat, transpose_a=True) / denom
# for StarGAN
class Solver(object):
    def __init__(self,rafdb_loader, config):
        """Initialize configurations.

        Copies all relevant fields off `config` onto the instance: first the
        StarGAN model/training/test settings (building the torch networks),
        then the CartoonGAN settings (optional VGG19 content network, loss
        objects, metrics, and checkpoint path prefixes).
        """
        "StarGAN."
        # Data loader.
        self.rafdb_loader = rafdb_loader

        # Model configurations.
        self.c_dim = config.c_dim
        self.image_size = config.image_size
        self.g_conv_dim = config.g_conv_dim
        self.d_conv_dim = config.d_conv_dim
        self.g_repeat_num = config.g_repeat_num
        self.d_repeat_num = config.d_repeat_num
        self.lambda_cls = config.lambda_cls
        self.lambda_rec = config.lambda_rec
        self.lambda_gp = config.lambda_gp

        # Training configurations.
        # self.dataset = config.dataset
        self.batch_size_starGAN = config.batch_size_starGAN
        self.num_iters = config.num_iters
        self.num_iters_decay = config.num_iters_decay
        self.g_lr = config.g_lr
        self.d_lr = config.d_lr
        self.n_critic = config.n_critic
        self.beta1 = config.beta1
        self.beta2 = config.beta2
        self.resume_iters = config.resume_iters
        # self.selected_attrs = config.selected_attrs

        # Test configurations.
        self.test_iters = config.test_iters

        # Miscellaneous.
        self.use_tensorboard = config.use_tensorboard
        self.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
        # self.device = torch.device('cpu')

        # Directories.
        self.log_dir = config.log_dir
        self.sample_dir = config.sample_dir
        self.model_save_dir = config.model_save_dir
        #Adding this directory to
        self.intermediate_dir = config.intermediate_dir
        self.result_dir_starGAN = config.result_dir_starGAN

        # Step size.
        self.log_step = config.log_step
        self.sample_step = config.sample_step
        self.model_save_step = config.model_save_step
        self.lr_update_step = config.lr_update_step

        # Build the model and tensorboard.
        self.build_model()
        if self.use_tensorboard:
            self.build_tensorboard()

        "CartoonGAN"
        self.debug = config.debug
        # True on Windows ("nt"); presumably used for tqdm ascii bars -- TODO confirm.
        self.ascii = os.name == "nt"
        self.dataset_name = config.dataset_name
        self.light = config.light
        self.source_domain = config.source_domain
        self.target_domain = config.target_domain
        self.gan_type = config.gan_type
        self.epochs = config.epochs
        self.input_size = config.input_size
        self.multi_scale = config.multi_scale
        self.batch_size_cartoonGAN = config.batch_size_cartoonGAN
        self.sample_size = config.sample_size
        self.reporting_steps = config.reporting_steps
        self.content_lambda = float(config.content_lambda)
        self.style_lambda = float(config.style_lambda)
        self.g_adv_lambda =config. g_adv_lambda
        self.d_adv_lambda = config.d_adv_lambda
        self.generator_lr = config.generator_lr
        self.discriminator_lr = config.discriminator_lr
        self.data_dir =config.data_dir
        self.log_dir_cartoonGAN = config.log_dir_cartoonGAN
        self.result_dir_cartoonGAN = config.result_dir_cartoonGAN
        self.checkpoint_dir = config.checkpoint_dir
        self.generator_checkpoint_prefix = config.generator_checkpoint_prefix
        self.discriminator_checkpoint_prefix = config.discriminator_checkpoint_prefix
        self.pretrain_checkpoint_prefix = config.pretrain_checkpoint_prefix
        self.pretrain_model_dir = config.pretrain_model_dir
        self.model_dir = config.model_dir
        self.disable_sampling = config.disable_sampling
        self.ignore_vgg = config.ignore_vgg
        self.pretrain_learning_rate = config.pretrain_learning_rate
        self.pretrain_epochs = config.pretrain_epochs
        self.pretrain_saving_epochs = config.pretrain_saving_epochs
        self.pretrain_reporting_steps = config.pretrain_reporting_steps
        self.pretrain_generator_name = config.pretrain_generator_name
        self.generator_name = config.generator_name
        self.discriminator_name = config.discriminator_name
        self.logger_cartoonGAN = get_logger("Solver", debug=False)
        # NOTE: just minimal demonstration of multi-scale training
        self.sizes = [self.input_size - 32, self.input_size, self.input_size + 32]

        if not self.ignore_vgg:
            # VGG19 truncated at block4: features used for the content loss.
            self.logger_cartoonGAN.info("Setting up VGG19 for computing content loss...")
            from tensorflow.keras.applications import VGG19
            from tensorflow.keras.layers import Conv2D
            input_shape = (self.input_size, self.input_size, 3)
            # download model using kwarg weights="imagenet"
            base_model = VGG19(weights="imagenet", include_top=False, input_shape=input_shape)
            tmp_vgg_output = base_model.get_layer("block4_conv3").output
            tmp_vgg_output = Conv2D(512, (3, 3), activation='linear', padding='same',
                                    name='block4_conv4')(tmp_vgg_output)
            self.vgg = tf.keras.Model(inputs=base_model.input, outputs=tmp_vgg_output)
            self.vgg.load_weights(os.path.expanduser(os.path.join(
                "~", ".keras", "models",
                "vgg19_weights_tf_dim_ordering_tf_kernels_notop.h5")), by_name=True)
        else:
            self.logger_cartoonGAN.info("VGG19 will not be used. "
                                        "Content loss will simply imply pixel-wise difference.")
            self.vgg = None

        self.logger_cartoonGAN.info(f"Setting up objective functions and metrics using {self.gan_type}...")
        self.mae = tf.keras.losses.MeanAbsoluteError()
        self.generator_loss_object = tf.keras.losses.BinaryCrossentropy(from_logits=True)
        # Discriminator objective depends on the GAN flavor.
        if self.gan_type == "gan":
            self.discriminator_loss_object = tf.keras.losses.BinaryCrossentropy(
                from_logits=True)
        elif self.gan_type == "lsgan":
            self.discriminator_loss_object = tf.keras.losses.MeanSquaredError()
        else:
            wrong_msg = f"Non-recognized 'gan_type': {self.gan_type}"
            self.logger_cartoonGAN.critical(wrong_msg)
            raise ValueError(wrong_msg)

        # Running-mean metrics; content/style metrics only exist when enabled.
        self.g_total_loss_metric = tf.keras.metrics.Mean("g_total_loss", dtype=tf.float32)
        self.g_adv_loss_metric = tf.keras.metrics.Mean("g_adversarial_loss", dtype=tf.float32)
        if self.content_lambda != 0.:
            self.content_loss_metric = tf.keras.metrics.Mean("content_loss", dtype=tf.float32)
        if self.style_lambda != 0.:
            self.style_loss_metric = tf.keras.metrics.Mean("style_loss", dtype=tf.float32)
        self.d_total_loss_metric = tf.keras.metrics.Mean("d_total_loss", dtype=tf.float32)
        self.d_real_loss_metric = tf.keras.metrics.Mean("d_real_loss", dtype=tf.float32)
        self.d_fake_loss_metric = tf.keras.metrics.Mean("d_fake_loss", dtype=tf.float32)
        self.d_smooth_loss_metric = tf.keras.metrics.Mean("d_smooth_loss", dtype=tf.float32)
        self.metric_and_names = [
            (self.g_total_loss_metric, "g_total_loss"),
            (self.g_adv_loss_metric, "g_adversarial_loss"),
            (self.d_total_loss_metric, "d_total_loss"),
            (self.d_real_loss_metric, "d_real_loss"),
            (self.d_fake_loss_metric, "d_fake_loss"),
            (self.d_smooth_loss_metric, "d_smooth_loss"),
        ]
        if self.content_lambda != 0.:
            self.metric_and_names.append((self.content_loss_metric, "content_loss"))
        if self.style_lambda != 0.:
            self.metric_and_names.append((self.style_loss_metric, "style_loss"))

        self.logger_cartoonGAN.info("Setting up checkpoint paths...")
        # Prefixes become <checkpoint_dir>/<name>/<name> for tf.train.Checkpoint.
        self.pretrain_checkpoint_prefix = os.path.join(
            self.checkpoint_dir, "pretrain", self.pretrain_checkpoint_prefix)
        self.generator_checkpoint_dir = os.path.join(
            self.checkpoint_dir, self.generator_checkpoint_prefix)
        self.generator_checkpoint_prefix = os.path.join(
            self.generator_checkpoint_dir, self.generator_checkpoint_prefix)
        self.discriminator_checkpoint_dir = os.path.join(
            self.checkpoint_dir, self.discriminator_checkpoint_prefix)
        self.discriminator_checkpoint_prefix = os.path.join(
            self.discriminator_checkpoint_dir, self.discriminator_checkpoint_prefix)
#################################################################################################
"functions for StarGAN"
def build_model(self):
"""Create a generator and a discriminator."""
self.G = Generator(self.g_conv_dim, self.c_dim, self.g_repeat_num)
self.D = Discriminator(self.image_size, self.d_conv_dim, self.c_dim, self.d_repeat_num)
self.g_optimizer = torch.optim.Adam(self.G.parameters(), self.g_lr, [self.beta1, self.beta2])
self.d_optimizer = torch.optim.Adam(self.D.parameters(), self.d_lr, [self.beta1, self.beta2])
self.print_network(self.G, 'G')
self.print_network(self.D, 'D')
self.G.to(self.device)
self.D.to(self.device)
def print_network(self, model, name):
"""Print out the network information."""
num_params = 0
for p in model.parameters():
num_params += p.numel()
print(model)
print(name)
print("The number of parameters: {}".format(num_params))
def restore_model(self, resume_iters):
"""Restore the trained generator and discriminator."""
print('Loading the trained models from step {}...'.format(resume_iters))
G_path = os.path.join(self.model_save_dir, '{}-G.ckpt'.format(resume_iters))
D_path = os.path.join(self.model_save_dir, '{}-D.ckpt'.format(resume_iters))
self.G.load_state_dict(torch.load(G_path, map_location=lambda storage, loc: storage))
self.D.load_state_dict(torch.load(D_path, map_location=lambda storage, loc: storage))
    def build_tensorboard(self):
        """Build a tensorboard logger."""
        # Project-local logger module (not the stdlib logging package).
        from logger import Logger
        self.logger = Logger(self.log_dir)
def update_lr(self, g_lr, d_lr):
"""Decay learning rates of the generator and discriminator."""
for param_group in self.g_optimizer.param_groups:
param_group['lr'] = g_lr
for param_group in self.d_optimizer.param_groups:
param_group['lr'] = d_lr
def reset_grad(self):
"""Reset the gradient buffers."""
self.g_optimizer.zero_grad()
self.d_optimizer.zero_grad()
def denorm(self, x):
"""Convert the range from [-1, 1] to [0, 1]."""
out = (x + 1) / 2
return out.clamp_(0, 1)
    def gradient_penalty(self, y, x):
        """Compute gradient penalty: (L2_norm(dy/dx) - 1)**2.

        WGAN-GP-style penalty: the critic-output gradients w.r.t. the input
        are pushed toward unit L2 norm, averaged over the batch.
        """
        weight = torch.ones(y.size()).to(self.device)
        # create_graph=True keeps the penalty itself differentiable so it can
        # contribute to the discriminator's backward pass.
        dydx = torch.autograd.grad(outputs=y,
                                   inputs=x,
                                   grad_outputs=weight,
                                   retain_graph=True,
                                   create_graph=True,
                                   only_inputs=True)[0]

        # Per-sample L2 norm over all non-batch dimensions.
        dydx = dydx.view(dydx.size(0), -1)
        dydx_l2norm = torch.sqrt(torch.sum(dydx**2, dim=1))
        return torch.mean((dydx_l2norm-1)**2)
def label2onehot(self, labels, dim):
"""Convert label indices to one-hot vectors."""
batch_size = labels.size(0)
out = torch.zeros(batch_size, dim)
out[np.arange(batch_size), labels.long()] = 1
return out
def create_labels(self, c_org, c_dim=5):
"""Generate target domain labels for debugging and testing."""
c_trg_list = []
for i in range(c_dim):
c_trg = self.label2onehot(torch.ones(c_org.size(0))*i, c_dim)
c_trg_list.append(c_trg.to(self.device))
return c_trg_list
def classification_loss(self, logit, target):
"""Compute binary or softmax cross entropy loss."""
return F.cross_entropy(logit, target)
#################################################################################################
"functions for CartoonGAN"
def _save_generated_images(self, batch_x, image_name, nrow=2, ncol=4, dir_):
# NOTE: 0 <= batch_x <= 1, float32, numpy.ndarray
if not isinstance(batch_x, np.ndarray):
batch_x = batch_x.numpy()
n, h, w, c = batch_x.shape
out_arr = np.zeros([h * nrow, w * ncol, 3], dtype=np.uint8)
for (i, j), k in zip(product(range(nrow), range(ncol)), range(n)):
out_arr[(h * i):(h * (i+1)), (w * j):(w * (j+1))] = batch_x[k]
if not os.path.isdir(self.dir_):
os.makedirs(self.dir_)
imwrite(os.path.join(self.dir_, image_name), out_arr)
gc.collect()
return out_arr
    @tf.function
    def random_resize(self, x):
        """Resize x to one of self.sizes (multi-scale training).

        NOTE(review): `choice` is Python-level randomness, so under
        @tf.function it is evaluated at trace time rather than re-drawn on
        every call -- confirm this is the intended behavior.
        """
        size = choice(self.sizes)
        return tf.image.resize(x, (size, size))
    @tf.function
    def image_processing(self, filename, is_train=True):
        """Load a JPEG, optionally augment it, and scale pixels to [-1, 1].

        Training path: random crop with size jittered by +/-10% (never larger
        than the image), random horizontal flip, then resize to crop_size.
        crop_size grows by 32 when multi-scale training is on so the result
        can be rescaled later by random_resize.
        """
        crop_size = self.input_size
        if self.multi_scale and is_train:
            crop_size += 32
        x = tf.io.read_file(filename)
        x = tf.image.decode_jpeg(x, channels=3)
        if is_train:
            sizes = tf.cast(
                crop_size * tf.random.uniform([2], 0.9, 1.1), tf.int32)
            shape = tf.shape(x)[:2]
            # Never crop larger than the image itself.
            sizes = tf.minimum(sizes, shape)
            x = tf.image.random_crop(x, (sizes[0], sizes[1], 3))
            x = tf.image.random_flip_left_right(x)
        x = tf.image.resize(x, (crop_size, crop_size))
        # [0, 255] -> [-1, 1]
        img = tf.cast(x, tf.float32) / 127.5 - 1
        return img
    def get_dataset(self, dataset_name, domain, _type, batch_size):
        """Build an infinite, shuffled, batched dataset iterator for one domain.

        Returns (iterator, steps_per_epoch); the iterator is created once here
        so callers do not rebuild it every epoch.
        """
        files = glob(os.path.join(self.data_dir, dataset_name, f"{_type}{domain}", "*"))
        num_images = len(files)
        self.logger_cartoonGAN.info(
            f"Found {num_images} domain{domain} images in {_type}{domain} folder."
        )

        ds = tf.data.Dataset.from_tensor_slices(files)
        # Shuffle over the full file list, then repeat indefinitely.
        ds = ds.apply(tf.data.experimental.shuffle_and_repeat(num_images))

        def fn(fname):
            # Per-file decode/augment; optionally rescaled for multi-scale training.
            if self.multi_scale:
                return self.random_resize(self.image_processing(fname, True))
            else:
                return self.image_processing(fname, True)

        ds = ds.apply(tf.data.experimental.map_and_batch(fn, batch_size))
        steps = int(np.ceil(num_images/batch_size))
        # user iter(ds) to avoid generating iterator every epoch
        return iter(ds), steps
    @tf.function
    def pass_to_vgg(self, tensor):
        """Map images to VGG19 features; identity when VGG is disabled."""
        # NOTE: self.vgg should be fixed
        if self.vgg is not None:
            tensor = self.vgg(tensor)
        return tensor
@tf.function
def content_loss(self, input_images, generated_images):
return self.mae(input_images, generated_images)
@tf.function
def style_loss(self, input_images, generated_images):
input_images = gram(input_images)
generated_images = gram(generated_images)
return self.mae(input_images, generated_images)
@tf.function
def discriminator_loss(self, real_output, fake_output, smooth_output):
real_loss = self.discriminator_loss_object(tf.ones_like(real_output), real_output)
fake_loss = self.discriminator_loss_object(tf.zeros_like(fake_output), fake_output)
smooth_loss = self.discriminator_loss_object(
tf.zeros_like(smooth_output), smooth_output)
total_loss = real_loss + fake_loss + smooth_loss
return real_loss, fake_loss, smooth_loss, total_loss
@tf.function
def generator_adversarial_loss(self, fake_output):
return self.generator_loss_object(tf.ones_like(fake_output), fake_output)
    @tf.function
    def pretrain_step(self, input_images, generator, optimizer):
        """One generator pretraining step: minimize the VGG content loss only
        (reconstruction of the source domain), no adversarial term."""
        with tf.GradientTape() as tape:
            generated_images = generator(input_images, training=True)
            c_loss = self.content_lambda * self.content_loss(
                self.pass_to_vgg(input_images), self.pass_to_vgg(generated_images))

        gradients = tape.gradient(c_loss, generator.trainable_variables)
        optimizer.apply_gradients(zip(gradients, generator.trainable_variables))

        self.content_loss_metric(c_loss)
    @tf.function
    def train_step(self, source_images, target_images, smooth_images,
                   generator, discriminator, g_optimizer, d_optimizer):
        """One adversarial training step for both generator and discriminator.

        D is trained against real targets, generated fakes, and edge-smoothed
        targets; G minimizes its adversarial loss plus optional VGG content
        and style terms. Both gradient updates are applied and all running
        loss metrics are updated.
        """
        with tf.GradientTape() as g_tape, tf.GradientTape() as d_tape:
            real_output = discriminator(target_images, training=True)
            generated_images = generator(source_images, training=True)
            fake_output = discriminator(generated_images, training=True)
            smooth_out = discriminator(smooth_images, training=True)
            d_real_loss, d_fake_loss, d_smooth_loss, d_total_loss = \
                self.discriminator_loss(real_output, fake_output, smooth_out)
            g_adv_loss = self.g_adv_lambda * self.generator_adversarial_loss(fake_output)
            g_total_loss = g_adv_loss

            # NOTE: self.*_lambdas are fixed
            if self.content_lambda != 0. or self.style_lambda != 0.:
                vgg_generated_images = self.pass_to_vgg(generated_images)
                if self.content_lambda != 0.:
                    c_loss = self.content_lambda * self.content_loss(
                        self.pass_to_vgg(source_images), vgg_generated_images)
                    g_total_loss = g_total_loss + c_loss
                if self.style_lambda != 0.:
                    # Target batch is truncated to match the generated batch size.
                    s_loss = self.style_lambda * self.style_loss(
                        self.pass_to_vgg(target_images[:vgg_generated_images.shape[0]]),
                        vgg_generated_images)
                    g_total_loss = g_total_loss + s_loss

        d_grads = d_tape.gradient(d_total_loss, discriminator.trainable_variables)
        g_grads = g_tape.gradient(g_total_loss, generator.trainable_variables)

        d_optimizer.apply_gradients(zip(d_grads, discriminator.trainable_variables))
        g_optimizer.apply_gradients(zip(g_grads, generator.trainable_variables))

        self.g_total_loss_metric(g_total_loss)
        self.g_adv_loss_metric(g_adv_loss)
        if self.content_lambda != 0.:
            self.content_loss_metric(c_loss)
        if self.style_lambda != 0.:
            self.style_loss_metric(s_loss)
        self.d_total_loss_metric(d_total_loss)
        self.d_real_loss_metric(d_real_loss)
        self.d_fake_loss_metric(d_fake_loss)
        self.d_smooth_loss_metric(d_smooth_loss)
    def pretrain_generator(self):
        """Pretrain the CartoonGAN generator on content loss alone.

        Builds the source-domain dataset, restores (or initializes) a
        generator checkpoint, then runs `pretrain_epochs` of `pretrain_step`
        updates, periodically logging losses and sample grids to TensorBoard
        and saving checkpoints.

        NOTE(review): several `_save_generated_images(...)` calls below carry
        a stray `result_dir_cartoonGAN` token that is not valid Python —
        presumably mangled from a result-directory keyword argument; confirm
        against the original source before running.
        """
        summary_writer = tf.summary.create_file_writer(os.path.join(self.log_dir_cartoonGAN, "pretrain"))
        self.logger_cartoonGAN.info(f"Starting to pretrain generator with {self.pretrain_epochs} epochs...")
        self.logger_cartoonGAN.info(
            f"Building `{self.dataset_name}` dataset with domain `{self.source_domain}`..."
        )
        dataset, steps_per_epoch = self.get_dataset(dataset_name=self.dataset_name,
                                                    domain=self.source_domain,
                                                    _type="train",
                                                    batch_size=self.batch_size_cartoonGAN)
        if self.multi_scale:
            self.logger_cartoonGAN.info(f"Initializing generator with "
                                        f"batch_size_cartoonGAN: {self.batch_size_cartoonGAN}, input_size: multi-scale...")
        else:
            self.logger_cartoonGAN.info(f"Initializing generator with "
                                        f"batch_size_cartoonGAN: {self.batch_size_cartoonGAN}, input_size: {self.input_size}...")
        # Tiny base width in debug mode keeps the model cheap to build.
        generator = G2(base_filters=2 if self.debug else 64, light=self.light)
        # Build the model's variables by calling it on a symbolic input.
        generator(tf.keras.Input(
            shape=(self.input_size, self.input_size, 3),
            batch_size=self.batch_size_cartoonGAN))
        generator.summary()
        self.logger_cartoonGAN.info("Setting up optimizer to update generator's parameters...")
        optimizer = tf.keras.optimizers.Adam(
            learning_rate=self.pretrain_learning_rate,
            beta_1=0.5)
        self.logger_cartoonGAN.info(f"Try restoring checkpoint: `{self.pretrain_checkpoint_prefix}`...")
        try:
            checkpoint = tf.train.Checkpoint(generator=generator)
            status = checkpoint.restore(tf.train.latest_checkpoint(
                os.path.join(self.checkpoint_dir, "pretrain")))
            # Raises AssertionError when no checkpoint was found/matched.
            status.assert_consumed()
            self.logger_cartoonGAN.info(f"Previous checkpoints has been restored.")
            trained_epochs = checkpoint.save_counter.numpy()
            epochs = self.pretrain_epochs - trained_epochs
            if epochs <= 0:
                self.logger_cartoonGAN.info(f"Already trained {trained_epochs} epochs. "
                                            "Set a larger `pretrain_epochs`...")
                return
            else:
                self.logger_cartoonGAN.info(f"Already trained {trained_epochs} epochs, "
                                            f"{epochs} epochs left to be trained...")
        except AssertionError:
            self.logger_cartoonGAN.info(f"Checkpoint is not found, "
                                        f"training from scratch with {self.pretrain_epochs} epochs...")
            trained_epochs = 0
            epochs = self.pretrain_epochs
        if not self.disable_sampling:
            # Fixed validation batch (test split) plus a fixed training batch,
            # re-rendered periodically so progress is visible in TensorBoard.
            val_files = glob(os.path.join(
                self.data_dir, self.dataset_name, f"test{self.source_domain}", "*"))
            val_real_batch = tf.map_fn(
                lambda fname: self.image_processing(fname, False),
                tf.constant(val_files), tf.float32, back_prop=False)
            real_batch = next(dataset)
            while real_batch.shape[0] < self.sample_size:
                real_batch = tf.concat((real_batch, next(dataset)), 0)
            real_batch = real_batch[:self.sample_size]
            with summary_writer.as_default():
                # NOTE(review): stray `result_dir_cartoonGAN` token — invalid syntax.
                img = np.expand_dims(self._save_generated_images(
                    tf.cast((real_batch + 1) * 127.5, tf.uint8),
                    image_name="pretrain_sample_images.png"), 0,result_dir_cartoonGAN)
                tf.summary.image("pretrain_sample_images", img, step=0)
                # NOTE(review): stray `result_dir_cartoonGAN` token — invalid syntax.
                img = np.expand_dims(self._save_generated_images(
                    tf.cast((val_real_batch + 1) * 127.5, tf.uint8),
                    image_name="pretrain_val_sample_images.png",result_dir_cartoonGAN), 0,)
                tf.summary.image("pretrain_val_sample_images", img, step=0)
            gc.collect()
        else:
            self.logger_cartoonGAN.info("Proceeding pretraining without sample images...")
        self.logger_cartoonGAN.info("Starting pre-training loop, "
                                    "setting up summary writer to record progress on TensorBoard...")
        for epoch in range(epochs):
            epoch_idx = trained_epochs + epoch + 1
            for step in tqdm(
                    range(1, steps_per_epoch + 1),
                    desc=f"Pretrain Epoch {epoch + 1}/{epochs}"):
                # NOTE: not following official "for img in dataset" example
                # since it generates new iterator every epoch and can
                # hardly be garbage-collected by python
                image_batch = dataset.next()
                self.pretrain_step(image_batch, generator, optimizer)
                if step % self.pretrain_reporting_steps == 0:
                    global_step = (epoch_idx - 1) * steps_per_epoch + step
                    with summary_writer.as_default():
                        tf.summary.scalar('content_loss',
                                          self.content_loss_metric.result(),
                                          step=global_step)
                        if not self.disable_sampling:
                            fake_batch = tf.cast(
                                (generator(real_batch, training=False) + 1) * 127.5, tf.uint8)
                            # NOTE(review): stray `result_dir_cartoonGAN` token — invalid syntax.
                            img = np.expand_dims(self._save_generated_images(
                                fake_batch,
                                image_name=(f"pretrain_generated_images_at_epoch_{epoch_idx}"
                                            f"_step_{step}.png"),result_dir_cartoonGAN),
                                0,
                            )
                            tf.summary.image('pretrain_generated_images', img, step=global_step)
                    self.content_loss_metric.reset_states()
            with summary_writer.as_default():
                if not self.disable_sampling:
                    val_fake_batch = tf.cast(
                        (generator(val_real_batch, training=False) + 1) * 127.5, tf.uint8)
                    # NOTE(review): stray `result_dir_cartoonGAN` token — invalid syntax.
                    img = np.expand_dims(self._save_generated_images(
                        val_fake_batch,
                        image_name=("pretrain_val_generated_images_at_epoch_"
                                    f"{epoch_idx}_step_{step}.png"),result_dir_cartoonGAN),
                        0,
                    )
                    tf.summary.image('pretrain_val_generated_images', img, step=epoch)
            if epoch % self.pretrain_saving_epochs == 0:
                self.logger_cartoonGAN.info(f"Saving checkpoints after epoch {epoch_idx} ended...")
                checkpoint.save(file_prefix=self.pretrain_checkpoint_prefix)
            gc.collect()
        del dataset
        gc.collect()
#############################################################################################################
"Train StarGAN and CartoonGAN in a stacked model"
    def train(self):
        """Jointly train StarGAN (PyTorch) and CartoonGAN (TensorFlow).

        Phase 1 sets up the StarGAN data/optimizer state; phase 2 sets up the
        CartoonGAN datasets, networks, and checkpoints; the main loop then
        alternates StarGAN discriminator/generator updates with periodic
        CartoonGAN epochs (every `n_critic * 200` StarGAN iterations).

        NOTE(review): `stack_mode` is read below ('A'/'B') but never defined
        in this method — presumably a module-level flag; confirm. Several
        `_save_generated_images(...)` calls also carry a stray
        `result_dir_cartoonGAN` token that is not valid Python.
        """
        # Set data loader.
        data_loader = self.rafdb_loader
        # Fetch fixed inputs for debugging.
        data_iter = iter(data_loader)
        x_fixed_db, c_org_db = next(data_iter)
        x_fixed_db = x_fixed_db.to(self.device)
        c_fixed_list_db = self.create_labels(c_org_db, self.c_dim)
        # Get 15*16*5 faked data for second gan
        for i in range(16):
            data_iter = iter(data_loader)
            x_fixed, c_org = next(data_iter)
            x_fixed = x_fixed.to(self.device)
            if i == 0:
                x_fixed_all = x_fixed
                c_org_all = c_org
            else:
                x_fixed_all = torch.cat((x_fixed_all,x_fixed),0)
                c_org_all = torch.cat((c_org_all,c_org),0)
        c_fixed_list = self.create_labels(c_org_all, self.c_dim)
        # Learning rate cache for decaying.
        g_lr = self.g_lr
        d_lr = self.d_lr
        # Start training from scratch or resume training.
        start_iters = 0
        if self.resume_iters:
            start_iters = self.resume_iters
            self.restore_model(self.resume_iters)
        "Train CartoonGAN setting"
        self.logger_cartoonGAN.info("Setting up summary writer to record progress on TensorBoard...")
        summary_writer = tf.summary.create_file_writer(self.log_dir_cartoonGAN)
        self.logger_cartoonGAN.info(
            f"Starting adversarial training with {self.epochs} epochs, "
            f"batch size: {self.batch_size_cartoonGAN}..."
        )
        self.logger_cartoonGAN.info(f"Building `{self.dataset_name}` "
                                    "datasets for source/target/smooth domains...")
        # Three aligned datasets: photos, cartoons, and edge-smoothed cartoons.
        ds_source, steps_per_epoch = self.get_dataset(dataset_name=self.dataset_name,
                                                      domain=self.source_domain,
                                                      _type="train",
                                                      batch_size=self.batch_size_cartoonGAN)
        ds_target, _ = self.get_dataset(dataset_name=self.dataset_name,
                                        domain=self.target_domain,
                                        _type="train",
                                        batch_size=self.batch_size_cartoonGAN)
        ds_smooth, _ = self.get_dataset(dataset_name=self.dataset_name,
                                        domain=f"{self.target_domain}_smooth",
                                        _type="train",
                                        batch_size=self.batch_size_cartoonGAN)
        self.logger_cartoonGAN.info("Setting up optimizer to update generator and discriminator...")
        g_optimizer = tf.keras.optimizers.Adam(learning_rate=self.generator_lr, beta_1=.5)
        d_optimizer = tf.keras.optimizers.Adam(learning_rate=self.discriminator_lr, beta_1=.5)
        if self.multi_scale:
            self.logger_cartoonGAN.info(f"Initializing generator with "
                                        f"batch_size: {self.batch_size_cartoonGAN}, input_size: multi-scale...")
        else:
            self.logger_cartoonGAN.info(f"Initializing generator with "
                                        f"batch_size: {self.batch_size_cartoonGAN}, input_size: {self.input_size}...")
        generator = G2(base_filters=2 if self.debug else 64, light=self.light)
        # Build variables by calling the model once on a symbolic input.
        generator(tf.keras.Input(
            shape=(self.input_size, self.input_size, 3),
            batch_size=self.batch_size_cartoonGAN))
        self.logger_cartoonGAN.info(f"Searching existing checkpoints: `{self.generator_checkpoint_prefix}`...")
        try:
            g_checkpoint = tf.train.Checkpoint(generator=generator)
            g_checkpoint.restore(
                tf.train.latest_checkpoint(
                    self.generator_checkpoint_dir)).assert_existing_objects_matched()
            self.logger_cartoonGAN.info(f"Previous checkpoints has been restored.")
            trained_epochs = g_checkpoint.save_counter.numpy()
            epochs = self.epochs - trained_epochs
            if epochs <= 0:
                self.logger_cartoonGAN.info(f"Already trained {trained_epochs} epochs. "
                                            "Set a larger `epochs`...")
                return
            else:
                self.logger_cartoonGAN.info(f"Already trained {trained_epochs} epochs, "
                                            f"{epochs} epochs left to be trained...")
        except AssertionError as e:
            self.logger_cartoonGAN.warning(e)
            self.logger_cartoonGAN.warning(
                "Previous checkpoints are not found, trying to load checkpoints from pretraining..."
            )
            # Fall back to the pretrained-generator checkpoint if one exists.
            try:
                g_checkpoint = tf.train.Checkpoint(generator=generator)
                g_checkpoint.restore(tf.train.latest_checkpoint(
                    os.path.join(
                        self.checkpoint_dir, "pretrain"))).assert_existing_objects_matched()
                self.logger_cartoonGAN.info("Successfully loaded "
                                            f"`{self.pretrain_checkpoint_prefix}`...")
            except AssertionError:
                self.logger_cartoonGAN.warning("specified pretrained checkpoint is not found, "
                                               "training from scratch...")
            trained_epochs = 0
            epochs = self.epochs
        if self.multi_scale:
            self.logger_cartoonGAN.info(f"Initializing discriminator with "
                                        f"batch_size: {self.batch_size_cartoonGAN}, input_size: multi-scale...")
        else:
            self.logger_cartoonGAN.info(f"Initializing discriminator with "
                                        f"batch_size: {self.batch_size_cartoonGAN}, input_size: {self.input_size}...")
        if self.debug:
            d_base_filters = 2
        elif self.light:
            d_base_filters = 24
        else:
            d_base_filters = 32
        d = D2(base_filters=d_base_filters)
        d(tf.keras.Input(
            shape=(self.input_size, self.input_size, 3),
            batch_size=self.batch_size_cartoonGAN))
        self.logger_cartoonGAN.info("Searching existing checkpoints: "
                                    f"`{self.discriminator_checkpoint_prefix}`...")
        try:
            d_checkpoint = tf.train.Checkpoint(d=d)
            d_checkpoint.restore(
                tf.train.latest_checkpoint(
                    self.discriminator_checkpoint_dir)).assert_existing_objects_matched()
            self.logger_cartoonGAN.info(f"Previous checkpoints has been restored.")
        except AssertionError:
            self.logger_cartoonGAN.info("specified checkpoint is not found, training from scratch...")
        if not self.disable_sampling:
            # Fixed validation and training batches for TensorBoard previews.
            val_files = glob(os.path.join(
                self.data_dir, self.dataset_name, f"test{self.source_domain}", "*"))
            val_real_batch = tf.map_fn(
                lambda fname: self.image_processing(fname, False),
                tf.constant(val_files), tf.float32, back_prop=False)
            real_batch = next(ds_source)
            while real_batch.shape[0] < self.sample_size:
                real_batch = tf.concat((real_batch, next(ds_source)), 0)
            real_batch = real_batch[:self.sample_size]
            with summary_writer.as_default():
                # NOTE(review): stray `result_dir_cartoonGAN` token — invalid syntax.
                img = np.expand_dims(self._save_generated_images(
                    tf.cast((real_batch + 1) * 127.5, tf.uint8),
                    image_name="gan_sample_images.png",result_dir_cartoonGAN), 0,)
                tf.summary.image("gan_sample_images", img, step=0)
                # NOTE(review): stray `result_dir_cartoonGAN` token — invalid syntax.
                img = np.expand_dims(self._save_generated_images(
                    tf.cast((val_real_batch + 1) * 127.5, tf.uint8),
                    image_name="gan_val_sample_images.png",result_dir_cartoonGAN), 0,)
                tf.summary.image("gan_val_sample_images", img, step=0)
            gc.collect()
        else:
            self.logger_cartoonGAN.info("Proceeding training without sample images...")
        self.logger_cartoonGAN.info("Starting training loop...")
        self.logger_cartoonGAN.info(f"Number of trained epochs: {trained_epochs}, "
                                    f"epochs to be trained: {epochs}, "
                                    f"batch size: {self.batch_size_cartoonGAN}")
        "Start Training!"
        # Start training.
        print('Start training...')
        start_time = time.time()
        for i in range(start_iters, self.num_iters):
            "StarGAN"
            # =================================================================================== #
            #                             1. Preprocess input data                                #
            # =================================================================================== #
            # Fetch real images and labels.
            try:
                x_real, label_org = next(data_iter)
            except:
                data_iter = iter(data_loader)
                x_real, label_org = next(data_iter)
            # Generate target domain labels randomly.
            rand_idx = torch.randperm(label_org.size(0))
            label_trg = label_org[rand_idx]
            c_org = self.label2onehot(label_org, self.c_dim)
            c_trg = self.label2onehot(label_trg, self.c_dim)
            x_real = x_real.to(self.device)           # Input images.
            c_org = c_org.to(self.device)             # Original domain labels.
            c_trg = c_trg.to(self.device)             # Target domain labels.
            label_org = label_org.to(self.device)     # Labels for computing classification loss.
            label_trg = label_trg.to(self.device)     # Labels for computing classification loss.
            # =================================================================================== #
            #                             2. Train the discriminator                              #
            # =================================================================================== #
            # Compute loss with real images.
            out_src, out_cls = self.D(x_real)
            d_loss_real = - torch.mean(out_src)
            d_loss_cls = self.classification_loss(out_cls, label_org)
            # Compute loss with fake images.
            x_fake = self.G(x_real, c_trg)
            out_src, out_cls = self.D(x_fake.detach())
            d_loss_fake = torch.mean(out_src)
            # Compute loss for gradient penalty (WGAN-GP interpolation).
            alpha = torch.rand(x_real.size(0), 1, 1, 1).to(self.device)
            x_hat = (alpha * x_real.data + (1 - alpha) * x_fake.data).requires_grad_(True)
            out_src, _ = self.D(x_hat)
            d_loss_gp = self.gradient_penalty(out_src, x_hat)
            # Backward and optimize.
            d_loss = d_loss_real + d_loss_fake + self.lambda_cls * d_loss_cls + self.lambda_gp * d_loss_gp
            self.reset_grad()
            d_loss.backward()
            self.d_optimizer.step()
            # Logging.
            loss = {}
            loss['D/loss_real'] = d_loss_real.item()
            loss['D/loss_fake'] = d_loss_fake.item()
            loss['D/loss_cls'] = d_loss_cls.item()
            loss['D/loss_gp'] = d_loss_gp.item()
            # =================================================================================== #
            #                               3. Train the generator                                #
            # =================================================================================== #
            # Generator updates only every `n_critic` discriminator steps.
            if (i+1) % self.n_critic == 0:
                # Original-to-target domain.
                x_fake = self.G(x_real, c_trg)
                out_src, out_cls = self.D(x_fake)
                g_loss_fake = - torch.mean(out_src)
                g_loss_cls = self.classification_loss(out_cls, label_trg)
                # Target-to-original domain (cycle reconstruction).
                x_reconst = self.G(x_fake, c_org)
                g_loss_rec = torch.mean(torch.abs(x_real - x_reconst))
                # Backward and optimize.
                g_loss = g_loss_fake + self.lambda_rec * g_loss_rec + self.lambda_cls * g_loss_cls
                self.reset_grad()
                g_loss.backward()
                self.g_optimizer.step()
                # Logging.
                loss['G/loss_fake'] = g_loss_fake.item()
                loss['G/loss_rec'] = g_loss_rec.item()
                loss['G/loss_cls'] = g_loss_cls.item()
            # =================================================================================== #
            #                                 4. Miscellaneous                                    #
            # =================================================================================== #
            # Print out training information.
            if (i+1) % self.log_step == 0:
                et = time.time() - start_time
                et = str(datetime.timedelta(seconds=et))[:-7]
                log = "Elapsed [{}], Iteration [{}/{}]".format(et, i+1, self.num_iters)
                for tag, value in loss.items():
                    log += ", {}: {:.4f}".format(tag, value)
                print(log)
                if self.use_tensorboard:
                    for tag, value in loss.items():
                        self.logger.scalar_summary(tag, value, i+1)
                        self.logger.writer.flush()
            # Translate fixed images for debugging.
            if (i+1) % self.sample_step == 0:
                with torch.no_grad():
                    x_fake_list = [x_fixed_db]
                    for c_fixed in c_fixed_list_db:
                        x_fake_list.append(self.G(x_fixed_db, c_fixed))
                    x_concat = torch.cat(x_fake_list, dim=3)
                    sample_path = os.path.join(self.sample_dir, '{}-images.jpg'.format(i+1))
                    save_image(self.denorm(x_concat.data.cpu()), sample_path, nrow=1, padding=0)
                    print('Saved real and fake images into {}...'.format(sample_path))
            # Saving images for next generator.
            # The iteration should be consistent with the next generator
            if stack_mode=='A':
                if (i+1) % ((self.n_critic)*200) == 0:
                    with torch.no_grad():
                        #create labels for the data
                        for j in range(len(c_fixed_list)):
                            photos = self.denorm(self.G(x_fixed_all, c_fixed_list[j]))
                            for index in range(len(c_fixed_list[0])):
                                intermediate_path = os.path.join(self.intermediate_dir, '{0}-{1}-images.jpg'.format(index,j))
                                save_image(photos[index],intermediate_path)
                        print('Saved intermediate images for next GAN into {}...'.format(intermediate_path))
            # Save model checkpoints.
            if (i+1) % self.model_save_step == 0:
                G_path = os.path.join(self.model_save_dir, '{}-G.ckpt'.format(i+1))
                D_path = os.path.join(self.model_save_dir, '{}-D.ckpt'.format(i+1))
                torch.save(self.G.state_dict(), G_path)
                torch.save(self.D.state_dict(), D_path)
                print('Saved model checkpoints into {}...'.format(self.model_save_dir))
            # Decay learning rates.
            if (i+1) % self.lr_update_step == 0 and (i+1) > (self.num_iters - self.num_iters_decay):
                g_lr -= (self.g_lr / float(self.num_iters_decay))
                d_lr -= (self.d_lr / float(self.num_iters_decay))
                self.update_lr(g_lr, d_lr)
                print ('Decayed learning rates, g_lr: {}, d_lr: {}.'.format(g_lr, d_lr))
            "CartoonGAN"
            # One CartoonGAN epoch every `n_critic * 200` StarGAN iterations.
            if (i+1)% ((self.n_critic)*200) ==0:
                epoch= i
                epoch_idx= i +1
                for step in tqdm(
                        range(1, steps_per_epoch + 1),
                        desc=f'Train {epoch + 1}/{epochs}',
                        total=steps_per_epoch):
                    source_images, target_images, smooth_images = (
                        ds_source.next(), ds_target.next(), ds_smooth.next())
                    self.train_step(source_images, target_images, smooth_images,
                                    generator, d, g_optimizer, d_optimizer)
                    if step % self.reporting_steps == 0:
                        global_step = (epoch_idx - 1) * steps_per_epoch + step
                        with summary_writer.as_default():
                            for metric, name in self.metric_and_names:
                                tf.summary.scalar(name, metric.result(), step=global_step)
                                metric.reset_states()
                            if not self.disable_sampling:
                                fake_batch = tf.cast(
                                    (generator(real_batch, training=False) + 1) * 127.5, tf.uint8)
                                # NOTE(review): stray `result_dir_cartoonGAN` token — invalid syntax.
                                img = np.expand_dims(self._save_generated_images(
                                    fake_batch,
                                    image_name=("gan_generated_images_at_epoch_"
                                                f"{epoch_idx}_step_{step}.png"),result_dir_cartoonGAN),
                                    0,)
                                tf.summary.image('gan_generated_images', img, step=global_step)
                                # output intermediate images for next GAN
                                if stack_mode=='B':
                                    # NOTE(review): stray `result_dir_cartoonGAN` token and a
                                    # `dir_=` keyword after a positional — invalid syntax.
                                    img = np.expand_dims(self._save_generated_images(
                                        fake_batch,
                                        image_name=("gan_generated_images_at_epoch_"
                                                    f"{epoch_idx}_step_{step}.png"),result_dir_cartoonGAN),
                                        0,dir_='datasets/RaFDB/train')
                                    tf.summary.image('gan_generated_images', img, step=global_step)
                        self.logger_cartoonGAN.debug(f"Epoch {epoch_idx}, Step {step} finished, "
                                                     f"{global_step * self.batch_size_cartoonGAN} images processed.")
                with summary_writer.as_default():
                    if not self.disable_sampling:
                        val_fake_batch = tf.cast(
                            (generator(val_real_batch, training=False) + 1) * 127.5, tf.uint8)
                        img = np.expand_dims(self._save_generated_images(
                            val_fake_batch,
                            image_name=("gan_val_generated_images_at_epoch_"
                                        f"{epoch_idx}_step_{step}.png")),
                            0,
                        )
                        tf.summary.image('gan_val_generated_images', img, step=epoch)
                self.logger_cartoonGAN.info(f"Saving checkpoints after epoch {epoch_idx} ended...")
                g_checkpoint.save(file_prefix=self.generator_checkpoint_prefix)
                d_checkpoint.save(file_prefix=self.discriminator_checkpoint_prefix)
                generator.save_weights(os.path.join(self.model_dir, "generator"))
                gc.collect()
        # del ds_source, ds_target, ds_smooth
        gc.collect()
def test(self):
"""Translate images using StarGAN trained on a single dataset."""
# Load the trained generator.
self.restore_model(self.test_iters)
# Set data loader.
data_loader = self.rafdb_loader
with torch.no_grad():
for i, (x_real, c_org) in enumerate(data_loader):
# Prepare input images and target domain labels.
x_real = x_real.to(self.device)
c_trg_list = self.create_labels(c_org, self.c_dim)
# Translate images.
cnt = 1
for c_trg in c_trg_list:
for j in range(16):
result_path = os.path.join(self.result_dir, '{0}-{1}-images.jpg'.format((i*16 + j),cnt))
file = self.denorm(self.G(x_real, c_trg))
save_image(file[j],result_path)
cnt= cnt+1
|
[
"noreply@github.com"
] |
Dylan199602.noreply@github.com
|
5c2f65f3d8265e99550da60ed7b4a8cf09a5c48b
|
6f7e071edf7845c44688d2aa2a7882e3b33c449f
|
/chapter22-lesson4.py
|
b7a6e921285a5a17a8210f51e38fe4b53b44bb8f
|
[] |
no_license
|
amano7/LearningPython
|
af7ead578fcb0e86cecc01becc6d5f02a87bd021
|
6a72f8c80fa6a1e665d89b9912894414dce9c0e8
|
refs/heads/master
| 2020-06-19T05:57:48.407094
| 2019-09-01T05:39:59
| 2019-09-01T05:39:59
| 196,588,866
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 233
|
py
|
def anagram(w1, w2):
    """Return True when *w1* and *w2* are case-insensitive anagrams."""
    return sorted(w1.lower()) == sorted(w2.lower())


if __name__ == "__main__":
    print(anagram("Iceman", "cinema"))
    print(anagram("leaf", "tree"))
    # Descending sort demo (same output as sorted(...)[::-1]).
    print(sorted([1, 2, 5, 4, 3], reverse=True))
|
[
"super7.amano@gmail.com"
] |
super7.amano@gmail.com
|
9bbed5650b78a27c595b8d6796994c735b5bf023
|
4028a44fcd7349f8169260c3817ad7085a3f4919
|
/apps/goods/migrations/0001_initial.py
|
17f3512decc546c70695aa67fb9bd7dcf8d909bf
|
[] |
no_license
|
benjieqiang/dailyfresh
|
1fbae02b293cd38de88c564512cef65864eb222d
|
3116935fd1bf382b9c3ed5126b5ba36f024100a4
|
refs/heads/master
| 2022-12-11T22:40:50.494114
| 2019-09-03T00:49:50
| 2019-09-03T00:49:50
| 205,858,693
| 1
| 0
| null | 2022-12-08T02:34:30
| 2019-09-02T13:03:27
|
Python
|
UTF-8
|
Python
| false
| false
| 7,981
|
py
|
# Generated by Django 2.1 on 2018-12-06 06:10
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the goods app (auto-generated by makemigrations).

    Creates the SKU/SPU/type tables plus the three index-page banner
    tables, then adds the foreign keys between them.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        # Extra images attached to a SKU (FK added via AddField below).
        migrations.CreateModel(
            name='GoodsImage',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_time', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_time', models.DateTimeField(auto_now_add=True, verbose_name='更新时间')),
                ('is_delete', models.BooleanField(default=False, verbose_name='删除标记')),
                ('image', models.ImageField(upload_to='./static/upload/goods', verbose_name='图片路径')),
            ],
            options={
                'abstract': False,
            },
        ),
        # Sellable unit: name/price/stock/status; FKs to SPU and type below.
        migrations.CreateModel(
            name='GoodsSKU',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_time', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_time', models.DateTimeField(auto_now_add=True, verbose_name='更新时间')),
                ('is_delete', models.BooleanField(default=False, verbose_name='删除标记')),
                ('name', models.CharField(max_length=20, verbose_name='商品名称')),
                ('desc', models.CharField(max_length=256, verbose_name='商品描述')),
                ('price', models.CharField(max_length=10, verbose_name='商品价格')),
                ('unite', models.CharField(max_length=20, verbose_name='商品单位')),
                ('image', models.ImageField(upload_to='./static/upload/goods', verbose_name='商品图片')),
                ('stock', models.IntegerField(default=1, verbose_name='商品库存')),
                ('status', models.SmallIntegerField(choices=[(0, '下线'), (1, '上线')], default=1, verbose_name='商品状态')),
            ],
            options={
                'verbose_name_plural': '商品SKU',
                'db_table': 'df_goods_sku',
            },
        ),
        migrations.CreateModel(
            name='GoodsSPU',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_time', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_time', models.DateTimeField(auto_now_add=True, verbose_name='更新时间')),
                ('is_delete', models.BooleanField(default=False, verbose_name='删除标记')),
                ('name', models.CharField(max_length=20, verbose_name='商品SPU名称')),
                ('detail', models.CharField(max_length=128, verbose_name='详情')),
            ],
            options={
                'verbose_name_plural': '商品SPU',
                'db_table': 'df_goods_spu',
            },
        ),
        migrations.CreateModel(
            name='GoodsType',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_time', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_time', models.DateTimeField(auto_now_add=True, verbose_name='更新时间')),
                ('is_delete', models.BooleanField(default=False, verbose_name='删除标记')),
                ('name', models.CharField(max_length=20, verbose_name='种类名称')),
                ('logo', models.CharField(max_length=20, verbose_name='标识')),
                ('image', models.ImageField(upload_to='upload/type', verbose_name='商品类型图片')),
            ],
            options={
                'verbose_name_plural': '商品种类',
                'db_table': 'df_goods_type',
            },
        ),
        migrations.CreateModel(
            name='IndexGoodsBanner',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_time', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_time', models.DateTimeField(auto_now_add=True, verbose_name='更新时间')),
                ('is_delete', models.BooleanField(default=False, verbose_name='删除标记')),
                ('image', models.ImageField(upload_to='./static/upload/banner', verbose_name='图片')),
                ('index', models.SmallIntegerField(default=0, verbose_name='展示顺序')),
                ('sku', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='goods.GoodsSKU', verbose_name='商品')),
            ],
            options={
                'verbose_name_plural': '首页轮播图',
                'db_table': 'df_index_banner',
            },
        ),
        migrations.CreateModel(
            name='IndexPromotionBanner',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_time', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_time', models.DateTimeField(auto_now_add=True, verbose_name='更新时间')),
                ('is_delete', models.BooleanField(default=False, verbose_name='删除标记')),
                ('name', models.CharField(max_length=20, verbose_name='活动名称')),
                ('url', models.URLField(verbose_name='活动链接')),
                ('image', models.ImageField(upload_to='./static/upload/banner', verbose_name='活动图片')),
                ('index', models.SmallIntegerField(default=0, verbose_name='展示顺序')),
            ],
            options={
                'verbose_name_plural': '主页促销活动',
                'db_table': 'df_index_promotion',
            },
        ),
        migrations.CreateModel(
            name='IndexTypeGoodsBanner',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('create_time', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
                ('update_time', models.DateTimeField(auto_now_add=True, verbose_name='更新时间')),
                ('is_delete', models.BooleanField(default=False, verbose_name='删除标记')),
                ('display_type', models.SmallIntegerField(choices=[(0, '文字'), (1, '图片')], default=1)),
                ('index', models.SmallIntegerField(default=0, verbose_name='展示顺序')),
                ('sku', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='goods.GoodsSKU', verbose_name='商品SKU')),
                ('type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='goods.GoodsType', verbose_name='商品类型')),
            ],
            options={
                'verbose_name_plural': '主页分类展示商品',
                # NOTE(review): 'df_index_tupe_goods' looks like a typo for
                # 'type'; fixing it now would require a rename migration.
                'db_table': 'df_index_tupe_goods',
            },
        ),
        migrations.AddField(
            model_name='goodssku',
            name='goods',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='goods.GoodsSPU', verbose_name='商品SPU'),
        ),
        migrations.AddField(
            model_name='goodssku',
            name='type',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='goods.GoodsType', verbose_name='商品种类'),
        ),
        migrations.AddField(
            model_name='goodsimage',
            name='name',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='goods.GoodsSKU', verbose_name='商品'),
        ),
    ]
|
[
"benjieqiang@163.com"
] |
benjieqiang@163.com
|
cd26a97a9b5a11a42584b1b09d43489d5196cbae
|
77be6786ca1b176987aa1f23a992a039b16bd9b6
|
/LeetCode/二叉树/144.py
|
a58c9dd3ad92c087a64f635734d702921846d229
|
[] |
no_license
|
yuyaxiong/interveiw_algorithm
|
b854fb952e94c5d204657a6cd6a90b3775d14a49
|
907a60702ef94e4f79313b0d0c7fe999bc508051
|
refs/heads/master
| 2022-03-20T07:04:14.944704
| 2022-02-07T10:12:25
| 2022-02-07T10:12:25
| 170,670,091
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,185
|
py
|
# Definition for a binary tree node.
from typing import List, Optional
# 144. Binary tree preorder traversal
class TreeNode:
    """Plain binary-tree node: a value plus optional left/right children."""

    def __init__(self, val=0, left=None, right=None):
        self.val, self.left, self.right = val, left, right
# Recursive version
class Solution:
    """Recursive preorder traversal (LeetCode 144)."""

    def preorderTraversal(self, root: Optional[TreeNode]) -> List[int]:
        """Return node values in root -> left -> right order."""
        collected = []
        self.preorder(root, collected)
        return collected

    def preorder(self, root, result):
        # Visit the root first, then recurse into both subtrees.
        if root is not None:
            result.append(root.val)
            self.preorder(root.left, result)
            self.preorder(root.right, result)
# Iterative (non-recursive) version
class Solution1:
    """Iterative preorder traversal using an explicit stack."""

    def preorderTraversal(self, root: Optional[TreeNode]) -> List[int]:
        """Return node values in root -> left -> right order, without recursion."""
        if root is None:
            return []
        stack = [root]
        order = []
        while stack:
            node = stack.pop()
            order.append(node.val)
            # Push right first so the left subtree is processed first.
            if node.right is not None:
                stack.append(node.right)
            if node.left is not None:
                stack.append(node.left)
        return order
|
[
"chinayuyaxiong@sina.com"
] |
chinayuyaxiong@sina.com
|
2d1d47ab6569f49eb7676644d902bb9501b95b6c
|
3978985f6cf08f141cb9d780be4f8b7b2347c16b
|
/part-data/test-hex.py
|
ace3c362bebd65ef1741f086274328e456212db3
|
[
"MIT"
] |
permissive
|
wuljchange/interesting_python
|
1ee5f2474645267fca9eb4e391700dcb5db051e3
|
3fdf9f7f17f7b361be030bb4eadf7aab889b15fe
|
refs/heads/master
| 2020-05-02T18:47:18.087498
| 2020-03-09T04:02:08
| 2020-03-09T04:02:08
| 178,139,898
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 291
|
py
|
import base64
import binascii
if __name__ == "__main__":
    message = b'hello world!'
    # binascii: raw bytes -> lowercase hex digits, and back.
    hex_repr = binascii.b2a_hex(message)
    print(hex_repr)
    print(binascii.a2b_hex(hex_repr))
    # base64.b16encode/b16decode are the uppercase-hex equivalents.
    b16_repr = base64.b16encode(message)
    print(b16_repr)
    print(base64.b16decode(b16_repr))
|
[
"wulinjiang1@kingsoft.com"
] |
wulinjiang1@kingsoft.com
|
7f73826c85e81db663f9b34aed4a3145967e7f40
|
42ff9e20d828aee1497a819037e0f215c8e9313a
|
/start.py
|
681cfd14b6a23b849b0b649a2a2a7f6ed151aaf7
|
[
"MIT"
] |
permissive
|
yangshyrmei24/connecting_flights
|
c11fa7113339cb1a20b92d647ac8a50bcd13a638
|
a0e9ce1c98985755bebadce4d047cd979e90936d
|
refs/heads/master
| 2023-04-09T18:10:23.305800
| 2019-05-27T17:39:03
| 2019-05-27T17:39:03
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 221
|
py
|
from shortest import Database, ConnectingFlights, console
if __name__ == '__main__':
    # Wire up the Mongo-backed database and the flight-search service,
    # then hand control to the interactive console menu.
    database = Database('localhost', 27017, "connecting_flight")
    flights = ConnectingFlights(database)
    console.menu(database, flights)
    console.clear()
|
[
"nuttysalmon@gmail.com"
] |
nuttysalmon@gmail.com
|
97e7426c257696024e0ab90fca3023b7aa6b13d9
|
e790278fa154798103ce11d593502e4841b09cce
|
/Custom_Layers.py
|
b0983c35661e0b1965eff79f2b550d3c252f3ced
|
[] |
no_license
|
enix45/Recommender_keras
|
1ae6c7843ba3492ee00c7ecc2ea7d5c85a606bd4
|
d65bbdec0547b8d1ef88e0c1034710f1729a3b67
|
refs/heads/master
| 2021-09-09T23:54:34.749926
| 2018-03-20T09:05:01
| 2018-03-20T09:05:01
| 125,987,282
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,735
|
py
|
from keras import backend as K
from keras.engine.topology import Layer
class interaction(Layer):
    """Factorization-machine second-order interaction term.

    Computes 0.5 * ((x W)^2 - x^2 W^2), the standard FM identity for the
    sum of all pairwise feature interactions.
    """
    def __init__(self, output_dim, **kwargs):
        self.output_dim = output_dim
        super(interaction, self).__init__(**kwargs)
    def build(self, input_shape):
        # Single trainable projection matrix: (input_dim, output_dim).
        self.kernel = self.add_weight(name='kernel',
                                      shape=(input_shape[1], self.output_dim),
                                      initializer='uniform',
                                      trainable=True)
    def call(self, x):
        square_of_sum = K.pow(K.dot(x, self.kernel), 2)
        sum_of_square = K.dot(K.pow(x, 2), K.pow(self.kernel, 2))
        return 0.5 * (square_of_sum - sum_of_square)
    def compute_output_shape(self, input_shape):
        return (input_shape[0], self.output_dim)
class ans_fm(Layer):
    """Combine FM linear and interaction parts into a scalar prediction.

    inputs[0] is the linear term; inputs[1] is the interaction vector,
    summed over axis 1. A sigmoid is applied when configured for
    classification-style output.
    """
    def __init__(self, sigmoid, **kwargs):
        self.output_dim = 1
        self.sigmoid = sigmoid
        super(ans_fm, self).__init__(**kwargs)
    def call(self, inputs):
        score = inputs[0] + K.sum(inputs[1], axis=1)
        return K.sigmoid(score) if self.sigmoid else score
    def compute_output_shape(self, input_shape):
        return (input_shape[0], self.output_dim)
class dfm(Layer):
    """Shared-embedding front-end for DeepFM.

    Embeds each categorical field into `emb_dim` dimensions, computes the FM
    second-order term 0.5 * ((sum_i e_i)^2 - sum_i e_i^2), and returns it
    concatenated with all per-field embeddings (the deep component's input).
    """
    def __init__(self, input_dims, emb_dim, hid_dims, **kwargs):
        # input_dims: per-field vocabulary sizes; emb_dim: shared embedding
        # width; hid_dims: stored but unused here — NOTE(review): the MLP
        # weight lists below are never populated, presumably the MLP lives
        # in a separate layer; confirm.
        self.input_dims = input_dims
        self.emb_dim = emb_dim
        self.hid_dims = hid_dims
        self.n_cates = len(input_dims)
        super(dfm, self).__init__(**kwargs)
    def build(self, input_shape):
        self.emb_kernels = list()
        self.mlp_kernels = list()  # never filled (see NOTE in __init__)
        self.mlp_bias = list()     # never filled
        # One embedding matrix per categorical field.
        for i in range(self.n_cates):
            self.emb_kernels.append(
                self.add_weight(
                    name = 'emb_' + str(i),
                    shape = (self.input_dims[i], self.emb_dim),
                    initializer = 'uniform',
                    trainable = True))
    def call(self, x):
        # Per-field dense embeddings: e_i = x_i @ W_i.
        embs = [K.dot(x[i], self.emb_kernels[i]) for i in range(self.n_cates)]
        # Accumulate sum(e_i) and sum(e_i^2) across fields.
        for i in range(self.n_cates):
            if i == 0:
                emb = embs[0]
                q_emb = K.dot(K.pow(x[i], 2), K.pow(self.emb_kernels[i], 2))
            else:
                emb = emb + embs[i]
                q_emb = q_emb + K.dot(K.pow(x[i], 2), K.pow(self.emb_kernels[i], 2))
        # FM identity: 0.5 * ((sum e_i)^2 - sum e_i^2).
        fm_ans = 0.5 * (K.pow(emb, 2) - q_emb)
        embs = K.concatenate(embs, axis = -1)
        return K.concatenate([fm_ans, embs], axis = -1)
    def compute_output_shape(self, input_shape):
        #return [(input_shape[0][0], 1), (input_shape[0][0], self.n_cates * self.emb_dim) ]
        return (input_shape[0][0], (self.n_cates + 1) * self.emb_dim)
class ans_dfm(Layer):
    """Final DeepFM output: sigmoid(sum(FM term) + dense(deep features)).

    inputs[0] is the FM interaction vector; inputs[1] is the deep (MLP)
    feature vector. The FM linear term is intentionally not included here.
    """
    def __init__(self, input_dim, **kwargs):
        self.output_dim = 1
        self.input_dim = input_dim
        super(ans_dfm, self).__init__(**kwargs)
    def build(self, input_shape):
        # Dense projection of the deep features down to a single logit.
        self.kernel = self.add_weight(name='kernel',
                                      shape=(self.input_dim, self.output_dim),
                                      initializer='uniform',
                                      trainable=True)
        self.bias = self.add_weight(name='bias',
                                    shape=(self.output_dim,),
                                    initializer='uniform',
                                    trainable=True)
    def call(self, inputs):
        # FM component summed to one scalar per example (no linear term).
        fm_score = K.sum(inputs[0], axis=1, keepdims=True)
        # Deep component: affine projection of the MLP output.
        deep_score = K.bias_add(K.dot(inputs[1], self.kernel), self.bias)
        return K.sigmoid(fm_score + deep_score)
    def compute_output_shape(self, input_shape):
        return (input_shape[0][0], self.output_dim)
class inner_product(Layer):
    """PNN-style inner-product interaction layer.

    call() stacks the nb_cate field tensors along axis 1, projects with
    the (hidden_dim, nb_cate) kernel, squares, and sums over the last
    axis, yielding (batch, hidden_dim) interaction features.
    """
    def __init__(self, nb_cate, hidden_dim, **kwargs):
        # nb_cate: number of categorical fields expected in call()'s list.
        self.nb_cate = nb_cate
        self.hidden_dim = hidden_dim
        super(inner_product, self).__init__(**kwargs)

    def build(self, input_shape):
        self.kernel = self.add_weight(
            name = 'kernel',
            shape = (self.hidden_dim, self.nb_cate),
            initializer = 'uniform',
            trainable = True)

    def call(self, x):
        # x: list of nb_cate tensors; K.stack(x, axis=1) forms the field stack.
        return K.transpose(K.sum(K.pow(K.dot(self.kernel, K.stack(x, axis = 1)), 2), axis = -1))

    def compute_output_shape(self, input_shape):
        return (input_shape[0][0], self.hidden_dim)
class outer_product(Layer):
    """PNN-style outer-product interaction layer.

    call() takes a (batch, emb_dim) summary vector, forms its outer
    product with itself (emb_dim x emb_dim), flattens it, and projects
    to (batch, hidden_dim).
    """
    def __init__(self, emb_dim, hidden_dim, **kwargs):
        self.emb_dim = emb_dim
        self.hidden_dim = hidden_dim
        super(outer_product, self).__init__(**kwargs)

    def build(self, input_shape):
        self.kernel = self.add_weight(
            name = 'kernel',
            shape = (self.emb_dim ** 2, self.hidden_dim),
            initializer = 'uniform',
            trainable = True)

    def call(self, x):
        # x is f_sigma in the paper
        x = K.expand_dims(x, axis = -1)
        # Batched outer product: (batch, emb_dim, 1) * (batch, 1, emb_dim).
        prod = x * K.permute_dimensions(x, (0, 2, 1))
        prod = K.batch_flatten(prod) # prod is now of the shape (batch_size, emb_dim*emb_dim)
        return K.dot(prod, self.kernel)

    def compute_output_shape(self, input_shape):
        return (input_shape[0], self.hidden_dim)
class cross_layer(Layer):
    """One cross layer of a Deep & Cross Network.

    call() expects x = [x0, x_l] and computes the DCN recurrence
    x0 * (x_l . w) + x_l + b, preserving the input shape.
    """
    def __init__(self, hidden_dim, **kwargs):
        self.hidden_dim = hidden_dim
        super(cross_layer, self).__init__(**kwargs)

    def build(self, input_shape):
        # Weight vector w: projects x_l to a per-example scalar.
        self.kernel = self.add_weight(
            name = 'kernel',
            shape = (self.hidden_dim, 1),
            initializer = 'uniform',
            trainable = True)
        self.bias = self.add_weight(
            name = 'bias',
            shape = (self.hidden_dim, ),
            initializer = 'uniform',
            trainable = True)

    def call(self, x):
        # x[0] is the original input x0, x[1] the current layer output x_l.
        return K.bias_add(K.dot(x[1], self.kernel) * x[0] + x[1], self.bias)

    def compute_output_shape(self, input_shape):
        return input_shape
class Gen_prob(Layer):
    """Row-wise dot product of two tensors.

    call() takes x = [a, b] and returns sum(a * b) over axis 1 — one
    scalar per example.
    """
    def __init__(self, **kwargs):
        super(Gen_prob, self).__init__(**kwargs)

    def call(self, x):
        left, right = x[0], x[1]
        return K.sum(left * right, axis=1)

    def compute_output_shape(self, input_shape):
        return (input_shape[0], 1)
class Gen_sim(Layer):
    """Similarity score: sigmoid of the axis-1 sum of the input.

    Returns a (batch, 1) tensor (keepdims preserved on the sum).
    """
    def __init__(self, **kwargs):
        super(Gen_sim, self).__init__(**kwargs)

    def call(self, x):
        summed = K.sum(x, axis=1, keepdims=True)
        return K.sigmoid(summed)

    def compute_output_shape(self, input_shape):
        return (input_shape[0], 1)
|
[
"ubuntu@ip-10-152-8-137.ap-northeast-1.compute.internal"
] |
ubuntu@ip-10-152-8-137.ap-northeast-1.compute.internal
|
681455c58163656fd7be32f57a4dbd6258269b02
|
8dceed86beea3a9f84ba68d8345f1686786d12ff
|
/mtcnn-pytorch-master/test_on_image.py
|
058ad1941fb4b4f17395654a1918c6094817137d
|
[
"MIT"
] |
permissive
|
nishino123/detection-recognization
|
6beb503f24a28dbc07600d72989a16ebcc482028
|
ee74ed41830083adc4301664abb6a08b1c63d332
|
refs/heads/master
| 2022-04-13T07:13:26.040655
| 2020-04-13T12:28:18
| 2020-04-13T12:28:18
| 255,317,963
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 465
|
py
|
# -*- coding: UTF-8 -*-
#@Time : 2020/3/23 @Author : SUNLIN
from src import detect_faces, show_bboxes
from PIL import Image
def face_detect(filename, save_file_name):
    """Detect faces in the image at *filename*, draw boxes and landmarks,
    save the annotated copy to *save_file_name*, and return it."""
    image = Image.open(filename)
    print(image)  # debug: show the loaded PIL image summary
    boxes, points = detect_faces(image)
    annotated = show_bboxes(image, boxes, points)
    annotated.save(save_file_name)
    return annotated


if __name__ == '__main__':
    face_detect('images/office1.jpg', 'images/office1_copy.jpg')
|
[
"your email"
] |
your email
|
3fbdcdb01d4eff3aefb6d1b33806471e8d35d230
|
a69597f476b069ef3dadac8f90187b3d90df8ae8
|
/scripts/perf/alltime.py
|
9eb3d31ba74dd8687f993d46793851f768ffce1d
|
[
"MIT"
] |
permissive
|
duzhuqi/rocFFT
|
b1b917e85d434c8fa48a0670b0b88c9403e33545
|
ebd54e7124deacec29be382d66c78a37bf2ac133
|
refs/heads/master
| 2023-06-18T19:04:30.743400
| 2021-07-06T03:27:37
| 2021-07-06T03:27:37
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 38,939
|
py
|
#!/usr/bin/python3
import sys, getopt
import numpy as np
from math import *
import subprocess
import os
import re # regexp package
import shutil
import tempfile
import perflib
import timing
usage = '''A timing script to generate perf data and plot for major fft 1D/2D/3D cases
Usage:
\talltime.py
\t\t-b Specify dload executable(optional)
\t\t-i Specify test libraries for dload or test executables
\t\t for regular test(appendable)
\t\t-o Specify output directories for raw data
\t\t appendable; defaults to "dir0", "dir1", etc.
\t\t-l Specify labels for runs
\t\t appendable; defaults to "dir0", "dir1", etc.
\t\t-w output directory for graphs and final document
\t\t-S plot speedup (default: 1, disabled: 0)
\t\t-t data type: time (default) or gflops or roofline
\t\t-y secondary axis type: none or gflops or efficiency
\t\t-s short run
\t\t-T do not perform FFTs; just generate document
\t\t-f document format: pdf (default) or docx
\t\t-g generate graphs via Asymptote: 0(default) or 1
\t\t-d device number (default: 0)
\t\t-N Number of samples (default: 10)
\t\t-D dims to test. default: 1,2,3
\t\t-R runtype: report benchmark or efficiency
\t\t-F filename to read problem sizes from
\t\t-p precision: single or double(default)
\t\t-B specify bandwidth for efficiency computation
\t\t-v verbose output
'''
def nextpow(val, radix):
    """Return the smallest power of *radix* strictly greater than *val*."""
    power = 1
    while power <= val:
        power *= radix
    return power
# A class for generating data for figures.
# A class for generating data for figures.
class rundata:
    """One timing sweep (one curve in a figure).

    Describes the FFT problem (dimension, size range, batch, radix,
    aspect ratio, transform type, direction, placement, precision) and
    knows how to name its output file and launch the timing runs.
    """
    def __init__(self, label,
                 dimension, minsize, maxsize, nbatch, radix, ratio, ffttype,
                 direction, inplace, precision):
        self.dimension = dimension
        self.minsize = minsize
        self.maxsize = maxsize
        self.nbatch = nbatch
        self.radix = radix
        self.ratio = ratio
        self.ffttype = ffttype
        self.precision = precision
        self.inplace = inplace
        self.direction = direction
        self.label = label

    def outfilename(self, odir):
        """Return the .dat output path for this run inside directory *odir*."""
        outfile = ""
        outfile += "radix" + str(self.radix)
        outfile += "_dim" + str(self.dimension)
        outfile += "_" + self.precision
        outfile += "_n" + str(self.nbatch)
        if self.direction == 1:
            outfile += "_inv"
        if self.dimension > 1:
            outfile += "_ratio" + "_" + str(self.ratio[0])
        if self.dimension > 2:
            outfile += "_" + str(self.ratio[1])
        outfile += "_" + self.ffttype
        if self.inplace:
            outfile += "_inplace"
        else:
            outfile += "_outofplace"
        outfile += ".dat"
        outfile = os.path.join(odir, outfile)
        return outfile

    def runcmd(self, nsample, inlist, outdirlist, dloadexe, problem_file):
        """Build a timing.Timer configured for this problem.

        With dload, one timer drives all libraries in *inlist*; without,
        the single executable inlist[0] writes to outdirlist[0].
        """
        timer = timing.Timer()
        if dloadexe == None:
            # When not using dload, we just have one input and output dir.
            timer.prog = os.path.abspath(inlist[0])
            timer.out = [ self.outfilename(outdirlist[0]) ]
        else:
            timer.prog = dloadexe
            timer.lib = inlist
            timer.out = [ self.outfilename(x) for x in outdirlist ]
        timer.ntrial = nsample
        timer.nbatch = self.nbatch
        timer.xmin = self.minsize
        timer.xmax = self.maxsize
        if self.dimension > 1:
            # Higher dimensions scale by the aspect ratio.
            timer.ymin = self.minsize * self.ratio[0]
            timer.ymax = self.maxsize * self.ratio[0]
        if self.dimension > 2:
            timer.zmin = self.minsize * self.ratio[1]
            timer.zmax = self.maxsize * self.ratio[1]
        timer.radix = self.radix
        timer.inplace = self.inplace
        timer.direction = self.direction
        timer.dimension = self.dimension
        timer.precision = self.precision
        timer.real = self.ffttype == "r2c"
        timer.problem_file = problem_file
        return timer

    def executerun(self, nsample, inlist, outdirlist, dloadexe, problem_file):
        """Run the timing: once via dload, or once per executable otherwise."""
        if dloadexe != None:
            self.runcmd(nsample, inlist, outdirlist, dloadexe, problem_file).run_cases()
        else:
            for idx in range(min(len(inlist), len(outdirlist))):
                print(idx, ":", inlist[idx], "->", outdirlist[idx])
                self.runcmd(nsample, [inlist[idx]], [outdirlist[idx]], None, problem_file).run_cases()
# Figure class, which contains runs and provides commands to generate figures.
# Figure class, which contains runs and provides commands to generate figures.
class figure:
    """A named collection of rundata runs plus the Asymptote plumbing
    needed to render them into one plot."""
    def __init__(self, name, caption):
        self.name = name
        self.runs = []
        self.caption = caption

    def inputfiles(self, outdirlist):
        """Return the data files for this figure: one per (run, outdir)."""
        import os
        files = []
        for run in self.runs:
            for outdir in outdirlist:
                files.append(run.outfilename(outdir))
        print(files)
        return files

    def labels(self, labellist):
        """Return legend labels: one per (run, directory-label) pair."""
        labels = []
        for run in self.runs:
            for label in labellist:
                labels.append(label + run.label)
        return labels

    def filename(self, outdir, docformat):
        """Return the output figure path (always PDF at present)."""
        outfigure = self.name
        outfigure += ".pdf"
        # if docformat == "pdf":
        #     outfigure += ".pdf"
        # if docformat == "docx":
        #     outfigure += ".png"
        return os.path.join(outdir, outfigure)

    def asycmd(self, docdir, outdirlist, labellist, docformat, datatype, ncompare, secondtype, just1dc2crad2, bandwidth):
        """Assemble the asy command line that plots this figure's data."""
        asycmd = ["asy"]

        asycmd.append("-f")
        asycmd.append("pdf")
        # if docformat == "pdf":
        #     asycmd.append("-f")
        #     asycmd.append("pdf")
        # if docformat == "docx":
        #     asycmd.append("-f")
        #     asycmd.append("png")
        #     asycmd.append("-render")
        #     asycmd.append("8")
        asycmd.append(os.path.join(sys.path[0],"datagraphs.asy"))

        asycmd.append("-u")
        inputfiles = self.inputfiles(outdirlist)
        asycmd.append('filenames="' + ",".join(inputfiles) + '"')

        asycmd.append("-u")
        labels = self.labels(labellist)
        asycmd.append('legendlist="' + ",".join(labels) + '"')

        asycmd.append("-u")
        asycmd.append('speedup=' + str(ncompare))

        if just1dc2crad2 :
            asycmd.append("-u")
            asycmd.append('just1dc2crad2=true')

        if secondtype != "":
            asycmd.append("-u")
            asycmd.append('secondaryaxis="'+secondtype +'"')

        if datatype == "gflops":
            asycmd.append("-u")
            asycmd.append('primaryaxis="gflops"')

        if bandwidth != None:
            asycmd.append("-u")
            asycmd.append('bandwidth=' + str(bandwidth) + '')

        if datatype == "roofline":
            asycmd.append("-u")
            asycmd.append('primaryaxis="roofline"')
            # roofline on multiple devices doesn't really make sense; just use the first device
            with open(os.path.join(outdirlist[0], "gpuid.txt"), "r") as f:
                gpuid = f.read()
            asycmd.append("-u")
            asycmd.append('gpuid="' + gpuid.strip() + '"')

        if len(self.runs) > 0:
            # Pass problem metadata from the first run (all runs in a
            # figure share batch/dimension/ratio/type by construction).
            asycmd.append("-u")
            asycmd.append('batchsize=' + str(self.runs[0].nbatch))
            asycmd.append("-u")
            asycmd.append('problemdim=' + str(self.runs[0].dimension))
            asycmd.append("-u")
            val = 1
            for rat in self.runs[0].ratio:
                val *= rat
            asycmd.append('problemratio=' + str(val))
            asycmd.append("-u")
            if self.runs[0].ffttype == "r2c":
                asycmd.append("realcomplex=true")
            else:
                asycmd.append("realcomplex=false")

        asycmd.append("-o")
        asycmd.append(self.filename(docdir, docformat) )

        return asycmd

    def executeasy(self, docdir, outdirs, labellist, docformat, datatype, ncompare, secondtype,
                   just1dc2crad2, bandwidth, verbose):
        """Run asy; print its captured output on failure or when verbose."""
        fout = tempfile.TemporaryFile(mode="w+")
        ferr = tempfile.TemporaryFile(mode="w+")
        asyproc = subprocess.Popen(self.asycmd(docdir, outdirs, labellist,
                                               docformat, datatype, ncompare, secondtype,
                                               just1dc2crad2, bandwidth),
                                   stdout=fout, stderr=ferr, env=os.environ.copy(),
                                   cwd = sys.path[0])
        asyproc.wait()
        asyrc = asyproc.returncode
        if asyrc != 0:
            print("****asy fail****")
        if verbose or (asyrc != 0):
            fout.seek(0)
            cout = fout.read()
            print(cout)
            ferr.seek(0)
            cerr = ferr.read()
            print(cerr)
        return asyrc
# Function for generating figures for benchmark output
# Function for generating figures for benchmark output
def benchfigs(rundims, shortrun, precision):
    """Build the figure list for a benchmark run.

    For each requested dimension (1/2/3) generates c2c, r2c, and c2r
    figures, in-place and out-of-place, with one curve per radix.
    Short runs use smaller size ranges.
    """
    figs = []
    # FFT directions
    forwards = -1
    backwards = 1

    if 1 in rundims:
        dimension = 1
        nbatch = 1
        min1d = 256 if shortrun else 1024
        max1d = 4000 if shortrun else 536870912

        for inplace in [True, False]:
            fig = figure("1d_c2c" + ("inplace" if inplace else "outofplace") + "_" + precision,
                         "1D complex transforms " + ("in-place" if inplace else "out-of-place"))
            for radix in [2, 3]:
                fig.runs.append( rundata("radix " + str(radix),
                                         dimension, nextpow(min1d, radix), max1d, nbatch,
                                         radix, [], "c2c", forwards, inplace, precision) )
            figs.append(fig)

        for inplace in [True, False]:
            fig = figure("1d_r2c" + ("inplace" if inplace else "outofplace") + "_" + precision,
                         "1D real-to-complex transforms " \
                         + ("in-place" if inplace else "out-of-place"))
            for radix in [2, 3]:
                fig.runs.append( rundata("radix " + str(radix),
                                         dimension, nextpow(min1d, radix), max1d, nbatch,
                                         radix, [], "r2c", forwards, inplace, precision) )
            figs.append(fig)

        for inplace in [True, False]:
            # c2r is an r2c problem run in the backwards direction.
            fig = figure("1d_c2r" + ("inplace" if inplace else "outofplace") + "_" + precision,
                         "1D complex-to-real transforms " \
                         + ("in-place" if inplace else "out-of-place"))
            for radix in [2, 3]:
                fig.runs.append( rundata("radix " + str(radix) ,
                                         dimension, nextpow(min1d, radix), max1d, nbatch,
                                         radix, [], "r2c", backwards, inplace, precision) )
            figs.append(fig)

    if 2 in rundims:
        dimension = 2
        nbatch = 1
        min2d = 64 if shortrun else 128
        max2d = 8192 if shortrun else 32768

        for inplace in [True, False]:
            fig = figure("2d_c2c" + ("inplace" if inplace else "outofplace") + "_" + precision,
                         "2D complex transforms " + ("in-place" if inplace else "out-of-place"))
            for radix in [2, 3]:
                fig.runs.append( rundata("radix " + str(radix), dimension,
                                         nextpow(min2d, radix), max2d, nbatch, radix, [1],
                                         "c2c",
                                         forwards, inplace, precision) )
            figs.append(fig)

        for inplace in [True, False]:
            fig = figure("2d_r2c" + ("inplace" if inplace else "outofplace") + "_" + precision,
                         "2D real-to-complex transforms " \
                         + ("in-place" if inplace else "out-of-place"))
            for radix in [2, 3]:
                fig.runs.append( rundata("radix " + str(radix), dimension,
                                         nextpow(min2d, radix), max2d, nbatch, radix, [1],
                                         "r2c",
                                         forwards, inplace, precision) )
            figs.append(fig)

        for inplace in [True, False]:
            fig = figure("2d_c2r" + ("inplace" if inplace else "outofplace") + "_" + precision,
                         "2D complex-to-real transforms " \
                         + ("in-place" if inplace else "out-of-place"))
            for radix in [2, 3]:
                fig.runs.append( rundata("radix " + str(radix), dimension,
                                         nextpow(min2d, radix), max2d, nbatch, radix, [1],
                                         "r2c",
                                         backwards, inplace, precision) )
            figs.append(fig)

    if 3 in rundims:
        dimension = 3
        min3d = 16
        max3d = 128 if shortrun else 1024
        nbatch = 1

        for inplace in [True, False]:
            fig = figure("3d_c2c" + ("inplace" if inplace else "outofplace") + "_" + precision,
                         "3D complex transforms " + ("in-place" if inplace else "out-of-place"))
            for radix in [2, 3, 5]:
                fig.runs.append( rundata("radix " + str(radix), dimension,
                                         nextpow(min3d, radix), max3d, nbatch, radix, [1,1],
                                         "c2c",
                                         forwards, inplace, precision) )
            figs.append(fig)

        for inplace in [True, False]:
            fig = figure("3d_r2c" + ("inplace" if inplace else "outofplace") + "_" + precision,
                         "3D real-to-complex transforms " \
                         + ("in-place" if inplace else "out-of-place"))
            for radix in [2, 3]:
                fig.runs.append( rundata("radix " + str(radix), dimension,
                                         nextpow(min3d, radix), max3d, nbatch, radix, [1,1],
                                         "r2c",
                                         forwards, inplace, precision) )
            figs.append(fig)

        for inplace in [True, False]:
            fig = figure("3d_c2r" + ("inplace" if inplace else "outofplace") + "_" + precision,
                         "3D complex-to-real transforms " \
                         + ("in-place" if inplace else "out-of-place"))
            for radix in [2, 3]:
                fig.runs.append( rundata("radix " + str(radix), dimension,
                                         nextpow(min3d, radix), max3d, nbatch, radix, [1,1],
                                         "r2c",
                                         backwards, inplace, precision) )
            figs.append(fig)

    return figs
def efficiencyfigs(rundims, shortrun, precision):
    """Build the figure list for an efficiency (roofline) run.

    Generates 1D in-place complex radix-2 figures: each iteration
    doubles the batch size and halves the 1D problem-size range, with a
    floor of 2**5 on the smallest transform length.
    """
    figs = []
    # FFT directions
    forwards = -1
    backwards = 1

    inplace = True
    dimension = 1
    radix = 2

    min1d = 1024
    max1d = 1048576 if shortrun else 268435456 #pow(2,28) gives a floating type :(

    nbatch = 1
    while max1d > min1d:
        fig = figure("1d_c2c_batch" + str(nbatch) + "_radix" + str(radix) + "_" + precision,
                     "1D complex transforms " + ("in-place" if inplace else "out-of-place") + " radix " + str(radix) + " batch " + str(nbatch) )
        fig.runs.append( rundata("radix " + str(radix),
                                 dimension, nextpow(min1d, radix), max1d, nbatch,
                                 radix, [], "c2c", forwards, inplace, precision) )
        figs.append(fig)
        nbatch *= 2
        max1d //= 2
        min1d //= 2
        # BUG FIX: the original wrote "2^5", which is bitwise XOR (== 7);
        # the intended floor on the transform length is 2**5 == 32.
        min1d = max(min1d, 2**5)

    return figs
# Function for generating figures for a performance report
# Function for generating figures for a performance report
def reportfigs(rundims, shortrun, precision):
    """Build the figure list for a performance report.

    For each requested dimension, sweeps (size-range, batch) pairs and
    emits one figure per (transform type, radix), all in-place, plus
    extra aspect-ratio figures in 2D and 3D.
    """
    figs = []
    # FFT directions
    forwards = -1
    backwards = 1

    inplace = True

    if 1 in rundims:
        dimension = 1
        for min1d, max1d, nbatch in [[1024,536870912,1], [8,32768,100000]]:
            for radix in [2, 3, 5, 7]:
                fig = figure("1d_c2c" \
                             + "_radix" + str(radix) \
                             + "_batch" + str(nbatch) + "_" + precision,
                             "1D complex transforms with radix " + str(radix)\
                             + " and batch size " + str(nbatch) + "." )
                fig.runs.append( rundata("radix " + str(radix),
                                         dimension, nextpow(min1d, radix),
                                         max1d, nbatch,
                                         radix, [], "c2c", forwards,
                                         inplace, precision) )
                figs.append(fig)

            for radix in [2, 3, 5, 7]:
                fig = figure("1d_r2c"\
                             + "_radix" + str(radix) \
                             + "_batch" + str(nbatch) + "_" + precision,
                             "1D real-to-complex transforms with radix "\
                             + str(radix) \
                             + " and batch size " + str(nbatch) + ".")
                fig.runs.append( rundata("radix " + str(radix),
                                         dimension, nextpow(min1d, radix),
                                         max1d, nbatch,
                                         radix, [], "r2c", forwards,
                                         inplace, precision) )
                figs.append(fig)

            for radix in [2, 3, 5, 7]:
                fig = figure("1d_c2r" \
                             + "_radix" + str(radix) \
                             + "_batch" + str(nbatch) + "_" + precision,
                             "1D complex-to-real transforms with radix " \
                             + str(radix) \
                             + " and batch size " + str(nbatch) + "." )
                fig.runs.append( rundata("radix " + str(radix),
                                         dimension, nextpow(min1d, radix),
                                         max1d, nbatch,
                                         radix, [], "r2c", backwards,
                                         inplace, precision) )
                figs.append(fig)

    if 2 in rundims:
        dimension = 2
        for min2d, max2d, nbatch in [[128,32768,1], [64,8192,100]]:
            for radix in [2, 3, 5]:
                fig = figure("2d_c2c" \
                             + "_radix" + str(radix) \
                             + "_batch" + str(nbatch) + "_" + precision,
                             "2D complex transforms with radix " + str(radix)\
                             + " and batch size " + str(nbatch) + ".")
                fig.runs.append( rundata( "radix "+ str(radix),
                                          dimension,
                                          nextpow(min2d, radix), max2d,
                                          nbatch,
                                          radix, [1], "c2c",
                                          forwards, inplace, precision) )
                figs.append(fig)

            for radix in [2, 3, 5]:
                fig = figure("2d_r2c" \
                             + "_radix" + str(radix) \
                             + "_batch" + str(nbatch) + "_" + precision,
                             "2D real-to-complex transforms with radix "\
                             + str(radix) \
                             + " and batch size " + str(nbatch) + ".")
                fig.runs.append( rundata( "radix " + str(radix),
                                          dimension,
                                          nextpow(min2d, radix), max2d,
                                          nbatch,
                                          radix, [1], "r2c",
                                          forwards, inplace, precision) )
                figs.append(fig)

            for radix in [2, 3, 5]:
                fig = figure("2d_c2r" \
                             + "_radix" + str(radix) \
                             + "_batch" + str(nbatch) + "_" + precision,
                             "2D complex-to-real transforms with radix "\
                             + str(radix) +\
                             " and batch size " + str(nbatch) + ".")
                fig.runs.append( rundata("radix " + str(radix),
                                         dimension,
                                         nextpow(min2d, radix), max2d,
                                         nbatch,
                                         radix, [1], "r2c",
                                         backwards, inplace, precision) )
                figs.append(fig)

            for radix in [2]:
                fig = figure("2d_c2c_r2" \
                             + "_radix" + str(radix) \
                             + "_batch" + str(nbatch) + "_" + precision,
                             "2D complex transforms "\
                             + "with aspect ratio N:2N with radix "\
                             + str(radix) + " and batch size " + str(nbatch) \
                             + ".")
                fig.runs.append( rundata( "radix 2",
                                          dimension, min2d, max2d, nbatch, 2,
                                          [2], "c2c",
                                          forwards, inplace, precision) )
                figs.append(fig)

            for radix in [2]:
                fig = figure("2d_r2c_r2" \
                             + "_radix" + str(radix) \
                             + "_batch" + str(nbatch) + "_" + precision,
                             "2D real-to-complex transforms with radix "\
                             + str(radix) \
                             + " and batch size " + str(nbatch) + ".")
                fig.runs.append( rundata("radix 2",
                                         dimension, min2d, max2d, nbatch, 2,
                                         [2], "r2c",
                                         forwards, inplace, precision) )
                figs.append(fig)

    if 3 in rundims:
        dimension = 3
        for min3d, max3d, nbatch in [[16,128,1],[4,64,100]]:
            for radix in [2, 3, 5]:
                fig = figure("3d_c2c" \
                             + "_radix" + str(radix) \
                             + "_batch" + str(nbatch) + "_" + precision,
                             "3D complex transforms with radix "\
                             + str(radix) \
                             + " and batch size " + str(nbatch) + ".")
                fig.runs.append( rundata("radix " + str(radix),
                                         dimension,
                                         nextpow(min3d, radix), max3d,
                                         nbatch,
                                         radix, [1,1], "c2c",
                                         forwards, inplace, precision) )
                figs.append(fig)

            for radix in [2, 3]:
                fig = figure("3d_r2c" \
                             + "_radix" + str(radix) \
                             + "_batch" + str(nbatch) + "_" + precision,
                             "3D real-to-complex transforms with radix "\
                             + str(radix)\
                             + " and batch size " + str(nbatch) + ".")
                fig.runs.append( rundata("radix " + str(radix),
                                         dimension,
                                         nextpow(min3d, radix), max3d,
                                         nbatch,
                                         radix, [1,1], "r2c",
                                         forwards, inplace, precision) )
                figs.append(fig)

            # NOTE(review): the figure name/caption below use `radix` left
            # over from the preceding loop (its last value), while the run
            # loop only covers radix 2 — looks unintended; verify.
            fig = figure("3d_c2r" \
                         + "_radix" + str(radix) \
                         + "_batch" + str(nbatch) + "_" + precision,
                         "3D complex-to-real transforms with radix "\
                         + str(radix)
                         + " and batch size " + str(nbatch) + ".")
            for radix in [2]:
                fig.runs.append( rundata("radix " + str(radix),
                                         dimension,
                                         nextpow(min3d, radix), max3d,
                                         nbatch,
                                         radix, [1,1], "r2c",
                                         backwards, inplace, precision) )
            figs.append(fig)

            # NOTE(review): `radix` here is the leftover value (2) from the
            # loop above — confirm this naming is intentional.
            fig = figure("3d_c2c_aspect" \
                         + "_radix" + str(radix) \
                         + "_batch" + str(nbatch) + "_" + precision,
                         "3D complex transforms "\
                         + "with aspect ratio N:N:16N with radix "\
                         + str(radix)\
                         + " and batch size " + str(nbatch) + ".")
            fig.runs.append( rundata("radix 2",
                                     dimension, min3d, max3d, nbatch, 2,
                                     [1,16], "c2c",
                                     forwards, inplace, precision) )
            figs.append(fig)

            fig = figure("3d_r2c_aspect" \
                         + "_radix" + str(radix) \
                         + "_batch" + str(nbatch) + "_" + precision,
                         "3D real-to-complex transforms " \
                         + "with aspect ratio N:N:16N with radix " \
                         + str(radix)\
                         + " and batch size " + str(nbatch) + ".")
            fig.runs.append( rundata("radix 2",
                                     dimension, min3d, max3d, nbatch, 2,
                                     [1,16], "r2c",
                                     forwards, inplace, precision) )
            figs.append(fig)

    return figs
def main(argv):
    """Parse CLI options, run the requested timing sweeps, render the
    figures with Asymptote, and assemble the final document."""
    # Defaults for all command-line options (see `usage` for meanings).
    dloadexe = None
    inlist = []
    outdirlist = []
    labellist = []
    docdir = "doc"
    dryrun = False
    nbatch = 1
    speedup = True
    datatype = "time"
    shortrun = False
    docformat = "pdf"
    devicenum = 0
    doAsy = True
    nsample = 10
    rundims = [1,2,3]
    runtype = "benchmark"
    secondtype = "none"
    precision = "double"
    problem_file = None
    verbose = False
    bandwidth = None

    try:
        opts, args = getopt.getopt(argv,"hb:D:f:Tt:i:o:l:S:sg:d:N:R:w:y:F:p:B:v")
    except getopt.GetoptError:
        print("error in parsing arguments.")
        print(usage)
        sys.exit(2)
    for opt, arg in opts:
        if opt in ("-h"):
            print(usage)
            exit(0)
        elif opt in ("-b"):
            dloadexe = os.path.abspath(arg)
        elif opt in ("-i"):
            inlist.append(arg)
        elif opt in ("-o"):
            outdirlist.append(arg)
        elif opt in ("-l"):
            labellist.append(arg)
        elif opt in ("-w"):
            docdir = arg
        elif opt in ("-T"):
            dryrun = True
        elif opt in ("-s"):
            shortrun = True
        elif opt in ("-g"):
            if int(arg) == 0:
                doAsy = False
            if int(arg) == 1:
                doAsy = True
        elif opt in ("-d"):
            devicenum = int(arg)
        elif opt in ("-D"):
            rundims = []
            for val in arg.split(','):
                rundims.append(int(val))
        elif opt in ("-N"):
            nsample = int(arg)
        elif opt in ("-S"):
            if int(arg) == 0:
                speedup = False
            if int(arg) == 1:
                speedup = True
        elif opt in ("-t"):
            if arg not in ["time", "gflops", "roofline"]:
                print("data type must be time or gflops or roofline")
                print(usage)
                sys.exit(1)
            datatype = arg
        elif opt in ("-y"):
            if arg not in ["none", "gflops", "efficiency"]:
                print("data type must be gflops or none")
                print(usage)
                sys.exit(1)
            secondtype = arg
        elif opt in ("-R"):
            if arg not in ["report", "benchmark", "efficiency"]:
                print("data type must be gflops or none")
                print(usage)
                sys.exit(1)
            runtype = arg
            # Efficiency runs imply roofline plots.
            if runtype == "efficiency":
                datatype = "roofline"
        elif opt in ("-f"):
            goodvals = ["pdf", "docx"]
            if arg not in goodvals:
                print("error: format must in " + " ".join(goodvals))
                print(usage)
                sys.exit(1)
            docformat = arg
        elif opt in ("-p"):
            if arg not in ["single", "double"]:
                print("precision type must be single or double")
                print(usage)
                sys.exit(1)
            precision = arg
        elif opt in ("-F"):
            problem_file = arg
        elif opt in ("-B"):
            bandwidth = float(arg)
        elif opt in ("-v"):
            verbose = True

    print("rundims:")
    print(rundims)

    # Validate that all executables/libraries exist before doing any work.
    if not dryrun:
        if dloadexe != None:
            if not os.path.isfile(dloadexe):
                print("unable to find " + dloadexe)
                sys.exit(1)
        for i in inlist:
            if not os.path.isfile(i):
                print("unable to find " + i)
                print("please specify with -i")
                sys.exit(1)
    print("inputs:", inlist)

    # Fill missing labels/output dirs with defaults ("dir0", "dir1", ...).
    if len(inlist) > len(labellist):
        for i in range(len(labellist), len(inlist)):
            labellist.append("dir" + str(i))
    print("run labels:", labellist)

    for idx in range(len(inlist)):
        inlist[idx] = os.path.abspath(inlist[idx])

    if len(inlist) > len(outdirlist):
        for i in range(len(outdirlist), len(inlist)):
            outdirlist.append(os.path.abspath("dir" + str(i)))
    for idx in range(len(outdirlist)):
        outdirlist[idx] = os.path.abspath(outdirlist[idx])
    print("data output directories:", outdirlist)

    if shortrun:
        print("short run")
    print("output format: " + docformat)
    print("device number: " + str(devicenum))
    print("precision: " + precision)

    docdir = os.path.abspath(docdir)
    print("document output in", docdir)
    if not os.path.exists(docdir):
        os.makedirs(docdir)
    for outdir in outdirlist:
        if not os.path.exists(outdir):
            os.makedirs(outdir)

    # Record machine/GPU specs alongside the raw data.
    if not dryrun:
        machine_specs = perflib.get_machine_specs(devicenum)
        for outdir in outdirlist:
            with open(os.path.join(outdir, "specs.txt"), "w+") as f:
                f.write(str(machine_specs))
            with open(os.path.join(outdir, "gpuid.txt"), "w") as f:
                f.write(machine_specs.gpuid)

    figs = []
    if runtype == "benchmark":
        figs = benchfigs(rundims, shortrun, precision)
    if runtype == "report":
        figs = reportfigs(rundims, shortrun, precision)
    if runtype == "efficiency":
        figs = efficiencyfigs(rundims, shortrun, precision)
    just1dc2crad2 = runtype == "efficiency"

    # Guard against duplicate figure names (which would overwrite files).
    for idx, fig in enumerate(figs):
        for idx2, fig2 in enumerate(figs):
            if idx != idx2 and fig.name == fig2.name:
                print("figures have the same name!")
                print(fig.name)
                print(fig2.name)
                sys.exit(1)

    for fig in figs:
        print(fig.name)
        # Run the tests and put output in the outdirs:
        for run in fig.runs:
            if not dryrun:
                run.executerun(nsample, inlist, outdirlist, dloadexe, problem_file)
                # HACK: problem file should have all the problem sizes
                # that need running, so just one execution should produce
                # all the data we need
                if problem_file:
                    break

        # Compile the data in the outdirs into figures in docdir:
        ncompare = 0
        if speedup:
            ncompare = len(labellist) if dryrun else len(inlist)
        print(fig.labels(labellist))
        if doAsy:
            #plotgflops = runtype == "submission" and not datatype == "gflops"
            print(fig.asycmd(docdir, outdirlist, labellist, docformat, datatype, ncompare, secondtype, just1dc2crad2, bandwidth))
            fig.executeasy(docdir, outdirlist, labellist, docformat, datatype, ncompare, secondtype, just1dc2crad2, bandwidth, verbose)

    if doAsy:
        # Make the document in docdir:
        #
        # HACK: problem file implies html report
        if problem_file:
            from html_report import graph_dirs
            graph_dirs(outdirlist, problem_file, docdir)
        else:
            # otherwise, make other doc types using asymptote figs
            if docformat == "pdf":
                maketex(figs, docdir, outdirlist, labellist, nsample, secondtype, precision)
            if docformat == "docx":
                makedocx(figs, docdir, nsample, secondtype, precision)

    print("Finished! Output in " + docdir)
def binaryisok(dirname, progname):
    """Return True if *progname* exists as a regular file inside *dirname*."""
    return os.path.isfile(os.path.join(dirname, progname))
gflopstext = '''\
GFLOP/s are computed based on the Cooley--Tukey operation count \
for a radix-2 transform, and half that for in the case of \
real-complex transforms. The rocFFT operation count may differ from \
this value: GFLOP/s is provided for the sake of comparison only.'''
efficiencytext = '''\
Efficiency is computed for an idealised FFT which requires exactly \
one read and one write to global memory. In practice, this \
isn't possible for most problem sizes, as the data does \
not fit into cache, and one must use global memory to store \
intermediary results. As FFTs are bandwidth-limited on modern hardware, \
the efficiency is measured against the theoretical maximum bandwidth \
for the device.'''
# Function for generating a tex document in PDF format.
# Function for generating a tex document in PDF format.
def maketex(figs, docdir, outdirlist, labellist, nsample, secondtype, precision):
    """Write figs.tex (intro, device specs, one LaTeX figure per plot)
    into *docdir* and compile it with latexmk."""
    header = '''\
\\documentclass[12pt]{article}
\\usepackage[margin=1in]{geometry}
\\usepackage{graphicx}
\\usepackage{url}
\\begin{document}
'''
    texstring = header

    texstring += "\n\\section{Introduction}\n"

    texstring += "Each data point represents the median of " + str(nsample) + " values, with error bars showing the 95\\% confidence interval for the median. All transforms are " + precision + "-precision.\n\n"

    if secondtype == "gflops":
        texstring += gflopstext + "\n\n"

    texstring += "\\vspace{1cm}\n"

    # texstring += "\\begin{tabular}{ll}"
    # texstring += labelA +" &\\url{"+ dirA+"} \\\\\n"
    # if not dirB == None:
    #     texstring += labelB +" &\\url{"+ dirB+"} \\\\\n"
    # texstring += "\\end{tabular}\n\n"
    # texstring += "\\vspace{1cm}\n"

    texstring += "\n\\section{Device Specification}\n"
    # One subsection of machine/device specs per output directory.
    for idx in range(len(outdirlist)):
        texstring += "\n\\subsection{" + labellist[idx] + "}\n"
        specfilename = os.path.join(outdirlist[idx], "specs.txt")
        if os.path.isfile(specfilename):
            specs = ""
            with open(specfilename, "r") as f:
                specs = f.read()
            # Render "Host info"/"Device info" headers as itemized lists.
            for line in specs.split("\n"):
                if line.startswith("Host info"):
                    texstring += "\\noindent " + line
                    texstring += "\\begin{itemize}\n"
                elif line.startswith("Device info"):
                    texstring += "\\end{itemize}\n"
                    texstring += line
                    texstring += "\\begin{itemize}\n"
                else:
                    if line.strip() != "":
                        texstring += "\\item \\verb|" + line + "|\n"
            texstring += "\\end{itemize}\n"
            texstring += "\n"

    texstring += "\\clearpage\n"
    texstring += "\n\\section{Figures}\n"

    for idx, fig in enumerate(figs):
        print(fig.filename(docdir, "pdf"))
        print(fig.caption)
        texstring += '''
\\centering
\\begin{figure}[htbp]
\\includegraphics[width=\\textwidth]{'''
        texstring += fig.filename("", "pdf")
        texstring += '''}
\\caption{''' + fig.caption + '''}
\\end{figure}
'''
        # Page break after every other figure.
        if (idx % 2) == 0:
            texstring += "\\clearpage\n"

    texstring += "\n\\end{document}\n"

    fname = os.path.join(docdir, 'figs.tex')

    with open(fname, 'w') as outfile:
        outfile.write(texstring)

    fout = open(os.path.join(docdir, "texcmd.log"), 'w+')
    ferr = open(os.path.join(docdir, "texcmd.err"), 'w+')

    latexcmd = ["latexmk", "-pdf", 'figs.tex']
    print(" ".join(latexcmd))
    texproc = subprocess.Popen(latexcmd, cwd=docdir, stdout=fout, stderr=ferr,
                               env=os.environ.copy())
    texproc.wait()
    fout.close()
    ferr.close()
    texrc = texproc.returncode
    if texrc != 0:
        print("****tex fail****")
# Convert a PDF to an EMF using pdf2svg and inkscape.
def pdf2emf(pdfname):
    """Convert *pdfname* to EMF: pdf2svg to SVG, then inkscape to EMF.

    Exits the process with status 1 if either external tool fails;
    returns the EMF path on success.
    """
    svgname = pdfname.replace(".pdf", ".svg")
    svg_proc = subprocess.Popen(["pdf2svg", pdfname, svgname],
                                env=os.environ.copy())
    svg_proc.wait()
    if svg_proc.returncode != 0:
        print("pdf2svg failed!")
        sys.exit(1)

    emfname = pdfname.replace(".pdf", ".emf")
    emf_proc = subprocess.Popen(["inkscape", svgname, "-M", emfname],
                                env=os.environ.copy())
    emf_proc.wait()
    if emf_proc.returncode != 0:
        print("svg2emf failed!")
        sys.exit(1)

    return emfname
# Function for generating a docx using emf files and the docx package.
# Function for generating a docx using emf files and the docx package.
def makedocx(figs, outdir, nsample, secondtype, precision):
    """Assemble figs.docx in *outdir*: intro text, device specs, and one
    EMF image (converted from each figure's PDF) with its caption."""
    import docx

    document = docx.Document()

    document.add_heading('rocFFT benchmarks', 0)

    document.add_paragraph("Each data point represents the median of " + str(nsample) + " values, with error bars showing the 95% confidence interval for the median. Transforms are " + precision + "-precision, forward, and in-place.")

    if secondtype == "gflops":
        document.add_paragraph(gflopstext)
    if secondtype == "efficiency":
        document.add_paragraph(efficiencytext)

    # Include machine/device specs, one paragraph per line.
    specfilename = os.path.join(outdir, "specs.txt")
    if os.path.isfile(specfilename):
        with open(specfilename, "r") as f:
            specs = f.read()
        for line in specs.split("\n"):
            document.add_paragraph(line)

    for fig in figs:
        print(fig.filename(outdir, "docx"))
        print(fig.caption)
        # docx cannot embed PDFs, so convert each figure to EMF first.
        emfname = pdf2emf(fig.filename(outdir, "docx"))
        document.add_picture(emfname, width=docx.shared.Inches(6))
        document.add_paragraph(fig.caption)

    document.save(os.path.join(outdir,'figs.docx'))
# Script entry point: forward CLI arguments (minus program name) to main().
if __name__ == "__main__":
    main(sys.argv[1:])
|
[
"noreply@github.com"
] |
duzhuqi.noreply@github.com
|
d8e049a229f3a22afbce372b735887be1c31d193
|
f1e293397a23629261cd162862914d6d5e7c456c
|
/src/polls/views.py
|
3931e666647bbe9a5cc3ad6138fe31c3fb2b48c3
|
[
"Apache-2.0"
] |
permissive
|
s4boys/serveling
|
0a2c469a8e7829e738fa3dbbd68edaa9e0a58e09
|
c49063b154deea51cd401e2c02b7092ddfad22b3
|
refs/heads/master
| 2020-04-05T04:43:11.645764
| 2018-11-15T11:16:38
| 2018-11-15T11:16:38
| 156,563,616
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 151
|
py
|
from django.shortcuts import render
from django.http import HttpResponse
# Create your views here.
def index(request):
    """Polls index view: always answer with the same plain-text body."""
    body = "hi2"
    return HttpResponse(body)
|
[
"71duha1bif@hft-stuttgart.de"
] |
71duha1bif@hft-stuttgart.de
|
1ed0f4d3aefd0f204f6d3172550834342f343de7
|
f6b1d01a7b31427a2a5e424d7725f8f5c08d1b39
|
/alg/swap-node-pair/index.py
|
0fbcb891db00e7477b4def031310845bab1fc72d
|
[] |
no_license
|
thainam83vn/learns
|
5ce3b89af5b70982554d64f77569abb5ae1cd805
|
239f69b50cf689b46270d6ee097ef62cb9309ad7
|
refs/heads/master
| 2023-01-08T17:57:18.126709
| 2020-12-19T02:55:28
| 2020-12-19T02:55:28
| 197,106,240
| 0
| 0
| null | 2023-01-07T11:49:54
| 2019-07-16T02:31:06
|
Python
|
UTF-8
|
Python
| false
| false
| 415
|
py
|
from libs.node import ListNode, NodeLib
class Solution:
    """LeetCode 24: swap every two adjacent nodes of a singly linked list."""

    def swapPairs(self, head: ListNode) -> ListNode:
        # Guard: zero or one node left — nothing to swap.
        if head is None or head.next is None:
            return head
        # Detach the pair (head, second), recurse on the remainder,
        # then relink so the second node leads the pair.
        second = head.next
        remainder = second.next
        second.next = head
        head.next = self.swapPairs(remainder)
        return second
# Ad-hoc smoke test: swap the pairs of [1, 2, 3, 4] and print the result
# (expected order: 2, 1, 4, 3).
libs = NodeLib()
sol = Solution()
libs.printListNode(sol.swapPairs(libs.createList([1, 2, 3, 4])))
|
[
"thainam83vn@gmail.com"
] |
thainam83vn@gmail.com
|
16db426e69fbbf5f00eebc5b166fe294c8ba0245
|
dc38691dc76478290b52efe2cfd9a814d5d5f510
|
/code/python/338.counting-bits.py
|
68ca9877516c00ff5db3eea2d0be028ba8c25c7c
|
[
"Apache-2.0"
] |
permissive
|
ANYALGO/ANYALGO
|
f6b947a2e5d7f2e07b936e1371902064868359fe
|
09060bd4605d112b7ee66b4571ef309bfac06b6a
|
refs/heads/master
| 2021-08-02T13:05:35.762982
| 2021-07-24T01:57:09
| 2021-07-24T01:57:09
| 254,394,046
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 120
|
py
|
class Solution:
    """LeetCode 338: number of set bits for every integer in [0, num]."""

    def countBits(self, num: int) -> [int]:
        counts = []
        for value in range(num + 1):
            # bin() renders e.g. '0b101'; counting '1' gives the popcount.
            counts.append(bin(value).count("1"))
        return counts
|
[
"Kirin.Toudou@protonmail.com"
] |
Kirin.Toudou@protonmail.com
|
16e0b7ce242e8efacebf2e5f1d3a610e46dd3fda
|
0dd849243d8fc7930d267277a2ee251bc35273ba
|
/library/nxos_vlan
|
269a4ed0a2943ff99aea87d391cf347a3e081d2a
|
[
"Apache-2.0"
] |
permissive
|
my76128/nxos-ansible
|
bd95e49b95fb8b057f67b7932ab6c280cb457173
|
8cfa8b0803a7645d39ef6e44cb994b6e72225d54
|
refs/heads/master
| 2021-01-12T21:01:56.823450
| 2015-04-29T13:57:04
| 2015-04-29T13:57:04
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,628
|
#!/usr/bin/env python
# Copyright 2015 Jason Edelman <jedelman8@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
DOCUMENTATION = '''
---
module: nxos_vlan
short_description: Manages VLAN resources and attributes
description:
- Manages VLAN configurations on NX-API enabled switches
author: Jason Edelman (@jedelman8)
requirements:
- NX-API 1.0
- NX-OS 6.1(2)I3(1)
- pycsco
notes:
- While username and password are not required params, they are
if you are not using the .netauth file. .netauth file is recommended
as it will clean up the each task in the playbook by not requiring
the username and password params for every tasks.
- Using the username and password params will override the .netauth file
options:
vlan_id:
description:
- vlan id or range of VLANs
required: true
default: null
choices: []
aliases: []
name:
description:
- name of VLAN (not supported when using range of VLANs)
required: false
default: null
choices: []
aliases: []
vlan_state:
description:
- Manage the vlan oper state of the VLAN (equiv to state {active | suspend} command
required: false
default: active
choices: ['active','suspend']
aliases: []
admin_state:
description:
- Manage the vlan admin state of the VLAN (equiv to shut/no shut in vlan config mode
required: false
default: up
choices: ['up','down']
aliases: []
state:
description:
- Manage the state of the resource
required: true
default: present
choices: ['present','absent']
aliases: []
host:
description:
- IP Address or hostname (resolvable by Ansible control host)
of the target NX-API enabled switch
required: true
default: null
choices: []
aliases: []
username:
description:
- Username used to login to the switch
required: false
default: null
choices: []
aliases: []
password:
description:
- Password used to login to the switch
required: false
default: null
choices: []
aliases: []
protocol:
description:
- Dictates connection protocol to use for NX-API
required: false
default: http
choices: ['http']
aliases: []
'''
EXAMPLES = '''
# Ensure VLAN 50 exists with the name WEB and is in the shutdown state
- nxos_vlan: vlan_id=50 host={{ inventory_hostname }} admin_state=down name=WEB
# Ensure VLAN is NOT on the device
- nxos_vlan: vlan_id=50 host={{ inventory_hostname }} state=absent
# Ensure a range of VLANs are present on the switch
- nxos_vlan: vlan_id="2-10,20,50,55-60" host={{ inventory_hostname }} state=present
# Ensure a group of VLANs are present with the given names
- nxos_vlan: vlan_id={{ item.vlan_id }} name={{ item.name }} host={{ inventory_hostname }} state=present
with_items:
- vlan_id: 10
name: web
- vlan_id: 20
name: app
- { vlan_id: 30, name: db }
- vlan_id: 40
name: misc
- vlan_id: 99
name: native_vlan
'''
# Import pycsco's NX-API helpers; socket is used later to resolve the target
# host. On failure the error is printed (Python 2 print statements) instead of
# raising, presumably so the module still loads far enough for Ansible to
# report the problem — TODO confirm.
try:
    import socket
    from pycsco.nxos.device import Device
    from pycsco.nxos.device import Auth
    from pycsco.nxos.utils import nxapi_lib
except ImportError as e:
    print '*' * 30
    print e
    print '*' * 30
def main():
    """Ansible entry point: ensure the requested VLAN(s) are present/absent on
    the switch and report proposed, existing and final state plus the commands
    that were (or would be, in check mode) sent. Python 2 code (iteritems,
    print statements elsewhere in the module)."""
    module = AnsibleModule(
        argument_spec=dict(
            vlan_id=dict(required=True, type='str'),
            name=dict(default=None),
            vlan_state=dict(choices=['active', 'suspend'], default='active'),
            state=dict(choices=['present', 'absent'], default='present'),
            admin_state=dict(choices=['up', 'down'], default='up'),
            host=dict(required=True),
            username=dict(type='str'),
            password=dict(type='str'),
        ),
        supports_check_mode=True
    )
    # Credentials fall back to the .netauth file when not passed as params.
    auth = Auth(vendor='cisco', model='nexus')
    username = module.params['username'] or auth.username
    password = module.params['password'] or auth.password
    # Resolve the hostname once; Device talks to the switch over NX-API.
    host = socket.gethostbyname(module.params['host'])
    vlan_id = module.params['vlan_id']
    name = module.params['name']
    vlan_state = module.params['vlan_state']
    admin_state = module.params['admin_state']
    state = module.params['state']
    device = Device(ip=host, username=username, password=password)
    changed = False
    proposed = dict(vlan_id=vlan_id, name=name, vlan_state=vlan_state,
                    admin_state=admin_state)
    # Expand a range expression like "2-10,20" into an explicit sorted list.
    proposed_vlans_list = nxapi_lib.vlan_range_to_list(vlan_id)
    proposed_vlans_list.sort()
    existing_vlans_list = nxapi_lib.get_list_of_vlans(device)
    existing_vlans_list.sort()
    # These are all of the VLANs being proposed that don't already exist
    # on the switch
    vlans_delta = set(proposed_vlans_list).difference(existing_vlans_list)
    # VLANs that are common between what is being proposed and what is on
    # the switch
    vlans_common = set(proposed_vlans_list).intersection(existing_vlans_list)
    if state == 'absent' and (vlan_id == '1' or '1' in vlans_common):
        module.fail_json(msg="You cannot remove VLAN 1. Doh!!")
    if len(proposed_vlans_list) > 1:
        if state == 'present':
            # Only create VLANs that are missing; a shared name is rejected
            # because each VLAN needs its own.
            my_vlans = list(vlans_delta)
            name_param = proposed.get('name', None)
            if name_param and vlans_delta:
                module.fail_json(msg="You cannot set the name for multiple "
                                 + "VLANs. Remove the name parameter from "
                                 + "being used.")
        elif state == 'absent':
            # Only remove VLANs that actually exist on the switch.
            my_vlans = list(vlans_common)
    else:
        my_vlans = proposed_vlans_list
    # my_vlans holds the VLANs that will be manipulated in some way
    final_existing = {}
    final_proposed = {}
    final_commands = {}
    final_postrun = {}
    for vlan in my_vlans:
        existing = nxapi_lib.get_vlan(device, vlan)
        delta = set()
        commands = []
        if state == 'absent':
            if existing:
                command = nxapi_lib.get_remove_vlan_commands(device, vlan)
                commands.append(command)
        elif state == 'present':
            if int(vlan) > 1005 and admin_state == 'down':
                module.fail_json(msg='You cannot shutdown VLANs > 1005')
            # Diff proposed vs existing attributes; only the changed pairs
            # are turned into config commands.
            proposed = dict(vlan_id=vlan,
                            vlan_state=vlan_state,
                            admin_state=admin_state,
                            name=name)
            delta = set(proposed.iteritems()).difference(existing.iteritems())
            if delta:
                command = nxapi_lib.get_vlan_config_commands(device,
                                                             delta,
                                                             vlan)
                commands.append(command)
        cmds = ''
        if commands:
            # NOTE(review): cmds is overwritten each iteration, so only the
            # last entry survives — in practice commands holds at most one
            # command list per VLAN here; confirm before relying on it.
            for each in commands:
                cmds = nxapi_lib.cmd_list_to_string(each)
            final_commands[vlan] = cmds
        final_existing[vlan] = existing
        final_proposed[vlan] = dict(vlan_id=vlan,
                                    vlan_state=vlan_state,
                                    admin_state=admin_state)
    if final_commands:
        if module.check_mode:
            # Check mode: report what would change without touching the device.
            module.exit_json(changed=True, commands=final_commands)
        else:
            for vlan, commands in final_commands.iteritems():
                device.config(commands)
                changed = True
    # Re-read each VLAN to capture the post-change state for the report.
    for vlan in my_vlans:
        final_postrun[vlan] = nxapi_lib.get_vlan(device, vlan)
    results = {}
    results['proposed'] = proposed
    results['existing'] = final_existing
    results['new'] = final_postrun
    results['state'] = state
    results['commands'] = final_commands
    results['changed'] = changed
    module.exit_json(**results)
# Ansible convention: the star import injects AnsibleModule at module scope,
# then the module runs unconditionally when executed by Ansible.
from ansible.module_utils.basic import *
main()
|
[
"jedelman8@gmail.com"
] |
jedelman8@gmail.com
|
|
4501c661e8f14f94c93296355442f997beee8ef4
|
8e5c37560d9b03e30b00a1d4b423013882eb2d55
|
/hzpzp/scripts/template_cff.py
|
8949802b6bbb32c422040223bc0e14cc5646a148
|
[] |
no_license
|
syuvivida/MonoHiggsSignal_13TeV
|
ac363aab91fe4a2f5562884fce3bffb8471fc8eb
|
5428633898c61dbf021e82c630a08da23192ccd4
|
refs/heads/master
| 2021-05-25T11:42:22.048512
| 2020-04-16T11:44:02
| 2020-04-16T11:44:02
| 42,017,135
| 0
| 1
| null | 2015-10-05T15:40:44
| 2015-09-06T20:03:32
|
Python
|
UTF-8
|
Python
| false
| false
| 550
|
py
|
import FWCore.ParameterSet.Config as cms
# link to cards:
# https://github.com/cms-sw/genproductions/tree/31b6e7510443b74e0f9aac870e4eb9ae30c19d65/bin/MadGraph5_aMCatNLO/cards/production/13TeV/monoHiggs/PROCESS/TYPE
# CMSSW template fragment: run a MadGraph gridpack through the external LHE
# producer. 'LOCATION' (and PROCESS/TYPE in the URL above) are placeholders
# substituted when the template is instantiated.
externalLHEProducer = cms.EDProducer("ExternalLHEProducer",
    # Path to the gridpack tarball — filled in per sample.
    args = cms.vstring('LOCATION'),
    nEvents = cms.untracked.uint32(5000),
    numberOfParameters = cms.uint32(1),
    outputFile = cms.string('cmsgrid_final.lhe'),
    scriptName = cms.FileInPath('GeneratorInterface/LHEInterface/data/run_generic_tarball_cvmfs.sh')
)
|
[
"dburns@ucdavis.edu"
] |
dburns@ucdavis.edu
|
b8b43d0e7996b984d9e9323ff65e77ef77928c37
|
c88082a909c5d6bb741465bd37063ceb7bb4d887
|
/crawler_ex00_1 2.py
|
a37a8f05ce7f44c91e41e905d718b363a0747033
|
[] |
no_license
|
raningtky/python
|
38b0ad18b716de53e58c0a4b6d56d458986a090b
|
b8a1557afa32140ff05e4ae9c5943441bef74af6
|
refs/heads/master
| 2021-01-01T03:34:20.197627
| 2017-08-10T09:26:19
| 2017-08-10T09:26:19
| 56,868,239
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 493
|
py
|
# -*- coding: utf-8 -*-
# CopyRight by heibanke
import urllib
from bs4 import BeautifulSoup
import re
# Crawler challenge: each page's <h3> text contains the number that forms the
# next URL path segment; follow the chain until no number is found.
# Python 2 code (urllib.urlopen, print statements).
url='http://www.heibanke.com/lesson/crawler_ex00/'
number=['']
loops = 0
while True:
    # Fetch the current page and parse it.
    content = urllib.urlopen(url+number[0])
    bs_obj = BeautifulSoup(content,"html.parser")
    tag_number = bs_obj.find("h3")
    # Extract the next number from the heading text.
    number= re.findall(r'\d+',tag_number.get_text())
    if not number or loops>100:
        # Stop at the final page (no number) or after a 100-hop safety limit.
        break
    else:
        print number[0]
        loops+=1
# Show the text of the last page reached.
print bs_obj.text
|
[
"hxt1108@163.com"
] |
hxt1108@163.com
|
fb6d30d0717d9671573299e2c339d1946962da2f
|
16fcf452e6165a0de5bc540c57b6e6b82d822bb1
|
/Learntek_code/25_Sep_18/fun12.py
|
25339a75bfd72a9f1490f439cc1c26fc2eb07c0f
|
[] |
no_license
|
mohitraj/mohitcs
|
e794e9ad2eb536e3b8e385fb8d222e8ade95c802
|
d6399b2acf69f5667c74f69715a0b55060bf19d1
|
refs/heads/master
| 2021-09-09T00:21:23.099224
| 2021-09-07T16:39:07
| 2021-09-07T16:39:07
| 87,798,669
| 5
| 6
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 145
|
py
|
# Demonstrates rebinding a module-level name from inside a function
# via the `global` statement.
k = 10


def fun1():
    """Bump the global k by 10, then print k and k + 10."""
    global k
    k += 10
    shifted = k + 10
    print(k)
    print(shifted)


fun1()
print("outside ", k)
print(len("hello"))
|
[
"mohitraj.cs@gmail.com"
] |
mohitraj.cs@gmail.com
|
b862cd7a47b8262ef7b59d3a12ff63f1fbe58f40
|
274f6fc92de000145d19bf1d8fc3d6ec1f33dc15
|
/thegame/settings.py
|
64ca557e0e2563961c85949accff4ec7b537c914
|
[] |
no_license
|
Harsh77480/ONLINE_GAME-using-websockets
|
ebc3cfe95bcfc67ca5973394f6c95ce2c090e0e8
|
355af45119ddb49d18e12bf1f585b15a3c2e85ef
|
refs/heads/master
| 2023-05-06T06:54:10.464592
| 2021-05-26T19:19:16
| 2021-05-26T19:19:16
| 370,670,077
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,813
|
py
|
"""
Django settings for thegame project.
Generated by 'django-admin startproject' using Django 3.2.3.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
import os
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.environ.get('game_secret_key')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'game.apps.GameConfig',
'channels',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'thegame.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
# WSGI_APPLICATION = 'thegame.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
# DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3',
# 'NAME': BASE_DIR / 'db.sqlite3',
# }
# }
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'game',
'USER': 'postgres',
'PASSWORD': os.environ.get('game_db_password'),
'HOST': 'localhost',
'PORT': '5432',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
ASGI_APPLICATION = 'thegame.asgi.application'
# ASGI_APPLICATION = 'mysite.asgi.application'
CHANNEL_LAYERS = {
'default': {
'BACKEND': 'channels_redis.core.RedisChannelLayer',
'CONFIG': {
"hosts": [('127.0.0.1', 6379)],
},
},
}
|
[
"haesh77@gmai.com"
] |
haesh77@gmai.com
|
f083dced3502518dab0883992fa9044778b96520
|
fef8f43025cff430d9aea080885173d9c22b3cb6
|
/etalia/library/migrations/0005_paperuser_store.py
|
cae9e7afa809c463d95225ed65b389504668cf50
|
[] |
no_license
|
GemmaAA1/etalia-open
|
30a083141330e227ac1de9855894bfb6e476e3cc
|
260ce54d2da53c943d8b82fa9d40bb0c0df918a6
|
refs/heads/master
| 2023-03-28T03:33:13.771987
| 2017-10-30T00:55:27
| 2017-10-30T00:55:27
| 351,120,827
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 462
|
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add the nullable PaperUser.store flag (1 = Pinned, 2 = Trashed)."""
    dependencies = [
        ('library', '0004_paperuserhistory'),
    ]
    operations = [
        migrations.AddField(
            model_name='paperuser',
            name='store',
            # Null default: existing rows start with no pin/trash state.
            field=models.PositiveIntegerField(choices=[(1, 'Pinned'), (2, 'Trashed')], default=None, null=True),
        ),
    ]
|
[
"nicolas.pannetier@gmail.com"
] |
nicolas.pannetier@gmail.com
|
ed772c7502b6135f7aacb792dc4ee4125b01e12e
|
e2e470577eb6ec34b581dbcc718ea74a00d19fa5
|
/account/views.py
|
4dbb2eb47e131d91884926c5f97fa5c9dec19397
|
[] |
no_license
|
Bob-Al-Max/bookmarks
|
ce8b6264ef0c69f4571627d556d3cbe6ba9fe4d7
|
955c726ee8712a4ce7f2a574ed26b8a13b1f9264
|
refs/heads/master
| 2022-12-06T03:35:53.620897
| 2020-08-31T10:55:27
| 2020-08-31T10:55:27
| 291,689,650
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,419
|
py
|
from django.shortcuts import render
from django.http import HttpResponse
from django.shortcuts import render
from django.contrib.auth import authenticate, login
from .forms import LoginForm
from django.contrib.auth.decorators import login_required
from .forms import LoginForm, UserRegistrationForm
from .models import Profile
from .forms import LoginForm, UserRegistrationForm, UserEditForm, ProfileEditForm
from django.contrib import messages
from django.shortcuts import get_object_or_404
from django.contrib.auth.models import User
from django.http import JsonResponse
from django.views.decorators.http import require_POST
from common.decorators import ajax_required
from .models import Contact
from actions.utils import create_action
from actions.models import Action
def user_login(request):
    """Manual login view: validate LoginForm, authenticate and log the user in.

    Replies with plain-text HttpResponses instead of redirects
    (tutorial-style view); GET renders the login form.
    """
    if request.method == 'POST':
        form = LoginForm(request.POST)
        if form.is_valid():
            cd = form.cleaned_data
            user = authenticate(request,username=cd['username'], password=cd['password'])
            if user is not None:
                if user.is_active:
                    # Attach the authenticated user to the session.
                    login(request, user)
                    return HttpResponse('Authenticated successfully')
                else:
                    return HttpResponse('Disabled account')
            else:
                return HttpResponse('Invalid login')
    else:
        form = LoginForm()
    return render(request, 'account/login.html', {'form': form})
@login_required
def dashboard(request):
    """Activity stream: show the latest 10 actions, restricted to followed
    users when the current user follows anyone."""
    # Display all actions by default
    actions = Action.objects.exclude(user=request.user)
    following_ids = request.user.following.values_list('id',
                                                       flat=True)
    if following_ids:
        # If user is following others, retrieve only their actions
        actions = actions.filter(user_id__in=following_ids)
    # select_related / prefetch_related avoid per-row queries for the
    # acting user, their profile and the action target.
    actions = actions.select_related('user', 'user__profile')\
                     .prefetch_related('target')[:10]
    return render(request,
                  'account/dashboard.html',
                  {'section': 'dashboard',
                   'actions': actions})
def register(request):
    """Register a new user, create their empty Profile, and record a
    'has created an account' activity action."""
    if request.method == 'POST':
        user_form = UserRegistrationForm(request.POST)
        if user_form.is_valid():
            # Create a new user object, but do not save it to the database yet.
            new_user = user_form.save(commit=False)
            # Set the password (stored hashed by set_password).
            new_user.set_password(user_form.cleaned_data['password'])
            # Save the user to the database.
            new_user.save()
            # Create the user's profile.
            Profile.objects.create(user=new_user)
            create_action(new_user, 'has created an account')
            return render(request, 'account/register_done.html', {'new_user': new_user})
    else:
        user_form = UserRegistrationForm()
    return render(request,'account/register.html',{'user_form': user_form})
@login_required
def edit(request):
    """Let the logged-in user edit both their User fields and Profile
    (including uploaded files) through two bound forms."""
    if request.method == 'POST':
        user_form = UserEditForm(instance=request.user,
                                 data=request.POST)
        # files=request.FILES is needed for the profile photo upload.
        profile_form = ProfileEditForm(instance=request.user.profile,
                                       data=request.POST,
                                       files=request.FILES)
        if user_form.is_valid() and profile_form.is_valid():
            user_form.save()
            profile_form.save()
            messages.success(request, 'Profile updated successfully')
        else:
            messages.error(request, 'Error updating your profile')
    else:
        user_form = UserEditForm(instance=request.user)
        profile_form = ProfileEditForm(instance=request.user.profile)
    return render(request,
                  'account/edit.html',
                  {'user_form': user_form,
                   'profile_form': profile_form})
@login_required
def user_list(request):
    """Render the 'people' page listing all active users."""
    active_users = User.objects.filter(is_active=True)
    context = {'section': 'people', 'users': active_users}
    return render(request, 'account/user/list.html', context)
@login_required
def user_detail(request, username):
    """Render one active user's profile page; 404 when absent or inactive."""
    profile_user = get_object_or_404(User, username=username, is_active=True)
    context = {'section': 'people', 'user': profile_user}
    return render(request, 'account/user/detail.html', context)
@ajax_required
@require_POST
@login_required
def user_follow(request):
    """AJAX endpoint: follow or unfollow the user given by POST 'id'.

    Returns {'status': 'ok'} on success and {'status': 'ko'} on missing
    parameters or unknown user.
    """
    user_id = request.POST.get('id')
    action = request.POST.get('action')
    if user_id and action:
        try:
            user = User.objects.get(id=user_id)
            if action == 'follow':
                # get_or_create keeps repeated follow clicks idempotent.
                Contact.objects.get_or_create(user_from=request.user, user_to=user)
                create_action(request.user, 'is following', user)
            else:
                # Any other action value unfollows.
                Contact.objects.filter(user_from=request.user,
                                       user_to=user).delete()
            return JsonResponse({'status':'ok'})
        except User.DoesNotExist:
            return JsonResponse({'status':'ko'})
    return JsonResponse({'status':'ko'})
|
[
"borq_10@mail.ru"
] |
borq_10@mail.ru
|
13a9f4b483fbac1ac0450b8685ceba8953d86c21
|
cd0bb37a673983ce1cf1a2a378c95976a64650e2
|
/WEBRTC_Angular_7_Typescript/node_modules/uws/build/config.gypi
|
0be03fb11bdfb341b50a679727994305b6032520
|
[
"Zlib"
] |
permissive
|
FuriousTurtle/Refrac-AngularJS-To-Angular7-Typescript
|
bd935153bf7b0d4c8bc810928912cb54b394b791
|
8eb003ec37a9dd09167b96612bb5e34cdab44374
|
refs/heads/master
| 2023-01-23T13:38:28.998476
| 2019-06-14T10:55:31
| 2019-06-14T10:55:31
| 189,003,204
| 0
| 0
| null | 2023-01-07T20:38:57
| 2019-05-28T10:02:53
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 5,381
|
gypi
|
# Do not edit. File was generated by node-gyp's "configure" step
{
"target_defaults": {
"cflags": [],
"default_configuration": "Release",
"defines": [],
"include_dirs": [],
"libraries": []
},
"variables": {
"asan": 0,
"build_v8_with_gn": "false",
"coverage": "false",
"debug_nghttp2": "false",
"enable_lto": "false",
"enable_pgo_generate": "false",
"enable_pgo_use": "false",
"force_dynamic_crt": 0,
"gas_version": "2.27",
"host_arch": "x64",
"icu_data_in": "../../deps/icu-small/source/data/in/icudt62l.dat",
"icu_endianness": "l",
"icu_gyp_path": "tools/icu/icu-generic.gyp",
"icu_locales": "en,root",
"icu_path": "deps/icu-small",
"icu_small": "true",
"icu_ver_major": "62",
"llvm_version": 0,
"node_byteorder": "little",
"node_debug_lib": "false",
"node_enable_d8": "false",
"node_enable_v8_vtunejit": "false",
"node_install_npm": "true",
"node_module_version": 64,
"node_no_browser_globals": "false",
"node_prefix": "/",
"node_release_urlbase": "https://nodejs.org/download/release/",
"node_shared": "false",
"node_shared_cares": "false",
"node_shared_http_parser": "false",
"node_shared_libuv": "false",
"node_shared_nghttp2": "false",
"node_shared_openssl": "false",
"node_shared_zlib": "false",
"node_tag": "",
"node_target_type": "executable",
"node_use_bundled_v8": "true",
"node_use_dtrace": "false",
"node_use_etw": "false",
"node_use_large_pages": "false",
"node_use_openssl": "true",
"node_use_pch": "false",
"node_use_perfctr": "false",
"node_use_v8_platform": "true",
"node_with_ltcg": "false",
"node_without_node_options": "false",
"openssl_fips": "",
"openssl_no_asm": 0,
"shlib_suffix": "so.64",
"target_arch": "x64",
"v8_enable_gdbjit": 0,
"v8_enable_i18n_support": 1,
"v8_enable_inspector": 1,
"v8_no_strict_aliasing": 1,
"v8_optimized_debug": 0,
"v8_promise_internal_field_count": 1,
"v8_random_seed": 0,
"v8_trace_maps": 0,
"v8_typed_array_max_size_in_heap": 0,
"v8_use_snapshot": "true",
"want_separate_host_toolset": 0,
"nodedir": "/home/tonym/.node-gyp/10.15.3",
"standalone_static_library": 1,
"cache_lock_stale": "60000",
"ham_it_up": "",
"legacy_bundling": "",
"sign_git_tag": "",
"user_agent": "npm/6.9.0 node/v10.15.3 linux x64",
"always_auth": "",
"bin_links": "true",
"key": "",
"allow_same_version": "",
"description": "true",
"fetch_retries": "2",
"heading": "npm",
"if_present": "",
"init_version": "1.0.0",
"user": "",
"prefer_online": "",
"noproxy": "",
"force": "",
"only": "",
"read_only": "",
"cache_min": "10",
"init_license": "ISC",
"editor": "vi",
"rollback": "true",
"tag_version_prefix": "v",
"cache_max": "Infinity",
"timing": "",
"userconfig": "/home/tonym/.npmrc",
"engine_strict": "",
"init_author_name": "",
"init_author_url": "",
"preid": "",
"tmp": "/tmp",
"depth": "Infinity",
"package_lock_only": "",
"save_dev": "",
"usage": "",
"metrics_registry": "https://registry.npmjs.org/",
"otp": "",
"package_lock": "true",
"progress": "true",
"https_proxy": "",
"save_prod": "",
"audit": "true",
"cidr": "",
"onload_script": "",
"sso_type": "oauth",
"rebuild_bundle": "true",
"save_bundle": "",
"shell": "/bin/bash",
"prefix": "/usr/local",
"dry_run": "",
"scope": "",
"browser": "",
"cache_lock_wait": "10000",
"ignore_prepublish": "",
"registry": "https://registry.npmjs.org/",
"save_optional": "",
"searchopts": "",
"versions": "",
"cache": "/home/tonym/.npm",
"send_metrics": "",
"global_style": "",
"ignore_scripts": "",
"version": "",
"local_address": "",
"viewer": "man",
"node_gyp": "/usr/local/lib/node_modules/npm/node_modules/node-gyp/bin/node-gyp.js",
"audit_level": "low",
"prefer_offline": "",
"color": "true",
"sign_git_commit": "",
"fetch_retry_mintimeout": "10000",
"maxsockets": "50",
"offline": "",
"sso_poll_frequency": "500",
"umask": "0002",
"fetch_retry_maxtimeout": "60000",
"logs_max": "10",
"message": "%s",
"ca": "",
"cert": "",
"global": "",
"link": "",
"access": "",
"also": "",
"save": "true",
"unicode": "true",
"before": "",
"long": "",
"production": "",
"searchlimit": "20",
"unsafe_perm": "true",
"update_notifier": "true",
"auth_type": "legacy",
"node_version": "10.15.3",
"tag": "latest",
"git_tag_version": "true",
"commit_hooks": "true",
"script_shell": "",
"shrinkwrap": "true",
"fetch_retry_factor": "10",
"save_exact": "",
"strict_ssl": "true",
"globalconfig": "/etc/npmrc",
"dev": "",
"init_module": "/home/tonym/.npm-init.js",
"parseable": "",
"globalignorefile": "/etc/npmignore",
"cache_lock_retries": "10",
"searchstaleness": "900",
"node_options": "",
"save_prefix": "^",
"scripts_prepend_node_path": "warn-only",
"group": "1000",
"init_author_email": "",
"searchexclude": "",
"git": "git",
"optional": "true",
"json": ""
}
}
|
[
"tmourier@xivo.solutions"
] |
tmourier@xivo.solutions
|
2aaa8256b36374a734450a22978b2e29400e882b
|
0723be4044582ef43d36368b8a5638656e5d9889
|
/venv/lib/bullet.py
|
ebf2320394386bc0f9a87b9a913996634bc659ee
|
[] |
no_license
|
Encyf/Space_invaders
|
e94ff464905f639dc2fce33f364528994a26e07f
|
ff70b8dd59a2f2070f78b565973e4818dcf4afdf
|
refs/heads/master
| 2022-11-24T03:08:38.351649
| 2020-07-19T10:58:26
| 2020-07-19T10:58:26
| 279,367,038
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 630
|
py
|
import pygame
from pygame.sprite import Sprite
class Bullet(Sprite):
    """A bullet fired from the ship, travelling right (x increases each frame)."""
    def __init__(self, ai_game):
        """Create a bullet at the ship's current right edge."""
        super().__init__()
        self.screen = ai_game.screen
        self.settings = ai_game.settings
        self.color = self.settings.bullet_color
        # No image: bullets are drawn as a plain rectangle of the configured size.
        self.rect = pygame.Rect(0, 0, self.settings.bullet_width, self.settings.bullet_height)
        self.rect.midright = ai_game.ship.rect.midright
        # Track x as a float so fractional speeds accumulate correctly.
        self.x = float(self.rect.x)
    def update(self):
        """Advance the bullet to the right by bullet_speed."""
        self.x += self.settings.bullet_speed
        self.rect.x = self.x
    def draw_bullet(self):
        """Draw the bullet rectangle onto the screen surface."""
        pygame.draw.rect(self.screen, self.color, self.rect)
|
[
"67859752+Encyf@users.noreply.github.com"
] |
67859752+Encyf@users.noreply.github.com
|
53372f4846a7d187b171c35df0aa90005f3f6c75
|
4e3763a5136576d77cd10d8bae2e66b0925688c8
|
/AWS_CICD/libs/job.py
|
44349c1ad41a8230010889595418f61ffb86d6de
|
[] |
no_license
|
lePaulo/AWSDatalakeDataTransformationOrchestration
|
99159027e4b5c0f09b05997a7b511d1068cd0912
|
c339d42a8062705dcfc5367bca186b5bd5cfcd8a
|
refs/heads/master
| 2020-03-21T13:56:04.963067
| 2019-03-08T09:26:04
| 2019-03-08T09:26:04
| 138,632,646
| 5
| 6
| null | 2018-06-26T13:38:15
| 2018-06-25T18:11:08
|
Python
|
UTF-8
|
Python
| false
| false
| 776
|
py
|
import boto3
import os
# Module-level Batch client, shared by every submission.
BATCH_CLIENT = boto3.client('batch')


def send_command(bucket_name, input_prefix, output_prefix, timestr, job_name,
                 job_dependencies=None, environment_variables=None, command=None,
                 memory=None, vcpus=None):
    """Submit an AWS Batch job for the pricing-data-transformation pipeline.

    Args:
        bucket_name, input_prefix, output_prefix, command: currently unused
            here; kept for interface compatibility with existing callers.
        timestr: timestamp suffix making the job name unique.
        job_name: logical step name embedded in the submitted job's name.
        job_dependencies: optional list of 'dependsOn' entries; the job waits
            for those job ids to finish.
        environment_variables: optional list of {'name', 'value'} dicts applied
            as container environment overrides.
        memory, vcpus: optional container resource overrides; omitted from the
            request when falsy.

    Returns:
        The id (str) of the submitted Batch job.
    """
    # Bug fix: the original used mutable default arguments (=[]), which are
    # shared across calls; normalize None to a fresh list instead.
    job_dependencies = [] if job_dependencies is None else job_dependencies
    environment_variables = [] if environment_variables is None else environment_variables
    container = {'environment': environment_variables}
    # Only override resources that were explicitly requested.
    if memory:
        container['memory'] = memory
    if vcpus:
        container['vcpus'] = vcpus
    response = BATCH_CLIENT.submit_job(
        jobName=('pricing-data-transformation'
                 + '-' + job_name
                 + '-' + timestr),
        jobQueue=os.getenv('JobQueue'),
        dependsOn=job_dependencies,
        jobDefinition=os.getenv('JobDefinition'),
        containerOverrides=container
    )
    return response['jobId']
|
[
"monchyp@amazon.fr"
] |
monchyp@amazon.fr
|
9dad92a32edc5005e54baa1ad84ff10789f971f0
|
6eacbf7828396f0e13ffb0ac0f8d0135bc480055
|
/test.py
|
7ff82bd3365b494993be09a4e4fd1b147fbf3aa5
|
[] |
no_license
|
Abdel-Fadhyl/France_Inter
|
1a40e11d47acb767b0ae74f3671513e875710a9c
|
faafccb84cc8a999a23e47eb42155fc28c63a5ef
|
refs/heads/master
| 2023-08-24T22:35:12.031275
| 2021-10-13T13:44:13
| 2021-10-13T13:44:13
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,839
|
py
|
import requests, uuid, json
from azure.cognitiveservices.speech import AudioDataStream, SpeechConfig, SpeechSynthesizer, SpeechSynthesisOutputFormat
from azure.cognitiveservices.speech.audio import AudioOutputConfig
import tkinter as tk
gui = tk.Tk()
gui.geometry("500x200")
def getEntry():
    """Read the entry text, synthesize it as speech (FranceInter.wav),
    translate it French -> English via Azure Translator, then synthesize
    the translation (Traduction.wav)."""
    # NOTE(review): subscription keys are hardcoded below — move them to
    # environment variables or a secret store before sharing this code.
    speech_config = SpeechConfig(subscription="c138fb721a42436caa2709a946f19b7d", region="francecentral")
    audio_config = AudioOutputConfig(filename="FranceInter.wav")
    synthesizer = SpeechSynthesizer(speech_config=speech_config, audio_config=audio_config)
    # Speak the original text into FranceInter.wav (async, fire-and-forget).
    synthesizer.speak_text_async(myEntry.get())
    subscription_key = "8460cf8fbcd642d898f99e3c78207689"
    endpoint = "https://api.cognitive.microsofttranslator.com/"
    location = "francecentral"
    path = '/translate'
    constructed_url = endpoint + path
    # Translate from French to English.
    params = {
        'api-version': '3.0',
        'from': 'fr',
        'to': ['en']
    }
    # NOTE(review): constructed_url is assigned twice — this line is redundant.
    constructed_url = endpoint + path
    headers = {
        'Ocp-Apim-Subscription-Key': subscription_key,
        'Ocp-Apim-Subscription-Region': location,
        'Content-type': 'application/json',
        'X-ClientTraceId': str(uuid.uuid4())
    }
    body = [{
        'text': ' '
    }]
    body[0]['text'] = myEntry.get()
    request = requests.post(constructed_url, params=params, headers=headers, json=body)
    response = request.json()
    text_trad = response[0]['translations'][0]['text']
    # Speak the English translation into Traduction.wav.
    audio_config = AudioOutputConfig(filename="Traduction.wav")
    synthesizer_traduction = SpeechSynthesizer(speech_config=speech_config, audio_config=audio_config)
    synthesizer_traduction.speak_text_async(text_trad)
    print(text_trad)
# Minimal UI: one text entry plus a button wired to getEntry.
myEntry = tk.Entry(gui, width=40)
myEntry.pack(pady=20)
btn = tk.Button(gui, height=1, width=10, text="Traduire", bg="red", command=getEntry)
btn.pack()
gui.mainloop()
|
[
"djoi_a@etna-alternance.net"
] |
djoi_a@etna-alternance.net
|
d1ac8f2d290ad912ee598b9de46bc0a8af886db2
|
aa0a367248df4833d3165c318e5b03544a93acb4
|
/des.py
|
e373e5350086d2911a406142d4bbffb42ad655f9
|
[] |
no_license
|
rojla123/selmi
|
c7fe387bbd9d6f9d17cd62246cc721f00d2af918
|
d328c6c6d3cd7e449b0e71fc629028fb30c5036b
|
refs/heads/master
| 2022-11-30T00:22:44.514911
| 2020-08-04T13:51:33
| 2020-08-04T13:51:33
| 284,993,964
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,523
|
py
|
# NOTE(review): indentation below was reconstructed from a whitespace-mangled
# dump; block boundaries should be confirmed against the original file.
import dash
import dash_core_components as dcc
import dash_html_components as html
import plotly.express as px
import pandas as pd
import os
import sqlite3
from pathlib import Path
import pandas as pd  # (duplicate import, kept as in the original)
import streamlit as st
import time
import numpy as np


def load_data():
    # Load the COVID dataset shipped next to the app.
    data=pd.read_csv('ammar.csv')
    return data


df=load_data()
st.sidebar.success("# L ecole nationale des sciences de l informatique")
st.success("# my first app" )
st.success(" # SELMI NIZAR")
st.video("3.Mp4")
# Create a list of possible values and multiselect menu with them in it.
COUNTRIES = df['location'].unique()
se = st.sidebar.multiselect('Select countries', COUNTRIES)
mask = df['location'].isin(se)
df=df[mask]
st.write(" # le nombre maximale de cases ",se ,df["total_cases"].max())
st.write(" # le nombre maximale de deaths ",se ,df["total_deaths"].max())
st.write(df)
if st.button('Say hello'):
    # Plot case/death evolution for the selected countries.
    fig2 = px.area(df, x='date', y='total_cases',color='location')
    fig1= px.scatter(df, x='date', y='total_deaths',color='location')
    st.success(" # l volution de la pandemie_19 dans le monde")
    ts_chart = st.plotly_chart(fig2)
    st.success('# l volution de la pandemie_19 dans le monde')
    ts_char= st.plotly_chart(fig1)
    time.sleep(60)  # NOTE(review): original comment claimed "2s" but this sleeps 60 seconds
st.image("ensi.png")
pics = {
    "Cat":"https://cdn.pixabay.com/photo/2019/03/15/19/19/puppy-4057786_960_720.jpg",
    "Puppy": "https://cdn.pixabay.com/photo/2019/03/15/19/19/puppy-4057786_960_720.jpg",
    "Sci-fi city": "https://storage.needpix.com/rsynced_images/science-fiction-2971848_1280.jpg"
}
for i in list(pics.keys()):
    st.image(pics[i], use_column_width=True, caption=pics[i])
x=st.text_area("your comment")
ret = st.radio('Select countries', ('help','contact'))
a=st.slider("choose",1,8)
a=st.button("valider")
# NOTE(review): `st` is the streamlit module, so this comparison is always
# False and the branch below is dead code; `if a:` was probably intended.
if(st=="valider"):
    st.write("nizar selmi est u fondateur de rhrvgrveytvrygtrygtyrtyrytreytyrgtyrgtyrgtyregtgreytgryegtyregtyergeryyrygtyr")
    v=st.slider('hour',1,4,3)
db_loc = sqlite3.connect('Nizar.db')
cursor = db_loc.cursor()
if(ret=="contact"):
    name=x
    # NOTE(review): `values` is not defined anywhere (NameError if reached)
    # and the SQL is malformed; sqlite3 parameter substitution would be
    # cursor.execute("INSERT INTO sel VALUES (?)", (name,)) — confirm intent.
    cursor.execute(''' insert into sel (?)''' ,values(name))
    cursor.execute('''SELECT * FROM sel;''')
    first_eleve = cursor.fetchone() # fetch the first row
    st.write(first_eleve)
st.markdown('Streamlit is **_really_ cool**.')
map_data = pd.DataFrame(
    np.random.randn(1000, 2) / [50, 50] + [37.76, -122.4],
    columns=['lat', 'lon'])
st.map(map_data)
|
[
"noreply@github.com"
] |
rojla123.noreply@github.com
|
52cc4176f1a4d4bc97320e8ad23192457f7d5695
|
0b9f4d621264fc643bc13758e59b9b81df250862
|
/fantastic_lamp/fantastic_lamp/asgi.py
|
3f871a77e8fe45d1d74ee3b64cb92a63a3fe38c7
|
[
"MIT"
] |
permissive
|
XrossFox/fantastic-lamp-django-framework
|
f50b09b4cd4c3ed9045da885952b45294296d140
|
61f3e252237bb3ea2e9929237e3701b68f78c226
|
refs/heads/main
| 2023-02-01T04:58:38.065041
| 2020-12-21T00:32:16
| 2020-12-21T00:32:16
| 322,911,911
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 405
|
py
|
"""
ASGI config for fantastic_lamp project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'fantastic_lamp.settings')
application = get_asgi_application()
|
[
"warhead_1090@hotmail.com"
] |
warhead_1090@hotmail.com
|
2a0581eafc31abc7fc1ad21c4a8ee84f9e530959
|
713014c2050a7e25e8f25bfb941a5a59c6510010
|
/venv/bin/easy_install-3.7
|
5a732bb81b4936a000412e2cabb4987e9943dd6d
|
[] |
no_license
|
darckwind/face_of-_china
|
20a9ff04af9161ac558dc75bc6afb6e196fb64d0
|
a946663f04fe720d918f58338d0cae5d1e8ac587
|
refs/heads/master
| 2020-06-25T06:05:12.881170
| 2019-07-31T19:41:37
| 2019-07-31T19:41:37
| 199,224,945
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 459
|
7
|
#!/Users/franciscolagos/PycharmProjects/untitled8/venv/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==40.8.0','console_scripts','easy_install-3.7'
# Auto-generated console-script shim: normalizes argv[0] (strips a
# "-script.py(w)"/".exe" suffix), then dispatches to the setuptools
# 'easy_install-3.7' entry point and exits with its return value.
__requires__ = 'setuptools==40.8.0'
import re
import sys
from pkg_resources import load_entry_point

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('setuptools==40.8.0', 'console_scripts', 'easy_install-3.7')()
    )
|
[
"f.bunout01@ufromail.cl"
] |
f.bunout01@ufromail.cl
|
241094939f15196f1c39915afe3c309568d1eaa9
|
4d40e4f747ee5b2e2a7b63ca62a7edaecd5e9bfe
|
/phrases.py
|
84b7c1a3df5cf38dfdc301fa4238b3e6b15caf73
|
[] |
no_license
|
mike-conner/Unit-3
|
5700a57cfe00606df7e5b16041a3c0c665cefb16
|
9385ecec4987a35411cc5e2d2250ab4c16bc06e2
|
refs/heads/master
| 2022-08-07T00:12:36.553800
| 2020-05-16T17:35:32
| 2020-05-16T17:35:32
| 263,072,090
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 263
|
py
|
# phrases to be used for the game
# NOTE: the original list contained 'HANG IN THERE' twice, which doubled its
# odds of being picked; the duplicate has been removed so every phrase is
# equally likely.
PHRASE_LIST = [
    'BETTER LATE THAN NEVER',
    'HANG IN THERE',
    'HELLO WORLD',
    'A DIAMOND IN THE ROUGH',
    'BEST OF BOTH WORLDS',
    'PIECE OF CAKE',
    'YOUR GUESS IS AS GOOD AS MINE',
    'SO FAR SO GOOD',
]
|
[
"noreply@github.com"
] |
mike-conner.noreply@github.com
|
fab197008b9ba50ab6d79be8054d8b56f37a35d0
|
bf06236de52938e8bb8491bdde56edcad0562356
|
/Vorna/Vorna/asgi.py
|
e6f53b14326b3cc2b35e3c84c15f4cd4d8ec31cc
|
[] |
no_license
|
webclinic017/Mt_v
|
a68603753e629ed3f3e65f06f924ad80befaf241
|
08b90c5edf0da588fe4b5f3756797c02ab4ed0d7
|
refs/heads/main
| 2023-01-05T03:12:24.481276
| 2020-09-29T20:17:32
| 2020-09-29T20:17:32
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 387
|
py
|
"""
ASGI config for Vorna project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'Vorna.settings')
application = get_asgi_application()
|
[
"pooriaxana@gmail.com"
] |
pooriaxana@gmail.com
|
a167d5dc2cdec2921991ce9f4f2c0efe7f0d135e
|
71c1e6a2c376815ca0a4f96191f971bdff90ebf5
|
/ryu/tests/integrated/vrrp_common.py
|
6ab48e69b127581975afe0c0eae51c555a418412
|
[
"Apache-2.0"
] |
permissive
|
ntts-clo/ryu
|
d0ba49e281327fca0d5321bbd137f401436a779e
|
abafab91493f40be60d3fe0e580ee0f01709f09f
|
refs/heads/master
| 2021-01-18T14:38:11.539342
| 2014-05-06T09:57:39
| 2014-05-09T16:14:57
| 8,921,181
| 2
| 0
| null | 2013-05-27T01:57:36
| 2013-03-21T04:31:47
|
Python
|
UTF-8
|
Python
| false
| false
| 8,658
|
py
|
# Copyright (C) 2013 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2013 Isaku Yamahata <yamahata at valinux co jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
import random
from ryu.base import app_manager
from ryu.lib import hub
from ryu.lib import mac as lib_mac
from ryu.lib.packet import vrrp
from ryu.services.protocols.vrrp import api as vrrp_api
from ryu.services.protocols.vrrp import event as vrrp_event
# VRID reserved for the instance configured with the priority under test
# (see _main_version_priority_sleep); all other VRIDs get derived priorities.
_VRID = 7
# Primary IP addresses used for the two routers of each virtual-router pair.
_PRIMARY_IP_ADDRESS0 = '10.0.0.2'
_PRIMARY_IP_ADDRESS1 = '10.0.0.3'
class VRRPCommon(app_manager.RyuApp):
    """Shared driver for the VRRP integration tests (Python 2 code).

    A concrete subclass supplies the two interface names and a
    ``_configure_vrrp_router`` factory; this class then exercises every
    VRRP version/priority combination, shuffles priorities, and tears the
    instances down, polling the VRRP manager between steps.

    NOTE(review): this block came from a whitespace-mangled dump; the
    indentation below was reconstructed from the control flow and should be
    confirmed against the upstream ryu source.
    """

    # Interface names; must be overridden by the concrete test app.
    _IFNAME0 = None
    _IFNAME1 = None

    def __init__(self, *args, **kwargs):
        super(VRRPCommon, self).__init__(*args, **kwargs)

    def _main(self):
        # Run the whole scenario for VRRP v3 and then v2.
        self._main_version(vrrp.VRRP_VERSION_V3)
        self._main_version(vrrp.VRRP_VERSION_V2)
        print "done!"

    def _main_version(self, vrrp_version):
        # Exercise each interesting priority value for one VRRP version.
        self._main_version_priority(vrrp_version,
                                    vrrp.VRRP_PRIORITY_ADDRESS_OWNER)
        self._main_version_priority(vrrp_version,
                                    vrrp.VRRP_PRIORITY_BACKUP_MAX)
        self._main_version_priority(vrrp_version,
                                    vrrp.VRRP_PRIORITY_BACKUP_DEFAULT)
        self._main_version_priority(vrrp_version,
                                    vrrp.VRRP_PRIORITY_BACKUP_MIN)

    def _main_version_priority(self, vrrp_version, priority):
        # Run once without and once with the verification/sleep phase.
        self._main_version_priority_sleep(vrrp_version, priority, False)
        self._main_version_priority_sleep(vrrp_version, priority, True)

    def _check(self, vrrp_api, instances):
        """Poll vrrp_list until every pair has exactly one MASTER.

        Verifies that the higher-priority member of each pair is the one
        holding MASTER; retries while the state looks transient.
        """
        while True:
            # Inner loop: wait until all instances are registered and none
            # is still in INITIALIZE state.
            while True:
                rep = vrrp_api.vrrp_list(self)
                if len(rep.instance_list) >= len(instances) * 2:
                    if any(i.state == vrrp_event.VRRP_STATE_INITIALIZE
                           for i in rep.instance_list):
                        continue
                    break
                print len(rep.instance_list), '/', len(instances) * 2
                time.sleep(1)
                # for i in rep.instance_list:
                #     print i.instance_name, i.monitor_name, i.config, \
                #         i.interface, i.state
            assert len(rep.instance_list) == len(instances) * 2
            num_of_master = 0
            d = dict(((i.instance_name, i) for i in rep.instance_list))
            bad = 0
            for i in rep.instance_list:
                assert i.state in (vrrp_event.VRRP_STATE_MASTER,
                                   vrrp_event.VRRP_STATE_BACKUP)
                if i.state == vrrp_event.VRRP_STATE_MASTER:
                    num_of_master += 1
                vr = instances[i.config.vrid]
                # The lower-priority member of a pair must not be MASTER.
                if (vr[0].config.priority > vr[1].config.priority and
                        i.instance_name == vr[1].instance_name) or \
                   (vr[0].config.priority < vr[1].config.priority and
                        i.instance_name == vr[0].instance_name):
                    if i.state == vrrp_event.VRRP_STATE_MASTER:
                        print "bad master:"
                        print d[vr[0].instance_name].state, \
                            d[vr[0].instance_name].config.priority
                        print d[vr[1].instance_name].state, \
                            d[vr[1].instance_name].config.priority
                        bad += 1
                    # assert i.state != vrrp_event.VRRP_STATE_MASTER
            if bad > 0:
                # this could be a transient state
                print bad, "bad masters"
                time.sleep(1)
                continue
            if num_of_master >= len(instances):
                assert num_of_master == len(instances)
                break
            print num_of_master, '/', len(instances)
            time.sleep(1)
            continue

    def _main_version_priority_sleep(self, vrrp_version, priority, do_sleep):
        """Configure ~51 router pairs, shuffle priorities, then shut down.

        ``priority`` is applied only to the pair with VRID == _VRID; other
        pairs derive priorities from their VRID. When ``do_sleep`` is True
        the intermediate states are verified via _check/polling.
        """
        app_mgr = app_manager.AppManager.get_instance()
        self.logger.debug('%s', app_mgr.applications)
        vrrp_mgr = app_mgr.applications['VRRPManager']
        step = 5
        instances = {}
        # Configure a pair of routers for every 5th VRID except _VRID.
        for vrid in xrange(1, 256, step):
            if vrid == _VRID:
                continue
            print "vrid", vrid
            l = {}
            prio = max(vrrp.VRRP_PRIORITY_BACKUP_MIN,
                       min(vrrp.VRRP_PRIORITY_BACKUP_MAX, vrid))
            rep0 = self._configure_vrrp_router(vrrp_version,
                                               prio,
                                               _PRIMARY_IP_ADDRESS0,
                                               self._IFNAME0,
                                               vrid)
            assert not rep0.instance_name is None
            l[0] = rep0
            prio = max(vrrp.VRRP_PRIORITY_BACKUP_MIN,
                       min(vrrp.VRRP_PRIORITY_BACKUP_MAX, 256 - vrid))
            rep1 = self._configure_vrrp_router(vrrp_version,
                                               prio,
                                               _PRIMARY_IP_ADDRESS1,
                                               self._IFNAME1,
                                               vrid)
            assert not rep1.instance_name is None
            l[1] = rep1
            instances[vrid] = l
        # The pair under test: requested priority vs the backup default.
        print "vrid", _VRID
        l = {}
        rep0 = self._configure_vrrp_router(vrrp_version, priority,
                                           _PRIMARY_IP_ADDRESS0,
                                           self._IFNAME0, _VRID)
        assert not rep0.instance_name is None
        l[0] = rep0
        rep1 = self._configure_vrrp_router(
            vrrp_version, vrrp.VRRP_PRIORITY_BACKUP_DEFAULT,
            _PRIMARY_IP_ADDRESS1, self._IFNAME1, _VRID)
        assert not rep1.instance_name is None
        l[1] = rep1
        instances[_VRID] = l
        self.logger.debug('%s', vrrp_mgr._instances)
        if do_sleep:
            print "priority", priority
            print "waiting for instances starting"
            self._check(vrrp_api, instances)
        # Randomize the priority of one member of every non-test pair.
        for vrid in instances.keys():
            if vrid == _VRID:
                continue
            which = vrid & 1
            new_priority = int(random.uniform(vrrp.VRRP_PRIORITY_BACKUP_MIN,
                                              vrrp.VRRP_PRIORITY_BACKUP_MAX))
            i = instances[vrid][which]
            vrrp_api.vrrp_config_change(self, i.instance_name,
                                        priority=new_priority)
            i.config.priority = new_priority
        if do_sleep:
            print "priority shuffled"
            self._check(vrrp_api, instances)
        # Shut down one member of each pair, then the test pair's first.
        for vrid in instances.keys():
            if vrid == _VRID:
                continue
            which = vrid & 1
            vrrp_api.vrrp_shutdown(self, instances[vrid][which].instance_name)
        vrrp_api.vrrp_shutdown(self, instances[_VRID][0].instance_name)
        if do_sleep:
            print "shutting down instances"
            while True:
                rep = vrrp_api.vrrp_list(self)
                if len(rep.instance_list) <= len(instances):
                    break
                print "left", len(rep.instance_list)
                time.sleep(1)
            assert len(rep.instance_list) == len(instances)
            print "waiting for the rest becoming master"
            while True:
                rep = vrrp_api.vrrp_list(self)
                if all(i.state == vrrp_event.VRRP_STATE_MASTER
                       for i in rep.instance_list):
                    break
                time.sleep(1)
        # Shut down the remaining members.
        vrrp_api.vrrp_shutdown(self, instances[_VRID][1].instance_name)
        for vrid in instances.keys():
            if vrid == _VRID:
                continue
            which = 1 - (vrid & 1)
            vrrp_api.vrrp_shutdown(self, instances[vrid][which].instance_name)
        print "waiting for instances shutting down"
        while True:
            rep = vrrp_api.vrrp_list(self)
            if not rep.instance_list:
                break
            print "left", len(rep.instance_list)
            time.sleep(1)
|
[
"fujita.tomonori@lab.ntt.co.jp"
] |
fujita.tomonori@lab.ntt.co.jp
|
4d29b40f431711f553ef332c6c2ebe3ff2b4f174
|
3da522a5f394b2520e727651b2a5b5a9bec0be41
|
/app/views/user.py
|
db7c28d90738cf9ce167ed9e34177436a6c31b5b
|
[] |
no_license
|
unicode-tech/django-project-starter
|
2dcd29e17579a91c65c1120f4e64555b6b3ec4aa
|
ab64486f53da230846492506576a883794d2654a
|
refs/heads/master
| 2023-07-17T20:11:59.044487
| 2021-08-24T16:17:57
| 2021-08-24T16:17:57
| 392,238,225
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,331
|
py
|
from django.db.models import Q
from django.template.loader import render_to_string
from django.urls import reverse_lazy
from django.views.generic import TemplateView, UpdateView, DeleteView, CreateView
from django_datatables_view.base_datatable_view import BaseDatatableView
from src.models import User
from app.forms import UserForm, UserPasswordChangeForm, UserProfileEditForm
from src.libraries import AuthGuard
class UserDataView(AuthGuard, BaseDatatableView):
    """Server-side datatables feed for User rows (guarded by AuthGuard)."""
    model = User
    columns = [
        'email',
        'name',
        'is_active',
        'action',
    ]

    def render_column(self, row, column):
        # Render the synthetic 'action' column as the password/update/delete
        # button group; all real model columns fall through to the default.
        if column == 'action':
            return render_to_string(
                'app/snippets/action.html', {
                    'password_link': reverse_lazy('user_password', kwargs={'pk': row.id}),
                    'update_link': reverse_lazy('user_update', kwargs={'pk': row.id}),
                    'delete_link': reverse_lazy('user_delete', kwargs={'pk': row.id}),
                },
            )
        return super().render_column(row, column)

    def filter_queryset(self, qs):
        # Apply the datatables global search box across id/email/name.
        search = self.request.GET.get('search[value]', None)
        if search:
            qs = qs.filter(
                Q(id__icontains=search) |
                Q(email__icontains=search) |
                Q(name__icontains=search)
            )
        return qs
class UserListView(AuthGuard, TemplateView):
    # Static page that hosts the users datatable fed by UserDataView.
    template_name = 'app/user/index.html'
class UserCreateView(AuthGuard, CreateView):
    """Create a user via UserForm."""
    model = User
    template_name = 'app/user/create.html'
    form_class = UserForm
    # NOTE(review): redirects to 'user_index' while the sibling views use
    # 'user_list' — confirm which URL name actually exists in urls.py.
    success_url = reverse_lazy('user_index')
class UserUpdateView(AuthGuard, UpdateView):
    """Edit a user's profile via UserProfileEditForm."""
    model = User
    template_name = 'app/user/update.html'
    form_class = UserProfileEditForm
    success_url = reverse_lazy('user_list')
class UserDeleteView(AuthGuard, DeleteView):
    """Confirm-and-delete view for a user."""
    model = User
    template_name = 'app/user/delete.html'
    success_url = reverse_lazy('user_list')
class UserPasswordChangeView(AuthGuard, UpdateView):
    """Change a user's password via UserPasswordChangeForm."""
    model = User
    template_name = 'app/user/change_password.html'
    form_class = UserPasswordChangeForm
    success_url = reverse_lazy('user_list')

    def get_initial(self):
        # Blank the password field so the stored hash is never rendered
        # into the form.
        initial = super().get_initial()
        initial['password'] = ''
        return initial
|
[
"calvinbenhardi@gmail.com"
] |
calvinbenhardi@gmail.com
|
438537c819c1db70d5539ab1446160e25797b3cc
|
73e4a072c283a58854abcaeab0e1ac33eb11a6bf
|
/mrb_automation_test/api_automation/api_automation_perfect/mrb_api_perfectreport.py
|
1faedaa3e713f8c0cc0bf0a8fa8226691a82a049
|
[] |
no_license
|
amez7089/mrbtest
|
d52e3b3d6eea6aa836e259c563a3e2f755ff8794
|
2113f58b551d17ec792dffeb9fcbaea5885b8f54
|
refs/heads/master
| 2020-04-06T13:48:44.750285
| 2019-09-30T09:43:41
| 2019-09-30T09:43:41
| 157,515,551
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,208
|
py
|
# -*- coding: utf-8 -*-
import time
import unittest
# import HTMLTestRunner
import HTMLTestRunnerCN
def Suite():
    """Discover every test case in mrb_api_perfect.py and return one flat
    unittest.TestSuite containing them all."""
    testunit=unittest.TestSuite()
    # NOTE(review): absolute Windows path — breaks on any other machine;
    # consider deriving it from __file__.
    test_dir = "E:\\PythonProject\\mrbtest\\mrbtest\\mrb_automation_test\\api_automation\\api_automation_perfect"
    discover=unittest.defaultTestLoader.discover(test_dir,pattern='mrb_api_perfect.py',top_level_dir=None)
    # Flatten the nested suites produced by discover() into one suite.
    for test_suite in discover:
        for test_case in test_suite:
            testunit.addTests(test_case)
    print(testunit)
    return testunit
if __name__=="__main__":
#测试报告的存放路径
test_report = "E:\\PythonProject\\mrbtest\\mrbtest\\mrb_automation_test\\api_automation\\api_automation_perfect"
#按照一定的格式获取当前的时间
now = time.strftime("%Y-%m-%d_%H-%M-%S")
#定义报告存放路径
filename = test_report+'\\'+'Reportresult_'+now+'.html'
fp = open(filename,'wb')
#定义测试报告
runner = HTMLTestRunnerCN.HTMLTestRunner(
stream=fp,
tester=u'周楚奇',
title=u'美容邦API接口测试报告:',
description=u'测试用例执行情况:'
)
runner.run(Suite())
#关闭报告文件
fp.close()
|
[
"1269758616@qq.com"
] |
1269758616@qq.com
|
29f0a079f6e5a4b601f5c4bbc7925986e11e105d
|
bc70b275257d0e19ee8d3983edaf6a0aa6068b4f
|
/custom_env/__init__.py
|
6a1e5222426c1d4b0aad3da0b34a3c9cc78cbb34
|
[
"MIT"
] |
permissive
|
x2ever/delay_control_rl
|
bc2bcd72e4d4010b8be13688ca4bcca7508d816d
|
1427c024f36320f976e3eef736d433d123bce324
|
refs/heads/master
| 2023-04-05T09:22:17.876993
| 2021-03-21T01:00:00
| 2021-04-06T17:01:14
| 298,260,254
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 35
|
py
|
from .delay_env import DelayWrapper
|
[
"x2ever@naver.com"
] |
x2ever@naver.com
|
b34c91a555ccc75421794ee67d4b0c298663856e
|
a40c0f432804b42bef9bf4790f0445ac3d6da36f
|
/lab7/example1.py
|
a88afdbf2166141adff6e1a845573353891bdf54
|
[] |
no_license
|
b55888938/270201050
|
c1c2ec7d47d33abde2627d29b758214b274a6f09
|
142c751047f6b2733ea8e3ecb99e2e58687557a3
|
refs/heads/master
| 2023-02-07T18:22:03.728359
| 2020-12-27T20:38:22
| 2020-12-27T20:38:22
| 305,070,573
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 180
|
py
|
def funct(number):
    """Return the square of the sum of the first *number* items of ``a_list``.

    Reads the module-level ``a_list`` (as the original did). Uses the
    built-in ``sum`` over a slice instead of a manual index loop, which also
    stops shadowing the builtin name ``sum``; as a side benefit, a *number*
    larger than ``len(a_list)`` now sums the whole list instead of raising
    IndexError.
    """
    return sum(a_list[:number]) ** 2


a_list = [12, -7, 5, -89.4, 3, 27, 56, 57.3]
a = funct(len(a_list))
print(a)
|
[
"ahmetozdemir@std.iyte.edu.tr"
] |
ahmetozdemir@std.iyte.edu.tr
|
d1ad93b138e8063d8d19f43af0d411ce7c5d9886
|
6c99d2ddc48847155f9863c8c0d86ed1d98eafef
|
/homework_!_softmax/baseuse_softmax.py
|
a1a6424d3ece2df91705bf89eae228cb9297d5b3
|
[] |
no_license
|
sakurasakura1996/cs231n_homework
|
9fb15231ffff972241ad1b31ecc24182c2255489
|
aed4e88bcdff4aa7e3c7dd7c1a3f5b8688b42414
|
refs/heads/master
| 2020-09-09T05:26:51.188240
| 2019-11-15T08:37:26
| 2019-11-15T08:37:26
| 221,361,036
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 717
|
py
|
import math
import numpy as np
# Scratch script exploring the numpy building blocks of a softmax
# implementation ("socres" is the author's typo for "scores"; kept as-is
# since it is only a local name).
socres = np.array([[1,2,3],
                   [1,2,3]])
scores = np.array([1,2,3])  # (unused below)
exp_scores = np.exp(socres)  # numpy has exp built in; no need for the math module
print(socres)
print(exp_scores)
# Try normalizing a 2-D numpy array.
# NOTE(review): np.sum(socres) sums ALL elements, so this normalizes
# globally, not per row (per-row would need axis=1, keepdims=True).
socres_normalize = socres / np.sum(socres)
print(socres_normalize)
a = np.array([1,2,3])
a_normalize = a / np.sum(a)
print(a_normalize)
print(-np.log(a_normalize))
# np.dot, np.outer
outer_1 = np.array([1,2,3])
outer_2 = np.array([2,2,3])
print(np.outer(outer_1,outer_2))  # outer product: each element of the first vector times the whole second vector
print(np.dot(outer_1,outer_2))  # inner (dot) product
|
[
"2470375551@qq.com"
] |
2470375551@qq.com
|
8bd97f6ce4e21e6e1e0cdfc3b3ecfe1ee2a583cf
|
838f6e9e242c380e99cfca9d10210dd662355776
|
/data.py
|
58eb4dccd2280dcf929cc286c03a51e0ec57cc41
|
[] |
no_license
|
yukunfeng/char_word_lm
|
087c9caf090a46cd9d9e99fa26f61574bb5e0d54
|
881fef9c8bb5cdbc6ef18288d11b3c3e9bff7e7a
|
refs/heads/master
| 2022-01-22T08:46:24.313462
| 2021-08-29T05:51:19
| 2021-08-29T05:51:19
| 208,559,799
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 11,727
|
py
|
from collections import Counter
import collections
import os
import torch
def get_word_len(counter, confidence):
    """Return word-length statistics for a frequency Counter.

    Returns a tuple ``(avg_len_per_word, most_long_word_len)``:
    the token-frequency-weighted average word length, and the word length
    at which the cumulative token frequency (over words sorted by length)
    first exceeds *confidence*.

    NOTE(review): indentation reconstructed from a whitespace-mangled dump;
    ``most_long_word_len`` is read from the leaked loop variable after the
    break — confirm against the original source.
    """
    word2len = {}
    character_num = 0
    for word in counter.keys():
        word_len = len(word)
        word2len[word] = word_len
        character_num += (word_len * counter[word])
    word_num = sum(counter.values())
    avg_len_per_word = character_num / word_num
    # Words ordered by increasing length.
    sorted_word2len = collections.OrderedDict(
        sorted(word2len.items(), reverse=False, key=lambda t: t[1])
    )
    accumulated_len = 0
    for word, word_len in sorted_word2len.items():
        freq = counter[word]
        accumulated_len += freq
        percentage = accumulated_len / word_num
        if percentage > confidence:
            break
    most_long_word_len = word_len
    return avg_len_per_word, most_long_word_len
def test_get_word_len():
    """Ad-hoc manual check of get_word_len against a local PTB valid set.

    NOTE(review): hard-coded absolute path — only runs on the author's
    machine; prints rather than asserts.
    """
    counter = Counter()
    file_path = "/home/lr/yukun/pytorch_examples/word_lm/data/50lm/penn/valid.txt"
    lines = [
        counter.update(line.strip().split())
        for line in open(file_path, 'r').readlines()
    ]
    avg_len_per_word, most_long_word_len = get_word_len(counter, 0.998)
    # most_long_word_len = min(most_long_word_len, 30)
    print(avg_len_per_word)
    print(most_long_word_len)
class Dictionary(object):
    """Bidirectional word <-> integer-index vocabulary.

    ``word2idx`` maps a word to its index; ``idx2word`` is the inverse list,
    ordered by insertion.
    """

    def __init__(self):
        self.word2idx = {}
        self.idx2word = []

    def add_word(self, word):
        """Register *word* if unseen and return its index."""
        if word not in self.word2idx:
            self.word2idx[word] = len(self.idx2word)
            self.idx2word.append(word)
        return self.word2idx[word]

    def __len__(self):
        """Number of distinct words in the vocabulary."""
        return len(self.idx2word)
class CharNgrams(object):
    """Character n-gram vocabulary with padded per-word index sequences.

    Index 0 is ``<pad>``, index 1 is ``<unk_chars>``; optional *specials*
    (e.g. ``<unk>``/``<eos>``) are stored as single atomic "ngrams".

    NOTE(review): indentation reconstructed from a whitespace-mangled dump.
    """

    def __init__(self, n, add_begin_end, max_len, specials=None):
        self.specials = specials
        self.counter = Counter()
        self.chars2idx = {}
        # add special ngrams
        self.pad = "<pad>"
        self.idx2chars = [self.pad]
        self.chars2idx[self.pad] = len(self.idx2chars) - 1
        self.unk_char = "<unk_chars>"
        self.idx2chars.append(self.unk_char)
        self.chars2idx[self.unk_char] = len(self.idx2chars) - 1
        self.pad_index = self.chars2idx[self.pad]
        self.unk_char_index = self.chars2idx[self.unk_char]
        # adding specials
        if specials is not None:
            for special in specials:
                self.idx2chars.append(special)
                self.chars2idx[special] = len(self.idx2chars) - 1
        self.n = n                       # n-gram size
        self.max_len = max_len           # max ngrams kept per word
        self.add_begin_end = add_begin_end  # wrap words as $word^

    def get_ngrams(self, word):
        """Return the list of character n-grams of *word* (specials intact)."""
        if self.specials is not None and word in self.specials:
            return [word]
        if self.add_begin_end:
            word = f"${word}^"
        n = self.n
        chars_list = [word[i:i+n] for i in range(len(word)-n+1)]
        return chars_list

    def get_ngrams_index(self, word, padding=True):
        """Return (index list, real length) for *word*.

        Unknown n-grams map to the <unk_chars> index; with *padding* the
        list is right-padded with <pad> up to max_len.
        """
        chars_list = self.get_ngrams(word)
        chars_list = chars_list[0:self.max_len]
        real_length = len(chars_list)
        if padding:
            chars_list.extend([self.pad] * (self.max_len - len(chars_list)))
        index_list = []
        for chars in chars_list:
            index = self.unk_char_index
            if chars in self.chars2idx:
                index = self.chars2idx[chars]
            index_list.append(index)
        return index_list, real_length

    def add_word(self, word):
        """Count *word*'s n-grams and add unseen ones to the vocabulary."""
        chars_list = self.get_ngrams(word)
        chars_list = chars_list[0:self.max_len]
        self.counter.update(chars_list)
        for chars in chars_list:
            if chars not in self.chars2idx:
                self.idx2chars.append(chars)
                self.chars2idx[chars] = len(self.idx2chars) - 1
class Corpus(object):
    """Word-level (and optionally char-ngram-level) language-model corpus.

    Reads ``train.txt``/``valid.txt``/``test.txt`` under *path* into flat
    LongTensors of word indices, plus parallel "fixed-vocab" input tensors
    (rare words collapsed to an unk index) and, when *use_ngram*, per-token
    character n-gram index matrices.

    NOTE(review): indentation reconstructed from a whitespace-mangled dump;
    nesting of the vocab-building branches in tokenize() should be confirmed
    against the original source. The *max_ngram_len* parameter appears
    unused — the ngram length is derived from the data instead.
    """

    def __init__(self, path, use_ngram=True, max_gram_n=3,
                 add_begin_end=True, max_ngram_len=20, input_freq=None, input_extra_unk="<input_extra_unk>"):
        """
        input_extra_unk: when using fixed input vocab decided by input_freq. If this param is None
        and input_freq is 1. This tag will not be appended to input vocab and thus can tie input
        and output word embedding.
        """
        self.dictionary = Dictionary()       # full output vocabulary
        self.input_dict = Dictionary()       # frequency-thresholded input vocabulary
        self.dict_for_ngram = Dictionary()   # vocabulary without unk-collapsing
        # sometimes unk_tag appears in the corpus
        self.unk_tag = "<unk>"
        # real tags to represents words not appearing in training data for input data
        self.input_extra_unk = input_extra_unk
        self.eos_tag = "<eos>"
        train_path = os.path.join(path, 'train.txt')
        self.counter = Counter()
        # Count training-word frequencies (comprehension used for side effect).
        lines = [
            self.counter.update(line.strip().split())
            for line in open(train_path, 'r').readlines()
        ]
        if input_freq is None:
            # Derive input_freq from the type/token ratio, clamped to [5, 10].
            type_token_ratio = f"{len(self.counter.keys()) / sum(self.counter.values()):5.2f}"
            self.type_token = float(type_token_ratio) * 100
            self.type_token = int(self.type_token)
            if self.type_token <= 5:
                self.input_freq = 5
            elif self.type_token >= 10:
                self.input_freq = 10
            else:
                self.input_freq = self.type_token
            print(f"automatically chosen input_freq: {self.input_freq}")
        else:
            self.input_freq = input_freq
        self.use_ngram = use_ngram
        if self.use_ngram:
            # Size the ngram machinery from the training data statistics.
            avg_len_per_word, most_long_word_len = get_word_len(self.counter, 0.99999)
            most_long_word_len = min(most_long_word_len + 1, 40)
            gram_n = min(max_gram_n, int(avg_len_per_word))
            print(f"max length of word:{most_long_word_len}")
            print(f"n value in n-gram: {gram_n}")
            specials = [self.unk_tag, self.eos_tag]
            self.char_ngrams = CharNgrams(
                gram_n,
                add_begin_end,
                most_long_word_len,
                specials
            )
        if self.use_ngram:
            self.train, self.train_ngram = self.tokenize(
                train_path,
                add_to_vocab=True,
                return_ngram=True
            )
            self.valid, self.valid_ngram = self.tokenize(
                os.path.join(path, 'valid.txt'),
                return_ngram=True
            )
            self.test, self.test_ngram = self.tokenize(
                os.path.join(path, 'test.txt'),
                return_ngram=True
            )
        else:
            self.train = self.tokenize(train_path, add_to_vocab=True)
            self.valid = self.tokenize(os.path.join(path, 'valid.txt'))
            self.test = self.tokenize(os.path.join(path, 'test.txt'))
        # preprare for fixed-vocab input
        self.train_fixed = self.get_fixed_input_data(self.train)
        self.valid_fixed = self.get_fixed_input_data(self.valid)
        self.test_fixed = self.get_fixed_input_data(self.test)
        # Index used for "unseen in input vocab" tokens.
        if self.input_extra_unk is not None and self.input_extra_unk in self.input_dict.word2idx:
            self.input_unseen_idx = self.input_dict.word2idx[self.input_extra_unk]
        else:
            self.input_unseen_idx = self.input_dict.word2idx[self.unk_tag]
        # add ngram input
        if self.use_ngram:
            self.ngram_train, self.ngram_train_len = self.get_ngram_data(self.train_ngram)
            self.ngram_test, self.ngram_test_len = self.get_ngram_data(self.test_ngram)
            self.ngram_valid, self.ngram_valid_len = self.get_ngram_data(self.valid_ngram)

    def get_fixed_input_data(self, data):
        """Map full-vocab word indices to input-vocab indices.

        Words absent from ``input_dict`` fall back to ``unk_tag`` (or
        ``input_extra_unk`` when that tag is in the input vocab).
        """
        fixed_data = torch.zeros(
            data.size(0),
            dtype=data.dtype
        )
        for word_int_index, word_int in enumerate(data, 0):
            word_str = self.dictionary.idx2word[word_int]
            if word_str not in self.input_dict.word2idx:
                word_str = self.unk_tag
                if self.input_extra_unk in self.input_dict.word2idx:
                    word_str = self.input_extra_unk
            fixed_data[word_int_index] = self.input_dict.word2idx[word_str]
        return fixed_data

    def get_ngram_data(self, data):
        """Return (ngram index matrix, real ngram lengths) for a token tensor."""
        ngram_data = torch.zeros(
            data.size(0),
            self.char_ngrams.max_len,
            dtype=data.dtype
        )
        ngram_length = torch.zeros(
            data.size(0),
            dtype=data.dtype
        )
        for word_int_index, word_int in enumerate(data, 0):
            word_str = self.dict_for_ngram.idx2word[word_int]
            ngram_list, real_length = self.char_ngrams.get_ngrams_index(word_str)
            ngram_data[word_int_index] = torch.tensor(ngram_list, dtype=data.dtype)
            ngram_length[word_int_index] = real_length
        return ngram_data, ngram_length

    def id_to_words(self, idx_list):
        """Map a list of full-vocab indices back to word strings."""
        word_list = []
        for idx in idx_list:
            word = self.dictionary.idx2word[idx]
            word_list.append(word)
        return word_list

    def tokenize(self, path, add_to_vocab=False, return_ngram=False):
        """Tokenizes a text file.

        First pass builds the vocabularies (when *add_to_vocab*) and counts
        tokens; second pass emits a flat LongTensor of word indices (and,
        when *return_ngram*, a parallel tensor of ngram-vocab indices).
        Each line gets an <eos> appended.
        """
        assert os.path.exists(path)
        # Add words to the dictionary
        with open(path, 'r', encoding="utf8") as f:
            tokens = 0
            for line in f:
                words = line.split() + [self.eos_tag]
                tokens += len(words)
                for word in words:
                    if add_to_vocab:
                        self.dictionary.add_word(word)
                        # Input vocab keeps only frequent words (plus <eos>).
                        if word in self.counter and self.counter[word] >= self.input_freq:
                            self.input_dict.add_word(word)
                        elif word == self.eos_tag:
                            self.input_dict.add_word(word)
                    if self.use_ngram:
                        self.char_ngrams.add_word(word)
                        self.dict_for_ngram.add_word(word)
        if add_to_vocab:
            # no unk_tag in this corpus
            if self.unk_tag not in self.dictionary.word2idx:
                self.dictionary.add_word(self.unk_tag)
                self.dict_for_ngram.add_word(self.unk_tag)
                self.input_dict.add_word(self.unk_tag)
            else:
                # unk_tag already exists in this corpus. Define a another tag for fixed input
                # vocab. Thus the orginal unk_tag will be treated as normal word.
                if self.input_extra_unk is not None:
                    self.input_dict.add_word(self.input_extra_unk)
        # Tokenize file content
        with open(path, 'r', encoding="utf8") as f:
            ids = torch.LongTensor(tokens)
            if self.use_ngram:
                ids_for_ngram = torch.LongTensor(tokens)
            token = 0
            for line in f:
                words = line.split() + [self.eos_tag]
                for word in words:
                    if self.use_ngram:
                        ids_for_ngram[token] = self.dict_for_ngram.word2idx[word]
                    if word not in self.dictionary.word2idx:
                        word = self.unk_tag
                    ids[token] = self.dictionary.word2idx[word]
                    token += 1
        if return_ngram:
            return ids, ids_for_ngram
        else:
            return ids
def test_charngrams():
    """Ad-hoc manual check of CharNgrams (prints, no assertions)."""
    cn = CharNgrams(2, True, 8)
    cn.add_word("happniess")
    cn.add_word("am")
    print("dict")
    print(cn.chars2idx)
    print(cn.idx2chars)
    print(f"max_len: {cn.max_len}")
    word = "happniess"
    print(f"word {word} ngrams")
    print(cn.get_ngrams(word))
    word = "happysdfsdfsdfdsf"
    print(f"word {word} ngrams index")
    print(cn.get_ngrams_index(word))
if __name__ == "__main__":
# test_charngrams()
test_get_word_len()
|
[
"yukunfg@gmail.com"
] |
yukunfg@gmail.com
|
bebdae1318fbbbe9bc0e9c03a7ef871a75950c2d
|
cbdef2e8ed259adc4653ade34db12d8bcc0cea9f
|
/dominion/cards/Card_Astrolabe.py
|
3671e3165a04e13e64994d68ab481283b3dc43d6
|
[] |
no_license
|
dwagon/pydominion
|
8dd5afef8ec89c63ade74c4ae6c7473cd676799f
|
545709f0a41529de74f33aa83b106c456900fa5b
|
refs/heads/main
| 2023-08-29T10:02:26.652032
| 2023-08-23T02:25:00
| 2023-08-23T02:25:00
| 18,776,204
| 1
| 0
| null | 2023-08-23T02:25:02
| 2014-04-14T20:49:28
|
Python
|
UTF-8
|
Python
| false
| false
| 1,941
|
py
|
#!/usr/bin/env python
""" http://wiki.dominionstrategy.com/index.php/Astrolabe"""
import unittest
from dominion import Card, Game, Piles
###############################################################################
class Card_Astrolabe(Card.Card):
    """Astrolabe: Treasure-Duration from Seaside, cost 3.

    Grants +$1 and +1 Buy when played and again at the start of the next
    turn (via duration()).
    """

    def __init__(self):
        Card.Card.__init__(self)
        self.cardtype = [Card.CardType.TREASURE, Card.CardType.DURATION]
        self.base = Card.CardExpansion.SEASIDE
        self.desc = "Now and at the start of your next turn: $1, +1 Buy"
        self.name = "Astrolabe"
        self.buys = 1   # +1 Buy on play
        self.coin = 1   # +$1 on play
        self.cost = 3

    def duration(self, game, player):
        """Start-of-next-turn effect: +1 coin, +1 buy"""
        player.coins.add(1)
        player.buys.add(1)
###############################################################################
class Test_Astrolabe(unittest.TestCase):
    """Test Astrolabe"""

    def setUp(self):
        # One-player test game with Astrolabe in the kingdom; put one in hand.
        self.g = Game.TestGame(numplayers=1, initcards=["Astrolabe"])
        self.g.start_game()
        self.plr = self.g.player_list(0)
        self.card = self.g["Astrolabe"].remove()
        self.plr.add_card(self.card, Piles.HAND)

    def test_playcard(self):
        """Play an astrolabe"""
        self.plr.play_card(self.card)
        # On play: +1 buy, +1 coin; card sits in the duration pile.
        self.assertEqual(self.plr.buys.get(), 2)
        self.assertEqual(self.plr.coins.get(), 1)
        self.assertEqual(self.plr.piles[Piles.DURATION].size(), 1)
        self.plr.end_turn()
        self.plr.start_turn()
        # Next turn: effect repeats and the card moves to the played pile.
        self.assertEqual(self.plr.coins.get(), 1)
        self.assertEqual(self.plr.piles[Piles.DURATION].size(), 0)
        self.assertEqual(self.plr.piles[Piles.PLAYED].size(), 1)
        self.assertEqual(self.plr.piles[Piles.PLAYED][-1].name, "Astrolabe")
        self.assertEqual(self.plr.buys.get(), 2)
###############################################################################
if __name__ == "__main__": # pragma: no cover
unittest.main()
# EOF
|
[
"dougal.scott@gmail.com"
] |
dougal.scott@gmail.com
|
cf636381529d19dad2768d8600e76d832662df5c
|
991cd70073c162f637fbec3a9e921707aa434b8e
|
/opentelemetry-propagator-gcp/src/opentelemetry/propagators/cloud_trace_propagator/__init__.py
|
8470ad90390c3c75818db36d8aebc5886c723d30
|
[
"Apache-2.0"
] |
permissive
|
GoogleCloudPlatform/opentelemetry-operations-python
|
700a59c954cd18ae6428032339d01908580a4f2d
|
b0ca7decb6a5bb01409822e746b7463f4a7a76ba
|
refs/heads/main
| 2023-08-18T11:24:59.282098
| 2023-08-15T17:02:54
| 2023-08-15T17:02:54
| 244,484,614
| 49
| 42
|
Apache-2.0
| 2023-09-01T14:42:48
| 2020-03-02T22:00:22
|
Python
|
UTF-8
|
Python
| false
| false
| 4,960
|
py
|
# Copyright 2021 The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cloud Trace Span Propagator for X-Cloud-Trace-Context format.
Usage
-----
.. code-block:: python
from opentelemetry.propagate import set_global_textmap
from opentelemetry.propagators.cloud_trace_propagator import (
CloudTraceFormatPropagator,
)
# Set the X-Cloud-Trace-Context header
set_global_textmap(CloudTraceFormatPropagator())
Auto-instrumentation
--------------------
This exporter can also be used with the :envvar:`OTEL_PROPAGATORS` environment variable as
``OTEL_PROPAGATORS=gcp_trace``.
This also works with `OpenTelemetry auto-instrumentation
<https://opentelemetry.io/docs/instrumentation/python/automatic/>`_:
.. code-block:: sh
opentelemetry-instrument --propagator gcp_trace <command> <args>
API
---
"""
import re
import typing
import opentelemetry.trace as trace
from opentelemetry.context.context import Context
from opentelemetry.propagators import textmap
from opentelemetry.trace.span import SpanContext, TraceFlags, format_trace_id
_TRACE_CONTEXT_HEADER_NAME = "x-cloud-trace-context"
_TRACE_CONTEXT_HEADER_FORMAT = r"(?P<trace_id>[0-9a-f]{32})\/(?P<span_id>[\d]{1,20})(;o=(?P<trace_flags>\d+))?"
_TRACE_CONTEXT_HEADER_RE = re.compile(_TRACE_CONTEXT_HEADER_FORMAT)
_FIELDS = {_TRACE_CONTEXT_HEADER_NAME}
class CloudTraceFormatPropagator(textmap.TextMapPropagator):
    """Propagates span context via the ``x-cloud-trace-context`` header.

    Injects the current ``SpanContext`` into a carrier using the Google
    Cloud Trace header format, and extracts a ``SpanContext`` from carriers
    that use that format.
    """

    @staticmethod
    def _get_header_value(
        getter: textmap.Getter,
        carrier: textmap.CarrierT,
    ) -> typing.Optional[str]:
        # Fast path: header stored under the all-lowercase canonical name.
        values = getter.get(carrier, _TRACE_CONTEXT_HEADER_NAME)
        if values:
            return values[0]
        # Slow path: scan the carrier's keys for a mixed-case variant.
        candidates = (
            key
            for key in getter.keys(carrier)
            if key.lower() == _TRACE_CONTEXT_HEADER_NAME
        )
        for key in candidates:
            values = getter.get(carrier, key)
            if values:
                return values[0]
        return None

    def extract(
        self,
        carrier: textmap.CarrierT,
        context: typing.Optional[Context] = None,
        getter: textmap.Getter = textmap.default_getter,
    ) -> Context:
        """Return *context* augmented with the remote span parsed from the
        ``x-cloud-trace-context`` header, or *context* unchanged when the
        header is absent, malformed, or carries an invalid trace/span id."""
        if context is None:
            context = Context()
        raw_header = self._get_header_value(getter, carrier)
        if not raw_header:
            return context
        parsed = re.fullmatch(_TRACE_CONTEXT_HEADER_RE, raw_header)
        if parsed is None:
            return context
        trace_id_hex = parsed.group("trace_id")
        span_id_dec = parsed.group("span_id")
        flags = parsed.group("trace_flags") or "0"
        # An all-zero trace id or a zero span id is invalid per the format.
        if trace_id_hex == "0" * 32 or int(span_id_dec) == 0:
            return context
        remote_context = SpanContext(
            trace_id=int(trace_id_hex, 16),
            span_id=int(span_id_dec),
            is_remote=True,
            trace_flags=TraceFlags(flags),
        )
        return trace.set_span_in_context(
            trace.NonRecordingSpan(remote_context), context
        )

    def inject(
        self,
        carrier: textmap.CarrierT,
        context: typing.Optional[Context] = None,
        setter: textmap.Setter = textmap.default_setter,
    ) -> None:
        """Write the current span's context into *carrier* in the
        ``<trace_id>/<span_id>;o=<sampled>`` wire format."""
        span_context = trace.get_current_span(context).get_span_context()
        if span_context == trace.INVALID_SPAN_CONTEXT:
            return
        sampled_flag = int(span_context.trace_flags.sampled)
        header = "{}/{};o={}".format(
            format_trace_id(span_context.trace_id),
            span_context.span_id,
            sampled_flag,
        )
        setter.set(carrier, _TRACE_CONTEXT_HEADER_NAME, header)

    @property
    def fields(self) -> typing.Set[str]:
        return _FIELDS
class CloudTraceOneWayPropagator(CloudTraceFormatPropagator):
    """Extract-only variant of :class:`CloudTraceFormatPropagator`.

    Reads Google Cloud Trace context from incoming carriers (via the
    inherited ``extract``) but never writes the header back out, so a
    composite propagator can re-emit context in a different wire format.
    """

    def inject(
        self,
        carrier: textmap.CarrierT,
        context: typing.Optional[Context] = None,
        setter: textmap.Setter = textmap.default_setter,
    ) -> None:
        # Intentionally a no-op: this propagator only extracts.
        return

    @property
    def fields(self) -> typing.Set[str]:
        # Advertise no fields, since inject() writes nothing.
        return set()
|
[
"noreply@github.com"
] |
GoogleCloudPlatform.noreply@github.com
|
d1656ae9e0c55bf483e9bb2b3f5bc11f49b140f0
|
35076aa3d703bd10dcc0652e9da05b0e4e1aebbe
|
/models/ingredients_db.py
|
970ac2a5e1f0960677160546c769375e7a770439
|
[] |
no_license
|
dfortuna/Leftoven_Backend
|
d33bf23775b786c169c620623d63e454e35fbaec
|
92b5d3aa20d5866d77259119dab4148ed74b7628
|
refs/heads/master
| 2020-04-19T08:49:44.390403
| 2019-01-29T04:25:38
| 2019-01-29T04:25:38
| 168,090,575
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,473
|
py
|
from db import db
class Ingredient(db.Model):
    """ORM model for an ingredient and its classifier-training counter.

    Columns:
        id: surrogate primary key.
        ingredient_label: unique, human-readable ingredient name.
        classifier_trained: number of times the classifier has been trained
            on this ingredient.
    """
    __tablename__ = 'ingredient'
    id = db.Column(db.Integer, primary_key=True)
    ingredient_label = db.Column(db.String(80), unique=True, nullable=False)
    classifier_trained = db.Column(db.Integer, nullable=False)

    def __repr__(self):
        return '<Ingredient %r>' % self.ingredient_label

    def __init__(self, ingredient_label, classifier_trained):
        self.ingredient_label = ingredient_label
        self.classifier_trained = classifier_trained

    # INSERT **********************************************************************************
    def insert(self):
        """Persist this instance: add it to the session and commit."""
        db.session.add(self)
        db.session.commit()

    # SELECT **********************************************************************************
    @classmethod
    def select_ingredient_name(cls, _name):
        """Return the ingredient whose label equals *_name*, or None."""
        return cls.query.filter_by(ingredient_label=_name).first()

    @classmethod
    def select_by_id(cls, _id):
        """Return the ingredient with primary key *_id*, or None."""
        return cls.query.filter_by(id=_id).first()

    @classmethod
    def select_by_classifier_trained(cls, _classifier_trained):
        """Return a JSON-serializable dict of every ingredient whose
        training count is >= *_classifier_trained*."""
        ingredients_result = cls.query.filter(
            Ingredient.classifier_trained >= _classifier_trained).all()
        ingredient_list = [
            {'ingredient_label': i.ingredient_label,
             'classifier_trained': i.classifier_trained}
            for i in ingredients_result
        ]
        return {"Ingredient": ingredient_list}

    # UPDATE **********************************************************************************
    @classmethod
    def update_classifier_trained(cls, name):
        """Increment the training counter of the ingredient named *name*.

        BUG FIX: the original ended with ``db.session.commit`` (no call),
        so the increment was never persisted; it is now actually invoked.
        """
        ingredient = db.session.query(Ingredient).filter_by(
            ingredient_label=name).first()
        ingredient.classifier_trained = ingredient.classifier_trained + 1
        db.session.commit()

    # DELETE **********************************************************************************
    @classmethod
    def delete_row(cls, row):
        """Delete *row* from the database and commit."""
        db.session.delete(row)
        db.session.commit()
|
[
"denisfortuna@Deniss-MacBook-Pro.local"
] |
denisfortuna@Deniss-MacBook-Pro.local
|
c8d1be0c9067f432037453243688615f33aa2e12
|
171e781bf129a647df520ac04cfeb2343fab1c2d
|
/skin-lesion-detection-service/test/test_lesion_detection_model.py
|
5607d00d4afd5b0b05f625f9b5ae002b1686c0f8
|
[
"MIT"
] |
permissive
|
Joro97/microservices-hospital-webapp
|
f21624e093c04b2fc8a639e30a21d4e14a96bf87
|
0300d115f15b33832f1035c609945bd47685f06f
|
refs/heads/master
| 2022-12-09T13:40:30.878751
| 2020-01-06T08:48:36
| 2020-01-06T08:48:36
| 160,716,929
| 13
| 2
|
MIT
| 2022-12-08T01:30:56
| 2018-12-06T18:42:34
|
CSS
|
UTF-8
|
Python
| false
| false
| 2,648
|
py
|
import os
import unittest
import src.model.lesion_detection_model as ldm
from keras.preprocessing import image
class LesionDetectionTest(unittest.TestCase):
    """Smoke tests: run the lesion-detection model on one sample image per
    lesion class and print the raw prediction plus the top label/probability."""

    RESOURCES_PATH = os.path.abspath(os.path.dirname(__file__))

    def setUp(self):
        self._model = ldm.LesionDetectionModel()

    def tearDown(self):
        self._model = None

    def _load_image(self, path):
        # The model expects 224x224 input images.
        return image.load_img(path, target_size=(224, 224))

    def _run_case(self, filename, show_label=True):
        """Predict on resources/<filename>; print the raw result, the top
        label (optionally) and its probability."""
        img = self._load_image(
            os.path.join(self.RESOURCES_PATH, 'resources/%s' % filename))
        result = self._model.predict(img)
        print(result)
        label, probability = self._model.get_most_probable_result(result)
        if show_label:
            print(label)
        print(probability)

    def test_nv_lesion(self):
        self._run_case('nv.jpg')

    def test_bcc_lesion(self):
        self._run_case('bcc.jpg')

    def test_akiec_lesion(self):
        self._run_case('akiec.jpg')

    def test_bkl_lesion(self):
        self._run_case('bkl.jpg')

    def test_df_lesion(self):
        self._run_case('df.jpg')

    def test_mel_lesion(self):
        self._run_case('mel.jpg')

    def test_vasc_lesion(self):
        # The original vasc case printed only the probability, not the label.
        self._run_case('vasc.jpg', show_label=False)
# Allow running this test module directly: ``python test_lesion_detection_model.py``.
if __name__ == '__main__':
    unittest.main()
|
[
"tsvetkovt@tsvetkovt-a02.vmware.com"
] |
tsvetkovt@tsvetkovt-a02.vmware.com
|
c81d09b9d8607b73e3502e7e3e7c8a23a362894d
|
8d8e886fd873d3d2955628f39263661a6d2140b1
|
/mediplus_backend/project/settings/production.py
|
ea53cf2c3e53aa9c78f06d6d7eb676fe7a793be2
|
[] |
no_license
|
mikietechie/mediplus_backend
|
187a9e3ccc54f2a6a47fc6cfda12bbc0f7ab1754
|
16cb5f1f85c54fcaf4b2d02cd4963fd697581501
|
refs/heads/master
| 2023-07-12T17:25:35.727586
| 2021-08-20T14:44:06
| 2021-08-20T14:44:06
| 394,951,846
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 512
|
py
|
from .base import *
# Hosts/origins allowed to reach the production deployment.
ALLOWED_HOSTS = ["mediplus.co.zw"]
CORS_ORIGIN_WHITELIST = ["mediplus.co.zw"]

# Production database configuration, overridable via environment variables.
# BUG FIX: every os.getenv call previously looked up the SAME variable name
# ('mediplus_db_password'), so NAME/USER/HOST/PORT could never be configured
# independently. Each setting now reads its own variable; the hard-coded
# fallback defaults are unchanged.
# NOTE(review): credentials committed as defaults should be rotated and
# supplied only via the environment.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',
        'NAME': os.getenv('mediplus_db_name', 'a1mtwstdb'),
        'USER': os.getenv('mediplus_db_user', 'postgres'),
        'PASSWORD': os.getenv('mediplus_db_password', 'mediplus1234'),
        'HOST': os.getenv('mediplus_db_host', '127.0.0.1'),
        'PORT': os.getenv('mediplus_db_port', '5432')
    }
}
|
[
"mzinyoni7@outlook.com"
] |
mzinyoni7@outlook.com
|
69f9bacc67a6e72c89930a9ce1ec7fff866b1469
|
9743d5fd24822f79c156ad112229e25adb9ed6f6
|
/xai/brain/wordbase/verbs/_outplay.py
|
3b5a7058b53f990bc21deadc6ccbc62f475177fb
|
[
"MIT"
] |
permissive
|
cash2one/xai
|
de7adad1758f50dd6786bf0111e71a903f039b64
|
e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6
|
refs/heads/master
| 2021-01-19T12:33:54.964379
| 2017-01-28T02:00:50
| 2017-01-28T02:00:50
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 356
|
py
|
#calss header
class _OUTPLAY():
def __init__(self,):
self.name = "OUTPLAY"
self.definitions = [u'to play a game more cleverly and successfully than another person or team: ']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'verbs'
def run(self, obj1 = [], obj2 = []):
return self.jsondata
|
[
"xingwang1991@gmail.com"
] |
xingwang1991@gmail.com
|
ac880b2de85169f05e40a1abe6a70c57ddef7333
|
3411e7d0d8876089668735a37208dda295d6ee08
|
/src/orig/MLiP_setup.py
|
4bbd44ddb2f87211ee7180755bb7d245d67bc887
|
[] |
no_license
|
Wassasin/atpscheduler
|
eddfa77b639828e99acb706136c5f22820d978f9
|
aad64f2da546461071b6e682298d73832809f043
|
refs/heads/master
| 2021-01-10T21:08:35.903181
| 2014-04-21T21:19:54
| 2014-04-21T21:19:54
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,242
|
py
|
'''
Created on Mar 23, 2014
@author: Daniel Kuehlwein
'''
import logging
import os
from readData import get_e_features
from Strategy import load_strategies
from random import shuffle
PATH = '/scratch/kuehlwein/males/E'
def create_data_file(fileName,problemList):
    """Write a '#'-separated data file: a header line with feature names and
    strategy names, then one line per problem with its feature vector and
    each strategy's solve time (-1 when unsolved).

    NOTE(review): relies on the module-level globals ``fLength``, ``names``,
    ``featureDict`` and ``stratDict`` that are assigned further down in this
    script, so it must only be called after those are populated.
    """
    with open(fileName,'w') as OS:
        # Header line: " #f0,f1,...#<comma-joined strategy names>"
        OS.write(' #')
        OS.write(','.join(['f'+str(i) for i in range(fLength)]))
        OS.write('#')
        OS.write(','.join(names))
        OS.write('\n')
        for p in problemList:
            # One row per problem: "<name>#<features>#<strategy times>"
            OS.write(p+'#')
            OS.write(','.join([str(f) for f in featureDict[p]]))
            OS.write('#')
            p_extended = '/scratch/kuehlwein/TPTP-v5.4.0/' + p
            stratTimes = []
            for n in names:
                s = stratDict[n]
                try:
                    stratTimes.append(str(s.solvedProblems[p_extended]))
                except:
                    # Bare except: any lookup failure is treated as "unsolved".
                    stratTimes.append('-1')
            OS.write(','.join(stratTimes))
            OS.write('\n')
logging.basicConfig(level=logging.INFO,
                    format='%% %(message)s',
                    datefmt='%d-%m %H:%M:%S')
logger = logging.getLogger('MLiP Setup')
# Create Train/Test problems: read the CASC24 training problem list and
# shuffle it, then split 900 problems for training and the rest for testing.
problemFile = os.path.join(PATH,'data','CASC24Training')
problems = []
with open(problemFile,'r') as pFile:
    for p in pFile:
        problems.append((p.strip()))
shuffle(problems)
problemsTrain = problems[:900]
problemsTest = problems[900:]
# Load the E strategies and index them by name for create_data_file.
stratFolder = os.path.join(PATH,'results')
strategies = load_strategies(stratFolder)
names = sorted([s.name for s in strategies])
stratDict = {}
for s in strategies:
    stratDict[s.name] = s
# Compute the feature vector of every problem; fLength is the (assumed
# uniform) feature-vector length, taken from the first problem.
featureDict = {}
#problemsTrain = problemsTrain[:30]
for p in problems:
    featureDict[p] = get_e_features(p)
fLength = len(featureDict[problems[0]])
# Emit the train/test data files plus an example schedule and a bare
# feature file for the test split.
create_data_file('MLiP_train',problemsTrain)
create_data_file('MLiP_test',problemsTest)
with open('MLiP_train_example_schedule','w') as OS:
    for p in problemsTrain:
        OS.write(p+'#NewStrategy101164:150.0,NewStrategy101980:150.0')
        OS.write('\n')
with open('MLiP_test_features','w') as OS:
    for p in problemsTest:
        OS.write(p+'#')
        OS.write(','.join([str(f) for f in featureDict[p]]))
        OS.write('\n')
|
[
"git@woutergeraedts.nl"
] |
git@woutergeraedts.nl
|
79f5aef07082705f1d3fab093de1f12036af1c58
|
c092cc56f30507e80db8e647f529c10e7f9dc9e1
|
/app/venv/bin/easy_install-3.8
|
ea67c002831e03e40d532b8aab2c1774d52cd47e
|
[] |
no_license
|
admin341/INSTA341
|
bc023541029e6f7cc8b75f0d31265446e236566b
|
475f35e87391786a4fee249cadff11dc21435418
|
refs/heads/develop
| 2022-04-13T17:22:05.757339
| 2020-03-27T23:22:10
| 2020-03-27T23:22:10
| 237,218,348
| 0
| 2
| null | 2020-03-24T03:05:43
| 2020-01-30T13:25:30
|
Python
|
UTF-8
|
Python
| false
| false
| 451
|
8
|
#!/Users/zubairnurie/Desktop/INSTA341/app/venv/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==40.8.0','console_scripts','easy_install-3.8'
# Auto-generated setuptools console-script wrapper; do not edit by hand.
__requires__ = 'setuptools==40.8.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
    # Strip the '-script.py(w)' / '.exe' suffix setuptools adds to argv[0]
    # on some platforms, then dispatch to the real entry point.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('setuptools==40.8.0', 'console_scripts', 'easy_install-3.8')()
    )
|
[
"mznurie@msn.com"
] |
mznurie@msn.com
|
ab96fd42f458840cbde4665fa536683c1496eebb
|
08f8f1ab5d16bff856381467de03502e353ff9c5
|
/results/views.py
|
1983364fadf8dc4e37d7d54c8daaaa7db92dfaa0
|
[] |
no_license
|
kevin8519/Python_django_Project
|
934bbb3923e7af4cb7f1c8bcfe8b7a02b18b8032
|
cb8cc8b3f521053d3acc1e0a498a9f63abed2651
|
refs/heads/master
| 2021-01-12T08:46:01.303002
| 2016-12-16T20:00:13
| 2016-12-16T20:00:13
| 76,681,622
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,514
|
py
|
from django.contrib.auth.decorators import login_required, permission_required
from django.contrib.auth.mixins import LoginRequiredMixin,\
PermissionRequiredMixin
from django.db import connection
from django.http.response import HttpResponse
from django.shortcuts import render, render_to_response
from django.template.context import RequestContext
from django.utils.decorators import method_decorator
from django.views.generic.base import View
from django.views.generic.detail import DetailView
from django.views.generic.edit import CreateView, DeleteView, UpdateView
from django.views.generic.list import ListView
from results.forms import Studentform, Markform
from results.models import Student, Marks
# Create your views here.
def dear(request):
    """Plain-text greeting view that echoes the caller's User-Agent header."""
    user_agent = request.META['HTTP_USER_AGENT']
    return HttpResponse('welcome to django kevin' + user_agent)
def display(request):
    """Insert a person record built from GET parameters into
    ``projectweb.webproject``.

    SECURITY FIX: the original concatenated raw GET values straight into the
    SQL string, a straightforward SQL-injection hole. The values are now
    bound as query parameters so the driver escapes them. The debug print
    statements were removed along with the interpolated SQL string.
    """
    name = request.GET['name']
    city = request.GET['city']
    age = request.GET['age']
    phone = request.GET['phone']
    email = request.GET['email']
    sql = ("INSERT INTO projectweb.webproject (name, city, age, phone, email) "
           "VALUES (%s, %s, %s, %s, %s)")
    cursor = connection.cursor()
    cursor.execute(sql, [name, city, age, phone, email])
    return HttpResponse(" form uploaded done")
def display_projectweb(request,id):
    """Render one ``webproject`` row (looked up by *id*) as an HTML table.

    SECURITY FIX: *id* comes from the URL; it is now bound as a query
    parameter instead of being concatenated into the SQL string.
    """
    sql = "select name, city, age, phone, email from webproject where id=%s"
    cursor = connection.cursor()
    cursor.execute(sql, [id])
    result = cursor.fetchall()
    my_html = ''
    # NOTE(review): if several rows match, only the LAST one is rendered --
    # behavior preserved from the original implementation.
    for r in result:
        my_html = "<html><head><style>body {background-color: powderblue;}th,td{color:red;}</style></head><body><table border=3><tr><th><label>Name</label></th><td>"+r[0]+"</td></tr><tr><th><label>City</label></th><td>"+r[1]+"</td></tr><tr><th><label>Age</label></th><td>"+str(r[2])+"</td></tr><tr><th><label>Phone</label></th><td>"+str(r[3])+"</td> </tr><tr><th><label>Email</label></th><td>"+r[4]+"</td></tr></table> </body></html>"
    return HttpResponse(my_html)
def index(request):
    """Render the bare upload form."""
    return render_to_response('my_form.html', {}, RequestContext(request))
def index1(request):
    """Render the student registration page with an unbound form."""
    context = {'student_form': Studentform()}
    return render(request, 'student_registration.html', context)
@login_required
def registrationstudent(request):
    """Create a Student from the POSTed form fields and persist it."""
    student = Student()
    # Copy each posted field onto the model in the original order.
    for field in ('name', 'city', 'age', 'gender', 'address', 'phone'):
        setattr(student, field, request.POST[field])
    student.save()
    return HttpResponse('User successfully registered')
class Greeting(View):
    """Class-based view that returns a fixed greeting string on GET."""

    greet = 'hi man how are you'

    def get(self, request):
        return HttpResponse(self.greet)
@login_required
def index2(request):
    """Landing page shown after login."""
    return render(request, 'welcome.html', {})
def searh(request):
    """Render the search page. (Function-name typo preserved: the name is
    referenced by the URLconf.)"""
    return render(request, 'search.html', {})
@login_required
def stureg(request):
    """Render the class-based student registration page with an unbound form."""
    context = {'student_form': Studentform()}
    return render(request, 'student_registrationclass.html', context)
# Generic class-based CRUD views for Student and Marks.
class Studentcreate(LoginRequiredMixin,CreateView):
    """Login-protected create form for Student records."""
    model=Student
    success_url='/welcome/stureg/student_list/'
    form_class=Studentform
class Studentlist(PermissionRequiredMixin,ListView):
    """List of students; requires the 'results.change_student' permission."""
    model=Student
    context_object_name='student_list'
    template_name='student_list.html'
    permission_required='results.change_student'
class StudentDetail(DetailView):
    """Detail page for a single student (default template)."""
    model=Student
    #template_name='student_list.html'
class DeleteStudent(DeleteView):
    """Delete a student addressed by the 'id' URL kwarg."""
    model=Student
    success_url='/welcome/stureg/student_list/'
    def get_object(self, queryset=None):
        # NOTE(review): Student.objects.get raises DoesNotExist (500) for an
        # unknown id; get_object_or_404 would be friendlier -- confirm intent.
        obj=Student.objects.get(id=self.kwargs['id'])
        return obj
class StudentUpdate(UpdateView):
    """Edit form for a student addressed by the 'id' URL kwarg."""
    model=Student
    success_url='/welcome/stureg/student_list/'
    form_class=Studentform
    def get_object(self, queryset=None):
        # Same direct .get() lookup as DeleteStudent (see note there).
        obj=Student.objects.get(id=self.kwargs['id'])
        return obj
class Markcreate(CreateView):
    """Create form for Marks records."""
    model=Marks
    success_url='/welcome/stureg/results/createmarks/'
    form_class=Markform
def ajaxstudentresults(request):
    """AJAX endpoint: look up a student's marks by the 'search_text' GET
    parameter (a student id) and render them as an HTML fragment.

    NOTE(review): when the request is not AJAX, or 'search_text' is absent,
    the view falls through without returning an HttpResponse -- Django will
    raise on a None return. Confirm whether the URLconf guarantees AJAX-only
    access before relying on this.
    """
    if request.is_ajax():
        querry_string=request.GET.get('search_text')
        if querry_string is not None:
            results=Marks.objects.filter(studentmark_id=querry_string).order_by('subject')
            try:
                studentmark=Student.objects.get(pk=querry_string)
                studentmark=studentmark.name
            except:
                # Bare except: any lookup failure renders an empty name.
                studentmark=''
            template='results/student_results.html'
            data={
                'results':results,
                'student_name':studentmark
            }
            return render_to_response(template, data, RequestContext(request))
def contactus(request):
    """Render the static contact-us page."""
    return render(request, 'contactus.html', {})
|
[
"thanakevin85@gmail.com"
] |
thanakevin85@gmail.com
|
a00e528dba063a65c21e0ebd0fe662e0500a3217
|
58ee1dc37b57e0b4f06cf383c6a9e0654f490150
|
/pypy3-armv6h/lilac.py
|
cdd21f6434f234e6e816d4abc95161d6f4918ff3
|
[] |
no_license
|
MikeyBaldinger/arch4edu
|
f3af87ef3a8d4cd78fde7e0ef75658c17dbe8c06
|
c1775bf7fe0ffc87f3c8b4109fb1e8acde12a430
|
refs/heads/master
| 2022-12-23T16:40:55.513537
| 2020-09-28T21:00:59
| 2020-09-28T21:00:59
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 420
|
py
|
#!/usr/bin/env python3
from lilaclib import *
maintainers = [{'github': 'petronny', 'email': 'Jingbei Li <i@jingbei.li>'}]
update_on = [{'archpkg': 'pypy3'}]
build_prefix = 'extra-armv6h'
time_limit_hours = 24
def pre_build():
download_official_pkgbuild('pypy3')
add_arch(['armv6h'])
def post_build():
git_add_files('PKGBUILD')
git_commit()
if __name__ == '__main__':
single_main('extra-x86_64')
|
[
"i@jingbei.li"
] |
i@jingbei.li
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.