| blob_id<br>stringlengths 40–40 | directory_id<br>stringlengths 40–40 | path<br>stringlengths 3–281 | content_id<br>stringlengths 40–40 | detected_licenses<br>listlengths 0–57 | license_type<br>stringclasses 2 values | repo_name<br>stringlengths 6–116 | snapshot_id<br>stringlengths 40–40 | revision_id<br>stringlengths 40–40 | branch_name<br>stringclasses 313 values | visit_date<br>timestamp[us] | revision_date<br>timestamp[us] | committer_date<br>timestamp[us] | github_id<br>int64 18.2k–668M ⌀ | star_events_count<br>int64 0–102k | fork_events_count<br>int64 0–38.2k | gha_license_id<br>stringclasses 17 values | gha_event_created_at<br>timestamp[us] | gha_created_at<br>timestamp[us] | gha_language<br>stringclasses 107 values | src_encoding<br>stringclasses 20 values | language<br>stringclasses 1 value | is_vendor<br>bool 2 classes | is_generated<br>bool 2 classes | length_bytes<br>int64 4–6.02M | extension<br>stringclasses 78 values | content<br>stringlengths 2–6.02M | authors<br>listlengths 1–1 | author<br>stringlengths 0–175 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
84bfeef7e85c19ed296904133c14e8638368f918
|
95b4aba4d9b5652eea5c17849205b8fbe484e053
|
/nli/RUN_ERROR_ANALYSIS.py
|
e2fd9bfe8ab11941fcb14a06fc33c074107cc49e
|
[] |
no_license
|
vasudev13/discharge-summary-albert
|
eb8811c2ca5792bc96aa4a60107ea35347f4b1ef
|
332d07c94cac30e648b53e7aecbe2e8a67f19897
|
refs/heads/main
| 2023-05-03T16:27:02.068486
| 2021-05-23T19:26:57
| 2021-05-23T19:26:57
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,465
|
py
|
"""This script is a runner script for executing code during batch jobs."""
import torch
import pytorch_lightning as pl
import transformers
import pandas as pd
import argparse
from path import Path
from config import CONFIG
from nli_finetune import NLIFineTuningModel
from mnli import mnli_df
from error_analysis import get_error_samples
parser = argparse.ArgumentParser(description='')
parser.add_argument('--model-checkpoint', default='./checkpoint/', type=Path,
metavar='DIR', help='path to model checkpoint')
args = parser.parse_args()
# print(args.model_checkpoint), should be similar to something like Path('/checkpoints/biobert_v1-epoch=00-val_loss=0.55.ckpt')
trained_model = NLIFineTuningModel.load_from_checkpoint(checkpoint_path=args.model_checkpoint, # model checkpoint path
num_labels=CONFIG['NUM_CLASSES'],
model_name_or_path=CONFIG['MODEL_NAME_OR_PATH'])
trained_model.freeze()
# Unfortunately cannot save tokenizer
tokenizer = transformers.AutoTokenizer.from_pretrained(
CONFIG['MODEL_NAME_OR_PATH'])
train_df = mnli_df('train')
error_train = get_error_samples(trained_model,
train_df,
tokenizer # reusing tokenizer object
)
error_train.to_csv(
f"CONFIG['MODEL_SAVE_NAME']_error_samples.csv", index=False)
|
[
"vasudev.w13@gmail.com"
] |
vasudev.w13@gmail.com
|
4946710f7142d36c5c8771b62155055faa7fdda0
|
66d323be215fb83fbe3258aac7589a8987b8a2ae
|
/curves.py
|
1d223dfc966eb16facd712a62cef23f055f908aa
|
[] |
no_license
|
strmwalker/sigmoid-knapsack-solver
|
c3d3fb38ccf03c01e0deb551494cb5708ba401e6
|
45878000297eded3c305fca934e788b37cd6719c
|
refs/heads/master
| 2020-12-18T14:55:35.197410
| 2018-05-30T17:56:41
| 2018-05-30T17:56:41
| 235,427,764
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 10,945
|
py
|
__all__ = ["Sigmoid", "MixedCurve", "Knapsack"]
from functools import partial
from numpy import array, exp, ndarray, linspace
from pandas import Series, DataFrame
from scipy.optimize import minimize
def basic(x, cap=1, ec50=0.5, steep=0, price=1, multiplier=1):
"""
S-shaped curve similar to sigmoid function. Depending on parameters,
can be fully concave given x > 0, or have convex and concave parts.
:param x: input array
:param cap: max level for function.
:param ec50: half-efficiency point. Moves curve horizontally, and serves as bend point.
:param steep: slope coefficient. If < 1, curve will be fully concave on interval (0, +Inf), if > 1, curve will be
concave before ec50 and convex after.
:param price: price
:param multiplier: model coefficient for curve
:return: float with same dimensions as x.
"""
if isinstance(x, int) or isinstance(x, float):
return (cap / (1 + (x / price / cap / ec50) ** (-steep))) * multiplier if x != 0 else 0
elif isinstance(x, Series) or isinstance(x, ndarray):
if 0 not in x:
return array(cap / (1 + (x / price / cap / ec50) ** (-steep))) * multiplier
else:
return array([(cap / (1 + (y / price / cap / ec50) ** (-steep))) * multiplier if y != 0 else 0 for y in x])
#
# return (cap / (1 + (x / price / cap / ec50) ** (-steep))) * multiplier
def basic_derivative(x, cap, ec50, steep, price=1, multiplier=1):
numerator = cap * steep * multiplier * (x / (cap * ec50 * price)) ** steep
denominator = x * (1 + (x / (cap * ec50 * price)) ** steep) ** 2
return numerator / denominator
def log(x, cap, ec50, steep, price=1, multiplier=1):
"""
S-shaped curve based on exponential function
:param x: input array
:param cap: max level for function.
:param ec50: half-efficiency point. Moves curve horizontally, and
serves as bend point.
:param steep: slope coefficient. If < 1, curve will be fully concave on
interval (0, +Inf), if > 1, curve will be concave before ec50 and convex
after.
:param price:
:param multiplier:
:return: float with same dimensions as x.
"""
return (cap / (1 + exp(-steep * x / price / cap - ec50)) - cap / (1 + exp(steep * ec50))) * multiplier
def log_derivative(x, cap, ec50, steep, price=1, multiplier=1):
numerator = steep * multiplier * exp(steep * x / price / cap + ec50)
denominator = (exp(steep * x / price / cap + ec50) + 1) ** 2
return numerator / denominator
def art(x, a, b, multiplier=1):
first_term = 100 / (1 + exp(a * exp(x * - a / b)))
second_term = 100 / (1 + exp(a))
return (first_term - second_term) * multiplier
def art_derivative(x, a, b, multiplier=1.0):
numerator = -100 * a * multiplier * exp(a * exp(- (a / b) * x - (a / b)))
denominator = (exp(a * exp(-a / b) * x) + 1) ** 2
return numerator / denominator
class Sigmoid(object):
# TODO LaTeX curve equation rendering
def __init__(self, cap, ec50, steep, multiplier=1, price=1, curve_type='basic'):
"""
:param cap: maximum level for function
:param ec50: half-efficiency point. Moves curve horizontally, and
serves as bend point.
:param steep: slope coefficient. If < 1, curve will be fully concave on
interval (0, +Inf), if < 1, curve will be convex before ec50 and concave
after.
:param multiplier: model coefficient for curve
:param curve_type: regular or logistic response curve, can be 'basic' or 'log'
"""
self.cap = cap
self.ec50 = ec50
self.steep = steep
self.multiplier = multiplier
self.price = price
self.type = curve_type
@property
def fun(self):
if self.type == 'basic':
return partial(basic,
cap=self.cap,
ec50=self.ec50,
steep=self.steep,
price=self.price,
multiplier=self.multiplier)
elif self.type == 'log':
return partial(log,
cap=self.cap,
ec50=self.ec50,
steep=self.steep,
price=self.price,
multiplier=self.multiplier)
@property
def derivative(self):
if self.type == 'basic':
return partial(basic_derivative,
cap=self.cap,
ec50=self.ec50,
steep=self.steep,
price=self.price,
multiplier=self.multiplier)
elif self.type == 'log':
return partial(log_derivative,
cap=self.cap,
ec50=self.ec50,
steep=self.steep,
price=self.price,
multiplier=self.multiplier)
def __call__(self, x):
"""
Calculate response
:param x: budget
:return: float64
"""
return self.fun(x)
def plot(self, budget):
DataFrame(
{
"media": self(linspace(0, budget, 1000))
}
) \
.plot(
kind='line',
grid=True,
figsize=(12, 10)
)
# noinspection PyMissingConstructor
class ArtyomCurve(object):
def __init__(self, a, b, multiplier=1.0):
self.a = a
self.b = b
self.multiplier = multiplier
@property
def fun(self):
return partial(art, a=self.a, b=self.b, multiplier=self.multiplier)
@property
def derivative(self):
return partial(art_derivative, a=self.a, b=self.b, multiplier=self.multiplier)
def __call__(self, x):
return self.fun(x)
class MixedCurve(object):
"""
Mixed curve is designed for POEM and should represent response from one media. Constructed from a basic Curve objects.
"""
def __init__(self, *curves):
self.curves = curves
def __call__(self, x):
return self.fun(x)
@property
def fun(self):
"""
Callable that can be passed further.
"""
def f(x):
return sum([curve(x) for curve in self.curves])
return f
@property
def derivative(self):
def d(x):
return sum([curve.derivative(x) for curve in self.curves])
return d
class Budget(object):
"""
Optimization solver class.
"""
notebook_mode = True
def __init__(self, budget):
"""
:param budget: total budget for problem
"""
self.budget = budget
self.solution = None
self.__bounds = []
self.__curves = []
def get_curves(self):
"""
:return: Curve objects assigned to this budget
"""
return self.__curves
def get_bounds(self):
"""
:return: bounds for Curve objects assigned to this budget
"""
return self.__bounds
def add_curve(self, curve, lower=None, upper=None):
"""
Add Curve (which essentially means media) to optimization problem.
:param curve: Curve/MixedCurve instance
:param upper: Upper bound for budget
:param lower: Lower bound for budget
:return:
"""
if not lower:
lower = 1
if not upper:
upper = self.budget
self.__curves.append(curve)
self.__bounds.append([lower, upper])
@property
def fun(self):
def f(x, sign=1.0):
impact = 0
for i, curve in enumerate(self.__curves):
impact += curve(x[i])
return sign * impact
return f
def __call__(self, x):
"""
Calculate response for given spends.
:param x: int/float/numpy.ndarray/Series
:return: float64
"""
return self.fun(x)
@property
def mix(self):
if self.solution:
return self.solution.x
else:
return [0.0 for _ in self.__curves]
@property
def derivative(self):
# actual derivative sign is 1.0, but since we're optimizing for minimum,
# in solver call we pass -1.0 as argument
def f(x, sign=1.0):
return [sign * curve.derivative(x[i]) for i, curve in enumerate(self.__curves)]
return f
@property
def constraints(self):
"""
Generate callable constraints for SLSQP optimization.
:return: dict{str, callable, callable}
"""
def fun(x):
spend = sum(x)
return spend - self.budget
def jac(x):
return array([1.0 for _ in range(len(x))])
constraints = (
{
'type': 'eq',
'fun': fun,
'jac': jac
},
)
return constraints
def solve(self, disp=True, maxiter=100):
"""
Solve optimization problem for budget.
:param disp: Set to True to print convergence messages
:param maxiter: Maximum number of iterations to perform
:return: numpy.array with corresponding budgets
"""
constraints = self.constraints
derivative = self.derivative
x0 = array([bound[0] for bound in self.__bounds])
self.solution = minimize(
fun=self.fun,
x0=x0,
args=(-1.0,),
method='SLSQP',
jac=derivative,
bounds=self.__bounds,
constraints=constraints,
options={
'disp': disp,
'maxiter': maxiter
}
)
return self.solution.x
def plot(self, names=None, budget=None, ext='png'):
"""
Render all response curves to single plot. If ```notebook_mode``` is ```True```,
return matplotlib subplot, else save image to file `plot.ext`.
:param names: verbose names for plot
:param budget: max x axis for plot
:param ext: file extension if saving image to disk
:return:
"""
if budget:
x = linspace(0, budget, 1000)
else:
x = linspace(0, self.budget + int(self.budget / 100), 1000)
if names:
data = {name: curve(x) for name, curve in zip(names, self.__curves)}
else:
            data = {'y {0}'.format(i + 1): curve(x) for i, curve in enumerate(self.__curves)}
lines = DataFrame(
data=data,
index=x
) \
.plot(
kind='line',
figsize=(12, 10),
grid=True
)
if self.notebook_mode:
return lines
else:
fig = lines.get_figure()
            fig.savefig("plot.{0}".format(ext))
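# A minimal usage sketch (not from the original file), assuming the Budget and
# Sigmoid classes above; all parameter values here are illustrative.
if __name__ == '__main__':
    solver = Budget(100)                                  # total budget to split
    solver.add_curve(Sigmoid(cap=50, ec50=0.5, steep=2))  # medium 1
    solver.add_curve(Sigmoid(cap=30, ec50=0.4, steep=3))  # medium 2
    mix = solver.solve(disp=False)                        # SLSQP allocation
    print(mix, solver(mix))                               # spends and total response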
|
[
"yurathestorm@gmail.com"
] |
yurathestorm@gmail.com
|
77ffe1bf4264412518a3abe490c66cde3c819ab3
|
2f2079f23ce7373fe96d08cfd24577fac6f40962
|
/accounts/views.py
|
a1e39e376209acaeb346a33be8cd3bb29bbcbf4f
|
[] |
no_license
|
shinhaan/greatkart
|
f1195eb81276be3541df22c1830e89cc77a8dff1
|
bd0215de9ec98f17de7084cc94f48647a4401151
|
refs/heads/main
| 2023-06-14T10:26:20.195947
| 2021-07-11T11:09:55
| 2021-07-11T11:09:55
| 384,908,455
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 11,895
|
py
|
from django.shortcuts import render, redirect, get_object_or_404
from .forms import RegistrationForm, UserForm, UserProfileForm
from .models import Account, UserProfile
from orders.models import Order, OrderProduct
from django.contrib import messages, auth
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
# Verification email
from django.contrib.sites.shortcuts import get_current_site
from django.template.loader import render_to_string
from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode
from django.utils.encoding import force_bytes
from django.contrib.auth.tokens import default_token_generator
from django.core.mail import EmailMessage
from carts.views import _cart_id
from carts.models import Cart, CartItem
import requests
def register(request):
if request.method == 'POST':
form = RegistrationForm(request.POST)
if form.is_valid():
first_name = form.cleaned_data['first_name']
last_name = form.cleaned_data['last_name']
phone_number = form.cleaned_data['phone_number']
email = form.cleaned_data['email']
password = form.cleaned_data['password']
username = email.split("@")[0]
user = Account.objects.create_user(first_name=first_name, last_name=last_name, email=email, username=username, password=password)
user.phone_number = phone_number
user.save()
# Create a user profile
profile = UserProfile()
profile.user_id = user.id
profile.profile_picture = 'default/default-user.png'
profile.save()
# USER ACTIVATION
current_site = get_current_site(request)
mail_subject = 'Please activate your account'
message = render_to_string('accounts/account_verification_email.html', {
'user': user,
'domain': current_site,
'uid': urlsafe_base64_encode(force_bytes(user.pk)),
'token': default_token_generator.make_token(user),
})
to_email = email
send_email = EmailMessage(mail_subject, message, to=[to_email])
send_email.send()
# messages.success(request, 'Thank you for registering with us. We have sent you a verification email to your email address [rathan.kumar@gmail.com]. Please verify it.')
return redirect('/accounts/login/?command=verification&email='+email)
else:
form = RegistrationForm()
context = {
'form': form,
}
return render(request, 'accounts/register.html', context)
def login(request):
if request.method == 'POST':
email = request.POST['email']
password = request.POST['password']
user = auth.authenticate(email=email, password=password)
if user is not None:
try:
cart = Cart.objects.get(cart_id=_cart_id(request))
is_cart_item_exists = CartItem.objects.filter(cart=cart).exists()
if is_cart_item_exists:
cart_item = CartItem.objects.filter(cart=cart)
# Getting the product variations by cart id
product_variation = []
for item in cart_item:
variation = item.variations.all()
product_variation.append(list(variation))
# Get the cart items from the user to access his product variations
cart_item = CartItem.objects.filter(user=user)
ex_var_list = []
id = []
for item in cart_item:
existing_variation = item.variations.all()
ex_var_list.append(list(existing_variation))
id.append(item.id)
# product_variation = [1, 2, 3, 4, 6]
# ex_var_list = [4, 6, 3, 5]
for pr in product_variation:
if pr in ex_var_list:
index = ex_var_list.index(pr)
item_id = id[index]
item = CartItem.objects.get(id=item_id)
item.quantity += 1
item.user = user
item.save()
else:
cart_item = CartItem.objects.filter(cart=cart)
for item in cart_item:
item.user = user
item.save()
except:
pass
auth.login(request, user)
messages.success(request, 'You are now logged in.')
url = request.META.get('HTTP_REFERER')
try:
query = requests.utils.urlparse(url).query
# next=/cart/checkout/
params = dict(x.split('=') for x in query.split('&'))
if 'next' in params:
nextPage = params['next']
return redirect(nextPage)
except:
return redirect('dashboard')
else:
messages.error(request, 'Invalid login credentials')
return redirect('login')
return render(request, 'accounts/login.html')
@login_required(login_url = 'login')
def logout(request):
auth.logout(request)
messages.success(request, 'You are logged out.')
return redirect('login')
def activate(request, uidb64, token):
try:
uid = urlsafe_base64_decode(uidb64).decode()
user = Account._default_manager.get(pk=uid)
except(TypeError, ValueError, OverflowError, Account.DoesNotExist):
user = None
if user is not None and default_token_generator.check_token(user, token):
user.is_active = True
user.save()
messages.success(request, 'Congratulations! Your account is activated.')
return redirect('login')
else:
messages.error(request, 'Invalid activation link')
return redirect('register')
@login_required(login_url = 'login')
def dashboard(request):
orders = Order.objects.order_by('-created_at').filter(user_id=request.user.id, is_ordered=True)
orders_count = orders.count()
userprofile = UserProfile.objects.get(user_id=request.user.id)
context = {
'orders_count': orders_count,
'userprofile': userprofile,
}
return render(request, 'accounts/dashboard.html', context)
def forgotPassword(request):
if request.method == 'POST':
email = request.POST['email']
if Account.objects.filter(email=email).exists():
user = Account.objects.get(email__exact=email)
# Reset password email
current_site = get_current_site(request)
mail_subject = 'Reset Your Password'
message = render_to_string('accounts/reset_password_email.html', {
'user': user,
'domain': current_site,
'uid': urlsafe_base64_encode(force_bytes(user.pk)),
'token': default_token_generator.make_token(user),
})
to_email = email
send_email = EmailMessage(mail_subject, message, to=[to_email])
send_email.send()
messages.success(request, 'Password reset email has been sent to your email address.')
return redirect('login')
else:
messages.error(request, 'Account does not exist!')
return redirect('forgotPassword')
return render(request, 'accounts/forgotPassword.html')
def resetpassword_validate(request, uidb64, token):
try:
uid = urlsafe_base64_decode(uidb64).decode()
user = Account._default_manager.get(pk=uid)
except(TypeError, ValueError, OverflowError, Account.DoesNotExist):
user = None
if user is not None and default_token_generator.check_token(user, token):
request.session['uid'] = uid
messages.success(request, 'Please reset your password')
return redirect('resetPassword')
else:
        messages.error(request, 'This link has expired!')
return redirect('login')
def resetPassword(request):
if request.method == 'POST':
password = request.POST['password']
confirm_password = request.POST['confirm_password']
if password == confirm_password:
uid = request.session.get('uid')
user = Account.objects.get(pk=uid)
user.set_password(password)
user.save()
messages.success(request, 'Password reset successful')
return redirect('login')
else:
            messages.error(request, 'Passwords do not match!')
return redirect('resetPassword')
else:
return render(request, 'accounts/resetPassword.html')
@login_required(login_url='login')
def my_orders(request):
orders = Order.objects.filter(user=request.user, is_ordered=True).order_by('-created_at')
context = {
'orders': orders,
}
return render(request, 'accounts/my_orders.html', context)
@login_required(login_url='login')
def edit_profile(request):
userprofile = get_object_or_404(UserProfile, user=request.user)
if request.method == 'POST':
user_form = UserForm(request.POST, instance=request.user)
profile_form = UserProfileForm(request.POST, request.FILES, instance=userprofile)
if user_form.is_valid() and profile_form.is_valid():
user_form.save()
profile_form.save()
messages.success(request, 'Your profile has been updated.')
return redirect('edit_profile')
else:
user_form = UserForm(instance=request.user)
profile_form = UserProfileForm(instance=userprofile)
context = {
'user_form': user_form,
'profile_form': profile_form,
'userprofile': userprofile,
}
return render(request, 'accounts/edit_profile.html', context)
@login_required(login_url='login')
def change_password(request):
if request.method == 'POST':
current_password = request.POST['current_password']
new_password = request.POST['new_password']
confirm_password = request.POST['confirm_password']
user = Account.objects.get(username__exact=request.user.username)
if new_password == confirm_password:
success = user.check_password(current_password)
if success:
user.set_password(new_password)
user.save()
# auth.logout(request)
messages.success(request, 'Password updated successfully.')
return redirect('change_password')
else:
messages.error(request, 'Please enter valid current password')
return redirect('change_password')
else:
messages.error(request, 'Password does not match!')
return redirect('change_password')
return render(request, 'accounts/change_password.html')
@login_required(login_url='login')
def order_detail(request, order_id):
order_detail = OrderProduct.objects.filter(order__order_number=order_id)
order = Order.objects.get(order_number=order_id)
subtotal = 0
for i in order_detail:
subtotal += i.product_price * i.quantity
context = {
'order_detail': order_detail,
'order': order,
'subtotal': subtotal,
}
return render(request, 'accounts/order_detail.html', context)
|
[
"noreply@github.com"
] |
noreply@github.com
|
21efb74694d004e4bde09a3fbf8c6fb83384a6c6
|
e17b5454f2dd01b0e8de98579c3fff5979ef5d64
|
/company/migrations/0005_alter_company_address.py
|
1861e1355c0b0f4baac0f9ec3abae7d62a7e8940
|
[] |
no_license
|
knyshman/crm1
|
89e178bdd6f0920229468c7bd521759265f1e0e5
|
8730f2b454bcd84d3440f6e0a2bbb9d95ed98bc2
|
refs/heads/main
| 2023-07-15T03:47:56.590080
| 2021-08-22T10:16:53
| 2021-08-22T10:16:53
| 397,689,223
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 433
|
py
|
# Generated by Django 3.2.6 on 2021-08-09 07:56
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('company', '0004_auto_20210809_1048'),
]
operations = [
migrations.AlterField(
model_name='company',
name='address',
field=models.TextField(max_length=200, verbose_name='Адрес компании'),
),
]
|
[
"88670748+knyshman@users.noreply.github.com"
] |
88670748+knyshman@users.noreply.github.com
|
b8d68f6a9354e092d8ff50325b16555fda027803
|
b1fdbdbf52bd5c4f9c31dffa51c56f33a4f5271f
|
/chronic2.py
|
a6a172294b298c314e24be3f680aca3d3a5f4b8e
|
[] |
no_license
|
mannylopez/DemolitionsPDX
|
02fbcfb487f0f6a43e1777c15af5a6c05de3327a
|
5805b9bd80ce882c5c17be74c7b38d12ef671263
|
refs/heads/master
| 2021-01-10T19:47:23.724461
| 2015-06-11T02:47:28
| 2015-06-11T02:47:28
| 37,211,156
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,291
|
py
|
from lxml import html
import requests
page = requests.get('http://www.portlandchronicle.com/demolition-permits-issued-may-18-may-24/')
tree = html.fromstring(page.text)
mayEighteenHead = tree.xpath('//*[@id="post-2931"]/div/p[3]/strong/text()')
mayEighteenBlock = tree.xpath('//*[@id="post-2931"]/div/div[2]/p/strong/text()')
mayEighteenFoot = tree.xpath('//*[@id="post-2931"]/div/div[2]/p/strong/text()')
# mayTwentyFiveHead = tree.xpath('//*[@id="post-3022"]/div[1]/p[3]/strong/text()')
# mayTwentyFiveBlock = tree.xpath('//*[@id="post-3022"]/div[1]/div[2]/p/strong/text()')
# mayTwentyFiveFoot = tree.xpath('//*[@id="post-3022"]/div[1]/div[2]/div/p/strong/a/text()')
# print 'May 25 Head: ', mayTwentyFiveHead
# print ' : '
# print len(mayTwentyFiveHead)
# print ' : '
# print 'May 25 Block: ', mayTwentyFiveBlock
# print ' : '
# print len(mayTwentyFiveBlock)
# print ' : '
# print 'May 25 Foot: ', mayTwentyFiveFoot
# print ' : '
# print len(mayTwentyFiveFoot)
print 'May 18 Head: ', mayEighteenHead
print ' : '
print len(mayEighteenHead)
print ' : '
print 'May 18 Block: ', mayEighteenBlock
print ' : '
print len(mayEighteenBlock)
print ' : '
print 'May 18 Foot: ', mayEighteenFoot
print ' : '
print len(mayEighteenFoot)
|
[
"manuel.est.lopez@gmail.com"
] |
manuel.est.lopez@gmail.com
|
cfd9fe7c1d360679dde3f2d9087ae5b08aa89dea
|
ccf95d8916dfb1773898a96b3b35e10f276181cc
|
/Chapter4/try10541.py
|
97a6c0bd5d06c4096956b33371ad801c848a0fc6
|
[] |
no_license
|
Janet-ctrl/python
|
ad5761875b4eaa2cb1d4194cb9f41a787a1ac5ce
|
666cfe9e82aaae245d6648e2fc97e60184a99704
|
refs/heads/master
| 2022-11-06T17:08:01.151639
| 2020-06-11T20:28:43
| 2020-06-11T20:28:43
| 270,942,344
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 228
|
py
|
pizzas =['pikkant', 'meaty treat','three cheese']
for pizza in pizzas:
print(pizza.title())
for pizza in pizzas:
print(f'I enjoy the {pizza.title()} on the pizza menu.')
print(f'\nI really really really like pizza!')
|
[
"janet.eras@outlook.com"
] |
janet.eras@outlook.com
|
2831374c95558c3e2e891d0c7ba3c1ecbab6e0a2
|
372c0e129f5240db55bdca4da025f30506e977bf
|
/django/user/urls.py
|
67f316998edd3d8e0107a5ff9ad2b6ae669f57e0
|
[
"MIT"
] |
permissive
|
rem1niscence/bank-app
|
b8f38e908a2673b3e7f59d5b83d567580a5c9dc4
|
d16977a3fc8edc52c4271ee24521e543d981cd8a
|
refs/heads/master
| 2021-10-11T04:14:09.075975
| 2019-01-04T22:31:43
| 2019-01-04T22:31:43
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 383
|
py
|
from django.contrib.auth.views import LogoutView, LoginView
from django.urls import path
from user import views
app_name = 'user'
urlpatterns = [
path('login', LoginView.as_view(), name="login"),
path('logout', LogoutView.as_view(), name="logout"),
path('register', views.registrationFormExtended, name="register"),
path('edit', views.edit_user_info, name='edit')
]
|
[
"roniel_valdez@outlook.com"
] |
roniel_valdez@outlook.com
|
2e06ff1551d9602442f61381074c53b5775bb2e0
|
e66bbd54067184b40d82968f68aa23e0944ec9d4
|
/rand.py
|
2bb1e0e3a3576f83cc850bdf9547f1ae279ecd45
|
[] |
no_license
|
mfiels/single-server-queue-sim
|
451874f4a4e549d669d911a930c7ead59278ff63
|
0a9873ffb15135a2138ab09c3a009c5b95fe78f8
|
refs/heads/master
| 2020-06-08T03:00:44.343793
| 2013-10-29T15:30:21
| 2013-10-29T15:30:21
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 579
|
py
|
import math
class ExpRand(object):
def __init__(self, parameter, generator):
self.parameter = parameter
self.generator = generator
def next(self):
return -1 / self.parameter * math.log(self.generator.next())
class LCG(object):
def __init__(self, a, m, seed):
self.a = a
self.m = m
self.previous = seed
def next(self):
u = self.previous / self.m
self.previous = (self.a * self.previous) % self.m
return u
class Deterministic(object):
def __init__(self, value):
self.value = value
def next(self):
return self.value
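# A minimal usage sketch (not from the original file), assuming the classes
# above: the LCG feeds uniforms into ExpRand's inverse-transform sampling.
# The constants are the classic Lehmer generator, chosen here for illustration.
if __name__ == '__main__':
    gen = LCG(a=16807, m=2 ** 31 - 1, seed=42)
    exp_rand = ExpRand(parameter=0.5, generator=gen)  # exponential with rate 0.5
    print([exp_rand.next() for _ in range(3)])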
|
[
"mfielbig@gmail.com"
] |
mfielbig@gmail.com
|
30bb87520457b778c50a45b260ec8d4f109784c7
|
2f0d81065ccfe27a73b66d82e1d938ec565cbe13
|
/111.py
|
e70c4ae2eb15d74893927282fec361268441bea0
|
[] |
no_license
|
likaon100/bootcamp_alx
|
bbdc82ac3d6f254326051e9360e89aeac33b5b02
|
5f0d1c99261ee1ef6241a506bec82ff5c1413eed
|
refs/heads/master
| 2020-03-31T10:09:44.717419
| 2018-12-17T22:55:02
| 2018-12-17T22:55:02
| 152,124,563
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 827
|
py
|
produkty = {
"ziemniaki": 3,
"cebula": 2,
"woda": 1.5,
"piwo": 2.5,
"zimioki": 3.5,
"browarek": 4.0,
}
koszyk = {}
while True:
print("W naszym sklepie oferujemy: ")
for produkt in produkty:
print(f" - {produkt} - w cenie: {produkty[produkt]} PLN")
print()
    wybor_produktu = input("Który produkt chcesz kupić? (wpisz koniec aby zakończyć) ")
if wybor_produktu == "koniec":
break
    if wybor_produktu in produkty:
        ile = input(f"Ile chcesz kupić [{wybor_produktu}]")
        cena = int(ile) * produkty[wybor_produktu]
        koszyk[wybor_produktu] = cena
        print(f"Zapłacisz: {cena}")
for produkt in koszyk:
print (f" - {produkt}: {koszyk[produkt]}")
print(":):):):):)")
print ("Twój rachunek")
sumarycznie = 0
for produkt in koszyk:
    sumarycznie += koszyk[produkt]
print(f"Razem: {sumarycznie} PLN")
|
[
"luk.podlewski@gmail.com"
] |
luk.podlewski@gmail.com
|
3ac7c0c35f95782cfbb951672eedb64836413325
|
1de5f94c0296bfd38a8524647b8bd6522a2b3c3a
|
/oac_web_service/views/sparql.py
|
51f7b85a802340ad88c3e6867c2bdce613764eeb
|
[] |
no_license
|
Brown-University-Library/oac_web_service
|
dc36711318ce4eaf038d4608adb305a99a8bba6a
|
165d7bb773f5a2b53fd6b0196107ccd001f312bb
|
refs/heads/main
| 2021-07-14T08:30:07.809468
| 2021-07-06T14:34:31
| 2021-07-06T14:34:31
| 4,136,850
| 1
| 0
| null | 2021-07-06T14:34:32
| 2012-04-25T13:35:43
|
Python
|
UTF-8
|
Python
| false
| false
| 1,937
|
py
|
import traceback
from flask import request, jsonify, make_response
from oac_web_service import app
from oac_web_service.models.annotation import Annotation, AnnotationError
from java.lang import String, NullPointerException
from com.hp.hpl.jena.tdb import TDBFactory
from com.hp.hpl.jena.query import QueryParseException, QueryFactory, QueryExecutionFactory, ResultSetFormatter, ReadWrite
@app.route('/sparql', methods=['POST','GET'])
def sparql():
"""
A POST or GET method to query the Open Annotations index with a SPARQL Query
This method has READ access to the Annotations index.
Required parameters:
query: The SPARQL query to execute
"""
sparql_xml_results = ""
try:
if request.method == 'POST':
q = request.form.get('query', None)
else:
q = request.args.get('query', None)
query = QueryFactory.create(String(q))
# Start dataset READ transaction
dataset = TDBFactory.createDataset(app.config['STORE_LOCATION'])
dataset.begin(ReadWrite.READ)
try:
qexec = QueryExecutionFactory.create(query, dataset)
results = qexec.execSelect()
try:
sparql_xml_results = ResultSetFormatter.asXMLString(results)
finally:
qexec.close()
finally:
dataset.end()
except AnnotationError, ex:
return jsonify({'value' : ex.value, 'trace' : traceback.format_stack()})
except QueryParseException, ex:
return jsonify({'error' : "SPARQL query not parsable."})
except NullPointerException, ex:
return jsonify({'error' : "No SPARQL query found. Please POST a valid SPARQL query in the 'query' parameter of the request"})
except Exception, ex:
raise
else:
resp = make_response(unicode(sparql_xml_results))
resp.mimetype = 'text/xml'
return resp
|
[
"wilcox.kyle@gmail.com"
] |
wilcox.kyle@gmail.com
|
b27c4576f2f1687fb477e3b6b56692c1d6919617
|
ead7dbba151265230638a8dc609407fd07ad598c
|
/final_pro/clustering/main.py
|
e6a63dad25ae82753b4f962147df16b8a0662b6d
|
[] |
no_license
|
immrz/CityU-ML-PA
|
6a24865e0e1d151223a2c8349bc8a9ad5c8cf370
|
19bb3d6210b0ecd44b24ecb1cf093f55c964ad68
|
refs/heads/master
| 2020-04-08T18:30:47.417077
| 2019-04-11T09:10:05
| 2019-04-11T09:10:05
| 159,611,024
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,185
|
py
|
import os
import numpy as np
from ass02.part_a import kmeans, mean_shift
from final_pro import utility
from final_pro.clustering import spectral
import time
from sklearn import metrics
import argparse
import scipy.cluster.vq as vq
import multiprocessing as mp
def load_feat_and_target(folder):
feat_file = os.path.join(folder, 'MNIST_TestSet10K_DimReduced.npy')
target_file = os.path.join(folder, 'MNIST_TestSet10K_GroundTruths.npy')
features = np.load(feat_file, allow_pickle=False)
targets = np.load(target_file, allow_pickle=False)
return features, targets
def main(cmd=None):
args = parse_args(cmd=cmd)
features, targets = load_feat_and_target(args.folder)
if args.whiten:
features = vq.whiten(features)
targets = targets.astype(np.int32)
print('Number {:d} started!'.format(args.save_postfix))
num_cluster = 10
since = time.time()
if args.alg == 'kmeans':
sub_args = utility.Config(num_cluster=num_cluster, num_epoch=args.num_epoch, debug=False)
assign, _ = kmeans.receiver(sub_args, features)
elif args.alg == 'meanshift':
sub_args = utility.Config(num_epoch=args.num_epoch, debug=False, bandwidth=args.bandwidth)
assign, _ = mean_shift.receiver(sub_args, features)
elif args.alg == 'spectral':
kw = {'num_epoch': args.num_epoch}
if args.adj_type == 'knn':
kw['num_neighbor'] = args.num_neighbor if args.num_neighbor is not None \
else int(np.sqrt(features.shape[0]))
elif args.adj_type == 'epsilon':
kw['epsilon'] = args.epsilon
elif args.adj_type == 'fully':
kw['sigma'] = args.sigma
if args.norm_lap is not None:
kw['normalize'] = args.norm_lap
assign, _ = spectral.cluster(features, num_cluster=num_cluster, adj_type=args.adj_type, **kw)
else:
raise NotImplementedError
elapsed = time.time() - since
adj_rand_idx = metrics.adjusted_rand_score(labels_true=targets, labels_pred=assign)
ret_msg = '{:02d}: It takes {:.0f}h {:.0f}m {:.0f}s to finish. The Adjusted Random Index is {:.4f}'.format(
args.save_postfix, elapsed // 3600, elapsed % 3600 // 60, elapsed % 60, adj_rand_idx)
if args.save_path is not None:
try:
np.savetxt(os.path.join(args.save_path, 'res_{:02d}.txt'.format(args.save_postfix)), assign)
except:
print('Saving failed!')
return ret_msg, assign
def parse_args(cmd=None):
parser = argparse.ArgumentParser()
parser.add_argument('folder', type=str, help='The folder to find the features and ground-truths.')
parser.add_argument('alg', type=str, choices=['kmeans', 'meanshift', 'spectral'], help='The algorithm to use.')
parser.add_argument('--whiten', action='store_true', help='If set, normalize the features first.')
parser.add_argument('--num-epoch', default=100, type=int, help='The maximum iterations.')
parser.add_argument('--bandwidth', default=0.1, type=float, help='The bandwidth of the kernels for meanshift.')
parser.add_argument('--adj-type', default='knn', choices=['knn', 'epsilon', 'fully'], type=str,
help='The method to compute the adjacency matrix for Spectral Clustering.')
parser.add_argument('--num-neighbor', default=None, type=int, help='The threshold of the knn adjacency.')
parser.add_argument('--epsilon', default=0.1, type=float, help='The threshold of the epsilon adjacency.')
parser.add_argument('--sigma', default=1., type=float, help='The threshold of the fully adjacency.')
parser.add_argument('--norm-lap', default=None, choices=['sym', 'rw', None], type=str,
help='If not None, specify which kind of normalized Laplacian to use.')
parser.add_argument('--save-path', default=None, type=str, help='The folder to save results.')
parser.add_argument('--save-postfix', type=int, help='The postfix of the saved result file.')
if cmd is None:
args = parser.parse_args()
else:
args = parser.parse_args(cmd.split())
return args
def multi_args():
choices = ['../ spectral --adj-type knn --num-neighbor 50 --norm-lap sym',
'../ spectral --adj-type knn --num-neighbor 60 --norm-lap sym',
'../ spectral --adj-type knn --num-neighbor 70 --norm-lap sym',
'../ spectral --adj-type knn --num-neighbor 80 --norm-lap sym',
'../ spectral --adj-type knn --num-neighbor 90 --norm-lap sym',
'../ spectral --adj-type knn --num-neighbor 100 --norm-lap sym',
'../ spectral --adj-type knn --num-neighbor 110 --norm-lap sym',
'../ spectral --adj-type knn --num-neighbor 120 --norm-lap sym',
'../ spectral --adj-type knn --num-neighbor 130 --norm-lap sym',
'../ spectral --adj-type knn --num-neighbor 140 --norm-lap sym',
'../ spectral --adj-type knn --num-neighbor 150 --norm-lap sym',
'../ spectral --adj-type knn --num-neighbor 160 --norm-lap sym',
'../ spectral --adj-type knn --num-neighbor 170 --norm-lap sym',
'../ spectral --adj-type knn --num-neighbor 180 --norm-lap sym',
'../ spectral --adj-type knn --num-neighbor 190 --norm-lap sym',
'../ spectral --adj-type knn --num-neighbor 200 --norm-lap sym',
'../ spectral --adj-type epsilon --epsilon 8 --norm-lap sym',
'../ spectral --adj-type epsilon --epsilon 9 --norm-lap sym',
'../ spectral --adj-type epsilon --epsilon 10 --norm-lap sym',
'../ spectral --adj-type epsilon --epsilon 11 --norm-lap sym',
'../ spectral --adj-type epsilon --epsilon 12 --norm-lap sym',
'../ spectral --adj-type epsilon --epsilon 13 --norm-lap sym',
'../ spectral --adj-type epsilon --epsilon 14 --norm-lap sym',
'../ spectral --adj-type epsilon --epsilon 15 --norm-lap sym',
'../ spectral --adj-type epsilon --epsilon 16 --norm-lap sym',
'../ spectral --adj-type epsilon --epsilon 17 --norm-lap sym',
'../ spectral --adj-type epsilon --epsilon 18 --norm-lap sym',
'../ spectral --adj-type epsilon --epsilon 19 --norm-lap sym',
'../ spectral --adj-type epsilon --epsilon 20 --norm-lap sym',
'../ spectral --adj-type epsilon --epsilon 21 --norm-lap sym',
'../ spectral --adj-type epsilon --epsilon 22 --norm-lap sym',
'../ spectral --adj-type epsilon --epsilon 23 --norm-lap sym',
'../ spectral --adj-type fully --sigma 0.1 --norm-lap sym',
'../ spectral --adj-type fully --sigma 0.2 --norm-lap sym',
'../ spectral --adj-type fully --sigma 0.3 --norm-lap sym',
'../ spectral --adj-type fully --sigma 0.4 --norm-lap sym',
'../ spectral --adj-type fully --sigma 0.5 --norm-lap sym',
'../ spectral --adj-type fully --sigma 0.6 --norm-lap sym',
'../ spectral --adj-type fully --sigma 0.7 --norm-lap sym',
'../ spectral --adj-type fully --sigma 0.8 --norm-lap sym',
'../ spectral --adj-type fully --sigma 0.9 --norm-lap sym',
'../ spectral --adj-type fully --sigma 1.0 --norm-lap sym',
'../ spectral --adj-type fully --sigma 1.1 --norm-lap sym',
'../ spectral --adj-type fully --sigma 1.2 --norm-lap sym',
'../ spectral --adj-type fully --sigma 1.3 --norm-lap sym',
'../ spectral --adj-type fully --sigma 1.4 --norm-lap sym',
'../ spectral --adj-type fully --sigma 1.5 --norm-lap sym',
'../ spectral --adj-type fully --sigma 1.6 --norm-lap sym']
choices = list([c + ' --save-path ../results --save-postfix {:d}'.format(i) for i, c in enumerate(choices)])
pool = mp.Pool(processes=48)
multi_res = pool.map(main, choices)
pool.close()
pool.join()
for msg, _ in multi_res:
print(msg)
if __name__ == '__main__':
# main()
multi_args()
|
[
"runzemao2@ad.cityu.edu.hk"
] |
runzemao2@ad.cityu.edu.hk
|
78d31fe9f28ab2f74a52fd05337904956f1874d5
|
cff557ebdc39d7f71228bdb3f52c86888b4bf64f
|
/app7 - Data from web/realEstate.py
|
2468c011970a3d8ff8534ba58e868dc9f22dd1ed
|
[] |
no_license
|
masih68kh/Python-Projects
|
e2a388678db1d4d4b8e88b93f1d565b8fb7d8c00
|
7ab4fa504202f15706225fa4ce9970fffdbb51e4
|
refs/heads/master
| 2020-07-18T00:04:35.514791
| 2019-09-19T21:32:38
| 2019-09-19T21:32:38
| 206,130,304
| 0
| 1
| null | 2019-11-25T06:26:03
| 2019-09-03T17:08:29
|
HTML
|
UTF-8
|
Python
| false
| false
| 1,925
|
py
|
import requests
from bs4 import BeautifulSoup
import pandas as pd
def number_extractor_from_string(strg):
'''
accepts <strg> : string, that has \n and space char in it
return the int value of the price that comes after $
'''
number = ''
for char in strg:
if char != '\n' and char != ' ':
number += char
return number
r = requests.get("http://www.pyclass.com/real-estate/rock-springs-wy/LCWYROCKSPRINGS/",
headers={'User-agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:61.0) Gecko/20100101 Firefox/61.0'})
soup = BeautifulSoup(r.content, "html.parser")
alll = soup.find_all("div", {"class": "propertyRow"})  # len = 10
price_ls = []
for item in alll:
price_ls.append( item.find("h4", {"class":"propPrice"}).text )
price_ls = list(map(number_extractor_from_string, price_ls))
addr_ls = []
for item in alll:
addr_ls.append( item.find_all("span", {"class":"propAddressCollapse"})[0].text +', '+
item.find_all("span", {"class":"propAddressCollapse"})[1].text )
bed_ls = []
sqft_ls = []
for item in alll:
if item.find("span", {"class":"infoBed"}) != None:
bed_ls.append( item.find("span", {"class":"infoBed"}).text[0])
else:
bed_ls.append(None)
if item.find("span", {"class":"infoSqFt"}) != None:
sqft_ls.append( item.find("span", {"class":"infoSqFt"}).text.replace("Sq. Ft", ""))
else:
sqft_ls.append(None)
bath_ls = []
for item in alll:
Tag = item.find("div", {"class":"infoLine2"}).find("span", {"class":"infoValueFullBath"})
if Tag != None:
bath_ls.append(Tag.text[0])
else:
bath_ls.append(None)
pd.DataFrame({"Address": addr_ls,
"Price" : price_ls,
"Sq Ft" : sqft_ls,
"Number of Bedrooms" : bed_ls,
"Number of Bathrooms" : bath_ls,
}).to_csv("data.csv")
|
[
"masihkhodabandeh@Masihs-MacBook-Pro.local"
] |
masihkhodabandeh@Masihs-MacBook-Pro.local
|
611aafa0d2bc9469f0e2adc62ea0a5eca3ae4e61
|
45b799fbce7b2475a122859f78861df8935a464e
|
/SendMail.py
|
2ef1252772c985d53a9c24fd652fd4d688f649d8
|
[
"MIT"
] |
permissive
|
patrickjmccauley/Stock_Data_HBF
|
b1b389ee5e2c9ff469c838b6b8e6a7f343f51613
|
96649f0f838864fcecba2dc714a3c0851c01cec0
|
refs/heads/master
| 2023-04-11T08:03:46.947073
| 2021-04-23T15:56:27
| 2021-04-23T15:56:27
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 825
|
py
|
import smtplib, ssl
SMTP_SSL_PORT = 465
EMAIL_ADDRESS = "patrickjmccauley.dev@gmail.com"
def send_mail(subject, message, to_address=EMAIL_ADDRESS):
""" Simple wrapper to send email. Intent is to be used on script failure
"""
# Init values, consume credential
port = SMTP_SSL_PORT
f = open("cred.pickle")
password = f.read()
f.close()
# Create a secure SSL context
context = ssl.create_default_context()
# Send the mail
with smtplib.SMTP_SSL("smtp.gmail.com", port, context=context) as server:
server.login(EMAIL_ADDRESS, password)
msg = "Subject: {}\n\n{}".format(subject, message)
server.sendmail(EMAIL_ADDRESS, to_address, msg)
def main():
# Testing the code
send_mail("Testing email send", "Testing")
if __name__ == "__main__":
main()
|
[
"pmccau@seas.upenn.edu"
] |
pmccau@seas.upenn.edu
|
c7b2d0f1eb9fca968cf444a988542cce3378ac43
|
128ca08737d72d78d33530e4150fb406bbca78e2
|
/upwork/utils.py
|
588a218c0f4fe983cf21887c9a9acbb9e8ae05b5
|
[
"Apache-2.0",
"BSD-3-Clause"
] |
permissive
|
frolenkov-nikita/python-upwork
|
ef25828285caff8f310ac86cf9d6de414cb67937
|
d052f5caedc632c73ad770b1f822a8a494f6b34b
|
refs/heads/master
| 2023-06-01T09:28:48.092216
| 2021-07-01T16:07:10
| 2021-07-01T16:07:10
| 208,970,493
| 1
| 0
|
NOASSERTION
| 2019-09-17T05:57:01
| 2019-09-17T05:57:00
| null |
UTF-8
|
Python
| false
| false
| 4,989
|
py
|
# Python bindings to Upwork API
# python-upwork version 0.5
# (C) 2010-2015 Upwork
from datetime import date
from upwork.exceptions import ApiValueError
def assert_parameter(parameter_name, value, options_list):
"""Raise an exception if parameter's value not in options list."""
if value not in options_list:
raise ApiValueError(
"Incorrect value for {0}: '{1}', "
"valid values are {2}".format(
parameter_name, value, options_list))
def decimal_default(obj):
"""JSON serialization of Decimal.
*Usage:*
``json.dumps(data, default=decimal_default)``
Converts decimal to string.
"""
if obj.__class__.__name__ == 'Decimal':
return str(obj)
raise TypeError
class Q(object):
"""Simple GDS query constructor.
    Used to construct :py:class:`upwork.utils.Query`.
"""
def __init__(self, arg1, operator=None, arg2=None):
self.arg1 = arg1
self.operator = operator
self.arg2 = arg2
def __and__(self, other):
return self.__class__(self, 'AND', other)
def __or__(self, other):
return self.__class__(self, 'OR', other)
def __eq__(self, other):
return self.__class__(self, '=', other)
def __lt__(self, other):
return self.__class__(self, '<', other)
def __le__(self, other):
return self.__class__(self, '<=', other)
def __gt__(self, other):
return self.__class__(self, '>', other)
def __ge__(self, other):
return self.__class__(self, '>=', other)
def arg_to_string(self, arg):
if isinstance(arg, self.__class__):
if arg.operator:
return '({0})'.format(arg)
else:
return arg
elif isinstance(arg, str):
return "'{0}'".format(arg)
elif isinstance(arg, date):
return "'{0}'".format(arg.isoformat())
else:
return str(arg)
def __str__(self):
if self.operator:
str1 = self.arg_to_string(self.arg1)
str2 = self.arg_to_string(self.arg2)
return '{0} {1} {2}'.format(str1, self.operator, str2)
else:
return self.arg1
class Query(object):
"""Simple GDS query.
*Example:*::
client.timereport.get_provider_report('user1',
upwork.utils.Query(select=upwork.utils.Query.DEFAULT_TIMEREPORT_FIELDS,
where=(upwork.utils.Q('worked_on') <= date.today()) &
(upwork.utils.Q('worked_on') > '2010-05-01')))
"""
DEFAULT_TIMEREPORT_FIELDS = ['worked_on',
'team_id',
'team_name',
'task',
'memo',
'hours']
DEFAULT_FINREPORT_FIELDS = ['reference',
'date',
'buyer_company__id',
'buyer_company_name',
'buyer_team__id',
'buyer_team_name',
'provider_company__id',
'provider_company_name',
'provider_team__id',
'provider_team_name',
'provider__id',
'provider_name',
'type',
'subtype',
'amount']
def __init__(self, select, where=None, order_by=None):
self.select = select
self.where = where
self.order_by = order_by
def __str__(self):
select = self.select
select_str = 'SELECT ' + ', '.join(select)
where_str = ''
if self.where:
where_str = ' WHERE {0}'.format(self.where)
order_by_str = ''
if self.order_by:
order_by_str = ' ORDER BY ' + ','.join(self.order_by)
return ''.join([select_str, where_str, order_by_str])
class Table(object):
"""
A helper class to access cryptic GDS response as a list of dictionaries.
"""
def __init__(self, data):
self._cols = data['cols'] # Original data
self._rows = data['rows']
self.cols = [col['label'] for col in data['cols']]
self.rows = []
if data['rows']:
if data['rows'][0] != '': # Empty response
for row in [row['c'] for row in data['rows']]:
self.rows.append([cell['v'] for cell in row])
def __getitem__(self, key):
if not isinstance(key, (slice, int)):
raise TypeError
if isinstance(key, slice):
return [dict(zip(self.cols, row)) for row in self.rows[key]]
else:
return dict(zip(self.cols, self.rows[key]))
def __len__(self):
return len(self.rows)
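# A minimal usage sketch (not from the original file), assuming the classes
# above; the field names come from DEFAULT_TIMEREPORT_FIELDS.
if __name__ == '__main__':
    q = Query(select=Query.DEFAULT_TIMEREPORT_FIELDS,
              where=(Q('worked_on') > '2010-05-01') & (Q('hours') >= 8))
    print(q)  # SELECT worked_on, ..., hours WHERE (worked_on > '2010-05-01') AND (hours >= 8)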
|
[
"mnovozhilov@upwork.com"
] |
mnovozhilov@upwork.com
|
b13fd0bd13938b71d53b4ba7187e81aa986fa5b7
|
379f65890dbdcc773f98ecba5f77a9f5ac7b3cde
|
/temp/SVM_Classifier.py
|
ead9e90c12749e9814ac4cd4d2bd264b18c15973
|
[] |
no_license
|
SiriusExplorer/LiuZhen_Project
|
9ed3983f652bb002e3b9dc7aa4ae578920d3fde4
|
4f8d8e1a2e99cbdba74fdf41417a7a6fe3619cd6
|
refs/heads/master
| 2021-09-03T18:05:37.377363
| 2018-01-10T23:56:25
| 2018-01-10T23:56:25
| 108,834,956
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 578
|
py
|
import numpy as np
from sklearn.svm import SVC
vector = np.load('./sieved_dataset_acc_pssm/trainset/right_pssm/n1/vector_np_lam39.npy')
label = np.load('./sieved_dataset_acc_pssm/trainset/label_digit_nucleus01_np.npy')
clf = SVC()
clf.fit(vector, label)
test_vector = np.load('./sieved_dataset_acc_pssm/testset/right_pssm/n1/vector_np_lam39.npy')
predict_label = clf.predict(test_vector)
test_label = np.load('./sieved_dataset_acc_pssm/testset/label_digit_nucleus01_np.npy')
accuracy = np.sum(predict_label == test_label) / len(test_label)
print(predict_label)
print(accuracy)
|
[
"noreply@github.com"
] |
noreply@github.com
|
82680248823c227421e846392ab57040f5801f30
|
35083763215f56821cd26366b47eda9776e6013f
|
/python/prime.py
|
66f8c0323230651a757fe9216ff3ee043b33805b
|
[] |
no_license
|
tushargayan2324/2019
|
95f5f39b99a27584087eafdd8688c0b65aa8583f
|
f4d065c8ae4123f7c195040590917e177b775d46
|
refs/heads/master
| 2021-07-19T03:30:56.896731
| 2020-09-08T05:46:25
| 2020-09-08T05:46:25
| 211,306,322
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 345
|
py
|
#!/usr/bin/python
lower = int(input("Lower Bound "))
upper = int(input("Upper Bound "))
print("Prime numbers between",lower,"and",upper,"are:")
for num in range(lower,upper + 1):
# prime numbers are greater than 1
if num > 1:
for i in range(2,num):
if (num % i) == 0:
break
else:
print(num)
|
[
"noreply@github.com"
] |
noreply@github.com
|
b06eb50e6c329475c873b1a0734e44d1e58e64c5
|
38576a9ac324d6b8d870f812ea6e6589c45c0386
|
/maps/oldstuff/ai/fleet.py
|
e1fe28199c46bacc460a521bcf6a8189b644f939
|
[] |
no_license
|
arsatiki/star-admiral
|
8aa19210d6763230ce8e81a73130b9395e53041a
|
8d5d66dcf79484528ff3b6c2472f357044424722
|
refs/heads/master
| 2021-01-01T15:30:27.066128
| 2009-03-02T20:01:35
| 2009-03-02T20:01:35
| 96,899
| 4
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,216
|
py
|
from __future__ import division
from math import radians
import cmath
class Ship(object):
"""Ship(mass=100, speed=5, attack=20, hitpoint_mul = 2, pos = 0)"""
def __init__(self, mass=100, speed=5, attack=20, hitpoint_mul = 2, pos = 0j):
super(Ship, self).__init__()
self.mass = mass
self.inertia = mass/speed
        self.attack = attack
self.__hitpoints = hitpoint_mul * mass
self.heading = 1
        self.pos = pos
self.alive = True
def set_hp(self, val):
        self.__hitpoints = val
if self.__hitpoints < 0:
print self, "died"
self.alive = False
def get_hp(self):
return self.__hitpoints
hitpoints = property(get_hp, set_hp, None, "Ship hitpoints")
def fire(self, target):
        distance = abs(self.pos - target.pos)
        damage = self.attack / distance**2
        target.hitpoints -= damage
def advance(self):
movement = self.heading / self.inertia
self.pos += movement
def turn(self, direction):
angle = radians(direction * 45)
self.heading *= cmath.exp(complex(0, angle))
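# A minimal usage sketch (not from the original file), assuming the Ship class
# above: the heading is a unit complex number, so turn() rotates it and
# advance() translates the position along it.
if __name__ == '__main__':
    ship = Ship(mass=100, speed=5)
    ship.turn(1)      # rotate heading by 45 degrees
    ship.advance()    # move one step along the new heading
    print ship.pos, ship.heading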
|
[
"ars@iki.fi"
] |
ars@iki.fi
|
0116aadc905532d21ffd01fd445c5a00907e42da
|
d692e868819b7eaf2f69017056b8d3f46b8f265b
|
/python/stopwatch.py
|
65b41a1794b69d46f6b1ba767e47f0a46cfb3bd3
|
[] |
no_license
|
ksen0/submission
|
bee665b68aa6d6930224578aa2de5a428da866dc
|
ffef4e3fd3ec75eebed98dc661ef865e26398b6e
|
refs/heads/master
| 2021-06-16T22:04:20.954064
| 2017-04-24T13:56:20
| 2017-04-24T14:20:02
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 870
|
py
|
import time
class Stopwatch:
def __init__(self):
self.counts = {}
self.times = {}
self.reset()
def reset(self):
self.clock = time.time()
def lap(self, task, verbose = 0):
elapsed = time.time() - self.clock
self.reset()
self.counts[task] = self.counts.get(task, 0) + 1
self.times[task] = self.times.get(task, 0) + elapsed
if verbose>0 and self.counts[task]%verbose == 0:
print(task + " (iter. "+str(self.counts[task])+") took " + str(elapsed) + " seconds.")
def __str__(self):
out = ""
for task, count in self.counts.items():
avg = self.times[task] / count
out = out + ("\n" if len(out) > 0 else "") + str(avg)
out = out + "," + str(count) + "," + str(self.times[task]) + "," + task
return out
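# A minimal usage sketch (not from the original file), assuming the Stopwatch
# class above; the task name and workload are illustrative.
if __name__ == '__main__':
    sw = Stopwatch()
    for _ in range(10):
        sum(range(100000))        # stand-in workload to be timed
        sw.lap("sum", verbose=5)  # report every 5th iteration
    print(sw)                     # avg,count,total,task per line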
|
[
"cscw1306@users.noreply.github.com"
] |
cscw1306@users.noreply.github.com
|
70429f73bbca6c8c28bbffeb622ee490018c69d8
|
95f21bdadb48a25321f76980ba72887255033343
|
/torch/fx/graph.py
|
0975962ff45a803dd2244f6ce861e2ca16d18f36
|
[
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"BSL-1.0",
"Apache-2.0",
"BSD-2-Clause"
] |
permissive
|
chen-chentao/pytorch
|
e4f5215270aeb25c61ec700c4e142962ac1e56de
|
caa377f546306f8885fba1df230ae4db91dea2a4
|
refs/heads/master
| 2023-03-08T06:26:15.628237
| 2021-02-24T00:29:34
| 2021-02-24T00:32:21
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 39,433
|
py
|
from .node import Node, Argument, Target, map_arg, _type_repr, _get_qualified_name
from typing import Callable, Any, List, Dict, NamedTuple, Optional, Tuple, Set, FrozenSet
from dataclasses import dataclass
from contextlib import contextmanager
import torch
import keyword
import re
import builtins
import math
# Mapping of builtins to their `typing` equivalent.
_origin_type_map = {
list: List,
dict: Dict,
set: Set,
frozenset: FrozenSet,
tuple: Tuple,
}
class _CustomBuiltin(NamedTuple):
"""Additional objs that we add to every graph's globals.
The repr() for some standard library objects is not valid Python code without
an import. For common objects of this sort, we bundle them in the globals of
every FX graph.
"""
# How to import this object from the standard library.
import_str: str
# The actual object, produced from that import string.
obj: Any
_custom_builtins: Dict[str, _CustomBuiltin] = {}
def _register_custom_builtin(name: str, import_str: str, obj: Any):
_custom_builtins[name] = _CustomBuiltin(import_str, obj)
_register_custom_builtin('inf', 'from math import inf', math.inf)
_register_custom_builtin('nan', 'from math import nan', math.nan)
_register_custom_builtin('NoneType', 'NoneType = type(None)', type(None))
_register_custom_builtin('torch', 'import torch', torch)
def _is_magic(x: str) -> bool:
return x.startswith('__') and x.endswith('__')
def _snake_case(s: str) -> str:
"""
Transforms the given string ``s`` to a Python-style variable name
Examples:
``mod.snake_case`` -> ``mod.snake_case``
``mod.pascalCase``-> ``mod.pascal_case``
``mod.ALL_CAPS`` -> ``mod.all_caps``
"""
chars = []
prev_lower = False
for c in s:
if prev_lower and c.isupper():
chars.append('_')
chars.append(c.lower())
prev_lower = c.islower()
return ''.join(chars)
def _is_from_torch(obj: Any) -> bool:
module_name = getattr(obj, '__module__', None)
if module_name is not None:
base_module = module_name.partition('.')[0]
return base_module == 'torch'
name = getattr(obj, '__name__', None)
# exclude torch because torch.torch.torch.torch works. idk mang
if name is not None and name != 'torch':
for guess in [torch, torch.nn.functional]:
if getattr(guess, name, None) is obj:
return True
return False
class _Namespace:
"""A context for associating names uniquely with objects.
The following invariants are enforced:
- Each object gets a single name.
- Each name is unique within a given namespace.
- Names generated do not shadow builtins, unless the object is indeed that builtin.
"""
def __init__(self):
self._obj_to_name: Dict[Any, str] = {}
self._unassociated_names = set()
self._used_names: Dict[str, int] = {}
def create_name(self, candidate: str, obj: Optional[Any]) -> str:
"""Create a unique name.
Arguments:
candidate: used as the basis for the unique name, relevant to the user.
obj: If not None, an object that will be associated with the unique name.
"""
if obj is not None and obj in self._obj_to_name:
return self._obj_to_name[obj]
# delete all characters that are illegal in a Python identifier
candidate = re.sub('[^0-9a-zA-Z_]+', '_', candidate)
if candidate[0].isdigit():
candidate = f'_{candidate}'
while candidate in self._used_names or self._is_illegal_name(candidate, obj):
match = re.match(r"(.*)_(\d+)$", candidate)
if match is None:
candidate = candidate + '_1'
else:
base, num = match.group(1, 2)
candidate = f'{base}_{int(num) + 1}'
self._used_names.setdefault(candidate)
if obj is None:
self._unassociated_names.add(candidate)
else:
self._obj_to_name[obj] = candidate
return candidate
def associate_name_with_obj(self, name: str, obj: Any):
"""Associate a unique name with an object.
Neither `name` nor `obj` should be associated already.
"""
assert obj not in self._obj_to_name
assert name in self._unassociated_names
self._obj_to_name[obj] = name
self._unassociated_names.remove(name)
def _is_illegal_name(self, name: str, obj: Any) -> bool:
# 1. keywords are never allowed as names.
if name in keyword.kwlist:
return True
# 2. Can't shadow a builtin name, unless you *are* that builtin.
if name in builtins.__dict__:
return obj is not builtins.__dict__[name]
# 3. Can't shadow our custom builtins either
if name in _custom_builtins:
return obj is not _custom_builtins[name].obj
return False
@dataclass
class PythonCode:
"""Represents all the information necessary to exec or save a graph as Python code."""
# Python source code for the forward function definition.
src: str
    # Values in global scope during execution of `src_def`.
globals: Dict[str, Any]
def _format_args(args: Tuple[Argument, ...], kwargs: Dict[str, Argument]) -> str:
args_s = ', '.join(repr(a) for a in args)
kwargs_s = ', '.join(f'{k} = {repr(v)}' for k, v in kwargs.items())
if args_s and kwargs_s:
return f'{args_s}, {kwargs_s}'
return args_s or kwargs_s
def _format_target(base: str, target: str) -> str:
elems = target.split('.')
r = base
for e in elems:
if not e.isidentifier():
r = f'getattr({r}, "{e}")'
else:
r = f'{r}.{e}'
return r
class _InsertPoint:
def __init__(self, graph, new_insert):
self.graph = graph
self.orig_insert, graph._insert = graph._insert, new_insert
def __enter__(self):
pass
def __exit__(self, type, value, tb):
self.graph._insert = self.orig_insert
class _node_list:
def __init__(self, graph: 'Graph', direction: str = '_next'):
assert direction in ['_next', '_prev']
self.graph = graph
self.direction = direction
def __len__(self):
return self.graph._len
def __iter__(self):
root, direction = self.graph._root, self.direction
cur = getattr(root, direction)
while cur is not root:
if not cur._erased:
yield cur
cur = getattr(cur, direction)
def __reversed__(self):
return _node_list(self.graph, '_next' if self.direction == '_prev' else '_prev')
class Graph:
"""
``Graph`` is the main data structure used in the FX Intermediate Representation.
It consists of a series of ``Node`` s, each representing callsites (or other
syntactic constructs). The list of ``Node`` s, taken together, constitute a
valid Python function.
For example, the following code
.. code-block:: python
import torch
import torch.fx
class MyModule(torch.nn.Module):
def __init__(self):
super().__init__()
self.param = torch.nn.Parameter(torch.rand(3, 4))
self.linear = torch.nn.Linear(4, 5)
def forward(self, x):
return torch.topk(torch.sum(self.linear(x + self.linear.weight).relu(), dim=-1), 3)
m = MyModule()
gm = torch.fx.symbolic_trace(m)
Will produce the following Graph::
print(gm.graph)
.. code-block:: text
graph(x):
%linear_weight : [#users=1] = self.linear.weight
%add_1 : [#users=1] = call_function[target=operator.add](args = (%x, %linear_weight), kwargs = {})
%linear_1 : [#users=1] = call_module[target=linear](args = (%add_1,), kwargs = {})
%relu_1 : [#users=1] = call_method[target=relu](args = (%linear_1,), kwargs = {})
%sum_1 : [#users=1] = call_function[target=torch.sum](args = (%relu_1,), kwargs = {dim: -1})
%topk_1 : [#users=1] = call_function[target=torch.topk](args = (%sum_1, 3), kwargs = {})
return topk_1
For the semantics of operations represented in the ``Graph``, please see :class:`Node`.
"""
def __init__(self):
"""
Construct an empty Graph.
"""
self._root : Node = Node(self, '', 'root', '', (), {})
self._used_names : Dict[str, int] = {} # base name -> number
self._insert = self._root.prepend
self._len = 0
self._graph_namespace = _Namespace()
@property
def nodes(self) -> _node_list:
"""
Get the list of Nodes that constitute this Graph.
Note that this ``Node`` list representation is a doubly-linked list. Mutations
during iteration (e.g. delete a Node, add a Node) are safe.
Returns:
A doubly-linked list of Nodes. Note that ``reversed`` can be called on
this list to switch iteration order.
"""
return _node_list(self)
def graph_copy(self, g : 'Graph', val_map : Dict[Node, Node]) -> 'Optional[Argument]':
"""
Copy all nodes from a given graph into ``self``.
Args:
g (Graph): The source graph from which to copy Nodes.
val_map (Dict[Node, Node]): a dictionary that will be populated with a mapping
from nodes in ``g`` to nodes in ``self``. Note that ``val_map`` can be passed
in with values in it already to override copying of certain values.
Returns:
The value in ``self`` that is now equivalent to the output value in ``g``,
if ``g`` had an ``output`` node. ``None`` otherwise.
"""
for node in g.nodes:
if node in val_map:
continue
if node.op == 'output':
rv = map_arg(node.args[0], lambda n: val_map[n])
return rv
val_map[node] = self.node_copy(node, lambda n : val_map[n])
return None
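
    # Hedged sketch of typical use (mirrors __deepcopy__ below; `g` is an existing Graph):
    #   new_g = Graph()
    #   val_map: Dict[Node, Node] = {}
    #   out = new_g.graph_copy(g, val_map)   # copy every node of `g` into `new_g`
    #   new_g.output(out)                    # reproduce g's return value in the copy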
def __deepcopy__(self, memo=None) -> 'Graph':
"""
Explicitly implement __deepcopy__ to prevent excessive recursion depth
from the default implementation. This uses graph_copy to copy the nodes
in an iterative way, rather than recursive. It also populates the
memoization table to prevent unnecessary copies (e.g. references to
        nodes or other parts of the Graph from a custom GraphModule implementation).
        """
memo = memo if memo else {}
g = Graph()
output_val = g.graph_copy(self, val_map=memo)
g.output(output_val)
return g
def create_node(self, op: str, target: 'Target',
args: Optional[Tuple['Argument', ...]] = None,
kwargs: Optional[Dict[str, 'Argument']] = None,
name: Optional[str] = None,
type_expr: Optional[Any] = None) -> Node:
"""
Create a ``Node`` and add it to the ``Graph`` at the current insert-point.
Note that the current insert-point can be set via :meth:`Graph.inserting_before`
and :meth:`Graph.inserting_after`.
Args:
op (str): the opcode for this Node. One of 'call_function', 'call_method', 'get_attr',
'call_module', 'placeholder', or 'output'. The semantics of these opcodes are
described in the ``Graph`` docstring.
args (Optional[Tuple[Argument, ...]]): is a tuple of arguments to this node.
kwargs (Optional[Dict[str, Argument]]): the kwargs of this Node
name (Optional[str]): an optional string name for the ``Node``.
This will influence the name of the value assigned to in the
Python generated code.
type_expr (Optional[Any]): an optional type annotation representing the
Python type the output of this node will have.
Returns:
The newly-created and inserted node.
"""
assert op in ('call_function', 'call_method', 'get_attr', 'call_module', 'placeholder', 'output')
args = () if args is None else args
kwargs = {} if kwargs is None else kwargs
assert isinstance(args, tuple), "args must be a tuple"
assert isinstance(kwargs, dict), "kwargs must be a dict"
candidate = name if name is not None else self._target_to_str(target)
name = self._graph_namespace.create_name(candidate, None)
n = Node(self, name, op, target, args, kwargs, type_expr)
self._graph_namespace.associate_name_with_obj(name, n)
self._insert(n)
self._len += 1
return n
def erase_node(self, to_erase : Node) -> None:
"""
Erases a ``Node`` from the ``Graph``. Throws an exception if
there are still users of that node in the ``Graph``.
Args:
to_erase (Node): The ``Node`` to erase from the ``Graph``.
"""
if len(to_erase.users) > 0:
raise RuntimeError(f'Tried to erase Node {to_erase} but it still had {len(to_erase.users)} '
f'users in the graph: {to_erase.users}!')
to_erase._remove_from_list()
to_erase._erased = True # iterators may retain handles to erased nodes
self._len -= 1
# Null out this Node's argument nodes so that the Nodes referred to
# can update their ``users`` accordingly
new_args = map_arg(to_erase.args, lambda n: None)
assert isinstance(new_args, tuple)
to_erase.args = new_args
new_kwargs = map_arg(to_erase.kwargs, lambda n: None)
assert isinstance(new_kwargs, dict)
to_erase.kwargs = new_kwargs
def inserting_before(self, n: Optional[Node] = None):
"""Set the point at which create_node and companion methods will insert into the graph.
        When used within a 'with' statement, this will temporarily set the insert point and
then restore it when the with statement exits::
with g.inserting_before(n):
... # inserting before node n
... # insert point restored to what it was previously
g.inserting_before(n) # set the insert point permanently
Args:
n (Optional[Node]): The node before which to insert. If None this will insert before
the beginning of the entire graph.
Returns:
A resource manager that will restore the insert point on ``__exit__``.
"""
if n is None:
return self.inserting_after(self._root)
assert n.graph == self, "Node to insert before is not in graph."
return _InsertPoint(self, n.prepend)
def inserting_after(self, n: Optional[Node] = None):
"""Set the point at which create_node and companion methods will insert into the graph.
        When used within a 'with' statement, this will temporarily set the insert point and
then restore it when the with statement exits::
with g.inserting_after(n):
... # inserting after node n
... # insert point restored to what it was previously
g.inserting_after(n) # set the insert point permanently
Args:
            n (Optional[Node]): The node after which to insert. If None this will insert at
                the end of the entire graph.
Returns:
A resource manager that will restore the insert point on ``__exit__``.
"""
if n is None:
return self.inserting_before(self._root)
assert n.graph == self, "Node to insert after is not in graph."
return _InsertPoint(self, n.append)
# sugar for create_node when you know the op
def placeholder(self, name: str, type_expr: Optional[Any] = None) -> Node:
"""
Insert a ``placeholder`` node into the Graph. A ``placeholder`` represents
a function input.
Args:
name (str): A name for the input value. This corresponds to the name
of the positional argument to the function this ``Graph`` represents.
type_expr (Optional[Any]): an optional type annotation representing the
Python type the output of this node will have. This is needed in some
cases for proper code generation (e.g. when the function is used
subsequently in TorchScript compilation).
.. note::
The same insertion point and type expression rules apply for this method
as ``Graph.create_node``.
"""
return self.create_node('placeholder', name, type_expr=type_expr)
def get_attr(self, qualified_name: str, type_expr: Optional[Any] = None) -> Node:
"""
Insert a ``get_attr`` node into the Graph. A ``get_attr`` ``Node`` represents the
fetch of an attribute from the ``Module`` hierarchy.
Args:
qualified_name (str): the fully-qualified name of the attribute to be retrieved.
For example, if the traced Module has a submodule named ``foo``, which has a
submodule named ``bar``, which has an attribute named ``baz``, the qualified
name ``foo.bar.baz`` should be passed as ``qualified_name``.
type_expr (Optional[Any]): an optional type annotation representing the
Python type the output of this node will have.
Returns:
The newly-created and inserted ``get_attr`` node.
.. note::
The same insertion point and type expression rules apply for this method
as ``Graph.create_node``.
"""
return self.create_node('get_attr', qualified_name, type_expr=type_expr)
def call_module(self,
module_name: str,
args: Optional[Tuple['Argument', ...]] = None,
kwargs: Optional[Dict[str, 'Argument']] = None,
type_expr: Optional[Any] = None) -> Node:
"""
Insert a ``call_module`` ``Node`` into the ``Graph``. A ``call_module`` node
represents a call to the forward() function of a ``Module`` in the ``Module``
hierarchy.
Args:
module_name (str): The qualified name of the ``Module`` in the ``Module``
hierarchy to be called. For example, if the traced ``Module`` has a
submodule named ``foo``, which has a submodule named ``bar``, the
qualified name ``foo.bar`` should be passed as ``module_name`` to
call that module.
args (Optional[Tuple[Argument, ...]]): The positional arguments to be passed
to the called method. Note that this should *not* include a ``self`` argument.
kwargs (Optional[Dict[str, Argument]]): The keyword arguments to be passed
to the called method
type_expr (Optional[Any]): an optional type annotation representing the
Python type the output of this node will have.
Returns:
The newly-created and inserted ``call_module`` node.
.. note::
The same insertion point and type expression rules apply for this method
as :meth:`Graph.create_node`.
"""
return self.create_node('call_module', module_name, args, kwargs, type_expr=type_expr)
def call_method(self,
method_name: str,
args: Optional[Tuple['Argument', ...]] = None,
kwargs: Optional[Dict[str, 'Argument']] = None,
type_expr: Optional[Any] = None) -> Node:
"""
Insert a ``call_method`` ``Node`` into the ``Graph``. A ``call_method`` node
represents a call to a given method on the 0th element of ``args``.
Args:
method_name (str): The name of the method to apply to the self argument.
For example, if args[0] is a ``Node`` representing a ``Tensor``,
then to call ``relu()`` on that ``Tensor``, pass ``relu`` to ``method_name``.
args (Optional[Tuple[Argument, ...]]): The positional arguments to be passed
to the called method. Note that this *should* include a ``self`` argument.
kwargs (Optional[Dict[str, Argument]]): The keyword arguments to be passed
to the called method
type_expr (Optional[Any]): an optional type annotation representing the
Python type the output of this node will have.
Returns:
The newly created and inserted ``call_method`` node.
.. note::
The same insertion point and type expression rules apply for this method
as :meth:`Graph.create_node`.
"""
return self.create_node('call_method', method_name, args, kwargs, type_expr=type_expr)
def call_function(self,
the_function: Callable[..., Any],
args: Optional[Tuple['Argument', ...]] = None,
kwargs: Optional[Dict[str, 'Argument']] = None,
type_expr: Optional[Any] = None) -> Node:
"""
        Insert a ``call_function`` ``Node`` into the ``Graph``. A ``call_function`` node
        represents a call to a Python callable, specified by ``the_function``.
Args:
the_function (Callable[..., Any]): The function to be called. Can be any PyTorch
operator, Python function, or member of the ``builtins`` or ``operator``
namespaces.
args (Optional[Tuple[Argument, ...]]): The positional arguments to be passed
to the called function.
kwargs (Optional[Dict[str, Argument]]): The keyword arguments to be passed
to the called function
type_expr (Optional[Any]): an optional type annotation representing the
Python type the output of this node will have.
        Returns:
            The newly created and inserted ``call_function`` node.
.. note::
The same insertion point and type expression rules apply for this method
as :meth:`Graph.create_node`.
"""
return self.create_node('call_function', the_function, args, kwargs, type_expr=type_expr)
def node_copy(self, node: Node, arg_transform: Callable[[Node], 'Argument'] = lambda x: x) -> Node:
"""
Copy a node from one graph into another. ``arg_transform`` needs to transform arguments from
the graph of node to the graph of self. Example::
# Copying all the nodes in `g` into `new_graph`
            g : torch.fx.Graph = ...
            new_graph = torch.fx.Graph()
value_remap = {}
for node in g.nodes:
value_remap[node] = new_graph.node_copy(node, lambda n : value_remap[n])
Args:
node (Node): The node to copy into ``self``.
arg_transform (Callable[[Node], Argument]): A function that transforms
``Node`` arguments in node's ``args`` and ``kwargs`` into the
equivalent argument in ``self``. In the simplest case, this should
retrieve a value out of a table mapping Nodes in the original
graph to ``self``.
"""
args = map_arg(node.args, arg_transform)
kwargs = map_arg(node.kwargs, arg_transform)
assert isinstance(args, tuple)
assert isinstance(kwargs, dict)
return self.create_node(node.op, node.target, args, kwargs, node.name, node.type)
def output(self, result: 'Argument', type_expr: Optional[Any] = None):
"""
Insert an ``output`` ``Node`` into the ``Graph``. An ``output`` node represents
a ``return`` statement in Python code. ``result`` is the value that should
be returned.
Args:
result (Argument): The value to be returned.
type_expr (Optional[Any]): an optional type annotation representing the
Python type the output of this node will have.
.. note::
The same insertion point and type expression rules apply for this method
as ``Graph.create_node``.
"""
return self.create_node(op='output', target='output', args=(result,), type_expr=type_expr)
def _target_to_str(self, target : Target) -> str:
if callable(target):
op = target.__name__
else:
assert isinstance(target, str)
op = target
if _is_magic(op):
op = op[2:-2]
op = _snake_case(op)
return op
def python_code(self, root_module: str) -> PythonCode:
"""
Turn this ``Graph`` into valid Python code.
Args:
root_module (str): The name of the root module on which to look-up
qualified name targets. This is usually 'self'.
Returns:
A PythonCode object, consisting of two fields:
src: the Python source code representing the object
globals: a dictionary of global names in `src` -> the objects that they reference.
"""
# NOTE: [Graph Namespaces]
#
# There are two types of symbols in generated Python source code:
# locals and globals.
# Locals are locally defined by the output of a node in the Graph.
# Globals are references to external objects, like functions or types.
#
# When generating Python code, we need to make sure to name things
# appropriately. In particular:
# - All names should be unique, to avoid weird shadowing bugs.
        # - These names need to be consistent, e.g. an object should always be
# referenced by the same name.
#
# To do this, we create a new namespace just for this source. All names
# that get printed must come from this namespace.
#
# Why can't we re-use node.name? Because it was generated within the
# namespace `self._graph_namespace`. In order to provide uniqueness
# over both locals (node.name) *and* globals, we create a completely
# new namespace to put all identifiers in.
namespace = _Namespace()
# Override Node's repr to generate a valid name within our namespace.
# Since repr() is designed to produce a valid Python expression, it
# makes sense to re-use it. This way, it's easy to print something like
# Tuple[Node, Node] by simply calling repr() on it. Node's __repr__ is
# implemented cooperatively to allow this.
def node_repr(n: Node):
return namespace.create_name(n.name, n)
@contextmanager
def override_node_repr(graph: Graph):
orig_repr_fns = {}
for node in graph.nodes:
orig_repr_fns[node] = node._repr_fn
node._repr_fn = node_repr
try:
yield None
finally:
# restore the original repr functions
for node in graph.nodes:
node._repr_fn = orig_repr_fns[node]
with override_node_repr(self):
return self._python_code(root_module, namespace)
def _python_code(self, root_module: str, namespace: _Namespace) -> PythonCode:
free_vars: List[str] = []
body: List[str] = []
globals_: Dict[str, Any] = {}
# Wrap string in list to pass by reference
maybe_return_annotation : List[str] = ['']
def add_global(name_hint: str, obj: Any):
"""Add an obj to be tracked as a global.
We call this for names that reference objects external to the
Graph, like functions or types.
Returns: the global name that should be used to reference 'obj' in generated source.
"""
if _is_from_torch(obj):
# HACK: workaround for how torch custom ops are registered. We
# can't import them like normal modules so they must retain their
# fully qualified name.
return _get_qualified_name(obj)
# normalize the name hint to get a proper identifier
global_name = namespace.create_name(name_hint, obj)
if global_name in globals_:
assert globals_[global_name] is obj
return global_name
globals_[global_name] = obj
return global_name
# Pre-fill the globals table with registered builtins.
for name, (_, obj) in _custom_builtins.items():
add_global(name, obj)
def type_repr(o : Any):
typename = _type_repr(o)
# This is a generic type, e.g. typing.List[torch.Tensor]
if hasattr(o, '__origin__'):
origin_type = _origin_type_map.get(o.__origin__, o.__origin__)
origin_typename = add_global(_type_repr(origin_type), origin_type)
# Assign global names for each of the inner type variables.
args = [type_repr(arg) for arg in o.__args__]
return f'{origin_typename}[{",".join(args)}]'
# Common case: this is a regular module name like 'foo.bar.baz'
return add_global(typename, o)
# Run through reverse nodes and record the first instance of a use
# of a given node. This represents the *last* use of the node in the
# execution order of the program, which we will use to free unused
# values
node_to_last_use : Dict[Node, Node] = {}
user_to_last_uses : Dict[Node, List[Node]] = {}
def register_last_uses(n : Node, user : Node):
if n not in node_to_last_use:
node_to_last_use[n] = user
user_to_last_uses.setdefault(user, []).append(n)
for node in reversed(self.nodes):
map_arg(node.args, lambda n: register_last_uses(n, node))
map_arg(node.kwargs, lambda n: register_last_uses(n, node))
def delete_unused_values(user : Node):
"""
Delete values after their last use. This ensures that values that are
not used in the remainder of the code are freed and the memory usage
of the code is optimal.
"""
if user.op == 'placeholder':
return
if user.op == 'output':
body.append('\n')
return
nodes_to_delete = user_to_last_uses.get(user, [])
if len(nodes_to_delete):
to_delete_str = ' = '.join([repr(n) for n in nodes_to_delete] + ['None'])
body.append(f'; {to_delete_str}\n')
else:
body.append('\n')
def emit_node(node : Node):
if node.op == 'placeholder':
assert isinstance(node.target, str)
maybe_type_annotation = '' if node.type is None else f' : {type_repr(node.type)}'
maybe_default_arg = '' if not node.args else f' = {repr(node.args[0])}'
free_vars.append(f'{node.target}{maybe_type_annotation}{maybe_default_arg}')
raw_name = node.target.replace('*', '')
if raw_name != repr(node):
body.append(f'{repr(node)} = {raw_name}\n')
return
elif node.op == 'call_method':
assert isinstance(node.target, str)
body.append(
f'{repr(node)} = {_format_target(repr(node.args[0]), node.target)}'
f'({_format_args(node.args[1:], node.kwargs)})')
return
elif node.op == 'call_function':
assert callable(node.target)
# pretty print operators
if node.target.__module__ == '_operator' and node.target.__name__ in magic_methods:
assert isinstance(node.args, tuple)
body.append(f'{repr(node)} = {magic_methods[node.target.__name__].format(*(repr(a) for a in node.args))}')
return
qualified_name = _get_qualified_name(node.target)
global_name = add_global(qualified_name, node.target)
if global_name == 'getattr' and \
isinstance(node.args, tuple) and \
isinstance(node.args[1], str) and \
node.args[1].isidentifier():
# pretty print attribute access
body.append(f'{repr(node)} = {_format_target(repr(node.args[0]), node.args[1])}')
return
body.append(f'{repr(node)} = {global_name}({_format_args(node.args, node.kwargs)})')
return
elif node.op == 'call_module':
assert isinstance(node.target, str)
body.append(f'{repr(node)} = {_format_target(root_module, node.target)}({_format_args(node.args, node.kwargs)})')
return
elif node.op == 'get_attr':
assert isinstance(node.target, str)
body.append(f'{repr(node)} = {_format_target(root_module, node.target)}')
return
elif node.op == 'output':
if node.type is not None:
maybe_return_annotation[0] = f" -> {type_repr(node.type)}"
body.append(f'return {repr(node.args[0])}')
return
raise NotImplementedError(f'node: {node.op} {node.target}')
for node in self.nodes:
# NOTE: emit_node does not emit a string with newline. It depends
# on delete_unused_values to append one
emit_node(node)
delete_unused_values(node)
        # repr() for inf and nan floating point values isn't parseable by
        # Python as literals; those names are covered by the ``math`` imports
        # pre-registered in ``_custom_builtins`` above.
if len(body) == 0:
# If the Graph has no non-placeholder nodes, no lines for the body
# have been emitted. To continue to have valid Python code, emit a
# single pass statement
body.append('pass\n')
code = ''.join(body)
code = '\n'.join(' ' + line for line in code.split('\n'))
fn_code = f"""
def forward(self, {', '.join(free_vars)}){maybe_return_annotation[0]}:
{code}"""
return PythonCode(fn_code,
globals_)
def __str__(self) -> str:
"""
Print a human-readable (not machine-readable) string representation
of this Graph
"""
placeholder_names : List[str] = []
# This is a one-element array just so ``format_node`` can modify the closed
# over value
maybe_return_typename : List[str] = ['']
node_strs = [node.format_node(placeholder_names) for node in self.nodes]
param_str = ', '.join(placeholder_names)
s = f'graph({param_str}){maybe_return_typename[0]}:'
for node_str in node_strs:
if node_str:
s += '\n ' + node_str
return s
def print_tabular(self):
"""
Prints the intermediate representation of the graph in tabular
format.
"""
try:
from tabulate import tabulate
        except ImportError:
            print("`print_tabular` relies on the library `tabulate`, "
                  "which could not be found on this machine. Run `pip "
                  "install tabulate` to install the library.")
            return  # without tabulate, the call below would raise a NameError
node_specs = [[n.op, n.name, n.target, n.args, n.kwargs]
for n in self.nodes]
print(tabulate(node_specs,
headers=['opcode', 'name', 'target', 'args', 'kwargs']))
def lint(self, root : Optional[torch.nn.Module] = None):
"""
Runs various checks on this Graph to make sure it is well-formed. In
particular:
- Checks Nodes have correct ownership (owned by this graph)
- Checks Nodes appear in topological order
- If ``root`` is provided, checks that targets exist in ``root``
Args:
root (Optional[torch.nn.Module]): The root module with which to check
for targets. This is equivalent to the ``root`` argument that is
passed when constructing a ``GraphModule``.
"""
# Check topo order
def check_arg(arg : Node, n : Optional[Node] = None) -> None:
context_str = f' of Node \'{n}\' ' if n else ' '
if arg.graph is not self:
raise RuntimeError(f'Argument \'{arg}\'{context_str}does not belong to this Graph, '
f'but was used as an argument! If you are copying nodes from another graph, make '
f'sure to use ``arg_transform`` on node_copy() to remap values\n{self}')
if arg not in seen_values:
raise RuntimeError(f'Argument \'{arg}\'{context_str}was used before it has been '
f'defined! Please check that Nodes in the graph are topologically ordered\n{self}')
seen_names : Set[str] = set()
seen_values : Set[Node] = set()
for node in self.nodes:
if node.op not in ['placeholder', 'call_method', 'call_module', 'call_function', 'get_attr', 'output']:
raise RuntimeError(f'Node {node} had unknown opcode {node.op}!')
if node.graph is not self:
raise RuntimeError(f'Node \'{node}\' does not belong to this Graph!')
map_arg(node.args, lambda arg: check_arg(arg, node))
map_arg(node.kwargs, lambda arg: check_arg(arg, node))
seen_values.add(node)
if node.name in seen_names:
raise RuntimeError(f'Node redefined name {node.name}!')
seen_names.add(node.name)
# Check targets are legit
if root:
for node in self.nodes:
if node.op in ['get_attr', 'call_module']:
assert isinstance(node.target, str)
target_atoms = node.target.split('.')
m_itr = root
for i, atom in enumerate(target_atoms):
m_itr = getattr(m_itr, atom, None)
if m_itr is None:
seen_qualname = '.'.join(target_atoms[:i])
raise RuntimeError(f'Node {node} target {node.target} references nonexistent attribute '
f'{atom} of {seen_qualname}')
reflectable_magic_methods = {
'add': '{} + {}',
'sub': '{} - {}',
'mul': '{} * {}',
'floordiv': '{} // {}',
'truediv': '{} / {}',
'div': '{} / {}',
'mod': '{} % {}',
'pow': '{} ** {}',
'lshift': '{} << {}',
'rshift': '{} >> {}',
'and': '{} & {}',
'or': '{} | {}',
'xor': '{} ^ {}',
'getitem': '{}[{}]'
}
magic_methods = dict({
'eq': '{} == {}',
'ne': '{} != {}',
'lt': '{} < {}',
'gt': '{} > {}',
'le': '{} <= {}',
'ge': '{} >= {}',
'pos': '+{}',
'neg': '-{}',
'invert': '~{}'}, **reflectable_magic_methods)
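
# --- Hedged end-to-end sketch (illustrative only, not part of the original file) ---
#   import operator
#   g = Graph()
#   x = g.placeholder('x')
#   y = g.call_function(operator.add, (x, x))
#   g.output(y)
#   print(g)                        # human-readable IR listing
#   code = g.python_code('self')    # PythonCode(src=..., globals=...)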
|
[
"facebook-github-bot@users.noreply.github.com"
] |
facebook-github-bot@users.noreply.github.com
|
dc34c1f11f334a3d915def0a7e3345ee0781e7e9
|
38ea041a35d6e1bbdcb875cfff1a313b02476e81
|
/appModules/AddContact.py
|
515d8116725ef529922d9747efd7df54bad352c6
|
[] |
no_license
|
saraliuhou/DataDriverTestFrameWork
|
1824d0b771c20a87ce3d0b5cebf5cf1e70b4226b
|
5f243026e9f03e96fa010f945fb31b7545759798
|
refs/heads/master
| 2020-06-01T00:19:32.435417
| 2019-06-12T09:10:09
| 2019-06-12T09:10:09
| 190,554,542
| 0
| 0
| null | 2019-06-06T09:29:50
| 2019-06-06T09:29:50
| null |
UTF-8
|
Python
| false
| false
| 2,262
|
py
|
from pageObjects.HomePage import HomePage
from pageObjects.NewContact import AddContactPage
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from util.ParseConfigurationFile import ParseConfigFile
class NewContactPersonAction(object):
def __init__(self):
pass
@staticmethod
def addressLink(driver):
        '''
        Click the Contacts link
        :param driver:
        :return:
        '''
homePage = HomePage(driver)
        # Click Contacts
homePage.addressLink().click()
@staticmethod
    def addContact(driver, contactName, contactMail, isStar, contactPhone, contactComment):
        '''
        Scenario: add a new contact
        :param driver:
        :param contactName:
        :param contactMail:
        :param isStar:
        :param contactPhone:
        :param contactComment:
        :return:
        '''
        # Click "New Contact"
addContact = AddContactPage(driver)
        # While debugging, the [New Contact] button is sometimes not yet clickable, so an explicit wait is added here
by, locator = ParseConfigFile().getElementValue('126mail_addContactPage', 'addContactPage.newContact')
WebDriverWait(driver, 30).until(EC.element_to_be_clickable((by, locator)))
addContact.newContact().click()
if contactName:
            # optional field
addContact.addName().send_keys(contactName)
        # required field
addContact.addMail().send_keys(contactMail)
        if isStar == '是':  # '是' means "yes"; the literal is application data, kept as-is
addContact.markStar().click()
if contactPhone:
addContact.addPhone().send_keys(contactPhone)
if contactComment:
addContact.addContent().send_keys(contactComment)
addContact.clickCommitBtn().click()
if __name__ == '__main__':
from appModules.LoginAction import LoginAction
import time
from selenium import webdriver
driver = webdriver.Firefox()
driver.get('https://mail.126.com')
time.sleep(5)
LoginAction.login(driver, 'linux', 'chao')
NewContactPersonAction.addressLink(driver)
NewContactPersonAction.addContact(driver, '','123456@qq.com', '是', '','')
time.sleep(5)
driver.quit()
|
[
"281754043@qq.com"
] |
281754043@qq.com
|
c4a498197bd65892c63d8b651006a2e100b27e0c
|
48e124e97cc776feb0ad6d17b9ef1dfa24e2e474
|
/sdk/python/pulumi_azure_native/media/v20210601/get_transform.py
|
2d5107073354128ca98d5c3db8d4db0c9a68f79d
|
[
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
bpkgoud/pulumi-azure-native
|
0817502630062efbc35134410c4a784b61a4736d
|
a3215fe1b87fba69294f248017b1591767c2b96c
|
refs/heads/master
| 2023-08-29T22:39:49.984212
| 2021-11-15T12:43:41
| 2021-11-15T12:43:41
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,036
|
py
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetTransformResult',
'AwaitableGetTransformResult',
'get_transform',
'get_transform_output',
]
@pulumi.output_type
class GetTransformResult:
"""
A Transform encapsulates the rules or instructions for generating desired outputs from input media, such as by transcoding or by extracting insights. After the Transform is created, it can be applied to input media by creating Jobs.
"""
def __init__(__self__, created=None, description=None, id=None, last_modified=None, name=None, outputs=None, system_data=None, type=None):
if created and not isinstance(created, str):
raise TypeError("Expected argument 'created' to be a str")
pulumi.set(__self__, "created", created)
if description and not isinstance(description, str):
raise TypeError("Expected argument 'description' to be a str")
pulumi.set(__self__, "description", description)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if last_modified and not isinstance(last_modified, str):
raise TypeError("Expected argument 'last_modified' to be a str")
pulumi.set(__self__, "last_modified", last_modified)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if outputs and not isinstance(outputs, list):
raise TypeError("Expected argument 'outputs' to be a list")
pulumi.set(__self__, "outputs", outputs)
if system_data and not isinstance(system_data, dict):
raise TypeError("Expected argument 'system_data' to be a dict")
pulumi.set(__self__, "system_data", system_data)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def created(self) -> str:
"""
The UTC date and time when the Transform was created, in 'YYYY-MM-DDThh:mm:ssZ' format.
"""
return pulumi.get(self, "created")
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
An optional verbose description of the Transform.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def id(self) -> str:
"""
Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="lastModified")
def last_modified(self) -> str:
"""
The UTC date and time when the Transform was last updated, in 'YYYY-MM-DDThh:mm:ssZ' format.
"""
return pulumi.get(self, "last_modified")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def outputs(self) -> Sequence['outputs.TransformOutputResponse']:
"""
An array of one or more TransformOutputs that the Transform should generate.
"""
return pulumi.get(self, "outputs")
@property
@pulumi.getter(name="systemData")
def system_data(self) -> 'outputs.SystemDataResponse':
"""
The system metadata relating to this resource.
"""
return pulumi.get(self, "system_data")
@property
@pulumi.getter
def type(self) -> str:
"""
The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
"""
return pulumi.get(self, "type")
class AwaitableGetTransformResult(GetTransformResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetTransformResult(
created=self.created,
description=self.description,
id=self.id,
last_modified=self.last_modified,
name=self.name,
outputs=self.outputs,
system_data=self.system_data,
type=self.type)
def get_transform(account_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
transform_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetTransformResult:
"""
A Transform encapsulates the rules or instructions for generating desired outputs from input media, such as by transcoding or by extracting insights. After the Transform is created, it can be applied to input media by creating Jobs.
:param str account_name: The Media Services account name.
:param str resource_group_name: The name of the resource group within the Azure subscription.
:param str transform_name: The Transform name.
"""
__args__ = dict()
__args__['accountName'] = account_name
__args__['resourceGroupName'] = resource_group_name
__args__['transformName'] = transform_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:media/v20210601:getTransform', __args__, opts=opts, typ=GetTransformResult).value
return AwaitableGetTransformResult(
created=__ret__.created,
description=__ret__.description,
id=__ret__.id,
last_modified=__ret__.last_modified,
name=__ret__.name,
outputs=__ret__.outputs,
system_data=__ret__.system_data,
type=__ret__.type)
@_utilities.lift_output_func(get_transform)
def get_transform_output(account_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
transform_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetTransformResult]:
"""
A Transform encapsulates the rules or instructions for generating desired outputs from input media, such as by transcoding or by extracting insights. After the Transform is created, it can be applied to input media by creating Jobs.
:param str account_name: The Media Services account name.
:param str resource_group_name: The name of the resource group within the Azure subscription.
:param str transform_name: The Transform name.
"""
...
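
# --- Hedged usage sketch (the resource names below are hypothetical) ---
#   result = get_transform(account_name="mymediaaccount",
#                          resource_group_name="my-rg",
#                          transform_name="my-transform")
#   print(result.name, len(result.outputs))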
|
[
"noreply@github.com"
] |
noreply@github.com
|
0998d7b83e0b0f823115e7d2c6b0d79aadab41fd
|
191863e86029b0e5866bf810ec7bf209fce88b6f
|
/frameworks/mxnet/code/test_train.py
|
bba2094bb5d9a7e52f1fc55b1c7a9e19bf647552
|
[
"Apache-2.0"
] |
permissive
|
EmilyWebber/amazon-sagemaker-examples
|
237efa2211a5d26854ece7da2356d627d4f5f225
|
fb5a8ba5298bb7879ee9b8c10cb0842ac078b84b
|
refs/heads/main
| 2022-04-06T10:29:56.728806
| 2022-04-05T14:54:25
| 2022-04-05T14:54:25
| 331,713,808
| 3
| 3
|
Apache-2.0
| 2021-01-21T18:13:20
| 2021-01-21T18:13:19
| null |
UTF-8
|
Python
| false
| false
| 2,221
|
py
|
# coding=utf-8
# Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import sys
import boto3
from train import parse_args, train
dirname = os.path.dirname(os.path.abspath(__file__))
with open(os.path.join(dirname, "config.json"), "r") as f:
CONFIG = json.load(f)
def download_from_s3(data_dir="/tmp/data", train=True):
"""Download MNIST dataset and convert it to numpy array
Args:
data_dir (str): directory to save the data
train (bool): download training set
Returns:
tuple of images and labels as numpy arrays
"""
if not os.path.exists(data_dir):
os.makedirs(data_dir)
if train:
images_file = "train-images-idx3-ubyte.gz"
labels_file = "train-labels-idx1-ubyte.gz"
else:
images_file = "t10k-images-idx3-ubyte.gz"
labels_file = "t10k-labels-idx1-ubyte.gz"
# download objects
s3 = boto3.client("s3")
bucket = CONFIG["public_bucket"]
for obj in [images_file, labels_file]:
key = os.path.join("datasets/image/MNIST", obj)
dest = os.path.join(data_dir, obj)
if not os.path.exists(dest):
s3.download_file(bucket, key, dest)
return
class Env:
def __init__(self):
# simulate container env
os.environ["SM_MODEL_DIR"] = "/tmp/model"
os.environ["SM_CHANNEL_TRAINING"] = "/tmp/data"
os.environ["SM_CHANNEL_TESTING"] = "/tmp/data"
os.environ["SM_HOSTS"] = '["algo-1"]'
os.environ["SM_CURRENT_HOST"] = "algo-1"
os.environ["SM_NUM_GPUS"] = "0"
if __name__ == "__main__":
    Env()  # populate the simulated SageMaker container environment variables
args = parse_args()
train(args)
|
[
"noreply@github.com"
] |
noreply@github.com
|
61a7de6458a93abbd4bcecb609a66744676cb232
|
dc9791ba93f46274c859823ec4c94cfd5e7b314f
|
/mne/lib/python2.7/site-packages/mne/realtime/mock_lsl_stream.py
|
d30654dd044407b4ad234122d037895f4a440744
|
[] |
no_license
|
GanshengT/INSERM_EEG_Enrico_Proc
|
84f8225920227dee89b98d9065fa6036b54a66d7
|
343edc32e5c9705213189a088855c635b31ca22b
|
refs/heads/master
| 2020-06-11T08:31:08.689920
| 2020-04-23T07:29:44
| 2020-04-23T07:29:44
| 193,898,744
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,355
|
py
|
# Authors: Teon Brooks <teon.brooks@gmail.com>
#
# License: BSD (3-clause)
import time
from ..utils import _check_pylsl_installed, deprecated
from ..io import constants
RT_MSG = ('The realtime module is being deprecated from `mne-python` '
'and moved to its own package, `mne-realtime`. '
'To install, please use `$ pip install mne_realtime`.')
@deprecated(RT_MSG)
class MockLSLStream(object):
"""Mock LSL Stream.
Parameters
----------
host : str
The LSL identifier of the server.
raw : instance of Raw object
An instance of Raw object to be streamed.
ch_type : str
The type of data that is being streamed.
time_dilation : int
A scale factor to speed up or slow down the rate of
the data being streamed.
"""
def __init__(self, host, raw, ch_type, time_dilation=1):
self._host = host
self._ch_type = ch_type
self._time_dilation = time_dilation
raw.load_data().pick(ch_type)
self._raw = raw
self._sfreq = int(self._raw.info['sfreq'])
def start(self):
"""Start a mock LSL stream."""
try:
from multiprocessing import Process
except ImportError:
raise ImportError('This requires multiprocessing '
'to work properly.')
print("now sending data...")
self.process = Process(target=self._initiate_stream)
self.process.daemon = True
self.process.start()
return self
def stop(self):
"""Stop a mock LSL stream."""
self._streaming = False
print("Stopping stream...")
self.process.terminate()
return self
def __enter__(self):
"""Enter the context manager."""
self.start()
return self
def __exit__(self, type_, value, traceback):
"""Exit the context manager."""
self.stop()
def _initiate_stream(self):
# outlet needs to be made on the same process
pylsl = _check_pylsl_installed(strict=True)
self._streaming = True
info = pylsl.StreamInfo(name='MNE', type=self._ch_type.upper(),
channel_count=self._raw.info['nchan'],
nominal_srate=self._sfreq,
channel_format='float32', source_id=self._host)
info.desc().append_child_value("manufacturer", "MNE")
channels = info.desc().append_child("channels")
for ch in self._raw.info['chs']:
unit = ch['unit']
keys, values = zip(*list(constants.FIFF.items()))
unit = keys[values.index(unit)]
channels.append_child("channel") \
.append_child_value("label", ch['ch_name']) \
.append_child_value("type", self._ch_type.lower()) \
.append_child_value("unit", unit)
# next make an outlet
outlet = pylsl.StreamOutlet(info)
# let's make some data
counter = 0
while self._streaming:
mysample = self._raw[:, counter][0].ravel().tolist()
# now send it and wait for a bit
outlet.push_sample(mysample)
counter = 0 if counter == self._raw.last_samp else counter + 1
time.sleep(self._time_dilation / self._sfreq)
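
# --- Hedged usage sketch (assumes `raw` is a preloaded mne.io.Raw and pylsl is installed) ---
#   with MockLSLStream('mne_stream', raw, 'eeg') as stream:
#       ...  # connect any LSL inlet to source_id 'mne_stream' and pull samples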
|
[
"aegean0045@outlook.com"
] |
aegean0045@outlook.com
|
70237c341ae1c9585377c6c6ec289173ce92bdae
|
148044ba8412cfe9227201e82360770d6a7e9780
|
/check_screen.py
|
095f92651215f811b52a37d88fe1c3fbc9022209
|
[] |
no_license
|
mwaskom/sticks_experiment
|
9e0b2af851e20f82cd8a3011b08ac061b0061191
|
fcfd98cb4528e9011168be27b2121a96514b3fa3
|
refs/heads/master
| 2023-08-24T06:22:27.939464
| 2015-08-28T23:53:16
| 2015-08-31T04:38:24
| 38,704,521
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 424
|
py
|
import sys
from psychopy import visual, event
import cregg
def main(arglist):
p = cregg.Params("scan")
p.set_by_cmdline(arglist)
win = cregg.launch_window(p)
visual.Circle(win, p.array_radius,
edges=128,
lineColor="white",
lineWidth=2).draw()
win.flip()
event.waitKeys(keyList=p.quit_keys)
if __name__ == "__main__":
main(sys.argv[1:])
|
[
"mwaskom@stanford.edu"
] |
mwaskom@stanford.edu
|
0c6ec38b710115292a757031f6f00f64e4877fb2
|
44924df18da6a8400ad97014d486d176d3098753
|
/test/geometry/test_homography.py
|
5edb347580663a84ef0f62743efd9e5094f9cd9b
|
[
"Apache-2.0"
] |
permissive
|
ducha-aiki/kornia
|
d41cd60fdcb3f03c9bcb8f304efa70db8a6436ce
|
0aa7a7110872f610f3947eccc4a35f0f0c7d79bd
|
refs/heads/master
| 2023-01-30T12:37:45.760726
| 2021-10-13T13:09:30
| 2021-10-13T13:09:30
| 192,566,129
| 3
| 0
|
Apache-2.0
| 2023-01-15T21:02:45
| 2019-06-18T15:21:56
|
Python
|
UTF-8
|
Python
| false
| false
| 12,160
|
py
|
import random
import pytest
import torch
from torch.autograd import gradcheck
import kornia
import kornia.testing as utils
from kornia.geometry.homography import (
find_homography_dlt,
find_homography_dlt_iterated,
oneway_transfer_error,
symmetric_transfer_error,
)
from kornia.testing import assert_close
class TestOneWayError:
def test_smoke(self, device, dtype):
pts1 = torch.rand(1, 6, 2, device=device, dtype=dtype)
pts2 = torch.rand(1, 6, 2, device=device, dtype=dtype)
H = utils.create_random_homography(1, 3).type_as(pts1).to(device)
assert oneway_transfer_error(pts1, pts2, H).shape == (1, 6)
def test_batch(self, device, dtype):
batch_size = 5
pts1 = torch.rand(batch_size, 3, 2, device=device, dtype=dtype)
pts2 = torch.rand(batch_size, 3, 2, device=device, dtype=dtype)
H = utils.create_random_homography(1, 3).type_as(pts1).to(device)
assert oneway_transfer_error(pts1, pts2, H).shape == (batch_size, 3)
def test_gradcheck(self, device):
# generate input data
batch_size, num_points, num_dims = 2, 3, 2
points1 = torch.rand(batch_size, num_points, num_dims, device=device, dtype=torch.float64, requires_grad=True)
points2 = torch.rand(batch_size, num_points, num_dims, device=device, dtype=torch.float64)
H = utils.create_random_homography(batch_size, 3).type_as(points1).to(device)
assert gradcheck(oneway_transfer_error, (points1, points2, H), raise_exception=True)
def test_shift(self, device, dtype):
pts1 = torch.zeros(3, 2, device=device, dtype=dtype)[None]
pts2 = torch.tensor([[1.0, 0.0], [2.0, 0.0], [2.0, 2.0]], device=device, dtype=dtype)[None]
H = torch.tensor([[1.0, 0.0, 1.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]], dtype=dtype, device=device)[None]
expected = torch.tensor([0.0, 1.0, 5.0], device=device, dtype=dtype)[None]
assert_close(oneway_transfer_error(pts1, pts2, H), expected, atol=1e-4, rtol=1e-4)
class TestSymmetricTransferError:
def test_smoke(self, device, dtype):
pts1 = torch.rand(1, 6, 2, device=device, dtype=dtype)
pts2 = torch.rand(1, 6, 2, device=device, dtype=dtype)
H = utils.create_random_homography(1, 3).type_as(pts1).to(device)
assert symmetric_transfer_error(pts1, pts2, H).shape == (1, 6)
def test_batch(self, device, dtype):
batch_size = 5
pts1 = torch.rand(batch_size, 3, 2, device=device, dtype=dtype)
pts2 = torch.rand(batch_size, 3, 2, device=device, dtype=dtype)
H = utils.create_random_homography(1, 3).type_as(pts1).to(device)
assert symmetric_transfer_error(pts1, pts2, H).shape == (batch_size, 3)
def test_gradcheck(self, device):
# generate input data
batch_size, num_points, num_dims = 2, 3, 2
points1 = torch.rand(batch_size, num_points, num_dims, device=device, dtype=torch.float64, requires_grad=True)
points2 = torch.rand(batch_size, num_points, num_dims, device=device, dtype=torch.float64)
H = utils.create_random_homography(batch_size, 3).type_as(points1).to(device)
assert gradcheck(symmetric_transfer_error, (points1, points2, H), raise_exception=True)
def test_shift(self, device, dtype):
pts1 = torch.zeros(3, 2, device=device, dtype=dtype)[None]
pts2 = torch.tensor([[1.0, 0.0], [2.0, 0.0], [2.0, 2.0]], device=device, dtype=dtype)[None]
H = torch.tensor([[1.0, 0.0, 1.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]], dtype=dtype, device=device)[None]
expected = torch.tensor([0.0, 2.0, 10.0], device=device, dtype=dtype)[None]
assert_close(symmetric_transfer_error(pts1, pts2, H), expected, atol=1e-4, rtol=1e-4)
class TestFindHomographyDLT:
def test_smoke(self, device, dtype):
points1 = torch.rand(1, 4, 2, device=device, dtype=dtype)
points2 = torch.rand(1, 4, 2, device=device, dtype=dtype)
weights = torch.ones(1, 4, device=device, dtype=dtype)
H = find_homography_dlt(points1, points2, weights)
assert H.shape == (1, 3, 3)
@pytest.mark.parametrize("batch_size, num_points", [(1, 4), (2, 5), (3, 6)])
def test_shape(self, batch_size, num_points, device, dtype):
B, N = batch_size, num_points
points1 = torch.rand(B, N, 2, device=device, dtype=dtype)
points2 = torch.rand(B, N, 2, device=device, dtype=dtype)
weights = torch.ones(B, N, device=device, dtype=dtype)
H = find_homography_dlt(points1, points2, weights)
assert H.shape == (B, 3, 3)
@pytest.mark.parametrize("batch_size, num_points", [(1, 4), (2, 5), (3, 6)])
def test_shape_noweights(self, batch_size, num_points, device, dtype):
B, N = batch_size, num_points
points1 = torch.rand(B, N, 2, device=device, dtype=dtype)
points2 = torch.rand(B, N, 2, device=device, dtype=dtype)
H = find_homography_dlt(points1, points2, None)
assert H.shape == (B, 3, 3)
@pytest.mark.parametrize("batch_size, num_points", [(1, 4), (2, 5), (3, 6)])
def test_points_noweights(self, batch_size, num_points, device, dtype):
B, N = batch_size, num_points
points1 = torch.rand(B, N, 2, device=device, dtype=dtype)
points2 = torch.rand(B, N, 2, device=device, dtype=dtype)
weights = torch.ones(B, N, device=device, dtype=dtype)
H_noweights = find_homography_dlt(points1, points2, None)
H_withweights = find_homography_dlt(points1, points2, weights)
assert H_noweights.shape == (B, 3, 3) and H_withweights.shape == (B, 3, 3)
assert_close(H_noweights, H_withweights, rtol=1e-3, atol=1e-4)
@pytest.mark.parametrize("batch_size", [1, 2, 5])
def test_clean_points(self, batch_size, device, dtype):
# generate input data
points_src = torch.rand(batch_size, 10, 2, device=device, dtype=dtype)
H = kornia.eye_like(3, points_src)
H = H * 0.3 * torch.rand_like(H)
H = H / H[:, 2:3, 2:3]
points_dst = kornia.transform_points(H, points_src)
weights = torch.ones(batch_size, 10, device=device, dtype=dtype)
# compute transform from source to target
dst_homo_src = find_homography_dlt(points_src, points_dst, weights)
assert_close(kornia.transform_points(dst_homo_src, points_src), points_dst, rtol=1e-3, atol=1e-4)
@pytest.mark.grad
    @pytest.mark.skipif(torch.__version__ < '1.7', reason="pytorch bug of incompatible types: #33546 fixed in v1.7")
def test_gradcheck(self, device):
# Save initial seed
initial_seed = torch.random.initial_seed()
max_number_of_checks = 10
# Test gradients for a max_number_of_checks times
current_seed = initial_seed
for i in range(max_number_of_checks):
torch.manual_seed(current_seed)
points_src = torch.rand(1, 10, 2, device=device, dtype=torch.float64, requires_grad=True)
points_dst = torch.rand_like(points_src)
weights = torch.ones_like(points_src)[..., 0]
try:
gradcheck(
find_homography_dlt, (points_src, points_dst, weights), rtol=1e-6, atol=1e-6, raise_exception=True
)
# Gradcheck failed
except RuntimeError:
# All iterations failed
if i == max_number_of_checks - 1:
assert gradcheck(
find_homography_dlt,
(points_src, points_dst, weights),
rtol=1e-6,
atol=1e-6,
raise_exception=True,
)
# Next iteration
else:
current_seed = random.randrange(0xFFFFFFFFFFFFFFFF)
continue
# Gradcheck succeed
torch.manual_seed(initial_seed)
return
class TestFindHomographyDLTIter:
def test_smoke(self, device, dtype):
points1 = torch.rand(1, 4, 2, device=device, dtype=dtype)
points2 = torch.rand(1, 4, 2, device=device, dtype=dtype)
weights = torch.ones(1, 4, device=device, dtype=dtype)
H = find_homography_dlt_iterated(points1, points2, weights, 5)
assert H.shape == (1, 3, 3)
@pytest.mark.parametrize("batch_size, num_points", [(1, 4), (2, 5), (3, 6)])
def test_shape(self, batch_size, num_points, device, dtype):
B, N = batch_size, num_points
points1 = torch.rand(B, N, 2, device=device, dtype=dtype)
points2 = torch.rand(B, N, 2, device=device, dtype=dtype)
weights = torch.ones(B, N, device=device, dtype=dtype)
H = find_homography_dlt_iterated(points1, points2, weights, 5)
assert H.shape == (B, 3, 3)
@pytest.mark.parametrize("batch_size", [1, 2])
def test_clean_points(self, batch_size, device, dtype):
# generate input data
points_src = torch.rand(batch_size, 10, 2, device=device, dtype=dtype)
H = kornia.eye_like(3, points_src)
H = H * 0.3 * torch.rand_like(H)
H = H / H[:, 2:3, 2:3]
points_dst = kornia.transform_points(H, points_src)
weights = torch.ones(batch_size, 10, device=device, dtype=dtype)
# compute transform from source to target
dst_homo_src = find_homography_dlt_iterated(points_src, points_dst, weights, 10)
assert_close(kornia.transform_points(dst_homo_src, points_src), points_dst, rtol=1e-3, atol=1e-4)
@pytest.mark.grad
    @pytest.mark.skipif(torch.__version__ < '1.7', reason="pytorch bug of incompatible types: #33546 fixed in v1.7")
def test_gradcheck(self, device):
# Save initial seed
initial_seed = torch.random.initial_seed()
max_number_of_checks = 10
# Test gradients for a max_number_of_checks times
current_seed = initial_seed
for i in range(max_number_of_checks):
torch.manual_seed(current_seed)
points_src = torch.rand(1, 10, 2, device=device, dtype=torch.float64, requires_grad=True)
points_dst = torch.rand_like(points_src)
weights = torch.ones_like(points_src)[..., 0]
try:
gradcheck(
find_homography_dlt_iterated,
(points_src, points_dst, weights),
rtol=1e-6,
atol=1e-6,
raise_exception=True,
)
# Gradcheck failed
except RuntimeError:
# All iterations failed
if i == max_number_of_checks - 1:
assert gradcheck(
find_homography_dlt_iterated,
(points_src, points_dst, weights),
rtol=1e-6,
atol=1e-6,
raise_exception=True,
)
# Next iteration
else:
current_seed = random.randrange(0xFFFFFFFFFFFFFFFF)
continue
# Gradcheck succeed
torch.manual_seed(initial_seed)
return
@pytest.mark.grad
@pytest.mark.parametrize("batch_size", [1, 2])
def test_dirty_points_and_gradcheck(self, batch_size, device, dtype):
# generate input data
points_src = torch.rand(batch_size, 10, 2, device=device, dtype=dtype)
H = kornia.eye_like(3, points_src)
H = H * (1 + torch.rand_like(H))
H = H / H[:, 2:3, 2:3]
points_src = 100.0 * torch.rand(batch_size, 20, 2, device=device, dtype=dtype)
points_dst = kornia.transform_points(H, points_src)
# making last point an outlier
points_dst[:, -1, :] += 20
weights = torch.ones(batch_size, 20, device=device, dtype=dtype)
# compute transform from source to target
dst_homo_src = find_homography_dlt_iterated(points_src, points_dst, weights, 0.5, 10)
assert_close(
kornia.transform_points(dst_homo_src, points_src[:, :-1]), points_dst[:, :-1], rtol=1e-3, atol=1e-3
)
|
[
"noreply@github.com"
] |
noreply@github.com
|
d5e1939f099cf3e03251eb29a08dac4f28524a6f
|
7eedbe5c7d0a0d602d03bfe7d73878adca4c61e4
|
/src_old/calculus.py
|
20c434bac11cde88912e6138766eb575a8dce5ea
|
[
"MIT"
] |
permissive
|
johndpope/GeLaTo
|
da413da5e718ed67b620af313ba99abb87b49c4e
|
e00577924b7d13f3d5d387583f457caf8065b004
|
refs/heads/master
| 2020-03-29T23:05:27.981692
| 2018-07-30T15:16:54
| 2018-07-30T15:16:54
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 17,757
|
py
|
# coding: utf-8
# TODO add action of diff operators on sympy known functions
import numpy as np
from itertools import groupby
from collections import OrderedDict
#from sympy.core.sympify import sympify
from sympy.simplify.simplify import simplify
from sympy import Symbol
from sympy import Lambda
from sympy import Function
from sympy import bspline_basis
from sympy import lambdify
from sympy import cos
from sympy import sin
from sympy import Rational
from sympy import diff
from sympy import Matrix, ImmutableDenseMatrix
from sympy import latex
from sympy import Integral
from sympy import I as sympy_I
from sympy.core import Basic
from sympy.core.singleton import S
from sympy.simplify.simplify import nsimplify
from sympy.utilities.lambdify import implemented_function
from sympy.matrices.dense import MutableDenseMatrix
from sympy import Mul, Add
from sympy import postorder_traversal, preorder_traversal
from sympy.core.expr import Expr
from sympy.core.containers import Tuple
from sympy import Integer, Float
from sympy.core.compatibility import is_sequence
from sympy import Indexed, IndexedBase
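
# NOTE (assumed/restored): `CalculusFunction` and `_coeffs_registery` are used
# below but defined elsewhere in the full GeLaTo sources; minimal versions are
# sketched here so this excerpt is self-contained.
class CalculusFunction(Function):
    """Marker base class for calculus operators (Grad, Dot, Cross, ...)."""
    pass

_coeffs_registery = (Integer, Float, Rational)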
# ...
class LinearOperator(CalculusFunction):
"""
Examples
========
"""
nargs = None
name = 'Grad'
is_commutative = True
def __new__(cls, *args, **options):
# (Try to) sympify args first
if options.pop('evaluate', True):
r = cls.eval(*args)
else:
r = None
if r is None:
return Basic.__new__(cls, *args, **options)
else:
return r
def __getitem__(self, indices, **kw_args):
if is_sequence(indices):
# Special case needed because M[*my_tuple] is a syntax error.
return Indexed(self, *indices, **kw_args)
else:
return Indexed(self, indices, **kw_args)
@classmethod
def eval(cls, *_args):
"""."""
if not _args:
return
expr = _args[0]
if isinstance(expr, Add):
args = expr.args
args = [cls.eval(a) for a in expr.args]
return Add(*args)
if isinstance(expr, Mul):
coeffs = [a for a in expr.args if isinstance(a, _coeffs_registery)]
vectors = [a for a in expr.args if not(a in coeffs)]
a = S.One
if coeffs:
a = Mul(*coeffs)
b = S.One
if vectors:
b = cls(Mul(*vectors), evaluate=False)
return Mul(a, b)
return cls(expr, evaluate=False)
# ...
# ...
class DifferentialOperator(LinearOperator):
"""
This class is a linear operator that applies the Leibniz formula
Examples
========
"""
coordinate = None
@classmethod
def eval(cls, *_args):
"""."""
expr = _args[0]
if isinstance(expr, Add):
args = expr.args
args = [cls.eval(a) for a in expr.args]
return Add(*args)
if isinstance(expr, Mul):
coeffs = [a for a in expr.args if isinstance(a, _coeffs_registery)]
vectors = [a for a in expr.args if not(a in coeffs)]
c = S.One
if coeffs:
c = Mul(*coeffs)
V = S.One
if vectors:
if len(vectors) == 1:
V = cls(Mul(vectors[0]), evaluate=False)
elif len(vectors) == 2:
a = vectors[0]
b = vectors[1]
fa = cls(a, evaluate=False)
fb = cls(b, evaluate=False)
V = a * fb + fa * b
else:
V = cls(Mul(*vectors), evaluate=False)
return Mul(c, V)
return cls(expr, evaluate=False)
# ...
# ...
class dx(DifferentialOperator):
coordinate = 'x'
grad_index = 0 # index in grad
pass
class dy(DifferentialOperator):
coordinate = 'y'
grad_index = 1 # index in grad
pass
class dz(DifferentialOperator):
coordinate = 'z'
grad_index = 2 # index in grad
pass
_partial_derivatives = (dx, dy, dz)
# ...
# ...
def find_partial_derivatives(expr):
"""
returns all partial derivative expressions
"""
if isinstance(expr, (Add, Mul)):
return find_partial_derivatives(expr.args)
elif isinstance(expr, (list, tuple, Tuple)):
args = []
for a in expr:
args += find_partial_derivatives(a)
return args
elif isinstance(expr, _partial_derivatives):
return [expr]
return []
# ...
# ...
def get_number_derivatives(expr):
"""
returns the number of partial derivatives in expr.
this is still an experimental version, and it assumes that expr is of the
form d(a) where a is a single atom.
"""
n = 0
if isinstance(expr, _partial_derivatives):
assert(len(expr.args) == 1)
n += 1 + get_number_derivatives(expr.args[0])
return n
# ...
# ...
def sort_partial_derivatives(expr):
"""returns the partial derivatives of an expression, sorted.
"""
ls = []
args = find_partial_derivatives(expr)
    # ... Note: groupby gives the wrong answer for expr = mu * u + dx(u) + dx(dx(u))
    # (it only groups *consecutive* items), so we bucket by derivative order
    # manually below instead of:
    # for key, group in groupby(args, lambda x: get_number_derivatives(x)):
    #     for a in group:
    #         ls.append(a)
    # ...
# ...
d = {}
for a in args:
n = get_number_derivatives(a)
if n in d.keys():
d[n] += [a]
else:
d[n] = [a]
# ...
# ...
if not d:
return []
# ...
# ... sort keys from high to low
keys = np.asarray(list(d.keys()))
keys.sort()
keys = keys[::-1]
# ...
# ... construct a list of partial derivatives from high to low order
ls = []
for k in keys:
ls += d[k]
# ...
return ls
# ...
# ...
def get_index_derivatives(expr):
    """Count, per coordinate, the partial derivatives appearing in expr.

    Returns an OrderedDict mapping 'x', 'y', 'z' to the number of dx, dy, dz
    applications found in the expression tree.
    """
coord = ['x','y','z']
d = OrderedDict()
for c in coord:
d[c] = 0
    ops = [a for a in preorder_traversal(expr) if isinstance(a, _partial_derivatives)]
    for i in ops:
        if isinstance(i, dx):
            d['x'] += 1
        elif isinstance(i, dy):
            d['y'] += 1
        elif isinstance(i, dz):
            d['z'] += 1
    return d
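
# e.g. (hedged sketch): for u = Symbol('u'),
#   get_index_derivatives(dx(dy(u)))  ->  OrderedDict([('x', 1), ('y', 1), ('z', 0)])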
# ...
# ...
def get_atom_derivatives(expr):
    """Strip all partial-derivative operators and return the underlying atom."""
    if isinstance(expr, _partial_derivatives):
        assert(len(expr.args) == 1)
        return get_atom_derivatives(expr.args[0])
    elif isinstance(expr, _calculus_operators):
        # `_calculus_operators` is defined later in the full module as the tuple
        # of high-level operators (Dot, Cross, Grad, ...); not shown in this excerpt
        raise TypeError('remove this raise later')
else:
return expr
# ...
# ...
class DotBasic(CalculusFunction):
"""
Examples
========
"""
nargs = None
name = 'Dot'
def __new__(cls, *args, **options):
# (Try to) sympify args first
if options.pop('evaluate', True):
r = cls.eval(*args)
else:
r = None
if r is None:
return Basic.__new__(cls, *args, **options)
else:
return r
class Dot_1d(DotBasic):
"""
Examples
========
"""
@classmethod
def eval(cls, *_args):
"""."""
if not _args:
return
if not( len(_args) == 2):
raise ValueError('Expecting two arguments')
u = _args[0]
v = _args[1]
return u * v
class Dot_2d(DotBasic):
"""
Examples
========
"""
@classmethod
def eval(cls, *_args):
"""."""
if not _args:
return
if not( len(_args) == 2):
raise ValueError('Expecting two arguments')
u = _args[0]
v = _args[1]
if isinstance(u, (Matrix, ImmutableDenseMatrix)):
if isinstance(v, (Matrix, ImmutableDenseMatrix)):
raise NotImplementedError('TODO')
else:
return Tuple(u[0,0]*v[0] + u[0,1]*v[1], u[1,0]*v[0] + u[1,1]*v[1])
else:
if isinstance(v, (Matrix, ImmutableDenseMatrix)):
raise NotImplementedError('TODO')
else:
return u[0]*v[0] + u[1]*v[1]
class Dot_3d(DotBasic):
"""
Examples
========
"""
@classmethod
def eval(cls, *_args):
"""."""
if not _args:
return
if not( len(_args) == 2):
raise ValueError('Expecting two arguments')
u = _args[0]
v = _args[1]
if isinstance(u, (Matrix, ImmutableDenseMatrix)):
if isinstance(v, (Matrix, ImmutableDenseMatrix)):
raise NotImplementedError('TODO')
else:
return Tuple(u[0,0]*v[0] + u[0,1]*v[1] + u[0,2]*v[2],
u[1,0]*v[0] + u[1,1]*v[1] + u[1,2]*v[2],
u[2,0]*v[0] + u[2,1]*v[1] + u[2,2]*v[2])
else:
if isinstance(v, (Matrix, ImmutableDenseMatrix)):
raise NotImplementedError('TODO')
else:
return u[0]*v[0] + u[1]*v[1] + u[2]*v[2]
# ...
# ...
class CrossBasic(CalculusFunction):
"""
Examples
========
"""
nargs = None
name = 'Cross'
def __new__(cls, *args, **options):
# (Try to) sympify args first
if options.pop('evaluate', True):
r = cls.eval(*args)
else:
r = None
if r is None:
return Basic.__new__(cls, *args, **options)
else:
return r
class Cross_2d(CrossBasic):
"""
Examples
========
"""
@classmethod
def eval(cls, *_args):
"""."""
if not _args:
return
u = _args[0]
v = _args[1]
return u[0]*v[1] - u[1]*v[0]
class Cross_3d(CrossBasic):
"""
Examples
========
"""
def __getitem__(self, indices, **kw_args):
if is_sequence(indices):
# Special case needed because M[*my_tuple] is a syntax error.
return Indexed(self, *indices, **kw_args)
else:
return Indexed(self, indices, **kw_args)
@classmethod
def eval(cls, *_args):
"""."""
if not _args:
return
u = _args[0]
v = _args[1]
return Tuple(u[1]*v[2] - u[2]*v[1],
u[2]*v[0] - u[0]*v[2],
u[0]*v[1] - u[1]*v[0])
# ...
# ...
class GradBasic(CalculusFunction):
"""
Examples
========
"""
nargs = None
name = 'Grad'
def __new__(cls, *args, **options):
# (Try to) sympify args first
if options.pop('evaluate', True):
r = cls.eval(*args)
else:
r = None
if r is None:
return Basic.__new__(cls, *args, **options)
else:
return r
def __getitem__(self, indices, **kw_args):
if is_sequence(indices):
# Special case needed because M[*my_tuple] is a syntax error.
return Indexed(self, *indices, **kw_args)
else:
return Indexed(self, indices, **kw_args)
class Grad_1d(GradBasic):
"""
Examples
========
"""
@classmethod
def eval(cls, *_args):
"""."""
if not _args:
return
u = _args[0]
return dx(u)
class Grad_2d(GradBasic):
"""
Examples
========
"""
@classmethod
def eval(cls, *_args):
"""."""
if not _args:
return
u = _args[0]
return Tuple(dx(u), dy(u))
class Grad_3d(GradBasic):
"""
Examples
========
"""
@classmethod
def eval(cls, *_args):
"""."""
if not _args:
return
u = _args[0]
return Tuple(dx(u), dy(u), dz(u))
# ...
# ...
class CurlBasic(CalculusFunction):
"""
Examples
========
"""
nargs = None
name = 'Curl'
def __new__(cls, *args, **options):
# (Try to) sympify args first
if options.pop('evaluate', True):
r = cls.eval(*args)
else:
r = None
if r is None:
return Basic.__new__(cls, *args, **options)
else:
return r
def __getitem__(self, indices, **kw_args):
if is_sequence(indices):
# Special case needed because M[*my_tuple] is a syntax error.
return Indexed(self, *indices, **kw_args)
else:
return Indexed(self, indices, **kw_args)
class Curl_2d(CurlBasic):
"""
Examples
========
"""
@classmethod
def eval(cls, *_args):
"""."""
if not _args:
return
u = _args[0]
return Tuple( dy(u),
-dx(u))
class Curl_3d(CurlBasic):
"""
Examples
========
"""
@classmethod
def eval(cls, *_args):
"""."""
if not _args:
return
u = _args[0]
return Tuple(dy(u[2]) - dz(u[1]),
dz(u[0]) - dx(u[2]),
dx(u[1]) - dy(u[0]))
# ...
# ...
class Rot_2d(CalculusFunction):
"""
Examples
========
"""
nargs = None
name = 'Grad'
def __new__(cls, *args, **options):
# (Try to) sympify args first
if options.pop('evaluate', True):
r = cls.eval(*args)
else:
r = None
if r is None:
return Basic.__new__(cls, *args, **options)
else:
return r
def __getitem__(self, indices, **kw_args):
if is_sequence(indices):
# Special case needed because M[*my_tuple] is a syntax error.
return Indexed(self, *indices, **kw_args)
else:
return Indexed(self, indices, **kw_args)
@classmethod
def eval(cls, *_args):
"""."""
if not _args:
return
u = _args[0]
return dy(u[0]) - dx(u[1])
# ...
# ...
class DivBasic(CalculusFunction):
"""
Examples
========
"""
nargs = None
name = 'Div'
def __new__(cls, *args, **options):
# (Try to) sympify args first
if options.pop('evaluate', True):
r = cls.eval(*args)
else:
r = None
if r is None:
return Basic.__new__(cls, *args, **options)
else:
return r
def __getitem__(self, indices, **kw_args):
if is_sequence(indices):
# Special case needed because M[*my_tuple] is a syntax error.
return Indexed(self, *indices, **kw_args)
else:
return Indexed(self, indices, **kw_args)
class Div_1d(DivBasic):
"""
Examples
========
"""
@classmethod
def eval(cls, *_args):
"""."""
if not _args:
return
u = _args[0]
return dx(u)
class Div_2d(DivBasic):
"""
Examples
========
"""
@classmethod
def eval(cls, *_args):
"""."""
if not _args:
return
u = _args[0]
return dx(u[0]) + dy(u[1])
class Div_3d(DivBasic):
"""
Examples
========
"""
@classmethod
def eval(cls, *_args):
"""."""
if not _args:
return
u = _args[0]
return dx(u[0]) + dy(u[1]) + dz(u[2])
# ...
# ...
_coord_registery = ['x', 'y', 'z']
# ...
# ...
_operators_1d = [Dot_1d,
Grad_1d, Div_1d]
_operators_2d = [Dot_2d, Cross_2d,
Grad_2d, Curl_2d, Rot_2d, Div_2d]
_operators_3d = [Dot_3d, Cross_3d,
Grad_3d, Curl_3d, Div_3d]
# ...
# ... generic operators
class GenericFunction(CalculusFunction):
def __getitem__(self, indices, **kw_args):
if is_sequence(indices):
# Special case needed because M[*my_tuple] is a syntax error.
return Indexed(self, *indices, **kw_args)
else:
return Indexed(self, indices, **kw_args)
class Dot(GenericFunction):
pass
class Cross(GenericFunction):
pass
class Grad(GenericFunction):
pass
class Curl(GenericFunction):
pass
class Rot(GenericFunction):
pass
class Div(GenericFunction):
pass
_generic_ops = (Dot, Cross,
Grad, Curl, Rot, Div)
# ...
# ... alias for ufl compatibility
cross = Cross
dot = Dot
Inner = Dot # TODO do we need to add the Inner class Function?
inner = Inner
grad = Grad
curl = Curl
rot = Rot
div = Div
_calculus_operators = (Grad, Dot, Inner, Cross, Rot, Curl, Div)
# ...
# ...
def partial_derivative_as_symbol(expr, name=None, dim=None):
"""Returns a Symbol from a partial derivative expression."""
if not isinstance(expr, _partial_derivatives):
raise TypeError('Expecting a partial derivative expression')
index = get_index_derivatives(expr)
var = get_atom_derivatives(expr)
if not isinstance(var, (Symbol, Indexed)):
print(type(var))
raise TypeError('Expecting a Symbol, Indexed')
code = ''
for k,n in list(index.items()):
code += k*n
if var.is_Indexed:
if name is None:
name = var.base
indices = ''.join('{}'.format(i) for i in var.indices)
name = '{name}_{code}'.format(name=name, code=code)
shape = None
if dim:
shape = [dim]
return IndexedBase(name, shape=shape)[indices]
else:
if name is None:
name = var.name
name = '{name}_{code}'.format(name=name, code=code)
return Symbol(name)
# ...
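# Illustrative self-check of the operators above (a sketch, not part of the
# original API; it assumes sympy's Symbol is imported at the top of this
# module, as used elsewhere in this file).
if __name__ == '__main__':
    u = Symbol('u')
    v = Symbol('v')
    print(dx(u * v))                            # Leibniz rule: u*dx(v) + dx(u)*v
    print(Grad_2d(u))                           # Tuple(dx(u), dy(u))
    print(get_number_derivatives(dx(dx(u))))    # 2
    print(partial_derivative_as_symbol(dx(u)))  # u_x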
|
[
"ratnaniahmed@gmail.com"
] |
ratnaniahmed@gmail.com
|
c0b4fa2db3f14cfcc949209826036005ad046e4e
|
8ff3a04cebb9f6b99d113fc3d19382396c77a0c7
|
/cs211/FiveTwelve-Notes.py
|
ad3dddb07b47c2e17788edd5437cf850d90366c8
|
[] |
no_license
|
lmjim/year1
|
20d533a5e8e7a1474d0477614096e172fd9bf976
|
c31701c819a003066b48fc0d8d98e64cff2defe1
|
refs/heads/master
| 2021-05-18T23:27:42.168825
| 2020-03-31T02:23:10
| 2020-03-31T02:23:10
| 251,476,503
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,156
|
py
|
>>> tiles = []
>>> for row in range(4):
columns = []
for col in range(4):
columns.append(None)
tiles.append(columns)
>>> print(tiles)
[[None, None, None, None], [None, None, None, None], [None, None, None, None], [None, None, None, None]]
>>> rep = []
>>> for row in tiles:
value_list = []
for tile in row:
if tile is None:
value_list.append(0)
else:
value_list.append(tile.value)
rep.append(value_list)
>>> print(rep)
[[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]]
>>> tiles = []
>>> for row in range(4):
row_tiles = []
for col in range(4):
if rep[row][col] == 0:
row_tiles.append(None)
else:
val = rep[row][col]
tile = Tile(self, row, col, value=val)
row_tiles.append(tile)
#self.notify("New", data={"tile": tile})
tiles.append(row_tiles)
>>> print(tiles)
[[None, None, None, None], [None, None, None, None], [None, None, None, None], [None, None, None, None]]
>>> print(row_tiles)
[None, None, None, None]
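# Consolidated sketch of the round-trip shown above (illustrative; this `Tile`
# is a hypothetical stand-in -- in the transcript the real Tile takes the
# game/board object as its first argument).
class Tile:
    def __init__(self, board, row, col, value=0):
        self.row, self.col, self.value = row, col, value
def to_values(tiles):
    """Grid of Tile/None -> grid of ints (None becomes 0)."""
    return [[0 if t is None else t.value for t in row] for row in tiles]
def from_values(board, rep):
    """Grid of ints -> grid of Tile/None (0 becomes None)."""
    return [[None if v == 0 else Tile(board, r, c, value=v)
             for c, v in enumerate(row)]
            for r, row in enumerate(rep)]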
|
[
"ljim@uoregon.edu"
] |
ljim@uoregon.edu
|
5e634d96c95aaf800062ab559c679f183064a4e5
|
8356b36e54b04c469104660f7e3a116e3147dc34
|
/top_headlines.py
|
c9b15180bd08ce35beabbf803ba366f97e08c90c
|
[] |
no_license
|
skyyi1126/SI507_HW11
|
d0066cb443d4636d93f599c9b260898b8e40d7b3
|
2e13ea4cb01d9eec55c1d8dee2c87696da01ea14
|
refs/heads/master
| 2022-12-10T00:06:15.882770
| 2018-12-03T03:02:02
| 2018-12-03T03:02:02
| 160,121,919
| 0
| 0
| null | 2022-12-08T01:27:33
| 2018-12-03T02:36:51
|
Python
|
UTF-8
|
Python
| false
| false
| 1,608
|
py
|
from flask import Flask, render_template
import requests
from secrets import api_key  # a local secrets.py (note: it shadows the stdlib `secrets` module)
import json
import datetime
app = Flask(__name__)
def get_articles(section):
url = "https://api.nytimes.com/svc/topstories/v2/" + section + ".json"
    # NYT's APIs expect the query parameter "api-key"; the underscore
    # spelling would not be recognized by the service.
    params = {
        "api-key": api_key
    }
results = requests.get(url, params).json()["results"]
return results[0:5]
def get_greeting():
t = datetime.datetime.now()
h = t.hour
m = t.minute
if (h>=0 and h<12) or (h==12 and m==0):
return "Good morning"
elif (h<16) or (h==16 and m==0):
return "Good afternoon"
elif (h<20) or (h==20 and m==0):
return "Good evening"
elif h<=24:
return "Good night"
else:
return "What's wrong with your time"
@app.route('/user/<nm>/<section>')
def articles(nm, section):
results = get_articles(section)
text = []
for i in results:
text.append(i["title"] + " (" + i["url"] + ")")
    return render_template('user.html', greeting = get_greeting(),
        title="Today's top headlines in " + section + " are...", my_list=text, name=nm)
@app.route('/user/<nm>')
def articles_tech(nm):
results = get_articles("technology")
text = []
for i in results:
text.append(i["title"] + " (" + i["url"] + ")")
    return render_template('user.html', greeting = get_greeting(),
        title="Today's top headlines in technology are...", my_list=text, name=nm)
@app.route('/')
def index():
return '<h1>Welcome!</h1>'
if __name__ == '__main__':
print('starting Flask app', app.name)
app.run(debug=True)
|
[
"sky971126@163.com"
] |
sky971126@163.com
|
c09e78fbea37df5389c7c187b4ba191210940cd3
|
9569555f262c6cb8701a8e4c56b2c0d3499b068a
|
/main/views.py
|
96058f887f8e186f7e7c9146a3eaaaae3fb50ea6
|
[] |
no_license
|
hanelso/mysitepy
|
6af327d92bbe103794712a20d4a3ffc65e0d5022
|
8e6facdc6baa79b66452b03d6c251d9d5f95e5fd
|
refs/heads/master
| 2020-04-01T13:20:42.330778
| 2018-10-26T07:08:32
| 2018-10-26T07:08:32
| 153,248,563
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 307
|
py
|
from django.db.models import Max
from django.shortcuts import render
# Create your views here.
from user.models import User
def index(request):
print(User.objects.aggregate(id=Max('id'))['id'])
print(type(User.objects.aggregate(id=Max('id'))['id']))
return render(request, 'main/index.html')
|
[
"gjsxj1992@gmail.com"
] |
gjsxj1992@gmail.com
|
6872ae8eb5ec75cbb2419ad2d62cff26fed5eae2
|
1a31dfb66512aa66c407484f2ea8b0fb370669a4
|
/dstt/urls.py
|
ea10b8d0b93dbfa3fb49d2bb5b02982685ef0ae0
|
[] |
no_license
|
nisha-eng/dstt
|
790129f2918e0210421039baba0a4e8c877a7627
|
bab89000242aec3a1a6fb05447ec52b14722809f
|
refs/heads/main
| 2023-03-01T02:50:20.767421
| 2021-02-05T05:23:08
| 2021-02-05T05:23:08
| 336,171,818
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,056
|
py
|
"""dstt URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path,include
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
path('admin/', admin.site.urls),
path('account/',include('account.urls')),
path('administration/',include('administration.urls')),
path('employee/',include('employee.urls'))
]+static(settings.STATIC_URL,document_root=settings.MEDIA_ROOT)
|
[
"mdipakpatidar@gmail.com"
] |
mdipakpatidar@gmail.com
|
34f4aaaee0d22405b25fc632c1962fb509840b62
|
8fe200d255cae8bb17caa0f6827bb3d5b034f9e3
|
/TEST/test2.py
|
509f4f9ba2fd4fd4ca8cd5a6e2ce8859d8f80510
|
[] |
no_license
|
WoojungChoi/Python_Algorithm
|
3a3358cfe4108faf8936746966d0b5695be7b11d
|
9766c1900daf0bdd0df32bb253067ccc3cbe98a3
|
refs/heads/master
| 2020-06-11T17:43:25.088592
| 2020-05-25T13:55:38
| 2020-05-25T13:55:38
| 194,038,787
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,812
|
py
|
class node:
def __init__(self, key=None, left=None, right=None):
self.key = key
self.left = left
self.right = right
class Dict:
x = p = node
z = node(key=0, left=0, right=0)
z.left = z
z.right = z
head = node(key=0, left=0, right=z)
def search(self, search_key):
x = self.head.right
while x != self.z:
if x.key == search_key:
return x.key
if x.key > search_key:
x= x.left
print('left ', end='')
else:
x = x.right
print('right ', end='')
return -1
def insert(self, v):
x = p = self.head
while (x != self.z):
p = x
if x.key ==v:
return
if x.key > v:
x = x.left
else:
x = x.right
x = node(key=v, left = self.z, right = self.z)
if p.key > v:
p.left = x
else:
p.right = x
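# Illustrative helper (not in the original): an in-order traversal, using the
# same sentinel `z`, to check that inserted keys come back in sorted order.
def inorder(t, z, out):
    if t is z or t == 0:
        return
    inorder(t.left, z, out)
    out.append(t.key)
    inorder(t.right, z, out)
# Example (after the inserts below):
#   keys = []
#   inorder(d.head.right, d.z, keys)
#   print(keys)   # [1, 2, 3, 4, 5, 6, 7, 8]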
import random, time, sys
N = 8
#key = list(range(1, N+1))
#s_key = list(range(1, N+1))
#random.shuffle(key)
key = [2,1,7,8,6,3,5,4]
#s_key = [8,5,10]
d = Dict()
for i in range(N):
d.insert(key[i])
start_time = time.time()
'''
for i in range(len(s_key)):
    print('Search key: ', s_key[i])
    result = d.search(s_key[i])
    if (result == -1) or (result != s_key[i]):
        print('\nSearch failed')
    else:
        print('\nSearch succeeded')
    print()
'''
while True:
    inputval = int(input())
    print('Search key entered: ', inputval)
    if(inputval == 999):
        print('Exiting program')
        break
    result = d.search(inputval)
    if (result == -1):
        print('\nSearch failed')
    else:
        print('\nSearch succeeded')
    print()
|
[
"44879897+WoojungChoi@users.noreply.github.com"
] |
44879897+WoojungChoi@users.noreply.github.com
|
48193ed877c9dc01d8759a30ba0ef88a6bbd6f53
|
daaefa367aad90d6977bb94d9208d7a35606cf26
|
/rent_crawl.py
|
c3989dc0a25688e5a8a4e43e3472b972286d6443
|
[
"MIT"
] |
permissive
|
montypaws/crawl_smth
|
27d93bcdbb8266616f4e35be44251a8cda172158
|
1848dba19417c206a229fa8f53a909f940ad6359
|
refs/heads/master
| 2020-04-05T03:02:54.516377
| 2017-04-06T02:41:25
| 2017-04-06T02:41:25
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,770
|
py
|
#!/usr/bin/python
# coding:utf-8
from bs4 import BeautifulSoup
import re
import os
import sys
import urllib
import time
import random
#################### Config start #################
# Board configuration
# Crawling several boards is supported; uncomment the lines below to add more
# Second-hand housing
# board = 'OurHouse'
# Second-hand market, main board
# board = 'SecondMarket'
# Rentals
boards = ['OurEstate', 'PolicyEstate', 'SchoolEstate', 'RealEstate_review', 'ShangHaiEstate', 'RealEstate', 'Picture']
# File listing the keywords of interest
keywordsFile = '/home/wwwroot/www.reactshare.cn/rent/keywords.txt'
# File listing blacklisted keywords
blacklistFile = '/home/wwwroot/www.reactshare.cn/rent/blacklist.txt'
# Crawl output file; simplest to put it in an HTTP-served directory
outputFile = '/home/wwwroot/www.reactshare.cn/rent/index.html'
# Snapshot file for comparison; if nothing changed, no notification is sent
lastCopy = '/home/wwwroot/www.reactshare.cn/rent/last.html'
# Notification URL, pinged when the crawl finishes so results can be viewed
notifyUrl = "http://m.reactshare.cn/rent"
# Upper bound on pages crawled (note: range(1, maxCrawlPage) visits pages 1..maxCrawlPage-1)
maxCrawlPage = 3
# Minimum wait after each page fetch, to avoid being blacklisted,
# plus a random factor: the total wait is baseSleepSec + [0..randWaitSec] seconds
baseSleepSec = 1
randWaitSec = 2
################### Config end #################
reload(sys)
sys.setdefaultencoding("utf-8")
keywords = []
blacklist = []
matched = []
final = []
def notInBlackList(item) :
for kw in blacklist:
if item.find(kw) >= 0:
return False
return True
for kw in open(keywordsFile).readlines():
keywords.append(kw.strip())
for kw in open(blacklistFile).readlines():
blacklist.append(kw.strip())
for board in boards:
# continue
for page in range(1, maxCrawlPage):
url = 'http://m.newsmth.net/board/%s?p=%s' % (board, page)
data = urllib.urlopen(url).read()
# print data
soup = BeautifulSoup(data, "html5lib")
for a in soup.find_all(href=re.compile("\/article\/" + board)):
item = a.encode('utf-8')
for kw in keywords:
if item.find(kw) >= 0 and notInBlackList(item):
matched.append(item)
time.sleep(baseSleepSec + randWaitSec * random.random())
for item in matched:
if item not in final:
final.append(item)
# Sort once, so repeated thread-bumping by bored users does not reshuffle the list
final.sort()
union=final
# Check whether this crawl picked up anything new
if os.path.exists(lastCopy):
last=[]
for item in open(lastCopy).readlines():
last.append(item.strip())
union=list(set(last).union(set(final)))
diff=list(set(union) ^ set(last))
if len(diff) == 0 :
sys.exit(0)
# Save the current snapshot for the next run
tmp = open(lastCopy, 'w')
tmp.write('\n'.join(union))
tmp.close()
# Render the output page
# Items are re-grouped by keyword when rendering
html = "<html><head><meta charset='UTF-8' /><meta name='viewport' content='width=device-width,user-scalable=yes'><meta name='apple-mobile-web-app-capable' content='yes'><title>水木爬爬</title><base href='http://m.newsmth.net/' /></head><body>"
html += "<style> a:visited {color:gray;} a:active {color:red;} a {color:blue;}</style>"
for kw in keywords:
html += "<h2> %s </h2>" % (kw)
for item in union:
if item.find(kw) >= 0:
html += "%s<br/>" % (item)
html += "<hr />"
for board in boards:
html += "<p><a href='http://m.newsmth.net/board/%s'>%s</a></p>" % (board, board)
html += "<hr />"
html += "<p>%d items updated at %s </p><p><a href='http://m.newsmth.net/'>水木社区</a></p>" % (len(union), time.strftime('%Y-%m-%d %X', time.localtime()))
html += "</body></html>"
output = open(outputFile, 'w')
output.write(html)
output.close()
# notify
data = urllib.urlopen(notifyUrl).read()
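# Recap of the update check above, worked on toy data (illustrative comment,
# not executed by the crawler):
#   last  = ['a', 'b']                          # previous snapshot
#   final = ['b', 'c']                          # this crawl
#   union = list(set(last).union(set(final)))   # ['a', 'b', 'c']
#   diff  = list(set(union) ^ set(last))        # ['c'] -> non-empty, so the
#                                               # page is rewritten and notified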
|
[
"hustos@qq.com"
] |
hustos@qq.com
|
2f876f6a85661251f0ba85f749269bb1b2e63c24
|
e2efa339cf6fb017e1d1898325b363a69c227409
|
/app.py
|
6367bcdbeda570b322259488161e00e0d12605db
|
[] |
no_license
|
lm10pulkit/update_delete
|
201b22b3816606640ab22a0f63c7bf2d58ed6295
|
c9c935e070f555c006dca00fd0940863fcc0790d
|
refs/heads/master
| 2020-04-15T18:32:53.239716
| 2019-01-09T19:07:40
| 2019-01-09T19:07:40
| 164,915,990
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,733
|
py
|
from flask import Flask ,session, render_template, request, redirect,url_for,g
from flask_mysqldb import MySQL
from flask_bcrypt import Bcrypt
import os
# intializing the app
app = Flask(__name__)
#secret key
app.secret_key= os.urandom(24)
# setting up database
app.config['MYSQL_HOST']='localhost'
app.config['MYSQL_USER']='root'
app.config['MYSQL_PASSWORD']= ''
app.config['MYSQL_DB']='crud'
mysql = MySQL(app)
#bcrypt for hashing passwords to keep database secure
bcrypt= Bcrypt(app)
@app.route('/',methods=['GET','POST'])
def index():
if request.method=='GET':
return render_template('login.html')
else:
form= request.form
username=form['username']
password=form['password']
if username=='admin' and password=='admin':
session['user']=username
return redirect(url_for('data'))
else:
return redirect(url_for('index'))
@app.route('/list',methods=['GET'])
def data():
if 'user' in session:
cur = mysql.connection.cursor()
resultValue = cur.execute(" select * from employee")
userDetails = cur.fetchall()
return render_template('list.html', employee=userDetails)
else:
return redirect(url_for('index'))
@app.route('/add',methods=['GET','POST'])
def add():
if 'user' in session:
if request.method == 'GET':
return render_template('add.html')
else:
form = request.form
print(form)
firstname = form['firstname']
lastname = form['lastname']
address = form['address']
email = form['email']
contact = form['contact']
argo = [firstname, lastname, address, email, int(contact)]
cur = mysql.connection.cursor()
cur.execute("INSERT INTO employee(firstname,lastname,address,email,contact) values (%s,%s,%s,%s,%s)", argo)
mysql.connection.commit()
cur.close()
return redirect(url_for('data'))
else:
return redirect(url_for('index'))
@app.route('/delete/<id>',methods=['GET'])
def delete(id=None):
if 'user' in session:
query='delete from employee where id = %s'
params=[id]
cur = mysql.connection.cursor()
cur.execute(query,params)
mysql.connection.commit()
cur.close()
return redirect(url_for('data'))
else:
return redirect(url_for('index'))
@app.route('/edit/<id>',methods=['POST','GET'])
def edit(id=None):
if 'user' in session:
if request.method=='POST':
form = request.form
params=[form['firstname'],form['lastname'],form['address'],form['email'],form['contact'],id]
query ='update employee set firstname= %s , lastname = %s , address= %s , email= %s, contact= %s where id = %s '
cur = mysql.connection.cursor()
cur.execute(query, params)
mysql.connection.commit()
cur.close()
return redirect(url_for('data'))
else:
query = 'select * from employee where id = %s'
params=[id]
cur = mysql.connection.cursor()
resultValue=cur.execute(query, params)
if resultValue>0:
userDetails = cur.fetchall()
return render_template('edit.html',user=userDetails[0])
else:
return 'invalid id'
else:
return redirect(url_for('index'))
@app.route('/logout',methods=['GET'])
def logout():
session.pop('user', None)
return redirect(url_for('index'))
if __name__=='__main__':
app.run(debug=True)
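# Note: `bcrypt` is initialised above but never used in this snippet. A hedged
# sketch of how it would typically secure the login check (table/column names
# here are hypothetical):
#
#   pw_hash = bcrypt.generate_password_hash(password).decode('utf-8')
#   # ... store pw_hash in a users table at registration time ...
#   bcrypt.check_password_hash(pw_hash, password)   # -> True / False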
|
[
"root@localhost.localdomain"
] |
root@localhost.localdomain
|
4a010a42bfbd615afad1fd018c160396fa4dbd69
|
40f4626ec26f23923c2b19d7ed24f3c512495182
|
/src/kangqi/task/compQA/model/module/cross_attention_indirect.py
|
3606a67410715c49755b69f8e4e28061ab9a5fcc
|
[] |
no_license
|
Zjhao666/CompQA
|
c937c382a2f0a0fce4fdda8efda7c916b3e4c978
|
4bb2abc40428373481909e02543062a7388615bd
|
refs/heads/master
| 2023-02-09T02:28:09.966576
| 2020-12-31T21:18:32
| 2020-12-31T21:18:32
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,278
|
py
|
"""
Author: Kangqi Luo
Goal: Combine the structure of ABCNN-1 and AF-attention
(A Decomposable Attention Model for Natural Language Inference)
We use this module in the compQA scenario, where the rhs (path) is represented by both pwords and preds.
Therefore, we send them into the module together, making it a little more complex than a normal CrossAtt layer.
"""
import tensorflow as tf
from . import att_layer
from kangqi.util.LogUtil import LogInfo
class IndirectCrossAttention:
def __init__(self, lf_max_len, rt_max_len, dim_att_hidden, att_func):
self.lf_max_len = lf_max_len
self.rt_max_len = rt_max_len
self.dim_att_hidden = dim_att_hidden
LogInfo.logs('IndirectCrossAttention: lf_max_len = %d, rt_max_len = %d, dim_att_hidden = %d, att_func = %s.',
lf_max_len, rt_max_len, dim_att_hidden, att_func)
assert att_func in ('dot', 'bilinear', 'bahdanau', 'bdot')
self.att_func = getattr(att_layer, 'cross_att_' + att_func)
def forward(self, lf_input, lf_mask, rt_input, rt_mask):
"""
:param lf_input: (ds, lf_max_len, dim_hidden)
:param lf_mask: (ds, lf_max_len) as float32
:param rt_input: (ds, rt_max_len, dim_hidden)
:param rt_mask: (ds, rt_max_len) as float32
"""
with tf.variable_scope('cross_att_indirect', reuse=tf.AUTO_REUSE):
lf_cube_mask = tf.stack([lf_mask] * self.rt_max_len,
axis=-1, name='lf_cube_mask') # (ds, lf_max_len, rt_max_len)
rt_cube_mask = tf.stack([rt_mask] * self.lf_max_len,
axis=1, name='rt_cube_mask') # (ds, lf_max_len, rt_max_len)
cube_mask = tf.multiply(lf_cube_mask, rt_cube_mask, name='cube_mask')
""" Calculate cross attention matrix """
raw_att_mat = self.att_func(lf_input=lf_input, rt_input=rt_input,
lf_max_len=self.lf_max_len,
rt_max_len=self.rt_max_len,
dim_att_hidden=self.dim_att_hidden)
masked_att_mat = raw_att_mat * cube_mask + tf.float32.min * (1. - cube_mask)
# padding: -inf
""" Attention normalize & produce att_repr """
att_norm_for_lf = tf.nn.softmax(masked_att_mat, dim=2, name='att_norm_for_lf')
att_norm_for_rt = tf.nn.softmax(masked_att_mat, dim=1, name='att_norm_for_rt')
# for_lf: sum_j A[:,j] = 1.
# for_rt: sum_i A[i,:] = 1.
lf_att_repr = tf.matmul(att_norm_for_lf, rt_input, name='lf_att_repr') # (ds, lf_max_len, dim_emb)
rt_att_repr = tf.matmul(tf.transpose(att_norm_for_rt, perm=[0, 2, 1]), # (ds, rt_max_len, lf_max_len)
lf_input, name='rt_att_repr') # (ds, rt_max_len, dim_emb)
return lf_att_repr, rt_att_repr, raw_att_mat
# @staticmethod
# def att_norm_col_wise(att_mat):
# sum_of_cols = 1e-4 + tf.reduce_mean(att_mat, axis=1, name='sum_of_cols') # (ds, rt_max_len)
# sum_of_cols = tf.expand_dims(sum_of_cols, axis=1) # (ds, 1, rt_max_len)
# att_norm = tf.div(att_mat, sum_of_cols, name='att_norm_col_wise')
# # (ds, lf_max_len, rt_max_len), sum(att_norm[:, j]) = 1
# # att_norm[:, j]: the distribution over left words for each word-j at right side
# return att_norm
#
# @staticmethod
# def att_norm_row_wise(att_mat):
# sum_of_rows = 1e-4 + tf.reduce_sum(att_mat, axis=2, name='sum_of_rows') # (ds, lf_max_len)
# sum_of_rows = tf.expand_dims(sum_of_rows, axis=2) # (ds, lf_max_len, 1)
# att_norm = tf.div(att_mat, sum_of_rows, name='att_norm_row_wise')
# # (ds, lf_max_len, rt_max_len), sum(att_norm[i, :]) = 1
# # att_norm[i, :]: the distribution over right words for each word-i at left side
# return att_norm
#
# def construct_att_weights(self, att_mat):
# """
# Parikh: Go through formula (2) in AF-attention paper
# :param att_mat: (ds, q_max_len, p_max_len + pw_max_len)
# :return: 3 attention weights (q, p, pw) and the split attention matrices
# """
# """ Naive v.s. Parikh: just different from the normalizing direction!! """
# p_att_mat, pw_att_mat = tf.split(value=att_mat,
# num_or_size_splits=[self.p_max_len, self.pw_max_len],
# axis=2) # (ds, q_max_len, p_max_len | pw_max_len)
# if self.att_norm_mode == 'parikh':
# att_wt_q = self.att_norm_col_wise(att_mat=att_mat) # (ds, q_max_len, p_max_len+pw_max_len)
# att_wt_p = self.att_norm_row_wise(att_mat=p_att_mat) # (ds, q_max_len, p_max_len)
# att_wt_pw = self.att_norm_row_wise(att_mat=pw_att_mat) # (ds, q_max_len, pw_max_len)
# else: # naive
# att_wt_q = self.att_norm_row_wise(att_mat=att_mat)
# att_wt_p = self.att_norm_col_wise(att_mat=p_att_mat)
# att_wt_pw = self.att_norm_col_wise(att_mat=pw_att_mat)
# return p_att_mat, pw_att_mat, att_wt_q, att_wt_p, att_wt_pw
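# Toy numpy sketch of the masking + two-way normalization above (illustrative
# only, not part of the original module; shapes are made up):
#
#   import numpy as np
#   ds, lf, rt = 1, 3, 2
#   att = np.random.rand(ds, lf, rt)                      # raw attention matrix
#   mask = np.ones((ds, lf, rt)); mask[:, 2, :] = 0.      # pad the last lf token
#   masked = att * mask + np.finfo(np.float32).min * (1. - mask)
#   e = np.exp(masked - masked.max(axis=2, keepdims=True))
#   att_norm_for_lf = e / e.sum(axis=2, keepdims=True)    # rows sum to 1 over rt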
|
[
"luo.kangqi@qq.com"
] |
luo.kangqi@qq.com
|
69aed622e4876c23b75c4b21c7f103a89fbf8bc9
|
2f081a778580db03fec64e17f7633ce4acfa0cee
|
/model.py
|
fc181f1e919f594d818ebd3d588d303f48397f2b
|
[] |
no_license
|
karaiyaashu/chat-bot
|
23554258f2d36b2478054b4bab490e776c93509d
|
b373b23f99f0cf1c816eb572c2d99de7ec9a4145
|
refs/heads/master
| 2023-03-01T11:36:16.515911
| 2021-01-31T10:55:53
| 2021-01-31T10:55:53
| 334,605,618
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 584
|
py
|
import torch
import torch.nn as nn
class NeuralNet(nn.Module):
def __init__(self, input_size, hidden_size, num_classes):
super(NeuralNet, self).__init__()
self.l1 = nn.Linear(input_size, hidden_size)
self.l2 = nn.Linear(hidden_size, hidden_size)
self.l3 = nn.Linear(hidden_size, num_classes)
self.relu = nn.ReLU()
def forward(self, x):
out = self.l1(x)
out = self.relu(out)
out = self.l2(out)
out = self.relu(out)
out = self.l3(out)
# no activation and no softmax
return out
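# Usage sketch (illustrative sizes): the forward pass returns raw logits, so
# pair it with nn.CrossEntropyLoss, which applies softmax internally.
if __name__ == '__main__':
    model = NeuralNet(input_size=8, hidden_size=16, num_classes=3)
    x = torch.rand(4, 8)      # a batch of 4 feature vectors
    logits = model(x)         # shape: (4, 3)
    print(logits.shape)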
|
[
"karaiyaashutosh@gmail.com"
] |
karaiyaashutosh@gmail.com
|
0e8bd19dbae95853ce77bbf96277626db3436873
|
199ad938261bb0549e43ec43c92ebdf0c915071f
|
/variation6/tests/test_distance.py
|
6af7cc6d0d63458100e9eed882e24f1b805b5b0a
|
[] |
no_license
|
pziarsolo/variation6
|
71492eb8f9a7e4335d0fb170064e131a40c9b6fe
|
f07b23743beea3349fd5b7d4b9f4f26da8a17713
|
refs/heads/master
| 2020-08-29T15:44:03.909890
| 2020-06-12T09:35:21
| 2020-06-12T09:35:21
| 218,078,637
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 19,357
|
py
|
import unittest
import math
import numpy as np
import dask.array as da
import variation6.array as va
from variation6.stats.distance import (calc_kosman_dist, _kosman,
calc_pop_pairwise_unbiased_nei_dists,
calc_dset_pop_distance)
from variation6.variations import Variations
from variation6 import GT_FIELD, FLT_VARS, DP_FIELD
from variation6.filters import keep_samples
from variation6.compute import compute
class PairwiseFilterTest(unittest.TestCase):
def test_kosman_2_indis(self):
a = np.array([[-1, -1], [0, 0], [0, 1], [0, 0], [0, 0], [0, 1], [0, 1],
[0, 1], [0, 0], [0, 0], [0, 1]])
b = np.array([[1, 1], [-1, -1], [0, 0], [0, 0], [1, 1], [0, 1], [1, 0],
[1, 0], [1, 0], [0, 1], [1, 1]])
gts = np.stack((a, b), axis=1)
variations = Variations()
samples = np.array([str(i) for i in range(gts.shape[1])])
variations.samples = da.from_array(samples)
variations[GT_FIELD] = da.from_array(gts)
vars1 = keep_samples(variations, ['0'])[FLT_VARS]
vars2 = keep_samples(variations, ['1'])[FLT_VARS]
snp_by_snp_compartion_array = _kosman(vars1, vars2)
distance_ab = compute(snp_by_snp_compartion_array,
silence_runtime_warnings=True)
distance = distance_ab.sum() / distance_ab.shape[0]
assert distance == 1 / 3
c = np.full(shape=(11, 2), fill_value=1, dtype=np.int16)
d = np.full(shape=(11, 2), fill_value=1, dtype=np.int16)
gts = np.stack((c, d), axis=1)
variations = Variations()
samples = np.array([str(i) for i in range(gts.shape[1])])
variations.samples = da.from_array(samples)
variations[GT_FIELD] = da.from_array(gts)
vars1 = keep_samples(variations, ['0'])[FLT_VARS]
vars2 = keep_samples(variations, ['1'])[FLT_VARS]
snp_by_snp_compartion_array = _kosman(vars1, vars2)
distance_ab = compute(snp_by_snp_compartion_array,
silence_runtime_warnings=True)
distance = distance_ab.sum() / distance_ab.shape[0]
assert distance == 0
variations = Variations()
gts = np.stack((b, d), axis=1)
samples = np.array([str(i) for i in range(gts.shape[1])])
variations.samples = da.from_array(samples)
variations[GT_FIELD] = da.from_array(gts)
vars1 = keep_samples(variations, ['0'])[FLT_VARS]
vars2 = keep_samples(variations, ['1'])[FLT_VARS]
snp_by_snp_compartion_array = _kosman(vars1, vars2)
distance_ab = compute(snp_by_snp_compartion_array,
silence_runtime_warnings=True)
distance = distance_ab.sum() / distance_ab.shape[0]
assert distance == 0.45
def test_kosman_2_indis_in_memory(self):
a = np.array([[-1, -1], [0, 0], [0, 1], [0, 0], [0, 0], [0, 1], [0, 1],
[0, 1], [0, 0], [0, 0], [0, 1]])
b = np.array([[1, 1], [-1, -1], [0, 0], [0, 0], [1, 1], [0, 1], [1, 0],
[1, 0], [1, 0], [0, 1], [1, 1]])
gts = np.stack((a, b), axis=1)
variations = Variations()
samples = np.array([str(i) for i in range(gts.shape[1])])
variations.samples = samples
variations[GT_FIELD] = gts
vars1 = keep_samples(variations, ['0'])[FLT_VARS]
vars2 = keep_samples(variations, ['1'])[FLT_VARS]
distance_ab = _kosman(vars1, vars2)
va.make_sure_array_is_in_memory(distance_ab)
distance = distance_ab.sum() / distance_ab.shape[0]
assert distance == 1 / 3
c = np.full(shape=(11, 2), fill_value=1, dtype=np.int16)
d = np.full(shape=(11, 2), fill_value=1, dtype=np.int16)
gts = np.stack((c, d), axis=1)
variations = Variations()
samples = np.array([str(i) for i in range(gts.shape[1])])
variations.samples = samples
variations[GT_FIELD] = gts
vars1 = keep_samples(variations, ['0'])[FLT_VARS]
vars2 = keep_samples(variations, ['1'])[FLT_VARS]
distance_ab = _kosman(vars1, vars2)
distance = distance_ab.sum() / distance_ab.shape[0]
assert distance == 0
variations = Variations()
gts = np.stack((b, d), axis=1)
samples = np.array([str(i) for i in range(gts.shape[1])])
variations.samples = samples
variations[GT_FIELD] = gts
vars1 = keep_samples(variations, ['0'])[FLT_VARS]
vars2 = keep_samples(variations, ['1'])[FLT_VARS]
distance_ab = _kosman(vars1, vars2)
distance = distance_ab.sum() / distance_ab.shape[0]
assert distance == 0.45
def test_kosman_missing(self):
a = np.array([[-1, -1], [0, 0], [0, 1], [0, 0], [0, 0], [0, 1], [0, 1],
[0, 1], [0, 0], [0, 0], [0, 1]])
b = np.array([[1, 1], [-1, -1], [0, 0], [0, 0], [1, 1], [0, 1], [1, 0],
[1, 0], [1, 0], [0, 1], [1, 1]])
gts = np.stack((a, b), axis=1)
variations = Variations()
samples = np.array([str(i) for i in range(gts.shape[1])])
variations.samples = da.from_array(samples)
variations[GT_FIELD] = da.from_array(gts)
vars1 = keep_samples(variations, ['0'])[FLT_VARS]
vars2 = keep_samples(variations, ['1'])[FLT_VARS]
snp_by_snp_compartion_array = _kosman(vars1, vars2)
distance_ab = compute(snp_by_snp_compartion_array,
silence_runtime_warnings=True)
c = np.array([[-1, -1], [-1, -1], [0, 1],
[0, 0], [0, 0], [0, 1], [0, 1],
[0, 1], [0, 0], [0, 0], [0, 1]])
d = np.array([[-1, -1], [-1, -1], [0, 0],
[0, 0], [1, 1], [0, 1], [1, 0],
[1, 0], [1, 0], [0, 1], [1, 1]])
gts = np.stack((c, d), axis=1)
variations = Variations()
samples = np.array([str(i) for i in range(gts.shape[1])])
variations.samples = da.from_array(samples)
variations[GT_FIELD] = da.from_array(gts)
vars1 = keep_samples(variations, ['0'])[FLT_VARS]
vars2 = keep_samples(variations, ['1'])[FLT_VARS]
snp_by_snp_compartion_array = _kosman(vars1, vars2)
distance_cd = compute(snp_by_snp_compartion_array,
silence_runtime_warnings=True)
assert np.all(distance_ab == distance_cd)
def test_kosman_missing_in_memory(self):
a = np.array([[-1, -1], [0, 0], [0, 1], [0, 0], [0, 0], [0, 1], [0, 1],
[0, 1], [0, 0], [0, 0], [0, 1]])
b = np.array([[1, 1], [-1, -1], [0, 0], [0, 0], [1, 1], [0, 1], [1, 0],
[1, 0], [1, 0], [0, 1], [1, 1]])
gts = np.stack((a, b), axis=1)
variations = Variations()
samples = np.array([str(i) for i in range(gts.shape[1])])
variations.samples = samples
variations[GT_FIELD] = gts
vars1 = keep_samples(variations, ['0'])[FLT_VARS]
vars2 = keep_samples(variations, ['1'])[FLT_VARS]
distance_ab = _kosman(vars1, vars2)
c = np.array([[-1, -1], [-1, -1], [0, 1],
[0, 0], [0, 0], [0, 1], [0, 1],
[0, 1], [0, 0], [0, 0], [0, 1]])
d = np.array([[-1, -1], [-1, -1], [0, 0],
[0, 0], [1, 1], [0, 1], [1, 0],
[1, 0], [1, 0], [0, 1], [1, 1]])
gts = np.stack((c, d), axis=1)
variations = Variations()
samples = np.array([str(i) for i in range(gts.shape[1])])
variations.samples = samples
variations[GT_FIELD] = gts
vars1 = keep_samples(variations, ['0'])[FLT_VARS]
vars2 = keep_samples(variations, ['1'])[FLT_VARS]
distance_cd = _kosman(vars1, vars2)
assert np.all(distance_ab == distance_cd)
def test_kosman_pairwise(self):
a = np.array([[-1, -1], [0, 0], [0, 1],
[0, 0], [0, 0], [0, 1], [0, 1],
[0, 1], [0, 0], [0, 0], [0, 1]])
b = np.array([[1, 1], [-1, -1], [0, 0],
[0, 0], [1, 1], [0, 1], [1, 0],
[1, 0], [1, 0], [0, 1], [1, 2]])
c = np.full(shape=(11, 2), fill_value=1, dtype=np.int16)
d = np.full(shape=(11, 2), fill_value=1, dtype=np.int16)
gts = np.stack((a, b, c, d), axis=0)
gts = np.transpose(gts, axes=(1, 0, 2)).astype(np.int16)
variations = Variations()
samples = np.array([str(i) for i in range(gts.shape[1])])
variations.samples = da.from_array(samples)
variations[GT_FIELD] = da.from_array(gts)
distances, samples = calc_kosman_dist(variations,
silence_runtime_warning=True)
expected = [0.33333333, 0.75, 0.75, 0.5, 0.5, 0.]
assert np.allclose(distances, expected)
def test_kosman_pairwise_in_memory(self):
a = np.array([[-1, -1], [0, 0], [0, 1],
[0, 0], [0, 0], [0, 1], [0, 1],
[0, 1], [0, 0], [0, 0], [0, 1]])
b = np.array([[1, 1], [-1, -1], [0, 0],
[0, 0], [1, 1], [0, 1], [1, 0],
[1, 0], [1, 0], [0, 1], [1, 2]])
c = np.full(shape=(11, 2), fill_value=1, dtype=np.int16)
d = np.full(shape=(11, 2), fill_value=1, dtype=np.int16)
gts = np.stack((a, b, c, d), axis=0)
gts = np.transpose(gts, axes=(1, 0, 2)).astype(np.int16)
variations = Variations()
samples = np.array([str(i) for i in range(gts.shape[1])])
variations.samples = samples
variations[GT_FIELD] = gts
distances, samples = calc_kosman_dist(variations)
expected = [0.33333333, 0.75, 0.75, 0.5, 0.5, 0.]
assert np.allclose(distances, expected)
class NeiUnbiasedDistTest(unittest.TestCase):
def test_nei_dist(self):
gts = np.array([[[1, 1], [5, 2], [2, 2], [3, 2]],
[[1, 1], [1, 2], [2, 2], [2, 1]],
[[-1, -1], [-1, -1], [-1, -1], [-1, -1]]])
variations = Variations()
variations.samples = da.from_array(np.array([1, 2, 3, 4]))
variations[GT_FIELD] = da.from_array(gts)
pops = [[1, 2], [3, 4]]
dists = calc_pop_pairwise_unbiased_nei_dists(variations,
max_alleles=6,
populations=pops,
silence_runtime_warnings=True,
min_num_genotypes=1)
assert math.isclose(dists[0], 0.3726315908494797)
# all missing
gts = np.array([[[-1, -1], [-1, -1], [-1, -1], [-1, -1]]])
variations = Variations()
variations.samples = da.from_array(np.array([1, 2, 3, 4]))
variations[GT_FIELD] = da.from_array(gts)
pops = [[1, 2], [3, 4]]
dists = calc_pop_pairwise_unbiased_nei_dists(variations,
max_alleles=1,
populations=pops,
silence_runtime_warnings=True,
min_num_genotypes=1)
assert math.isnan(dists[0])
# min_num_genotypes
gts = np.array([[[1, 1], [5, 2], [2, 2], [3, 2]],
[[1, 1], [1, 2], [2, 2], [2, 1]],
[[-1, -1], [-1, -1], [-1, -1], [-1, -1]]])
variations = Variations()
variations.samples = da.from_array(np.array([1, 2, 3, 4]))
variations[GT_FIELD] = da.from_array(gts)
pops = [[1, 2], [3, 4]]
dists = calc_pop_pairwise_unbiased_nei_dists(variations,
max_alleles=6,
populations=pops,
silence_runtime_warnings=True,
min_num_genotypes=1)
assert math.isclose(dists[0], 0.3726315908494797)
def test_nei_dist_in_memory(self):
gts = np.array([[[1, 1], [5, 2], [2, 2], [3, 2]],
[[1, 1], [1, 2], [2, 2], [2, 1]],
[[-1, -1], [-1, -1], [-1, -1], [-1, -1]]])
variations = Variations()
variations.samples = np.array([1, 2, 3, 4])
variations[GT_FIELD] = gts
pops = [[1, 2], [3, 4]]
dists = calc_pop_pairwise_unbiased_nei_dists(variations,
max_alleles=6,
populations=pops,
silence_runtime_warnings=True,
min_num_genotypes=1)
assert math.isclose(dists[0], 0.3726315908494797)
# all missing
gts = np.array([[[-1, -1], [-1, -1], [-1, -1], [-1, -1]]])
variations = Variations()
variations.samples = np.array([1, 2, 3, 4])
variations[GT_FIELD] = gts
pops = [[1, 2], [3, 4]]
dists = calc_pop_pairwise_unbiased_nei_dists(variations,
max_alleles=1,
populations=pops,
silence_runtime_warnings=True,
min_num_genotypes=1)
assert math.isnan(dists[0])
# min_num_genotypes
gts = np.array([[[1, 1], [5, 2], [2, 2], [3, 2]],
[[1, 1], [1, 2], [2, 2], [2, 1]],
[[-1, -1], [-1, -1], [-1, -1], [-1, -1]]])
variations = Variations()
variations.samples = np.array([1, 2, 3, 4])
variations[GT_FIELD] = gts
pops = [[1, 2], [3, 4]]
dists = calc_pop_pairwise_unbiased_nei_dists(variations,
max_alleles=6,
populations=pops,
silence_runtime_warnings=True,
min_num_genotypes=1)
assert math.isclose(dists[0], 0.3726315908494797)
class DsetDistTest(unittest.TestCase):
def test_dest_jost_distance(self):
gts = [[(1, 1), (1, 3), (1, 2), (1, 4), (3, 3), (3, 2), (3, 4), (2, 2), (2, 4), (4, 4), (-1, -1)],
[(1, 3), (1, 1), (1, 1), (1, 3), (3, 3), (3, 2), (3, 4), (2, 2), (2, 4), (4, 4), (-1, -1)]]
samples = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
pops = [[1, 2, 3, 4, 5], [6, 7, 8, 9, 10, 11]]
dps = [[20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20],
[20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20]]
variations = Variations()
variations.samples = da.from_array(np.array(samples))
variations[GT_FIELD] = da.from_array(np.array(gts))
variations[DP_FIELD] = da.from_array(np.array(dps))
dists = calc_dset_pop_distance(variations, max_alleles=5,
silence_runtime_warnings=True,
populations=pops, min_num_genotypes=0)
assert np.allclose(dists, [0.65490196])
dists = calc_dset_pop_distance(variations, max_alleles=5,
silence_runtime_warnings=True,
populations=pops, min_num_genotypes=6)
assert np.all(np.isnan(dists))
def test_dest_jost_distance_in_memory(self):
gts = [[(1, 1), (1, 3), (1, 2), (1, 4), (3, 3), (3, 2), (3, 4), (2, 2), (2, 4), (4, 4), (-1, -1)],
[(1, 3), (1, 1), (1, 1), (1, 3), (3, 3), (3, 2), (3, 4), (2, 2), (2, 4), (4, 4), (-1, -1)]]
samples = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
pops = [[1, 2, 3, 4, 5], [6, 7, 8, 9, 10, 11]]
dps = [[20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20],
[20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20]]
variations = Variations()
variations.samples = np.array(samples)
variations[GT_FIELD] = np.array(gts)
variations[DP_FIELD] = np.array(dps)
dists = calc_dset_pop_distance(variations, max_alleles=5,
silence_runtime_warnings=True,
populations=pops, min_num_genotypes=0)
assert np.allclose(dists, [0.65490196])
dists = calc_dset_pop_distance(variations, max_alleles=5,
silence_runtime_warnings=True,
populations=pops, min_num_genotypes=6)
assert np.all(np.isnan(dists))
def test_empty_pop(self):
missing = (-1, -1)
gts = [[(1, 1), (1, 3), (1, 2), (1, 4), (3, 3), (3, 2), (3, 4), (2, 2), (2, 4), (4, 4), (-1, -1)],
[(1, 3), (1, 1), (1, 1), (1, 3), (3, 3), (3, 2), (3, 4), (2, 2), (2, 4), (4, 4), (-1, -1)],
[missing, missing, missing, missing, missing, (3, 2), (3, 4), (2, 2), (2, 4), (4, 4), (-1, -1)],
]
dps = [[20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 0],
[20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 0],
[0, 0, 0, 0, 0, 20, 20, 20, 20, 20, 0]]
samples = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
pops = [[1, 2, 3, 4, 5], [6, 7, 8, 9, 10, 11]]
variations = Variations()
variations.samples = da.from_array(np.array(samples))
variations[GT_FIELD] = da.from_array(np.array(gts))
variations[DP_FIELD] = da.from_array(np.array(dps))
dists = calc_dset_pop_distance(variations, max_alleles=5,
silence_runtime_warnings=True,
populations=pops, min_num_genotypes=0)
assert np.allclose(dists, [0.65490196])
gts = [[missing, missing, missing, missing, missing, (3, 2), (3, 4), (2, 2), (2, 4), (4, 4), (-1, -1)],
[missing, missing, missing, missing, missing, (3, 2), (3, 4), (2, 2), (2, 4), (4, 4), (-1, -1)],
[missing, missing, missing, missing, missing, (3, 2), (3, 4), (2, 2), (2, 4), (4, 4), (-1, -1)],
]
dps = [[0, 0, 0, 0, 0, 20, 20, 20, 20, 20, 0],
[0, 0, 0, 0, 0, 20, 20, 20, 20, 20, 0],
[0, 0, 0, 0, 0, 20, 20, 20, 20, 20, 0]]
samples = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
pops = [[1, 2, 3, 4, 5], [6, 7, 8, 9, 10, 11]]
variations = Variations()
variations.samples = da.from_array(np.array(samples))
variations[GT_FIELD] = da.from_array(np.array(gts))
variations[DP_FIELD] = da.from_array(np.array(dps))
dists = calc_dset_pop_distance(variations, max_alleles=5,
silence_runtime_warnings=True,
populations=pops, min_num_genotypes=0)
assert np.isnan(dists[0])
if __name__ == '__main__':
# import sys; sys.argv = ['.', 'DsetDistTest.test_empty_pop']
unittest.main()
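# Hand-checkable sketch of the per-SNP Kosman score that the first test above
# asserts (an illustrative re-implementation, not the library's internals):
# distance = 1 - shared_alleles/2, with SNPs where a sample is fully missing
# dropped, so rows `a` vs `b` sum to 3.0 over 9 usable SNPs == 1/3.
#
#   from collections import Counter
#   def kosman_snp(g1, g2):
#       shared = sum((Counter(g1) & Counter(g2)).values())
#       return 1 - shared / 2
#   kosman_snp((0, 1), (0, 0))   # 0.5
#   kosman_snp((0, 1), (1, 0))   # 0.0
#   kosman_snp((0, 0), (1, 1))   # 1.0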
|
[
"pziarsolo@upv.es"
] |
pziarsolo@upv.es
|
1b51d266eac14d71dc95534f42206ded8f0809a1
|
59d03a204de30d6d2160ae8b3346f852b7bd5fce
|
/meiduo_mall/meiduo_mall/utils/exceptions.py
|
be9036e00b9ce684272ae64bdf3e1ed2e29ed1d4
|
[] |
no_license
|
eninem123/meiduo_mall
|
40d8b67f95ef98d6b3e9810a9fdbd7ef87dd323e
|
4d6d0b526e026bf31515abb546c72d7ed48495c6
|
refs/heads/master
| 2020-04-06T10:40:19.614955
| 2018-11-13T14:46:19
| 2018-11-13T14:46:19
| 157,387,690
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,337
|
py
|
"""
由于一些数据库的异常django不能捕获,要自己捕获
做个异常处理的方法
"""
from rest_framework.views import exception_handler as drf_exception_handler
import logging
from django.db import DatabaseError
from redis.exceptions import RedisError
from rest_framework.response import Response
from rest_framework import status
# 获取在配置文件中定义的logger,用来记录日志 这里的django就是setting日志器命名的
logger = logging.getLogger('django')
def exception_handler(exc, context):
"""
自定义异常处理
:param exc: 异常
:param context: 抛出异常的上下文
:return: Response响应对象
"""
# 调用drf框架原生的异常处理方法
response = drf_exception_handler(exc, context)
# 没有响应要么就是没报错,要么就是报错没法识别,没法识别如果是数据库错误就返回
if response is None:
# 视图上下文
view = context['view']
# 捕获数据库的异常,如果exc是数据库异常
if isinstance(exc, DatabaseError) or isinstance(exc, RedisError):
# 数据库异常
logger.error('[%s] %s' % (view, exc))
response = Response({'message': '服务器内部错误'}, status=status.HTTP_507_INSUFFICIENT_STORAGE)
return response
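# To activate this handler, point DRF at it in settings.py (standard DRF
# configuration; the dotted path assumes this file's location in the project):
#
#   REST_FRAMEWORK = {
#       'EXCEPTION_HANDLER': 'meiduo_mall.utils.exceptions.exception_handler',
#   }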
|
[
"2442972114@qq.com"
] |
2442972114@qq.com
|
38330c2113ca389eab763c88370aae6d7a5fbc3e
|
26a36f20246724bf9be0f2050e1185e1e647014a
|
/python/mirror.py
|
b155d5db5db0d83f065936ca4be55bc129b31f2c
|
[] |
no_license
|
lddsjy/leetcode
|
b80b108637ea65220516b4ffb565bdd6fee7b008
|
5fee1609d3610fdc1ddfdaf998cd1eade6ddc14e
|
refs/heads/master
| 2021-07-05T01:33:02.762558
| 2021-01-24T08:05:45
| 2021-01-24T08:05:45
| 218,760,249
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 387
|
py
|
# -*- coding:utf-8 -*-
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
    # Return the root node of the mirrored tree
def Mirror(self, root):
if not root:
return None
root.left, root.right = root.right, root.left
self.Mirror(root.left)
self.Mirror(root.right)
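# Quick illustrative check (a sketch; this TreeNode mirrors the commented-out
# class above):
if __name__ == '__main__':
    class TreeNode:
        def __init__(self, x):
            self.val = x
            self.left = None
            self.right = None
    root = TreeNode(1)
    root.left, root.right = TreeNode(2), TreeNode(3)
    Solution().Mirror(root)
    print("%s %s" % (root.left.val, root.right.val))  # 3 2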
|
[
"710910630@qq.com"
] |
710910630@qq.com
|
924fb21f59270b7ce97c1bb05d536a81999db58b
|
b0f287876ac885504c8d52b343edd56e58ee147d
|
/reference.py
|
197777c73e663bc6d72e60c5fe2234c9964337f6
|
[] |
no_license
|
Swoorup/idapylibs
|
a751b622748a58a056a0ea2b36f451d5abb3c68c
|
0f10241c99ec326a45b9d35a411f66da4a1f2521
|
refs/heads/master
| 2016-09-05T18:06:57.068118
| 2015-09-14T04:12:56
| 2015-09-14T04:12:56
| 10,644,433
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 121
|
py
|
import find
class Filter(object):
    def __init__(self):
        pass
    def debugprint(self, str):
        print("FILTER: " + str)
|
[
"satanic_dementia143@yahoo.com"
] |
satanic_dementia143@yahoo.com
|
2132ca489839efb59eecac3da30efd56457831e6
|
18eac94ff076c1eecd72870ef93ae656906e8673
|
/supervised_learning/0x06-keras/13-predict.py
|
e2426676d174b5ff52b5fd6940693d363bda35a2
|
[] |
no_license
|
dgquintero/holbertonschool-machine_learning
|
c1331ff87e053f9c143a0e503e8db177dfc7aafe
|
c80073d0ef68deeedbe2d991e296ef75f58a220f
|
refs/heads/master
| 2022-12-19T21:49:10.581793
| 2020-10-15T14:56:22
| 2020-10-15T14:56:22
| 279,329,167
| 0
| 1
| null | 2020-09-25T19:11:52
| 2020-07-13T14:42:03
|
Python
|
UTF-8
|
Python
| false
| false
| 574
|
py
|
#!/usr/bin/env python3
"""function predict"""
import tensorflow.keras as K
def predict(network, data, verbose=False):
"""
    makes a prediction with a neural network
    Arguments:
        network: the network model to predict with
        data: the input data to make the prediction with
        verbose: a boolean that determines if output
            should be printed during the prediction process
    Returns: the prediction for the data
"""
prediction = network.predict(data, verbose=verbose)
return prediction
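# Minimal usage sketch (illustrative; the model shape and data are made up):
#
#   import numpy as np
#   model = K.Sequential([K.layers.Dense(1, input_shape=(4,))])
#   model.compile(optimizer='adam', loss='mse')
#   preds = predict(model, np.random.rand(8, 4))   # shape (8, 1)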
|
[
"dgquintero02@hotmail.com"
] |
dgquintero02@hotmail.com
|
6276ed8fbaf501f6fe6c7314d1eee780a50c0c89
|
270d7f88e47683abd55c0191466c80513b2aa9f9
|
/tests/test_tta.py
|
9d0ba17296509b58febeed4a4f4c0b193716299d
|
[
"MIT"
] |
permissive
|
williamberrios/pytorch-toolbelt
|
abdf8e455a4ffc79d2afbc92e80005a821fb97a9
|
4a24e6324b8270d31c08b8b2f667d740b9823377
|
refs/heads/master
| 2023-07-06T06:35:24.197821
| 2021-08-12T07:47:20
| 2021-08-12T07:47:20
| 400,866,088
| 1
| 0
|
MIT
| 2021-08-28T18:43:12
| 2021-08-28T18:43:12
| null |
UTF-8
|
Python
| false
| false
| 4,994
|
py
|
from collections import defaultdict
import cv2
import torch
import numpy as np
import pytest
from torch import nn
from pytorch_toolbelt.inference import tta
from pytorch_toolbelt.modules import GlobalAvgPool2d
from pytorch_toolbelt.utils.torch_utils import to_numpy
from pytorch_toolbelt.zoo import resnet34_unet64_s4
skip_if_no_cuda = pytest.mark.skipif(not torch.cuda.is_available(), reason="CUDA is not available")
class NoOp(nn.Module):
def __init__(self):
super().__init__()
def forward(self, input):
return input
class SumAll(nn.Module):
def __init__(self):
super().__init__()
def forward(self, input):
return input.sum(dim=[1, 2, 3])
def test_d4_image2mask():
x = torch.rand((4, 3, 224, 224))
model = NoOp()
output = tta.d4_image2mask(model, x)
np.testing.assert_allclose(to_numpy(output), to_numpy(x), atol=1e-6, rtol=1e-6)
def test_d4_image2mask_v2():
x = torch.rand((4, 3, 224, 224))
x_a = tta.d4_image_augment(x)
y = tta.d4_image_deaugment(x_a)
np.testing.assert_allclose(to_numpy(y), to_numpy(x), atol=1e-6, rtol=1e-6)
@torch.no_grad()
@skip_if_no_cuda()
def test_d4_speed():
df = defaultdict(list)
n = 100
model = resnet34_unet64_s4().cuda().eval()
x = torch.rand((4, 3, 224, 224)).float().cuda()
y1 = tta.d4_image2mask(model, x)
y2 = tta.d4_image_deaugment(model(tta.d4_image_augment(x)))
np.testing.assert_allclose(to_numpy(y1), to_numpy(y2), atol=1e-6, rtol=1e-6)
for deterministic in [False, True]:
for benchmark in [False, True]:
for dtype in [torch.float16, torch.float32]:
torch.cuda.empty_cache()
                # these flags live on torch.backends.cudnn (the original wrote
                # torch.backends.cuda, which silently sets unused attributes)
                torch.backends.cudnn.deterministic = deterministic
                torch.backends.cudnn.benchmark = benchmark
model = resnet34_unet64_s4().to(dtype).cuda().eval()
speed_v1 = 0
speed_v2 = 0
for i in range(n):
x = torch.rand((4, 3, 224, 224)).to(dtype).cuda(non_blocking=False)
start = cv2.getTickCount()
y = tta.d4_image2mask(model, x)
v = y.sum().item()
finish = cv2.getTickCount()
speed_v1 += finish - start
np.testing.assert_allclose(v, v, atol=1e-6, rtol=1e-6)
for i in range(n):
x = torch.rand((4, 3, 224, 224)).to(dtype).cuda(non_blocking=False)
start = cv2.getTickCount()
x_a = tta.d4_image_augment(x)
x_a = model(x_a)
y = tta.d4_image_deaugment(x_a)
v = y.sum().item()
finish = cv2.getTickCount()
speed_v2 += finish - start
np.testing.assert_allclose(v, v, atol=1e-6, rtol=1e-6)
df["mode"].append("fp16" if dtype == torch.float16 else "fp32")
df["deterministic"].append(deterministic)
df["benchmark"].append(benchmark)
df["d4_image2mask (ms)"].append(1000.0 * speed_v1 / (cv2.getTickFrequency() * n))
df["d4_augment (ms)"].append(1000.0 * speed_v2 / (cv2.getTickFrequency() * n))
import pandas as pd
df = pd.DataFrame.from_dict(df)
pd.set_option("display.max_columns", None)
pd.set_option("display.max_rows", None)
print(df)
df.to_csv("tta_eval.csv", index=False)
def test_fliplr_image2mask():
x = torch.rand((4, 3, 224, 224))
model = NoOp()
output = tta.fliplr_image2mask(model, x)
np.testing.assert_allclose(to_numpy(output), to_numpy(x), atol=1e-6, rtol=1e-6)
def test_d4_image2label():
x = torch.tensor([[1, 2, 3, 4], [5, 6, 7, 8], [9, 0, 1, 2], [3, 4, 5, 6]]).unsqueeze(0).unsqueeze(0).float()
model = SumAll()
output = tta.d4_image2label(model, x)
expected = int(x.sum())
assert int(output) == expected
def test_fliplr_image2label():
x = torch.tensor([[1, 2, 3, 4], [5, 6, 7, 8], [9, 0, 1, 2], [3, 4, 5, 6]]).unsqueeze(0).unsqueeze(0).float()
model = SumAll()
output = tta.fliplr_image2label(model, x)
expected = int(x.sum())
assert int(output) == expected
def test_fivecrop_image2label():
x = torch.tensor([[1, 2, 3, 4], [5, 6, 7, 8], [9, 0, 1, 2], [3, 4, 5, 6]]).unsqueeze(0).unsqueeze(0).float()
model = SumAll()
output = tta.fivecrop_image2label(model, x, (2, 2))
expected = ((1 + 2 + 5 + 6) + (3 + 4 + 7 + 8) + (9 + 0 + 3 + 4) + (1 + 2 + 5 + 6) + (6 + 7 + 0 + 1)) / 5
assert int(output) == expected
def test_tencrop_image2label():
x = torch.tensor([[1, 2, 3, 4], [5, 6, 7, 8], [9, 0, 1, 2], [3, 4, 5, 6]]).unsqueeze(0).unsqueeze(0).float()
model = SumAll()
output = tta.tencrop_image2label(model, x, (2, 2))
expected = (2 * ((1 + 2 + 5 + 6) + (3 + 4 + 7 + 8) + (9 + 0 + 3 + 4) + (1 + 2 + 5 + 6) + (6 + 7 + 0 + 1))) / 10
assert int(output) == expected
|
[
"ekhvedchenya@gmail.com"
] |
ekhvedchenya@gmail.com
|
abee20e74748f84b81263b5a7dca482647bdac3d
|
acb8e84e3b9c987fcab341f799f41d5a5ec4d587
|
/langs/9/v4g.py
|
947f100ab07df7782ca283e84a091c434fd16de6
|
[] |
no_license
|
G4te-Keep3r/HowdyHackers
|
46bfad63eafe5ac515da363e1c75fa6f4b9bca32
|
fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2
|
refs/heads/master
| 2020-08-01T12:08:10.782018
| 2016-11-13T20:45:50
| 2016-11-13T20:45:50
| 73,624,224
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 486
|
py
|
import sys
def printFunction(lineRemaining):
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
print ' '.join(lineRemaining)
else:
print
def main(fileName):
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'v4G':
printFunction(data[1:])
else:
print 'ERROR'
return
if __name__ == '__main__':
main(sys.argv[1])
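# Illustrative input for the toy interpreter above (the file name is
# hypothetical). Given input.txt containing:
#   v4G " hello world "
#   x4G " nope "
# running `python v4g.py input.txt` prints:
#   hello world
#   ERROR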
|
[
"juliettaylorswift@gmail.com"
] |
juliettaylorswift@gmail.com
|
6ca39e346afe1a24d2e9392a806a89b65aac8f46
|
5e04722e7b1e259065af53db329ef49517f78212
|
/blog/migrations/0008_auto_20180427_1739.py
|
f38793c3e22f7cf5a01ca59476ef2db1bca1b4b5
|
[] |
no_license
|
luckyqiang123/gitmyblog
|
d4c52dadec2fb542e7b4eaafb424034004d06a9e
|
b51b5bbfbd7013169e22ff1d25908fcf845b90b1
|
refs/heads/master
| 2020-03-12T05:32:11.667077
| 2018-06-19T11:07:17
| 2018-06-19T11:07:17
| 130,465,467
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 710
|
py
|
# Generated by Django 2.0 on 2018-04-27 09:39
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('blog', '0007_auto_20180306_2107'),
]
operations = [
migrations.AlterField(
model_name='blog',
name='author',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='blog',
name='blog_type',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='blog.BlogType'),
),
]
|
[
"zhangzhiqiang@zhangzhiqiang-X450VB.lan"
] |
zhangzhiqiang@zhangzhiqiang-X450VB.lan
|
d1b636ed6f52579d24b77748566ee0b2b1321c0c
|
2fae7d7ba6b2a4590fd4d90b1a491e919ec744de
|
/myPy/day04/day04-exam03-최소값찾기.py
|
3c02cc8ee23cca83d662d69c45d87e0a1108605f
|
[] |
no_license
|
sw-Jack/Python
|
0952ded606f79c07c77a021b075e09b1e95caca4
|
c139301bf3a98f9b5289807ca8141febb947603b
|
refs/heads/master
| 2023-02-02T11:06:18.264799
| 2020-12-21T13:11:32
| 2020-12-21T13:11:32
| 290,997,124
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 254
|
py
|
# Program that finds the minimum value in a list
def min(lst) :
    a = len(lst)
    # start from lst[0]: initialising with lst[a-1] while looping from index 1
    # would never compare lst[0], missing a minimum at the front
    minVal = lst[0]
    for i in range(1,a) :
        if lst[i] < minVal :
            minVal = lst[i]
    return minVal
exlst = [10,43,46,2,100,12,9,78]
print(min(exlst))
|
[
"jacksuwon@gmail.com"
] |
jacksuwon@gmail.com
|
32508c5eceee6c55b16c8bf3f92d09b0b222487b
|
4c9d97989a2b716e4b3e942c0453ea2b0473c61a
|
/tests/test_utils.py
|
bdd51d4d3df44c91f96fff3a2875dc65909c509d
|
[
"BSD-2-Clause"
] |
permissive
|
andvikt/mqtt_decorator
|
23418a8dae2844bed840f8f5fe26e4b3222d37e5
|
36bf76d3d6fa548172db32cdf002d68c96c7a535
|
refs/heads/master
| 2020-05-24T16:18:28.481000
| 2019-06-24T20:56:18
| 2019-06-24T20:56:18
| 187,354,204
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,696
|
py
|
from logging import basicConfig, DEBUG
basicConfig(level=DEBUG)
import smarthome.rules
from smarthome import utils, const
import pytest
import asyncio
import typing
from dataclasses import dataclass, field
def test_proxy():
from smarthome.utils.proxy import LambdaProxy, proxy
@dataclass()
class TestObj:
value: int = 1
another_value: int = 4
some_lambda: typing.Callable = field(default_factory=lambda: lambda: 20, compare=False)
def property(self):
return self.value + 2
def __add__(self, other):
return typing.cast(
self.__class__
, LambdaProxy(self, value = lambda x: x + other)
)
state = TestObj()
state2 = TestObj()
assert state == state2
check1 = TestObj(value=4, another_value=9)
mut = proxy(state
, value=lambda x: x * 2
, another_value = 9
, some_lambda = lambda x: lambda : x() / 2
)
state.value += 1
assert mut == check1
assert ((state + 5) + 1).value == 8
assert state.value == 2
assert mut.value == 4
assert state.some_lambda() == 20
assert mut.some_lambda() == 10
assert state.property() == 4
assert mut.property() == 6
assert mut.another_value == 9
assert isinstance(mut, TestObj)
assert isinstance(mut, LambdaProxy)
mut_on_mut = LambdaProxy(mut, value=lambda x: x * 10)
assert mut_on_mut.value == 40
assert mut_on_mut.property() == 42
test_dict = {'hello': mut}
prox_dict = LambdaProxy(test_dict)
assert prox_dict['hello'] is mut
@pytest.mark.asyncio
async def test_states():
from smarthome import State, rule
from asyncio_primitives import utils as async_utils
st1 = State(False, _str='state1')
st2 = State(False, _str='state2')
hitcnt = 0
@rule(st1 == st2)
def hello():
nonlocal hitcnt
hitcnt+=1
task = await hello()
await st1.change(True)
await st2.change(True)
await st1.change(False)
await st2.change(False)
with async_utils.ignoreerror(asyncio.CancelledError):
task.cancel()
await task
assert hitcnt == 2
@pytest.mark.asyncio
async def test_complex_rule():
from smarthome import State, rule
from asyncio_primitives import utils as async_utils
st1 = State(0)
st2 = State(0)
hitcnt = 0
@rule((st1 > st2) | (st2 < 2))
async def comp_rule():
nonlocal hitcnt
hitcnt+=1
const.logger.debug(f'new hit {hitcnt}')
task = await comp_rule()
await st2.change(1)
await st2.change(2)
await st1.change(3)
assert hitcnt == 2
with async_utils.ignoreerror(asyncio.CancelledError):
task.cancel()
await task
@rule(((st1 + st2) == 2) | ((st2 - 1) == 5))
async def add_rule():
nonlocal hitcnt
hitcnt += 1
const.logger.debug(f'new hit {hitcnt}')
task = await add_rule()
await st1.change(1)
await st2.change(1)
await st2.change(6)
assert hitcnt == 4
with async_utils.ignoreerror(asyncio.CancelledError):
task.cancel()
await task
@pytest.mark.asyncio
async def test_max_count():
counts = []
@smarthome.rules.counter(max_count=3)
async def hello(cnt):
counts.append(cnt)
await asyncio.gather(*[hello() for x in range(6)])
assert counts == [0,1,2,0,1,2]
@smarthome.rules.counter(max_wait=0.3)
async def fast_count(cnt):
counts.append(cnt)
counts.clear()
await fast_count()
await asyncio.sleep(0.25)
await fast_count()
await asyncio.sleep(0.25)
await fast_count()
await asyncio.sleep(0.35)
await fast_count()
assert counts == [0,1,2,0]
@pytest.mark.asyncio
async def test_timeshed():
from smarthome import rule, utils
from datetime import timedelta
started = utils.CustomTime.now()
ended: utils.CustomTime = None
@rule(utils.TimeTracker.now() + timedelta(seconds=0.5))
async def hey():
nonlocal ended
ended = utils.CustomTime.now()
await hey()
await asyncio.sleep(1.2)
assert round((ended - started).total_seconds()) == 1
@pytest.mark.asyncio
async def test_timeshed_multi():
from smarthome import rule, utils
from datetime import timedelta
hitcnt = 0
@utils.TimeTracker.repeat(time_interval=timedelta(seconds=0.3))
async def hey(x, y):
nonlocal hitcnt
hitcnt+=1
assert x == 1, y==2
task = await hey(1, y=2)
await asyncio.sleep(1)
assert hitcnt == 3
with pytest.raises(asyncio.CancelledError):
task.cancel()
await task
|
[
"andvikt@gmail.com"
] |
andvikt@gmail.com
|
7cd68e68d92dd9238eb4990c45759f3df352627e
|
960c564f5084643786dfcf85e5f1004677c080e1
|
/models/__init__.py
|
cda91e5d3e129e4bac32881fd524e29eb78a7365
|
[] |
no_license
|
1204136013/web
|
cdc8fbe0039aff049213a379d0ca2bdd9166d662
|
570d242e8649dfbdca91995c8af9db99e77b1686
|
refs/heads/master
| 2020-08-22T21:10:54.929257
| 2019-10-21T05:40:05
| 2019-10-21T05:40:05
| 216,478,048
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,521
|
py
|
import json
import time
from utils import log
def save(data, path):
"""
    data is a dict or a list
    path is the path of the file to save to
"""
s = json.dumps(data, indent=2, ensure_ascii=False)
with open(path, 'w+', encoding='utf-8') as f:
# log('save', path, s, data)
f.write(s)
def load(path):
with open(path, 'r', encoding='utf-8') as f:
s = f.read()
# log('load', s)
return json.loads(s)
# Model is an ORM (object relational mapper)
# The benefit: you never deal with the storage details; you just use it
class Model(object):
    """
    Model is the base class for all models
    @classmethod is a common idiom
    For example
        user = User()
        user.db_path() returns User.txt
    """
    @classmethod
    def db_path(cls):
        """
        cls is the class; it refers to whichever class makes the call
        classmethod receives the class as a parameter (here named cls)
        so we can get the name of the class
        """
"""
classname = cls.__name__
path = 'data/{}.txt'.format(classname)
return path
@classmethod
def _new_from_dict(cls, d):
        # because the subclass's __init__ needs a form parameter,
        # we pass an empty dict here
        m = cls({})
        for k, v in d.items():
            # setattr is a special function
            # suppose k and v are 'name' and '422'
            # then this is equivalent to m.name = '422'
setattr(m, k, v)
return m
@classmethod
def new(cls, form, **kwargs):
m = cls(form)
        # additionally set attributes on m
        for k, v in kwargs.items():
            # a magic function that can set an object's attributes
setattr(m, k, v)
m.save()
return m
@classmethod
def all(cls):
"""
        the all method (functions inside a class are called methods) uses the load function to get all the models
"""
path = cls.db_path()
models = load(path)
        # a list comprehension builds a list containing all the instances
        # because the data here is loaded from the stored data file,
        # the special function _new_from_dict is used to initialize each one
ms = [cls._new_from_dict(m) for m in models]
return ms
@classmethod
def find_all(cls, **kwargs):
ms = []
log('kwargs, ', kwargs, type(kwargs))
k, v = '', ''
for key, value in kwargs.items():
k, v = key, value
all = cls.all()
for m in all:
            # getattr(m, k) would also work to read the value
if v == m.__dict__[k]:
ms.append(m)
return ms
@classmethod
def find_by(cls, **kwargs):
"""
        usage is as follows; kwargs is a dict with exactly one entry
u = User.find_by(username='422')
"""
log('kwargs, ', kwargs, type(kwargs))
k, v = '', ''
for key, value in kwargs.items():
k, v = key, value
all = cls.all()
for m in all:
            # getattr(m, k) would also work to read the value
if v == m.__dict__[k]:
return m
return None
@classmethod
def find(cls, id):
return cls.find_by(id=id)
@classmethod
def get(cls, id):
return cls.find_by(id=id)
@classmethod
def delete(cls, id):
models = cls.all()
index = -1
for i, e in enumerate(models):
if e.id == id:
index = i
break
        # check whether data with this id was found
        if index == -1:
            # not found
pass
else:
obj = models.pop(index)
l = [m.__dict__ for m in models]
path = cls.db_path()
save(l, path)
            # return the deleted element
return obj
def __repr__(self):
"""
        __repr__ is a magic method
        in short, it produces the string representation of the class
        e.g. print(u) is actually print(u.__repr__())
"""
classname = self.__class__.__name__
properties = ['{}: ({})'.format(k, v) for k, v in self.__dict__.items()]
s = '\n'.join(properties)
return '< {}\n{} \n>\n'.format(classname, s)
def json(self):
"""
        return the dict representation of the current model
"""
        # copy makes a new copy of the data and returns it
d = self.__dict__.copy()
return d
def save(self):
"""
        use the all method to read every model in the file into a list,
        append self, and save the list back to the file
"""
# log('debug save')
models = self.all()
# log('models', models)
        # if there is no id, this is a newly added element
if self.id is None:
            # set self.id
            # first check whether the list is empty
if len(models) == 0:
                # let the first element's id be 1 (0 would also work)
self.id = 1
else:
m = models[-1]
# log('m', m)
self.id = m.id + 1
models.append(self)
else:
# index = self.find(self.id)
index = -1
for i, m in enumerate(models):
if m.id == self.id:
index = i
break
log('debug', index)
models[index] = self
l = [m.__dict__ for m in models]
path = self.db_path()
save(l, path)
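# Usage sketch (illustrative only; `Note` and its fields are hypothetical and
# not part of this file): any subclass gains new/all/find_by/save for free,
# persisting to data/Note.txt.
#
# class Note(Model):
#     def __init__(self, form):
#         self.id = None
#         self.title = form.get('title', '')
#
# n = Note.new({'title': 'hello'})
# found = Note.find_by(title='hello')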
|
[
"1204136013@qq.com"
] |
1204136013@qq.com
|
d8f3167c34525042bfc9833d02d8d53673ff7978
|
79aa4b99a48bb16a907916ad63c902443420541a
|
/0019.py
|
e1253c96f9a4ecabdca22315f7ecd7d39377a98c
|
[] |
no_license
|
mach8686devops/leetcode-100
|
62dec66c719d7cfa120ca9505701df49d8d5b982
|
f90526c9b073165b86b933cdf7d1dc496e68f2c6
|
refs/heads/main
| 2023-04-11T06:28:15.059587
| 2021-04-13T12:11:54
| 2021-04-13T12:11:54
| 329,346,572
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 375
|
py
|
# Definition for singly-linked list.
class ListNode:
def __init__(self, x):
self.val = x
self.next = None
class Solution:
def removeNthFromEnd(self, head: ListNode, n: int) -> ListNode:
l = []
while head: l, head = l + [head], head.next
if n != len(l): l[-n - 1].next = l[-n].next
del l[-n]
return l and l[0]
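# Usage sketch (illustrative, not part of the original solution): build the
# list 1 -> 2 -> 3 and remove the 2nd node from the end, leaving 1 -> 3.
# a, b, c = ListNode(1), ListNode(2), ListNode(3)
# a.next, b.next = b, c
# head = Solution().removeNthFromEnd(a, 2)  # head is now 1 -> 3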
|
[
"zhangjohn202@gmail.com"
] |
zhangjohn202@gmail.com
|
e849c39bca34e650268bdd2541dc18a59e06f6a8
|
739d713d21528853c879ed404787272d8a7eca12
|
/launch/Loggings.py
|
f8b47faefd0c33cb3709aeeb2b2af2a1547afd4b
|
[] |
no_license
|
primus2019/Ressistant
|
1dc0e4a7981d3bf9b36561f7be82d8cd9e0d7177
|
cca4fe6b688caf82ec94a9c806d95c00553a79a5
|
refs/heads/master
| 2020-06-16T11:49:46.684401
| 2019-07-29T07:53:28
| 2019-07-29T07:53:28
| 195,562,134
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 919
|
py
|
import sys
sys.path.append('.')
import os
def ReadLog(listNo, ENList):
fileName = 'log/List_' + (str)(listNo) + '.log'
if not os.path.isdir('log'):
os.mkdir('log')
if not os.path.isfile(fileName):
with open(fileName, 'a+') as log:
log.write((str(0) + ' ') * len(ENList) +
'\n' + (str(0) + ' ') * len(ENList))
return open(fileName, 'a+')
def SumByEN(listNo, ENList, log):
log.seek(0, 0)
    correction = [int(record) for record in log.readline().split()]  # split() also handles multi-digit counts
    # print(correction)
    histories = [int(record) for record in log.readline().split()]
# print(histories)
print('Summary of list ' + (str)(listNo))
correction = [correction[i] / histories[i] for i in range(len(correction))]
for cnt, rate in enumerate(correction):
print((str)(ENList[cnt][1][1:]) + ': ' + '{:.1%}'.format(rate))
|
[
"linchengji@Sha-51433-Mba.local"
] |
linchengji@Sha-51433-Mba.local
|
4aeb5076c559a2d62968ac097e20666249770856
|
03f9b8bdea312636afb4df3737b55cb0cc4b21ff
|
/CanIWin.py
|
3d81f1f782f454808169ef87a967ad9bee42ec2d
|
[] |
no_license
|
ellinx/LC-python
|
f29dd17bbe15407ba0d06ad68386efdc9a343b56
|
9190d3d178f1733aa226973757ee7e045b7bab00
|
refs/heads/master
| 2021-06-01T15:21:24.379811
| 2020-10-29T04:37:07
| 2020-10-29T04:37:07
| 132,704,788
| 1
| 1
| null | 2019-05-15T03:26:11
| 2018-05-09T05:13:26
|
Python
|
UTF-8
|
Python
| false
| false
| 2,052
|
py
|
"""
In the "100 game," two players take turns adding, to a running total,
any integer from 1..10. The player who first causes the running total to reach or exceed 100 wins.
What if we change the game so that players cannot re-use integers?
For example, two players might take turns drawing from a common pool of
numbers of 1..15 without replacement until they reach a total >= 100.
Given an integer maxChoosableInteger and another integer desiredTotal,
determine if the first player to move can force a win, assuming both players play optimally.
You can always assume that maxChoosableInteger will not be larger than 20 and
desiredTotal will not be larger than 300.
Example
Input:
maxChoosableInteger = 10
desiredTotal = 11
Output:
false
Explanation:
No matter which integer the first player choose, the first player will lose.
The first player can choose an integer from 1 up to 10.
If the first player choose 1, the second player can only choose integers from 2 up to 10.
The second player will win by choosing 10 and get a total = 11, which is >= desiredTotal.
Same with other integers chosen by the first player, the second player will always win.
"""
class Solution:
def canIWin(self, maxChoosableInteger, desiredTotal):
"""
:type maxChoosableInteger: int
:type desiredTotal: int
:rtype: bool
"""
def dfs(nums, diff, mm):
if diff<=0:
return False
key = ",".join(nums)+" "+str(diff)
if key in mm:
return mm[key]
for i,num in enumerate(nums):
if not dfs(nums[:i]+nums[i+1:], diff-int(num), mm):
mm[key] = True
return True
mm[key] = False
return False
if desiredTotal<=1:
return True
if (1+maxChoosableInteger)*maxChoosableInteger//2<desiredTotal:
return False
nums = [ str(i) for i in range(1,maxChoosableInteger+1)]
mm = dict()
return dfs(nums, desiredTotal, mm)
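# Usage sketch (added for illustration, not part of the original solution):
# the docstring example above, run through the memoized DFS.
if __name__ == '__main__':
    print(Solution().canIWin(10, 11))  # False - the second player always wins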
|
[
"ellin.xll@gmail.com"
] |
ellin.xll@gmail.com
|
34da5761c6f73408870ae13fc19165a6137f3d16
|
310763ba0b9d4803fa727555913e0e4751177557
|
/gr-MyRadio/python/qa_time_calc.py
|
0b9fd0b33427b06e56c0bc149b0fded90dc40f19
|
[] |
no_license
|
cbodes/radio
|
b10fbbdef26fd32beb246fc6cada36f78824585f
|
92d9f11552d316ca8f04a3759705883121f0b87c
|
refs/heads/master
| 2020-04-08T15:32:21.632324
| 2018-12-06T23:49:55
| 2018-12-06T23:49:55
| 159,482,917
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,218
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2018 <+YOU OR YOUR COMPANY+>.
#
# This is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gr_unittest
from gnuradio import blocks
from time_calc import time_calc
class qa_time_calc (gr_unittest.TestCase):
def setUp (self):
self.tb = gr.top_block ()
def tearDown (self):
self.tb = None
def test_001_t (self):
# set up fg
self.tb.run ()
# check data
if __name__ == '__main__':
gr_unittest.run(qa_time_calc, "qa_time_calc.xml")
|
[
"bode4444@yahoo.com"
] |
bode4444@yahoo.com
|
1fbf4a18a1e4ea48b4004288139d3c955df1f4aa
|
503545b5e39fe8cc88b73618de270fffb23b0ecd
|
/valid_tlds.py
|
34913b7fe391c9ddacef5e1f855fd5b36ed07487
|
[] |
no_license
|
areusecure/python
|
ae3c6589fe20e2d7d4b8550a72a2186feb078f6d
|
a4f5482b16b9d5a6d844ccdc65af3f351998f425
|
refs/heads/master
| 2020-12-25T10:49:19.395240
| 2016-07-09T20:02:24
| 2016-07-09T20:02:24
| 62,964,556
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 364
|
py
|
import requests
# Retrieved the list from http://data.iana.org/TLD/tlds-alpha-by-domain.txt
tlds = ""
tlds += "tlds = ["
#with open('/Users/jj/Desktop/tlds.txt','r') as f:
f = requests.get("http://data.iana.org/TLD/tlds-alpha-by-domain.txt")
for line in f.iter_lines():
if "#" not in line:
tlds += ("," + "'" + line.strip() + "'")
tlds += "]"
print tlds.lower()
|
[
"jj@areusecure.se"
] |
jj@areusecure.se
|
7bff68f50a734ac45c488785772ff0fd493eb306
|
ef46d01cb207aaea773e95e32e871000be3a1dc1
|
/stack_implementation.py
|
8c573cbfbf952cf19e3567adc5e591e0b17bc86e
|
[] |
no_license
|
karthiknandiraju/Datastructures_Algorithms.py
|
d81c989c3cabdbc4e83d1337c65c540c151af3c3
|
3efb546bc29f22c83b031c1bda429fe0c50a4e5b
|
refs/heads/master
| 2020-03-24T04:33:17.491280
| 2018-07-31T15:46:38
| 2018-07-31T15:46:38
| 142,456,998
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 614
|
py
|
class Stack :
    # Initialize the stack
    def __init__ (self) :
        self.stack = []
    # Check if the stack is empty O(1)
    def isEmpty(self) :
        return self.stack == []
    # Push data onto the stack O(1)
    def push(self, data) :
        self.stack.append(data)
    # Pop data from the stack O(1)
    def pop(self) :
        data = self.stack[-1]
        del self.stack[-1]
        return data
    # Check the data item on the top of the stack O(1)
    def peek(self) :
        return self.stack[-1]
    # Check the size of the stack
    def size(self) :
        return len(self.stack)
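# Brief usage sketch (illustrative only, not part of the original file):
if __name__ == '__main__':
    s = Stack()
    s.push(1)
    s.push(2)
    print(s.peek())     # 2 - top item, not removed
    print(s.pop())      # 2 - removed
    print(s.size())     # 1
    print(s.isEmpty())  # False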
|
[
"karthiknandiraju@Karthiks-MacBook-Pro.local"
] |
karthiknandiraju@Karthiks-MacBook-Pro.local
|
42b3478a1ae10ed36c842324a2e9135bffe1ac66
|
12fcd999322c946b160bb43fc3b8b255a60cc48e
|
/demo.py
|
b878babe9f39eb2f85c697cac1a5a9efbea51a41
|
[] |
no_license
|
hongduhong/HTMLTestRunner-BSTestRunner
|
5c8cadfb7b906ff44dd4042558fe5e1cfc94a20a
|
ec61265a5a7182b182f1af3b1fe5743641d65dfd
|
refs/heads/master
| 2020-07-26T18:56:52.828744
| 2019-09-16T07:32:28
| 2019-09-16T07:32:28
| 208,738,697
| 4
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 237
|
py
|
import unittest
import BSTestRunner
class DemoTest(unittest.TestCase):
def test_pass(self):
self.assertTrue(True)
def test_fail(self):
self.assertTrue(False)
if __name__ == '__main__':
BSTestRunner.main()
|
[
"1918991791@qq.com"
] |
1918991791@qq.com
|
d2fc626eaf7afbb9d52a38a71bba35eb3a73bd85
|
9b5e8478e627dc0b71948ed1f78efa5854b60f27
|
/comment_service/comments/migrations/0001_initial.py
|
07a823a6a302cdb690925b7fe2b6c934142ba5e5
|
[] |
no_license
|
Nurinuri2020/Comment-Service
|
b1dfdccb0cc5474c3ce8ce606e3d2215aa541298
|
b22eea2e7d836496318a4e18d0bdf51e836f2955
|
refs/heads/master
| 2023-01-10T19:20:02.070985
| 2020-11-09T07:03:26
| 2020-11-09T07:03:26
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,106
|
py
|
# Generated by Django 3.1.1 on 2020-10-08 07:41
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
('posts', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('content', models.TextField(verbose_name='Контент')),
('timestamp', models.DateTimeField(auto_now_add=True, verbose_name='Время создания')),
('polymorphic_ctype', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_comments.comment_set+', to='contenttypes.contenttype')),
('reply', models.ForeignKey(null=True, on_delete=django.db.models.deletion.PROTECT, related_name='replies', to='comments.comment', verbose_name='Ответ на комментарий')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user_post', to=settings.AUTH_USER_MODEL, verbose_name='Пользователь')),
],
options={
'ordering': ['-timestamp'],
},
),
migrations.CreateModel(
name='PostComment',
fields=[
('comment_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='comments.comment')),
('record', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='posts.post')),
],
options={
'abstract': False,
'base_manager_name': 'objects',
},
bases=('comments.comment',),
),
]
|
[
"nuraimaratova50@gmail.com"
] |
nuraimaratova50@gmail.com
|
54ab201680f25d7abaa8c55274625c3ed557baa0
|
417bb035cb6cc60aa085c6ebbd62c37be75834cd
|
/backend/image/migrations/0001_initial.py
|
522ec51d3c1631d358e8b9954c36930633b00b0d
|
[] |
no_license
|
linkedweb/image_upload
|
c62caf6d1afb2a89cc18bc9b6bb14afb43191f06
|
08176c31b95b1e46cf84cc3feafa76fc8319b044
|
refs/heads/main
| 2023-06-08T13:58:45.420061
| 2021-06-25T19:08:46
| 2021-06-25T19:08:46
| 379,767,837
| 0
| 4
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 554
|
py
|
# Generated by Django 3.2.4 on 2021-06-24 00:10
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Image',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('image', models.ImageField(upload_to='images/')),
('alt', models.CharField(max_length=255)),
],
),
]
|
[
"brkicb1@gmail.com"
] |
brkicb1@gmail.com
|
8691eddb8151192864bb39bc919ebce0fa029a30
|
4abce61314c64510ccd337b2fc144e2edbe38a5d
|
/api/my_env/bin/easy_install
|
743c85b1b9218ab0eadef8d7d245c793db1df23b
|
[
"MIT"
] |
permissive
|
mogmog/tracker
|
ed442c36fb2130ba479da9b7b70355fde99a0f10
|
d2cff0a93330104b2138e384de301ee55c16c0ef
|
refs/heads/master
| 2020-03-21T08:12:20.854243
| 2018-07-05T11:40:48
| 2018-07-05T11:40:48
| 138,328,153
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 280
|
#!/c/Users/graha/OneDrive/Documents/Dev/HD/api/my_env/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
|
[
"mogmog@gmail.com"
] |
mogmog@gmail.com
|
|
0c0753a19e552559e99c3692b53e252d6e7c2f17
|
a394b1053f018ff8be63221c61682df03af4937b
|
/website/files/models/swift.py
|
d2f2d3e7d18987a01adea9d6d4c897ddbf6b2d2c
|
[
"Apache-2.0",
"LGPL-2.0-or-later",
"BSD-3-Clause",
"LicenseRef-scancode-free-unknown",
"MIT",
"AGPL-3.0-only",
"LicenseRef-scancode-unknown-license-reference",
"MPL-1.1",
"CPAL-1.0",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-warranty-disclaimer",
"BSD-2-Clause"
] |
permissive
|
RCOSDP/RDM-osf.io
|
81b11d9511f6248ec9bccb6c586b54a58429e1e7
|
5d632eb6d4566d7d31cd8d6b40d1bc93c60ddf5e
|
refs/heads/develop
| 2023-09-01T09:10:17.297444
| 2023-08-28T04:59:04
| 2023-08-28T04:59:04
| 123,298,542
| 12
| 24
|
Apache-2.0
| 2023-09-12T08:58:28
| 2018-02-28T14:46:05
|
Python
|
UTF-8
|
Python
| false
| false
| 302
|
py
|
from website.files.models.base import File, Folder, FileNode
__all__ = ('SwiftFile', 'SwiftFolder', 'SwiftFileNode')
class SwiftFileNode(FileNode):
provider = 'swift'
class SwiftFolder(SwiftFileNode, Folder):
pass
class SwiftFile(SwiftFileNode, File):
version_identifier = 'version'
|
[
"fujimoto@dhcp-200-009.soum.co.jp"
] |
fujimoto@dhcp-200-009.soum.co.jp
|
ca972e2602a74cbbbc8bb86ccc454360091054c8
|
51eeec4731f99b2d9a990d9f22a00d95e5165eb9
|
/temperature_celsius_or_farenheit.py
|
53689b2615cc1d1e700c18185ce1ae7248c53b98
|
[] |
no_license
|
LRG84/python_assignments
|
d54c7cac5313d5d8f60a2669c1ba5a251b70306d
|
64eb9e0faaef72a1b56fc141a4e546dea7866d1b
|
refs/heads/master
| 2020-02-26T13:56:23.822193
| 2016-07-18T15:10:48
| 2016-07-18T15:10:48
| 62,674,163
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,219
|
py
|
# pseudo code
# enter temperature
# if temp > 60 and < 90 print "warm"
# if temp > 90 print "hot"
# if temp < 60 print "chilly"
# give Celsius or Fahrenheit option
# convert Celsius to Fahrenheit: C * 9/5 + 32
# convert Fahrenheit to Celsius: (F - 32) * 5/9
temp = input("Are you entering data in Celsius or Fahrenheit? Type C or F: ")
if temp == "C" or temp == "c":
    cel_temp = int(input("Enter Celsius temperature:_ "))
    cel_temp_conversion = cel_temp * (9 / 5) + 32
    # cel_temp_conversion is now Fahrenheit
    print("Fahrenheit conversion is " + str(cel_temp_conversion))
    if cel_temp_conversion > 60 and cel_temp_conversion < 90:
        print("Warm")
    elif cel_temp_conversion >= 90:
        print("It's hot, turn on that AC - ASAP!")
    else:
        print("It's chilly, put some more wood on that fireplace!")
elif temp == "F" or temp == "f":
    far_temp = int(input("Enter Fahrenheit temperature:_ "))
    cel_conversion = (far_temp - 32) * 5 / 9
    print("Celsius conversion is " + str(cel_conversion))
    if far_temp > 60 and far_temp < 90:
        print("It's warm, let's have a picnic lunch.")
    elif far_temp >= 90:
        print("It's hot, turn on that AC - ASAP!")
    else:
        print("It's chilly, put some more wood on that fireplace!")
else:
    print("Invalid choice - please type C or F.")
|
[
"lisa.renee.goldberg@gmail.com"
] |
lisa.renee.goldberg@gmail.com
|
bfb74d3585be70cdde795e3d0cd1c184f9d9ae8b
|
fa1dea1fe0fdefb7dee1f475b5aced0798b15b80
|
/matmult-pure.py
|
53854ae4b8d4d28c526cb24efe5468381a956cd8
|
[] |
no_license
|
JeremyBYU/ROB550-WK1
|
319dcf5c31097632625574593324283ad2a81359
|
d0fd3cf72367821f5f4d84134f45217d76a087e5
|
refs/heads/master
| 2020-09-19T09:22:54.147750
| 2016-09-12T14:23:01
| 2016-09-12T14:23:01
| 67,654,854
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,072
|
py
|
#!/usr/bin/python
# Author: Jeremy Castagno
# Date: Sept 7 2016
# Summary:
# This program will multiply two matrices (A, B) together using pure Python
# The user will prompted for the sizes of A and B
# The matrices are defined in two files (A.csv, B.csv).
import sys
import common # Common functions module
# Read matrices from files
matrixA = common.readCSVFile('A.csv')
matrixB = common.readCSVFile('B.csv')
def multMatrices(matA, matB):
h, w = len(matA), len(matB[0])
matrix = [[0 for x in range(w)] for y in range(h)]
for i in range(h): # iterate over each row in MatrixA
for j in range(w): # iterate over each column in Matrix B
matrix[i][j] = multVector(matA[i], common.column(matB, j))
return matrix
def multVector(row, col):
vecSum = 0
for i in range(len(row)): # vectors should same length
vecSum += row[i] * col[i]
return vecSum
# Ensure that we have 2 arguments
if (len(sys.argv)) < 2:
    common.eprint('ERROR! Expected output filename as argument (e.g. C.csv)')
sys.exit(0)
outFile = sys.argv[1]
# Get User Input of the matrix file size
try:
aDim = common.getMatrixDimensions('A')
bDim = common.getMatrixDimensions('B')
except Exception, e:
    common.eprint('ERROR! The dimensions of the matrix must be positive and have the form n,m (e.g. 2,2)')
sys.exit(0)
# Check dimensions match and that the multiplication is valid
if not common.matchingDimensions(matrixA, aDim) or not common.matchingDimensions(matrixB, bDim) or not common.validMultiplication(aDim,bDim):
    common.eprint('ERROR! Dimensions do not match file or invalid multiplication')
sys.exit(0)
# At this point we have matrices and supplied dimensions that are valid
time = common.utime_now() # initial timestamp
matrixAB = multMatrices(matrixA, matrixB) # pure-Python matrix multiplication
elapsedTime = common.utime_now() - time # elapsed time from matrix mult
# common.print_matrix(matrixAB, aDim[0], bDim[1])
common.writeCSVFile(outFile, matrixAB)
print 'Elapsed time: ' + str(elapsedTime) + ' microseconds'
|
[
"jeremybyu@gmail.com"
] |
jeremybyu@gmail.com
|
c8dea1507ff8ae3d457df3085dc71c9c9935b51c
|
02faf850cb4282f419a75f27f1e32f7dbbc6abd2
|
/boards/tests/test_view_edit_post.py
|
8fce9709d2a08804dc0dc45d9dcd6b1d5e4b20a4
|
[
"MIT"
] |
permissive
|
Berreyol/SpiralMediaTest
|
c7036a9726ff17d82a96ad4a3da0cf17c2d11a99
|
55d3b4900586ce9cf01b7f943a401358e50ab670
|
refs/heads/master
| 2020-12-21T01:12:40.069049
| 2020-01-27T08:20:03
| 2020-01-27T08:20:03
| 236,258,762
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,293
|
py
|
from django.contrib.auth.models import User
from django.test import TestCase
from django.urls import resolve, reverse
from django.forms import ModelForm
from ..models import Board, Post, Topic
from ..views import PostUpdateView
class PostUpdateViewTestCase(TestCase):
'''
Base test case to be used in all `PostUpdateView` view tests
'''
def setUp(self):
self.board = Board.objects.create(name='Django', description='Django board.')
self.username = 'john'
self.password = '123'
user = User.objects.create_user(username=self.username, email='john@doe.com', password=self.password)
self.topic = Topic.objects.create(subject='Hello, world', board=self.board, starter=user)
self.post = Post.objects.create(message='Lorem ipsum dolor sit amet', topic=self.topic, created_by=user)
self.url = reverse('edit_post', kwargs={
'pk': self.board.pk,
'topic_pk': self.topic.pk,
'post_pk': self.post.pk
})
class LoginRequiredPostUpdateViewTests(PostUpdateViewTestCase):
def test_redirection(self):
'''
Test if only logged in users can edit the posts
'''
login_url = reverse('login')
response = self.client.get(self.url)
self.assertRedirects(response, '{login_url}?next={url}'.format(login_url=login_url, url=self.url))
class UnauthorizedPostUpdateViewTests(PostUpdateViewTestCase):
def setUp(self):
'''
Create a new user different from the one who posted
'''
super().setUp()
username = 'jane'
password = '321'
user = User.objects.create_user(username=username, email='jane@doe.com', password=password)
self.client.login(username=username, password=password)
self.response = self.client.get(self.url)
def test_status_code(self):
'''
A topic should be edited only by the owner.
Unauthorized users should get a 404 response (Page Not Found)
'''
self.assertEquals(self.response.status_code, 404)
class PostUpdateViewTests(PostUpdateViewTestCase):
def setUp(self):
super().setUp()
self.client.login(username=self.username, password=self.password)
self.response = self.client.get(self.url)
def test_status_code(self):
self.assertEquals(self.response.status_code, 200)
def test_view_class(self):
view = resolve('/boards/1/topics/1/posts/1/edit/')
self.assertEquals(view.func.view_class, PostUpdateView)
def test_csrf(self):
self.assertContains(self.response, 'csrfmiddlewaretoken')
def test_contains_form(self):
form = self.response.context.get('form')
self.assertIsInstance(form, ModelForm)
def test_form_inputs(self):
'''
The view must contain two inputs: csrf, message textarea
'''
self.assertContains(self.response, '<input', 1)
self.assertContains(self.response, '<textarea', 1)
class SuccessfulPostUpdateViewTests(PostUpdateViewTestCase):
def setUp(self):
super().setUp()
self.client.login(username=self.username, password=self.password)
self.response = self.client.post(self.url, {'message': 'edited message'})
def test_redirection(self):
'''
A valid form submission should redirect the user
'''
topic_posts_url = reverse('topic_posts', kwargs={'pk': self.board.pk, 'topic_pk': self.topic.pk})
self.assertRedirects(self.response, topic_posts_url)
def test_post_changed(self):
self.post.refresh_from_db()
self.assertEquals(self.post.message, 'edited message')
class InvalidPostUpdateViewTests(PostUpdateViewTestCase):
def setUp(self):
'''
Submit an empty dictionary to the `reply_topic` view
'''
super().setUp()
self.client.login(username=self.username, password=self.password)
self.response = self.client.post(self.url, {})
def test_status_code(self):
'''
An invalid form submission should return to the same page
'''
self.assertEquals(self.response.status_code, 200)
def test_form_errors(self):
form = self.response.context.get('form')
self.assertTrue(form.errors)
|
[
"alexberre97@gmail.com"
] |
alexberre97@gmail.com
|
8e2f6c94432692471d2dfc15299ebd9960577801
|
5f5ee86190e4f666f7fc58494c09caee013c1e6c
|
/main.py
|
13bfeaeddc63e52bd454afa431ea7124984c8edd
|
[] |
no_license
|
menghsin-2021/air
|
e3c98ed78f79e1af4f99b58468a8dffa40f33831
|
ffdd4f318edd745383ffe55b3d1542243331dfc6
|
refs/heads/main
| 2023-04-27T07:52:38.463727
| 2021-05-02T13:21:04
| 2021-05-02T13:21:04
| 363,655,842
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,865
|
py
|
from typing import List
# TGS2600
import serial
import time
# SDS011
import struct
from datetime import datetime
# db
from dbwrapper import DBWrapper
db = DBWrapper()
# TGS2600
COM_PORT = 'COM4'  # serial port name
BAUD_RATES = 9600  # transmission baud rate
# SDS011
PORT = 'COM3'
UNPACK_PAT = '<ccHHHcc'
ser_tgs = serial.Serial(COM_PORT, BAUD_RATES)  # TGS2600: initialize the serial port
ser_sds011 = serial.Serial(PORT, 9600, bytesize=8, parity='N', stopbits=1)  # SDS011: initialize the serial port
pm25_sec = []
pm10_sec = []
tvoc_tgs_sec = []
def mean(xs: List[float]) -> float:
return sum(xs) / len(xs)
while True:
    while ser_tgs.in_waiting:  # if serial data has arrived...
# SDS011
data = ser_sds011.read(10)
unpacked = struct.unpack(UNPACK_PAT, data)
ts = datetime.now()
pm25 = unpacked[2] / 10.0
pm10 = unpacked[3] / 10.0
pm25_sec.append(pm25)
pm10_sec.append(pm10)
# TGS2600
        data_raw = ser_tgs.readline()  # read one line
        if len(data_raw.decode().strip()) > 0:  # decode with the default UTF-8
tvoc_tgs = int(data_raw.decode().split('\r')[0])
tvoc_tgs_sec.append(tvoc_tgs)
else:
continue
print("{}: PM 2.5 = {}, PM 10 = {}, TVOC-TGS: {}".format(ts, pm25, pm10, tvoc_tgs))
db.insert_data(tvoc_tgs, pm25, pm10)
if len(pm25_sec) >= 60:
pm25_min = mean(pm25_sec)
pm10_min = mean(pm10_sec)
tvoc_tgs_min = mean(tvoc_tgs_sec)
print('minute data inserted')
db.insert_data_minute(tvoc_tgs_min, pm25_min, pm10_min)
pm25_sec.clear()
pm10_sec.clear()
tvoc_tgs_sec.clear()
else:
continue
time.sleep(1)
|
[
"lloivne4119@gmail.com"
] |
lloivne4119@gmail.com
|
3ad6b6e4e9387b3b9cc5855347a729c5a5d8be58
|
49caef1f93bd4673530e0a4c54c59028fb7b54e6
|
/npg7/web_printscreen_zb/controllers.py
|
f5da2a2deb809c478d12f54592606a9e2f4de36e
|
[] |
no_license
|
slevenhagen/addons-extra7.0
|
7622024198c0cf637f3f4767eb2b955532af3710
|
85611a86a0e1522fd88b5e6fbb217f425c4ae12d
|
refs/heads/master
| 2020-03-17T14:12:42.082766
| 2018-05-16T13:02:05
| 2018-05-16T13:02:05
| 133,663,372
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,142
|
py
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2013 ZestyBeanz Technologies Pvt. Ltd.
# (http://wwww.zbeanztech.com)
# contact@zbeanztech.com
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
try:
import json
except ImportError:
import simplejson as json
import web.http as openerpweb
from web.controllers.main import ExcelExport
from web.controllers.main import Export
import re
from cStringIO import StringIO
from lxml import etree
import trml2pdf
import time, os
import locale
import openerp.tools as tools
try:
import xlwt
except ImportError:
xlwt = None
class ZbExcelExport(ExcelExport):
_cp_path = '/web/export/zb_excel_export'
def from_data(self, fields, rows):
workbook = xlwt.Workbook()
worksheet = workbook.add_sheet('Sheet 1')
style = xlwt.easyxf('align: wrap yes')
font = xlwt.Font()
font.bold = True
style.font = font
ignore_index = []
count = 0
for i, fieldname in enumerate(fields):
if fieldname.get('header_data_id', False):
field_name = fieldname.get('header_name', '')
worksheet.write(0, i-count, field_name, style)
worksheet.col(i).width = 8000
else:
count += 1
ignore_index.append(i)
style = xlwt.easyxf('align: wrap yes')
bold_style = xlwt.easyxf('align: wrap yes')
font = xlwt.Font()
font.bold = True
bold_style.font = font
for row_index, row in enumerate(rows):
count = 0
for cell_index, cell_value in enumerate(row):
if cell_index not in ignore_index:
cell_style = style
if cell_value.get('bold', False):
cell_style = bold_style
cellvalue = cell_value.get('data', '')
if isinstance(cellvalue, basestring):
cellvalue = re.sub("\r", " ", cellvalue)
if cell_value.get('number', False) and cellvalue:
cellvalue = float(cellvalue)
if cellvalue is False: cellvalue = None
worksheet.write(row_index + 1, cell_index - count, cellvalue, cell_style)
else:
count += 1
fp = StringIO()
workbook.save(fp)
fp.seek(0)
data = fp.read()
fp.close()
return data
@openerpweb.httprequest
def index(self, req, data, token):
data = json.loads(data)
return req.make_response(
self.from_data(data.get('headers', []), data.get('rows', [])),
headers=[
('Content-Disposition', 'attachment; filename="%s"'
% data.get('model', 'Export.xls')),
('Content-Type', self.content_type)
],
cookies={'fileToken': token}
)
class ExportPdf(Export):
_cp_path = '/web/export/zb_pdf'
fmt = {
'tag': 'pdf',
'label': 'PDF',
'error': None
}
@property
def content_type(self):
return 'application/pdf'
def filename(self, base):
return base + '.pdf'
def from_data(self, uid, fields, rows, company_name):
pageSize=[210.0,297.0]
new_doc = etree.Element("report")
config = etree.SubElement(new_doc, 'config')
def _append_node(name, text):
n = etree.SubElement(config, name)
n.text = text
_append_node('date', time.strftime(str(locale.nl_langinfo(locale.D_FMT).replace('%y', '%Y'))))
_append_node('PageSize', '%.2fmm,%.2fmm' % tuple(pageSize))
_append_node('PageWidth', '%.2f' % (pageSize[0] * 2.8346,))
_append_node('PageHeight', '%.2f' %(pageSize[1] * 2.8346,))
_append_node('PageFormat', 'a4')
_append_node('header-date', time.strftime(str(locale.nl_langinfo(locale.D_FMT).replace('%y', '%Y'))))
_append_node('company', company_name)
l = []
t = 0
temp = []
tsum = []
skip_index = []
header = etree.SubElement(new_doc, 'header')
i = 0
for f in fields:
if f.get('header_data_id', False):
value = f.get('header_name', "")
field = etree.SubElement(header, 'field')
field.text = tools.ustr(value)
else:
skip_index.append(i)
i += 1
lines = etree.SubElement(new_doc, 'lines')
for row_lines in rows:
node_line = etree.SubElement(lines, 'row')
j = 0
for row in row_lines:
if not j in skip_index:
para = "yes"
tree = "no"
value = row.get('data', '')
if row.get('bold', False):
para = "group"
if row.get('number', False):
tree = "float"
col = etree.SubElement(node_line, 'col', para=para, tree=tree)
col.text = tools.ustr(value)
j += 1
transform = etree.XSLT(
etree.parse(os.path.join(tools.config['root_path'],
'addons/base/report/custom_new.xsl')))
rml = etree.tostring(transform(new_doc))
self.obj = trml2pdf.parseNode(rml, title='Printscreen')
return self.obj
class ZbPdfExport(ExportPdf):
_cp_path = '/web/export/zb_pdf_export'
@openerpweb.httprequest
def index(self, req, data, token):
data = json.loads(data)
uid = data.get('uid', False)
return req.make_response(self.from_data(uid, data.get('headers', []), data.get('rows', []),
data.get('company_name','')),
headers=[('Content-Disposition',
'attachment; filename=PDF Export'),
('Content-Type', self.content_type)],
cookies={'fileToken': int(token)})
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
[
"slevenhagen@novapointgroup.com"
] |
slevenhagen@novapointgroup.com
|
6f1348161cd3335d346bf92ea041c7be63205db0
|
bf1c7016681b5b7c6569a5f20d037e9c5db37854
|
/IV_semester/os/kernel/resources/task_at_hd.py
|
456ee60b8beec3db0c6ce99d2e9b41a2bd3194e1
|
[
"Beerware"
] |
permissive
|
dainiusjocas/labs
|
ae62d4672f4c43d27b4e9d23e126fa9fb9cf58a9
|
25aa0ae2032681dbaf0afd83f3d80bedddea6407
|
refs/heads/master
| 2021-01-23T23:03:10.271749
| 2011-12-13T15:38:48
| 2011-12-13T15:38:48
| 6,511,648
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 481
|
py
|
#!/usr/bin/env python
import sys
sys.path.append('.')
from kernel.resource import Resource
from kernel import kernel_data
from kernel.helpers.bin_sm import BinarySemaphore
class TaskAtHD(Resource):
''' '''
def __init__(self, opts = {}):
opts['name'] = 'task_at_hd_' + str(kernel_data.RID)
Resource.__init__(self, opts)
self.sem = BinarySemaphore()
self.write_info = opts['write_info']
def free(self):
return self.sem.s == 1
|
[
"hanniph@gmail.com"
] |
hanniph@gmail.com
|
18941734a91d4caff0a60bd79814784876847b1e
|
d9a8d175c149e7c148bf9d2e8dc39427e1726f0a
|
/hurl/rl_utils.py
|
08a72eac9a43862bc474a4a0b98a67a865534786
|
[
"LicenseRef-scancode-generic-cla",
"MIT"
] |
permissive
|
parkerhzb/HuRL
|
de9e1bd5700975926015ac63206373de36b82dc2
|
c9d0710ff6fd67b3cdbd46fc031cdbc3b3738cd2
|
refs/heads/main
| 2023-08-28T04:49:13.586899
| 2021-11-02T23:59:42
| 2021-11-02T23:59:42
| 424,127,074
| 1
| 0
|
MIT
| 2021-11-03T07:20:49
| 2021-11-03T07:20:48
| null |
UTF-8
|
Python
| false
| false
| 5,979
|
py
|
# Some helper functions for using garage
import numpy as np
import torch
from garage.torch.policies import GaussianMLPPolicy, TanhGaussianMLPPolicy, DeterministicMLPPolicy
from garage.torch.q_functions import ContinuousMLPQFunction
from garage.torch.value_functions import GaussianMLPValueFunction
from garage.sampler import FragmentWorker, LocalSampler, RaySampler
from garage.torch.optimizers import OptimizerWrapper
def get_mlp_policy(*,
env_spec,
stochastic=True,
clip_output=False,
hidden_sizes=(64, 64),
hidden_nonlinearity=torch.tanh,
min_std=np.exp(-20.),
max_std=np.exp(2.)):
if stochastic and clip_output:
return TanhGaussianMLPPolicy(
env_spec=env_spec,
hidden_sizes=hidden_sizes,
hidden_nonlinearity=hidden_nonlinearity,
output_nonlinearity=None,
min_std=min_std,
max_std=max_std)
if stochastic and not clip_output:
return GaussianMLPPolicy(env_spec,
hidden_sizes=hidden_sizes,
hidden_nonlinearity=hidden_nonlinearity,
output_nonlinearity=None)
if not stochastic:
return DeterministicMLPPolicy(
env_spec=env_spec,
hidden_sizes=hidden_sizes,
hidden_nonlinearity=hidden_nonlinearity,
            output_nonlinearity=torch.tanh if clip_output else None)  # was `use_tanh`, which is undefined in this scope
def get_mlp_value(form='Q',
*,
env_spec,
hidden_sizes=(256, 128),
hidden_nonlinearity=torch.tanh,
ensemble_size=1,
ensemble_mode='P'
):
if form=='Q':
return ContinuousMLPQFunction(
env_spec=env_spec,
hidden_sizes=hidden_sizes,
hidden_nonlinearity=hidden_nonlinearity,
output_nonlinearity=None)
if form=='V':
return GaussianMLPValueFunction(
env_spec=env_spec,
hidden_sizes=hidden_sizes,
hidden_nonlinearity=hidden_nonlinearity,
output_nonlinearity=None,
learn_std=False)
def collect_episode_batch(policy, *,
env,
batch_size,
n_workers=4):
"""Obtain one batch of episodes."""
sampler = get_sampler(policy, env=env, n_workers=n_workers)
agent_update = policy.get_param_values()
episodes = sampler.obtain_samples(0, batch_size, agent_update)
return episodes
from garage.sampler import Sampler
import copy
from garage._dtypes import EpisodeBatch
class BatchSampler(Sampler):
def __init__(self, episode_batch, randomize=True):
self.episode_batch = episode_batch
self.randomize = randomize
self._counter = 0
def obtain_samples(self, itr, num_samples, agent_update, env_update=None):
ns = self.episode_batch.lengths
if num_samples<np.sum(ns):
if self.randomize:
# Sample num_samples from episode_batch
ns = self.episode_batch.lengths
ind = np.random.permutation(len(ns))
cumsum_permuted_ns = np.cumsum(ns[ind])
itemindex = np.where(cumsum_permuted_ns>=num_samples)[0]
if len(itemindex)>0:
ld = self.episode_batch.to_list()
j_max = min(len(ld), itemindex[0]+1)
ld = [ld[i] for i in ind[:j_max].tolist()]
sampled_eb = EpisodeBatch.from_list(self.episode_batch.env_spec,ld)
else:
sampled_eb = None
else:
ns = self.episode_batch.lengths
ind = np.arange(len(ns))
cumsum_permuted_ns = np.cumsum(ns[ind])
counter = int(self._counter)
itemindex = np.where(cumsum_permuted_ns>=num_samples*(counter+1))[0]
itemindex0 = np.where(cumsum_permuted_ns>num_samples*counter)[0]
if len(itemindex)>0:
ld = self.episode_batch.to_list()
j_max = min(len(ld), itemindex[0]+1)
j_min = itemindex0[0]
ld = [ld[i] for i in ind[j_min:j_max].tolist()]
sampled_eb = EpisodeBatch.from_list(self.episode_batch.env_spec,ld)
self._counter+=1
else:
sampled_eb = None
else:
sampled_eb = self.episode_batch
return sampled_eb
def shutdown_worker(self):
pass
def get_sampler(policy,
*,
env,
n_workers=4,
**kwargs): # other kwargs for the sampler
if n_workers==1:
return LocalSampler(agents=policy,
envs=env,
max_episode_length=env.spec.max_episode_length,
worker_class=FragmentWorker,
**kwargs)
else:
return RaySampler(agents=policy,
envs=env,
max_episode_length=env.spec.max_episode_length,
n_workers=n_workers,
**kwargs)
from garage.replay_buffer import PathBuffer
def get_replay_buferr(capacity=int(1e6)):
return PathBuffer(capacity_in_transitions=capacity)
def get_optimizer(obj, lr,
*,
max_optimization_epochs=1,
minibatch_size=128):
return OptimizerWrapper((torch.optim.Adam, dict(lr=lr)),
obj,
max_optimization_epochs=max_optimization_epochs,
minibatch_size=minibatch_size)
|
[
"chinganc0@gmail.com"
] |
chinganc0@gmail.com
|
2b15a8c4ea4e67a5056db053d6d3882075fb2628
|
9caaacb9ff660cf24619ed65e6f18daaa51c5b51
|
/praticeProg/hashtable.py
|
da6690e2577c7e218c23e5b6231d73b1fce4008c
|
[] |
no_license
|
pavan097/Data-Structures
|
5bd1d9f04aa2cb782f80af55de9772e731a8bfa3
|
b4ba7d7f4e0e8bedc1fe45e3bb291d0368115b9e
|
refs/heads/master
| 2020-04-01T07:32:56.235685
| 2020-01-02T16:12:01
| 2020-01-02T16:12:01
| 152,993,741
| 0
| 0
| null | 2019-06-01T18:28:32
| 2018-10-14T16:18:36
|
Python
|
UTF-8
|
Python
| false
| false
| 470
|
py
|
# lst=[1,2,3]
class Hash():
    def __init__(self):
        self.size_of_table = 10
        # one bucket (list) per slot; collisions are chained into the bucket
        self.hash_table = [list() for _ in range(self.size_of_table)]
    def insert(self, lst):
        for value in lst:
            index = int(value % self.size_of_table)
            self.hash_table[index].append(value)
        print(self.hash_table)
    def search(self, value):
        index = int(value % self.size_of_table)
        if value in self.hash_table[index]:
            print('value is present in the hash table')
        else:
            print('value is not in hash table')
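# Usage sketch (illustrative, not part of the original file): insert a few
# values, then search for one that is present and one that is absent.
if __name__ == '__main__':
    h = Hash()
    h.insert([1, 2, 3, 12])  # 2 and 12 share bucket 2 (chaining)
    h.search(12)             # value is present in the hash table
    h.search(7)              # value is not in hash table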
|
[
"noreply@github.com"
] |
noreply@github.com
|
0d646cb06b16ff13f94f7a541d1365b439cf9847
|
db987bc58a44d6bfbc4d29407b2cb4e98746f0a7
|
/fun/_select.py
|
763c0a24b5c712e8156a6357d274490245f019d3
|
[
"Apache-2.0"
] |
permissive
|
jburgy/blog
|
605e9134f51041213f06427869188bc96c4f6a64
|
dfcb3c62f3cc52ad95c7d0319be690625d83346e
|
refs/heads/master
| 2023-08-24T21:42:42.253622
| 2023-08-21T11:35:57
| 2023-08-21T11:35:57
| 182,797,432
| 4
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,109
|
py
|
from collections import namedtuple
from itertools import groupby
from operator import attrgetter, itemgetter
Foo = namedtuple("Foo", "foo bar baz")
Baz = namedtuple("Baz", "qux quux")
Quux = namedtuple("Quux", "foo bar")
_prefix_sep = itemgetter(0, 1)
_remainder = itemgetter(2)
def nest(paths):
return {
prefix: nest(map(_remainder, group)) if sep else None
for (prefix, sep), group in groupby(
(path.partition(".") for path in sorted(paths)), key=_prefix_sep
)
}
def getattrs(obj, attrs):
return {
key: val
if subattrs is None
else [getattrs(elem, subattrs) for elem in val]
if isinstance(val, list)
else getattrs(val, subattrs)
for key, val, subattrs in (
zip(attrs, attrgetter(*attrs)(obj), attrs.values())
)
}
paths = [
"foo",
"bar",
"baz.qux",
"baz.quux.foo",
"baz.quux.bar",
]
obj = Foo(
foo=1,
bar=2,
baz=[
Baz(qux=3, quux=Quux(foo=4, bar=5)),
Baz(qux=6, quux=Quux(foo=7, bar=8)),
]
)
print(getattrs(obj, nest(paths)))
|
[
"jburgy@gmail.com"
] |
jburgy@gmail.com
|
e23b5f2e5fa60aa2d69a5bf41e58564aecd13058
|
683bd37c4f728a1c2330f7dcd2f2c7dfb01e29a6
|
/sudoku/ac3_basic.py
|
ed0ec57bf589d7e9ea1c5f0893e83756d5f3fde9
|
[] |
no_license
|
NunotabaS/assignments
|
10da1d9e2db897a7f79049436a45288bcbf09570
|
6a0d586f53b2a400bee9cc252598dafd1db2fd23
|
refs/heads/master
| 2021-01-01T15:41:07.441899
| 2013-10-27T05:06:49
| 2013-10-27T05:06:49
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,026
|
py
|
import collections
def generate_domain(puzzle):
"""Generate the domain maintaining binary constraints"""
domain = dict()
for x in xrange(0,9):
for y in xrange(0,9):
if puzzle[x][y] != 0:
domain[(x,y)] = [puzzle[x][y]]
else:
domain[(x,y)] = [1,2,3,4,5,6,7,8,9]
return domain
def neighbors(point):
"""A generator that creates neighbors of some point"""
for x in xrange(0,9):
for y in xrange(0,9):
if x == point[0] and y == point[1]:
continue;
if x == point[0] or y == point[1]:
yield (x,y) #in the same row or col
elif int(x/3) == int(point[0]/3) and int(y/3) == int(point[1]/3):
yield (x,y) #in the same block
def revise_domain(domains, p1, p2):
"""Revise the domains using the constraints"""
revised = False
domx = domains[p1]
domy = domains[p2]
for valx in domx:
if reduce(lambda red, y: red or y != valx, domy, False):
continue; #found valid y in domain
domains[p1].remove(valx)
revised = True
return revised
def ac3(domains):
"""Runner for AC3"""
arcs = collections.deque([(y,n) for y in domains for n in neighbors(y)])
while True:
if len(arcs) == 0:
break
p = arcs.pop()
if revise_domain(domains, p[0], p[1]) :
if len(domains[p[0]]) == 0:
return None
for n in neighbors(p[0]):
if n != p[1]:
arcs.appendleft((p[0], n)) # arc 1
arcs.appendleft((n, p[0])) # arc 2
return domains
def run(puzzle):
"""Wrapper to make everything work"""
domain = generate_domain(puzzle)
solved = puzzle[:]
simplified = ac3(domain)
for pp in simplified:
if len(simplified[pp]) == 1:
solved[pp[0]][pp[1]] = simplified[pp][0]
else:
solved[pp[0]][pp[1]] = simplified[pp]
return solved
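# Usage sketch (illustrative, not from the original file): `puzzle` is a 9x9
# list of ints with 0 for blanks; `run` returns each cell as an int where
# AC-3 reduced the domain to a single value, or as a candidate list otherwise.
# puzzle = [[0] * 9 for _ in range(9)]
# puzzle[0][0] = 5
# print(run(puzzle))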
|
[
"knh_ch_mainland@126.com"
] |
knh_ch_mainland@126.com
|
130ef8765129f5bc1fdc767ebd8d062140d8c3b7
|
26f0b99b71d736962dfc371989429402c4a73ab6
|
/migrations/versions/45d712a6312a_user_table_created.py
|
fd1d2e0aceeecd391a284e5b3ba86b69cfe0ca4f
|
[] |
no_license
|
Bowrna/microblog
|
d7a6e45a42a4e6cd016d2ef18abfbac1e35742ff
|
14f6eac549d180d798f1d516039122b08e222f2b
|
refs/heads/main
| 2023-08-02T23:57:40.781357
| 2021-10-03T12:45:45
| 2021-10-03T12:45:45
| 404,637,516
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,138
|
py
|
"""user table created
Revision ID: 45d712a6312a
Revises:
Create Date: 2021-09-09 14:51:31.641960
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '45d712a6312a'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('user',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('username', sa.String(length=64), nullable=True),
sa.Column('email', sa.String(length=120), nullable=True),
sa.Column('password_hash', sa.String(length=128), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_user_email'), 'user', ['email'], unique=True)
op.create_index(op.f('ix_user_username'), 'user', ['username'], unique=True)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_user_username'), table_name='user')
op.drop_index(op.f('ix_user_email'), table_name='user')
op.drop_table('user')
# ### end Alembic commands ###
|
[
"mailbowrna@gmail.com"
] |
mailbowrna@gmail.com
|
b7e5c8ca9b89d3f15ba5ce45bd9f238361b9aea3
|
05eb7c2795da0059819ae4f964d0c71764010cf1
|
/utils.py
|
dcd9f40c7a7a25a24070e45ff1edf5b3fd9af0f9
|
[] |
no_license
|
galyaminavas/FormalLangMatrixMult
|
095a7fc96cc760f01b5778100641a66550d331b3
|
a3ab5f1ab554105bad9168975d09b9b13442b3d8
|
refs/heads/master
| 2020-04-17T05:40:28.274817
| 2019-03-03T21:32:55
| 2019-03-03T21:32:55
| 166,291,447
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,465
|
py
|
import sys
class Grammar:
def __init__(self):
self.terms = []
self.nonterms = []
self.rules = []
def add_terms(self, terms):
for t in terms:
self.terms.append(t)
def add_nonterms(self, nonterms):
for n in nonterms:
self.nonterms.append(n)
def add_rules(self, rules):
for r in rules:
self.rules.append(r)
def add_rule_string(self, rule_str):
components = rule_str.split(' ')
if len(components) == 2:
if components[0] not in self.nonterms:
self.nonterms.append(components[0])
if components[1] not in self.terms:
self.terms.append(components[1])
self.rules.append(rule_str)
else:
if len(components) == 3:
if components[0] not in self.nonterms:
self.nonterms.append(components[0])
if components[1] not in self.nonterms:
self.nonterms.append(components[1])
if components[2] not in self.nonterms:
self.nonterms.append(components[2])
self.rules.append(rule_str)
def parse_to_grammar_from_path(grammar_path):
try:
f = open(grammar_path, 'r')
except FileNotFoundError as err:
print(err)
sys.exit(1)
grammar = Grammar()
lines = f.read().splitlines()
for line in lines:
grammar.add_rule_string(line)
f.close()
return grammar
class Graph:
def __init__(self):
self.vertices = []
self.edges = []
def add_vertices(self, vertices):
for v in vertices:
self.vertices.append(v)
def add_edges(self, edges):
for e in edges:
self.edges.append(e)
def add_edge_string(self, edge_str):
components = edge_str.split(' ')
if len(components) == 3:
if components[0] not in self.vertices:
self.vertices.append(components[0])
if components[2] not in self.vertices:
self.vertices.append(components[2])
self.edges.append(edge_str)
def parse_to_graph_from_path(graph_path):
try:
f = open(graph_path, 'r')
except FileNotFoundError as err:
print(err)
sys.exit(1)
graph = Graph()
lines = f.read().splitlines()
for line in lines:
graph.add_edge_string(line)
f.close()
return graph
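# Usage sketch (illustrative; the file names are hypothetical): grammar rules
# are lines like "A a" (terminal) or "S A B" (two nonterminals); graph edges
# are lines like "0 a 1".
# grammar = parse_to_grammar_from_path('grammar.txt')
# graph = parse_to_graph_from_path('graph.txt')
# print(grammar.rules, graph.edges)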
|
[
"galyaminavas@gmail.com"
] |
galyaminavas@gmail.com
|
11d3b382c3b1106935701e438e3452320d470392
|
8c41e9196b891920f3045498a4f0c9815a95e312
|
/api/src/base.py
|
962ee2f506e8a5390f524065ba62e3c14ce886b6
|
[] |
no_license
|
JustChasti/markets_selenium_parser
|
f271389ba7f1c1df70dcc5dada6a6a439bee18e0
|
3a1434be792013008e7d329bdd37302d04fbbd83
|
refs/heads/master
| 2023-07-05T06:14:39.378279
| 2021-08-20T16:46:20
| 2021-08-20T16:46:20
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,475
|
py
|
from time import sleep
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.engine.url import URL
import psycopg2
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
from loguru import logger
from config import base_user, base_pass, base_name, base_host, base_port
Base = declarative_base()
class Link(Base):
__tablename__ = 'links'
id = Column(Integer, primary_key=True)
url = Column(String(512), nullable=False)
name = Column(String(256), nullable=True)
price = Column(Integer, nullable=True)
articul = Column(Integer, nullable=True)
col_otz = Column(Integer, nullable=True)
class Review(Base):
__tablename__ = 'rewiews'
id = Column(Integer, primary_key=True)
product_id = Column(Integer, ForeignKey(Link.id), nullable=False)
user = Column(String(64), nullable=False)
mark = Column(Integer, nullable=False)
comment = Column(String(10000), nullable=False)
class Numeric(Base):
__tablename__ = 'numericpar'
id = Column(Integer, primary_key=True)
product_id = Column(Integer, ForeignKey(Link.id), nullable=False)
price = Column(Integer, nullable=True)
articul = Column(Integer, nullable=True)
col_otz = Column(Integer, nullable=True)
data = {
'drivername': 'postgresql+psycopg2',
'host': base_host,
'port': base_port,
'username': base_user,
'password': base_pass,
'database': base_name,
}
for i in range(3):
    try:
        engine = create_engine(URL(**data))
        engine.connect()
        Base.metadata.create_all(engine)
        break  # connected successfully - stop retrying
    except Exception as e:
        logger.warning('Cannot connect to the database. Creating it...')
        try:
            connection = psycopg2.connect(user=base_user, password=base_pass)
            connection.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
            cursor = connection.cursor()
            cursor.execute("create database " + base_name)
            cursor.close()
            connection.close()
            engine = create_engine(URL(**data))
            engine.connect()
            Base.metadata.create_all(engine)
            Base.metadata.bind = engine
            break  # database created and connected
        except Exception as e:
            logger.exception("Postgres connection error")
            sleep(5)
Session = sessionmaker(bind=engine)
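# Usage sketch (illustrative, not part of the original file): open a session
# and read back the stored links.
# session = Session()
# links = session.query(Link).all()
# session.close()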
|
[
"chastytim@mail.ru"
] |
chastytim@mail.ru
|
f33932b9a9946978806f121ae5d679489ba405d3
|
5f2d9ca4c7fa970ca5e51c223baa1379dbc295e6
|
/kalmanFilter1D.py
|
93ef701d8ac9149e1e339c992ca6594aaa117325
|
[] |
no_license
|
davincy33/Kalman-Filter-Python
|
039d8e30f9368d058c9a56f4873fc7f4fc72ec70
|
8bc21cadbc6fe9d010e8ddc334c6554f421b23ed
|
refs/heads/master
| 2022-08-24T22:54:24.902733
| 2020-05-20T17:32:57
| 2020-05-20T17:32:57
| 265,639,466
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 10,618
|
py
|
#!/usr/bin/python
import numpy as np
import matplotlib.pyplot as plt
import math
# State Matrix
stateX = [];
class KalmanFilter:
def __init__(self, initStateX, delT):
# Initial Conditions
self.deltaT = delT; # Delta T; Unit: Seconds
self.x_2dot = 0; # Acceleration in X direction; Unit: meters/sec
# Observation Errors/ Possible errors with measurement
self.deltaX = 2; # Unit: meters
self.deltaXdot = 1; # Unit: meters/sec
# Initial Process Errors in Process covariance matrix
self.deltaPx = 2; # Unit: meters
self.deltaPxdot = 1; # Unit: meters/sec
## Initial State
self.stateX = initStateX;# State Matrix
self.obsState = 0; # Observed State
self.u = self.x_2dot; # Control Variable Matrix
self.w = 0; # Predicted State Noise Matrix
self.noiseFactor = 0.1; # Noise Factor for Process Noise Covariance Matrix
self.alpha = 0.3; # Forgetting Factor to adaptively estimate R & Q; Range 0 to 1
self.Z = 0; # Measurement Noise
self.stateXp = 0; # Predicted State
self.Pkp = 0; # Predicted Process Covariance Matrix
# Matrix A; State Transition Matrix
self.matrixA = np.matrix([
[1, self.deltaT],
[0, 1]]);
# Matrix B
self.matrixB = np.matrix([ (0.5*math.pow(self.deltaT, 2)), self.deltaT ]).T;
# Matrix C
self.matrixC = np.matrix([[1, 0], [0, 1]]);
# Matrix H
self.matrixH = np.matrix([[1, 0], [0, 1]]);
# Identity Matrix
self.matrixI = np.matrix([[1, 0], [0, 1]]);
# Get Sensor Noise Covariance Matrix
self.R = self.getSensorNoiseCovarMat(self.deltaX, self.deltaXdot);
# Get Initial Process Covariance Matrix
self.Pk = self.getInitProcCovarMat(self.deltaPx, self.deltaPxdot);
# Get Process Noise Covariance Matrix
#self.Q = self.getProcNoiseCovarMat(self.noiseFactor);
self.Q = self.matrixI
### Initial Prediction ###
## Prediction ##
self.stateXp = self.getPredictedState(self.stateX);
self.Pkp = self.predictProcCovarMat(self.Pk);
## Compute Kalman Gain ##
self.kg = self.getKalmanGain(self.Pkp);
def getProcNoiseCovarMat(self, noiseFactor):
# Assuming continuous-time white noise
        # use 1.0/3 and 1.0/2 so Python 2 integer division does not truncate to zero
        procNoiseCovarMat = noiseFactor*(np.matrix([[ ((1.0/3)*math.pow(self.deltaT, 3)), ((1.0/2)*math.pow(self.deltaT, 2)) ],
                                                    [ ((1.0/2)*math.pow(self.deltaT, 2)), self.deltaT ]]));
return procNoiseCovarMat;
def getInitProcCovarMat(self, deltaPx, deltaPxdot):
procCovarMat = np.matrix([
[math.pow(deltaPx, 2), 0],
[0, math.pow(deltaPxdot, 2)]
]);
return procCovarMat;
def getSensorNoiseCovarMat(self, deltaX, deltaXdot):
R = np.matrix([
[math.pow(deltaX, 2), 0],
[0, math.pow(deltaXdot, 2)]
]);
return R;
def getInnovation(self, mesStateX, predictedStateX):
innovation = mesStateX - self.matrixH*predictedStateX;
return innovation;
def updateEstimationQ(self, mesStateX, predictedStateX, kalmanGain):
innovation = self.getInnovation(mesStateX, predictedStateX);
self.Q = (self.alpha*self.Q) + ((1 - self.alpha)*(kalmanGain*innovation*np.transpose(innovation)*np.transpose(kalmanGain)));
def getResidual(self, mesStateX, kalmanStateX):
residual = mesStateX - self.matrixH*kalmanStateX;
return residual;
def updateEstimationR(self, mesStateX, kalmanStateX, procCovarMat):
residual = self.getResidual(mesStateX, kalmanStateX);
self.R = (self.alpha*self.R) + (1 - self.alpha)*((residual*np.transpose(residual) + self.matrixH*procCovarMat*np.transpose(self.matrixH)));
def getPredictedState(self, prevStateX):
stateXp = (self.matrixA*prevStateX) + (self.matrixB*self.u) + self.w;
return stateXp;
def predictProcCovarMat(self, prevCovarMat):
procCovarMat = self.matrixA*prevCovarMat*np.transpose(self.matrixA) + self.Q;
# Set Non-Diagonal elements to zero
for i in range(0, procCovarMat.shape[0]):
for j in range(0, procCovarMat.shape[1]):
if i != j:
procCovarMat[i, j] = 0;
return procCovarMat;
def getKalmanGain(self, procCovarMat):
num = procCovarMat*np.transpose(self.matrixH);
den = (self.matrixH*procCovarMat*np.transpose(self.matrixH)) + self.R;
# out= guarantees masked-out entries are zero rather than uninitialized memory
kg = np.true_divide(num, den, out=np.zeros_like(num), where=(num!=0) & (den!=0));
return kg;
def getObservedState(self, measuredState):
obsState = (self.matrixC*measuredState) + self.Z;
return obsState;
def getCorrectedState(self, predictedState, kg, obsState):
newState = predictedState + kg*(obsState - self.matrixH*predictedState);
return newState;
def updateProcCovarMat(self, kg, predictedProcCovarMat):
procCovarMat = (self.matrixI - kg*self.matrixH)*self.Pkp;
return procCovarMat;
def getKalmanState(self, measuredStateMat):
## Get observed state from the Measurement System ##
self.obsState = self.getObservedState(measuredStateMat);
## Compute Kalman Gain ##
self.kg = self.getKalmanGain(self.Pkp);
## Correction / Update ##
self.stateX = self.getCorrectedState(self.stateXp, self.kg, self.obsState);
self.Pk = self.updateProcCovarMat(self.kg, self.Pkp);
self.updateEstimationQ(measuredStateMat, self.stateX, self.kg);
## Residual Based Adaptive Estimation of R ##
self.updateEstimationR(measuredStateMat, self.stateX, self.Pkp);
## Prediction of the next state ##
self.stateXp = self.getPredictedState(self.stateX);
self.Pkp = self.predictProcCovarMat(self.Pk);
print "Predicted State :\n", self.stateXp;
print "\nPredicted Process Covariance Matrix :\n", self.Pkp;
print "\nKalman Gain :\n", self.kg;
print "\nObserved State :\n", self.obsState;
print "\nKalman Corrected State :\n", self.stateX;
print "\nProcess Covariance Matrix :\n", self.Pk;
return [self.stateX, self.stateXp];
if __name__ == '__main__':
# Generate Test Data
numTestData = 1000;
positionX = 0;
velocityX = 20;
accelX = 0.1;
deltaT = 1;
idealStateX = np.matrix(np.empty([numTestData, 2]));
testStateX = np.matrix(np.empty([numTestData, 2]));
for i in range(0, numTestData):
velocityX = velocityX + deltaT*accelX;
positionX = positionX + deltaT*velocityX + (1/2)*accelX*math.pow(deltaT, 2);
idealStateX[i, 1] = velocityX;
idealStateX[i, 0] = positionX;
# Position dataset
testStateX[i, 0] = idealStateX[i, 0] + np.random.normal(0, 50, 1); #Adding Random Noise
# Velocity dataset
testStateX[i, 1] = idealStateX[i, 1] + np.random.normal(0, 3, 1); #Adding Random Noise
# Initial State Matrix
initStateX = testStateX[0, :].T; # Convert to a column matrix
prevMesStateMat = np.empty([2, 1]);
kf = KalmanFilter(initStateX, deltaT);
stateXMat = np.empty([(testStateX.shape[0] - 1), testStateX.shape[1]]);
predictedStateXMat = np.empty([(testStateX.shape[0]), testStateX.shape[1]]);
# Initial prediction of next state
predictedStateXMat[0] = kf.stateXp.T;
for i in range(1, testStateX.shape[0]):
print "\n#################################";
print "#\t Iteration %d \t\t#" %i;
print "#################################\n";
mesStateMat = testStateX[i, :].T;
[currKalmanState, nextPredictedState] = kf.getKalmanState(mesStateMat);
prevMesStateMat = mesStateMat;
predictedStateXMat[i] = nextPredictedState.T;
stateXMat[i - 1] = currKalmanState.T;
stateXMat = np.matrix(stateXMat);
positionTrackingErr = np.empty([stateXMat.shape[0], 1]);
velocityTrackingErr = np.empty([stateXMat.shape[0], 1]);
for i in range (0, stateXMat.shape[0]):
positionTrackingErr[i] = np.subtract(idealStateX[i+1, 0], stateXMat[i, 0]);
velocityTrackingErr[i] = np.subtract(idealStateX[i+1, 1], stateXMat[i, 1]);
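# Added sketch (not part of the original script): a single RMS number per
# state component summarizes tracking quality, using the arrays built above.
print("Position RMS error: %.3f m" % float(np.sqrt(np.mean(np.square(positionTrackingErr)))));
print("Velocity RMS error: %.3f m/s" % float(np.sqrt(np.mean(np.square(velocityTrackingErr)))));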
plt.subplot(3,2,1);
plt.plot(range(0, len(positionTrackingErr)), positionTrackingErr, 'r-', linewidth=2);
plt.xlabel("Index");
plt.ylabel("Position Error (meters)");
plt.title("Position Tracking Error");
plt.grid(color='r', linestyle='--', linewidth=0.5);
plt.subplot(3,2,2);
plt.plot(range(0, len(velocityTrackingErr)), velocityTrackingErr, 'r-', linewidth=2);
plt.xlabel("Index");
plt.ylabel("Velocity Error (meters)");
plt.title("Velocity Tracking Error");
plt.grid(color='r', linestyle='--', linewidth=0.5);
plt.subplot(3,1,2);
# plt.hold() was removed in modern Matplotlib; repeated plot() calls overlay by default
plt.plot(range(0, testStateX.shape[0]), testStateX[:, 0], 'r-', linewidth=2, label="Observed State");
plt.plot(range(1, predictedStateXMat.shape[0] + 1), predictedStateXMat[:, 0], 'g--', linewidth=2, label="Predicted State");
plt.plot(range(1, stateXMat.shape[0] + 1), stateXMat[:, 0], 'b-', linewidth=2, label="Kalman Filtered State");
plt.xlabel("Index");
plt.ylabel("Position(X direction)");
plt.title("Plots of Observed, Predicted, and Kalman filtered State (Position)");
plt.legend();
plt.grid(color='r', linestyle='--', linewidth=0.5);
plt.subplot(3,1,3);
plt.plot(range(0, testStateX.shape[0]), testStateX[:, 1], 'r-', linewidth=2, label="Observed State");
plt.plot(range(1, predictedStateXMat.shape[0] + 1), predictedStateXMat[:, 1], 'g--', linewidth=2, label="Predicted State");
plt.plot(range(1, stateXMat.shape[0] + 1), stateXMat[:, 1], 'b-', linewidth=2, label="Kalman Filtered State");
plt.xlabel("Index");
plt.ylabel("Velocity(X direction)");
plt.title("Plots of Observed, Predicted, and Kalman filtered State (Velocity)");
plt.legend();
plt.grid(color='r', linestyle='--', linewidth=0.5);
plt.show();
|
[
"vincent.tracsens@gmail.com"
] |
vincent.tracsens@gmail.com
|
19edfb5d48f61044424ab6c2a3dd832edbd0612a
|
daae0cf103b6c9f26065f7546a7dc79281fc0bde
|
/16/3.py
|
297df94c2bd4b2df86a41fbc8e26d6952e1e12d4
|
[] |
no_license
|
oc0de/pyEPI
|
97a5d4db91d5459f407c9d414fc999de56885124
|
2b7cedecdd5b8665ab4f1ca4762a3fd5adcc9864
|
refs/heads/master
| 2021-09-05T09:32:53.646441
| 2018-01-26T03:58:10
| 2018-01-26T03:58:10
| 119,003,665
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 371
|
py
|
def number_of_ways(n, m):
cache = {}
def helper(x,y):
if x == y == 0: return 1
if (x,y) not in cache:
ways_top = 0 if x == 0 else helper(x-1, y)
ways_left = 0 if y == 0 else helper(x, y-1)
cache[(x,y)] = ways_top + ways_left
return cache[(x,y)]
return helper(n-1, m-1)
print(number_of_ways(5, 5))
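# Cross-check (added sketch, Python 3.8+): the number of monotone lattice
# paths in an n x m grid has the closed form C(n+m-2, n-1).
from math import comb
assert number_of_ways(5, 5) == comb(8, 4)  # both equal 70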
|
[
"valizade@mail.gvsu.edu"
] |
valizade@mail.gvsu.edu
|
ed1eca89d11a78b66ce87c266840ae4b34b7ed9b
|
d32a9eeff0ec270002f27e8c7b15b4ab8f2b5911
|
/backend/backend/api/logger.py
|
79c5abd955ac17a0d7550e0140b46b33a555ec11
|
[
"MIT"
] |
permissive
|
DOUGLASMENDES/alpharithmic-trading
|
538a8568432ea16a4b2d6e0b722c80f14712a6c7
|
41f3da302b97f0d67bb5e1af41800d92d3319e72
|
refs/heads/master
| 2023-07-07T07:50:47.729258
| 2019-10-21T02:54:48
| 2019-10-21T02:54:48
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 399
|
py
|
from websocket import create_connection
class Logger:
msg_placeholder = "{\"message\": \"%s\"}"
def __init__(self, channel):
self.ws = create_connection("ws://alpharithmic.herokuapp.com/ws/logs/%s/" % channel)
def __del__(self):
self.ws.close()
def log(self, msg):
self.ws.send(Logger.msg_placeholder % msg)
def close(self):
self.ws.close()
|
[
"zhangjack27@gmail.com"
] |
zhangjack27@gmail.com
|
43bdd8748422792459050ea42d50e3bfced01a2b
|
6a220b93215b1ddde7e04622da9a7ae20cccd1c3
|
/libs/csv_reader.py
|
046884685b64fb8257fbeacf82a1acee326ee328
|
[] |
no_license
|
open-sourcepad/python-testing-framework
|
527e36efba25c43eed8fe6d65774b686ead9b009
|
6043d95a85c93201eff46cccdbc8cd1a4cb36079
|
refs/heads/master
| 2022-07-10T13:29:31.673528
| 2020-05-26T10:13:28
| 2020-05-26T10:13:28
| 250,014,497
| 0
| 0
| null | 2022-06-27T16:11:17
| 2020-03-25T15:20:25
|
Python
|
UTF-8
|
Python
| false
| false
| 588
|
py
|
import csv
from pathlib import Path
from config import Config
from .logger import Logger
class CsvReader:
def __init__(self, **kwargs):
self.config = Config()
pwd = self.config.get('root_url')
self.file = f"{ pwd }/assets/{kwargs.get('file')}.csv"
def read(self):
result = []
with open(self.file) as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
result.append(dict(row))
Logger(file='test', message=result, type='info').log()
return result
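# Example usage (added sketch; the file name is hypothetical and expects
# assets/users.csv under the configured root_url):
# reader = CsvReader(file='users')
# rows = reader.read()  # list of dicts, one per CSV row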
|
[
"djy@sourcepad.com"
] |
djy@sourcepad.com
|
b166238bfdde8ebbd7f891ca2753c9ec07746959
|
85ea5a6adb2806ce556105916339bad6ae94a1ed
|
/musicRooms/wsgi.py
|
a4555e2c9d5c406e02964e77e41c36d26ec28091
|
[] |
no_license
|
MrDahaniel/musicRooms
|
34fa864495dd5c63218d59c3a9a4f38981402267
|
2970099fe63f00a8afa9d9e67fdfbc4dfa063a2f
|
refs/heads/master
| 2023-06-06T08:43:19.261925
| 2021-06-30T01:53:42
| 2021-06-30T01:53:42
| 381,430,130
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 397
|
py
|
"""
WSGI config for musicRooms project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'musicRooms.settings')
application = get_wsgi_application()
|
[
"danieldavid2001@gmail.com"
] |
danieldavid2001@gmail.com
|
71bf5967f752f1802458b6fe7e2ea379a85e712a
|
4c2924bce6058ecbddc3781c9b946da8e3f90f0f
|
/omegawatt.py
|
f72d48c93841d30a36b4c4139412ce000021dcb0
|
[] |
no_license
|
Kayoku/G5K-energy
|
f7887ec2d90b98acdb5d85f86447d15e88bd2c1c
|
5df70a73c8b7dd27e1e555aa217d0c3600b39463
|
refs/heads/master
| 2020-05-02T12:44:20.691774
| 2019-04-16T11:10:20
| 2019-04-16T11:10:20
| 177,965,990
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,699
|
py
|
"""
Specific module for Lyon/Grenoble G5K wattmetter (OmegaWatt)
Source from: https://gitlab.inria.fr/delamare/wattmetre-read/raw/master/tools/getwatt.py
"""
def getwatt(node, from_ts, to_ts):
"""
Get power values from Grid'5000 Lyon Wattmetre (requires Execo)
:param node: Node name
:param from_ts: Time from which metric is collected, as an integer Unix timestamp
:param from_ts: Time until which metric is collected, as an integer Unix timestamp
:return: A list of (timestamp, value) tuples.
"""
import datetime
import requests
import gzip
import time
from execo_g5k import get_host_attributes
watt = []
node_wattmetre = get_host_attributes(node)['sensors']['power']['via']['pdu'][0]
for ts in range(int(from_ts), int(to_ts)+3600, 3600):
suffix = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%dT%H')
if suffix != datetime.datetime.fromtimestamp(time.time()).strftime('%Y-%m-%dT%H'):
suffix += ".gz"
data=requests.get("http://wattmetre.lyon.grid5000.fr/data/"+node_wattmetre['uid']+"-log/power.csv."+suffix).content
if suffix.endswith(".gz"):
data = gzip.decompress(data)
for l in str(data).split('\\n')[1:-1]:
l = l.split(',')
if l[3] == 'OK' and l[4+node_wattmetre['port']] != '':
ts, value = (float(l[2]), float(l[4+node_wattmetre['port']]))
if from_ts <= ts and ts <= to_ts:
watt.append((ts, value))
if not suffix.endswith(".gz"):
break
return watt
# node = "orion-4"
# from_ts = 1553595857
# to_ts = 1553596857
# print(getwatt(node, from_ts, to_ts))
|
[
"jordan.bouchoucha@protonmail.com"
] |
jordan.bouchoucha@protonmail.com
|
860b005825acb032d0f94414142accddd28459a9
|
e98becd113452518f46252f0afe0df4724cd2120
|
/products/models.py
|
49eeb5d67b48c9072127b56543be72a71ecbad40
|
[] |
no_license
|
Just-Drue/producthunt-django
|
fb1105a28ca4361c886740b54fd9d9d20a0af5f7
|
42dd5481e67d713a9a32735bfdf24aba4d6427ce
|
refs/heads/master
| 2020-06-22T14:42:34.687593
| 2019-07-23T13:25:54
| 2019-07-23T13:25:54
| 197,731,485
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 639
|
py
|
from django.db import models
from django.contrib.auth.models import User
class Product(models.Model):
title = models.CharField(max_length=255)
url = models.CharField(max_length=255)
pub_date = models.DateTimeField(auto_now=True)
votes_total = models.IntegerField(default=0)
image = models.ImageField(upload_to='images/')
icon = models.ImageField(upload_to='images/')
body = models.TextField()
hunter = models.ForeignKey(User, on_delete=models.CASCADE)
def __str__(self):
return str(self.id) + ':' + self.title
def pub_date_pretty(self):
return self.pub_date.strftime('%b %e %Y')
|
[
"Justinas.DRUSKIS@akka.eu"
] |
Justinas.DRUSKIS@akka.eu
|
17209eaf388e9000e886e9e48250dc9fc3dd072b
|
ca65e958071df2775b2ee6ab380d315f9e3a252b
|
/expensiveoptimbenchmark/solvers/IDONE/test.py
|
0409d48cffde3a20b2531667874d3ee359e0d1a2
|
[
"MIT"
] |
permissive
|
ZhihaoLiu-git/ExpensiveOptimBenchmark
|
56bf814a8033fc84ced257dbde08bfee9914eded
|
642056f8d94c7f953e50c3cd05bbbf9f39ad5c3d
|
refs/heads/master
| 2023-07-13T11:54:19.491931
| 2021-08-24T08:08:47
| 2021-08-24T08:08:47
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,713
|
py
|
import numpy as np
d = 3
# Define basis functions Z=ReLU(W*x+B)
W = [] # Basis function weights
B = [] # Basis function bias
lb = [-5] * d
ub = [15] * d
# Add a constant basis function independent on the variable x, giving the model an offset
W.append([0]*d)
B.append([1])
# Add basis functions dependent on one variable
for k in range(d):
for i in range(lb[k],ub[k]+1):
if i == lb[k]:
temp = [0]*d
temp[k] = 1
W.append(np.copy(temp))
B.append([-i])
elif i == ub[k]:
temp = [0]*d
temp[k] = -1
W.append(np.copy(temp))
B.append([i])
else:
temp = [0]*d
temp[k] = -1
W.append(np.copy(temp))
B.append([i])
temp = [0]*d
temp[k] = 1
W.append(np.copy(temp))
B.append([-i])
# Add basis functions dependent on two subsequent variables
for k in range(1,d):
for i in range(lb[k]-ub[k-1],ub[k]-lb[k-1]+1):
if i == lb[k]-ub[k-1]:
temp = [0]*d
temp[k] = 1
temp[k-1] = -1
W.append(np.copy(temp))
B.append([-i])
elif i == ub[k]-lb[k-1]:
temp = [0]*d
temp[k] = -1
temp[k-1] = 1
W.append(np.copy(temp))
B.append([i])
else:
temp = [0]*d
temp[k] = -1
temp[k-1] = 1
W.append(np.copy(temp))
B.append([i])
temp = [0]*d
temp[k] = 1
temp[k-1] = -1
W.append(np.copy(temp))
B.append([-i])
W = np.asarray(W)
B = np.asarray(B)
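# Added sketch: evaluate the ReLU basis functions Z = max(W @ x + B, 0) at a
# sample point inside the box [lb, ub]; the point below is illustrative only.
x = np.array([0.0, 5.0, 10.0]).reshape(-1, 1)   # shape (d, 1) with d = 3
Z = np.maximum(W @ x + B, 0)                    # one activation per basis function
print(Z.shape)                                  # -> (number_of_basis_functions, 1)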
|
[
"arthur.guijt12@gmail.com"
] |
arthur.guijt12@gmail.com
|
7299e4632f35352b74252a94b66eb182b8449e29
|
1fd6bffd21d94b488aa57e1b30a89ce2da24a434
|
/classifier/linear_classifier.py
|
db2769c4e43521b61f75b2c92a5d2520ef12528e
|
[] |
no_license
|
caozhang1996/CS231N_svm_and_softmax
|
69afd5b0fe0cff1dbd3af0c0b9f6a2ae5efd184e
|
b5351b136a3a57f47e0e7a5f9f52ad1421dcadfb
|
refs/heads/master
| 2020-05-24T20:54:41.916910
| 2019-05-19T10:55:34
| 2019-05-19T10:55:34
| 187,464,816
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,209
|
py
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Wed Dec 26 21:39:11 2018
@author: caozhang
"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
import numpy as np
from classifier.linear_svm import *
from classifier.softmax import *
class LinearClassifier(object):
"""
"""
def __init__(self):
self.W = None
self.B = None
def train(self, X, Y, learning_rate=1e-3, reg=1e-5, num_iters=100,
batch_size=200, verbose=False):
"""
Train this linear classifier using stochastic gradient descent.
Inputs:
- X: A numpy array of shape (N, D) containing training data; there are N
training samples each of dimension D.
- y: A numpy array of shape (N,) containing training labels; y[i] = c
means that X[i] has label 0 <= c < C for C classes.
- learning_rate: (float) learning rate for optimization.
- reg: (float) regularization strength.
- num_iters: (integer) number of steps to take when optimizing
- batch_size: (integer) number of training examples to use at each step.
- verbose: (boolean) If true, print progress during optimization.
Outputs:
A list containing the value of the loss function at each training iteration.
"""
num_train, dim = X.shape
num_classes = np.max(Y) + 1  # assumes labels are 0, 1, ..., K-1, where K is the number of classes
if self.W is None and self.B is None:
self.W = np.random.randn(dim, num_classes) * 1e-2  # simple small random initialization of W
self.B = np.zeros(num_classes)
# Run stochastic gradient descent to optimize W
loss_history = []
for it in range(num_iters):
X_batch = None
Y_batch = None
sample_index = np.random.choice(num_train, batch_size, replace=True)
X_batch = X[sample_index,:] # select the batch sample
Y_batch = Y[sample_index] # select the batch label
# Evaluate the loss and gradient on the minibatch
loss, grad = self.loss(X_batch, Y_batch, reg)
loss_history.append(loss)
# Parameter update (vanilla SGD step)
self.W += -learning_rate * grad
if verbose and it % 10 == 0:
print ('iteration %d / %d: loss %f' % (it, num_iters, loss))
return loss_history
def predict(self, X):
"""
Use the trained weights of this linear classifier to predict labels for
data points.
Inputs:
- X: A numpy array of shape (N, D) containing training data; there are N
training samples each of dimension D.
Returns:
- y_pred: Predicted labels for the data in X. y_pred is a 1-dimensional
array of length N, and each element is an integer giving the predicted
class.
"""
y_pred = np.zeros(X.shape[0])
score = X.dot(self.W) + self.B
y_pred = np.argmax(score, axis=1)
return y_pred
def loss(self, X_batch, Y_batch, reg):
"""
Compute the loss function and its derivative. Subclasses will override this.
Inputs:
- X_batch: A numpy array of shape (N, D) containing a minibatch of N
data points; each point has dimension D.
- y_batch: A numpy array of shape (N,) containing labels for the minibatch.
- reg: (float) regularization strength.
Returns: A tuple containing:
- loss as a single float
- gradient with respect to self.W; an array of the same shape as W
"""
pass
class LinearSVM(LinearClassifier):
"""
"""
def loss(self, X_batch, Y_batch, reg):
return svm_loss_vectorized(self.W, self.B, X_batch, Y_batch, reg)
class Softmax(LinearClassifier):
"""
"""
def loss(self, X_batch, Y_batch, reg):
return softmax_loss_vectorized(self.W, self.B, X_batch, Y_batch, reg)
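# Example usage (added sketch; assumes training data X_train of shape (N, D)
# and labels y_train of shape (N,) are available in the calling scope):
# clf = LinearSVM()
# loss_history = clf.train(X_train, y_train, learning_rate=1e-7,
#                          reg=2.5e4, num_iters=1500, verbose=True)
# y_pred = clf.predict(X_train)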
|
[
"caozhang1996@gmail.com"
] |
caozhang1996@gmail.com
|
3c53acc7a394cdd39b839e2d9ea9db430e1ffd07
|
a1ef11a1861bc94495f5cc1bc0de2355080f5c3f
|
/17/17_1.py
|
d5454f4a85de93caebdb95c901f5867ce50ed9fc
|
[] |
no_license
|
womogenes/AoC-2020-solutions
|
85208d1a2b24daf299150a40056fe9cb3c52355d
|
72c4673a24c1d06d8f385536f1881c9202991d10
|
refs/heads/main
| 2023-02-08T01:03:30.384345
| 2020-12-25T19:20:10
| 2020-12-25T19:20:10
| 318,245,689
| 3
| 4
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,473
|
py
|
with open("17_input.txt") as fin:
data = fin.read()
def next_state(state):
# Return the next state given the current state
# State is represented by a big enough 3-dimensional matrix
minz = 0
maxz = len(state) - 1
miny = 0
maxy = len(state[0]) - 1
minx = 0
maxx = len(state[0][0]) - 1
newState = [[["." for _ in range(maxx + 1)] for _ in range(maxy + 1)] for _ in range(maxz + 1)]
for x in range(minx, maxx + 1):
for y in range(miny, maxy + 1):
for z in range(minz, maxz + 1):
neighbors = 0
# Find all 26 surrounding cubes
for dx in range(-1, 2):
for dy in range(-1, 2):
for dz in range(-1, 2):
xx = x + dx
yy = y + dy
zz = z + dz
if not (minx <= xx <= maxx and miny <= yy <= maxy and minz <= zz <= maxz):
continue
if dx == 0 and dy == 0 and dz == 0:
continue
neighbors += state[zz][yy][xx] == "#"
if state[z][y][x] == "#" and (not neighbors in [2, 3]):
newState[z][y][x] = "."
elif state[z][y][x] == "." and neighbors == 3:
newState[z][y][x] = "#"
else:
newState[z][y][x] = state[z][y][x]
return newState
# Usually hard-coded stuff is bad but it works here
n = len(data.split("\n"))
size = 50
def print_state(state):
for layer in state:
if count_active(layer) > 0:
print("\n".join([" ".join(row) for row in layer]))
print()
def count_active(layer):
count = 0
for i in layer:
for j in i:
count += j != "."
return count
inputState = data.split("\n")
state = [[["." for i in range(size)] for j in range(size)] for k in range(size)]
# Fill in our grid
for y in range(size // 2, size // 2 + n):
for x in range(size // 2, size // 2 + n):
state[size // 2][y][x] = inputState[y - size // 2][x - size // 2]
print_state(state)
count = 0
for i in range(6):
state = next_state(state).copy()
print(f"Finished iteration {i + 1}")
#print_state(state)
#print()
count = 0
for layer in state:
count += count_active(layer)
print(count)
|
[
"williamfeng1729@gmail.com"
] |
williamfeng1729@gmail.com
|
7cc04ea24305a25eb8010faa7a224d022844a5ca
|
ce71d887986a3c7a90ab9dfadf70c9e2d64147f2
|
/day07/07_qiushibaike.py
|
1602bc6a2aeea0664174f460fe83e86fe9bcb943
|
[] |
no_license
|
Chenqianwu/AID1904
|
361b9d3640ad63519d8cea64e9241f15f8543161
|
5e5a0908ba24b3708d9ae832194d08b93cb89318
|
refs/heads/master
| 2020-06-05T18:34:14.752026
| 2019-08-13T03:37:53
| 2019-08-13T03:37:53
| 192,512,781
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 410
|
py
|
from selenium import webdriver
browser = webdriver.PhantomJS()
browser.get('https://www.qiushibaike.com/text/')
# Single-element lookup
xpath = '//div[@class="content"]'
div = browser.find_element_by_xpath(xpath)
# print(div.text)
# Multi-element lookup
xpath = '//div[@class="content"]/span'
span_list = browser.find_elements_by_xpath(xpath)
for span in span_list:
print(span.text)
print('*'*50)
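# Note (added): PhantomJS support has been removed from recent Selenium
# releases; headless Chrome is the usual replacement, e.g.:
# options = webdriver.ChromeOptions()
# options.add_argument('--headless')
# browser = webdriver.Chrome(options=options)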
|
[
"noreply@github.com"
] |
noreply@github.com
|
883d265d36da118961861848f49fe6ec28c13473
|
5b52313c813c3b0e2cce6bc1096ee292fccdbada
|
/djcrm/settings.py
|
167aeee1b63834b21bb9b3f17bc9e7bf15886e53
|
[
"MIT"
] |
permissive
|
frenchdubliner/djangoTest
|
4240568e8c7159981a2f1c945c1c84d1534e64c6
|
8cb279964f902af3f524844782ca4b977f7c6ee7
|
refs/heads/main
| 2023-09-05T11:59:48.617318
| 2021-11-06T01:24:06
| 2021-11-06T01:24:06
| 421,076,547
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,470
|
py
|
"""
Django settings for djcrm project.
Generated by 'django-admin startproject' using Django 3.2.8.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-na+$rse)t)d&r(-escw&4ow7+gt$=%$i^#3n+ab0!bs8im#d_x'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'leads'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'djcrm.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [BASE_DIR / "templates"],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'djcrm.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
BASE_DIR / "static"
]
STATIC_ROOT = "static_root"
AUTH_USER_MODEL = 'leads.User'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
LOGIN_REDIRECT_URL = "/leads"
|
[
"olivier.chambon@gmail.com"
] |
olivier.chambon@gmail.com
|
24d2af17dd3749befa8832fee7ee08d62a1a9063
|
6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4
|
/JgYPQrYdivmqN4KKX_18.py
|
51bf0ca4c9e57c6e4d4df644268825f4357b96e2
|
[] |
no_license
|
daniel-reich/ubiquitous-fiesta
|
26e80f0082f8589e51d359ce7953117a3da7d38c
|
9af2700dbe59284f5697e612491499841a6c126f
|
refs/heads/master
| 2023-04-05T06:40:37.328213
| 2021-04-06T20:17:44
| 2021-04-06T20:17:44
| 355,318,759
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 530
|
py
|
def BMI(weight, height):
if "kilos" in weight:
fBmi = round(float(weight.split()[0]) / float(height.split()[0])**2,1)
else:
a = (float(weight.split()[0])) * 0.453592
b = (float(height.split()[0])) * 0.0254
fBmi = round(a/b**2,1)
if fBmi < 18.5:
return "{0} Underweight".format(fBmi)
elif fBmi < 25:
return "{0} Normal weight".format(fBmi)
elif fBmi < 30:
return "{0} Overweight".format(fBmi)
else:
return "{0} Obesity".format(fBmi)
|
[
"daniel.reich@danielreichs-MacBook-Pro.local"
] |
daniel.reich@danielreichs-MacBook-Pro.local
|
ff1f313894df0d8ce05f9613010ff31d92abca19
|
a21982c4c56944c2c852efa947f0f2ee4afb374a
|
/setup.py
|
941b183739c4ecef99377b723bdbd0da1aedea33
|
[] |
no_license
|
mephicide/semantic-version-updater
|
0920586528218d0aef7db98f8ec21625a21ae5e3
|
2d2947259ad5f520653bb44457ddbb49716968b3
|
refs/heads/master
| 2023-02-05T18:11:46.731500
| 2020-12-21T13:17:58
| 2020-12-21T13:17:58
| 322,597,760
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 801
|
py
|
from setuptools import setup, find_packages
entrypoints = {}
console_scripts = entrypoints['console_scripts'] = [
'update-version = version_updater.main:main',
]
try:
long_description = open('README.md').read()
except IOError:
long_description = ''
setup(
name='version_updater',
version='0.0.2',
description="Updates semantic versions, either of the artifact itself, or its dependencies",
author="Jeremy Vickery",
author_email="jeremyvickery@gmail.com",
platforms=['any'],
license='MIT',
packages=find_packages(),
zip_safe=False,
install_requires=[
'semver==2.13.0',
'jsonpath-ng==1.5.2',
'ruamel.yaml==0.16.12'
],
include_package_data=True,
entry_points=entrypoints,
long_description=long_description
)
|
[
"jeremyvickery@gmail.com"
] |
jeremyvickery@gmail.com
|
9030713c7c930ffabe85beaff385f9f272140ffe
|
2e6105480d6e99ca6c290e3ed64cb64d36de6491
|
/Database_Server/Views.py
|
491bf883dcc3074eea921035e39fe9281fe2ea96
|
[] |
no_license
|
avpatel26/CityAnalyticsOnCloud
|
fb65f01e647b5835b1f12bc4f562ae3ede1a2683
|
1c06569ed13e8c48bc89a85da0777be721a940a7
|
refs/heads/main
| 2023-07-11T21:19:24.834187
| 2021-08-13T03:08:21
| 2021-08-13T03:08:21
| 395,503,582
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,382
|
py
|
#--------------------------------
#Group No. 31
#Team Members
#Akshay Agarwal, 1141290,Melbourne
#Avi Patel,1143213,Melbourne
#Monit Patel,1135025,Melbourne
#Prabha Choudhary,1098776,Melbourne
#Shubham Parakh,1098807,Melbourne
#--------------------------------
import couchdb
import couchdb.design
import os
cities = ["Melbourne","Brisbane","Perth","Sydney","Adelaide"]
states = ["VIC","QLD","WA","NSW","SA"]
years = [2020,2019,2018,2017,2016,2015]
def server_connection():
ip = os.getenv('couchdb_ip')
username = os.getenv('couchdb_user')
password = os.getenv('couchdb_pass')
server = couchdb.Server("http://" + username + ":" + password + "@" + ip + ":5984/")
return server
server = server_connection()
def create_views_2partyvotes(dbname,cities):
db = server[dbname]
for city in cities:
view_name = city+"_voters_view"
map = 'function(doc) { if(doc.division_name.includes("'+city+'")) {emit(doc.tpp_australian_labor_party_votes,doc.tpp_liberal_national_coalition_votes);}}'
view = couchdb.design.ViewDefinition(city, view_name, map)
view.sync(db)
def create_views_liberalpartyvotes(dbname,cities):
db = server[dbname]
for city in cities:
view_name = city+"_liberal_party_view"
map = 'function(doc) { if(doc.division_name.includes("'+city+'")) {emit(doc.id,doc.tpp_liberal_national_coalition_votes);}}'
reduce = 'function(keys, values) { return sum(values); }'
view = couchdb.design.ViewDefinition(city, view_name, map,reduce_fun=reduce)
view.sync(db)
def create_views_laborpartyvotes(dbname, cities):
db = server[dbname]
for city in cities:
view_name = city + "_labor_party_view"
map = 'function(doc) { if(doc.division_name.includes("' + city + '")) {emit(doc.id,doc.tpp_australian_labor_party_votes);}}'
reduce = 'function(keys, values) { return sum(values); }'
view = couchdb.design.ViewDefinition(city, view_name, map,reduce_fun=reduce)
view.sync(db)
def create_views_totalvoterCount(dbname, cities):
db = server[dbname]
for city in cities:
view_name = city + "_total_voterCount_view"
city = city.upper()
map = 'function(doc) { if(doc.Division.includes("' + city + '")) {emit(doc.id,doc.Enrolment);}}'
view = couchdb.design.ViewDefinition(city, view_name, map)
view.sync(db)
def create_views_AgeAbove15(dbname, cities):
db = server[dbname]
i = 0
for state in states:
view_name = cities[i] + "_Age_view"
map = 'function(doc) { if(doc.State.includes("' + state + '") && doc.Region.includes("(C)")) {var youth = parseInt(doc["15 - 19"])+parseInt(doc["20 - 24"]);var adult = parseInt(doc["25 - 29"])+parseInt(doc["30 - 34"])+parseInt(doc["35 - 39"])+parseInt(doc["40 - 44"])+parseInt(doc["45 - 49"])+parseInt(doc["50 - 54"])+parseInt(doc["55 - 59"])+parseInt(doc["60 - 64"]);var senior = parseInt(doc["65 - 69"])+parseInt(doc["70 - 74"])+parseInt(doc["75 - 79"])+parseInt(doc["80 - 84"])+parseInt(doc["85 - 89"])+parseInt(doc["90 - 94"])+parseInt(doc["95 - 99"])+parseInt(doc["100 and over"]);emit(doc.Region, [youth,adult,senior]);}}'
print(map)
view = couchdb.design.ViewDefinition(cities[i], view_name, map)
i=i+1
view.sync(db)
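# Example invocation (added sketch; the database names are hypothetical and
# must already exist on the CouchDB server configured above):
# create_views_2partyvotes("election_results", cities)
# create_views_totalvoterCount("voter_counts", cities)
# create_views_AgeAbove15("demographics", cities)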
|
[
"avipatel2610@gmail.com"
] |
avipatel2610@gmail.com
|
d5d776f59da1238f1032e6305e2eeb6e866c8e96
|
e0d5423de509451c56a4ac18549142c70842445f
|
/DailyPractice/Linkedlist/148_Sort_List.py
|
7b2a4f8895f37f81a8222e54d48bc47d5b9b8be1
|
[] |
no_license
|
xyliu09/DailyPractice
|
82e39259972260c62289483f5227265715ea08f8
|
aa925ef83624261fd5b4b8ba140de26ba8cd0074
|
refs/heads/master
| 2021-06-15T22:21:36.505334
| 2021-05-14T01:59:47
| 2021-05-14T01:59:47
| 196,878,921
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,513
|
py
|
# Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution(object):
def sortList(self, head):
"""
:type head: ListNode
:rtype: ListNode
"""
if not head: return None
def getSize(head):
counter = 0
while head:
counter += 1
head = head.next
return counter
def split(head, step):
i = 1
while i < step and head:
head = head.next
i += 1
if not head:
return None
temp, head.next = head.next, None
return temp
def merge(l, r, head):
cur = head
while l and r:
if l.val < r.val:
cur.next, l = l, l.next
else:
cur.next, r = r, r.next
cur = cur.next
cur.next = l or r
while cur.next: cur = cur.next
return cur
size = getSize(head)
bs = 1
dummy = ListNode(None)
dummy.next = head
l, r, tail = None, None, None
while bs < size:
cur = dummy.next
tail = dummy
while cur:
l = cur
r = split(l, bs)
cur = split(r, bs)
tail = merge(l, r, tail)
bs <<= 1
return dummy.next
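# Usage sketch (added; uses the ListNode stub from the comment above).
# Bottom-up merge sort: O(n log n) time, O(1) extra space.
# head = ListNode(4); head.next = ListNode(2); head.next.next = ListNode(1)
# sorted_head = Solution().sortList(head)   # yields 1 -> 2 -> 4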
|
[
"38929468+xyliu09@users.noreply.github.com"
] |
38929468+xyliu09@users.noreply.github.com
|
eb4cc5b31c4a9629a928412b4df80c8ee843fe8e
|
9e35d755536790a8dd28c1ee7473d1e70deec8b9
|
/Testy/Dictionaries.py
|
d3db2897e6aa727a4eaac3a3731dc74cf63c8f8d
|
[] |
no_license
|
cichali/praktykiGIT
|
805210b047b01fc9ee814d0a8a222992af763e44
|
8b36ba6e3bbfc72b9b1ebf110b4477ad668cceef
|
refs/heads/master
| 2020-06-30T02:55:28.630883
| 2019-09-20T10:25:37
| 2019-09-20T10:25:37
| 200,699,753
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 322
|
py
|
monthConversions = {
"Jan": "January",
"Fab": "February",
"Mar": "March",
"Apr": "April",
"May": "May",
"Jun": "June",
"Jul": "July",
"Aug": "August",
"Sep": "September",
"Oct": "October",
"Nov": "November",
"Dec": "December",
}
print(monthConversions.get("Deg", "Error"))
|
[
"cichyakms@gmail.com"
] |
cichyakms@gmail.com
|
716aca32e454d7b820d8e09254aa93a48ca27a20
|
21be53b22a5ecc8a6c3f49e7378a78904b5c9c85
|
/main.py
|
286729c95c730e334a1825639ed56e367c0f5656
|
[] |
no_license
|
S-Hani/pythonProject
|
983e56e0116782c78a3ed33b8c932ae0834e5579
|
6bb4c8f139e35d33b08d3c7f200b8473eafeba42
|
refs/heads/master
| 2023-08-12T22:31:54.983548
| 2021-09-30T12:23:52
| 2021-09-30T12:23:52
| 412,055,034
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,170
|
py
|
import requests
import json
import csv
prtcl = "http"
url = "www.omdbapi.com"
params = {
"apikey": "926c06e4",
"s": "Avengers"
}
payload = {}
headers = {}
def dict_to_query(parameters, domain, protocol):
query = ''
for key in parameters.keys():
query += str(key) + '=' + str(parameters[key]) + "&"
return protocol + "://" + domain + "/?" + query
response = requests.request("GET", dict_to_query(params, url, prtcl), headers=headers, data=payload)
data = json.loads(response.text)
search_data = data['Search']
# Path to the CSV file
csv_file_path = 'movies.csv'
csv_file = open(csv_file_path, 'w')
# create the csv writer object
csv_writer = csv.writer(csv_file)
# Counter variable used for writing
# headers to the CSV file
count = 0
for attributes in search_data:
if count == 0:
# Writing headers of CSV file
header = attributes.keys()
csv_writer.writerow(header)
count += 1
# Writing data of CSV file
csv_writer.writerow(attributes.values())
csv_file.close()
csv_file = open(csv_file_path, 'r')
reader = csv.reader(csv_file)
for row in reader:
print(" ".join(row))
csv_file.close()
|
[
"shettyhanish@gmail.com"
] |
shettyhanish@gmail.com
|
44c462d9e7b54f0d65128ccbeaeac6d1bea09293
|
738d53191472e3322d332c22b9a39bf8e243a549
|
/flaskr/auth.py
|
b546bee70b8485634407588e4db48e939fdf0b6d
|
[] |
no_license
|
bahatiphill/flaskr
|
bfa75bd0326db406094aecdfff4cf8b902d1c035
|
7df397cd3491a37955f7a2e2abc48d58d0d676cf
|
refs/heads/master
| 2020-08-16T23:30:09.516042
| 2019-10-17T13:58:43
| 2019-10-17T13:58:43
| 215,570,800
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,532
|
py
|
import functools
from flask import (
Blueprint, flash, g, redirect, render_template, request, session, url_for
)
from werkzeug.security import check_password_hash, generate_password_hash
from flaskr.db import get_db
bp = Blueprint('auth', __name__, url_prefix='/auth')
@bp.route('/register', methods=('GET', 'POST'))
def register():
if request.method == 'POST':
username = request.form['username']
password = request.form['password']
db = get_db()
error = None
if not username:
error = 'Username required'
elif not password:
error = 'Password required'
elif db.execute(
'SELECT id FROM user WHERE username = ?', (username, )
).fetchone() is not None:
error = 'User {} is already registered'.format(username)
if error is None:
db.execute(
'INSERT INTO user (username, password) VALUES (?, ?)',
(username, generate_password_hash(password))
)
db.commit()
return redirect(url_for('auth.login'))
flash(error)
return render_template('auth/register.html')
@bp.route('/login', methods=('GET', 'POST'))
def login():
if request.method == 'POST':
username = request.form['username']
password = request.form['password']
db = get_db()
error = None
user = db.execute(
'SELECT * FROM user WHERE username =?', (username,)
).fetchone()
if user is None:
error = "Incorrect username"
elif not check_password_hash(user['password'], password):
error = 'Incorrect password'
if error is None:
session.clear()
session['user_id'] = user['id']
return redirect(url_for('index'))
flash(error)
return render_template('auth/login.html')
@bp.before_app_request
def load_logged_in_user():
user_id = session.get('user_id')
if user_id is None:
g.user = None
else:
g.user = get_db().execute(
'SELECT * FROM user WHERE id = ?', (user_id,)
).fetchone()
@bp.route('/logout')
def logout():
session.clear()
return redirect(url_for('index'))
#decorator to check if user is logged in
def login_required(view):
@functools.wraps(view)
def wrapped_view(**kwargs):
if g.user is None:
return redirect(url_for('auth.login'))
return view(**kwargs)
return wrapped_view
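# Example (added sketch): protecting a hypothetical view elsewhere in the app.
# @bp.route('/secret')
# @login_required
# def secret():
#     return 'only for logged-in users'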
|
[
"phill@localhost.localdomain"
] |
phill@localhost.localdomain
|
e0b7367a019a91e2fa1bcd3bff959a74b8b7323a
|
e8cc4cd00990a4f8a75e538ca68fa77456f37e3c
|
/telebot/apps.py
|
f6201278a2003ae19e1031d371370381cf66d2e3
|
[] |
no_license
|
devRayanwv/djangoTest
|
950b5d54a3a53f52f615e2ed0a99bac975fb0db9
|
71bb9377f70fde5b28c5685e8800c4209f265a9f
|
refs/heads/master
| 2020-04-06T04:28:08.942379
| 2017-02-24T22:20:08
| 2017-02-24T22:20:08
| 82,883,125
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 130
|
py
|
from __future__ import unicode_literals
from django.apps import AppConfig
class TelebotConfig(AppConfig):
name = 'telebot'
|
[
"you@example.com"
] |
you@example.com
|
b95fd459a30460bf9d8bc15c82f847b32064b38c
|
0523da031e1478eabb453afb08c89d8789f78007
|
/bazy/baza.py
|
be6359a0eeb81591c1edc204f5b93d500bbdc7fb
|
[] |
no_license
|
miiszka/kody
|
980501925bcfa32b0a08f59df6213f8e39b74c3a
|
309d8adf6cf1e8dc534fdbe82ceb8eab041111f8
|
refs/heads/master
| 2021-01-25T17:05:23.986413
| 2019-02-11T12:01:45
| 2019-02-11T12:01:45
| 102,843,645
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,038
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# baza.py
import csv
import sqlite3
def czytaj_dane(plik, separator=","):
dane = []  # empty list for the records
with open(plik, 'r', newline='', encoding='utf-8') as plikcsv:
tresc = csv.reader(plikcsv, delimiter=separator)
for rekord in tresc:
dane.append(rekord)
print(dane)
return dane
def main(args):
con = sqlite3.connect('bazagus.db') # open the connection
cur = con.cursor() # cursor object
with open('bazagus.sql', 'r') as plik:
cur.executescript(plik.read())
# ~dane = czytaj_dane('miasta.csv', ',')
# ~dane.pop(0) # remove the first (header) record from the list
# ~cur.executemany('INSERT INTO miasta VALUES(?, ?, ?)', dane)
dane = czytaj_dane('dane_demograficzne.csv')
dane.pop(0)
cur.executemany('INSERT INTO dane_demograficzne VALUES(?, ?, ?, ?, ?, ?)', dane)
con.commit()
con.close()
return 0
if __name__ == '__main__':
import sys
sys.exit(main(sys.argv))
|
[
"emeryt555@tlen.pl"
] |
emeryt555@tlen.pl
|
93926aae0bfed5953da6df5e99392bf9ce52c8ca
|
c06f40b11119c9005df5ab9e1353897604a7ca23
|
/chartsdb/students/views.py
|
0bf6c23b04d150a11d2dc637b7cc0c047c701321
|
[] |
no_license
|
Drew81/Abc-Data
|
d98320299a5d127ce139732278a45fe771ed1124
|
b13db677439f446461a32550ec5296d1e1030188
|
refs/heads/master
| 2021-05-15T21:45:53.685029
| 2019-07-12T15:48:00
| 2019-07-12T15:48:00
| 106,577,370
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,620
|
py
|
from django.views import generic
from .models import Student, Detail, Score
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from django.core.urlresolvers import reverse_lazy
from django.views.generic import TemplateView
# REST framework imports
from rest_framework.response import Response
from rest_framework import generics
from rest_framework.views import APIView
from .serializers import StudentSerializer, DetailSerializer
#Json response(data)
from django.contrib.auth import get_user_model
from django.http import JsonResponse
#API views for charts data output
from django.db.models import Count, Avg, Sum
#views
class HomeView(TemplateView):
template_name = 'students/home.html'
class StudentView(generic.ListView):
template_name = 'students/student_list.html'
context_object_name = 'all_students'
def get_queryset(self):
return Student.objects.all()
class AvgView(generic.ListView):
template_name = 'students/home.html'
context_object_name = 'avg_score'
def get_queryset(self):
return Score.objects.annotate(Sum('score'))
class DetailView(generic.DetailView):
model = Student
template_name = 'students/detail.html'
# Student forms
class StudentCreate(CreateView):
model = Student
fields = ['first_name', 'last_name', 'grade', 'gender', 'age', 'race']
success_url = reverse_lazy('students:student_list')
class StudentUpdate(UpdateView):
model = Student
fields = ['first_name', 'last_name', 'grade', 'gender', 'age', 'race']
success_url = reverse_lazy('students:student_list')
class StudentDelete(DeleteView):
model = Student
success_url = reverse_lazy('students:student_list')
class DetailCreate(CreateView):
model = Detail
fields = ['student', 'incident', 'incident_level', 'location', 'method', 'notes']
success_url = reverse_lazy('students:student_list')
class DetailUpdate(UpdateView):
model = Detail
fields = ['student','incident', 'incident_level', 'location', 'method', 'notes']
success_url = reverse_lazy('students:student_list')
class DetailDelete(DeleteView):
model = Detail
success_url = reverse_lazy('students:student_list')
#API views
class StudentList(generics.ListCreateAPIView):
queryset = Student.objects.all()
serializer_class = StudentSerializer
class StudentDetail(generics.RetrieveUpdateDestroyAPIView):
queryset = Student.objects.all()
serializer_class = StudentSerializer
class DetailList(generics.ListCreateAPIView):
queryset = Detail.objects.all()
serializer_class = DetailSerializer
class DetailsDetail(generics.RetrieveUpdateDestroyAPIView):
queryset = Detail.objects.all()
serializer_class = DetailSerializer
#API Score View for each student
class ScoreView(generic.ListView):
template_name = 'students/score_list.html'
context_object_name = 'all_scores'
def get_queryset(self):
return Score.objects.all()
class ScoreDetailView(generic.DetailView):
model = Student
template_name = 'students/score_list.html'
#Score
class ScoreCreate(CreateView):
model = Score
fields = ['student', 'math_score', 'reading_score']
success_url = reverse_lazy('students:student_list')
#API to calculate for charts
class ChartsApi(APIView):
def get(self, request, format=None):
queryset = Student.objects.all().count()
labels = ["id", "default"]
default_items = [queryset]
serializer = Student.objects.all()
data = {
"labels":labels,
"default": default_items
}
return Response(data)
class DetailsApi(APIView):
def get(self, request, format=None):
qs_count = Detail.objects.all().count()
values = Detail.objects.aggregate(Sum('incident_level'))
serializer = Detail.objects.all()
labels = [ "date", "incident_level"]
default_items = [qs_count, values]
data = {
"labels":labels,
"default": default_items,
"incident_level_sum": values
}
return Response(data)
class ChartDetailApi(APIView):
def get(self, request, format=None):
qs_count = Detail.objects.all().count()
values = Detail.objects.values_list('id', 'incident_level')
serializer = Detail.objects.all()
labels = ["id", "incident_level"]
default_items = [values, "id"]
data = {
"labels":labels,
"default": default_items,
"incident_level": values
}
return Response(data)
class AvgApi(APIView):
def get(self, request, format=None):
queryset = Score.objects.all().count()
values = Score.objects.annotate(Sum('math_score'))
serializer = Score.objects.all()
labels = ["avg_score" ]
default_items = [queryset, values]
data = {
"labels":labels,
"default": default_items,
"avg_score": values
}
return Response(data)
|
[
"winterz@Drews-iMac.home"
] |
winterz@Drews-iMac.home
|
9ed3302317bb7901f6b3244ef26fc1ecb990a599
|
5b9f9b4ea1494943e6f7f842df55909599ed1304
|
/python/onshape_client/oas/models/bt_shaded_render_document_response.py
|
7f8e89af07f0ac165d25afbbf29e6536706ff134
|
[] |
no_license
|
jenniferyoung02/onshape-clients
|
f50534f033428027515b7fc0b801b1caab4d0aec
|
8ee31a17d7af32f105b851e45f69fd4a3006e1ba
|
refs/heads/master
| 2020-09-07T06:44:37.682545
| 2019-10-08T18:52:06
| 2019-10-08T18:52:06
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,884
|
py
|
# coding: utf-8
"""
Onshape REST API
The Onshape REST API consumed by all clients. # noqa: E501
The version of the OpenAPI document: 1.104
Contact: api-support@onshape.zendesk.com
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
class BTShadedRenderDocumentResponse(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'image_data': 'list[str]',
'status': 'BTNodeStatus'
}
attribute_map = {
'image_data': 'imageData',
'status': 'status'
}
def __init__(self, image_data=None, status=None): # noqa: E501
"""BTShadedRenderDocumentResponse - a model defined in OpenAPI""" # noqa: E501
self._image_data = None
self._status = None
self.discriminator = None
if image_data is not None:
self.image_data = image_data
if status is not None:
self.status = status
@property
def image_data(self):
"""Gets the image_data of this BTShadedRenderDocumentResponse. # noqa: E501
:return: The image_data of this BTShadedRenderDocumentResponse. # noqa: E501
:rtype: list[str]
"""
return self._image_data
@image_data.setter
def image_data(self, image_data):
"""Sets the image_data of this BTShadedRenderDocumentResponse.
:param image_data: The image_data of this BTShadedRenderDocumentResponse. # noqa: E501
:type: list[str]
"""
self._image_data = image_data
@property
def status(self):
"""Gets the status of this BTShadedRenderDocumentResponse. # noqa: E501
:return: The status of this BTShadedRenderDocumentResponse. # noqa: E501
:rtype: BTNodeStatus
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this BTShadedRenderDocumentResponse.
:param status: The status of this BTShadedRenderDocumentResponse. # noqa: E501
:type: BTNodeStatus
"""
self._status = status
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, BTShadedRenderDocumentResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
[
"ekeller@onshape.com"
] |
ekeller@onshape.com
|
e11630ca7d98b098071e4dbe2e4d7ccbd0e87332
|
525dc175d55c2f5f33f87df6915f3633537da17c
|
/oas_dev/notebooks/eusaari/02-02-Sizedistrib/02-02-sizedistribution_allstations-sec_and_mod_separately.py
|
d470894aa30e6884b01c96e3bce822ef6d69d6b6
|
[
"CC0-1.0"
] |
permissive
|
sarambl/OAS-DEV
|
1b4c020ff862075034536ea38f30a131968791fb
|
8dec6d29ef23dee8135bc937cd6ee1ef5b64d304
|
refs/heads/master
| 2023-04-09T07:59:31.051158
| 2021-10-26T12:20:04
| 2021-10-26T12:20:04
| 310,578,324
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 16,442
|
py
|
# ---
# jupyter:
# jupytext:
# text_representation:
# extension: .py
# format_name: percent
# format_version: '1.3'
# jupytext_version: 1.3.2
# kernelspec:
# display_name: Python 3
# language: python
# name: python3
# ---
# %%
import xarray as xr
from oas_dev.util.Nd.sizedist_class_v2 import SizedistributionStation
from oas_dev.util.eusaar_data.eusaar_noresm import compute_all_subsets_percs_flag, get_all_distc_noresm
from useful_scit.util.make_folders import make_folders
# %% [markdown]
# ## Cases:
# %%
# Case names:
cases_sec = ['SECTv21_ctrl_koagD']#, 'SECTv21_ctrl']#,'SECTv11_redSOA_LVnuc','SECTv11_incBVOC']#'PD_SECT_CHC7_diur_ricc']#, 'PD_SECT_CHC7_diurnal']# , 'PD_SECT_CHC7_diur_ricc_incC']
cases_orig = ['noSECTv21_default_dd','noSECTv21_ox_ricc_dd']#'noSECTv11_ctrl']#,'PD_noSECT_nudgeERA_eq20']#'Original eq.20'] # , 'Original eq.18','Original eq.20, 1.5xBVOC','Original eq.20, rednuc']
cases = cases_sec + cases_orig
# %% [markdown]
# ## Settings
# %%
from_t = '2008-01-01'
to_t = '2010-01-01'
nr_of_bins = 5
maxDiameter = 39.6 # 23.6 #e-9
minDiameter = 5.0 # e-9
time_resolution = 'hour'
history_field='.h1.'
# %% [markdown]
# ### EUSAAR subset:
# %%
year_subset='BOTH'
# %% [markdown]
# ## Load datasets:
# %% [markdown]
# ### Models
# %%
dic_finish = {}
for case_name in cases:
#_ds = dic_mod_all[case_name]
ds = get_all_distc_noresm(case_name, from_t, to_t)
dic_finish[case_name] = ds
# %% [markdown]
# ## make sectional not sum of mode and sectional
# %% [markdown]
# for case in cases_sec:
# _ds = dic_finish[case]
# _ds['dNdlogD_mod_mod'] = _ds['dNdlogD_mod'].where(_ds['diameter']>=39.6, other=0)
# _ds['dNdlogD'] = _ds['dNdlogD_sec'] + _ds['dNdlogD_mod_mod']
# dic_finish[case] = _ds
# %%
dic_finish
import numpy as np
for key in cases:
dic_finish[key]['dNdlog10dp'] = dic_finish[key]['dNdlogD']*np.log(10)
# %%
dic_finish['SECTv21_ctrl_koagD']
# %% [markdown]
# ### Eusaar:
# %%
# # %load_ext autoreload
# # %autoreload 2
import numpy as np
from oas_dev.util.eusaar_data import distc_var # import load_var_as_dtframe
import matplotlib.pyplot as plt
from useful_scit.plot import get_cmap_dic
# %%
ds_eusaar = distc_var.get_distc_xarray_all(from_nc=True)
# %%
# select bottom layer
for case in dic_finish.keys():
ds = dic_finish[case]
if 'lev' in ds.coords:
dic_finish[case] = ds.isel(lev=-1)
# %%
ds_eusaar
# %%
ds_eusaar = ds_eusaar.sel(year=year_subset)
# %%
dic_finish['eusaar'] = ds_eusaar
# %% [markdown]
# ### Various functions:
# %%
from oas_dev.constants import collocate_locations, paths_plotsave, collocate_locations, collocate_locations
# %%
coll_ltr = collocate_locations.transpose()
# %%
from useful_scit.util.pd_fix import pd_custom_sort_values
#pd_custom_sort_values(coll_ltr, sorter, dall_c)
#coll_ltr
# %%
dall_c = "Dall'Osto 2018 categories"
sorter = ['North','Center','South (spring)', 'South (winter)', 'Overlap']
def make_new_cat():
"""
Make new category
"""
coll_ltr = collocate_locations.transpose()
td = {
'Low altitude sites (less than 1000 m a.s.l.)': 'LA',
'High altitude sites (over 1000 m a.s.l.)': 'HA'
}
coll_ltr['AC'] = coll_ltr['Altitude category'].apply(lambda x: td[x])
coll_ltr['new_cat'] = coll_ltr['AC'] + ': ' + coll_ltr['Region']
coll_ltr = coll_ltr.sort_values('new_cat', ascending=False)
return coll_ltr
def get_ordered_stations():
coll_ltr = make_new_cat()
coll_ltr=pd_custom_sort_values(coll_ltr, sorter, dall_c)
return coll_ltr.index
list(get_ordered_stations())
# %%
# %%
coll_ltr = collocate_locations.transpose()
# %%
# %%
from oas_dev.data_info import get_nice_name_case
# %%
from useful_scit.plot import get_cmap_dic
from oas_dev.constants import collocate_locations, paths_plotsave, collocate_locations, collocate_locations
from oas_dev.util.plot.colors import get_case_col
def plot_grid(dic_finish, subs = 'TOT', st_ls=None, name='all_stations', ylim=[5,8.8e3],
yscale='linear', plot_sec=True,nr_col=4, figsize=None, ylim_ZEP=[0,500]):
colors_source = get_cmap_dic(dic_finish.keys())
colors_source['EUSAAR']='k'
dic_ds = dic_finish
t_cs =dic_ds[ list(dic_ds.keys())[0]]
st_ls =list(get_ordered_stations())
# st_ls = list(loc_tr[loc_tr['Region']==reg].index)
print(list(st_ls))
if len(st_ls)>nr_col:
nr_row = int(np.ceil(len(st_ls)/nr_col))
else:
nr_row=1
nr_col=len(st_ls)
if figsize is None:
figsize = [10 / 4 * nr_col, 10 / 6 * nr_row]
fig, axs = plt.subplots(nr_row, nr_col , sharex=True,sharey=True, figsize=figsize )
axs_nf = axs
if nr_row>1: axs = axs.flatten()
for station, ax in zip(st_ls, axs):
lines=[]
labels= []
for key in dic_finish.keys():
_ds = dic_finish[key]
#if 'dNdlog10dp_sec' in _ds:
# plt_perc(dic_finish[key]['dNdlog10dp_sec'], station, key,
# color=get_case_col(key),
# ax=ax,
# subs=subs, percs=[16,84], yscale=yscale, ylim=ylim)
## plt_perc(dic_finish[key]['dNdlog10dp_mod'], station, key,
# color=get_case_col(key),
# ax=ax,
# subs=subs, percs=[16,84], yscale=yscale, ylim=ylim)
#else:
plt_perc(dic_finish[key]['dNdlog10dp'], station, key,
color=get_case_col(key),
ax=ax,
subs=subs, percs=[16,84], yscale=yscale, ylim=ylim)
for key in dic_finish.keys():
_ds = dic_finish[key]
#if 'dNdlog10dp_sec' in _ds:
# line =plt_median(dic_finish[key]['dNdlog10dp_sec'], station, key,
# color=get_case_col(key),
# ax=ax,
# subs=subs, percs=[16,84], yscale=yscale, ylim=ylim)
# line =plt_median(dic_finish[key]['dNdlog10dp_mod'], station, key,
# color=get_case_col(key),
# ax=ax,
# subs=subs, percs=[16,84], yscale=yscale, ylim=ylim)
#else:
if 'dNdlog10dp_sec' in _ds:
line =plt_median(dic_finish[key]['dNdlog10dp_sec'].where(dic_finish[key]['dNdlog10dp_sec']>0), station, key,
color=get_case_col(key),
ax=ax,
subs=subs, percs=[16,84], yscale=yscale, ylim=ylim,
plt_kwargs = {'linestyle':'dashed'})
line =plt_median(dic_finish[key]['dNdlog10dp'], station, key,
color=get_case_col(key),
ax=ax,
subs=subs, percs=[16,84], yscale=yscale, ylim=ylim)
lines = lines+line
labels.append(get_nice_name_case(key))
if station=='ZEP':
axins = insert_ZEP(ax)
for key in dic_finish.keys():
plt_perc(dic_finish[key]['dNdlog10dp'], station, key,
color=get_case_col(key),
ax=axins,
subs=subs, percs=[16,84], yscale=yscale, ylim=ylim_ZEP)
for key in dic_finish.keys():
plt_median(dic_finish[key]['dNdlog10dp'], station, key,
color=get_case_col(key),
ax=axins,
subs=subs, percs=[16,84], yscale=yscale, ylim=ylim_ZEP)
if 'dNdlog10dp_sec' in _ds:
plt_median(dic_finish[key]['dNdlog10dp_sec'], station, key,
color=get_case_col(key),
ax=axins,
subs=subs, percs=[16,84], yscale=yscale, ylim=ylim_ZEP)
ax.indicate_inset_zoom(axins,edgecolor='r',)
fix_ins(axins)
if subs=='TOT':
cat = coll_ltr.loc[station, dall_c]
ax.set_title(station)#+' '+cat)
else:
cat = coll_ltr.loc[station, dall_c]
ax.set_title(station+' '+ subs)#+', '+cat)
ax.grid(True)
ax.spines['left'].set_visible(False)
ax.spines['right'].set_visible(False)
ax.spines['top'].set_visible(False)
ax.tick_params(axis=u'y', which=u'both',length=0)
if nr_row>1:
for ii in range(nr_row):
for jj in range(nr_col):
if ii !=nr_row-1:
axs_nf[ii,jj].set_xlabel('')
if jj!=0:
axs_nf[ii,jj].set_ylabel('')
else:
axs_nf[ii,jj].set_ylabel('dN/dlog$_{10}$D [cm$^{-3}$]')
fig.tight_layout()
lgn = fig.legend(lines, labels, bbox_to_anchor=(0, 1., 1, 0.5), # (0, -0.04, 1., .1),
loc='lower center', ncol=4,
# mode="expand",
borderaxespad=0., fontsize=11, frameon=False) # bbox_to_anchor=(0, 1., 1, 0.5))
#fig.legend(lines, labels, bbox_to_anchor=(0,1.,1,0.5),#(0, -0.04, 1., .1),
# loc='lower center', ncol=4,
# #mode="expand",
# borderaxespad=0., fontsize=11, frameon=False)# bbox_to_anchor=(0, 1., 1, 0.5))
fn = paths_plotsave['eusaar'] + '/sizedist/%s_overview_yscale_%s_sec%s_%s.'%(name.replace(' ','-'),yscale, plot_sec, subs)
print(fn)
make_folders(fn)
plt.savefig(fn+'png', bbox_extra_artists=(lgn,), bbox_inches='tight')
plt.savefig(fn+'pdf', bbox_extra_artists=(lgn,), bbox_inches='tight')
plt.show()
# %%
def plt_median(ds, station, label, percs=[5, 95], ax=None, ylim=[1, 8e3], xlim=[5, 5e2],
yscale='linear', color='k', subs='TOT', year='2008', plt_kwargs={}):
if ax is None:
fig, ax= plt.subplots(1)
da = ds.sel(subset=subs)
_da50 = da.sel(station=station, percentile='50th percentile')#
lines = _da50.plot(yscale=yscale,xscale='log', label=label+', %s'%station,
color=color ,
ax=ax,**plt_kwargs)
return lines
def plt_perc(ds, station, label, percs=[5, 95], ax=None, ylim=[1, 8e3], xlim=[5, 5e2],
yscale='linear', color='k', subs='TOT', year='2008'):
if ax is None:
fig, ax= plt.subplots(1)
da = ds.sel(subset=subs)
_da95 = da.sel(station=station, percentile='%sth percentile'%percs[1])
_da5 = da.sel(station=station, percentile='%sth percentile'%percs[0])
ax.fill_between(da.diameter.values, _da5,_da95, alpha=.2 , color = color)
ax.set_ylim(ylim)
ax.set_xlim(xlim)
return
# %%
def plt_median_perc(ds, station, label, percs=[5, 95], ax=None, ylim=[1, 8e3], xlim=[5, 5e2],
                    yscale='linear', color='k', subs='TOT', year='2008'):
    """Plot the median curve plus a shaded percentile band for one station."""
    if ax is None:
        fig, ax = plt.subplots(1)
    da = ds.sel(subset=subs)
    _da50 = da.sel(station=station, percentile='50th percentile')
    lines = _da50.plot(yscale=yscale, xscale='log', label=label + ', %s' % station, color=color, ax=ax)
    _da95 = da.sel(station=station, percentile='%sth percentile' % percs[1])
    _da5 = da.sel(station=station, percentile='%sth percentile' % percs[0])
    ax.fill_between(da.diameter.values, _da5, _da95, alpha=.2, color=color)
    ax.set_ylim(ylim)
    ax.set_xlim(xlim)
    return lines
# %%
def insert_ZEP(ax):
    """Create an inset axis used to zoom in on the ZEP (Zeppelin) panel."""
    axins = ax.inset_axes([0.23, 0.2, 0.67, 0.67])
    return axins
def fix_ins(axins):
    """Style the ZEP inset: strip labels and color its frame and ticks red."""
    axins.set_xticklabels('')
    axins.grid(False)
    axins.yaxis.label.set_color('red')
    axins.tick_params(axis='y', colors='red')
    for side in ['left', 'right', 'bottom', 'top']:
        axins.spines[side].set_color('r')
    axins.set_title('')
    axins.set_xlabel('')
    axins.set_ylabel('')
# %%
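# Mask the model datasets wherever the EUSAAR observations are missing (flagged
# with -1), so medians and percentiles are compared only over points where
# observations exist; the secondary-aerosol variable is carried over unmasked.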
dic_clean={}
_ds_eu = dic_finish['eusaar']
_ds_eu = _ds_eu.where(_ds_eu!=-1).drop('year')
# %%
for key in cases:
dic_clean[key] = dic_finish[key].where(_ds_eu.notnull())
if 'dNdlog10dp_sec' in dic_finish[key]:
dic_clean[key]['dNdlog10dp_sec']=dic_finish[key]['dNdlog10dp_sec']
dic_clean['eusaar'] = _ds_eu
# %%
plot_grid(dic_clean)
# %%
plot_grid(dic_clean,subs = 'SUM', ylim=[0,1e4])
# %%
plot_grid(dic_clean,subs = 'WIN', ylim=[0,5e3])
# %%
plot_grid(dic_clean,subs = 'SPR', ylim=[0,8e3])
# %%
plot_grid(dic_clean,subs = 'AUT', ylim=[0,8e3])
# %%
plot_grid(dic_finish)
# %%
plot_grid(dic_finish,subs = 'SUM', ylim=[0,1e4])
# %%
plot_grid(dic_finish,subs = 'WIN', ylim=[0,8e3])
# %%
plot_grid(dic_finish,subs = 'SPR', ylim=[0,8e3])
# %%
plot_grid(dic_finish,subs = 'AUT', ylim=[0,8e3])
# %%
from oas_dev.constants import collocate_locations
station='ASP'
colors_source = get_cmap_dic(dic_finish.keys())
colors_source['EUSAAR']='k'
for station in dic_finish[cases[0]].coords['station'].values:
fig, axs = plt.subplots(2,2)
seasons = ['SPR', 'SUM','AUT','WIN']
for seas, ax in zip(seasons, axs.flatten()):
for key in dic_finish.keys():
plt_median_perc(dic_clean[key]['dNdlog10dp'], station, key,
color=get_case_col(key),
ax=ax,
subs=seas, percs=[16,84], yscale='log', ylim=[10,1e4])
ax.set_title(station+', '+ seas)
plt.legend()
plt.show()
# %%
from oas_dev.constants import collocate_locations
station='ASP'
colors_source = get_cmap_dic(dic_finish.keys())
colors_source['EUSAAR']='k'
for station in dic_finish[cases[0]].coords['station'].values:
fig, axs = plt.subplots(2,2)
seasons = ['SPR', 'SUM','AUT','WIN']
for seas, ax in zip(seasons, axs.flatten()):
for key in dic_finish.keys():
plt_median_perc(dic_finish[key]['dNdlog10dp'], station, key,
color=get_case_col(key),
ax=ax,
subs=seas, percs=[16,84], yscale='linear', ylim=[10,1e4])
ax.set_title(station+', '+ seas)
plt.legend()
plt.show()
# %%
fig, axs = plt.subplots(1,2, figsize=[10,5])
period = ['DAY', 'NIG']
for seas, ax in zip(period, axs.flatten()):
for key in dic_finish.keys():
dic_finish[key]['dNdlog10dp'].median('station').sel(subset=seas, percentile='50th percentile').plot(color=get_case_col(key), xscale='log', ax=ax, label=key)
    ax.set_title(seas)
plt.legend()
plt.show()
# %%
fig, axs = plt.subplots(1,4, figsize=[20,5])
period = seasons
for seas, ax in zip(period, axs.flatten()):
for key in dic_finish.keys():
for station in dic_finish[key].station.values:
_plt_da=dic_finish[key]['dNdlog10dp'].sel(subset=seas,station=station, percentile='50th percentile')#
_plt_da.plot(color=get_case_col(key), xscale='log', ax=ax, label=key,
linewidth=.5, alpha=.8)
    ax.set_title(seas)
ax.set_xlim([4,800])
#plt.legend()
plt.show()
# %%
for station in dic_finish[cases[0]].coords['station'].values:
print(station)
fig, axs = plt.subplots(1,2)
period = ['DAY', 'NIG']
for seas, ax in zip(period, axs.flatten()):
for key in dic_finish.keys():
plt_median_perc(dic_finish[key]['dNdlog10dp'], station, key,
color=get_case_col(key),
ax=ax,
subs=seas, percs=[16,84], yscale='linear')
ax.set_title(station+', '+ seas)
plt.legend()
plt.show()
|
[
"samebr@gmail.com"
] |
samebr@gmail.com
|
223c76129aa724394fe8bc8c04f4979601d194fe
|
e8d589e95dc16c386f6af2ab09da2742123a5e0e
|
/selenium_test/cahealth_parameterized.py
|
a13277d8033063e59f434aa1f1dea9e498ab9f9f
|
[] |
no_license
|
moyosoreS/selenium_crossbrowsertests
|
ccab8664489d34e76a6d79950969f00d0a36523a
|
11a1c9f25e0eb6c6c4770985cdad6a6cf10f5527
|
refs/heads/master
| 2020-09-27T00:09:14.949495
| 2016-08-24T12:48:00
| 2016-08-24T12:48:00
| 66,463,236
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,923
|
py
|
import sys
__author__ = 'Desmond'
# Please visit http://selenium-python.readthedocs.org/en/latest/index.html for detailed installation and instructions
import unittest
import csv
from selenium import webdriver
class SeleniumCBT(unittest.TestCase):
def setUp(self):
caps = {}
caps['name'] = 'Selenium Test Example'
caps['build'] = '1.0'
caps['browser_api_name'] = 'IE10'
caps['os_api_name'] = 'Win7x64-C2'
caps['screen_resolution'] = '1024x768'
caps['record_video'] = 'true'
caps['record_network'] = 'true'
caps['record_snapshot'] = 'false'
# start the remote browser on our server
self.driver = webdriver.Remote(
desired_capabilities=caps,
command_executor="your crossbrowser server"
)
self.driver.implicitly_wait(20)
def test_CBT(self):
# maximize the window - DESKTOPS ONLY
#print('Maximizing window')
#self.driver.maximize_window()
try:
filename = 'test_cases.csv'
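            # Expected test_cases.csv layout, inferred from the indices used in
            # the loop below (one scenario per row; column names are illustrative):
            # category,location,search_num,desired_hospital,hospital_rating,blob_rating,cost,cost_breakdown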
with open(filename) as cases:
reader = csv.reader(cases)
for row in reader:
print(row)
category = row[0]
location = row[1]
search_num = row[2]
desired_hospital = row[3]
hospital_rating = row[4]
blob_rating = row[5]
cost = row[6]
cost_breakdown = row[7]
print(
category + ',' + location + ',' + search_num + ',' + desired_hospital + ',' + hospital_rating + ','
+ blob_rating + ',' + cost + ',' + cost_breakdown)
#load the page url
print('Loading Url')
self.driver.get('http://www.cahealthcarecompare.org/search.jsp')
#check the title
print('Checking title')
self.assertTrue('Look-up California Hospital/Medical Group Cost & Quality' in self.driver.title)
                    # Locate the category control by id (the CSV value doubles
                    # as the element id), then run the search. A distinct name
                    # avoids shadowing the `category` string read from the CSV.
                    category_radio = self.driver.find_element_by_id(category)
                    textBox = self.driver.find_element_by_id('sterm')
                    button = self.driver.find_element_by_name('submit')
                    category_radio.click()
                    textBox.clear()
                    textBox.send_keys(location)
                    button.click()
desired_hospital_link = self.driver.find_element_by_link_text(desired_hospital)
desired_hospital_link.click()
except csv.Error as e:
sys.exit('file %s, line %d: %s' % (filename, reader.line_num, e))
def tearDown(self):
print("Done with session %s" % self.driver.session_id)
self.driver.quit()
if __name__ == '__main__':
unittest.main()
|
[
"sogaolumoyo@gmail.com"
] |
sogaolumoyo@gmail.com
|
9d7cbfab03ab9199772ad7b3dddca430afc55551
|
94226c24eb31fff29af3403b7b0672b032277b23
|
/settings/development.py
|
42f1ee7423e8eb8d862817056fa638d530c98be8
|
[] |
no_license
|
vaibhavmathur91/zsavs
|
deba23511023b0efe3c1c2d8a9b2faef93c3625f
|
a7040d218bc4cce06657cefa6d57137ec38f502e
|
refs/heads/master
| 2020-03-16T11:53:09.865619
| 2018-05-08T19:28:28
| 2018-05-08T19:28:28
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 612
|
py
|
# Import base settings
from .base import *
BASE_URL = 'http://127.0.0.1:8000'
DEBUG = True
# Secret key (default value is set in development
# settings so that the .env file is not necessary)
SECRET_KEY = os.getenv('SECRET_KEY', 'fruh-%ut#q*2av0@d+vf!*gkc=vbnxwox^h6-a-$9avh32z+ya')
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Email
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# Static files
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
|
[
"robin@bisping.ch"
] |
robin@bisping.ch
|
02791ea231590cbd94794e774adb135d03ae64fb
|
97ad16e12ff2425a0d78b68a254eb93438b07688
|
/countBST using arr elements as head.py
|
b779ff7a5b78a8ddb84013abb7a19e0291d685e1
|
[] |
no_license
|
poojithayadavalli/Trees
|
54a3878646c255793c28ec4de0632ef03bbf5982
|
c3fd45860b85a57ffac2c5d903ba3d2aadde3630
|
refs/heads/master
| 2022-11-27T20:51:54.934823
| 2020-08-09T11:51:54
| 2020-08-09T11:51:54
| 282,373,911
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,337
|
py
|
"""
Rohan is learning about Binary search Trees.He went through a task as follows:
Given an array arr[] of N integers. The task is to count the number of Binary Search Tree can be made using each node of element in arr[] as a root node.
Input:
Firstline contains size of array N
Next line contains elements of array
Output:
print the number of bsts possible with head as element
Example1:
Input:
3
20 10 30
Output:
1 2 2
Example 2:
Input:
5
1 2 3 4 5
Output:
14 5 4 5 14
Hint:For every element in arr[]:
Count the number of element(say c1) less than the current node.
Count the number of element(say c2) greater than the current node.
Then total number of Binary Search Tree(BST) can be formed using current element as a root node
is equals to the product of total number of BST formed using c1 elements and otal number of BST formed using c2 elements.
"""
def fact(n):
res = 1
for i in range(1, n + 1):
res *= i
return res
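# nth Catalan number, C_n = (2n)! / (n! * (n+1)!): the number of distinct BSTs
# that can be built from n distinct keys.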
def catalan(n):
return fact(2 * n)//(fact(n)*fact(n + 1))
n = int(input())
arr = list(map(int, input().split()))
for k in range(n):
    s = 0
    for i in range(n):
        if arr[i] < arr[k]:
            s += 1
    catalan_leftBST = catalan(s)            # BSTs over the s smaller elements
    catalan_rightBST = catalan(n - s - 1)   # BSTs over the n - s - 1 larger elements
    totalBST = catalan_rightBST * catalan_leftBST
    print(totalBST, end=" ")
|
[
"noreply@github.com"
] |
noreply@github.com
|
6cb49efe8e184a53097740ca49c6d05bb5dc2a54
|
87165e7526a5466a91f779dbf7e4b8d008ca73df
|
/api_serve/app/celery/tasks.py
|
6ca9ef29c81a18f9105fd63bec217f7fd658adb6
|
[] |
no_license
|
gocode2016/wx_web_api
|
d2f5ca3970c165c168d187df03680412f8a388b6
|
bf039fbe3b29e47e9aa2db47c57cc043eb0c2f47
|
refs/heads/master
| 2020-05-03T05:18:55.276899
| 2019-01-12T03:13:53
| 2019-01-12T03:13:53
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 332
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author : classmate Lin.
# @Email : 406728295@qq.com
# @File : tasks.py
# @Time : 18-10-8 2:42 PM
'''
Asynchronous tasks.
'''
from __future__ import absolute_import, unicode_literals
from app.celery.celery import cel_app
@cel_app.task
def say_hi():
print('hello world')
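# Usage from application code (requires a running broker and a Celery worker):
#   from app.celery.tasks import say_hi
#   say_hi.delay()  # enqueue the task asynchronously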
|
[
"ly@lydeMacBook-Air.local"
] |
ly@lydeMacBook-Air.local
|
3e976a9ea0c5437ca957b93d5cff70c8791ca51c
|
8bff5bbfa082602a05d96fc53ea52c3b77a0e89c
|
/store/views/checkout.py
|
aa4394cecda9cfdda4436c4abff7e1edfcd495b2
|
[] |
no_license
|
Patelrahul4884/Eshop
|
98d3956c6213dbb621e08a6e5aef2c593a4abda4
|
29ced3870ab4279986c13787013335b0eca8fb33
|
refs/heads/master
| 2022-12-25T05:49:07.115490
| 2020-10-02T12:34:46
| 2020-10-02T12:34:46
| 288,974,716
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 823
|
py
|
from django.shortcuts import render, redirect
from django.contrib.auth.hashers import check_password
from django.views import View
from store.models.customer import Customer
from store.models.product import Product
from store.models.order import Order
class Checkout(View):
    def post(self, request):
        address = request.POST.get('address')
        phone = request.POST.get('phone')
        customer = request.session.get('customer')
        cart = request.session.get('cart')
        products = Product.get_products_by_id(list(cart.keys()))
        for product in products:
            order = Order(product=product,
                          customer=Customer(id=customer),
                          quantity=cart.get(str(product.id)),
                          price=product.price,
                          address=address,
                          phone=phone)
            order.save()
        request.session['cart'] = {}
        return redirect('orders')
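# Hypothetical sketch of the helper this view relies on; the real method lives
# in store/models/product.py and may differ:
#
#   @staticmethod
#   def get_products_by_id(ids):
#       return Product.objects.filter(id__in=ids)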
|
[
"patelr4142@gmail.com"
] |
patelr4142@gmail.com
|
a2bf0550e08af94a953a64917f25bba007a88e24
|
ffc5ba59967913b6210c50319cd8fa7a1dc5772c
|
/genkeras_stack.py
|
71c3f27c83017a8df62b6461822f715097be73f1
|
[] |
no_license
|
hoihui/allstate
|
62c8f39ae3326cea0a7efbda37ce5b960de7bf5a
|
add457a1b4d6e734653f39810acfe886bc5afe83
|
refs/heads/master
| 2021-05-11T19:40:31.319064
| 2018-01-17T20:15:23
| 2018-01-17T20:15:23
| 117,885,769
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,742
|
py
|
from data import *
thislevel=load_save_4
base= 'nn4'
import os,time,argparse,random,sys
parser = argparse.ArgumentParser()
parser.add_argument('-gpu', type=int, default=0)
parser.add_argument('-acq', type=str, default=random.choice(['poi']))
parser.add_argument("--verbose", action="store_true")
parser.add_argument("--debug", action="store_true")
parser.add_argument('-init', type=int, default=1)
parser.add_argument('-iter', type=int, default=2)
args = parser.parse_args()
maxepoch=1000 if not args.debug else 1
batchSize=512 if not args.debug else 1024
patience=10 #10 for early stopping
shift=200
if args.gpu<0: os.environ["THEANO_FLAGS"] = "base_compiledir=/tmp/hyhui/.theano,device=cpu,floatX=float32"
else: os.environ["THEANO_FLAGS"] = "base_compiledir=/tmp/hyhui/.theano,device=gpu%d,floatX=float32,force_device=True" % (args.gpu)
if args.gpu>0: time.sleep(30*args.gpu)
try:import theano
except Exception,err:
print err
import sys
argv = sys.argv
if '-gpu' not in argv:
argv.extend(['-gpu','1'])
else:
argv[argv.index('-gpu')+1]=str(int(argv[argv.index('-gpu')+1])+1) if 'Bad device number' not in str(err) else '0'
print 'restarting with ', argv
os.execv(sys.executable,[sys.executable]+argv)
exit()
from keras.models import Sequential, load_model
from keras.callbacks import EarlyStopping,ModelCheckpoint
from keras.layers import Dense, Dropout, Activation
from keras.layers.normalization import BatchNormalization
from keras.layers.advanced_activations import PReLU
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler
X0,y,X0_test,_ = load_cat1_cont()
trainthis,testthis = thislevel()
ntrain = trainthis.shape[0]
train_testthis = pd.concat((trainthis,testthis)).reset_index(drop=True)
scaler = StandardScaler()
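# Standardize the log of the (shifted, clipped-positive) level-1 predictions;
# the +shift and clip keep the log argument strictly positive.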
Xdense2 = scaler.fit_transform(np.log(np.clip(train_testthis.values+shift,1,np.inf)))
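# Greedy forward ordering of the level-1 model columns: start from the single
# best model (lowest MAE against y), then repeatedly add the column whose
# maximum correlation with the already-chosen set is lowest, penalized by its
# own normalized MAE. Columns are later truncated via params['ncols'].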
scores0=trainthis.apply(lambda x:mean_absolute_error(x,y))
scores=(scores0-scores0.min())/scores0.min()
corr=trainthis.corr()
idx=[]
idx.append(np.argsort(scores.values)[0])
while len(idx)<trainthis.shape[1]:
cur_scores=[]
for i in xrange(trainthis.shape[1]):
if i not in idx:
maxcorr=corr.values[idx,i].max()
scorecorr=1-maxcorr
cur_scores.append( (scorecorr-scores[i],i) ) #max corr and min scores wrt best one
idx.append(max(cur_scores)[1])
Xdense2 = Xdense2[:,idx]
print trainthis.columns[idx]
p_path = opath+base+".csv"
print base, args.gpu
if True: #parameters initialization
discreteP=['hiddenSize1','hiddenSize2','ncols']
p_init= {'hiddenSize1':400,'dropOut1':.4,
'hiddenSize2':200,'dropOut2':.2,
}
p_range={'hiddenSize1': (10,500),'dropOut1': (.01,.99),
'hiddenSize2': (2,250), 'dropOut2': (.01,.99),
}
p_init['ncols'] =Xdense2.shape[1]/2
p_range['ncols']=(2,Xdense2.shape[1])
if True: #data,y,make_model,batch_generators preparation
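    # Fit on log(y + shift); yback undoes the transform so MAE is scored in
    # the original target units.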
yforw=lambda x:np.log(x+shift)
yback=lambda x:np.exp(x)-shift
def make_model(p):
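        # MLP: Dense -> BatchNorm -> PReLU -> Dropout per hidden layer, then a
        # single linear output trained with MAE loss and the Adadelta optimizer.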
dropOuts = filter(None,[p.get('dropOut1'),p.get('dropOut2'),p.get('dropOut3')])
hiddenSizes = filter(None,[p.get('hiddenSize1'),p.get('hiddenSize2'),p.get('hiddenSize3')])
model = Sequential()
input_dim = p['ncols']
for hiddenSize,dropOut in zip(hiddenSizes,dropOuts):
model.add(Dense(hiddenSize, input_dim = input_dim,init='he_normal'))
model.add(BatchNormalization())
model.add(PReLU())
model.add(Dropout(float(dropOut)))
input_dim = hiddenSize
model.add(Dense(1, init = 'he_normal'))
model.compile(loss = 'mae', optimizer = 'adadelta')
return model
if True: #optimization
weightsfile='/tmp/'+str(time.time()).replace(".","")+'weights.h5' #use time for weightsfile to prevent collision
def score(**params):
for k in params.keys():
if k in discreteP:
params[k]=int(params[k])
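        # Reject architectures whose hidden layers do not shrink monotonically:
        # return a heavily penalized, slightly noisy score so the Bayesian
        # optimizer steers away without collapsing its surrogate model.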
if (params.get('dropOut3') and not (params['hiddenSize1']>params['hiddenSize2'] and params['hiddenSize2']>params['hiddenSize3'])) or\
(not params['hiddenSize1']>params['hiddenSize2']):
return -3000+10*np.random.normal()
# X = scipy.sparse.hstack((X0,Xdense2[:ntrain,:params['ncols']]),format='csr')
X = Xdense2[:ntrain,:params['ncols']]
# X_test = scipy.sparse.hstack((X0_test,Xdense2[ntrain:,:params['ncols']]),format='csr')
p_all=dict(p_init)
p_all.update(params)
y_=yforw(y)
s=[]
modelfile='/tmp/m'+str(time.time()).replace(".","")+'model.h5'
model = make_model(p_all)
model.save(modelfile)
for train_idx, vales_idx in kftune.split(X): #########################
val_idx,es_idx=np.array_split(vales_idx,2)
X_train,y_train_=X[train_idx],y_[train_idx]
X_val ,y_val_ =X[val_idx] ,y_[val_idx]
X_es ,y_es_ =X[es_idx] ,y_[es_idx]
if scipy.sparse.issparse(X_es): X_es =X_es.toarray()
try: model = load_model(modelfile)
except: model = make_model(p_all)
es=EarlyStopping(monitor='val_loss', patience=patience, verbose=args.verbose)
mc=ModelCheckpoint(weightsfile,monitor='val_loss',save_best_only=True,save_weights_only=True,verbose=0)
X_train_ = X_train
X_es_ = X_es
X_val_ = X_val
hist=model.fit(X_train_,y_train_,128,nb_epoch=maxepoch,
callbacks=[es,mc],validation_data=(X_es_,y_es_), verbose=args.verbose)
time.sleep(5)
try:
model.load_weights(weightsfile)
os.remove(weightsfile)
except:pass
predy_val_=model.predict(X_val_).ravel()
s.append(mean_absolute_error(yback(y_val_),yback(predy_val_)))
if args.verbose:
print 'epochs:', hist.history['val_loss'].index(min(hist.history['val_loss']))
print 'test loss:', s[-1]
s = -np.average(s)
return s
while True:
init_points=args.init
n_iter=args.iter
p_prev,p_range_ = load_save_config(p_path,default=p_init)
bo = BayesianOptimization(score, p_range)
if p_range_: p_range=p_range_
if p_prev: bo.initialize(p_prev)
else: init_points,n_iter=1,0
bo.maximize(init_points=init_points, n_iter=n_iter, acq=args.acq)
print "best: ", bo.res['max']
load_save_config(p_path,default=p_init,bo=bo,p_prev_before_run=p_prev)
break
os.execv(sys.executable,[sys.executable]+sys.argv)
# os.execv(sys.executable,[sys.executable]+[sys.argv[0].replace('gen','make')])
|
[
"hyhui@sun.phys.vt.edu"
] |
hyhui@sun.phys.vt.edu
|