Dataset columns (⌀ marks nullable fields):

| field | dtype | length / range |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 5 to 2.06M |
| ext | string | 10 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 3 to 248 |
| max_stars_repo_name | string | length 5 to 125 |
| max_stars_repo_head_hexsha | string | length 40 to 78 |
| max_stars_repo_licenses | list | length 1 to 10 |
| max_stars_count | int64 | 1 to 191k ⌀ |
| max_stars_repo_stars_event_min_datetime | string | length 24 ⌀ |
| max_stars_repo_stars_event_max_datetime | string | length 24 ⌀ |
| max_issues_repo_path | string | length 3 to 248 |
| max_issues_repo_name | string | length 5 to 125 |
| max_issues_repo_head_hexsha | string | length 40 to 78 |
| max_issues_repo_licenses | list | length 1 to 10 |
| max_issues_count | int64 | 1 to 67k ⌀ |
| max_issues_repo_issues_event_min_datetime | string | length 24 ⌀ |
| max_issues_repo_issues_event_max_datetime | string | length 24 ⌀ |
| max_forks_repo_path | string | length 3 to 248 |
| max_forks_repo_name | string | length 5 to 125 |
| max_forks_repo_head_hexsha | string | length 40 to 78 |
| max_forks_repo_licenses | list | length 1 to 10 |
| max_forks_count | int64 | 1 to 105k ⌀ |
| max_forks_repo_forks_event_min_datetime | string | length 24 ⌀ |
| max_forks_repo_forks_event_max_datetime | string | length 24 ⌀ |
| content | string | length 5 to 2.06M |
| avg_line_length | float64 | 1 to 1.02M |
| max_line_length | int64 | 3 to 1.03M |
| alphanum_fraction | float64 | 0 to 1 |
| count_classes | int64 | 0 to 1.6M |
| score_classes | float64 | 0 to 1 |
| count_generators | int64 | 0 to 651k |
| score_generators | float64 | 0 to 1 |
| count_decorators | int64 | 0 to 990k |
| score_decorators | float64 | 0 to 1 |
| count_async_functions | int64 | 0 to 235k |
| score_async_functions | float64 | 0 to 1 |
| count_documentation | int64 | 0 to 1.04M |
| score_documentation | float64 | 0 to 1 |
a26266a4fdcfcd0c96232392fec99b6244059514
| 2,008
|
py
|
Python
|
pythonVersion/interpolateMetm.py
|
oradules/Deconvolution_short_long
|
730a55a257a376e2b347c0d2453347c2c463ab17
|
[
"BSD-3-Clause"
] | 1
|
2021-05-26T12:41:45.000Z
|
2021-05-26T12:41:45.000Z
|
pythonVersion/interpolateMetm.py
|
oradules/Deconvolution_short_long
|
730a55a257a376e2b347c0d2453347c2c463ab17
|
[
"BSD-3-Clause"
] | null | null | null |
pythonVersion/interpolateMetm.py
|
oradules/Deconvolution_short_long
|
730a55a257a376e2b347c0d2453347c2c463ab17
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Oct 11 21:36:32 2021
@author: rachel
"""
import numpy as np
from scipy import interpolate
import matplotlib.pyplot as plt
def interpolateMetm(uxsg, ufsg, uxl, ufl, m, M, fact1, fact2):
#%%%%% interpolate on [m,M];
xss = uxsg
yss = ufsg
src = [0,0]; #% (0,0)
dest = [max(m,M),1]; #% (m,1)
xss = np.hstack((xss,dest[0]))
yss = np.hstack((yss,dest[1]))
t = np.linspace(0, 1, np.size(xss))
mn=5000
tt = np.linspace(0, 1, mn);
# xx = interpolate.interp1d(t,xss, kind = 'cubic')(tt)
# yy = interpolate.interp1d(t,yss, kind = 'cubic')(tt)
xx = interpolate.PchipInterpolator(t,xss)(tt)
yy = interpolate.PchipInterpolator(t,yss)(tt)
# plt.plot(xss,yss,'ro');
# plt.plot(xx, yy, 'b', 'LineWidth', 1.5);
xll = uxl
yll = ufl;
t = np.linspace(0, 1, np.size(xll))
mn=5000
ttl = np.linspace(0, 1, mn);
# xxl = interpolate.interp1d(t,xll, kind = 'cubic')(ttl)
# yyl = interpolate.interp1d(t,yll, kind = 'cubic')(ttl)
xxl = interpolate.PchipInterpolator(t,xll)(ttl)
yyl = interpolate.PchipInterpolator(t,yll)(ttl)
# plt.plot(xss,yss,'ro')
# plt.plot(xxl, yyl, 'b', 'LineWidth', 1.5);
# plt.plot(xll,yll, 'ro');
# plt.plot(xx, yy, 'g', 'LineWidth', 1.5);
M_ = max(uxsg)/fact2;
m_ = min(uxl[1:])*fact1;
m = min(m_,M_); M = max(m_,M_);
inddm = np.transpose(np.logical_and([xxl>m],[xxl<M])[0])
y2 = yyl[inddm];xt = xxl[inddm]
indxx1 = np.where(xx>=m)[0];
indxx1 = indxx1[0]-1;
indxx2 = np.where(xx<=M)[0];
indxx2 = indxx2[-1];
ytest = yy[indxx1:indxx2+1];
x1 = xx[indxx1:indxx2+1];
#[ux1,ux1i] = unique(x1);
ux1, ux1i = np.unique(x1, return_index=True)
x1 = x1[ux1i];
ytest = ytest[ux1i];
#y1 = interpolate.interp1d(x1, ytest, kind = 'cubic')(xt)
y1 = interpolate.PchipInterpolator(x1, ytest)(xt)
return y1, y2
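# A minimal smoke test with synthetic curves; the shapes, values and the
# fact1/fact2 choices below are illustrative assumptions only.
if __name__ == '__main__':
    uxsg = np.linspace(0.0, 10.0, 20)   # x values of the "short" curve
    ufsg = uxsg / 10.0                  # y values of the "short" curve
    uxl = np.linspace(0.5, 12.0, 30)    # x values of the "long" curve
    ufl = uxl / 12.0                    # y values of the "long" curve
    y1, y2 = interpolateMetm(uxsg, ufsg, uxl, ufl, m=0.0, M=10.0, fact1=1.0, fact2=2.0)
    print(y1.shape, y2.shape)           # both curves resampled onto the common overlap range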
| 28.685714
| 63
| 0.564741
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 686
| 0.341633
|
a263c93196bd64932fb6ed1c2feb12221c53c151
| 3,036
|
py
|
Python
|
mad_scientist_lab/histogram.py
|
wusui/squidly_dorkle
|
45992db8b2a9f6fa1264549ad88c25a8989af5c0
|
[
"MIT"
] | null | null | null |
mad_scientist_lab/histogram.py
|
wusui/squidly_dorkle
|
45992db8b2a9f6fa1264549ad88c25a8989af5c0
|
[
"MIT"
] | null | null | null |
mad_scientist_lab/histogram.py
|
wusui/squidly_dorkle
|
45992db8b2a9f6fa1264549ad88c25a8989af5c0
|
[
"MIT"
] | null | null | null |
import os
import sys
import codecs
import itertools
sys.stdout = codecs.getwriter('utf=8')(sys.stdout.buffer)
sys.stderr = codecs.getwriter('utf=8')(sys.stderr.buffer)
fname = os.sep.join(["D:", "Users", "Warren", "python3",
"squirrels_on_caffeine", "src", "sedecordle",
"answers.txt"])
with open(fname, "r", encoding="UTF-8") as rfile:
glist = rfile.read()
def add_to(word, hist):
for letter in word:
if letter in hist:
hist[letter] += 1
else:
hist[letter] = 1
histogram_full = {}
histogram_once = {}
for word in glist.split():
f_word = sorted(word)
o_word = list(set(f_word))
add_to(f_word, histogram_full)
add_to(o_word, histogram_once)
print(dict(sorted(histogram_full.items(), key=lambda item: item[1])))
print(dict(sorted(histogram_once.items(), key=lambda item: item[1])))
def test_word(word, glist):
sw1 = ''.join(sorted(word))
for guess in glist.split():
sw2 = ''.join(sorted(guess))
if sw1 == sw2:
print(guess)
ok_list = []
for word in glist.split():
bad = False
for tlet in 'jqvwxz':
if tlet in word:
bad = True
break
if bad:
continue
if len(list(set(word))) != 5:
continue
ok_list.append(word)
print(ok_list)
print(len(ok_list), len(glist))
def get_2x5_wlist(ok_list, lset, hbl):
ret_list = []
for word in ok_list:
bad = False
for tlet in word:
if tlet not in lset:
bad = True
break
if bad:
continue
#print(word)
other = [hbl]
for letr in lset:
if letr not in word:
other.append(letr)
#print(other)
for word2 in ok_list:
bad2 = False
for tlet2 in word2:
if tlet2 not in other:
bad2 = True
break
if bad2:
continue
ret_list.append([word, word2])
return ret_list
OKLETS = 'bcdefghiklmnoprstuy'
acombos = list(itertools.combinations(OKLETS, 9))
print(acombos[50000])
lset = list(acombos[50000])
# print(get_2x5_wlist(ok_list, lset, 'a'))
out_str = []
for entry in acombos:
ret_list = get_2x5_wlist(ok_list, list(entry), 'a')
if ret_list:
nstr = ret_list[0][0] + ret_list[0][1]
str2 = []
for let2 in OKLETS:
if let2 not in nstr:
str2.append(let2)
rlist2 = get_2x5_wlist(ok_list, str2[1:], str2[0])
if rlist2:
print(ret_list, " pairs with ", rlist2)
for p1 in ret_list:
for p2 in rlist2:
out_str += [p1 + p2]
txtlist = []
for entry in out_str:
s = ", ".join(sorted(entry))
txtlist.append(s)
slist = list(set(sorted(txtlist)))
ostr = "\n".join(slist)
with open("wlist20.txt", "w") as wlist:
wlist.write(ostr)
| 26.867257
| 73
| 0.54249
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 246
| 0.081028
|
a265970c825b69a6bcc7be605b442dbeced8128f
| 9,491
|
py
|
Python
|
app/jobHistory/migrations/0003_auto_20190804_1403.py
|
stephengtuggy/job-history
|
5c4931ff7b594494a687da0253262c7fc46f8b13
|
[
"MIT"
] | 2
|
2020-01-18T00:39:35.000Z
|
2020-01-18T02:03:26.000Z
|
app/jobHistory/migrations/0003_auto_20190804_1403.py
|
stephengtuggy/job-history
|
5c4931ff7b594494a687da0253262c7fc46f8b13
|
[
"MIT"
] | 18
|
2020-08-07T23:22:37.000Z
|
2021-06-10T18:38:42.000Z
|
app/jobHistory/migrations/0003_auto_20190804_1403.py
|
stephengtuggy/job-history
|
5c4931ff7b594494a687da0253262c7fc46f8b13
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.2.4 on 2019-08-04 21:03
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('jobHistory', '0002_auto_20190106_0202'),
]
operations = [
migrations.AlterField(
model_name='employer',
name='city',
field=models.CharField(blank=True, max_length=200, verbose_name='City'),
),
migrations.AlterField(
model_name='employer',
name='country',
field=models.CharField(blank=True, max_length=200, verbose_name='Country'),
),
migrations.AlterField(
model_name='employer',
name='county_or_parish',
field=models.CharField(blank=True, max_length=200, verbose_name='County or Parish'),
),
migrations.AlterField(
model_name='employer',
name='email',
field=models.EmailField(blank=True, max_length=254, verbose_name='Email'),
),
migrations.AlterField(
model_name='employer',
name='industry',
field=models.CharField(blank=True, max_length=254, verbose_name='Industry'),
),
migrations.AlterField(
model_name='employer',
name='long_name',
field=models.CharField(max_length=254, null=True, unique=True, verbose_name='Long Name'),
),
migrations.AlterField(
model_name='employer',
name='phone',
field=models.CharField(blank=True, max_length=50, verbose_name='Phone'),
),
migrations.AlterField(
model_name='employer',
name='short_name',
field=models.CharField(max_length=50, unique=True, verbose_name='Short Name'),
),
migrations.AlterField(
model_name='employer',
name='state_or_province',
field=models.CharField(blank=True, max_length=200, verbose_name='State or Province'),
),
migrations.AlterField(
model_name='employer',
name='zip_or_postal_code',
field=models.CharField(blank=True, max_length=50, verbose_name='Zip Code or Postal Code'),
),
migrations.AlterField(
model_name='jobtimeperiod',
name='contributions_and_accomplishments',
field=models.TextField(blank=True, verbose_name='Contributions and Accomplishments'),
),
migrations.AlterField(
model_name='jobtimeperiod',
name='end_day',
field=models.PositiveSmallIntegerField(null=True, verbose_name='End Day'),
),
migrations.AlterField(
model_name='jobtimeperiod',
name='end_month',
field=models.PositiveSmallIntegerField(null=True, verbose_name='End Month'),
),
migrations.AlterField(
model_name='jobtimeperiod',
name='end_year',
field=models.PositiveIntegerField(null=True, verbose_name='End Year'),
),
migrations.AlterField(
model_name='jobtimeperiod',
name='ending_pay',
field=models.CharField(max_length=50, verbose_name='Ending Pay'),
),
migrations.AlterField(
model_name='jobtimeperiod',
name='hours_per_week',
field=models.PositiveSmallIntegerField(null=True, verbose_name='Hours per Week'),
),
migrations.AlterField(
model_name='jobtimeperiod',
name='is_current_position',
field=models.BooleanField(default=True, verbose_name='Current Position?'),
),
migrations.AlterField(
model_name='jobtimeperiod',
name='position',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='jobHistory.Position', verbose_name='Position'),
),
migrations.AlterField(
model_name='jobtimeperiod',
name='start_day',
field=models.PositiveSmallIntegerField(null=True, verbose_name='Start Day'),
),
migrations.AlterField(
model_name='jobtimeperiod',
name='start_month',
field=models.PositiveSmallIntegerField(null=True, verbose_name='Start Month'),
),
migrations.AlterField(
model_name='jobtimeperiod',
name='start_year',
field=models.PositiveIntegerField(verbose_name='Start Year'),
),
migrations.AlterField(
model_name='jobtimeperiod',
name='starting_pay',
field=models.CharField(max_length=50, verbose_name='Starting Pay'),
),
migrations.AlterField(
model_name='jobtimeperiod',
name='work_city',
field=models.CharField(blank=True, max_length=200, verbose_name='Work City'),
),
migrations.AlterField(
model_name='jobtimeperiod',
name='work_country',
field=models.CharField(blank=True, max_length=200, verbose_name='Work Country'),
),
migrations.AlterField(
model_name='jobtimeperiod',
name='work_county_or_parish',
field=models.CharField(blank=True, max_length=200, verbose_name='Work County or Parish'),
),
migrations.AlterField(
model_name='jobtimeperiod',
name='work_state_or_province',
field=models.CharField(blank=True, max_length=200, verbose_name='Work State or Province'),
),
migrations.AlterField(
model_name='jobtimeperiod',
name='work_zip_or_postal_code',
field=models.CharField(blank=True, max_length=50, verbose_name='Work Zip Code or Postal Code'),
),
migrations.AlterField(
model_name='position',
name='can_contact',
field=models.BooleanField(verbose_name='Can Contact?'),
),
migrations.AlterField(
model_name='position',
name='contributions_and_accomplishments',
field=models.TextField(blank=True, verbose_name='Contributions and Accomplishments'),
),
migrations.AlterField(
model_name='position',
name='employer',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='jobHistory.Employer', verbose_name='Employer'),
),
migrations.AlterField(
model_name='position',
name='responsibilities',
field=models.TextField(blank=True, verbose_name='Responsibilities'),
),
migrations.AlterField(
model_name='position',
name='supervisor_city',
field=models.CharField(blank=True, max_length=200, verbose_name='Supervisor City'),
),
migrations.AlterField(
model_name='position',
name='supervisor_country',
field=models.CharField(blank=True, max_length=200, verbose_name='Supervisor Country'),
),
migrations.AlterField(
model_name='position',
name='supervisor_county_or_parish',
field=models.CharField(blank=True, max_length=200, verbose_name='Supervisor County or Parish'),
),
migrations.AlterField(
model_name='position',
name='supervisor_email',
field=models.EmailField(blank=True, max_length=254, verbose_name='Supervisor Email'),
),
migrations.AlterField(
model_name='position',
name='supervisor_given_name',
field=models.CharField(max_length=200, verbose_name='Supervisor Given Name'),
),
migrations.AlterField(
model_name='position',
name='supervisor_middle_name',
field=models.CharField(blank=True, max_length=200, verbose_name='Supervisor Middle Name'),
),
migrations.AlterField(
model_name='position',
name='supervisor_name_prefix',
field=models.CharField(blank=True, max_length=50, verbose_name='Supervisor Name Prefix'),
),
migrations.AlterField(
model_name='position',
name='supervisor_name_suffix',
field=models.CharField(blank=True, max_length=50, verbose_name='Supervisor Name Suffix'),
),
migrations.AlterField(
model_name='position',
name='supervisor_phone',
field=models.CharField(blank=True, max_length=50, verbose_name='Supervisor Phone'),
),
migrations.AlterField(
model_name='position',
name='supervisor_state_or_province',
field=models.CharField(blank=True, max_length=200, verbose_name='Supervisor State or Province'),
),
migrations.AlterField(
model_name='position',
name='supervisor_surname',
field=models.CharField(max_length=200, verbose_name='Supervisor Surname'),
),
migrations.AlterField(
model_name='position',
name='supervisor_zip_or_postal_code',
field=models.CharField(blank=True, max_length=50, verbose_name='Supervisor Zip Code or Postal Code'),
),
migrations.AlterField(
model_name='position',
name='title',
field=models.CharField(max_length=200, verbose_name='Title'),
),
]
| 40.387234
| 132
| 0.603624
| 9,365
| 0.986724
| 0
| 0
| 0
| 0
| 0
| 0
| 2,167
| 0.228322
|
a265a038ab356fbb6e17091c1ee11fb5ec910fe6
| 518
|
py
|
Python
|
Messaging/Packets/Server/Home/LobbyInfoMessage.py
|
Kuler2006/BSDS-V40
|
9e9a6e5b36cd5082fe428ebb0279df23d5d9c7b7
|
[
"Apache-2.0"
] | 4
|
2021-11-27T16:49:30.000Z
|
2021-12-21T13:50:00.000Z
|
Messaging/Packets/Server/Home/LobbyInfoMessage.py
|
Kuler2006/BSDS-V40
|
9e9a6e5b36cd5082fe428ebb0279df23d5d9c7b7
|
[
"Apache-2.0"
] | null | null | null |
Messaging/Packets/Server/Home/LobbyInfoMessage.py
|
Kuler2006/BSDS-V40
|
9e9a6e5b36cd5082fe428ebb0279df23d5d9c7b7
|
[
"Apache-2.0"
] | 1
|
2021-12-21T13:38:20.000Z
|
2021-12-21T13:38:20.000Z
|
from Logic.Data.DataManager import Writer
from Logic.Client.ClientsManager import ClientsManager
class LobbyInfoMessage(Writer):
def __init__(self, client, player):
super().__init__(client)
self.id = 23457
self.client = client
self.player = player
def encode(self):
self.writeVint(ClientsManager.GetCount())
self.writeString("Brawl Stars\n"f"Version: {self.player.device.major}.{self.player.device.build}.{self.player.device.minor}")
self.writeVint(0)
| 34.533333
| 133
| 0.694981
| 419
| 0.80888
| 0
| 0
| 0
| 0
| 0
| 0
| 107
| 0.206564
|
a265d646f255b96ee6cd63611d22fe0c03ffcd24
| 1,560
|
py
|
Python
|
article/views.py
|
TianyongWang/TyBlog
|
2d3543a314beafe55762b58ab23d4ef4dc2cbfe9
|
[
"MIT"
] | null | null | null |
article/views.py
|
TianyongWang/TyBlog
|
2d3543a314beafe55762b58ab23d4ef4dc2cbfe9
|
[
"MIT"
] | null | null | null |
article/views.py
|
TianyongWang/TyBlog
|
2d3543a314beafe55762b58ab23d4ef4dc2cbfe9
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
from django.http import HttpResponse, Http404
from article.models import Article
from datetime import datetime
# Create your views here.
def home(request):
post_list = Article.objects.all() # fetch all Article objects
return render(request, 'home.html', {'post_list' : post_list})
def detail(request, id):
try:
post = Article.objects.get(id=str(id))
except Article.DoesNotExist:
raise Http404
return render(request, 'post.html', {'post' : post})
def archives(request) :
try:
post_list = Article.objects.all()
except Article.DoesNotExist :
raise Http404
return render(request, 'archives.html', {'post_list' : post_list,
'error' : False})
def about_me(request) :
return render(request, 'aboutme.html')
def search_tag(request, tag) :
try:
post_list = Article.objects.filter(category__iexact = tag) #contains
except Article.DoesNotExist :
raise Http404
return render(request, 'tag.html', {'post_list' : post_list})
# def home(request):
# return HttpResponse("Hello World, Django,这是我的Blog")
# def detail(request, my_args):
# # return HttpResponse("You're looking at my_args %s." % my_args)
# post = Article.objects.all()[int(my_args)]
# str = ("title = %s,category = %s,date_time = %s,content = %s" % (post.title,post.category,post.date_time,post.content))
# return HttpResponse(str)
# def test(request):
# return render(request,'test.html',{'current_time':datetime.now()})
| 32.5
| 125
| 0.663462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 644
| 0.40708
|
a26749689fb404e888e2125613c846cdef380811
| 405
|
py
|
Python
|
PythonExercicios/ex067.py
|
Luis-Emanuel/Python
|
92936dfb005b9755a53425d16c3ff54119eebe78
|
[
"MIT"
] | null | null | null |
PythonExercicios/ex067.py
|
Luis-Emanuel/Python
|
92936dfb005b9755a53425d16c3ff54119eebe78
|
[
"MIT"
] | null | null | null |
PythonExercicios/ex067.py
|
Luis-Emanuel/Python
|
92936dfb005b9755a53425d16c3ff54119eebe78
|
[
"MIT"
] | null | null | null |
#Write a program that shows the multiplication table of several numbers, one at a time, for each value entered by the user.
#The program stops when the requested number is negative.
c = 0
while True:
print(30*'-')
num = int(input('Quer ver a tabuada de qual valor ?'))
print(30*'-')
if num < 0:
break
for c in range(1,11):
print(f'{num} X {c} = {num*c}')
print('FIM')
| 33.75
| 112
| 0.637037
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 259
| 0.63017
|
a26af4c2704297b324a8b326cbf17e3cd4d232f6
| 1,251
|
py
|
Python
|
examples/src/python/bolt/half_ack_bolt.py
|
takeratta/heron
|
7b7c38594186f009741c62d379364b9b45d82b61
|
[
"Apache-2.0"
] | 1
|
2021-06-29T07:00:10.000Z
|
2021-06-29T07:00:10.000Z
|
examples/src/python/bolt/half_ack_bolt.py
|
kalimfaria/heron
|
d59bd016b826006e2af22c7a6452342f5e7d637c
|
[
"Apache-2.0"
] | null | null | null |
examples/src/python/bolt/half_ack_bolt.py
|
kalimfaria/heron
|
d59bd016b826006e2af22c7a6452342f5e7d637c
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# copyright 2016 twitter. all rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''half ack bolt'''
from heronpy.api.bolt.bolt import Bolt
class HalfAckBolt(Bolt):
"""Half of data tuples will be acked and the other half will be failed"""
# pylint: disable=unused-argument
def initialize(self, config, context):
self.total = 0
def process(self, tup):
self.total += 1
if self.total % 2 == 0:
self.logger.debug("Failing a tuple: %s" % str(tup))
self.fail(tup)
else:
self.logger.debug("Acking a tuple: %s" % str(tup))
self.ack(tup)
def process_tick(self, tup):
self.log("Got tick tuple!")
self.log("Total received: %d" % self.total)
| 32.921053
| 75
| 0.694644
| 549
| 0.438849
| 0
| 0
| 0
| 0
| 0
| 0
| 828
| 0.661871
|
a26b73d904e11aae41e76e1fb93f09e8f345dc84
| 534
|
py
|
Python
|
projects/cassava-leaf-disease/code/src/config.py
|
dric2018/coding-room
|
ff538ed16d09ab4918d1b0d55aef09fe95b1078a
|
[
"MIT"
] | 1
|
2021-02-02T08:30:50.000Z
|
2021-02-02T08:30:50.000Z
|
projects/cassava-leaf-disease/code/src/.ipynb_checkpoints/config-checkpoint.py
|
dric2018/coding-room
|
ff538ed16d09ab4918d1b0d55aef09fe95b1078a
|
[
"MIT"
] | null | null | null |
projects/cassava-leaf-disease/code/src/.ipynb_checkpoints/config-checkpoint.py
|
dric2018/coding-room
|
ff538ed16d09ab4918d1b0d55aef09fe95b1078a
|
[
"MIT"
] | 1
|
2021-03-09T14:27:00.000Z
|
2021-03-09T14:27:00.000Z
|
import os
class Config:
data_dir = os.path.abspath('../data/input/')
models_dir = os.path.abspath('../models')
logs_dir = os.path.abspath('../logs')
train_data_dir = os.path.abspath('../data/input/train_images')
test_data_dir = os.path.abspath('../data/input/test_images')
num_epochs = 15
lr = 2e-2
resize = 500
img_h = 400
img_w = 400
weight_decay = .01
eps = 1e-8
train_batch_size = 16
test_batch_size = 16
base_model = 'resnet34'
seed_val = 2021
num_workers = 2
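# Minimal usage sketch: the rest of the project presumably reads these
# hyperparameters straight off the class, without instantiating it.
if __name__ == '__main__':
    print(Config.base_model, Config.img_h, Config.img_w, Config.lr, Config.train_batch_size)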
| 25.428571
| 66
| 0.629213
| 522
| 0.977528
| 0
| 0
| 0
| 0
| 0
| 0
| 101
| 0.189139
|
a26c405342f3cf01116c7589d07a48162ad6f4f5
| 1,265
|
py
|
Python
|
midburn/migrations/0007_auto_20160116_0902.py
|
mtr574/projectMidbrunFirstReg
|
2569c3f07e1af746bfc1f213632708c76d8fc829
|
[
"Apache-2.0"
] | null | null | null |
midburn/migrations/0007_auto_20160116_0902.py
|
mtr574/projectMidbrunFirstReg
|
2569c3f07e1af746bfc1f213632708c76d8fc829
|
[
"Apache-2.0"
] | 1
|
2016-01-22T09:32:04.000Z
|
2016-01-22T12:14:12.000Z
|
midburn/migrations/0007_auto_20160116_0902.py
|
mtr574/projectMidbrunFirstReg
|
2569c3f07e1af746bfc1f213632708c76d8fc829
|
[
"Apache-2.0"
] | 3
|
2016-11-04T12:10:03.000Z
|
2017-02-23T08:52:53.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('midburn', '0006_auto_20160116_0842'),
]
operations = [
migrations.AlterField(
model_name='camp',
name='contact_email',
field=models.CharField(max_length=254, blank=True, unique=True, default=''),
preserve_default=False,
),
migrations.AlterField(
model_name='camp',
name='contact_facebook',
field=models.CharField(max_length=254, blank=True, default=''),
preserve_default=False,
),
migrations.AlterField(
model_name='camp',
name='contact_name_en',
field=models.CharField(max_length=50, blank=True),
),
migrations.AlterField(
model_name='camp',
name='contact_name_he',
field=models.CharField(max_length=50, blank=True),
),
migrations.AlterField(
model_name='camp',
name='contact_phone',
field=models.CharField(max_length=50, blank=True, default=''),
preserve_default=False,
),
]
| 29.418605
| 88
| 0.575494
| 1,156
| 0.913834
| 0
| 0
| 0
| 0
| 0
| 0
| 175
| 0.13834
|
a26ce5cbff56541c401d259eb84396d16a623b3d
| 329
|
py
|
Python
|
win/test_ddg.py
|
janakhpon/PersonalAssistant
|
bacd6743d23d139af1199df12c7bf99d092764b1
|
[
"MIT"
] | null | null | null |
win/test_ddg.py
|
janakhpon/PersonalAssistant
|
bacd6743d23d139af1199df12c7bf99d092764b1
|
[
"MIT"
] | null | null | null |
win/test_ddg.py
|
janakhpon/PersonalAssistant
|
bacd6743d23d139af1199df12c7bf99d092764b1
|
[
"MIT"
] | null | null | null |
from bs4 import BeautifulSoup
import requests
text = input("text : ")
text.replace(" ", "+")
params = {"q": text}
content = requests.get("https://duckduckgo.com/?q=", params=params)
soup = BeautifulSoup(content.text, 'html.parser')
res = soup.find_all('div', class_="result__snippet js-result-snippet")
for r in res:
print(r)
| 25.307692
| 70
| 0.702128
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 99
| 0.300912
|
a26e63d9279b0f8a80c97662c0a07a697eeb4cdf
| 2,771
|
py
|
Python
|
experiments/scripts/third_party/roll_q/roll_q.py
|
AnonymousALifer/primordium
|
0c37d387f2cc5b343c6cbd038ae197fd9a502d76
|
[
"MIT"
] | null | null | null |
experiments/scripts/third_party/roll_q/roll_q.py
|
AnonymousALifer/primordium
|
0c37d387f2cc5b343c6cbd038ae197fd9a502d76
|
[
"MIT"
] | null | null | null |
experiments/scripts/third_party/roll_q/roll_q.py
|
AnonymousALifer/primordium
|
0c37d387f2cc5b343c6cbd038ae197fd9a502d76
|
[
"MIT"
] | null | null | null |
import sys
#### CONFIG OPTIONS
# replicates = 50 #Will be dynamically determined
roll_q_dir = './'
if len(sys.argv) < 2:
print('Must pass one argument, the number of jobs in the queue!')
exit(-1)
jobs_in_queue = int(sys.argv[1])
if len(sys.argv) >= 3:
roll_q_dir = sys.argv[2]
do_resub = True
if len(sys.argv) > 3: # a fourth argument (argv[3]) is enough to set the resubmission flag
do_resub = sys.argv[3].strip() == '1'
if roll_q_dir[-1] != '/':
roll_q_dir += '/'
if do_resub:
open_slots = 999 - jobs_in_queue
else:
open_slots = 999 - jobs_in_queue
print(open_slots, 'slots available in queue.')
cur_tasks_to_run = 0
#num_jobs_to_run = open_slots // replicates
cur_idx = 0
with open(roll_q_dir + 'roll_q_idx.txt', 'r') as fp:
cur_idx = int(fp.readline().strip())
print('Current index in job array:', cur_idx)
room_for_all_jobs = False
jobs_to_run = []
with open(roll_q_dir + 'roll_q_job_array.txt', 'r') as fp:
all_jobs_finished = False
for i in range(0, cur_idx):
line = fp.readline().strip()
if line == '':
all_jobs_finished = True
break
#print('Skipping:', line)
if all_jobs_finished:
print('All jobs already running or done, there\'s nothing to queue!')
exit(0)
while True:
#for i in range(0, num_jobs_to_run):
line = fp.readline().strip()
#print(line)
if line == '':
print('We hit the end of the queue! Submitting the last few jobs...')
room_for_all_jobs = True
break
num_tasks = 1
with open(line, 'r') as job_fp:
for job_line in job_fp:
L = job_line.split()
if len(L) > 0:
if L[0] == '#SBATCH':
L2 = L[1].split('=')
if L2[0] == '--array':
start, end = [int(x) for x in L2[1].split('-')]
num_tasks = (end - start) + 1
if cur_tasks_to_run + num_tasks > open_slots:
break
cur_tasks_to_run += num_tasks
jobs_to_run.append(line)
if not room_for_all_jobs and do_resub:
base_script = ''
with open(roll_q_dir + 'roll_q_resub_base.sb', 'r') as in_fp:
base_script = in_fp.read()
print(base_script)
with open(roll_q_dir + 'roll_q_resub_job.sb', 'w') as out_fp:
out_fp.write(base_script.replace('<<ROLL_Q_DIR>>', roll_q_dir))
with open(roll_q_dir + 'roll_q_submit.sh', 'w') as out_fp:
out_fp.write('#!/bin/bash\n')
for job in jobs_to_run:
out_fp.write('sbatch ' + job + '\n')
with open(roll_q_dir + 'roll_q_idx.txt', 'w') as idx_fp:
idx_fp.write(str(cur_idx + len(jobs_to_run)))
print('Prepared', len(jobs_to_run), 'jobs, with ' + str(cur_tasks_to_run) + ' tasks, to run!')
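# Illustration of the '#SBATCH --array' parsing used above to count the tasks in
# a job script; the sample line is hypothetical.
sample = '#SBATCH --array=0-49'
key, spec = sample.split()[1].split('=')
start, end = [int(x) for x in spec.split('-')]
print(key, (end - start) + 1)   # --array 50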
| 32.6
| 94
| 0.583544
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 694
| 0.250451
|
a275677a628b972b4fd284b9ad40ccf51d3ac9ae
| 390
|
py
|
Python
|
prplatform/exercises/migrations/0002_auto_20180508_1200.py
|
piehei/prplatform
|
f3248b66019f207bb06a4681a62057e175408b3e
|
[
"MIT"
] | 3
|
2018-10-07T18:50:01.000Z
|
2020-07-29T14:43:51.000Z
|
prplatform/exercises/migrations/0002_auto_20180508_1200.py
|
piehei/prplatform
|
f3248b66019f207bb06a4681a62057e175408b3e
|
[
"MIT"
] | 9
|
2019-08-26T11:55:00.000Z
|
2020-05-04T13:56:06.000Z
|
prplatform/exercises/migrations/0002_auto_20180508_1200.py
|
piehei/prplatform
|
f3248b66019f207bb06a4681a62057e175408b3e
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.0.4 on 2018-05-08 12:00
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('courses', '0013_auto_20180426_0754'),
('exercises', '0001_initial'),
]
operations = [
migrations.RenameModel(
old_name='GeneralExercise',
new_name='SubmissionExercise',
),
]
| 20.526316
| 47
| 0.610256
| 305
| 0.782051
| 0
| 0
| 0
| 0
| 0
| 0
| 143
| 0.366667
|
a277d99ca9d564507caf9cea939d843c77111614
| 777
|
py
|
Python
|
spirit/utils/paginator/infinite_paginator.py
|
rterehov/Spirit
|
515894001da9d499852b7ebde25892d290e26c38
|
[
"MIT"
] | null | null | null |
spirit/utils/paginator/infinite_paginator.py
|
rterehov/Spirit
|
515894001da9d499852b7ebde25892d290e26c38
|
[
"MIT"
] | null | null | null |
spirit/utils/paginator/infinite_paginator.py
|
rterehov/Spirit
|
515894001da9d499852b7ebde25892d290e26c38
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.http import Http404
from infinite_scroll_pagination.paginator import SeekPaginator, EmptyPage
def paginate(request, query_set, lookup_field, per_page=15, page_var='value'):
# TODO: remove
page_pk = request.GET.get(page_var, None)
paginator = SeekPaginator(query_set, per_page=per_page, lookup_field=lookup_field)
# First page
if page_pk is None:
return paginator.page()
try:
obj = query_set.model.objects.get(pk=page_pk)
except query_set.model.DoesNotExist:
raise Http404()
value = getattr(obj, lookup_field)
try:
page = paginator.page(value=value, pk=page_pk)
except EmptyPage:
raise Http404()
return page
| 24.28125
| 86
| 0.700129
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 56
| 0.072072
|
a27856f4617a1105202515667ba0f2cfc6adb560
| 10,172
|
py
|
Python
|
lib/exaproxy/configuration.py
|
oriolarcas/exaproxy
|
5dc732760d811fd4986f83e6dd78d29228927aec
|
[
"BSD-2-Clause"
] | 124
|
2015-01-03T10:42:17.000Z
|
2021-12-24T05:30:25.000Z
|
lib/exaproxy/configuration.py
|
oriolarcas/exaproxy
|
5dc732760d811fd4986f83e6dd78d29228927aec
|
[
"BSD-2-Clause"
] | 14
|
2015-02-06T02:21:16.000Z
|
2019-01-10T18:22:18.000Z
|
lib/exaproxy/configuration.py
|
oriolarcas/exaproxy
|
5dc732760d811fd4986f83e6dd78d29228927aec
|
[
"BSD-2-Clause"
] | 25
|
2015-04-11T04:01:57.000Z
|
2021-07-21T07:46:31.000Z
|
# encoding: utf-8
"""
configuration.py
Created by Thomas Mangin on 2011-11-29.
Copyright (c) 2011-2013 Exa Networks. All rights reserved.
"""
# NOTE: reloading mid-program not possible
import os
import sys
import logging
import pwd
import math
import socket
import struct
_application = None
_config = None
_defaults = None
class ConfigurationError (Exception):
pass
_syslog_name_value = {
'CRITICAL' : logging.CRITICAL,
'ERROR' : logging.ERROR,
'WARNING' : logging.WARNING,
'INFO' : logging.INFO,
'DEBUG' : logging.DEBUG,
}
_syslog_value_name = {
logging.CRITICAL : 'CRITICAL',
logging.ERROR : 'ERROR',
logging.WARNING : 'WARNING',
logging.INFO : 'INFO',
logging.DEBUG : 'DEBUG',
}
class NoneDict (dict):
def __getitem__ (self,name):
return None
nonedict = NoneDict()
home = os.path.normpath(sys.argv[0]) if sys.argv[0].startswith('/') else os.path.normpath(os.path.join(os.getcwd(),sys.argv[0]))
class value (object):
@staticmethod
def nop (_):
return _
@staticmethod
def syslog (log):
if log not in _syslog_name_value:
raise TypeError('invalid log level %s' % log)
return _syslog_name_value[log]
@staticmethod
def root (path):
roots = home.split(os.sep)
location = []
for index in range(len(roots)-1,-1,-1):
if roots[index] in ('lib','bin'):
if index:
location = roots[:index]
break
root = os.path.join(*location)
paths = [
os.path.normpath(os.path.join(os.path.join(os.sep,root,path))),
os.path.normpath(os.path.expanduser(value.unquote(path))),
os.path.normpath(os.path.join('/',path)),
os.path.normpath(os.path.join('/','usr',path)),
]
return paths
@staticmethod
def integer (_):
value = int(_)
if value <= 0:
raise TypeError('the value must be positive')
return value
@staticmethod
def lowunquote (_):
return _.strip().strip('\'"').lower()
@staticmethod
def unquote (_):
return _.strip().strip('\'"')
@staticmethod
def boolean (_):
return _.lower() in ('1','yes','on','enable','true')
@staticmethod
def list (_):
return _.split()
@staticmethod
def ports (_):
try:
return [int(x) for x in _.split()]
except ValueError:
raise TypeError('resolv.conf can not be found (are you using DHCP without any network setup ?)')
@staticmethod
def methods (_):
return _.upper().split()
@staticmethod
def user (_):
try:
pwd.getpwnam(_)
# uid = answer[2]
except KeyError:
raise TypeError('user %s is not found on this system' % _)
return _
@staticmethod
def folder(path):
paths = value.root(path)
options = [path for path in paths if os.path.exists(path)]
if not options: raise TypeError('%s does not exists' % path)
first = options[0]
if not first: raise TypeError('%s does not exists' % first)
return first
@staticmethod
def conf(path):
first = value.folder(path)
if not os.path.isfile(first): raise TypeError('%s is not a file' % path)
return first
@staticmethod
def resolver(path):
global _application
paths = value.root('etc/%s/dns/resolv.conf' % _application)
paths.append(os.path.normpath(os.path.join('/','etc','resolv.conf')))
paths.append(os.path.normpath(os.path.join('/','var','run','resolv.conf')))
for resolver in paths:
if os.path.exists(resolver):
with open(resolver) as r:
if 'nameserver' in (line.strip().split(None,1)[0].lower() for line in r.readlines() if line.strip()):
return resolver
raise TypeError('resolv.conf can not be found (are you using DHCP without any network setup ?)')
@staticmethod
def exe (path):
argv = path.split(' ',1)
program = value.conf(argv.pop(0))
if not os.access(program, os.X_OK):
raise TypeError('%s is not an executable' % program)
return program if not argv else '%s %s' % (program,argv[0])
@staticmethod
def services (string):
try:
services = []
for service in value.unquote(string).split():
host,port = service.split(':')
services.append((host,int(port)))
return services
except ValueError:
raise TypeError('resolv.conf can not be found (are you using DHCP without any network setup ?)')
@staticmethod
def ranges (string):
try:
ranges = []
for service in value.unquote(string).split():
network,netmask = service.split('/')
if ':' in network:
high,low = struct.unpack('!QQ',socket.inet_pton(socket.AF_INET6,network))
start = (high << 64) + low
end = start + pow(2,128-int(netmask)) - 1
ranges.append((6,start,end))
else:
start = struct.unpack('!L',socket.inet_pton(socket.AF_INET,network))[0]
end = start + pow(2,32-int(netmask)) - 1
ranges.append((4,start,end))
return ranges
except ValueError:
raise TypeError('Can not parse the data as IP range')
@staticmethod
def redirector (name):
if name == 'url' or name.startswith('icap://'):
return name
raise TypeError('invalid redirector protocol %s, options are url or header' % name)
class string (object):
@staticmethod
def nop (_):
return _
@staticmethod
def syslog (log):
if log not in _syslog_value_name:
raise TypeError('invalid log level %s' % log)
return _syslog_value_name[log]
@staticmethod
def quote (_):
return "'%s'" % str(_)
@staticmethod
def lower (_):
return str(_).lower()
@staticmethod
def path (path):
split = sys.argv[0].split('lib/%s' % _application)
if len(split) > 1:
prefix = os.sep.join(split[:1])
if prefix and path.startswith(prefix):
path = path[len(prefix):]
home = os.path.expanduser('~')
if path.startswith(home):
return "'~%s'" % path[len(home):]
return "'%s'" % path
@staticmethod
def list (_):
return "'%s'" % ' '.join((str(x) for x in _))
@staticmethod
def services (_):
l = ' '.join(('%s:%d' % (host,port) for host,port in _))
return "'%s'" % l
@staticmethod
def ranges (_):
def convert ():
for (proto,start,end) in _:
bits = int(math.log(end-start+1,2))
if proto == 4:
network = socket.inet_ntop(socket.AF_INET,struct.pack('!L',start))
yield '%s/%d' % (network,32-bits)
else:
high = struct.pack('!Q',start >> 64)
low = struct.pack('!Q',start & 0xFFFFFFFF)
network = socket.inet_ntop(socket.AF_INET6,high+low)
yield '%s/%d' % (network,128-bits)
return "'%s'" % ' '.join(convert())
import ConfigParser
class Store (dict):
def __getitem__ (self,key):
return dict.__getitem__(self,key.replace('_','-'))
def __setitem__ (self,key,value):
return dict.__setitem__(self,key.replace('_','-'),value)
def __getattr__ (self,key):
return dict.__getitem__(self,key.replace('_','-'))
def __setattr__ (self,key,value):
return dict.__setitem__(self,key.replace('_','-'),value)
def _configuration (conf):
location = os.path.join(os.sep,*os.path.join(home.split(os.sep)))
while location and location != '/':
location, directory = os.path.split(location)
if directory in ('lib','bin'):
break
_conf_paths = []
if conf:
_conf_paths.append(os.path.abspath(os.path.normpath(conf)))
if location:
_conf_paths.append(os.path.normpath(os.path.join(location,'etc',_application,'%s.conf' % _application)))
_conf_paths.append(os.path.normpath(os.path.join('/','etc',_application,'%s.conf' % _application)))
_conf_paths.append(os.path.normpath(os.path.join('/','usr','etc',_application,'%s.conf' % _application)))
configuration = Store()
ini = ConfigParser.ConfigParser()
ini_files = [path for path in _conf_paths if os.path.exists(path)]
if ini_files:
ini.read(ini_files[0])
for section in _defaults:
default = _defaults[section]
for option in default:
convert = default[option][0]
try:
proxy_section = '%s.%s' % (_application,section)
env_name = '%s.%s' % (proxy_section,option)
rep_name = env_name.replace('.','_')
if env_name in os.environ:
conf = os.environ.get(env_name)
elif rep_name in os.environ:
conf = os.environ.get(rep_name)
else:
try:
# raise and set the default
conf = value.unquote(ini.get(section,option,nonedict))
except (ConfigParser.NoSectionError,ConfigParser.NoOptionError):
# raise and set the default
conf = value.unquote(ini.get(proxy_section,option,nonedict))
# name without an = or : in the configuration and no value
if conf is None:
conf = default[option][2]
except (ConfigParser.NoSectionError,ConfigParser.NoOptionError):
conf = default[option][2]
try:
configuration.setdefault(section,Store())[option] = convert(conf)
except TypeError,error:
raise ConfigurationError('invalid value for %s.%s : %s (%s)' % (section,option,conf,str(error)))
return configuration
def load (application=None,defaults=None,conf=None):
global _application
global _defaults
global _config
if _config:
return _config
if conf is None:
raise RuntimeError('You can not have an import using load() before main() initialised it')
_application = application
_defaults = defaults
_config = _configuration(conf)
return _config
def default ():
for section in sorted(_defaults):
for option in sorted(_defaults[section]):
values = _defaults[section][option]
default = "'%s'" % values[2] if values[1] in (string.list,string.path,string.quote) else values[2]
yield '%s.%s.%s %s: %s. default (%s)' % (_application,section,option,' '*(20-len(section)-len(option)),values[3],default)
def ini (diff=False):
for section in sorted(_config):
if section in ('proxy','debug'):
continue
header = '\n[%s]' % section
for k in sorted(_config[section]):
v = _config[section][k]
if diff and _defaults[section][k][0](_defaults[section][k][2]) == v:
continue
if header:
print header
header = ''
print '%s = %s' % (k,_defaults[section][k][1](v))
def env (diff=False):
print
for section,values in _config.items():
if section in ('proxy','debug'):
continue
for k,v in values.items():
if diff and _defaults[section][k][0](_defaults[section][k][2]) == v:
continue
if _defaults[section][k][1] == string.quote:
print "%s.%s.%s='%s'" % (_application,section,k,v)
continue
print "%s.%s.%s=%s" % (_application,section,k,_defaults[section][k][1](v))
| 27.197861
| 128
| 0.665946
| 5,806
| 0.570783
| 831
| 0.081695
| 5,203
| 0.511502
| 0
| 0
| 1,520
| 0.14943
|
a278b6850520063ea039b2fa761bcc89b24ae7fc
| 1,009
|
py
|
Python
|
timo/exception.py
|
Minsoo-web/TIMO
|
79051cdce4539bc62d01b19e98b4fce6a3f02fae
|
[
"MIT"
] | null | null | null |
timo/exception.py
|
Minsoo-web/TIMO
|
79051cdce4539bc62d01b19e98b4fce6a3f02fae
|
[
"MIT"
] | null | null | null |
timo/exception.py
|
Minsoo-web/TIMO
|
79051cdce4539bc62d01b19e98b4fce6a3f02fae
|
[
"MIT"
] | 2
|
2020-07-13T00:55:52.000Z
|
2020-07-27T04:23:41.000Z
|
from typing import AnyStr
class Error(Exception):
"""
This is the basis of all errors.\n
Basically the same as Exception.
"""
def __init__(self, msg: AnyStr) -> None:
self.msg: str = msg
def __str__(self) -> str:
return self.msg
class FileExtensionError(Error):
"""This error occurs when the file extension does not meet the requirements."""
def __str__(self) -> str:
return '파일 확장명이 올바르지 않습니다.'
class UnknownTestTestToolError(Error):
"""This error is caused when an invalid test tool is entered."""
def __str__(self) -> str:
return '현재 지원하지 않거나 잘못된 테스트 도구입니다.'
class UnknownDataError(Error):
"""This error occurs when data cannot be found."""
def __str__(self) -> str:
return '찾을 수 없거나 잘못된 데이터입니다.'
class UnknownDatabaseError(Error):
"""This error occurs when the database cannot be found."""
def __str__(self) -> str:
return '찾을 수 없거나 존재하지 않는 데이터베이스입니다.'
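# Minimal usage sketch of the hierarchy above; the '.yml' extension check is an
# illustrative assumption, not part of this module.
if __name__ == '__main__':
    filename = 'config.txt'
    try:
        if not filename.endswith('.yml'):
            raise FileExtensionError(filename)
    except FileExtensionError as err:
        print(err)  # prints the Korean message returned by __str__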
| 22.931818
| 83
| 0.654113
| 1,080
| 0.939948
| 0
| 0
| 0
| 0
| 0
| 0
| 577
| 0.502176
|
a27af76ac557d5a5a06d9803200c94099e5080e2
| 301
|
py
|
Python
|
scikit/Adaboost/example.py
|
JayMiao/MLAction
|
fec1c08fa33ed1f5d9b0befecc6dac551cc02302
|
[
"MIT"
] | 1
|
2017-02-13T10:25:11.000Z
|
2017-02-13T10:25:11.000Z
|
scikit/Adaboost/example.py
|
JayMiao/MLAction
|
fec1c08fa33ed1f5d9b0befecc6dac551cc02302
|
[
"MIT"
] | null | null | null |
scikit/Adaboost/example.py
|
JayMiao/MLAction
|
fec1c08fa33ed1f5d9b0befecc6dac551cc02302
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from sklearn.datasets import load_iris
from sklearn.model_selection import cross_val_score
from sklearn.ensemble import AdaBoostClassifier
iris = load_iris()
clf = AdaBoostClassifier(n_estimators=1000)
scores = cross_val_score(clf, iris.data, iris.target)
print(scores.mean())
| 30.1
| 53
| 0.800664
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 23
| 0.076412
|
a27d6ad41df2cef9c59436191968c1e6444af6da
| 4,720
|
py
|
Python
|
main.py
|
jg-fisher/indeed-bot
|
601720c3f20f62a99e02ef2f017cfb225a3f770e
|
[
"MIT"
] | 9
|
2019-11-28T08:54:50.000Z
|
2022-02-23T05:12:53.000Z
|
main.py
|
jg-fisher/indeed-bot
|
601720c3f20f62a99e02ef2f017cfb225a3f770e
|
[
"MIT"
] | null | null | null |
main.py
|
jg-fisher/indeed-bot
|
601720c3f20f62a99e02ef2f017cfb225a3f770e
|
[
"MIT"
] | 9
|
2019-12-07T08:32:10.000Z
|
2022-03-28T17:47:30.000Z
|
import os
import sys
import time
from selenium import webdriver
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.keys import Keys
class IndeedBot:
def __init__(self):
"""
Initializes the Chrome webdriver.
Sets the job search query string
self.driver:selenium.webdriver.Chrome
self.query_string:str
self.jobs:arr
self.express_apply_jobs:arr
"""
self.driver = webdriver.Chrome('./chromedriver.exe')
self.query_string = "https://www.indeed.com/jobs?q={job}&l={city}%2C+{state}"
self.jobs = []
self.express_apply_jobs = []
def nav(self, url):
"""
Navigates to a given url
Args:
url:str url chromedriver Chrome instance navigates to.
"""
self.driver.get(url)
time.sleep(3) # wait for page load
def __convert_query(self, job, city, state):
"""
Reformats the query for expected syntax of the search
Args:
job:str: Job type to search for.
city:str: City location of the job.
state:str State location of the job.
Returns:
job:str
city:str
state:str
"""
job = '+'.join(job.split(" "))
city = city.lower()
# State must be valid two letter code
if len(state) != 2:
raise Exception("State must be valid two letter code.")
state = state.upper()
return job, city, state
def query(self, job, city, state):
"""
Searches indeed for a job in given city and state
Args:
job:str: Job type to search for.
city:str: City location of the job.
state:str State location of the job.
"""
job, city, state = self.__convert_query(job, city, state)
query = self.query_string.format(job=job, city=city, state=state)
self.nav(query)
def find_express_jobs(self):
""""
Called after chromedriver Chrome instance navigates to job search results.
Fills list with express jobs in search results.
"""
self.jobs = self.driver.find_elements_by_class_name("jobsearch-SerpJobCard")
print(f'Number of jobs {len(self.jobs)}')
for job in self.jobs:
try: # Express apply indicator
job.find_element_by_class_name('jobCardShelfContainer')
self.express_apply_jobs.append(job)
except: # Job is not express apply
pass
def apply_to_express_jobs(self, profile):
"""
Extracts jobs with express apply.
Args:
profile:dict
"""
print(f'Number of express jobs {len(self.express_apply_jobs)}')
for job in self.express_apply_jobs:
self.__process_job(job)
self.__process_apply_button()
self.__fill_applicant_form(profile)
# self.driver.find_element_by_id('form-action-continue').click()
def __process_apply_button(self):
apply_button = self.driver.find_element_by_id('indeedApplyButtonContainer')
apply_button.click()
time.sleep(4)
def __process_job(self, job):
"""
Refines url of job posting and navigates to it
Args:
job:Selenium.Webdriver.Chrome.WebElement
"""
job_a_tag = job.find_element_by_tag_name('a')
job_href = job_a_tag.get_attribute('href')
# Removing all extraneous indeed url query string parameters
job_href = job_href.split('&from')[0]
self.nav(job_href)
def __fill_applicant_form(self, profile):
"""
Finds elements on the applicant form
Args:
profile:dict
"""
actions = ActionChains(self.driver)
actions.send_keys(profile['name'] + Keys.TAB + \
profile['email'] + Keys.TAB + \
profile['phone_number'] + Keys.TAB)
actions.perform()
if __name__ == '__main__':
profile = {
'name': "John Fisher",
'email': "jfishersolutions@gmail.com",
'phone_number': '860-364-3249',
'resume': os.getcwd() + '\\resume.txt'
}
id_bot = IndeedBot()
# keywords, city, state
id_bot.query('python developer', 'dallas', 'tx')
id_bot.find_express_jobs()
id_bot.apply_to_express_jobs(profile)
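# Query-URL construction shown without launching a browser; the format string is
# the same one stored in IndeedBot.query_string, and the job/city/state values
# below are illustrative only.
if __name__ == '__main__':
    job = '+'.join('python developer'.split())
    url = "https://www.indeed.com/jobs?q={job}&l={city}%2C+{state}".format(
        job=job, city='dallas', state='TX')
    print(url)  # https://www.indeed.com/jobs?q=python+developer&l=dallas%2C+TX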
| 27.764706
| 88
| 0.561017
| 4,108
| 0.870339
| 0
| 0
| 0
| 0
| 0
| 0
| 2,203
| 0.466737
|
a27f8d0583f96864cbfcef5f30b901f38868d920
| 1,266
|
py
|
Python
|
biobb_adapters/pycompss/biobb_md/gromacs_extra/append_ligand_pc.py
|
jfennick/biobb_adapters
|
a37c1c696476c93225e7d3c661b0d4393af9dfe1
|
[
"Apache-2.0"
] | null | null | null |
biobb_adapters/pycompss/biobb_md/gromacs_extra/append_ligand_pc.py
|
jfennick/biobb_adapters
|
a37c1c696476c93225e7d3c661b0d4393af9dfe1
|
[
"Apache-2.0"
] | 4
|
2019-03-04T15:22:06.000Z
|
2021-09-24T14:43:48.000Z
|
biobb_adapters/pycompss/biobb_md/gromacs_extra/append_ligand_pc.py
|
jfennick/biobb_adapters
|
a37c1c696476c93225e7d3c661b0d4393af9dfe1
|
[
"Apache-2.0"
] | 2
|
2020-09-08T05:26:23.000Z
|
2022-03-28T07:09:20.000Z
|
import traceback
from pycompss.api.task import task
from pycompss.api.constraint import constraint
from pycompss.api.parameter import FILE_IN, FILE_OUT
from biobb_common.tools import file_utils as fu
from biobb_md.gromacs_extra import append_ligand
import os
import sys
@constraint(computingUnits="1")
@task(input_top_zip_path=FILE_IN, input_itp_path=FILE_IN,
output_top_zip_path=FILE_OUT, input_posres_itp_path=FILE_IN,
on_failure="IGNORE")
def append_ligand_pc(input_top_zip_path, input_itp_path,
output_top_zip_path, input_posres_itp_path,
properties, **kwargs):
try:
os.environ.pop('PMI_FD', None)
os.environ.pop('PMI_JOBID', None)
os.environ.pop('PMI_RANK', None)
os.environ.pop('PMI_SIZE', None)
append_ligand.AppendLigand(input_top_zip_path=input_top_zip_path, input_itp_path=input_itp_path,
output_top_zip_path=output_top_zip_path, input_posres_itp_path=input_posres_itp_path,
properties=properties, **kwargs).launch()
except Exception:
traceback.print_exc()
fu.write_failed_output(output_top_zip_path)
finally:
sys.stdout.flush()
sys.stderr.flush()
| 42.2
| 120
| 0.704581
| 0
| 0
| 0
| 0
| 995
| 0.78594
| 0
| 0
| 50
| 0.039494
|
a27fb79cf4664c162660d09fef51a46e76ed5c1e
| 3,277
|
py
|
Python
|
sellalert.py
|
sjmiller609/cvs_scraper
|
f6a1e5673869a7031c028d38a6bb4b37a3ba5473
|
[
"MIT"
] | null | null | null |
sellalert.py
|
sjmiller609/cvs_scraper
|
f6a1e5673869a7031c028d38a6bb4b37a3ba5473
|
[
"MIT"
] | null | null | null |
sellalert.py
|
sjmiller609/cvs_scraper
|
f6a1e5673869a7031c028d38a6bb4b37a3ba5473
|
[
"MIT"
] | null | null | null |
import requests
import json
from pprint import pprint
import re
import time
import sys
#getdata = requests.get(geturl)
#pprint (vars(getdata))
from bs4 import BeautifulSoup
from geopy.geocoders import Nominatim
if len(sys.argv) != 4:
print(sys.argv[0]+" <item> <location> <num items>")
exit()
#get list of product IDs
item = sys.argv[1].replace(" ","+")
print("searching for items with: "+item)
geturl = "http://www.cvs.com/search/N-0?searchTerm="+item+"&navNum="+sys.argv[3]
print("search url: "+geturl)
#This step is important.Converting QString to Ascii for lxml to process
#archive_links = html.fromstring(str(result.toAscii()))
#print archive_links
response = requests.get(geturl)
print(str(response))
page = str(BeautifulSoup(response.content,"html.parser"))
print(page)
exit()
def getURL(page):
start_link = page.find("href")
if start_link == -1:
return None, 0
start_quote = page.find('"', start_link)
end_quote = page.find('"', start_quote + 1)
url = page[start_quote + 1: end_quote]
return url, end_quote
def getUrls(urls,page):
url, n = getURL(page)
if url:
urls.append(url)
getUrls(urls,page[n:])
urls = []
getUrls(urls,page)
for url in urls:
print(url)
itemlist = []
skuidlist = []
for i in range(0,len(urls)):
m = re.search('/shop/.*/.*/.*/(.*)-skuid-(\d{6})',urls[i])
if m and m.group(2) not in skuidlist:
itemlist.append(m.group(1))
skuidlist.append(m.group(2))
print("items found:")
for item in itemlist:
print("\t"+item)
#TODO: now the page loads these in js, so we need to interpret js
exit()
geolocator = Nominatim()
location = geolocator.geocode(sys.argv[2])
print((location.latitude,location.longitude))
posturl = "http://www.cvs.com/rest/bean/cvs/catalog/CvsBohServiceHandler/storeInventoryValues"
dicts = []
print('loading initial inventory...')
for i in range(0,len(skuidlist)):
time.sleep(2)
productId = skuidlist[i]
postdata = {'productId': productId, 'productSPUlnd': 'true','favstore':'NULL','geolatitude':str(location.latitude),'geolongitude':str(location.longitude)}
inv = requests.post(posturl,data=postdata)
dict = {}
jsons = inv.json()['atgResponse']
for j in range(0,len(jsons)):
temp = jsons[j]
if(temp['Qty'] == ''):
temp['Qty'] = '0'
dict[temp['storeAddress']] = temp['Qty']
dicts.append(dict)
print(str(100*i/len(skuidlist))+"%")
while True:
for j in range(0,len(skuidlist)):
#delay between requests
print('3 seconds...')
time.sleep(3)
productId = skuidlist[j]
postdata = {'productId': productId, 'productSPUlnd': 'true','favstore':'NULL','geolatitude':str(location.latitude),'geolongitude':str(location.longitude)}
inv = requests.post(posturl,data=postdata)
jsons = inv.json()['atgResponse']
for i in range(0,len(jsons)):
temp = jsons[i]
if(temp['Qty'] == ''):
temp['Qty'] = '0'
if(dicts[j][temp['storeAddress']] != temp['Qty']):
print("was: "+dicts[j][temp['storeAddress']]+" now: "+temp['Qty'])
sold = int(dicts[j][temp['storeAddress']]) - int(temp['Qty'])
print(temp['storeAddress']+" sold "+str(sold) + " of item " +itemlist[j])
dicts[j][temp['storeAddress']] = temp['Qty']
| 29.522523
| 159
| 0.648764
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,003
| 0.306073
|
a27fd6c4631670b333af8985d1aba8f26af3183c
| 5,670
|
py
|
Python
|
neucom/utils.py
|
jacobver/diag_context
|
ca8d008b745743bf20c4bedcf6faa412a5ad8080
|
[
"MIT"
] | null | null | null |
neucom/utils.py
|
jacobver/diag_context
|
ca8d008b745743bf20c4bedcf6faa412a5ad8080
|
[
"MIT"
] | null | null | null |
neucom/utils.py
|
jacobver/diag_context
|
ca8d008b745743bf20c4bedcf6faa412a5ad8080
|
[
"MIT"
] | null | null | null |
from __future__ import print_function
import numpy as np
from copy import copy
import torch
import torch.nn.functional as F
from torch.autograd import Variable
import torch.nn as nn
def apply_var(v, k):
if isinstance(v, Variable) and v.requires_grad:
v.register_hook(inves(k))
def apply_dict(dic):
for k, v in dic.iteritems():
apply_var(v, k)
if isinstance(v, nn.Module):
key_list = [a for a in dir(v) if not a.startswith('__')]
for key in key_list:
apply_var(getattr(v, key), key)
for pk, pv in v._parameters.iteritems():
apply_var(pv, pk)
def inves(name=''):
def f(tensor):
if np.isnan(torch.mean(tensor).data.cpu().numpy()):
print('\ngradient of {} :'.format(name))
print(tensor)
assert 0, 'nan gradient'
return tensor
return f
def reduce_sum(inputs, dim=None, keep_dim=False):
if dim is None:
return torch.sum(inputs)
output = torch.sum(inputs, dim)
if not keep_dim:
return output
else:
return expand_dims(output, dim)
def pairwise_add(u, v=None, is_batch=False):
"""
performs a pairwise summation between vectors (possibly the same)
can also be performed on batch of vectors.
Parameters:
----------
u, v: Tensor (m,) or (b,m)
Returns:
---------
Tensor (m, n) or (b, m, n)
"""
u_shape = u.size()
if v is None:
v = u
v_shape = v.size()
if len(u_shape) > 2 and not is_batch:
raise ValueError("Expected at most 2D tensor or 3D tensor with batch")
if len(v_shape) > 2 and not is_batch:
raise ValueError("Expected at most 2D tensor or 3D tensor with batch")
m = u_shape[0] if not is_batch else u_shape[1]
n = v_shape[0] if not is_batch else v_shape[1]
u = expand_dims(u, axis=-1)
new_u_shape = list(u.size())
new_u_shape[-1] = n
U_ = u.expand(*new_u_shape)
v = expand_dims(v, axis=-2)
new_v_shape = list(v.size())
new_v_shape[-2] = m
V_ = v.expand(*new_v_shape)
return U_ + V_
def to_device(src, ref):
return src.cuda(ref.get_device()) if ref.is_cuda else src
def cumprod(inputs, dim=1, exclusive=True):
# https://github.com/tensorflow/tensorflow/blob/master/tensorflow/g3doc/api_docs/python/functions_and_classes/shard0/tf.cumprod.md
if type(inputs) is not Variable:
temp = torch.cumprod(inputs, dim)
if not exclusive:
return temp
else:
temp = temp / (inputs[0].expand_as(temp) + 1e-8)
temp[-1] = temp[-1] / (inputs[-1] + 1e-8)
return temp
else:
shape_ = inputs.size()
ndim = len(shape_)
n_slot = shape_[dim]
slice_ = [slice(0, None, 1) for _ in range(ndim)]
results = [[]] * n_slot
for ind in range(0, n_slot):
this_slice, last_slice = copy(slice_), copy(slice_)
this_slice[dim] = ind
last_slice[dim] = ind - 1
this_slice = tuple(this_slice)
last_slice = tuple(last_slice)
if exclusive:
if ind > 0:
results[ind] = results[ind - 1] * inputs[last_slice]
else:
results[ind] = torch.div(
inputs[this_slice], inputs[this_slice] + 1e-8)
else:
if ind > 0:
results[ind] = results[ind - 1] * inputs[this_slice]
else:
results[ind] = inputs[this_slice]
return torch.stack(results, dim)
def expand_dims(input, axis=0):
input_shape = list(input.size())
if axis < 0:
axis = len(input_shape) + axis + 1
input_shape.insert(axis, 1)
return input.view(*input_shape)
def matmal(left, right):
'''
left is of size (*N, n1,n2), where N is a list
right is of size(*M, m1,m2), where M is a list
output is of size
'''
pass
def cosine_distance(memory_matrix, cos_keys):
"""
compute the cosine similarity between the lookup keys and each of the
memory slots.
Parameters:
----------
memory_matrix: Tensor (batch_size, mem_slot, mem_size)
the memory matrix to lookup in
keys: Tensor (batch_size, mem_size, number_of_keys)
the keys to query the memory with
strengths: Tensor (batch_size, number_of_keys, )
the list of strengths for each lookup key
Returns: Tensor (batch_size, mem_slot, number_of_keys)
The list of lookup weightings for each provided key
"""
memory_norm = torch.norm(memory_matrix, 2, 2, keepdim=True)
keys_norm = torch.norm(cos_keys, 2, 1, keepdim=True)
normalized_mem = torch.div(
memory_matrix, memory_norm.expand_as(memory_matrix) + 1e-8)
normalized_keys = torch.div(cos_keys, keys_norm.expand_as(cos_keys) + 1e-8)
out = torch.bmm(normalized_mem, normalized_keys)
# print(normalized_keys)
# print(out)
# apply_dict(locals())
return out
def softmax(input, axis=1):
"""
Apply softmax on input at certain axis.
Parameters:
----------
input: Tensor (N*L or rank>2)
axis: the axis to apply softmax
Returns: Tensor with softmax applied on that dimension.
"""
input_size = input.size()
trans_input = input.transpose(axis, len(input_size) - 1)
trans_size = trans_input.size()
input_2d = trans_input.contiguous().view(-1, trans_size[-1])
soft_max_2d = F.softmax(input_2d)
soft_max_nd = soft_max_2d.view(*trans_size)
# apply_dict(locals())
return soft_max_nd.transpose(axis, len(input_size) - 1)
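# Quick shape check for two of the helpers above; the values are illustrative only.
if __name__ == '__main__':
    _u = torch.tensor([1.0, 2.0, 3.0])
    _v = torch.tensor([10.0, 20.0])
    print(pairwise_add(_u, _v))                            # 3x2 matrix with entries u_i + v_j
    print(softmax(torch.randn(2, 5), axis=1).sum(dim=1))   # each row sums to 1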
| 28.069307
| 134
| 0.603351
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,502
| 0.264903
|
a27ff6238bdd6adda0370578acda1918aca05e2f
| 776
|
py
|
Python
|
school/lecture1/isi_cv_02_task.py
|
kubekbreha/ML-Python-Algorithms
|
8058b68a2d98a79a6debcc69abdd188c97420d75
|
[
"MIT"
] | null | null | null |
school/lecture1/isi_cv_02_task.py
|
kubekbreha/ML-Python-Algorithms
|
8058b68a2d98a79a6debcc69abdd188c97420d75
|
[
"MIT"
] | null | null | null |
school/lecture1/isi_cv_02_task.py
|
kubekbreha/ML-Python-Algorithms
|
8058b68a2d98a79a6debcc69abdd188c97420d75
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Sun Feb 19 20:41:09 2017
@author: pd
"""
#from IPython import get_ipython
#get_ipython().magic('reset -sf')
import matplotlib.pyplot as plt
from sklearn import datasets
from sklearn.tree import DecisionTreeClassifier
X, Y = datasets.make_classification(n_samples=1000,
n_features=3, n_redundant=0)
# print(X, Y)
clf = DecisionTreeClassifier()
clf = clf.fit(X*10, Y*10)
x,y,z = clf.predict([[-2, 2, 0],[-131, -123, -435],[-22, 100, 53]])
#### initial visualization
plt.xlim(0.0, 20.0)
plt.ylim(0.0, 20.0)
# plt.scatter(X, Y, color="b", label="fast")
# plt.scatter(x, y, color="r", label="slow")
# plt.legend()
# plt.xlabel("bumpiness")
# plt.ylabel("grade")
plt.show()
| 20.972973
| 67
| 0.640464
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 353
| 0.454897
|
a280eaab2887649d537621914d70995f7a90e0ab
| 327
|
py
|
Python
|
rotary/rotary/doctype/monthly_report/monthly_report.py
|
neilLasrado/rotary
|
66659b41c6fbd04d22aa368573c786dabe1102e5
|
[
"MIT"
] | null | null | null |
rotary/rotary/doctype/monthly_report/monthly_report.py
|
neilLasrado/rotary
|
66659b41c6fbd04d22aa368573c786dabe1102e5
|
[
"MIT"
] | null | null | null |
rotary/rotary/doctype/monthly_report/monthly_report.py
|
neilLasrado/rotary
|
66659b41c6fbd04d22aa368573c786dabe1102e5
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Neil Lasrado and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe.utils import now
class MonthlyReport(Document):
def on_submit(self):
self.date = now()
| 25.153846
| 51
| 0.770642
| 72
| 0.220183
| 0
| 0
| 0
| 0
| 0
| 0
| 123
| 0.376147
|
a281068a96d517af66fbb0b7cc8c9a41a817af13
| 109
|
py
|
Python
|
kili/mutations/project_version/fragments.py
|
ASonay/kili-playground
|
9624073703b5e6151cf496f44f17f531576875b7
|
[
"Apache-2.0"
] | 214
|
2019-08-05T14:55:01.000Z
|
2022-03-28T21:02:22.000Z
|
kili/mutations/project_version/fragments.py
|
x213212/kili-playground
|
dfb94c2d54bedfd7fec452b91f811587a2156c13
|
[
"Apache-2.0"
] | 10
|
2020-05-14T10:44:16.000Z
|
2022-03-08T09:39:24.000Z
|
kili/mutations/project_version/fragments.py
|
x213212/kili-playground
|
dfb94c2d54bedfd7fec452b91f811587a2156c13
|
[
"Apache-2.0"
] | 19
|
2019-11-26T22:41:09.000Z
|
2022-01-16T19:17:38.000Z
|
"""
Fragments of project version mutations
"""
PROJECT_VERSION_FRAGMENT = '''
content
id
name
projectId
'''
| 9.909091
| 38
| 0.733945
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 79
| 0.724771
|
a281c8f1cacd2892e9e276b0c28506e1a7b6dc79
| 6,037
|
py
|
Python
|
metrics/fid/fid_score.py
|
vfcosta/coegan-trained
|
44174e68909d9c03bf2e4b7e4c7a48237a560183
|
[
"MIT"
] | null | null | null |
metrics/fid/fid_score.py
|
vfcosta/coegan-trained
|
44174e68909d9c03bf2e4b7e4c7a48237a560183
|
[
"MIT"
] | null | null | null |
metrics/fid/fid_score.py
|
vfcosta/coegan-trained
|
44174e68909d9c03bf2e4b7e4c7a48237a560183
|
[
"MIT"
] | 1
|
2021-06-11T16:52:55.000Z
|
2021-06-11T16:52:55.000Z
|
# Code adapted from https://github.com/mseitzer/pytorch-fid
"""Calculates the Frechet Inception Distance (FID) to evaluate GANs
The FID metric calculates the distance between two distributions of images.
Typically, we have summary statistics (mean & covariance matrix) of one
of these distributions, while the 2nd distribution is given by a GAN.
When run as a stand-alone program, it compares the distribution of
images that are stored as PNG/JPEG at a specified location with a
distribution given by summary statistics (in pickle format).
The FID is calculated by assuming that X_1 and X_2 are the activations of
the pool_3 layer of the inception net for generated samples and real world
samples respectively.
See --help to see further details.
Code adapted from https://github.com/bioinf-jku/TTUR to use PyTorch instead
of Tensorflow
Copyright 2018 Institute of Bioinformatics, JKU Linz
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import time
import numpy as np
import torch
from scipy import linalg
from torch.nn.functional import adaptive_avg_pool2d
from util import tools
def get_activations(dataset, model, size=1000, batch_size=50, dims=2048, device='cpu'):
"""Calculates the activations of the pool_3 layer for all images.
Params:
-- files : List of image files paths
-- model : Instance of inception model
-- batch_size : Batch size of images for the model to process at once.
Make sure that the number of samples is a multiple of
the batch size, otherwise some samples are ignored. This
behavior is retained to match the original FID score
implementation.
-- dims : Dimensionality of features returned by Inception
-- device : Device to run calculations
Returns:
-- A numpy array of dimension (num images, dims) that contains the
activations of the given tensor when feeding inception with the
query tensor.
"""
model.eval()
if batch_size > size:
print(('Warning: batch size is bigger than the data size. '
'Setting batch size to data size'))
batch_size = size
dataloader = torch.utils.data.DataLoader(dataset, batch_size=batch_size, shuffle=False, drop_last=False)
pred_arr = np.empty((size, dims))
start_idx = 0
for batch, _ in dataloader:
if batch.shape[1] == 1:
batch = torch.cat((batch, batch, batch), 1)
batch = batch.to(device)
with torch.no_grad():
pred = model(batch)[0]
# If model output is not scalar, apply global spatial average pooling.
# This happens if you choose a dimensionality not equal 2048.
if pred.size(2) != 1 or pred.size(3) != 1:
pred = adaptive_avg_pool2d(pred, output_size=(1, 1))
pred = pred.squeeze(3).squeeze(2).cpu().numpy()
pred_arr[start_idx:start_idx + pred.shape[0]] = pred
start_idx = start_idx + pred.shape[0]
if start_idx >= size:
break
return pred_arr
def calculate_frechet_distance(mu1, sigma1, mu2, sigma2, eps=1e-6):
"""Numpy implementation of the Frechet Distance.
The Frechet distance between two multivariate Gaussians X_1 ~ N(mu_1, C_1)
and X_2 ~ N(mu_2, C_2) is
d^2 = ||mu_1 - mu_2||^2 + Tr(C_1 + C_2 - 2*sqrt(C_1*C_2)).
Stable version by Dougal J. Sutherland.
Params:
-- mu1 : Numpy array containing the activations of a layer of the
inception net (like returned by the function 'get_predictions')
for generated samples.
    -- mu2   : The sample mean over activations, precalculated on a
representative data set.
-- sigma1: The covariance matrix over activations for generated samples.
    -- sigma2: The covariance matrix over activations, precalculated on a
representative data set.
Returns:
-- : The Frechet Distance.
"""
mu1 = np.atleast_1d(mu1)
mu2 = np.atleast_1d(mu2)
sigma1 = np.atleast_2d(sigma1)
sigma2 = np.atleast_2d(sigma2)
assert mu1.shape == mu2.shape, 'Training and test mean vectors have different lengths'
assert sigma1.shape == sigma2.shape, 'Training and test covariances have different dimensions'
diff = mu1 - mu2
# Product might be almost singular
start_time = time.time()
covmean, _ = linalg.sqrtm(sigma1.dot(sigma2), disp=False)
print("FID: sqrtm --- %s seconds ---" % (time.time() - start_time))
if not np.isfinite(covmean).all():
msg = ('fid calculation produces singular product; '
'adding %s to diagonal of cov estimates') % eps
print(msg)
offset = np.eye(sigma1.shape[0]) * eps
covmean = linalg.sqrtm((sigma1 + offset).dot(sigma2 + offset))
# Numerical error might give slight imaginary component
if np.iscomplexobj(covmean):
if not np.allclose(np.diagonal(covmean).imag, 0, atol=1e-3):
m = np.max(np.abs(covmean.imag))
# raise ValueError('Imaginary component {}'.format(m))
print('Imaginary component {}'.format(m))
covmean = covmean.real
tr_covmean = np.trace(covmean)
return diff.dot(diff) + np.trace(sigma1) + np.trace(sigma2) - 2 * tr_covmean
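# Worked 1-D sanity check (illustrative numbers, not from the original code): for
# scalar Gaussians the distance reduces to
#   (mu1 - mu2)**2 + (sqrt(sigma1) - sqrt(sigma2))**2,
# e.g. calculate_frechet_distance(np.array([0.0]), np.array([[4.0]]),
#                                 np.array([1.0]), np.array([[1.0]]))
# gives 1 + 4 + 1 - 2*sqrt(4*1) = 2.0.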
def calculate_activation_statistics(dataset, model, size=1000, batch_size=50, dims=2048):
act = get_activations(dataset, model, size, batch_size, dims, tools.device_name())
mu = np.mean(act, axis=0)
sigma = np.cov(act, rowvar=False)
return mu, sigma
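# End-to-end sketch (assumed names: `real_ds`/`fake_ds` are torch Datasets of
# images and `inception` is the InceptionV3 feature extractor this module expects):
# >>> mu_r, sig_r = calculate_activation_statistics(real_ds, inception, size=1000)
# >>> mu_f, sig_f = calculate_activation_statistics(fake_ds, inception, size=1000)
# >>> fid = calculate_frechet_distance(mu_r, sig_r, mu_f, sig_f)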
| 42.514085
| 108
| 0.68047
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 3,672
| 0.608249
|
a281d5a3c0cadb9b0e4f53931b714575ab5662a4
| 169
|
py
|
Python
|
test/test.py
|
ttkltll/fisher
|
8889705c7bde10304cfde7972b805226ac59d735
|
[
"MIT"
] | null | null | null |
test/test.py
|
ttkltll/fisher
|
8889705c7bde10304cfde7972b805226ac59d735
|
[
"MIT"
] | 3
|
2020-09-15T23:37:18.000Z
|
2020-09-16T00:36:55.000Z
|
test/test.py
|
ttkltll/fisher
|
8889705c7bde10304cfde7972b805226ac59d735
|
[
"MIT"
] | 1
|
2020-09-15T02:55:54.000Z
|
2020-09-15T02:55:54.000Z
|
from flask import Flask, current_app, request, Request
app = Flask(__name__)
ctx = app.app_context()
ctx.push()
current_app.static_folder = 'static'
ctx.pop()
app.run()
| 16.9
| 54
| 0.751479
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| 0.047337
|
a28375161ebc70272c07037bc6d8933c4916ada9
| 4,108
|
py
|
Python
|
augmentation.py
|
Pandoro/tools
|
631c6036cb74dc845668fd912588fd31aae46f8b
|
[
"MIT"
] | 1
|
2019-04-22T16:38:03.000Z
|
2019-04-22T16:38:03.000Z
|
augmentation.py
|
afcarl/tools-Pandoro
|
631c6036cb74dc845668fd912588fd31aae46f8b
|
[
"MIT"
] | 2
|
2018-03-13T10:49:48.000Z
|
2018-03-13T10:54:01.000Z
|
augmentation.py
|
afcarl/tools-Pandoro
|
631c6036cb74dc845668fd912588fd31aae46f8b
|
[
"MIT"
] | 2
|
2018-03-08T19:40:10.000Z
|
2018-06-11T14:43:49.000Z
|
import scipy.ndimage
import cv2
import numpy as np
class flip_augmentation(object):
def __init__(self, chance=0.5, image_x_axis=-1, semantics_x_axis=-1):
'''
The standard parameters will flip the image along the x axis,
assuming a 'bc01' format for the image and a 'b01' format for
the semantic image.
'''
self.chance=chance
self.image_x_axis=image_x_axis
self.semantics_x_axis=semantics_x_axis
def apply(self, image, semantic_image):
if np.random.uniform() < self.chance:
return (np.swapaxes(np.swapaxes(image, 0, self.image_x_axis)[::-1], 0, self.image_x_axis),
np.swapaxes(np.swapaxes(semantic_image, 0, self.semantics_x_axis)[::-1], 0, self.semantics_x_axis))
else:
return image, semantic_image
class scale_augmentation(object):
def __init__(self, min_scale=1.2, max_scale=1.2,image_axis_y_x=[-2, -1], semantic_axis_y_x=[-2, -1], depth_axes=0, depth_channels=None):
'''
        min_scale/max_scale specify the scaling range: a factor is drawn between 1.0/min_scale and max_scale.
the axis specify which axis to use as y and x axis.
The depth axes and channels specify which channels in which axes could represent depth,
as these need to be treated differently.
'''
self.scale_min = np.log(1./min_scale)/np.log(2.0)
self.scale_max = np.log(max_scale)/np.log(2.0)
self.image_axis_y_x = image_axis_y_x
self.semantic_axis_y_x = semantic_axis_y_x
self.depth_axes = depth_axes
self.depth_channels = depth_channels
def apply(self, image, semantic_image):
s = np.power(2.0,np.random.uniform(low=self.scale_min, high=self.scale_max))
zoom_im = np.ones(len(image.shape))
zoom_im[self.image_axis_y_x] = [s,s]
im = scipy.ndimage.interpolation.zoom(image, zoom=zoom_im, order=0)
        # if there are depth channels, we divide them by the scaling factor.
        if self.depth_channels is not None:
            im_depth_first = np.swapaxes(im, 0, self.depth_axes)
            im_depth_first[self.depth_channels] = im_depth_first[self.depth_channels] / s
            im = np.swapaxes(im_depth_first, 0, self.depth_axes)
zoom_ta = np.ones(len(semantic_image.shape))
zoom_ta[self.semantic_axis_y_x] = s, s
ta = scipy.ndimage.interpolation.zoom(semantic_image, zoom=zoom_ta, order=0)
return im, ta
class pca_color_augmentation(object):
def __init__(self, sigma=0.1, color_axis=1, color_channels=[0,1,2]):
'''
color_axis represents the color_axis in the images when the augmentation is applied.
This is not considered for the training step. The color_channels parameter is used
in both cases though and should be consistent!
'''
self.sigma=sigma
self.color_axis=color_axis
self.color_channels=color_channels
def train(self, color_images, color_axis):
'''
Expects a list of color images, each having 3 dimensions. The color axis needs to be specified.
'''
self.d = len(self.color_channels)
pixel_counts = [np.prod(np.swapaxes(im,color_axis, -1).shape[0:2]) for im in color_images]
pixels=np.zeros((np.sum(pixel_counts), self.d), dtype=np.float32)
count = 0
for im, current_count in zip(color_images, pixel_counts):
pixels[count:(count+current_count)] = np.swapaxes(im,color_axis, -1).reshape(current_count, -1)[:, self.color_channels]
count += current_count
self.data_mean = np.mean(pixels, 0)
pixels = pixels - self.data_mean
self.covariance = np.dot(pixels.T, pixels)/pixels.shape[0]
self.u,s,v = np.linalg.svd(self.covariance)
self.ev = np.sqrt(s)
def apply(self, image, semantic_image):
color_noise =np.dot(self.u, np.random.normal(0.0, self.sigma, self.d)*self.ev)
augmented = np.swapaxes(image, self.color_axis, -1).astype(np.float32).copy()
augmented[..., self.color_channels] += color_noise
augmented = np.swapaxes(augmented, self.color_axis, -1)
return augmented, semantic_image
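# Minimal usage sketch (shapes are assumptions for illustration; the 'bc01' image
# layout and 'b01' semantic layout follow the flip_augmentation docstring):
# >>> image = np.random.rand(1, 3, 64, 64)            # b, c, y, x
# >>> labels = np.random.randint(0, 5, (1, 64, 64))   # b, y, x
# >>> image, labels = flip_augmentation().apply(image, labels)
# >>> image, labels = scale_augmentation(min_scale=1.2, max_scale=1.2).apply(image, labels)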
| 42.791667
| 140
| 0.657741
| 4,048
| 0.985394
| 0
| 0
| 0
| 0
| 0
| 0
| 932
| 0.226874
|
a2840316fe01ccb59fbb68f41487073e6d6d5fcd
| 9,653
|
py
|
Python
|
src/config/utils/db-loader/contrail_db_loader/resources/security_group.py
|
hamzazafar/contrail-controller
|
67df90fa2d9d10263cf507c2751171c4e52f10dd
|
[
"Apache-2.0"
] | 1
|
2020-04-16T20:34:55.000Z
|
2020-04-16T20:34:55.000Z
|
src/config/utils/db-loader/contrail_db_loader/resources/security_group.py
|
hamzazafar/contrail-controller
|
67df90fa2d9d10263cf507c2751171c4e52f10dd
|
[
"Apache-2.0"
] | null | null | null |
src/config/utils/db-loader/contrail_db_loader/resources/security_group.py
|
hamzazafar/contrail-controller
|
67df90fa2d9d10263cf507c2751171c4e52f10dd
|
[
"Apache-2.0"
] | 1
|
2020-11-20T06:49:58.000Z
|
2020-11-20T06:49:58.000Z
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Juniper Networks, Inc. All rights reserved.
#
from __future__ import unicode_literals
from builtins import str
from builtins import range
import logging
from netaddr import IPNetwork
from random import randint, choice
import uuid
from .resource import Resource
from ..utils import timeit
logger = logging.getLogger(__name__)
class SecurityGroup(Resource):
_SG_ID_ALLOC_PATH = '/id/security-groups/id/'
_SG_ID_PADDING = 100000
_SG_ID_ALLOC_START_IDX = 8000000 + _SG_ID_PADDING
def __init__(self, db_manager, batch_size, zk_client, project_amount,
amount_per_project):
super(SecurityGroup, self).__init__(db_manager, batch_size, zk_client,
project_amount, amount_per_project)
self._sg_id_allocator = 0
@property
def type(self):
return 'security-group'
@property
def total_amount(self):
total_sg = (self._project_amount + self._amount_per_project *
self._project_amount)
total_acl = total_sg * 2
return total_sg + total_acl
@timeit(return_time_elapsed=True)
def create_resources(self, rules_per_sg):
sgs = []
with self._uuid_cf.batch(queue_size=self._batch_size) as uuid_batch,\
self._fqname_cf.batch(queue_size=self._batch_size) as \
fqname_batch:
for project_idx in range(self._project_amount):
fq_name = [
'default-domain',
'project-%d' % project_idx,
'default',
]
attr = {
'parent_type': 'project',
'security_group_id': self._SG_ID_ALLOC_START_IDX +
self._sg_id_allocator,
'security_group_entries': {
'policy_rule': [
self._get_rule(remote_sg=':'.join(fq_name)),
self._get_rule(ethertype='IPv6',
remote_sg=':'.join(fq_name)),
self._get_rule(direction='egress',
ethertype='IPv4',
remote_ip='0.0.0.0/0'),
self._get_rule(direction='egress',
ethertype='IPv6',
remote_ip='::/0'),
],
},
}
id_str = "%(#)010d" % {'#': self._SG_ID_PADDING +
self._sg_id_allocator}
self._zk_client.create_node(self._SG_ID_ALLOC_PATH + id_str)
sgs.append(self._create_resource('security_group',
fq_name, attr, uuid_batch,
fqname_batch))
self._sg_id_allocator += 1
for resource_idx in range(self._amount_per_project):
fq_name = [
'default-domain',
'project-%d' % project_idx,
'security-group-%d' % resource_idx,
]
policy_rule = []
for _ in range(rules_per_sg):
random_port = randint(0, 65535)
policy_rule.append(
self._get_rule(
protocol=choice(['udp', 'tcp']),
remote_ip='0.0.0.0/0',
dst_ports=(random_port, random_port)
)
)
attr = {
'parent_type': 'project',
'security_group_id': self._SG_ID_ALLOC_START_IDX +
self._sg_id_allocator,
'security_group_entries': {
'policy_rule': policy_rule,
},
}
id_str = "%(#)010d" % {'#': self._SG_ID_PADDING +
self._sg_id_allocator}
self._zk_client.create_node(self._SG_ID_ALLOC_PATH +
id_str)
sgs.append(self._create_resource('security_group',
fq_name, attr, uuid_batch,
fqname_batch))
self._sg_id_allocator += 1
with self._uuid_cf.batch(queue_size=self._batch_size) as uuid_batch,\
self._fqname_cf.batch(queue_size=self._batch_size) as \
fqname_batch:
for sg in sgs:
ingress, egress = self._policy_rule_to_acl_rule(
sg['security_group_id'],
sg['security_group_entries']['policy_rule'])
fq_name = sg['fq_name'] + ['ingress-access-control-list']
attr = {
'parent_type': 'security-group',
'access_control_list_entries': {
'dynamic': None,
'acl_rule': ingress,
},
}
self._create_resource('access_control_list', fq_name, attr,
uuid_batch, fqname_batch)
fq_name = sg['fq_name'] + ['egress-access-control-list']
attr = {
'parent_type': 'security-group',
'access_control_list_entries': {
'dynamic': None,
'acl_rule': egress,
},
}
self._create_resource('access_control_list', fq_name, attr,
uuid_batch, fqname_batch)
def _policy_rule_to_acl_rule(self, sg_id, prules):
ingress = []
egress = []
for prule in prules:
if prule['src_addresses'][0]['security_group']:
src_sg = sg_id
else:
src_sg = None
if prule['dst_addresses'][0]['security_group']:
dst_sg = sg_id
else:
dst_sg = None
arule = {
'rule_uuid': prule['rule_uuid'],
'match_condition': {
'ethertype': prule['ethertype'],
'src_address': {
'security_group': src_sg,
'subnet': prule['src_addresses'][0]['subnet'],
'virtual_network': None,
'subnet_list': [],
'network_policy': None,
},
'dst_address': {
'security_group': dst_sg,
'subnet': prule['dst_addresses'][0]['subnet'],
'virtual_network': None,
'subnet_list': [],
'network_policy': None,
},
'protocol': prule['protocol'],
'src_port': prule['src_ports'][0],
'dst_port': prule['dst_ports'][0],
},
'action_list': {
'gateway_name': None,
'log': False,
'alert': False,
'assign_routing_instance': None,
'mirror_to': None,
'simple_action': 'pass',
'apply_service': [],
},
}
if (arule['match_condition']['src_address']['security_group'] or
arule['match_condition']['src_address']['subnet']):
ingress.append(arule)
else:
egress.append(arule)
return (ingress, egress)
def _get_rule(self, direction='ingress', ethertype='IPv4', protocol='any',
remote_sg=None, remote_ip=None, src_ports=(0, 65535),
dst_ports=(0, 65535)):
if remote_ip:
ip = IPNetwork(remote_ip)
remote_ip_map = {
'ip_prefix': str(ip.ip),
'ip_prefix_len': ip.prefixlen
}
else:
remote_ip_map = None
return {
'rule_uuid': str(uuid.uuid4()),
'direction': '>',
'ethertype': ethertype,
'protocol': protocol,
'action_list': None,
'application': [],
'rule_sequence': None,
'src_addresses': [{
'security_group':
remote_sg if direction == 'ingress' else 'local',
'subnet': remote_ip_map if direction == 'ingress' else None,
'virtual_network': None,
'subnet_list': [],
'network_policy': None,
}],
'dst_addresses': [{
'security_group':
remote_sg if direction == 'egress' else 'local',
'subnet': remote_ip_map if direction == 'egress' else None,
'virtual_network': None,
'subnet_list': [],
'network_policy': None,
}],
'src_ports': [{
'start_port': src_ports[0],
'end_port': src_ports[1],
}],
'dst_ports': [{
'start_port': dst_ports[0],
'end_port': dst_ports[1],
}],
}
| 40.389121
| 79
| 0.443489
| 9,275
| 0.960841
| 0
| 0
| 5,127
| 0.53113
| 0
| 0
| 1,842
| 0.190822
|
a28511d4313faddcec24e963d4aea4b50f61ce85
| 135
|
py
|
Python
|
sopy/admin/__init__.py
|
AlexFrazer/sopython-site
|
4ede64cf6d04def596be13feeaa4d84ce8503ef3
|
[
"BSD-3-Clause"
] | 81
|
2015-02-17T17:07:27.000Z
|
2021-08-15T17:46:13.000Z
|
sopy/admin/__init__.py
|
AlexFrazer/sopython-site
|
4ede64cf6d04def596be13feeaa4d84ce8503ef3
|
[
"BSD-3-Clause"
] | 81
|
2015-02-17T17:04:16.000Z
|
2021-02-21T03:52:55.000Z
|
sopy/admin/__init__.py
|
AlexFrazer/sopython-site
|
4ede64cf6d04def596be13feeaa4d84ce8503ef3
|
[
"BSD-3-Clause"
] | 29
|
2015-01-18T18:28:06.000Z
|
2022-02-05T03:11:04.000Z
|
from flask import Blueprint
bp = Blueprint('admin', __name__)
@bp.record_once
def register(state):
from sopy.admin import views
| 15
| 33
| 0.748148
| 0
| 0
| 0
| 0
| 69
| 0.511111
| 0
| 0
| 7
| 0.051852
|
a2856ec06ce72f7e0f5fc2a98ea631945b111855
| 1,790
|
py
|
Python
|
onmt/modules/extensions/fused_layer_norm/setup.py
|
quanpn90/NMTGMinor
|
0e5f989c8bc01c6c8dc3a8c1ce7c05bfd884b796
|
[
"MIT"
] | 75
|
2019-05-02T10:37:39.000Z
|
2022-02-13T17:53:24.000Z
|
onmt/modules/extensions/fused_layer_norm/setup.py
|
quanpn90/NMTGMinor
|
0e5f989c8bc01c6c8dc3a8c1ce7c05bfd884b796
|
[
"MIT"
] | 11
|
2018-11-08T16:52:51.000Z
|
2021-09-23T15:01:14.000Z
|
onmt/modules/extensions/fused_layer_norm/setup.py
|
quanpn90/NMTGMinor
|
0e5f989c8bc01c6c8dc3a8c1ce7c05bfd884b796
|
[
"MIT"
] | 34
|
2018-06-04T14:20:01.000Z
|
2022-01-26T08:10:05.000Z
|
import os
import torch
from torch.utils import cpp_extension
from setuptools import setup
from torch.utils.cpp_extension import BuildExtension, CUDAExtension
# ninja build does not work unless include_dirs are abs path
this_dir = os.path.dirname(os.path.abspath(__file__))
cc_flag = []
ext_modules = []
cc_flag.append('-gencode')
cc_flag.append('arch=compute_75,code=sm_75')
cc_flag.append('-gencode')
cc_flag.append('arch=compute_80,code=sm_80')
cc_flag.append('-gencode')
cc_flag.append('arch=compute_86,code=sm_86')
print("\n\ntorch.__version__ = {}\n\n".format(torch.__version__))
TORCH_MAJOR = int(torch.__version__.split('.')[0])
TORCH_MINOR = int(torch.__version__.split('.')[1])
version_ge_1_1 = []
if (TORCH_MAJOR > 1) or (TORCH_MAJOR == 1 and TORCH_MINOR > 0):
version_ge_1_1 = ['-DVERSION_GE_1_1']
version_ge_1_3 = []
if (TORCH_MAJOR > 1) or (TORCH_MAJOR == 1 and TORCH_MINOR > 2):
version_ge_1_3 = ['-DVERSION_GE_1_3']
version_ge_1_5 = []
if (TORCH_MAJOR > 1) or (TORCH_MAJOR == 1 and TORCH_MINOR > 4):
version_ge_1_5 = ['-DVERSION_GE_1_5']
version_dependent_macros = version_ge_1_1 + version_ge_1_3 + version_ge_1_5
ext_modules.append(
CUDAExtension(name='fused_layer_norm_cuda',
sources=['layer_norm_cuda.cpp',
'layer_norm_cuda_kernel.cu'],
extra_compile_args={'cxx': ['-O3'] + version_dependent_macros,
'nvcc':['-maxrregcount=50',
'-O3',
'--use_fast_math'] + version_dependent_macros}))
setup(
name="fused_layer_norm_cuda",
ext_modules=ext_modules,
cmdclass={"build_ext": BuildExtension},
)
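# Build/installation sketch (assuming a CUDA toolchain compatible with the
# compute capabilities listed in cc_flag is available on the machine):
#   python setup.py install            # or: python setup.py build_ext --inplace
#   >>> import fused_layer_norm_cuda   # the extension module declared above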
| 37.291667
| 102
| 0.640782
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 428
| 0.239106
|
a285b6ae623657a020499a2ec4ea9b0765d78e0b
| 5,708
|
py
|
Python
|
expenses/migrations/0001_initial.py
|
inducer/expensely
|
b88b830e466db63cce5acfcdb0269411c7b39358
|
[
"MIT",
"Unlicense"
] | 1
|
2021-07-02T02:03:09.000Z
|
2021-07-02T02:03:09.000Z
|
expenses/migrations/0001_initial.py
|
inducer/expensely
|
b88b830e466db63cce5acfcdb0269411c7b39358
|
[
"MIT",
"Unlicense"
] | null | null | null |
expenses/migrations/0001_initial.py
|
inducer/expensely
|
b88b830e466db63cce5acfcdb0269411c7b39358
|
[
"MIT",
"Unlicense"
] | 2
|
2016-08-24T05:25:57.000Z
|
2018-12-31T01:06:07.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-24 23:01
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Account',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('symbol', models.CharField(max_length=20)),
('name', models.CharField(max_length=200)),
('category', models.CharField(choices=[('fund', 'Funding source'), ('exp', 'Expenses'), ('other', 'Other')], max_length=10)),
],
options={
'ordering': ['symbol'],
},
),
migrations.CreateModel(
name='AccountGroup',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
],
),
migrations.CreateModel(
name='Currency',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('symbol', models.CharField(max_length=10)),
('name', models.CharField(max_length=200)),
],
options={
'verbose_name_plural': 'currencies',
},
),
migrations.CreateModel(
name='Entry',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('valid_date', models.DateField(default=django.utils.timezone.now)),
('description', models.CharField(max_length=200)),
('create_date', models.DateTimeField(default=django.utils.timezone.now)),
],
options={
'ordering': ['valid_date', 'description'],
'verbose_name_plural': 'entries',
},
),
migrations.CreateModel(
name='EntryCategory',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
],
options={
'verbose_name_plural': 'entry categories',
},
),
migrations.CreateModel(
name='EntryComment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('create_date', models.DateTimeField(default=django.utils.timezone.now)),
('comment', models.TextField()),
('creator', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('entry', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='expenses.Entry')),
],
),
migrations.CreateModel(
name='EntryComponent',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('amount', models.DecimalField(decimal_places=2, max_digits=19)),
('account', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='entry_components', to='expenses.Account')),
('entry', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='components', to='expenses.Entry')),
],
options={
'ordering': ['amount'],
},
),
migrations.CreateModel(
name='EntryValidation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('create_date', models.DateTimeField(default=django.utils.timezone.now)),
('comments', models.TextField(blank=True, null=True)),
('entry_component', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='expenses.EntryComponent')),
('validator', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.AddField(
model_name='entry',
name='category',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='expenses.EntryCategory'),
),
migrations.AddField(
model_name='entry',
name='creator',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='account',
name='currency',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='expenses.Currency'),
),
migrations.AddField(
model_name='account',
name='group',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='expenses.AccountGroup'),
),
migrations.AddField(
model_name='account',
name='guardian',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
]
| 43.572519
| 148
| 0.576384
| 5,455
| 0.955676
| 0
| 0
| 0
| 0
| 0
| 0
| 920
| 0.161177
|
a285bdaafcb56b79447200be9f8737064e66fac5
| 800
|
py
|
Python
|
pages/themes/ParallelProgramming-Lecture4/examples/processQueue/TASK_sharing_variable_by_processes.py
|
WWWCourses/PythonCourseNetIT-Slides
|
78dbb5eb7695cc64042b71a1911d4ef3feddb074
|
[
"MIT"
] | null | null | null |
pages/themes/ParallelProgramming-Lecture4/examples/processQueue/TASK_sharing_variable_by_processes.py
|
WWWCourses/PythonCourseNetIT-Slides
|
78dbb5eb7695cc64042b71a1911d4ef3feddb074
|
[
"MIT"
] | null | null | null |
pages/themes/ParallelProgramming-Lecture4/examples/processQueue/TASK_sharing_variable_by_processes.py
|
WWWCourses/PythonCourseNetIT-Slides
|
78dbb5eb7695cc64042b71a1911d4ef3feddb074
|
[
"MIT"
] | null | null | null |
"""ЗАДАЧА:
Разгледайте дадения по-долу код и направете необходимите промени,
така че след приключването на двата процеса променливата x да има стойност 20.
Използвайте multiprocessing.Queue() за да обмените текущата стойност на x между процесите.
"""
import multiprocessing as mp
def increment(r):
global x
for _ in r:
x+=1
print(f"x in {mp.current_process().name}: {x}")
if __name__ == "__main__":
x = 0
incr_count = 10
    # create and start 2 processes which should increment a variable:
pr1 = mp.Process(target=increment, args=(range(incr_count),))
pr2 = mp.Process(target=increment, args=(range(incr_count),))
pr1.start(); pr2.start()
    # wait for the processes to finish
pr1.join();pr2.join()
print(f"x in {mp.current_process().name}: {x}")
# Expected output
# x in Main Process: 20
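# One possible solution sketch (not part of the original exercise file): hand the
# running value of x between the processes through a multiprocessing.Queue, so the
# second process continues from the value produced by the first.
# def increment_shared(r, q):
#     x = q.get()              # take the current value (blocks until available)
#     for _ in r:
#         x += 1
#     q.put(x)                 # publish the updated value
# q = mp.Queue(); q.put(0)
# p1 = mp.Process(target=increment_shared, args=(range(incr_count), q))
# p2 = mp.Process(target=increment_shared, args=(range(incr_count), q))
# p1.start(); p2.start(); p1.join(); p2.join()
# x = q.get()                  # -> 20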
| 23.529412
| 91
| 0.71625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 662
| 0.668012
|
a28631f9170fbf0128fb181d7e9585c79cf0e573
| 241
|
py
|
Python
|
pythonProject/02al28pass_elipsis_placeholders/exercicio_num_int.py
|
D-Wolter/PycharmProjects
|
c8d6144efa30261bff72a3e0414a0d80f6730f9b
|
[
"MIT"
] | null | null | null |
pythonProject/02al28pass_elipsis_placeholders/exercicio_num_int.py
|
D-Wolter/PycharmProjects
|
c8d6144efa30261bff72a3e0414a0d80f6730f9b
|
[
"MIT"
] | null | null | null |
pythonProject/02al28pass_elipsis_placeholders/exercicio_num_int.py
|
D-Wolter/PycharmProjects
|
c8d6144efa30261bff72a3e0414a0d80f6730f9b
|
[
"MIT"
] | null | null | null |
numero_int = input('Digite um numero inteiro')
if numero_int.isdigit():
    numero_int = int(numero_int)
    if numero_int % 2 == 0:
        print('o numero e par')
    else:
        print('o numero e impar')
else:
    print('o valor digitado nao e um numero inteiro')
| 21.909091
| 46
| 0.605809
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 60
| 0.248963
|
a286ecdd87da9c3a2db9af7dec80faeeeab6de6c
| 327
|
py
|
Python
|
Ejercicio_DecimalBinario.py
|
Sofia1306/Python_Clases
|
60bfab6425269b572ec738abcb5f96d74fc56f95
|
[
"MIT"
] | null | null | null |
Ejercicio_DecimalBinario.py
|
Sofia1306/Python_Clases
|
60bfab6425269b572ec738abcb5f96d74fc56f95
|
[
"MIT"
] | null | null | null |
Ejercicio_DecimalBinario.py
|
Sofia1306/Python_Clases
|
60bfab6425269b572ec738abcb5f96d74fc56f95
|
[
"MIT"
] | null | null | null |
"""Ejercicio Decimal a Binario """
import math
numero = int(input('Ingresa un número: \n'))
binario = ''
while (numero > 0):
if (numero%2 == 0):
binario = '0' + binario
else:
binario = '1' + binario
numero = int(math.floor(numero/2))
print(f'El número en binario es {binario}')
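# Worked example (illustrative): for 13 the loop produces remainders 1, 0, 1, 1
# (13 -> 6, 6 -> 3, 3 -> 1, 1 -> 0), prepending each one, so binario == '1101'.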
| 19.235294
| 45
| 0.562691
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 103
| 0.31307
|
a288d8b6411de0a207c959a000823b29df69e32d
| 743
|
py
|
Python
|
src/server.py
|
awsassets/superfish
|
77d93ec864de22b592bc4b69aa5ab7580aa383ab
|
[
"MIT"
] | null | null | null |
src/server.py
|
awsassets/superfish
|
77d93ec864de22b592bc4b69aa5ab7580aa383ab
|
[
"MIT"
] | null | null | null |
src/server.py
|
awsassets/superfish
|
77d93ec864de22b592bc4b69aa5ab7580aa383ab
|
[
"MIT"
] | null | null | null |
import flask ; from flask import *
def Serve(email_form, password_form, rd, dic, host="0.0.0.0", port="8080"):
app = Flask(__name__, template_folder="../clone")
# login storage
class Login:
email = ""
pwd = ""
ip = ""
# forms
@app.get("/")
def index():
return render_template("index.html")
@app.post("/login")
def login():
Login.ip = request.remote_addr
Login.email = request.form.get(email_form)
Login.pwd = request.form.get(password_form)
        outputfunc = dic["func"]
        res = dic["res"]
        outputfunc(res=res, Login=Login)
return flask.redirect(rd)
print("\n-= Flask Logs =-")
app.run(host=host, port=port)
| 24.766667
| 75
| 0.561238
| 64
| 0.086137
| 0
| 0
| 393
| 0.528937
| 0
| 0
| 107
| 0.144011
|
a28a52e59294caa6c7f0ce984c5ca19e80db8e8f
| 152
|
py
|
Python
|
block/admin.py
|
amirkh75/user_block_chain
|
f9bdba11c1d8b724787151480cd52155ad8718e4
|
[
"MIT"
] | null | null | null |
block/admin.py
|
amirkh75/user_block_chain
|
f9bdba11c1d8b724787151480cd52155ad8718e4
|
[
"MIT"
] | null | null | null |
block/admin.py
|
amirkh75/user_block_chain
|
f9bdba11c1d8b724787151480cd52155ad8718e4
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Block
@admin.register(Block)
class ActionAdmin(admin.ModelAdmin):
list_filter = ('created',)
| 19
| 36
| 0.763158
| 67
| 0.440789
| 0
| 0
| 90
| 0.592105
| 0
| 0
| 9
| 0.059211
|
a28aac04289f6912a4237acfbf9626f8b5f787ef
| 593
|
py
|
Python
|
SOURCE/test_ex01.py
|
PinkPhayate/Information_Access_Design
|
a6ae3b055e971708d67fda7129e51cd0d9b16d2f
|
[
"MIT"
] | null | null | null |
SOURCE/test_ex01.py
|
PinkPhayate/Information_Access_Design
|
a6ae3b055e971708d67fda7129e51cd0d9b16d2f
|
[
"MIT"
] | null | null | null |
SOURCE/test_ex01.py
|
PinkPhayate/Information_Access_Design
|
a6ae3b055e971708d67fda7129e51cd0d9b16d2f
|
[
"MIT"
] | null | null | null |
import re,io,os.path,os
def remove_tag(str):
alldigit = re.compile(r"^<.+")
if alldigit.search(str) != None:
return False
return True
for line in open('./../text_list', "r"):
filename = './../TXT/tragedies/'+line.rstrip()
    print(filename)
f = open("./../TXT/test_"+line.rstrip(),"w")
for line in io.open(filename,"r",encoding="utf-16"):
if remove_tag(line):
# remove signiture
line = re.sub(re.compile("[!-/:-@[-`{-~;?]"),"", line).rstrip()
# print line
f.write(line.encode('utf-8'))
f.close()
| 25.782609
| 75
| 0.53457
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 134
| 0.22597
|
a28c65ff15ac6df969c2d1a4bd260f0f3974490e
| 1,736
|
py
|
Python
|
lumiml/examples/test_install.py
|
ovra-peers/lumiml
|
8df5f591edacb36c473b6e09f35da8098754b2e8
|
[
"BSD-3-Clause"
] | 4
|
2019-01-29T12:02:02.000Z
|
2019-12-26T11:12:32.000Z
|
lumiml/examples/test_install.py
|
ovra-peers/lumiml
|
8df5f591edacb36c473b6e09f35da8098754b2e8
|
[
"BSD-3-Clause"
] | null | null | null |
lumiml/examples/test_install.py
|
ovra-peers/lumiml
|
8df5f591edacb36c473b6e09f35da8098754b2e8
|
[
"BSD-3-Clause"
] | 4
|
2019-12-26T08:22:38.000Z
|
2020-10-07T09:37:12.000Z
|
def TestInstall():
from ..base import DeltaBasisFeatures, TriangBasisFeatures, HistBasisFeatures
from ..models import PoissonElasticNet
from ..model_selection import PoissonElasticNetCV
from ..simulator import StretchedExponentialDistribution, Simulator
import numpy as np
# generate st_exp distribution
gamma_eval = np.logspace(-2.5,1,500)
timeVec = np.linspace(-30, 1000, 10000)
#### simulator params ###
bcg_mean = 100
snr = 1e4
## dist params
bww = 0.5
tww = 5
st_exp = StretchedExponentialDistribution(beta_kww=bww, gamma_eval=gamma_eval, n_sum_terms=200, tau_kww=tww)
sim = Simulator(distribution=st_exp, time_scale=timeVec, background_mean=bcg_mean, snr=snr)
# simulate the streched exponential distribution
sim.simulate_data()
dbf = DeltaBasisFeatures(g_min=gamma_eval[0], g_max=gamma_eval[-1], omega=2*np.pi,with_bias=False)
dbf.fit()
_filter = sim.time_scale >= 0
t = sim.time_scale[_filter].copy()
y = sim.data_simulated.simulated[_filter].copy()
X = dbf.fit_transform(t[:, np.newaxis])
penet = PoissonElasticNet(
alpha=1e-8,
fix_intercept=True,
intercept_guess=bcg_mean,
max_iter=1
)
penet_cv = PoissonElasticNetCV(
estimator=penet,
param_grid={'alpha': np.logspace(-9, -5, 31)},
cv=3,
verbose=1,
n_jobs=2
)
penet_cv.fit(X, y)
print(penet_cv.best_estimator_.coef_)
return None
if __name__ == '__main__':
try:
TestInstall()
except Exception as e:
        print(e)
        print('Something is wrong with installation! Please read the error message carefully to try and resolve it.')
| 24.450704
| 116
| 0.663594
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 235
| 0.135369
|
a28d85267ddf700f8793d60f25330f1799660aba
| 422
|
py
|
Python
|
urllib/Cookie/CookieServer.py
|
pengchenyu111/SpiderLearning
|
d1fca1c7f46bfb22ad23f9396d0f2e2301ec4534
|
[
"Apache-2.0"
] | 3
|
2020-11-21T13:13:46.000Z
|
2020-12-03T05:43:32.000Z
|
urllib/Cookie/CookieServer.py
|
pengchenyu111/SpiderLearning
|
d1fca1c7f46bfb22ad23f9396d0f2e2301ec4534
|
[
"Apache-2.0"
] | null | null | null |
urllib/Cookie/CookieServer.py
|
pengchenyu111/SpiderLearning
|
d1fca1c7f46bfb22ad23f9396d0f2e2301ec4534
|
[
"Apache-2.0"
] | 1
|
2020-12-03T05:43:53.000Z
|
2020-12-03T05:43:53.000Z
|
from flask import Flask
from flask import request
app = Flask(__name__)
@app.route("/readCookie")
def readCookie():
print(request.cookies)
print(request.cookies.get('MyCookie'))
return "hello world"
@app.route("/writeCookie")
def writeCookie():
response = app.make_response('write cookie')
response.set_cookie("id", value="12345678")
return response
if __name__ == '__main__':
app.run()
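# Manual check sketch (app.run() defaults to 127.0.0.1:5000; the curl flags shown
# are standard: -c stores received cookies, -b sends a cookie with the request):
#   curl -c cookies.txt http://127.0.0.1:5000/writeCookie
#   curl -b "MyCookie=test-value" http://127.0.0.1:5000/readCookie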
| 18.347826
| 48
| 0.694313
| 0
| 0
| 0
| 0
| 300
| 0.7109
| 0
| 0
| 88
| 0.208531
|
a28eb678ba5f89d1bb90f58b1a3981298261532f
| 3,691
|
py
|
Python
|
Aihan-Liu-Individual-project/Code/demo.py
|
laihanel/Final-Project-Group3
|
e58cd526d8e26ee6b13b5a77af6ebcc1ff7e77ca
|
[
"MIT"
] | null | null | null |
Aihan-Liu-Individual-project/Code/demo.py
|
laihanel/Final-Project-Group3
|
e58cd526d8e26ee6b13b5a77af6ebcc1ff7e77ca
|
[
"MIT"
] | 8
|
2021-11-11T02:52:41.000Z
|
2021-12-05T23:01:05.000Z
|
Code/demo.py
|
laihanel/Final-Project-Group3
|
e58cd526d8e26ee6b13b5a77af6ebcc1ff7e77ca
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
import numpy as np
import cv2
import os
import shutil
from matplotlib import pyplot as plt
from Model_Definition import VC3D
from mypath import NICKNAME, DATA_DIR, PATH
# TODO: Now can display images with plt.show(), need to solve display on cloud instance
OUT_DIR = PATH + os.path.sep + 'Result'
DEMO_DIR = PATH + os.path.sep + 'Demo'
# %%
def check_folder_exist(folder_name):
if os.path.exists(folder_name):
shutil.rmtree(folder_name)
os.makedirs(folder_name)
else:
os.makedirs(folder_name)
check_folder_exist(OUT_DIR)
# %%
def center_crop(frame):
frame = frame[:120, 22:142, :]
return np.array(frame).astype(np.uint8)
# %%
def main():
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
print("Device being used:", device)
with open('ucf_9_labels.txt', 'r') as f:
class_names = f.readlines()
f.close()
# init model
model = VC3D()
checkpoint = torch.load(f'model_{NICKNAME}.pt', map_location=device)
model.load_state_dict(checkpoint)
model.to(device)
model.eval()
# read video
video_name = 'PlayingGuitar'
video = DATA_DIR + os.path.sep + video_name + os.path.sep + 'v_' + video_name + '_g09_c04.avi'
# video = DEMO_DIR + os.path.sep + video_name + '.mp4'
cap = cv2.VideoCapture(video)
retaining = True
fps = int(cap.get(5))
size = (int(cap.get(3)),
int(cap.get(4)))
fourcc = int(cap.get(6))
frames_num = cap.get(7)
    print('Video read, with fps %s, size %s and format %s' % (fps, size,
chr(fourcc & 0xFF) + chr((fourcc >> 8) & 0xFF) + chr(
(fourcc >> 16) & 0xFF) + chr(
(fourcc >> 24) & 0xFF)))
out = cv2.VideoWriter(os.path.join(OUT_DIR, video_name + '_result.mp4'), 1983148141, fps, size)
clip = []
count = 0
while retaining:
count += 1
retaining, frame = cap.read()
if not retaining and frame is None:
continue
tmp_ = center_crop(cv2.resize(frame, (171, 128)))
tmp = tmp_ - np.array([[[90.0, 98.0, 102.0]]])
clip.append(tmp)
if len(clip) == 16:
inputs = np.array(clip).astype(np.float32)
inputs = np.expand_dims(inputs, axis=0)
inputs = np.transpose(inputs, (0, 4, 1, 2, 3))
inputs = torch.from_numpy(inputs)
inputs = torch.autograd.Variable(inputs, requires_grad=False).to(device)
with torch.no_grad():
outputs = model.forward(inputs)
probs = nn.Softmax(dim=1)(outputs)
label = torch.max(probs, 1)[1].detach().cpu().numpy()[0]
cv2.putText(frame, class_names[label].split(' ')[-1].strip(), (20, 20),
cv2.FONT_HERSHEY_SIMPLEX, 0.6,
(0, 0, 255), 1)
cv2.putText(frame, "prob: %.4f" % probs[0][label], (20, 40),
cv2.FONT_HERSHEY_SIMPLEX, 0.6,
(0, 0, 255), 1)
out.write(frame)
clip.pop(0)
if count % 10 == 0:
print(str(count / frames_num * 100) + '%')
if cv2.waitKey(1) & 0xFF == ord('q'):
break
# cv2.imshow('result', frame)
# cv2.waitKey(30)
# plt.imshow(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB))
# plt.title('result')
# plt.show()
out.release()
cap.release()
cv2.destroyAllWindows()
if __name__ == '__main__':
main()
| 32.955357
| 117
| 0.544568
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 526
| 0.142509
|
a28f7b4918b94b07856ae26b2413470f943cc99a
| 123
|
py
|
Python
|
remove_punctuations.py
|
sparemeyoursoliloquy/Python-Exercises
|
18f2075327dec0dbc55edd4f50fa3f71258777e1
|
[
"MIT"
] | 3
|
2020-07-28T03:31:27.000Z
|
2020-07-28T03:31:32.000Z
|
remove_punctuations.py
|
sparemeyoursoliloquy/Python-Exercises
|
18f2075327dec0dbc55edd4f50fa3f71258777e1
|
[
"MIT"
] | null | null | null |
remove_punctuations.py
|
sparemeyoursoliloquy/Python-Exercises
|
18f2075327dec0dbc55edd4f50fa3f71258777e1
|
[
"MIT"
] | null | null | null |
text = input()
punc_remove = [",", ".", "!", "?"]
for i in punc_remove:
text = text.replace(i, "")
print(text.lower())
| 20.5
| 34
| 0.544715
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 14
| 0.113821
|
a29166d0430486b39f985f973d6999d2da3a0aae
| 5,519
|
py
|
Python
|
{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/multisalesforce/views.py
|
oddbird/sfdo-template
|
ac128ca5b2db18d3069a1535cb6ac23f83aa987f
|
[
"BSD-3-Clause"
] | 3
|
2018-08-23T18:59:59.000Z
|
2021-05-25T00:05:52.000Z
|
{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/multisalesforce/views.py
|
oddbird/sfdo-template
|
ac128ca5b2db18d3069a1535cb6ac23f83aa987f
|
[
"BSD-3-Clause"
] | 9
|
2018-09-28T21:30:35.000Z
|
2020-08-10T20:42:34.000Z
|
{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/multisalesforce/views.py
|
oddbird/sfdo-template
|
ac128ca5b2db18d3069a1535cb6ac23f83aa987f
|
[
"BSD-3-Clause"
] | 2
|
2019-03-28T05:03:08.000Z
|
2019-05-05T18:10:30.000Z
|
import logging
import re
import requests
from allauth.socialaccount.providers.oauth2.views import (
OAuth2CallbackView,
OAuth2LoginView,
)
from allauth.socialaccount.providers.salesforce.views import (
SalesforceOAuth2Adapter as SalesforceOAuth2BaseAdapter,
)
from allauth.utils import get_request_param
from django.core.exceptions import SuspiciousOperation
from sfdo_template_helpers.crypto import fernet_decrypt, fernet_encrypt
from ..api.constants import ORGANIZATION_DETAILS
from .provider import (
SalesforceCustomProvider,
SalesforceProductionProvider,
SalesforceTestProvider,
)
logger = logging.getLogger(__name__)
ORGID_RE = re.compile(r"^00D[a-zA-Z0-9]{15}$")
CUSTOM_DOMAIN_RE = re.compile(r"^[a-zA-Z0-9.-]+$")
class SalesforcePermissionsError(Exception):
pass
class SalesforceOAuth2Mixin:
def get_org_details(self, extra_data, token):
headers = {"Authorization": f"Bearer {token}"}
# Confirm canModifyAllData:
org_info_url = (extra_data["urls"]["rest"] + "connect/organization").format(
version="44.0"
)
resp = requests.get(org_info_url, headers=headers)
resp.raise_for_status()
# Also contains resp.json()["name"], but not ["type"], so it's
# insufficient to just call this endpoint.
if not resp.json()["userSettings"]["canModifyAllData"]: # pragma: nocover
raise SalesforcePermissionsError
# Get org name and type:
org_id = extra_data["organization_id"]
self._validate_org_id(org_id)
org_url = (extra_data["urls"]["sobjects"] + "Organization/{org_id}").format(
version="44.0", org_id=org_id
)
resp = requests.get(org_url, headers=headers)
resp.raise_for_status()
return resp.json()
def complete_login(self, request, app, token, **kwargs):
token = fernet_decrypt(token.token)
headers = {"Authorization": f"Bearer {token}"}
verifier = request.session["socialaccount_state"][1]
logger.info(
"Calling back to Salesforce to complete login.",
extra={"tag": "oauth", "context": {"verifier": verifier}},
)
resp = requests.get(self.userinfo_url, headers=headers)
resp.raise_for_status()
extra_data = resp.json()
instance_url = kwargs.get("response", {}).get("instance_url", None)
ret = self.get_provider().sociallogin_from_response(request, extra_data)
ret.account.extra_data["instance_url"] = instance_url
try:
org_details = self.get_org_details(extra_data, token)
except (
requests.HTTPError,
KeyError,
SalesforcePermissionsError,
): # pragma: nocover
org_details = None
ret.account.extra_data[ORGANIZATION_DETAILS] = org_details
return ret
def parse_token(self, data):
"""Wrap OAuth2Base.parse_token to encrypt tokens for storage.
Called from OAuth2CallbackView"""
data["access_token"] = fernet_encrypt(data["access_token"])
data["refresh_token"] = fernet_encrypt(data["refresh_token"])
return super().parse_token(data)
def _validate_org_id(self, org_id):
if not ORGID_RE.match(org_id):
raise SuspiciousOperation("Invalid org Id")
class SalesforceOAuth2ProductionAdapter(
SalesforceOAuth2Mixin, SalesforceOAuth2BaseAdapter
):
provider_id = SalesforceProductionProvider.id
class SalesforceOAuth2SandboxAdapter(
SalesforceOAuth2Mixin, SalesforceOAuth2BaseAdapter
):
provider_id = SalesforceTestProvider.id
class SalesforceOAuth2CustomAdapter(SalesforceOAuth2Mixin, SalesforceOAuth2BaseAdapter):
provider_id = SalesforceCustomProvider.id
@property
def base_url(self):
custom_domain = self.request.GET.get(
"custom_domain", self.request.session.get("custom_domain")
)
if not CUSTOM_DOMAIN_RE.match(custom_domain):
raise SuspiciousOperation("Invalid custom domain")
self.request.session["custom_domain"] = custom_domain
return "https://{}.my.salesforce.com".format(custom_domain)
class LoggingOAuth2LoginView(OAuth2LoginView):
def dispatch(self, request, *args, **kwargs):
ret = super().dispatch(request, *args, **kwargs)
verifier = request.session["socialaccount_state"][1]
logger.info(
"Dispatching OAuth login",
extra={"tag": "oauth", "context": {"verifier": verifier}},
)
return ret
class LoggingOAuth2CallbackView(OAuth2CallbackView):
def dispatch(self, request, *args, **kwargs):
verifier = get_request_param(request, "state")
logger.info(
"Dispatching OAuth callback",
extra={"tag": "oauth", "context": {"verifier": verifier}},
)
return super().dispatch(request, *args, **kwargs)
prod_oauth2_login = LoggingOAuth2LoginView.adapter_view(
SalesforceOAuth2ProductionAdapter
)
prod_oauth2_callback = LoggingOAuth2CallbackView.adapter_view(
SalesforceOAuth2ProductionAdapter
)
sandbox_oauth2_login = LoggingOAuth2LoginView.adapter_view(
SalesforceOAuth2SandboxAdapter
)
sandbox_oauth2_callback = LoggingOAuth2CallbackView.adapter_view(
SalesforceOAuth2SandboxAdapter
)
custom_oauth2_login = LoggingOAuth2LoginView.adapter_view(SalesforceOAuth2CustomAdapter)
custom_oauth2_callback = LoggingOAuth2CallbackView.adapter_view(
SalesforceOAuth2CustomAdapter
)
| 34.067901
| 88
| 0.694872
| 4,158
| 0.753397
| 0
| 0
| 407
| 0.073745
| 0
| 0
| 985
| 0.178474
|
a29207dc0a5cb4e063b1e7adbc8c0acc0f001bf3
| 475
|
py
|
Python
|
7_testing/autotest/student.py
|
ProGabe/teals
|
7ebf0b6e6f81d8a4c44baa7b5d3a9d95267ec1e3
|
[
"MIT"
] | null | null | null |
7_testing/autotest/student.py
|
ProGabe/teals
|
7ebf0b6e6f81d8a4c44baa7b5d3a9d95267ec1e3
|
[
"MIT"
] | 9
|
2019-11-21T13:12:47.000Z
|
2021-02-02T14:52:52.000Z
|
7_testing/autotest/student.py
|
ProGabe/teals
|
7ebf0b6e6f81d8a4c44baa7b5d3a9d95267ec1e3
|
[
"MIT"
] | 2
|
2021-01-25T03:38:30.000Z
|
2021-03-07T23:54:53.000Z
|
'''
Student: Dan Grecoe
Assignment: Homework 1
Submission of the first homework assignment. The assignment
was to create a python file with 2 functions
multiply - Takes two parameters x and y and returns the product
of the values provided.
noop - Takes 0 parameters and returns None
'''
def multiply(x, y):
return x * y
def noop():
#raise Exception("Bad things happened")
print("Dumb student program")
return None
| 22.619048
| 67
| 0.669474
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 390
| 0.821053
|
a292f32feefb9582465a4d958817a596211378a8
| 31,533
|
py
|
Python
|
nova/tests/unit/virt/ec2/test_ec2.py
|
platform9/omni-devstack-fixes
|
bc94150974fe181840ab3c5d618fa5ce3db44805
|
[
"Apache-2.0"
] | null | null | null |
nova/tests/unit/virt/ec2/test_ec2.py
|
platform9/omni-devstack-fixes
|
bc94150974fe181840ab3c5d618fa5ce3db44805
|
[
"Apache-2.0"
] | 1
|
2020-03-03T13:53:23.000Z
|
2020-03-03T13:53:23.000Z
|
nova/tests/unit/virt/ec2/test_ec2.py
|
platform9/omni-devstack-fixes
|
bc94150974fe181840ab3c5d618fa5ce3db44805
|
[
"Apache-2.0"
] | 1
|
2020-09-03T20:54:21.000Z
|
2020-09-03T20:54:21.000Z
|
"""
Copyright 2016 Platform9 Systems Inc.
All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
"""
import base64
import contextlib
import boto3
import mock
from moto import mock_ec2
from oslo_log import log as logging
from oslo_utils import uuidutils
from credsmgrclient.common.exceptions import HTTPBadGateway
from nova.compute import task_states
from nova import context
from nova import exception
from nova.image.glance import GlanceImageServiceV2
from nova import objects
from nova import test
from nova.tests.unit import fake_instance
from nova.tests.unit import matchers
from nova.virt.ec2 import EC2Driver
LOG = logging.getLogger(__name__)
keypair_exist_response = {
'KeyPairs': [
{
'KeyName': 'fake_key',
'KeyFingerprint': 'fake_key_data'
},
{
'KeyName': 'fake_key1',
'KeyFingerprint': 'fake_key_data1'
}
]
}
def fake_get_password(*args, **kwargs):
return {'PasswordData': "Fake_encrypted_pass"}
class EC2DriverTestCase(test.NoDBTestCase):
@mock_ec2
def setUp(self):
super(EC2DriverTestCase, self).setUp()
self.fake_access_key = 'aws_access_key'
self.fake_secret_key = 'aws_secret_key'
self.region_name = 'us-east-1'
self.az = 'us-east-1a'
self.flags(access_key=self.fake_access_key,
secret_key=self.fake_secret_key,
# Region name cannot be fake
region_name=self.region_name,
az=self.az,
group='AWS')
self.flags(api_servers=['http://localhost:9292'], group='glance')
self.flags(transport_url='memory://')
self.conn = EC2Driver(None, False)
self.type_data = None
self.project_id = 'fake'
self.user_id = 'fake'
self.instance_node = None
self.uuid = None
self.instance = None
self.context = context.RequestContext(self.user_id, self.project_id)
self.fake_ec2_conn = boto3.client(
"ec2", aws_access_key_id=self.fake_access_key,
aws_secret_access_key=self.fake_secret_key,
region_name=self.region_name)
def tearDown(self):
super(EC2DriverTestCase, self).tearDown()
@mock_ec2
def reset(self):
instance_list = self.fake_ec2_conn.describe_instances()
# terminated instances are considered deleted and hence ignore them
instance_id_list = []
for reservation in instance_list['Reservations']:
instance = reservation['Instances'][0]
if instance['State']['Name'] != 'terminated':
instance_id_list.append(instance['InstanceId'])
if len(instance_id_list) > 0:
self.fake_ec2_conn.stop_instances(InstanceIds=instance_id_list,
Force=True)
self.fake_ec2_conn.terminate_instances(
InstanceIds=instance_id_list)
self.type_data = None
self.instance = None
self.uuid = None
self.instance_node = None
def _get_instance_flavor_details(self):
return {'memory_mb': 2048.0,
'root_gb': 0,
'deleted_at': None,
'name': 't2.small',
'deleted': 0,
'created_at': None,
'ephemeral_gb': 0,
'updated_at': None,
'disabled': False,
'vcpus': 1,
'extra_specs': {},
'swap': 0,
'rxtx_factor': 1.0,
'is_public': True,
'flavorid': '1',
'vcpu_weight': None,
'id': 2}
def get_bdm(self):
return {'/dev/sdf': {}, '/dev/sdg': {}, '/dev/sdh': {}, '/dev/sdi': {},
'/dev/sdj': {}, '/dev/sdk': {}, '/dev/sdl': {}, '/dev/sdm': {},
'/dev/sdn': {}, '/dev/sdo': {}, '/dev/sdp': {}}
def _create_instance(self, key_name=None, key_data=None, user_data=None,
metadata={}):
uuid = uuidutils.generate_uuid()
self.type_data = self._get_instance_flavor_details()
values = {'name': 'fake_instance',
'id': 1,
'uuid': uuid,
'project_id': self.project_id,
'user_id': self.user_id,
'kernel_id': 'fake_kernel_id',
'ramdisk_id': 'fake_ramdisk_id',
'flavor': objects.flavor.Flavor(**self.type_data),
'node': 'fake_node',
'memory_mb': self.type_data['memory_mb'],
'root_gb': self.type_data['root_gb'],
'ephemeral_gb': self.type_data['ephemeral_gb'],
                  'vcpus': self.type_data['vcpus'],
'swap': self.type_data['swap'],
'expected_attrs': ['system_metadata', 'metadata'],
'display_name': 'fake_instance',
'metadata': metadata}
if key_name and key_data:
values['key_name'] = key_name
values['key_data'] = key_data
if user_data:
values['user_data'] = user_data
self.instance_node = 'fake_node'
self.uuid = uuid
self.instance = fake_instance.fake_instance_obj(self.context, **values)
def _create_network(self):
self.vpc = self.fake_ec2_conn.create_vpc(CidrBlock='192.168.10.0/24')
self.subnet = self.fake_ec2_conn.create_subnet(
VpcId=self.vpc['Vpc']['VpcId'], CidrBlock='192.168.10.0/24',
AvailabilityZone=self.az)
self.subnet_id = self.subnet['Subnet']['SubnetId']
def _create_nova_vm(self):
with contextlib.nested(
mock.patch.object(self.fake_ec2_conn, 'get_password_data'),
) as (mock_password_data):
mock_password_data[0].side_effect = fake_get_password
self.conn.spawn(self.context, self.instance, None,
injected_files=[], admin_password=None,
network_info=None, block_device_info=None)
def _create_vm_in_aws_nova(self):
self._create_instance()
self._create_network()
with contextlib.nested(
mock.patch.object(EC2Driver, '_get_image_ami_id_from_meta'),
mock.patch.object(EC2Driver, '_process_network_info'),
mock.patch.object(EC2Driver, '_get_instance_sec_grps'),
) as (mock_image, mock_network, mock_secgrp):
mock_image.return_value = 'ami-1234abc'
mock_network.return_value = (self.subnet_id, '192.168.10.5', None,
None, [])
mock_secgrp.return_value = []
self._create_nova_vm()
@mock_ec2
@mock.patch('nova.virt.ec2.credshelper._get_credsmgr_client')
def test_list_instances(self, mock_credsmgr_client):
for _ in range(0, 5):
self.fake_ec2_conn.run_instances(ImageId='ami-1234abc', MinCount=1,
MaxCount=1)
mock_credsmgr_client.side_effect = HTTPBadGateway()
fake_list = self.conn.list_instances()
self.assertEqual(5, len(fake_list))
self.reset()
@mock_ec2
def test_add_ssh_keys_key_exists(self):
fake_key = 'fake_key'
fake_key_data = 'abcdefgh'
self.fake_ec2_conn.import_key_pair(
KeyName=fake_key, PublicKeyMaterial=fake_key_data)
with contextlib.nested(
mock.patch.object(self.fake_ec2_conn, 'describe_key_pairs'),
mock.patch.object(self.fake_ec2_conn, 'import_key_pair'),
) as (fake_get, fake_import):
fake_get.return_value = keypair_exist_response
self.conn._add_ssh_keys(self.fake_ec2_conn, fake_key,
fake_key_data)
fake_get.assert_called_once_with(KeyNames=[fake_key])
fake_import.assert_not_called()
@mock_ec2
def test_add_ssh_keys_key_absent(self):
fake_key = 'fake_key'
fake_key_data = 'abcdefgh'
with contextlib.nested(
mock.patch.object(self.fake_ec2_conn, 'describe_key_pairs'),
mock.patch.object(self.fake_ec2_conn, 'import_key_pair'),
) as (fake_get, fake_import):
fake_get.return_value = {'KeyPairs': []}
self.conn._add_ssh_keys(self.fake_ec2_conn, fake_key,
fake_key_data)
fake_get.assert_called_once_with(KeyNames=[fake_key])
fake_import.assert_called_once_with(
KeyName=fake_key, PublicKeyMaterial=fake_key_data)
def test_process_network_info(self):
fake_network_info = [{
'profile': {},
'ovs_interfaceid': None,
'preserve_on_delete': False,
'network': {
'bridge': None,
'subnets': [{
'ips': [{'meta': {},
'version': 4,
'type': 'fixed',
'floating_ips': [],
'address': u'192.168.100.5'}],
'version': 4,
'meta': {},
'dns': [],
'routes': [],
'cidr': u'192.168.100.0/24',
'gateway': {'meta': {},
'version': 4,
'type': 'gateway',
'address': u'192.168.100.1'}}],
'meta': {'injected': True,
'tenant_id': '135b1a036a51414ea1f989ab59fefde5'},
'id': '4f8ad58d-de60-4b52-94ba-8b988a9b7f33',
'label': 'test'},
'devname': 'tapa9a90cf6-62',
'vnic_type': 'normal',
'qbh_params': None,
'meta': {},
'details': '{"subnet_id": "subnet-0107db5a",'
' "ip_address": "192.168.100.5",'
' "ec2_security_groups": ["sg-123456"]}',
'address': 'fa:16:3e:23:65:2c',
'active': True,
'type': 'vip_type_a',
'id': 'a9a90cf6-627c-46f3-829d-c5a2ae07aaf0',
'qbg_params': None
}]
aws_subnet_id, aws_fixed_ip, port_id, network_id, secgrps = \
self.conn._process_network_info(fake_network_info)
self.assertEqual(aws_subnet_id, 'subnet-0107db5a')
self.assertEqual(aws_fixed_ip, '192.168.100.5')
self.assertEqual(port_id, 'a9a90cf6-627c-46f3-829d-c5a2ae07aaf0')
self.assertEqual(network_id, '4f8ad58d-de60-4b52-94ba-8b988a9b7f33')
self.assertEqual(secgrps, ["sg-123456"])
@mock_ec2
def test_spawn(self):
self._create_instance()
self._create_network()
with contextlib.nested(
mock.patch.object(EC2Driver, '_get_image_ami_id_from_meta'),
mock.patch.object(EC2Driver, '_process_network_info'),
mock.patch.object(EC2Driver, '_get_instance_sec_grps'),
) as (mock_image, mock_network, mock_secgrp):
mock_image.return_value = 'ami-1234abc'
mock_network.return_value = (self.subnet_id, '192.168.10.5', None,
None, [])
mock_secgrp.return_value = []
self._create_nova_vm()
fake_instances = self.fake_ec2_conn.describe_instances()
self.assertEqual(len(fake_instances['Reservations']), 1)
self.assertEqual(
len(fake_instances['Reservations'][0]['Instances']), 1)
inst = fake_instances['Reservations'][0]['Instances'][0]
self.assertEqual(inst['VpcId'], self.vpc['Vpc']['VpcId'])
self.assertEqual(inst['SubnetId'], self.subnet_id)
self.assertEqual(inst['ImageId'], 'ami-1234abc')
self.assertEqual(inst['KeyName'], 'None')
self.assertEqual(inst['InstanceType'], 't2.small')
for tag in inst['Tags']:
if tag['Key'] == 'Name':
self.assertEqual(tag['Value'], 'fake_instance')
if tag['Key'] == "openstack_id":
self.assertEqual(tag['Value'], self.uuid)
self.reset()
@mock_ec2
def test_spawn_with_key(self):
self._create_instance(key_name='fake_key', key_data='fake_key_data')
self._create_network()
with contextlib.nested(
mock.patch.object(EC2Driver, '_get_image_ami_id_from_meta'),
mock.patch.object(EC2Driver, '_process_network_info'),
mock.patch.object(EC2Driver, '_get_instance_sec_grps'),
) as (mock_image, mock_network, mock_secgrp):
mock_image.return_value = 'ami-1234abc'
mock_network.return_value = (self.subnet_id, '192.168.10.5', None,
None, [])
mock_secgrp.return_value = []
self._create_nova_vm()
fake_instances = self.fake_ec2_conn.describe_instances()
self.assertEqual(len(fake_instances['Reservations']), 1)
self.assertEqual(
len(fake_instances['Reservations'][0]['Instances']), 1)
inst = fake_instances['Reservations'][0]['Instances'][0]
self.assertEqual(inst['KeyName'], 'fake_key')
self.reset()
@mock_ec2
def test_spawn_with_userdata(self):
userdata = '''
#cloud-config
password: password
'''
b64encoded = base64.b64encode(userdata)
self._create_instance(user_data=b64encoded)
self._create_network()
with contextlib.nested(
mock.patch.object(EC2Driver, '_get_image_ami_id_from_meta'),
mock.patch.object(EC2Driver, '_process_network_info'),
mock.patch.object(EC2Driver, '_get_instance_sec_grps'),
mock.patch.object(EC2Driver, '_ec2_conn'),
) as (mock_image, mock_network, mock_secgrp, mock_ec2_conn):
mock_image.return_value = 'ami-1234abc'
mock_network.return_value = (self.subnet_id, '192.168.10.5', None,
None, [])
mock_secgrp.return_value = []
mock_ec2_conn.return_value = self.fake_ec2_conn
fake_run_instance_op = self.fake_ec2_conn.run_instances(
ImageId='ami-1234abc', MaxCount=1, MinCount=1)
self.fake_ec2_conn.run_instances = mock.Mock()
self.fake_ec2_conn.run_instances.return_value = \
fake_run_instance_op
self._create_nova_vm()
fake_instances = self.fake_ec2_conn.describe_instances()
self.assertEqual(len(fake_instances['Reservations']), 1)
self.fake_ec2_conn.run_instances.assert_called_once_with(
InstanceType='t2.small', ImageId='ami-1234abc', MaxCount=1,
UserData=userdata, SubnetId=self.subnet_id, MinCount=1,
PrivateIpAddress='192.168.10.5', SecurityGroupIds=[])
self.reset()
@mock_ec2
def test_spawn_with_metadata(self):
metadata = {"key": "value"}
self._create_instance(metadata=metadata)
self._create_network()
with contextlib.nested(
mock.patch.object(EC2Driver, '_get_image_ami_id_from_meta'),
mock.patch.object(EC2Driver, '_process_network_info'),
mock.patch.object(EC2Driver, '_get_instance_sec_grps'),
mock.patch.object(EC2Driver, '_ec2_conn'),
) as (mock_image, mock_network, mock_secgrp, mock_ec2_conn):
mock_image.return_value = 'ami-1234abc'
mock_network.return_value = (self.subnet_id, '192.168.10.5', None,
None, [])
mock_secgrp.return_value = []
mock_ec2_conn.return_value = self.fake_ec2_conn
fake_run_instance_op = self.fake_ec2_conn.run_instances(
ImageId='ami-1234abc', MaxCount=1, MinCount=1)
self.fake_ec2_conn.run_instances = mock.Mock()
self.fake_ec2_conn.run_instances.return_value = \
fake_run_instance_op
self._create_nova_vm()
fake_instances = self.fake_ec2_conn.describe_instances()
self.assertEqual(len(fake_instances['Reservations']), 1)
self.fake_ec2_conn.run_instances.assert_called_once_with(
InstanceType='t2.small', ImageId='ami-1234abc',
SubnetId=self.subnet_id, PrivateIpAddress='192.168.10.5',
SecurityGroupIds=[], MaxCount=1, MinCount=1)
for reservation in fake_instances['Reservations']:
instance = reservation['Instances'][0]
for tag in instance['Tags']:
if tag['Key'] == 'key':
self.assertEqual(tag['Value'], 'value')
self.reset()
@mock_ec2
def test_spawn_with_network_error(self):
self._create_instance()
with contextlib.nested(
mock.patch.object(EC2Driver, '_get_image_ami_id_from_meta'),
mock.patch.object(EC2Driver, '_process_network_info'),
mock.patch.object(EC2Driver, '_get_instance_sec_grps'),
) as (mock_image, mock_network, mock_secgrp):
mock_image.return_value = 'ami-1234abc'
mock_network.return_value = (None, None, None, None, [])
mock_secgrp.return_value = []
self.assertRaises(exception.BuildAbortException,
self._create_nova_vm)
self.reset()
@mock_ec2
def test_spawn_with_network_error_from_aws(self):
self._create_instance()
with contextlib.nested(
mock.patch.object(EC2Driver, '_get_image_ami_id_from_meta'),
mock.patch.object(EC2Driver, '_process_network_info'),
mock.patch.object(EC2Driver, '_get_instance_sec_grps'),
) as (mock_image, mock_network, mock_secgrp):
mock_image.return_value = 'ami-1234abc'
mock_network.return_value = (None, '192.168.10.5', None, None, [])
mock_secgrp.return_value = []
self.assertRaises(exception.BuildAbortException,
self._create_nova_vm)
self.reset()
@mock_ec2
def test_spawn_with_image_error(self):
self._create_instance()
self._create_network()
with contextlib.nested(
mock.patch.object(EC2Driver, '_get_image_ami_id_from_meta'),
mock.patch.object(EC2Driver, '_process_network_info'),
mock.patch.object(EC2Driver, '_get_instance_sec_grps'),
) as (mock_image, mock_network, mock_secgrp):
mock_image.side_effect = exception.BuildAbortException('fake')
mock_network.return_value = ('subnet-1234abc', '192.168.10.5',
None, None)
mock_secgrp.return_value = []
self.assertRaises(exception.BuildAbortException,
self._create_nova_vm)
self.reset()
@mock_ec2
def test_snapshot(self):
self._create_vm_in_aws_nova()
GlanceImageServiceV2.update = mock.Mock()
expected_calls = [{'args': (),
'kwargs': {
'task_state': task_states.IMAGE_UPLOADING,
'expected_state': task_states.IMAGE_SNAPSHOT}}]
func_call_matcher = matchers.FunctionCallMatcher(expected_calls)
self.conn.snapshot(self.context, self.instance, 'test-snapshot',
func_call_matcher.call)
self.assertIsNone(func_call_matcher.match())
_, snapshot_name, metadata = GlanceImageServiceV2.update.call_args[0]
aws_imgs = self.fake_ec2_conn.describe_images(Owners=['self'])
self.assertEqual(1, len(aws_imgs['Images']))
aws_img = aws_imgs['Images'][0]
self.assertEqual(snapshot_name, 'test-snapshot')
self.assertEqual(aws_img['Name'], 'test-snapshot')
self.assertEqual(aws_img['ImageId'],
metadata['properties']['ec2_image_id'])
self.reset()
@mock_ec2
def test_snapshot_instance_not_found(self):
self.fake_ec2_conn.create_image = mock.Mock()
self._create_instance()
GlanceImageServiceV2.update = mock.Mock()
expected_calls = [{'args': (),
'kwargs': {
'task_state': task_states.IMAGE_UPLOADING,
'expected_state': task_states.IMAGE_SNAPSHOT}}]
func_call_matcher = matchers.FunctionCallMatcher(expected_calls)
self.assertRaises(exception.InstanceNotFound, self.conn.snapshot,
self.context, self.instance, 'test-snapshot',
func_call_matcher.call)
self.fake_ec2_conn.create_image.assert_not_called()
self.reset()
@mock_ec2
def test_reboot_soft(self):
self._create_vm_in_aws_nova()
self.assertIsNone(self.conn.reboot(self.context, self.instance, None,
'SOFT', None, None))
self.reset()
@mock_ec2
def test_reboot_hard(self):
self._create_vm_in_aws_nova()
fake_instances = self.fake_ec2_conn.describe_instances()
fake_inst = fake_instances['Reservations'][0]['Instances'][0]
EC2Driver._wait_for_state = mock.Mock()
self.assertIsNone(self.conn.reboot(self.context, self.instance, None,
'HARD', None, None))
wait_state_calls = EC2Driver._wait_for_state.call_args_list
LOG.info(wait_state_calls)
self.assertEqual(2, len(wait_state_calls))
self.assertEqual('stopped', wait_state_calls[0][0][3])
self.assertEqual(fake_inst['InstanceId'], wait_state_calls[0][0][2])
self.assertEqual('running', wait_state_calls[1][0][3])
self.assertEqual(fake_inst['InstanceId'], wait_state_calls[1][0][2])
self.reset()
@mock_ec2
def test_reboot_instance_not_found(self):
self._create_instance()
self.fake_ec2_conn.stop_instances = mock.Mock()
self.assertRaises(exception.InstanceNotFound, self.conn.reboot,
self.context, self.instance, None, 'SOFT', None,
None)
self.fake_ec2_conn.stop_instances.assert_not_called()
self.reset()
@mock_ec2
def test_power_off(self):
self._create_vm_in_aws_nova()
fake_instances = self.fake_ec2_conn.describe_instances()
fake_inst = fake_instances['Reservations'][0]['Instances'][0]
self.assertEqual(fake_inst['State']['Name'], 'running')
with contextlib.nested(
mock.patch.object(EC2Driver, '_ec2_conn'),
) as (mock_ec2_conn,):
mock_ec2_conn.return_value = self.fake_ec2_conn
self.conn.power_off(self.instance)
fake_instances = self.fake_ec2_conn.describe_instances()
fake_inst = fake_instances['Reservations'][0]['Instances'][0]
self.assertEqual(fake_inst['State']['Name'], 'stopped')
self.reset()
@mock_ec2
def test_power_off_instance_not_found(self):
self._create_instance()
self.assertRaises(exception.InstanceNotFound, self.conn.power_off,
self.instance)
self.reset()
@mock_ec2
def test_power_on(self):
self._create_vm_in_aws_nova()
fake_instances = self.fake_ec2_conn.describe_instances()
fake_inst = fake_instances['Reservations'][0]['Instances'][0]
self.fake_ec2_conn.stop_instances(
InstanceIds=[fake_inst['InstanceId']])
self.conn.power_on(self.context, self.instance, None, None)
fake_instances = self.fake_ec2_conn.describe_instances()
fake_inst = fake_instances['Reservations'][0]['Instances'][0]
self.assertEqual(fake_inst['State']['Name'], 'running')
self.reset()
@mock_ec2
def test_power_on_instance_not_found(self):
self._create_instance()
self.assertRaises(exception.InstanceNotFound, self.conn.power_on,
self.context, self.instance, None, None)
self.reset()
@mock_ec2
def test_destroy(self):
self._create_vm_in_aws_nova()
self.conn.destroy(self.context, self.instance, None, None)
fake_instances = self.fake_ec2_conn.describe_instances()
fake_inst = fake_instances['Reservations'][0]['Instances'][0]
self.assertEqual(fake_inst['State']['Name'], 'terminated')
self.reset()
@mock_ec2
def test_destroy_instance_not_found(self):
self._create_instance()
with contextlib.nested(
mock.patch.object(self.fake_ec2_conn, 'stop_instances'),
mock.patch.object(self.fake_ec2_conn, 'terminate_instances'),
mock.patch.object(EC2Driver, '_wait_for_state'),
) as (fake_stop, fake_terminate, fake_wait):
self.assertRaises(exception.InstanceNotFound, self.conn.destroy,
self.context, self.instance, None, None)
fake_stop.assert_not_called()
fake_terminate.assert_not_called()
fake_wait.assert_not_called()
self.reset()
@mock_ec2
    def test_destroy_instance_terminated_on_aws(self):
self._create_vm_in_aws_nova()
fake_instances = self.fake_ec2_conn.describe_instances()
fake_inst = fake_instances['Reservations'][0]['Instances'][0]
inst_id = fake_inst['InstanceId']
self.fake_ec2_conn.stop_instances(InstanceIds=[inst_id])
self.fake_ec2_conn.terminate_instances(InstanceIds=[inst_id])
with contextlib.nested(
mock.patch.object(self.fake_ec2_conn, 'stop_instances'),
mock.patch.object(self.fake_ec2_conn, 'terminate_instances'),
mock.patch.object(EC2Driver, '_wait_for_state'),
) as (fake_stop, fake_terminate, fake_wait):
self.conn.destroy(self.context, self.instance, None, None)
fake_stop.assert_not_called()
fake_terminate.assert_not_called()
fake_wait.assert_not_called()
self.reset()
@mock_ec2
@mock.patch.object(EC2Driver, '_ec2_conn')
def test_destroy_instance_shut_down_on_aws(self, mock_ec2_conn):
mock_ec2_conn.return_value = self.fake_ec2_conn
self._create_vm_in_aws_nova()
fake_instances = self.fake_ec2_conn.describe_instances()
fake_inst = fake_instances['Reservations'][0]['Instances'][0]
inst_id = fake_inst['InstanceId']
self.fake_ec2_conn.stop_instances(InstanceIds=[inst_id])
with contextlib.nested(
mock.patch.object(self.fake_ec2_conn, 'stop_instances'),
mock.patch.object(self.fake_ec2_conn, 'terminate_instances'),
mock.patch.object(EC2Driver, '_wait_for_state'),
) as (fake_stop, fake_terminate, fake_wait):
self.conn.destroy(self.context, self.instance, None, None)
fake_stop.assert_not_called()
fake_terminate.assert_called_once_with(InstanceIds=[inst_id])
self.reset()
@mock_ec2
def test_get_info(self):
self._create_vm_in_aws_nova()
vm_info = self.conn.get_info(self.instance)
self.assertEqual(0, vm_info.state)
self.reset()
@mock_ec2
def test_get_info_instance_not_found(self):
self._create_instance()
self.assertRaises(exception.InstanceNotFound, self.conn.get_info,
self.instance)
self.reset()
@mock_ec2
@mock.patch('nova.virt.ec2.credshelper._get_credsmgr_client')
def test_get_device_name_for_instance(self, mock_credsmgr_client):
mock_credsmgr_client.side_effect = HTTPBadGateway()
self._create_vm_in_aws_nova()
block_device_name = self.conn.get_device_name_for_instance(
self.instance, None, None)
self.assertEqual(block_device_name, "/dev/sdf")
@mock_ec2
def test_get_device_name_for_instance_failure(self):
self._create_instance()
self.instance.block_device_mapping = self.get_bdm()
self.assertRaises(exception.NovaException,
self.conn.get_device_name_for_instance,
self.instance, None, None)
@mock_ec2
def test_change_instance_metadata_add_metadata(self):
self._create_vm_in_aws_nova()
diff = {"key": ["+", "value"]}
self.conn.change_instance_metadata(self.context, self.instance, diff)
fake_instances = self.fake_ec2_conn.describe_instances()
fake_inst = fake_instances['Reservations'][0]['Instances'][0]
for tag in fake_inst['Tags']:
if tag['Key'] == "key":
self.assertEqual(tag['Value'], "value")
@mock_ec2
def test_change_instance_metadata_remove_metadata(self):
self._create_vm_in_aws_nova()
diff = {"key": ["+", "value"]}
self.conn.change_instance_metadata(self.context, self.instance, diff)
diff = {"key": ["-"]}
self.conn.change_instance_metadata(self.context, self.instance, diff)
fake_instances = self.fake_ec2_conn.describe_instances()
fake_inst = fake_instances['Reservations'][0]['Instances'][0]
key_present = False
for tag in fake_inst['Tags']:
if tag['Key'] == 'key':
key_present = True
self.assertFalse(key_present)
@mock_ec2
def test_change_instance_metadata_bulk_add_metadata(self):
self._create_vm_in_aws_nova()
diff = {
"key1": ["+", "value1"],
"key2": ["+", "value2"]
}
self.conn.change_instance_metadata(self.context, self.instance, diff)
fake_instances = self.fake_ec2_conn.describe_instances()
fake_inst = fake_instances['Reservations'][0]['Instances'][0]
for key, change in diff.items():
for tag in fake_inst['Tags']:
if tag['Key'] == key:
self.assertEqual(tag['Value'], change[1])
@mock_ec2
def test_change_instance_metadata_bulk_remove_metadata(self):
self._create_vm_in_aws_nova()
diff = {
"key1": ["+", "value1"],
"key2": ["+", "value2"]
}
self.conn.change_instance_metadata(self.context, self.instance, diff)
reverse_diff = {k: ["-"] for k in diff.keys()}
self.conn.change_instance_metadata(self.context, self.instance,
reverse_diff)
fake_instances = self.fake_ec2_conn.describe_instances()
fake_inst = fake_instances['Reservations'][0]['Instances'][0]
key_present = False
for key, change in diff.items():
for tag in fake_inst['Tags']:
if tag['Key'] == key:
key_present = True
self.assertFalse(key_present)
| 43.979079
| 79
| 0.603114
| 30,036
| 0.952526
| 0
| 0
| 23,967
| 0.760061
| 0
| 0
| 5,254
| 0.166619
|
a29454fa75452cb28ef7fa8567271f51c49e623f
| 3,132
|
py
|
Python
|
tests/run_examples.py
|
theGreenJedi/neon
|
b85ba0fbbb0458d8a8599e5ead335959b10318c1
|
[
"Apache-2.0"
] | null | null | null |
tests/run_examples.py
|
theGreenJedi/neon
|
b85ba0fbbb0458d8a8599e5ead335959b10318c1
|
[
"Apache-2.0"
] | 3
|
2021-06-08T23:56:39.000Z
|
2022-03-12T00:56:34.000Z
|
tests/run_examples.py
|
theGreenJedi/neon
|
b85ba0fbbb0458d8a8599e5ead335959b10318c1
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# this script runs all examples and checks that they all
# run without throwing an exception
from __future__ import print_function
import os
import sys
from glob import glob
import subprocess as subp
from datetime import timedelta
from timeit import default_timer as timer
# Modify the following to suit your environment
NUM_EPOCHS = 2
BACKEND = "gpu"
SUBSET_PCT = 1
TINY_SUBSET_PCT = .1
ADDITIONAL_ARGS = ""
BASE_DATA_DIR = '~/nervana/data'
# skip - not a training example
FILES_TO_SKIP = ['examples/deep_dream.py']
# skip - need to download dataset
FILES_TO_SKIP += ['examples/imdb/train.py', 'examples/whale_calls.py', 'examples/music_genres.py']
ADD_I1K_BATCH_DIR = ['examples/alexnet.py', 'examples/imagenet_allcnn.py',
'examples/vgg_bn.py', 'examples/i1k_msra.py']
ADD_CIFAR_BATCH_DIR = ['examples/cifar10_msra.py']
ADD_UCF101_BATCH_DIR = ['examples/video-c3d/train.py']
ADD_SUBSET_PCT = ADD_I1K_BATCH_DIR + ADD_UCF101_BATCH_DIR
ADD_TINY_SUBSET_PCT = ['examples/fast-rcnn/train.py', 'examples/vgg_bn.py']
# Jenkins environment setup
if os.getenv("EXECUTOR_NUMBER"):
BASE_DATA_DIR = '/usr/local/data/jenkins'
ADDITIONAL_ARGS += "-i {}".format(os.getenv("EXECUTOR_NUMBER"))
I1K_BATCH_DIR = os.path.join(BASE_DATA_DIR, 'I1K/macrobatches')
CIFAR_BATCH_DIR = os.path.join(BASE_DATA_DIR, 'CIFAR10/macrobatches')
UCF101_BATCH_DIR = os.path.join(BASE_DATA_DIR, 'UCF-101/ucf-preprocessed')
if not os.path.isdir('examples'):
    raise IOError('Must run from root dir of neon repo')
# check for venv activations
cmd = 'if [ -z "$VIRTUAL_ENV" ];then exit 1;else exit 0;fi'
if subp.call(cmd, shell=True) > 0:
raise IOError('Need to activate the virtualenv')
examples = glob('examples/*.py') + glob('examples/*/train.py')
skipped = []
results = []
for ex in sorted(examples):
if ex in FILES_TO_SKIP:
skipped.append(ex)
continue
cmdargs = "-e {} -b {} --serialize 1 -v --no_progress_bar -s {} {}".format(
NUM_EPOCHS, BACKEND, os.path.splitext(ex)[0] + '.prm',
ADDITIONAL_ARGS)
cmd = "python {} ".format(ex) + cmdargs
if ex in ADD_I1K_BATCH_DIR:
cmd += ' -w {}'.format(I1K_BATCH_DIR)
elif ex in ADD_CIFAR_BATCH_DIR:
cmd += ' -w {}'.format(CIFAR_BATCH_DIR)
elif ex in ADD_UCF101_BATCH_DIR:
cmd += ' -w {} -z 16'.format(UCF101_BATCH_DIR)
else:
cmd += ' -w {}'.format(BASE_DATA_DIR)
if ex in ADD_TINY_SUBSET_PCT:
cmd += ' --subset_pct {}'.format(TINY_SUBSET_PCT)
elif ex in ADD_SUBSET_PCT:
cmd += ' --subset_pct {}'.format(SUBSET_PCT)
start = timer()
rc = subp.call(cmd, shell=True)
end = timer()
results.append([ex, rc, end - start])
print('\nFound {} scripts:'.format(len(examples)))
for dat in results:
if dat[1] == 0:
print('SUCCESS on {} in {}'.format(dat[0], timedelta(seconds=int(dat[2]))))
for ex in skipped:
print('SKIPPED {}'.format(ex))
errors = 0
for dat in results:
if dat[1] != 0:
print('FAILURE on {}'.format(dat[0]))
errors += 1
print("\nExiting with %d errors" % errors)
sys.exit(errors)
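# For reference, a command assembled by the loop above looks like the following
# (illustrative only; examples/mnist_mlp.py stands in for any discovered script
# and the -w value is whatever BASE_DATA_DIR resolves to):
#   python examples/mnist_mlp.py -e 2 -b gpu --serialize 1 -v --no_progress_bar \
#       -s examples/mnist_mlp.prm -w ~/nervana/data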
| 31.959184
| 98
| 0.678799
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,141
| 0.364304
|
a296a4a9d7aba1261750f949b22f9a0ca56bbbcf
| 5,888
|
py
|
Python
|
critical/rankorder.py
|
NECOTIS/CRITICAL
|
eba2dc9c90936f9cf51e04374081509be433ed10
|
[
"BSD-3-Clause"
] | 1
|
2022-02-16T00:59:50.000Z
|
2022-02-16T00:59:50.000Z
|
critical/rankorder.py
|
NECOTIS/CRITICAL
|
eba2dc9c90936f9cf51e04374081509be433ed10
|
[
"BSD-3-Clause"
] | null | null | null |
critical/rankorder.py
|
NECOTIS/CRITICAL
|
eba2dc9c90936f9cf51e04374081509be433ed10
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright (c) 2012-2018, NECOTIS
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# - Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# - Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# Authors: Simon Brodeur, Jean Rouat (advisor)
# Date: April 18th, 2019
# Organization: Groupe de recherche en Neurosciences Computationnelles et Traitement Intelligent des Signaux (NECOTIS),
# Université de Sherbrooke, Canada
import logging
import numpy as np
import matplotlib.pyplot as plt
from brian2.units.stdunits import ms
from brian2.units.allunits import second
from matplotlib.lines import Line2D
logger = logging.getLogger(__name__)
class RocPattern(object):
def __init__(self, orders, times, width):
self.orders = orders
self.times = times
self.width = width
def plotPatterns(patterns, unit=ms):
fig = plt.figure(facecolor='white')
line, = plt.plot([], [], '.', color='gray')
ax = fig.add_subplot(1, 1, 1)
nbNeurons = np.max([len(p.orders) for p in patterns])
min_y = -0.5
ax.set_ylim((min_y, nbNeurons))
plt.ylabel('Neuron number')
if unit == ms:
plt.xlabel('Time [ms]')
elif unit == second:
plt.xlabel('Time [sec]')
else:
raise Exception('Unsupported unit provided')
plt.title('Rank-order coded patterns')
# Draw spikes
spikes = []
for n, p in enumerate(patterns):
for i, t in zip(range(nbNeurons), p.times):
spikes.append((i, t + n * p.width))
allst = []
if len(spikes):
sn, st = np.array(spikes).T
else:
sn, st = np.array([]), np.array([])
st /= unit
allsn = [sn]
allst.append(st)
sn = np.hstack(allsn)
st = np.hstack(allst)
line.set_xdata(np.array(st))
ax.set_xlim((0.0, np.max(st)))
line.set_ydata(sn)
# Draw lines between each pattern
for n in range(len(patterns)):
t = n * (patterns[n].width / unit)
line = Line2D([t, t], ax.get_ylim(), color='grey', linestyle='--', linewidth=1.0)
ax.add_line(line)
fig.canvas.draw()
return fig
def generateRankOrderCodedPatterns(nbNeurons, nbPatterns, widthEpoch=10 * ms, padding=1 * ms, refractory=0.0 * ms):
spiketimes = np.zeros((nbPatterns, nbNeurons)) * ms
orders = np.zeros((nbPatterns, nbNeurons))
# Loop for each class to generate
patterns = []
minT = padding
maxT = widthEpoch - padding
times = np.linspace(minT, maxT, nbNeurons)
for n in range(nbPatterns):
logger.debug('Generating pattern no.%d (out of %d)' % (n + 1, nbPatterns))
conflictFound = True
nbRetry = 0
maxRetry = 100000
while conflictFound and nbRetry < maxRetry:
if nbRetry > 0 and nbRetry % 1000 == 0:
logger.debug('Number of retries: %d' % (nbRetry))
genOrders = list(range(nbNeurons))
np.random.shuffle(genOrders)
# Ensure that the pattern doesn't already exist
conflictFound = False
for m in range(n):
if (genOrders == orders[m, :]).all():
conflictFound = True
nbRetry += 1
break
if not conflictFound and refractory > 0.0:
# Ensure each neuron is not in refractory period if concatenated with every other class
for target in range(nbNeurons):
for m in range(n):
if times[genOrders[target]] + widthEpoch - spiketimes[m, target] < refractory:
conflictFound = True
nbRetry += 1
break
if conflictFound:
break
if conflictFound:
raise Exception('Unable to generate all patterns: %d generated' % (n))
patterns.append(RocPattern(genOrders, times[genOrders], widthEpoch))
return patterns
def generateRankOrderCodedData(patterns, duration, delayEpoch):
t = 0.0 * second
indices = []
times = []
while t < duration:
p = np.random.choice(patterns)
if t + p.width + delayEpoch >= duration:
break
indices.extend(range(len(p.times)))
times.extend(t + p.times)
t += p.width + delayEpoch
indices = np.array(indices, dtype=np.int)
times = np.array(times) * second
# Sort by time
sortIndices = np.argsort(times)
times = times[sortIndices]
indices = indices[sortIndices]
return indices, times
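# Illustrative usage (a sketch, not part of the original module); the parameter
# values below are arbitrary choices made only for demonstration.
if __name__ == '__main__':
    demo_patterns = generateRankOrderCodedPatterns(nbNeurons=8, nbPatterns=3,
                                                   widthEpoch=10 * ms)
    demo_indices, demo_times = generateRankOrderCodedData(demo_patterns,
                                                          duration=0.5 * second,
                                                          delayEpoch=5 * ms)
    print('generated %d spikes from %d patterns' % (len(demo_indices), len(demo_patterns)))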
| 34.432749
| 119
| 0.640625
| 154
| 0.02615
| 0
| 0
| 0
| 0
| 0
| 0
| 2,195
| 0.372729
|
a29750f0ca25c86bb147bca122dfcaad2818dc92
| 2,007
|
py
|
Python
|
trac/wiki/tests/web_api.py
|
clubturbo/Trac-1.4.2
|
254ce54a3c2fb86b4f31810ddeabbd4ff8b54a78
|
[
"BSD-3-Clause"
] | null | null | null |
trac/wiki/tests/web_api.py
|
clubturbo/Trac-1.4.2
|
254ce54a3c2fb86b4f31810ddeabbd4ff8b54a78
|
[
"BSD-3-Clause"
] | null | null | null |
trac/wiki/tests/web_api.py
|
clubturbo/Trac-1.4.2
|
254ce54a3c2fb86b4f31810ddeabbd4ff8b54a78
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2014-2020 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at https://trac.edgewall.org/log/.
import textwrap
import unittest
from trac.mimeview.patch import PatchRenderer
from trac.test import EnvironmentStub, MockRequest
from trac.web.api import RequestDone
from trac.wiki.web_api import WikiRenderer
class WikiRendererTestCase(unittest.TestCase):
def setUp(self):
self.env = EnvironmentStub()
self.mod = WikiRenderer(self.env)
def test_load_stylesheet(self):
text = textwrap.dedent("""\
{{{#!text/x-diff
--- a/file.txt 2014-11-13 01:16:06 +0000
+++ b/file.txt 2014-11-13 01:16:06 +0000
@@ -1 +1 @@
-old line
+new line
}}}
""")
req = MockRequest(self.env, method='POST', path_info='/wiki_render',
args={'id': 'WikiStart', 'text': text})
self.assertTrue(self.mod.match_request(req))
try:
self.mod.process_request(req)
self.fail('RequestDone not raised')
except RequestDone:
output = req.response_sent.getvalue()
self.assertIn('<div class="wiki-code">', output)
self.assertIn('<table class="trac-diff inline"', output)
self.assertIn('jQuery.loadStyleSheet("'
'/trac.cgi/chrome/common/css/diff.css"', output)
def test_suite():
return unittest.makeSuite(WikiRendererTestCase)
if __name__ == '__main__':
unittest.main(defaultTest='test_suite')
| 34.016949
| 77
| 0.620329
| 1,121
| 0.558545
| 0
| 0
| 0
| 0
| 0
| 0
| 956
| 0.476333
|
a2983711f38540e1e3b5409d4bc00bd0c00c0ae8
| 5,814
|
py
|
Python
|
main.py
|
eyalnaor/DeepTemporalSR
|
7d8c821431dec3a4c480550c61a6033fcac5e640
|
[
"MIT"
] | 38
|
2020-09-04T10:53:50.000Z
|
2021-08-29T13:10:41.000Z
|
main.py
|
eyalnaor/DeepTemporalSR
|
7d8c821431dec3a4c480550c61a6033fcac5e640
|
[
"MIT"
] | 1
|
2021-02-24T17:20:58.000Z
|
2021-02-24T17:20:58.000Z
|
main.py
|
eyalnaor/DeepTemporalSR
|
7d8c821431dec3a4c480550c61a6033fcac5e640
|
[
"MIT"
] | 7
|
2020-12-03T12:11:49.000Z
|
2021-08-16T14:43:28.000Z
|
import torch
import Network
import Network_res3d
from data_handler import *
import cProfile
import io
import pstats
parser = utils.create_parser()
args = parser.parse_args()
def worker_init_fn(worker_id):
np.random.seed(np.random.get_state()[1][0] + worker_id)
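# Note added for clarity: seeding each DataLoader worker with the base NumPy seed
# plus its worker id keeps random crops/augmentations different across workers
# while remaining reproducible for a fixed base seed.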
def main():
# read json return
config = utils.startup(json_path=args.config, args=args, copy_files=args.eval is None or args.eval == 'empty')
# get available device
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
params = {'batch_size': config["batch_size"],
'shuffle': True,
'num_workers': 0,
'worker_init_fn': worker_init_fn}
cur_data_path = config['data']['params']['frames_folder']
cur_spatial_scale = 1
if config['backprojection'] == True:
cur_data_path, cur_spatial_scale, cumulative_spatial_scales = utils.downscale_for_BP(config, device)
upsample_steps = config['upsamaple_steps']
network = None
output = None
cumulative_scale = 1
for scale_ind, scale in enumerate(upsample_steps):
cumulative_scale = cumulative_scale * scale
print('*********************************************************************************')
print(f'entered temporal iteration {scale_ind}. Upsampling by temporal scale {scale}. Until now (including this): {cumulative_scale}')
print(f'upscaling from spatial scale {cur_spatial_scale}, FROM path: {cur_data_path}')
print('*********************************************************************************')
# Check if this scale will be used for eval only, so no need for entire DataHandler object
scale_for_eval_only = config['fix_network'] == True and scale_ind != 0
dataset = DataHandler(data_path=cur_data_path, config=config, upsample_scale=scale, device=device,
video_only_flag=scale_for_eval_only)
assert dataset.crop_size[0] % scale == 0, f'assertion error in main, temporal crop size not divisible by scale'
data_generator = data.DataLoader(dataset, **params)
train_from_scratch = scale_ind == 0 or (config['fix_network'] is False and config['fine_tune'] is False)
if train_from_scratch:
network_class = config['network']
if network_class == 'base':
network = Network.Network(config=config, device=device, upsample_scale=scale)
elif network_class == 'residual':
network = Network_res3d.Network_residual(config=config, device=device, upsample_scale=scale)
else:
assert False, f'assertion fail at main, not a known "network_class"'
else: # In fine tuning/fixed network. Either way - No new network.
            assert len(set(upsample_steps)) <= 1  # Make sure all upsample steps are identical
network.epochs = network.epochs // config['fine_tuning_epochs_factor'] # if fine_tuning: needed. If fixed: No impact
assert network.epochs > 0, f'assertion error in main. "fine_tuning_epochs_factor" too large - No epochs left for fine_tuning in training iteration {scale_ind}'
need_to_train = (config['fix_network'] == False or scale_ind == 0) and (not config['ckpt_first_trained'] or scale_ind != 0) # Net not fixed or first training
        if config['checkpoint'] != '' and scale_ind == 0:  # Load model. Only first iteration
network.load_model(config['checkpoint'])
print('loaded_ckpt\nloaded_ckpt\nloaded_ckpt\nloaded_ckpt\nloaded_ckpt\nloaded_ckpt\n')
if need_to_train:
# call train - provide a data_handler object to provide (lr,hr) tuples
network.train(data_generator, cumulative_scale)
if config['debug']:
utils.visualize_tuple((dataset.video_tensor, dataset.crops_on_video_tensor), name_hr='video', name_lr='selected crops', save_to_file=True,
save_path=os.path.join(config['trainer']['working_dir'], 'visualize_crops', f'scale_{cumulative_scale}'))
# reset the start epoch value for next training scale
network.epoch = 0
# call eval
output = network.eval(dataset.video_tensor)
# apply temporal BP
output = utils.temporal_bp_wrapper(dataset.video_tensor, output)
# save results to file
output_dir = os.path.join(config['trainer']['working_dir'], f'T{cumulative_scale}S{cur_spatial_scale}')
utils.save_output_result(output, output_dir)
# update data_path for next step
cur_data_path = output_dir
# Apply BP if needed
skip_BP = config['final_no_BP'] and scale_ind == len(upsample_steps) - 1 # Final upscale when BP not wanted - skip BP
if config['backprojection'] and not skip_BP:
cur_data_path, cur_spatial_scale, output = utils.BP_wrapper(config, cumulative_scale,
cumulative_spatial_scales,
cur_data_path, cur_spatial_scale, output,
scale_ind, device)
# save final result in "output" folder
final_output_dir = os.path.join(config['trainer']['working_dir'], 'output')
utils.save_output_result(output, final_output_dir)
if __name__ == '__main__':
# open comment to allow profiling
# pr = cProfile.Profile()
# pr.enable()
# main()
# pr.disable()
# pr.print_stats(sort="cumtime")
# s = io.StringIO()
# ps = pstats.Stats(pr, stream=s).sort_stats('tottime')
# ps.print_stats()
# with open('profile.txt', 'w+') as f:
# f.write(s.getvalue())
main()
print('done.')
| 46.887097
| 171
| 0.626935
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,171
| 0.373409
|
a298626351e920d8afa27758b5249b92283fda64
| 308
|
py
|
Python
|
package_name/__init__.py
|
netserf/template-python-repo
|
f6a2612b0e2dfd766c1287abb6e17f13fca44b93
|
[
"MIT"
] | null | null | null |
package_name/__init__.py
|
netserf/template-python-repo
|
f6a2612b0e2dfd766c1287abb6e17f13fca44b93
|
[
"MIT"
] | null | null | null |
package_name/__init__.py
|
netserf/template-python-repo
|
f6a2612b0e2dfd766c1287abb6e17f13fca44b93
|
[
"MIT"
] | null | null | null |
# `name` is the name of the package as used for `pip install package`
name = "package-name"
# `path` is the name of the package for `import package`
path = name.lower().replace("-", "_").replace(" ", "_")
version = "0.1.0"
author = "Author Name"
author_email = ""
description = "" # summary
license = "MIT"
| 30.8
| 69
| 0.655844
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 189
| 0.613636
|
a29a495e3f7946f01ad82f159c0ca13bc042ba05
| 1,826
|
py
|
Python
|
signaling_trajectories.py
|
simberaj/mobilib
|
ae350d095a34f53704bd4aaaf7f45e573bda779a
|
[
"MIT"
] | null | null | null |
signaling_trajectories.py
|
simberaj/mobilib
|
ae350d095a34f53704bd4aaaf7f45e573bda779a
|
[
"MIT"
] | null | null | null |
signaling_trajectories.py
|
simberaj/mobilib
|
ae350d095a34f53704bd4aaaf7f45e573bda779a
|
[
"MIT"
] | null | null | null |
"""Transform signaling data to smoothed trajectories."""
import sys
import numpy
import pandas as pd
import geopandas as gpd
import shapely.geometry
import matplotlib.patches
import matplotlib.pyplot as plt
import mobilib.voronoi
SAMPLING = pd.Timedelta('00:01:00')
STD = pd.Timedelta('00:05:00')
def smoothen(array, std_quant):
return pd.Series(array).rolling(
int(numpy.ceil(8 * std_quant)),
min_periods=0,
center=True,
win_type='gaussian'
).mean(std=std_quant)
def trajectory(df, xcol, ycol, sampling, std):
ts = pd.date_range(df.index.min(), df.index.max(), freq=sampling)
obs_ind = ts.searchsorted(df.index)
xs_src = numpy.full(ts.size, numpy.nan)
xs_src[obs_ind] = df[xcol]
ys_src = numpy.full(ts.size, numpy.nan)
ys_src[obs_ind] = df[ycol]
std_quant = std / sampling
return smoothen(xs_src, std_quant), smoothen(ys_src, std_quant), ts
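# Worked example of the window sizing above (for illustration): with the defaults
# SAMPLING = 1 min and STD = 5 min, std_quant = STD / SAMPLING = 5, so smoothen()
# uses a Gaussian window of ceil(8 * 5) = 40 one-minute samples, i.e. roughly
# +/- 20 minutes around each resampled position.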
if __name__ == '__main__':
signals = pd.read_csv(sys.argv[1], sep=';')
signals = signals[signals['phone_nr'] == int(sys.argv[3])]
signals['pos_time'] = pd.to_datetime(signals['pos_time'])
timeweights = (1 / signals.groupby('pos_time')['phone_nr'].count()).reset_index().rename(columns={'phone_nr' : 'weight'})
signals = pd.merge(signals, timeweights, on='pos_time')
antennas = pd.read_csv(sys.argv[2], sep=';')
siglocs = pd.merge(signals, antennas, on='cell_name').groupby('pos_time').agg({
'xcent': 'mean',
'ycent': 'mean',
})
xpos, ypos, tpos = trajectory(siglocs, 'xcent', 'ycent', sampling=SAMPLING, std=STD)
plt.plot(xpos, ypos)
plt.scatter(antennas.xcent, antennas.ycent, s=9, color='orange')
plt.gca().set_aspect('equal')
plt.show()
pd.DataFrame({'x': xpos, 'y': ypos, 't': tpos}).to_csv(sys.argv[4], sep=';', index=False)
| 33.2
| 125
| 0.661555
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 268
| 0.146769
|
a29c18ce763ea0eb8b5497234efc7ee7fced0caa
| 440
|
py
|
Python
|
home/pi/_testing/logging-test.py
|
rc-bellergy/pxpi
|
e3d6d1d1a1f6d1fdf53341d314e7d549c8e84a68
|
[
"MIT"
] | 26
|
2020-02-16T09:14:16.000Z
|
2022-03-28T07:39:47.000Z
|
home/pi/_testing/logging-test.py
|
rc-bellergy/pxpi
|
e3d6d1d1a1f6d1fdf53341d314e7d549c8e84a68
|
[
"MIT"
] | 1
|
2020-10-04T03:48:09.000Z
|
2020-10-05T01:47:09.000Z
|
home/pi/_testing/logging-test.py
|
rc-bellergy/pxpi
|
e3d6d1d1a1f6d1fdf53341d314e7d549c8e84a68
|
[
"MIT"
] | 7
|
2020-10-04T03:45:36.000Z
|
2022-02-28T16:54:36.000Z
|
#!/usr/bin/env python
import logging
import logging.handlers
import os
# Logging to file
dir_path = os.path.dirname(os.path.realpath(__file__))
logging.basicConfig(filename=dir_path + "/test.log", format='%(asctime)s - %(message)s', level=logging.INFO, filemode='w')
# Logging messages to the console
console = logging.StreamHandler()
logger = logging.getLogger()
logger.addHandler(console)
# Logging test
logging.info("** Testing **")
| 25.882353
| 122
| 0.747727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 141
| 0.320455
|
a29c642613fdef33219868f8958a1851ae0b81aa
| 1,556
|
py
|
Python
|
test_client.py
|
ericjmartin/slackview
|
28797ca06e13f5c9f97c1755e613c0e402ae0ea4
|
[
"MIT"
] | null | null | null |
test_client.py
|
ericjmartin/slackview
|
28797ca06e13f5c9f97c1755e613c0e402ae0ea4
|
[
"MIT"
] | null | null | null |
test_client.py
|
ericjmartin/slackview
|
28797ca06e13f5c9f97c1755e613c0e402ae0ea4
|
[
"MIT"
] | null | null | null |
import os
from slack_sdk.web import WebClient
from slack_sdk.socket_mode import SocketModeClient
# Initialize SocketModeClient with an app-level token + WebClient
client = SocketModeClient(
# This app-level token will be used only for establishing a connection
    app_token=os.environ.get("SLACK_APP_TOKEN"),
# You will be using this WebClient for performing Web API calls in listeners
web_client=WebClient(token=os.environ.get("SLACK_BOT_TOKEN")) # xoxb-111-222-xyz
)
from slack_sdk.socket_mode.response import SocketModeResponse
from slack_sdk.socket_mode.request import SocketModeRequest
def process(client: SocketModeClient, req: SocketModeRequest):
print('here')
if req.type == "events_api":
# Acknowledge the request anyway
response = SocketModeResponse(envelope_id=req.envelope_id)
client.send_socket_mode_response(response)
# Add a reaction to the message if it's a new message
if req.payload["event"]["type"] == "message" \
and req.payload["event"].get("subtype") is None:
client.web_client.reactions_add(
name="eyes",
channel=req.payload["event"]["channel"],
timestamp=req.payload["event"]["ts"],
)
# Add a new listener to receive messages from Slack
# You can add more listeners like this
client.socket_mode_request_listeners.append(process)
# Establish a WebSocket connection to the Socket Mode servers
client.connect()
# Just not to stop this process
from threading import Event
Event().wait()
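# To try this script (illustrative; the token values are placeholders):
#   export SLACK_APP_TOKEN=xapp-...   # app-level token, used for the Socket Mode connection
#   export SLACK_BOT_TOKEN=xoxb-...   # bot token, used for Web API calls
#   python test_client.py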
| 39.897436
| 85
| 0.720437
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 618
| 0.397172
|
a29c710e6a5af2c146c941aed7a01353e7cc6f77
| 1,968
|
py
|
Python
|
utils/misc.py
|
hengwei-chan/3D_SBDD
|
eda6d51aaf01ef25581a46920a25161678fab76d
|
[
"MIT"
] | 67
|
2021-12-02T05:53:44.000Z
|
2022-03-31T07:21:26.000Z
|
utils/misc.py
|
hengwei-chan/3D_SBDD
|
eda6d51aaf01ef25581a46920a25161678fab76d
|
[
"MIT"
] | 13
|
2021-12-05T14:23:46.000Z
|
2022-03-25T21:07:20.000Z
|
utils/misc.py
|
hengwei-chan/3D_SBDD
|
eda6d51aaf01ef25581a46920a25161678fab76d
|
[
"MIT"
] | 16
|
2022-01-11T11:48:24.000Z
|
2022-03-27T19:20:58.000Z
|
import os
import time
import random
import logging
import torch
import numpy as np
import yaml
from easydict import EasyDict
from logging import Logger
from tqdm.auto import tqdm
class BlackHole(object):
def __setattr__(self, name, value):
pass
def __call__(self, *args, **kwargs):
return self
def __getattr__(self, name):
return self
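# Note on BlackHole above: attribute writes are silently dropped and every
# attribute access or call returns the object itself, so it can stand in for a
# logger/summary writer when output should be discarded, e.g.:
#   writer = BlackHole()
#   writer.add_scalar('loss', 0.1, 0)   # does nothing, raises nothing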
def load_config(path):
with open(path, 'r') as f:
return EasyDict(yaml.safe_load(f))
def get_logger(name, log_dir=None):
logger = logging.getLogger(name)
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter('[%(asctime)s::%(name)s::%(levelname)s] %(message)s')
stream_handler = logging.StreamHandler()
stream_handler.setLevel(logging.DEBUG)
stream_handler.setFormatter(formatter)
logger.addHandler(stream_handler)
if log_dir is not None:
file_handler = logging.FileHandler(os.path.join(log_dir, 'log.txt'))
file_handler.setLevel(logging.DEBUG)
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)
return logger
def get_new_log_dir(root='./logs', prefix='', tag=''):
fn = time.strftime('%Y_%m_%d__%H_%M_%S', time.localtime())
if prefix != '':
fn = prefix + '_' + fn
if tag != '':
fn = fn + '_' + tag
log_dir = os.path.join(root, fn)
os.makedirs(log_dir)
return log_dir
def seed_all(seed):
torch.manual_seed(seed)
np.random.seed(seed)
random.seed(seed)
def log_hyperparams(writer, args):
from torch.utils.tensorboard.summary import hparams
vars_args = {k:v if isinstance(v, str) else repr(v) for k, v in vars(args).items()}
exp, ssi, sei = hparams(vars_args, {})
writer.file_writer.add_summary(exp)
writer.file_writer.add_summary(ssi)
writer.file_writer.add_summary(sei)
def int_tuple(argstr):
return tuple(map(int, argstr.split(',')))
def str_tuple(argstr):
return tuple(argstr.split(','))
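# Illustrative usage of the helpers above (a sketch; the config path and tag are
# assumptions, not files shipped with this module):
#   config = load_config('./configs/sample.yml')
#   log_dir = get_new_log_dir(root='./logs', prefix='demo', tag='run1')
#   logger = get_logger('demo', log_dir)
#   seed_all(2021)
#   logger.info('Logging to %s', log_dir)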
| 24.911392
| 87
| 0.676829
| 191
| 0.097053
| 0
| 0
| 0
| 0
| 0
| 0
| 112
| 0.056911
|
a29d7acbfba5a243e1f4a49be6ce4cba089c4b1f
| 2,928
|
py
|
Python
|
tests/test_api.py
|
mattjm/iam-idbase
|
d96d1bada5adf4dbad9be212f1015e3d7399a63d
|
[
"Apache-2.0"
] | null | null | null |
tests/test_api.py
|
mattjm/iam-idbase
|
d96d1bada5adf4dbad9be212f1015e3d7399a63d
|
[
"Apache-2.0"
] | null | null | null |
tests/test_api.py
|
mattjm/iam-idbase
|
d96d1bada5adf4dbad9be212f1015e3d7399a63d
|
[
"Apache-2.0"
] | null | null | null |
from idbase.api import RESTDispatch, LoginStatus
from idbase import exceptions
from django.http import HttpResponse
from mock import MagicMock
import pytest
import json
@pytest.fixture
def req(rf):
request = rf.get('/')
request.user = MagicMock()
request.user.is_authenticated.return_value = True
return request
@pytest.fixture
def rest_dispatch():
"""Instantiate a RESTDispatch object with a GET method."""
rd = RESTDispatch()
rd.GET = MagicMock()
rd.GET.side_effect = lambda x: {'foo': 'bar'}
return rd
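# Note on the fixture above: RESTDispatch.run() dispatches to a handler named
# after the HTTP method (GET here) and JSON-encodes the dict it returns, as the
# tests below exercise; without such a handler the request fails with a 400.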
def test_rest_dispatch_run_get_basic(rest_dispatch, req):
response = rest_dispatch.run(req)
assert response.status_code == 200
assert response.content.decode() == json.dumps({'foo': 'bar'})
assert (response._headers['content-type'] ==
('Content-Type', 'application/json'))
rest_dispatch.GET.assert_called_once_with(req)
def test_rest_dispatch_run_http_response(rest_dispatch, req):
rest_dispatch.GET.side_effect = lambda x: HttpResponse(
content='hello world', status=503)
response = rest_dispatch.run(req)
assert response.status_code == 503
assert response.content.decode() == 'hello world'
def test_rest_dispatch_run_get_no_method(req):
rd = RESTDispatch()
response = rd.run(req)
assert response.status_code == 400
assert json.loads(response.content.decode()).get(
'error_message', None) is not None
def test_rest_dispatch_run_invalid_session(rest_dispatch, req):
rest_dispatch.GET.side_effect = exceptions.InvalidSessionError()
response = rest_dispatch.run(req)
assert response.status_code == 401
def test_rest_dispatch_run_not_found(rest_dispatch, req):
rest_dispatch.GET.side_effect = exceptions.NotFoundError()
response = rest_dispatch.run(req)
assert response.status_code == 404
def test_rest_dispatch_run_exception(rest_dispatch, req):
rest_dispatch.GET.side_effect = Exception()
response = rest_dispatch.run(req)
assert response.status_code == 500
def test_rest_dispatch_not_logged_in(rest_dispatch, req):
req.user.is_authenticated.return_value = False
response = rest_dispatch.run(req)
assert response.status_code == 401
def test_rest_dispatch_no_login_necessary(req):
req.user.is_authenticated.return_value = False
rest_dispatch = RESTDispatch(login_required=False)
rest_dispatch.GET = lambda x: {'foo': 'bar'}
response = rest_dispatch.run(req)
assert response.status_code == 200
assert json.loads(response.content.decode()) == {'foo': 'bar'}
def test_login_status_get(req):
req.user.netid = 'jo'
req.user.get_full_name.return_value = 'Jo Blo'
assert LoginStatus().GET(req) == {'netid': 'jo', 'name': 'Jo Blo'}
def test_login_status_no_auth(req):
req.user.is_authenticated.return_value = False
with pytest.raises(exceptions.InvalidSessionError):
LoginStatus().GET(req)
| 30.185567
| 70
| 0.730874
| 0
| 0
| 0
| 0
| 370
| 0.126366
| 0
| 0
| 225
| 0.076844
|
a2a205807fc1a9002dcff612423d88ef56c86c00
| 5,885
|
py
|
Python
|
volatility/volatility/plugins/mac/adiummsgs.py
|
williamclot/MemoryVisualizer
|
2ff9f30f07519d6578bc36c12f8d08acc9cb4383
|
[
"MIT"
] | 2
|
2018-07-16T13:30:40.000Z
|
2018-07-17T12:02:05.000Z
|
volatility/volatility/plugins/mac/adiummsgs.py
|
williamclot/MemoryVisualizer
|
2ff9f30f07519d6578bc36c12f8d08acc9cb4383
|
[
"MIT"
] | null | null | null |
volatility/volatility/plugins/mac/adiummsgs.py
|
williamclot/MemoryVisualizer
|
2ff9f30f07519d6578bc36c12f8d08acc9cb4383
|
[
"MIT"
] | null | null | null |
# Volatility
# Copyright (C) 2007-2013 Volatility Foundation
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#
"""
@author: Andrew Case
@license: GNU General Public License 2.0
@contact: atcuno@gmail.com
@organization:
"""
import os
import volatility.obj as obj
import volatility.plugins.mac.pstasks as pstasks
import volatility.plugins.mac.common as common
from volatility.renderers import TreeGrid
from volatility.renderers.basic import Address
class mac_adium(pstasks.mac_tasks):
""" Lists Adium messages """
def __init__(self, config, *args, **kwargs):
pstasks.mac_tasks.__init__(self, config, *args, **kwargs)
self._config.add_option('DUMP-DIR', short_option = 'D', default = None, help = 'Output directory', action = 'store', type = 'str')
self._config.add_option('WIDE', short_option = 'W', default = False, help = 'Wide character search', action = 'store_true')
def _make_uni(self, msg):
if self._config.WIDE:
return "\x00".join([m for m in msg])
else:
return msg
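    # calculate() below walks every rw- anonymous mapping of the Adium process
    # and scans it for the HTML span markers Adium uses for the message body,
    # timestamp and sender, stitching the three fragments together for each hit.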
def calculate(self):
common.set_plugin_members(self)
procs = pstasks.mac_tasks.calculate(self)
for proc in procs:
if proc.p_comm.lower().find("adium") == -1:
continue
proc_as = proc.get_process_address_space()
for map in proc.get_proc_maps():
if map.get_perms() != "rw-" or map.get_path() != "":
continue
buffer = proc_as.zread(map.start.v(), map.end.v() - map.start.v())
if not buffer:
continue
msg_search = self._make_uni('<span class="x-message"')
time_search = self._make_uni('<span class="x-ltime"')
send_search = self._make_uni('<span class="x-sender"')
end_search = self._make_uni('</span>')
idx = 0
msg_idx = buffer.find(msg_search)
while msg_idx != -1:
idx = idx + msg_idx
msg_end_idx = buffer[idx:].find(end_search)
if msg_end_idx == -1:
break
msg = buffer[idx: idx + msg_end_idx + 14]
# to look for time and send
search_idx = idx - 200
time_idx = buffer[search_idx : search_idx + 200].find(time_search)
msg_time = ""
if time_idx != -1:
time_end_idx = buffer[search_idx + time_idx: search_idx + time_idx + 130].find(end_search)
if time_end_idx != -1:
msg_time = buffer[search_idx + time_idx: search_idx + time_idx + time_end_idx + 14]
msg_sender = ""
send_idx = buffer[idx + search_idx: idx + search_idx + 200].find(send_search)
if send_idx != -1:
send_end_idx = buffer[search_idx + send_idx: search_idx + send_idx + 60].find(end_search)
if send_end_idx != -1:
msg_sender = buffer[search_idx + send_idx: search_idx + send_idx + send_end_idx + 14]
yield proc, map.start + idx, msg_time + msg_sender + msg
idx = idx + 5
msg_idx = buffer[idx:].find(msg_search)
def unified_output(self, data):
return TreeGrid([("Pid", int),
("Name", str),
("Start", Address),
("Size", int),
("Path", str),
],
self.generator(data))
def generator(self, data):
for (proc, start, msg) in data:
fname = "Adium.{0}.{1:x}.txt".format(proc.p_pid, start)
file_path = os.path.join(self._config.DUMP_DIR, fname)
fd = open(file_path, "wb+")
fd.write(msg)
fd.close()
yield(0, [
int(proc.p_pid),
str(proc.p_comm),
Address(start),
int(len(msg)),
str(file_path),
])
def render_text(self, outfd, data):
self.table_header(outfd, [("Pid", "8"),
("Name", "20"),
("Start", "[addrpad]"),
("Size", "8"),
("Path", "")])
for (proc, start, msg) in data:
fname = "Adium.{0}.{1:x}.txt".format(proc.p_pid, start)
file_path = os.path.join(self._config.DUMP_DIR, fname)
fd = open(file_path, "wb+")
fd.write(msg)
fd.close()
self.table_row(outfd,
str(proc.p_pid),
proc.p_comm,
start,
len(msg),
file_path)
| 35.884146
| 138
| 0.497026
| 4,795
| 0.814783
| 3,052
| 0.518607
| 0
| 0
| 0
| 0
| 1,222
| 0.207647
|
a2a490d7174747d1795eadc9407c26effc4b112a
| 14,165
|
py
|
Python
|
mod_equations-master/mod_equations.py
|
userElaina/hg8
|
235dbeca3d58b94e1378ac4240ed8424791ae561
|
[
"MIT"
] | null | null | null |
mod_equations-master/mod_equations.py
|
userElaina/hg8
|
235dbeca3d58b94e1378ac4240ed8424791ae561
|
[
"MIT"
] | null | null | null |
mod_equations-master/mod_equations.py
|
userElaina/hg8
|
235dbeca3d58b94e1378ac4240ed8424791ae561
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
Author @55-AA
19 July, 2016
'''
import copy
def gcd(a, b):
"""
Return the greatest common denominator of integers a and b.
gmpy2.gcd(a, b)
"""
while b:
a, b = b, a % b
return a
def lcm(a, b):
return a * b / (gcd(a, b))
def egcd(a, b):
"""
ax + by = 1
ax ≡ 1 mod b
Return a 3-element tuple (g, x, y), the g = gcd(a, b)
gmpy2.gcdext(a, b)
"""
if a == 0:
return (b, 0, 1)
else:
g, y, x = egcd(b % a, a)
return (g, x - (b // a) * y, y)
def mod_inv(a, m):
"""
ax ≡ 1 mod m
gmpy2.invert(a, m)
"""
g, x, y = egcd(a, m)
assert g == 1
return x % m
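# Worked example for mod_inv (illustrative): 7 * 103 = 721 = 6 * 120 + 1,
# so mod_inv(7, 120) == 103, i.e. 7 * 103 ≡ 1 (mod 120).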
def int2mem(x):
"""
0x12233 => '\x33\x22\x01'
"""
pad_even = lambda x : ('', '0')[len(x)%2] + x
x = list(pad_even(format(x, 'x')).decode('hex'))
x.reverse()
return ''.join(x)
def mem2int(x):
"""
'\x33\x22\x01' => 0x12233
"""
x = list(x)
x.reverse()
return int(''.join(x).encode('hex'), 16)
###########################################################
# class
###########################################################
class GaussMatrix:
"""
    A*X ≡ B (mod p), where p is an integer greater than 0.
    Solves systems of modular linear equations by Gaussian elimination: first
    reduce to upper-triangular form, then back-substitute.
    When r(A) <= n there are always multiple solutions;
    when r(A) == n there are multiple solutions or a unique solution;
    when r(A) != r(A~) there is no solution.
    r(A) is the rank of the coefficient matrix, r(A~) the rank of the augmented
    matrix, and n the number of unknowns.
    http://www.docin.com/p-1063811671.html discusses an LU-decomposition approach
    for the case gcd(|A|, m) = 1; this code also handles gcd(|A|, m) > 1.
    Reduction rules:
    1. prefer a coefficient that is coprime with the modulus;
    2. otherwise, add another row n times so that the resulting coefficient has
       the smallest GCD with the modulus;
    3. move the coefficient obtained by rule 1 or 2 onto the diagonal.
    Constructor arguments:
    matrix: augmented matrix of the system (the last column holds the constant terms), e.g.
matrix = [
[ 69, 75, 78, 36, 58],
[ 46, 68, 51, 26, 42],
[ 76, 40, 42, 49, 11],
[ 11, 45, 2, 45, 1],
[ 15, 67, 60, 14, 72],
[ 76, 67, 73, 56, 58],
[ 67, 15, 68, 54, 75],
]
    mod: the modulus.
    Methods:
    gauss(): solve the system.
    Output attributes:
    error_str: description of the error, if any.
    count: number of solutions.
"""
def __init__(self, matrix, mod):
self.matrix = copy.deepcopy(matrix)
self.d = None
self.r = len(matrix)
self.c = len(matrix[0])
self.N = len(matrix[0]) - 1
self.mod = mod
self.count = 1
self.error_str = "unknown error"
def verify_solution(self, solution):
for d in self.matrix:
result = 0
for r in map(lambda x,y:0 if None == y else x*y, d, solution):
result += r
if (result % self.mod) != ((d[-1]) % self.mod):
return 0
return 1
def swap_row(self, ra, rb):
(self.d[ra], self.d[rb]) = (self.d[rb], self.d[ra])
def swap_col(self, ca, cb):
for j in range(self.r):
(self.d[j][ca], self.d[j][cb]) = (self.d[j][cb], self.d[j][ca])
def inv_result(self, r, n):
"""
        Solve for the n-th unknown; r holds the solutions already obtained,
        in the form [None, None, ..., n+1, ...].
        a*x ≡ b (mod m)
        x has a solution iff gcd(a, m) | b: when a and m are coprime there is
        always a solution; otherwise there is a solution only if gcd(a, m)
        divides b.
        Solutions have the form x0 + k*(m/gcd(a, m)), where x0 is the smallest
        non-negative particular solution and k is any integer.
        Returns [x0, x1, ... xn] with x0 < x1 < ... < xn < m.
"""
b = self.d[n][self.N]
a = self.d[n][n]
m = self.mod
k = gcd(a, m)
for j in xrange(n + 1, self.N):
b = (b - (self.d[n][j] * r[j] % m)) % m
if 1 == k:
return [mod_inv(a, m) * b % m]
else:
if k == gcd(k, b):
a /= k
b /= k
m /= k
x0 = mod_inv(a, m) * b % m
x = []
for i in xrange(k):
x.append(x0 + m*i)
return x
return None
def find_min_gcd_row_col(self, i, j):
        # First look for a pivot coefficient that is already coprime with the modulus
for k in xrange(i, self.r):
for l in xrange(j, self.c - 1):
if(1 == gcd(self.d[k][l], self.mod)):
return [k, l]
def add_min_gcd(a, b, m):
r = [m, 1]
g = gcd(a, b)
if g:
i = a / g
for j in xrange(i):
g = gcd((a + j * b) % m, m)
if g < r[0]:
r[0] = g
r[1] = j
if g == 1:
break
return r
        # Otherwise, look for the pivot whose GCD with the modulus is smallest
        # after adding a multiple of another row.
        # r = [resulting GCD, multiplier, row to reduce, row being added, column to reduce]
r = [self.mod, 1, i, i + 1, j]
for k in xrange(i, self.r):
for kk in xrange(k+1, self.r):
for l in range(j, self.c - 1):
rr = add_min_gcd(self.d[k][l], self.d[kk][l], self.mod)
if rr[0] < r[0]:
r[0] = rr[0]
r[1] = rr[1]
r[2] = k
r[3] = kk
r[4] = l
pass
if(1 == rr[0]):
break
g = r[0]
n = r[1]
k = r[2]
kk = r[3]
l = r[4]
if n and g < self.mod:
self.d[k] = map(lambda x, y : (x + n*y)%self.mod, self.d[k], self.d[kk])
return [k, l]
def mul_row(self, i, k, j):
a = self.d[k][j]
b = self.d[i][j]
def get_mul(a, b, m):
k = gcd(a, m)
if 1 == k:
return mod_inv(a, m) * b % m
else:
if k == gcd(k, b):
return mod_inv(a/k, m/k) * (b/k) % (m/k)
return None
if b:
mul = get_mul(a, b, self.mod)
if None == mul:
print_matrix(self.d)
assert(mul != None)
self.d[i] = map(lambda x, y : (y - x*mul) % self.mod, self.d[k], self.d[i])
def gauss(self):
"""
        Return the solution vectors: a unique solution, multiple solutions,
        or None if the system is unsolvable.
        Example: [[61, 25, 116, 164], [61, 60, 116, 94], [61, 95, 116, 24], [61, 130, 116, 129], [61, 165, 116, 59]]
"""
self.d = copy.deepcopy(self.matrix)
for i in xrange(self.r):
for j in xrange(self.c):
                self.d[i][j] = self.matrix[i][j] % self.mod  # turn negative coefficients into positive ones
if self.r < self.N:
self.d.extend([[0]*self.c]*(self.N - self.r))
        # Reduce to upper-triangular form
index = [x for x in xrange(self.N)]
for i in range(self.N):
tmp = self.find_min_gcd_row_col(i, i)
if(tmp):
self.swap_row(i, tmp[0])
(index[i], index[tmp[1]]) = (index[tmp[1]], index[i])
self.swap_col(i, tmp[1])
else:
self.error_str = "no min"
return None
for k in range(i + 1, self.r):
self.mul_row(k, i, i)
# print_matrix(self.d)
if self.r > self.N:
for i in xrange(self.N, self.r):
for j in xrange(self.c):
if self.d[i][j]:
self.error_str = "r(A) != r(A~)"
return None
        # Count the number of solutions
for i in xrange(self.N):
self.count *= gcd(self.d[i][i], self.mod)
if self.count > 100:
self.error_str = "solution too more:%d" % (self.count)
return None
        # Back-substitution
result = [[None]*self.N]
for i in range(self.N - 1, -1, -1):
new_result = []
for r in result:
ret = self.inv_result(r, i)
if ret:
for rr in ret:
l = r[:]
l[i] = rr
new_result.append(l)
else:
self.error_str = "no inv:i=%d" % (i)
return None
result = new_result
        # Undo the column swaps so the unknowns come back in their original order
for i in xrange(len(result)) :
def xchg(a, b):
result[i][b] = a
map(xchg, result[i][:], index)
return result
###########################################################
# test
###########################################################
def print_array(x):
prn = "\t["
for j in x:
if j:
prn += "%3d, " % j
else:
prn += " 0, "
print prn[:-2]+"],"
def print_matrix(x):
print "["
for i in x:
print_array(i)
print "]"
def random_test(times):
import random
for i in xrange(times):
print "\n============== random test %d ==============\n" % i
mod = random.randint(5, 999)
col = random.randint(2, 30)
row = random.randint(2, 30)
solution = map(lambda x : random.randint(0, mod - 1), [xc for xc in xrange(col)])
matrix = []
for y in xrange(row):
array = map(lambda x : random.randint(0, mod), [xc for xc in xrange(col)])
t = 0
for j in map(lambda x,y:0 if None == y else x*y, array, solution):
t += j
array.append(t % mod)
matrix.append(array)
run_test(mod, solution, matrix)
def static_test_ex():
mod=256
solution=[]
# matrix=[
# [2,3,0,11],
# [1,1,0,6],
# [3,0,1,22],
# ]
matrix=[list(map(int,i.split())) for i in open('m2.txt','r').read().splitlines()]
run_test(mod, solution, matrix)
def static_test():
mod = 26
solution = [23,15,19,13,25,17,24,18,11]
matrix = [
[11,12,7,0,0,0,0,0,0],
[0,0,0,11,12,7,0,0,0],
[0,0,0,0,0,0,11,12,7],
[14,18,23,0,0,0,0,0,0],
[0,0,0,14,18,23,0,0,0],
[0,0,0,0,0,0,14,18,23],
[17,5,19,0,0,0,0,0,0],
[0,0,0,17,5,19,0,0,0],
[0,0,0,0,0,0,17,5,19],
]
for i in xrange(len(matrix)):
t = 0
for j in map(lambda x,y:0 if None == y else x*y, matrix[i], solution):
t += j
matrix[i].append(t % mod)
run_test(mod, solution, matrix)
def run_test(mod, solution, matrix):
print "row = %d, col = %d" % (len(matrix), len(matrix[0])-1)
print "mod = %d" % (mod)
print "solution =", solution
print "matrix ="
print_matrix(matrix)
g = GaussMatrix(matrix, mod)
ret = g.gauss()
if not ret:
print "error:"
print_matrix(g.d)
print "error_str:", g.error_str
else:
print "times:", g.count
print "result:"
print_matrix(ret)
def DSA_comK():
"""
    # Reusing the same random k for two DSA signatures leaks the private key x.
    # p: an L-bit prime; L is a multiple of 64 in the range 512 to 1024.
    # q: a 160-bit prime factor of p - 1.
    # g: g = h^((p-1)/q) mod p, where h satisfies h < p - 1 and h^((p-1)/q) mod p > 1.
    # x: x < q, the private key.
    # y: y = g^x mod p; (p, q, g, y) is the public key.
    # r = ( g^k mod p ) mod q
    # s = ( k^(-1) (HASH(m) + xr)) mod q
    # The signature is (m, r, s).
"""
import hashlib
p = 0x8c286991e30fd5341b7832ce9fe869c0a73cf79303c2959ab677d980237abf7ecf853015c9a086c4330252043525a4fa60c64397421caa290225d6bc6ec6b122cd1da4bba1b13f51daca8b210156a28a0c3dbf17a7826f738fdfa87b22d7df990908c13dbd0a1709bbbab5f816ddba6c8166ef5696414538f6780fdce987552b
g = 0x49874582cd9af51d6f554c8fae68588c383272c357878d7f4079c6edcda3bcbf1f2cbada3f7d541a5b1ae7f046199f8f51d72db60a2601bd3375a3b48d7a3c9a0c0e4e8a0680f7fb98a8610f042e10340d2453d3c811088e48c5d6dd834eaa5509daeb430bcd9de8aabc239d698a655004e3f0a2ee456ffe9331c5f32c66f90d
q = 0x843437e860962d85d17d6ee4dd2c43bc4aec07a5
m1 = 0x3132333435363738
r1 = 0x4d91a491d95e4eef4196a583cd282ca0e625f36d
s1 = 0x3639b47678abf7545397fc9a1af108537fd1dfac
m2 = 0x49276c6c206265206261636b2e
r2 = 0x4d91a491d95e4eef4196a583cd282ca0e625f36d
s2 = 0x314c044409a94f4961340212b42ade005fb27b0a
# M1 = mem2int(hashlib.sha1(int2mem(m1)).digest())
M1 = int(hashlib.sha1('3132333435363738'.decode('hex')).hexdigest(), 16)
# M2 = mem2int(hashlib.sha1(int2mem(m2)).digest())
M2 = int(hashlib.sha1('49276c6c206265206261636b2e'.decode("hex")).hexdigest(), 16)
matrix_c = [
[0x3639b47678abf7545397fc9a1af108537fd1dfac, -0x4d91a491d95e4eef4196a583cd282ca0e625f36d, M1],
[0x314c044409a94f4961340212b42ade005fb27b0a, -0x4d91a491d95e4eef4196a583cd282ca0e625f36d, M2]
]
print "mod = %d" % (q)
print "matrix ="
print_matrix(matrix_c)
Gauss = GaussMatrix(matrix_c, q)
ret = Gauss.gauss()
if not ret:
print "error:"
print_matrix(Gauss.d)
print "error_str:", Gauss.error_str
else:
k = ret[0][0]
x = ret[0][1]
print "k: %x" % (k)
print "x: %x" % (x)
print Gauss.verify_solution(ret[0])
exit(0)
def qwq(matrix):
mod=256
solution=[]
run_test(mod, solution, matrix)
# if __name__ == "__main__":
# DSA_comK()
# static_test()
# static_test_ex()
# random_test(1)
# exit(0)
con=[26,28,38,39,40,50,52,79,80,81,91,103,105,115,116,117]
for kk in xrange(144):
if kk in con:
continue
for k in xrange(kk):
if k in con:
continue
matrix=[list(map(int,i.split())) for i in open('n2.txt','r').read().splitlines()]
_p=list()
_p.append(int(k))
if k//12>=1:
_p.append(int(k-12))
if k//12<=10:
_p.append(int(k+12))
if k//12>=2:
_p.append(int(k-24))
if k//12<=9:
_p.append(int(k+24))
if k%12>=1:
_p.append(int(k-1))
if k%12<=10:
_p.append(int(k+1))
if k%12>=2:
_p.append(int(k-2))
if k%12<=9:
_p.append(int(k+2))
_p.append(int(kk))
if kk//12>=1:
_p.append(int(kk-12))
if kk//12<=10:
_p.append(int(kk+12))
if kk//12>=2:
_p.append(int(kk-24))
if kk//12<=9:
_p.append(int(kk+24))
if kk%12>=1:
_p.append(int(kk-1))
if kk%12<=10:
_p.append(int(kk+1))
if kk%12>=2:
_p.append(int(kk-2))
if kk%12<=9:
_p.append(int(kk+2))
for i in sorted(set(_p))[::-1]:
matrix.pop(i)
qwq(matrix)
# input()
| 27.34556
| 266
| 0.454712
| 7,628
| 0.5054
| 0
| 0
| 0
| 0
| 0
| 0
| 4,027
| 0.266812
|
a2a4ec18f82420451b7a78afd24b5244e8356daf
| 451
|
py
|
Python
|
main.py
|
attakei/lantis-web-radio
|
febf5fe156da4bd60ef9d1d09fe57a62c435a380
|
[
"MIT"
] | null | null | null |
main.py
|
attakei/lantis-web-radio
|
febf5fe156da4bd60ef9d1d09fe57a62c435a380
|
[
"MIT"
] | null | null | null |
main.py
|
attakei/lantis-web-radio
|
febf5fe156da4bd60ef9d1d09fe57a62c435a380
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding:utf8 -*-
import sys
import argparse
from lantis.webradio.commands import bind_subparsers
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
bind_subparsers(subparsers)
def main(argv=None):
if argv is None:
argv = sys.argv[1:]
args = parser.parse_args()
command = args.Command(args)
return command.run()
if __name__ == '__main__':
ret = main()
sys.exit(ret)
| 18.791667
| 52
| 0.687361
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 52
| 0.115299
|
a2a5a42b6d09e31e44930e2b97a11e3ac6f3bf02
| 6,304
|
py
|
Python
|
vnpy/app/portfolio_strategy/strategies/daily_amplitude_2_days_volitility_strategy.py
|
franklili3/vnpy
|
4d710553302eb3587e4acb2ff8ce151660fb9c17
|
[
"MIT"
] | null | null | null |
vnpy/app/portfolio_strategy/strategies/daily_amplitude_2_days_volitility_strategy.py
|
franklili3/vnpy
|
4d710553302eb3587e4acb2ff8ce151660fb9c17
|
[
"MIT"
] | null | null | null |
vnpy/app/portfolio_strategy/strategies/daily_amplitude_2_days_volitility_strategy.py
|
franklili3/vnpy
|
4d710553302eb3587e4acb2ff8ce151660fb9c17
|
[
"MIT"
] | null | null | null |
from typing import List, Dict
from datetime import datetime
import numpy as np
from vnpy.app.portfolio_strategy import StrategyTemplate, StrategyEngine
from vnpy.trader.utility import BarGenerator, ArrayManager
from vnpy.trader.object import BarData, TickData
from vnpy.trader.constant import Interval
class AmplitudeVolitilityStrategy(StrategyTemplate):
""""""
author = "Frank li"
volitility_window = 2
stocks_number = 1
rebalance_days = 1
#leg1_symbol = ""
parameters = [
"volitility_window",
"stocks_number",
"rebalance_days"
]
#variables = [
#"leg1_symbol",
#]
def __init__(
self,
strategy_engine: StrategyEngine,
strategy_name: str,
vt_symbols: List[str],
setting: dict
):
""""""
super().__init__(strategy_engine, strategy_name, vt_symbols, setting)
self.bgs: Dict[str, BarGenerator] = {}
self.targets: Dict[str, int] = {}
self.last_tick_time: datetime = None
self.amplitude_data: Dict[str, np.array] = {}
self.volitility_data: Dict[str, int] = {}
self.sorted_volitility_data: list[np.array] = []
self.selected_symbols: list[str] = []
self.bar_count: int = 0
for vt_symbol in self.vt_symbols:
            self.amplitude_data[vt_symbol] = np.zeros(self.volitility_window)
self.volitility_data[vt_symbol] = np.zeros(1)
# Obtain contract info
#self.leg1_symbol, self.leg2_symbol = vt_symbols
def on_bar(bar: BarData):
""""""
pass
for vt_symbol in self.vt_symbols:
self.targets[vt_symbol] = 0
self.bgs[vt_symbol] = BarGenerator(on_bar, Interval.Day)
def on_init(self):
"""
Callback when strategy is inited.
"""
self.write_log("策略初始化")
self.load_bars(2)
def on_start(self):
"""
Callback when strategy is started.
"""
self.write_log("策略启动")
def on_stop(self):
"""
Callback when strategy is stopped.
"""
self.write_log("策略停止")
def on_tick(self, tick: TickData):
"""
Callback of new tick data update.
"""
if (
self.last_tick_time
and self.last_tick_time.day != tick.datetime.day
):
bars = {}
for vt_symbol, bg in self.bgs.items():
bars[vt_symbol] = bg.generate()
self.on_bars(bars)
bg: BarGenerator = self.bgs[tick.vt_symbol]
bg.update_tick(tick)
self.last_tick_time = tick.datetime
def on_bars(self, bars: Dict[str, BarData]):
""""""
self.cancel_all()
self.bar_count += 1
# Return if one leg data is missing
#if self.leg1_symbol not in bars or self.leg2_symbol not in bars:
# return
# Calculate current signal
        #leg2_bar = bars[self.leg2_symbol]
        for vt_symbol in self.vt_symbols:
            self.amplitude_data[vt_symbol] = np.append(
                self.amplitude_data[vt_symbol],
                (bars[vt_symbol].high_price - bars[vt_symbol].low_price) / bars[vt_symbol].open_price)
        # Filter time: only rebalance every rebalance_days bars
        if self.bar_count % self.rebalance_days != 0:
            return
        # Update to data array
        for vt_symbol in self.vt_symbols:
            amplitude = (bars[vt_symbol].high_price -
                         bars[vt_symbol].low_price) / bars[vt_symbol].open_price
            if len(self.amplitude_data[vt_symbol]) < self.volitility_window:
                self.amplitude_data[vt_symbol] = np.append(
                    self.amplitude_data[vt_symbol], amplitude)
            else:
                self.amplitude_data[vt_symbol][:-1] = self.amplitude_data[vt_symbol][1:]
                self.amplitude_data[vt_symbol][-1] = amplitude
            self.volitility_data[vt_symbol] = self.amplitude_data[vt_symbol].std()
#self.current_spread = (
# leg1_bar.close_price * self.leg1_ratio - leg2_bar.close_price * self.leg2_ratio
        # Select the stocks with the highest amplitude volatility
        self.sorted_volitility_data = sorted(self.volitility_data.items(),
                                             key=lambda item: item[1], reverse=True)
        self.selected_symbols = []
        for item in self.sorted_volitility_data[:self.stocks_number]:
            self.selected_symbols.append(item[0])
        print('self.selected_symbols:', self.selected_symbols)
'''
self.spread_data[:-1] = self.spread_data[1:]
self.spread_data[-1] = self.current_spread
self.spread_count += 1
if self.spread_count <= self.boll_window:
return
# Calculate boll value
buf: np.array = self.spread_data[-self.boll_window:]
std = buf.std()
self.boll_mid = buf.mean()
self.boll_up = self.boll_mid + self.boll_dev * std
self.boll_down = self.boll_mid - self.boll_dev * std
'''
# Calculate new target position
        # Equal weights
        weight = 1 / len(self.selected_symbols)
        # Current holdings
        stock_hold_now = [equity.symbol for equity in self.get_pos()]
        print('stock_hold_now:', stock_hold_now)
        # Stocks to buy
        stock_to_buy = [i for i in self.selected_symbols if i not in stock_hold_now]
        print('stock_to_buy:', stock_to_buy)
        # Stocks to keep holding
        no_need_to_sell = [i for i in stock_hold_now if i in self.selected_symbols]
        print('no_need_to_sell:', no_need_to_sell)
        # Stocks to sell
        stock_to_sell = [i for i in stock_hold_now if i not in no_need_to_sell]
        print('stock_to_sell:', stock_to_sell)
        # Execute sells
        for stock in stock_to_sell:
            current_pos = self.get_pos(stock)
            volume = current_pos
            bar = bars[stock]
            price = bar.close_price + self.price_add
            self.sell(stock, price, volume)
        # Return if there is nothing to buy today
        if len(stock_to_buy) == 0:
            return
        # Execute buys
        for s_t_b in stock_to_buy:
            bar = bars[s_t_b]
            price = bar.close_price + self.price_add
            volume = 1
            self.buy(s_t_b, price, volume)
self.put_event()
| 32
| 92
| 0.584391
| 6,136
| 0.95398
| 0
| 0
| 0
| 0
| 0
| 0
| 1,608
| 0.25
|
a2a878b865e7dd158c1f4d9b527b4dc267ffa7f3
| 7,065
|
py
|
Python
|
old_game/hotmaps.py
|
jwvhewitt/dmeternal
|
bb09f2d497daf9b40dd8cfee10c55be55fb7c3cb
|
[
"Apache-2.0"
] | 53
|
2015-07-03T21:25:36.000Z
|
2022-02-18T23:08:38.000Z
|
old_game/hotmaps.py
|
jwvhewitt/dmeternal
|
bb09f2d497daf9b40dd8cfee10c55be55fb7c3cb
|
[
"Apache-2.0"
] | 5
|
2015-07-03T21:27:12.000Z
|
2016-12-08T14:40:38.000Z
|
old_game/hotmaps.py
|
jwvhewitt/dmeternal
|
bb09f2d497daf9b40dd8cfee10c55be55fb7c3cb
|
[
"Apache-2.0"
] | 14
|
2016-02-02T06:49:51.000Z
|
2022-02-24T13:24:35.000Z
|
# Pathfinding algorithm.
import pygame
import random
class HotTile( object ):
def __init__( self ):
self.heat = 9999
self.cost = 0
self.block = False
class HotMap( object ):
DELTA8 = [ (-1,-1), (0,-1), (1,-1), (-1,0), (1,0), (-1,1), (0,1), (1,1) ]
EXPENSIVE = 9999
def __init__( self, scene, hot_points, obstacles=set(), expensive=set(), limits=None, avoid_models=False ):
"""Calculate this hotmap given scene and set of hot points."""
# Obstacles block movement.
# Expensive tiles are avoided, if possible.
self.scene = scene
if avoid_models:
obstacles = self.list_model_positions().union( obstacles )
self.obstacles = obstacles
self.expensive = expensive
self.map = [[ int(self.EXPENSIVE)
for y in range(scene.height) ]
for x in range(scene.width) ]
for p in hot_points:
if len( p ) < 3:
self.map[p[0]][p[1]] = 0
else:
self.map[p[0]][p[1]] = min( p[2], self.map[p[0]][p[1]] )
if limits:
self.lo_x = max( limits.x, 1 )
self.hi_x = min( limits.x + limits.width + 1, scene.width - 1 )
self.lo_y = max( limits.y, 1 )
self.hi_y = min( limits.y + limits.height + 1, scene.height - 1 )
else:
self.lo_x,self.hi_x,self.lo_y,self.hi_y = 1, scene.width-1, 1, scene.height-1
self.process_map( limits )
def process_map( self, limits ):
# Iterate through each of the tiles,
flag = True
while flag:
flag = False
for y in range( self.lo_y, self.hi_y ):
for x in range( self.lo_x, self.hi_x ):
p = (x,y)
if not self.blocks_movement( x, y ):
dh = 2 + self.map[x-1][y]
dv = 2 + self.map[x][y-1]
dd = 3 + self.map[x-1][y-1]
dp = 3 + self.map[x+1][y-1]
dp = min(dh,dv,dd,dp)
if p in self.expensive:
dp += 16
if dp < self.map[x][y]:
self.map[x][y] = dp
flag = True
for y in range( self.hi_y-1, self.lo_y-1, -1 ):
for x in range( self.hi_x-1, self.lo_x-1, -1 ):
                    p = (x,y)
                    if not self.blocks_movement( x, y ):
dh = 2 + self.map[x+1][y]
dv = 2 + self.map[x][y+1]
dd = 3 + self.map[x+1][y+1]
dp = 3 + self.map[x-1][y+1]
dp = min(dh,dv,dd,dp)
if p in self.expensive:
dp += 16
if dp < self.map[x][y]:
self.map[x][y] = dp
flag = True
def blocks_movement( self, x, y ):
return self.scene.map[x][y].blocks_walking() or (x,y) in self.obstacles
def list_model_positions( self ):
mylist = set()
for m in self.scene.contents:
if self.scene.is_model(m):
mylist.add( m.pos )
return mylist
def downhill_dir( self, pos ):
"""Return a dx,dy tuple showing the lower heat value."""
best_d = None
random.shuffle( self.DELTA8 )
heat = self.map[pos[0]][pos[1]]
for d in self.DELTA8:
x2 = d[0] + pos[0]
y2 = d[1] + pos[1]
if self.scene.on_the_map(x2,y2) and ( self.map[x2][y2] < heat ):
heat = self.map[x2][y2]
best_d = d
return best_d
def clever_downhill_dir( self, exp, pos ):
"""Return the best direction to move in, avoiding models."""
best_d = None
random.shuffle( self.DELTA8 )
heat = self.map[pos[0]][pos[1]]
for d in self.DELTA8:
x2 = d[0] + pos[0]
y2 = d[1] + pos[1]
if exp.scene.on_the_map(x2,y2) and ( self.map[x2][y2] < heat ):
target = exp.scene.get_character_at_spot( (x2,y2) )
if not target:
heat = self.map[x2][y2]
best_d = d
return best_d
def mix( self, other_map, amount ):
for y in range( self.lo_y, self.hi_y ):
for x in range( self.lo_x, self.hi_x ):
self.map[x][y] += other_map.map[x][y] * amount
def show( self, x0, y0 ):
for y in range( y0-2,y0+3):
vals = list()
for x in range( x0-2,x0+3):
if self.scene.on_the_map(x,y):
vals.append( '{:<8}'.format( self.map[x][y] ) )
else:
vals.append( "XXX" )
print(" ".join( vals ))
class AvoidMap( HotMap ):
def __init__( self, scene, hot_points, obstacles=set(), expensive=set(), limits=None, avoid_models=False ):
"""Calculate this hotmap given scene and set of hot points."""
super( AvoidMap, self ).__init__( scene, hot_points, obstacles, expensive=expensive, avoid_models=avoid_models, limits=limits )
for y in range( self.lo_y, self.hi_y ):
for x in range( self.lo_x, self.hi_x ):
if self.map[x][y] < self.EXPENSIVE:
self.map[x][y] *= -1.2
self.process_map( limits )
class PointMap( HotMap ):
def __init__( self, scene, dest, avoid_models = False, expensive=set(), limits=None ):
myset = set()
myset.add( dest )
super( PointMap, self ).__init__( scene, myset, expensive=expensive, avoid_models=avoid_models, limits=limits )
class MoveMap( HotMap ):
"""Calculates movement costs to different tiles. Only calcs as far as necessary."""
def __init__( self, scene, chara, avoid_models = False ):
myset = set()
myset.add( chara.pos )
reach = ( chara.get_move() + 1 ) // 2
super( MoveMap, self ).__init__( scene, myset, limits=pygame.Rect(chara.pos[0]-reach, chara.pos[1]-reach, reach*2+1, reach*2+1 ), avoid_models=avoid_models )
if __name__=='__main__':
import timeit
from . import maps
import random
import pygame
myscene = maps.Scene( 100 , 100 )
for x in range( 5, myscene.width ):
for y in range( 5, myscene.height ):
if random.randint(1,3) == 1:
myscene.map[x][y].wall = maps.BASIC_WALL
myset = set()
myset.add( (23,23) )
class OldWay( object ):
def __init__( self, m ):
self.m = m
def __call__(self):
HotMap( self.m, myset )
class NewWay( object ):
def __init__( self, m ):
self.m = m
self.myrect = pygame.Rect( 20, 20, 5, 5 )
def __call__(self):
HotMap( self.m, myset, limits=self.myrect )
t1 = timeit.Timer( OldWay( myscene ) )
t2 = timeit.Timer( NewWay( myscene ) )
print(t1.timeit(100))
print(t2.timeit(100))
| 34.802956
| 165
| 0.501062
| 6,470
| 0.915782
| 0
| 0
| 0
| 0
| 0
| 0
| 479
| 0.067799
|
a2a927903851fa866273d2e9c394ad0c65d802fb
| 960
|
py
|
Python
|
upload_menu.py
|
jaypee-f/webhook
|
4fc8e47c6dd7fd3c90b4db076bfd075ffdd44054
|
[
"MIT"
] | null | null | null |
upload_menu.py
|
jaypee-f/webhook
|
4fc8e47c6dd7fd3c90b4db076bfd075ffdd44054
|
[
"MIT"
] | null | null | null |
upload_menu.py
|
jaypee-f/webhook
|
4fc8e47c6dd7fd3c90b4db076bfd075ffdd44054
|
[
"MIT"
] | null | null | null |
import json
import jsonpickle
from pprint import pprint
class Object(object):
pass
prods = Object()
prods.accountId="5c76ae99c6489f0001bc6b0a"
prods.locationId="5db938536d49b300017efcc3"
prods.products=[]
prods.categories=[]
with open ('pl.json', 'r') as f:
products_dict = json.load(f)
for item in products_dict["models"]:
prod = Object()
prod.productType=1
prod.plu=item["id"]
prod.price=item["price"]
prod.posProductId=item["id"]
prod.name=item["name"]
prod.posProductCategoryId=item["parentId"]
prod.imageUrl=""
prod.description=item["description"]
prod.deliveryTax=20000
prod.takeawayTax=20000
prods.products.append(prod)
with open ('cat.json', 'r') as f:
category_dict = json.load(f)
for item in category_dict["models"]:
cat = Object()
cat.name=item["name"]
cat.posCategoryId=item["id"]
        cat.imageUrl = ""
prods.categories.append(cat)
print(jsonpickle.dumps(prods))
| 20.869565
| 46
| 0.691667
| 30
| 0.03125
| 0
| 0
| 0
| 0
| 0
| 0
| 151
| 0.157292
|
a2a95220c05c2685607d88d70a06cedd80129fc1
| 2,489
|
py
|
Python
|
CareerTinderServer/CareerTinder/migrations/0002_auto_20160918_0011.py
|
sarojaerabelli/HVGS
|
86ec3d2de496540ca439c40f4a0c58c47aa181cf
|
[
"MIT"
] | 1
|
2016-09-18T16:40:27.000Z
|
2016-09-18T16:40:27.000Z
|
CareerTinderServer/CareerTinder/migrations/0002_auto_20160918_0011.py
|
sarojaerabelli/HVGS
|
86ec3d2de496540ca439c40f4a0c58c47aa181cf
|
[
"MIT"
] | null | null | null |
CareerTinderServer/CareerTinder/migrations/0002_auto_20160918_0011.py
|
sarojaerabelli/HVGS
|
86ec3d2de496540ca439c40f4a0c58c47aa181cf
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-18 04:11
from __future__ import unicode_literals
import CareerTinder.listfield
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('CareerTinder', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='hiree',
name='date_of_birth',
),
migrations.RemoveField(
model_name='hiree',
name='name',
),
migrations.AddField(
model_name='hiree',
name='college',
field=models.CharField(default='mit', max_length=100),
preserve_default=False,
),
migrations.AddField(
model_name='hiree',
name='degree',
field=models.CharField(choices=[(b'BA', b"Bachelor's"), (b'MA', b"Master's"), (b'DO', b'Doctorate')], default='ba', max_length=10),
preserve_default=False,
),
migrations.AddField(
model_name='hiree',
name='first_name',
field=models.CharField(default='john', max_length=50),
preserve_default=False,
),
migrations.AddField(
model_name='hiree',
name='last_name',
field=models.CharField(default='doe', max_length=50),
preserve_default=False,
),
migrations.AddField(
model_name='hiree',
name='major',
field=models.CharField(default='cs', max_length=100),
preserve_default=False,
),
migrations.AddField(
model_name='hiree',
name='year',
field=models.IntegerField(default='2019'),
preserve_default=False,
),
migrations.AddField(
model_name='recruiter',
name='hirees',
field=CareerTinder.listfield.ListField(default=b''),
),
migrations.AlterField(
model_name='company',
name='logo',
field=models.ImageField(upload_to=b'media/logos/'),
),
migrations.AlterField(
model_name='hiree',
name='face_picture',
field=models.ImageField(upload_to=b'media/faces/'),
),
migrations.AlterField(
model_name='hiree',
name='resume_picture',
field=models.FileField(upload_to=b'media/resumes/'),
),
]
| 30.728395
| 143
| 0.546002
| 2,301
| 0.924468
| 0
| 0
| 0
| 0
| 0
| 0
| 438
| 0.175974
|
a2a9aa208a483f748111656782f9fb6afead659b
| 910
|
py
|
Python
|
tests/test_client.py
|
nyush-se-spring2021-forum/OurTieba
|
e7c5d75686e9cfda35050b8e40166d8a1d6ae83d
|
[
"MIT"
] | null | null | null |
tests/test_client.py
|
nyush-se-spring2021-forum/OurTieba
|
e7c5d75686e9cfda35050b8e40166d8a1d6ae83d
|
[
"MIT"
] | 20
|
2021-04-08T11:16:51.000Z
|
2021-05-21T16:17:51.000Z
|
tests/test_client.py
|
nyush-se-spring2021-forum/OurTieba
|
e7c5d75686e9cfda35050b8e40166d8a1d6ae83d
|
[
"MIT"
] | null | null | null |
class TestClient:
"""
Test before_request and after_request decorators in __init__.py.
"""
def test_1(self, client): # disallowed methods
res = client.put("/")
assert res.status_code == 405
assert b"Method Not Allowed" in res.content
res = client.options("/api/post/add")
assert res.status_code == 405
assert b"Method Not Allowed" in res.content
res = client.delete("/notifications")
assert res.status_code == 405
assert b"Method Not Allowed" in res.content
def test_2(self, client): # empty/fake user agent
res = client.get("/", headers={"User-Agent": ""})
assert res.status_code == 403
assert b"No Scrappers!" in res.content
res = client.get("/board/2", headers={"User-Agent": "python/3.8"})
assert res.status_code == 403
assert b"No Scrappers!" in res.content
| 33.703704
| 74
| 0.615385
| 909
| 0.998901
| 0
| 0
| 0
| 0
| 0
| 0
| 303
| 0.332967
|
a2aa47ea240a66801a3fa533dadd5d9026710eb3
| 4,259
|
py
|
Python
|
cadence/activity_loop.py
|
mfateev/cadence-python
|
f8e6e2eb3a010dcd1df76a2e4e59afbb8c11f1db
|
[
"MIT"
] | null | null | null |
cadence/activity_loop.py
|
mfateev/cadence-python
|
f8e6e2eb3a010dcd1df76a2e4e59afbb8c11f1db
|
[
"MIT"
] | null | null | null |
cadence/activity_loop.py
|
mfateev/cadence-python
|
f8e6e2eb3a010dcd1df76a2e4e59afbb8c11f1db
|
[
"MIT"
] | null | null | null |
import datetime
import logging
import json
from cadence.activity import ActivityContext
from cadence.cadence_types import PollForActivityTaskRequest, TaskListMetadata, TaskList, PollForActivityTaskResponse, \
RespondActivityTaskCompletedRequest, RespondActivityTaskFailedRequest
from cadence.conversions import json_to_args
from cadence.workflowservice import WorkflowService
from cadence.worker import Worker
logger = logging.getLogger(__name__)
def activity_task_loop(worker: Worker):
service = WorkflowService.create(worker.host, worker.port)
logger.info(f"Activity task worker started: {WorkflowService.get_identity()}")
try:
while True:
if worker.is_stop_requested():
return
try:
polling_start = datetime.datetime.now()
polling_request = PollForActivityTaskRequest()
polling_request.task_list_metadata = TaskListMetadata()
polling_request.task_list_metadata.max_tasks_per_second = 200000
polling_request.domain = worker.domain
polling_request.identity = WorkflowService.get_identity()
polling_request.task_list = TaskList()
polling_request.task_list.name = worker.task_list
task: PollForActivityTaskResponse
task, err = service.poll_for_activity_task(polling_request)
polling_end = datetime.datetime.now()
logger.debug("PollForActivityTask: %dms", (polling_end - polling_start).total_seconds() * 1000)
except Exception as ex:
logger.error("PollForActivityTask error: %s", ex)
continue
if err:
logger.error("PollForActivityTask failed: %s", err)
continue
if not task.task_token:
logger.debug("PollForActivityTask has no task_token (expected): %s", task)
continue
args = json_to_args(task.input)
logger.info(f"Request for activity: {task.activity_type.name}")
fn = worker.activities.get(task.activity_type.name)
if not fn:
logger.error("Activity type not found: " + task.activity_type.name)
continue
process_start = datetime.datetime.now()
activity_context = ActivityContext()
activity_context.task_token = task.task_token
activity_context.workflow_execution = task.workflow_execution
activity_context.domain = worker.domain
try:
ActivityContext.set(activity_context)
ret = fn(*args)
ActivityContext.set(None)
respond = RespondActivityTaskCompletedRequest()
respond.task_token = task.task_token
respond.result = json.dumps(ret)
respond.identity = WorkflowService.get_identity()
_, error = service.respond_activity_task_completed(respond)
if error:
logger.error("Error invoking RespondActivityTaskCompleted: %s", error)
logger.info(f"Activity {task.activity_type.name}({str(args)[1:-1]}) returned {respond.result}")
except Exception as ex:
logger.error(f"Activity {task.activity_type.name} failed: {type(ex).__name__}({ex})", exc_info=1)
respond: RespondActivityTaskFailedRequest = RespondActivityTaskFailedRequest()
respond.task_token = task.task_token
respond.identity = WorkflowService.get_identity()
respond.details = json.dumps({
"detailMessage": f"Python error: {type(ex).__name__}({ex})",
"class": "java.lang.Exception"
})
respond.reason = "java.lang.Exception"
_, error = service.respond_activity_task_failed(respond)
if error:
logger.error("Error invoking RespondActivityTaskFailed: %s", error)
process_end = datetime.datetime.now()
logger.info("Process ActivityTask: %dms", (process_end - process_start).total_seconds() * 1000)
finally:
worker.notify_thread_stopped()
| 48.954023
| 120
| 0.630195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 668
| 0.156844
|
a2aacc4cece05e2f823b764750ce6c88673d5b7a
| 3,666
|
py
|
Python
|
support/fetch_validators_load.py
|
sonofmom/ton-zabbix-scripts
|
b43471d058873c5ba78a92fa79d334380df5f6fc
|
[
"MIT"
] | null | null | null |
support/fetch_validators_load.py
|
sonofmom/ton-zabbix-scripts
|
b43471d058873c5ba78a92fa79d334380df5f6fc
|
[
"MIT"
] | null | null | null |
support/fetch_validators_load.py
|
sonofmom/ton-zabbix-scripts
|
b43471d058873c5ba78a92fa79d334380df5f6fc
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
#
import sys
import os
sys.path.append(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
import argparse
import datetime
import time
import requests
import Libraries.arguments as ar
import Classes.AppConfig as AppConfig
import Classes.LiteClient as LiteClient
import Classes.TonNetwork as TonNetwork
import json
def run():
description = 'Fetches validators load statistics from blockchain, maps it to ADNL and returns JSON'
parser = argparse.ArgumentParser(formatter_class = argparse.RawDescriptionHelpFormatter,
description = description)
ar.set_standard_args(parser)
parser.add_argument('-o', '--output',
required=False,
type=str,
default=None,
dest='output',
action='store',
help='Write output to indicated file instead of stdout')
parser.add_argument('period', nargs=1, help='Max period to fetch, in seconds - REQUIRED')
cfg = AppConfig.AppConfig(parser.parse_args())
lc = LiteClient.LiteClient(cfg.args, cfg.config["liteClient"], cfg.log)
tn = TonNetwork.TonNetwork(lc, cfg.log)
start_time = datetime.datetime.now()
cfg.log.log(os.path.basename(__file__), 3, 'Fetching validation cycles list from elections server')
try:
rs = requests.get("{}/getValidationCycles?return_participants=true&offset=0&limit=2".format(cfg.config["elections"]["url"])).json()
except Exception as e:
cfg.log.log(os.path.basename(__file__), 1, "Could not perform elections request: " + str(e))
sys.exit(1)
cfg.log.log(os.path.basename(__file__), 3, "Looking for active cycle")
dt = datetime.datetime.now(datetime.timezone.utc)
now = dt.replace(tzinfo=datetime.timezone.utc).timestamp()
vdata = None
for record in rs:
if record["cycle_info"]["utime_since"] < now and record["cycle_info"]["utime_until"] >= now:
vdata = record
break
if not vdata:
cfg.log.log(os.path.basename(__file__), 1, "Could not find active validation cycle")
sys.exit(1)
cfg.log.log(os.path.basename(__file__), 3, 'Calculating maximum possible period for current cycle')
t_now = int(time.time())
t_max = t_now - vdata["cycle_info"]["utime_since"]
t_period = int(cfg.args.period[0])
cfg.log.log(os.path.basename(__file__), 3, '{} - {} = {} seconds'.format(t_now,vdata["cycle_info"]["utime_since"],t_max))
if t_period > t_max:
t_period = t_max
cfg.log.log(os.path.basename(__file__), 3, 'Using period of {} seconds'.format(t_period))
cfg.log.log(os.path.basename(__file__), 3, 'Fetching validators load from blockchain')
result = tn.get_validators_load((t_now-t_period),t_now)
cfg.log.log(os.path.basename(__file__), 3, 'Mapping ADNLs to PUBKEYS for result')
for i in range(len(result)):
record = next((chunk for chunk in vdata["cycle_info"]["validators"] if chunk["pubkey"] == result[i]["pubkey"]),
None)
if record:
result[i]["adnl_addr"] = record["adnl_addr"]
runtime = (datetime.datetime.now() - start_time)
if not result:
cfg.log.log(os.path.basename(__file__), 1, 'Could not retrieve information.')
sys.exit(1)
elif cfg.args.get_time:
print(runtime.microseconds/1000)
else:
if cfg.args.output:
f = open(cfg.args.output, "w")
f.write(json.dumps(result))
f.close()
else:
print(json.dumps(result))
if __name__ == '__main__':
run()
| 39.847826
| 139
| 0.642117
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 886
| 0.24168
|
a2aacc4feda333eaff912d30b183a58db7aa86b3
| 8,324
|
py
|
Python
|
packnet_sfm/loggers/wandb_logger.py
|
asmith9455/packnet-sfm
|
60a034ac42d2e72314d002b27efcdfc769dbc3fc
|
[
"MIT"
] | 982
|
2020-02-27T02:48:29.000Z
|
2022-03-31T12:33:50.000Z
|
packnet_sfm/loggers/wandb_logger.py
|
asmith9455/packnet-sfm
|
60a034ac42d2e72314d002b27efcdfc769dbc3fc
|
[
"MIT"
] | 205
|
2020-03-24T06:44:30.000Z
|
2022-03-30T09:13:14.000Z
|
packnet_sfm/loggers/wandb_logger.py
|
asmith9455/packnet-sfm
|
60a034ac42d2e72314d002b27efcdfc769dbc3fc
|
[
"MIT"
] | 253
|
2020-01-25T16:14:45.000Z
|
2022-03-30T05:57:40.000Z
|
# Copyright 2020 Toyota Research Institute. All rights reserved.
# Adapted from Pytorch-Lightning
# https://github.com/PyTorchLightning/pytorch-lightning/blob/master/pytorch_lightning/loggers/wandb.py
from argparse import Namespace
from collections import OrderedDict
import numpy as np
import torch.nn as nn
import wandb
from wandb.wandb_run import Run
from packnet_sfm.utils.depth import viz_inv_depth
from packnet_sfm.utils.logging import prepare_dataset_prefix
from packnet_sfm.utils.types import is_dict, is_tensor
class WandbLogger:
"""
Wandb logger class to monitor training.
Parameters
----------
name : str
Run name (if empty, uses a fancy Wandb name, highly recommended)
dir : str
Folder where wandb information is stored
id : str
ID for the run
anonymous : bool
Anonymous mode
version : str
Run version
project : str
Wandb project where the run will live
tags : list of str
List of tags to append to the run
log_model : bool
Log the model to wandb or not
experiment : wandb
Wandb experiment
entity : str
Wandb entity
"""
def __init__(self,
name=None, dir=None, id=None, anonymous=False,
version=None, project=None, entity=None,
tags=None, log_model=False, experiment=None
):
super().__init__()
self._name = name
self._dir = dir
self._anonymous = 'allow' if anonymous else None
self._id = version or id
self._tags = tags
self._project = project
self._entity = entity
self._log_model = log_model
self._experiment = experiment if experiment else self.create_experiment()
self._metrics = OrderedDict()
def __getstate__(self):
"""Get the current logger state"""
state = self.__dict__.copy()
state['_id'] = self._experiment.id if self._experiment is not None else None
state['_experiment'] = None
return state
def create_experiment(self):
"""Creates and returns a new experiment"""
experiment = wandb.init(
name=self._name, dir=self._dir, project=self._project,
anonymous=self._anonymous, reinit=True, id=self._id,
resume='allow', tags=self._tags, entity=self._entity
)
wandb.run.save()
return experiment
def watch(self, model: nn.Module, log: str = 'gradients', log_freq: int = 100):
"""Watch training parameters."""
self.experiment.watch(model, log=log, log_freq=log_freq)
@property
def experiment(self) -> Run:
"""Returns the experiment (creates a new if it doesn't exist)."""
if self._experiment is None:
self._experiment = self.create_experiment()
return self._experiment
@property
def version(self) -> str:
"""Returns experiment version."""
return self._experiment.id if self._experiment else None
@property
def name(self) -> str:
"""Returns experiment name."""
name = self._experiment.project_name() if self._experiment else None
return name
@property
def run_name(self) -> str:
"""Returns run name."""
return wandb.run.name if self._experiment else None
@property
def run_url(self) -> str:
"""Returns run URL."""
return 'https://app.wandb.ai/{}/{}/runs/{}'.format(
wandb.run.entity, wandb.run.project, wandb.run.id) if self._experiment else None
@staticmethod
def _convert_params(params):
if isinstance(params, Namespace):
params = vars(params)
if params is None:
params = {}
return params
def log_config(self, params):
"""Logs model configuration."""
params = self._convert_params(params)
self.experiment.config.update(params, allow_val_change=True)
def log_metrics(self, metrics):
"""Logs training metrics."""
self._metrics.update(metrics)
if 'global_step' in metrics:
self.experiment.log(self._metrics)
self._metrics.clear()
def log_images(self, func, mode, batch, output,
args, dataset, world_size, config):
"""
Adds images to metrics for later logging.
Parameters
----------
func : Function
Function used to process the image before logging
mode : str {"train", "val"}
Training stage where the images come from (serve as prefix for logging)
batch : dict
Data batch
output : dict
Model output
args : tuple
Step arguments
dataset : CfgNode
Dataset configuration
world_size : int
Number of GPUs, used to get logging samples at consistent intervals
config : CfgNode
Model configuration
"""
dataset_idx = 0 if len(args) == 1 else args[1]
prefix = prepare_dataset_prefix(config, dataset_idx)
interval = len(dataset[dataset_idx]) // world_size // config.num_logs
if args[0] % interval == 0:
prefix_idx = '{}-{}-{}'.format(mode, prefix, batch['idx'][0].item())
func(prefix_idx, batch, output)
# Log depth images
def log_depth(self, *args, **kwargs):
"""Helper function used to log images relevant for depth estimation"""
def log(prefix_idx, batch, output):
self._metrics.update(log_rgb('rgb', prefix_idx, batch))
self._metrics.update(log_inv_depth('inv_depth', prefix_idx, output))
if 'depth' in batch:
self._metrics.update(log_depth('depth', prefix_idx, batch))
self.log_images(log, *args, **kwargs)
def log_rgb(key, prefix, batch, i=0):
"""
Converts an RGB image from a batch for logging
Parameters
----------
key : str
Key from data containing the image
prefix : str
Prefix added to the key for logging
batch : dict
Dictionary containing the key
i : int
Batch index from which to get the image
Returns
-------
image : wandb.Image
Wandb image ready for logging
"""
rgb = batch[key] if is_dict(batch) else batch
return prep_image(prefix, key,
rgb[i])
def log_depth(key, prefix, batch, i=0):
"""
Converts a depth map from a batch for logging
Parameters
----------
key : str
Key from data containing the depth map
prefix : str
Prefix added to the key for logging
batch : dict
Dictionary containing the key
i : int
Batch index from which to get the depth map
Returns
-------
image : wandb.Image
Wandb image ready for logging
"""
depth = batch[key] if is_dict(batch) else batch
inv_depth = 1. / depth[i]
inv_depth[depth[i] == 0] = 0
return prep_image(prefix, key,
viz_inv_depth(inv_depth, filter_zeros=True))
def log_inv_depth(key, prefix, batch, i=0):
"""
Converts an inverse depth map from a batch for logging
Parameters
----------
key : str
Key from data containing the inverse depth map
prefix : str
Prefix added to the key for logging
batch : dict
Dictionary containing the key
i : int
Batch index from which to get the inverse depth map
Returns
-------
image : wandb.Image
Wandb image ready for logging
"""
inv_depth = batch[key] if is_dict(batch) else batch
return prep_image(prefix, key,
viz_inv_depth(inv_depth[i]))
def prep_image(prefix, key, image):
"""
Prepare image for wandb logging
Parameters
----------
prefix : str
Prefix added to the key for logging
key : str
Key from data containing the inverse depth map
image : torch.Tensor [3,H,W]
Image to be logged
Returns
-------
output : dict
Dictionary with key and value for logging
"""
if is_tensor(image):
image = image.detach().permute(1, 2, 0).cpu().numpy()
prefix_key = '{}-{}'.format(prefix, key)
return {prefix_key: wandb.Image(image, caption=key)}
| 30.379562
| 102
| 0.608602
| 5,309
| 0.637794
| 0
| 0
| 1,109
| 0.133229
| 0
| 0
| 3,710
| 0.445699
|
a2ab5037304159997115ed0a2b381a23c81c1548
| 5,781
|
py
|
Python
|
source_code/trans.py
|
shinyfe74/EN_KOR_translator
|
910e6924b2b7b27a6e111ae35554cbff7e39ac20
|
[
"MIT"
] | null | null | null |
source_code/trans.py
|
shinyfe74/EN_KOR_translator
|
910e6924b2b7b27a6e111ae35554cbff7e39ac20
|
[
"MIT"
] | null | null | null |
source_code/trans.py
|
shinyfe74/EN_KOR_translator
|
910e6924b2b7b27a6e111ae35554cbff7e39ac20
|
[
"MIT"
] | null | null | null |
from tkinter import *
from tkinter import ttk
import numpy as np
from PIL import ImageGrab
from PIL import Image
from pytesseract import *
import re
import cv2
from googletrans import Translator as google_translator
from pypapago import Translator as papago_translator
from kakaotrans import Translator as kakao_translator
pytesseract.tesseract_cmd = 'C:/Program Files/Tesseract-OCR/tesseract.exe'
form = Tk()
form.geometry("300x250")
form.title("영한 번역기")
# Color settings
Blue = (255, 0, 0)
Green = (0, 255, 0)
Red = (0, 0, 255)
White = (255, 255, 255)
Black = (0, 0, 0)
point1 = (0, 0)
point2 = (0, 0)
click1 = False
translator_combo_Label = Label(form, text="--------------------번역기 선택---------------------")
translator_combo_Label_Var = StringVar()
translator_combo = ttk.Combobox(form, width=10, textvariable=translator_combo_Label_Var)
translator_combo['values'] = ('구글', '파파고','카카오')
translator_combo.set("구글")
translator_combo.current(0)
def resultform():
global point1, point2, t_start, translator
img = ImageGrab.grab()
img2 = np.array(img)
if translator_combo_Label_Var.get() == '카카오':
translator = kakao_translator()
elif translator_combo_Label_Var.get() == '파파고':
translator = papago_translator()
else:
translator = google_translator()
resultbox = Tk()
resultbox.geometry("780x300")
resultbox.title("번역 결과")
# resultbox.wm_attributes("-transparentcolor", "white")
left = Frame(resultbox)
t_start = Text(left, height=20, width=68,
font=("arial", 15))
t_start.pack(side=LEFT, fill=Y)
s_start = Scrollbar(left)
t_start.insert(INSERT, "F2를 눌러서 번역영역을 선택해주세요.")
s_start.pack(side=RIGHT, fill=Y)
s_start.config(command=t_start.yview)
t_start.config(yscrollcommand=s_start.set)
left.pack(side=LEFT, fill=Y)
def exit(event):
resultbox.destroy()
resultbox.bind("<Escape>", exit)
resultbox.bind("<F2>", translate)
resultbox.mainloop()
def translate(event):
global point1, point2, t_start
img = ImageGrab.grab()
img2 = np.array(img)
t_start.delete(1.0, 'end')
tesserect_lang = 'eng'
def click(event, x, y, flags, params):
global click1, point1, point2
if event == cv2.EVENT_LBUTTONDOWN:
            # if mousedown, store the x,y position of the mouse
click1 = True
point1 = (x, y)
elif event == cv2.EVENT_MOUSEMOVE and click1:
# when dragging pressed, draw rectangle in image
img_copy = img2.copy()
cv2.rectangle(img_copy, point1, (x, y), (0, 0, 255), 2)
cv2.imshow("Image", img_copy)
elif event == cv2.EVENT_LBUTTONUP:
            # on mouseUp, create subimage
click1 = False
if (x <= point1[0]) & (y <= point1[1]) :
point2 = point1
point1 = (x, y)
elif (x <= point1[0]) & (y > point1[1]):
point2 = (point1[0], y)
point1 = (x, point1[1])
elif (x > point1[0]) & (y <= point1[1]):
point2 = (x, point1[1])
point1 = (point1[0], y)
else:
point2 = (x,y)
sub_img = img2[point1[1]:point2[1], point1[0]:point2[0]]
cv2.imshow("subimg", sub_img)
setup_try = False
cv2.destroyAllWindows()
cv2.namedWindow("Image", cv2.WINDOW_NORMAL)
cv2.setMouseCallback("Image", click)
cv2.imshow("Image", img2)
cv2.waitKey(0)
cv2.destroyAllWindows()
    # Translation section
source_image = img2[point1[1]:point2[1], point1[0]:point2[0]]
source = cv2.resize(
source_image, (int((point2[0]-point1[0])*1.5), int((point2[1]-point1[1])*1.5)))
translate_result = []
temp = pytesseract.image_to_string(source, lang=tesserect_lang)
text = ' '.join(temp.splitlines())
if text.startswith('"'):
text = text[1:]
source_text = open('source.txt', 'w', encoding='utf-8')
source_text.write(text)
source_text.close()
result = None
try:
with open('source.txt', encoding='utf-8') as f:
line = f.readline()
forTranslateString = line
if translator_combo_Label_Var.get() == '카카오':
result = translator.translate(
forTranslateString, src='en', tgt='kr')
elif translator_combo_Label_Var.get() == '파파고':
result = translator.translate(
forTranslateString, source='en', target='ko')
else:
print(translator.translate(
forTranslateString, src='en', dest='ko'))
result = translator.translate(
forTranslateString, src='en', dest='ko').text
except TypeError:
result = '번역불가'
pass
except KeyError:
result = '번역불가'
pass
t_start.insert(INSERT, result)
btn_trans = Button(form, text="번역 시작",
command=resultform, width=30)
btn_end = Button(form, text="번역기 프로그램 종료", command=form.destroy, width=30)
btn_trans.grid(row=0, columnspan=3, padx=30, pady=20)
btn_end.grid(row=1, columnspan=3, padx=30, pady=5)
translator_combo_Label.grid(row=4, columnspan=5)
translator_combo.grid(row=5, column=1, padx=5)
Manual_Label = Label(
form, text="F2 영역 선택 / ESC or 닫기 번역 중지")
Manual_Label.grid(row=6, columnspan=3, padx=30, pady=10)
Maker_Label = Label(
form, text="---------만든사람 : tobeptcoder------------")
Maker_Label.grid(row=7, columnspan=3, padx=30, pady=5)
Email_Label = Label(
form, text="------------tobeptcoder@gmail.com------------")
Email_Label.grid(row=8, columnspan=3, padx=30, pady=5)
form.mainloop()
| 28.477833
| 95
| 0.59488
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 876
| 0.146758
|
a2ab67ac5edaa66589f9eee8088e666122ba3bce
| 7,869
|
py
|
Python
|
src/data/data_processing.py
|
ChrisPedder/Medieval_Manuscripts
|
40bfcf9c273385cfd8aa66e63b2fb80078fef33b
|
[
"MIT"
] | null | null | null |
src/data/data_processing.py
|
ChrisPedder/Medieval_Manuscripts
|
40bfcf9c273385cfd8aa66e63b2fb80078fef33b
|
[
"MIT"
] | 5
|
2020-12-28T15:28:35.000Z
|
2022-02-10T03:26:44.000Z
|
src/data/data_processing.py
|
ChrisPedder/Medieval_Manuscripts
|
40bfcf9c273385cfd8aa66e63b2fb80078fef33b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Sep 3 17:20:06 2018
@author: chrispedder
A routine to crop sections from the images of different manuscripts in the two
datasets to the same size, and with the same magnification, so that the average
script size doesn't create a feature that the neural networks can learn.
Reading the data description of the CLaMM dataset, we find that the images
are 150mm*100mm, so we need to take similar-sized crops from our new target
data. Looking at the bar on the left, we find that 6000px =(341-47) = 294mm
So 1mm = 20.41px. We therefore need to crop 3062 * 2041px from the original
However, to not give away too much, we need to make this crop a little
random. Looking at the test images, 1) Their heights vary by around 100px
AFTER downsampling, so around 170px BEFORE downsampling. 2) Their widths
vary by proportionately less, around 65px AFTER, so 110px BEFORE.
We define a crop function below which achieves precisely this.
To run this routine, call something like `python -m src.data.data_processing
--thow_input_path data/raw/MS157/ --thow_output_path data/external/thow_out
--clamm_input_path data/raw/ICDAR2017_CLaMM_Training/
--clamm_output_path data/external/clamm_out`
The four command line args given here are all required.
"""
import numpy as np
import scipy.io
import random
import scipy.ndimage
import glob
import os
import argparse
from PIL import Image
from random import randint
from typing import List
# helper function to clean up file list for scraped THoW filenames
def clean_THoW_file_list(file_list: List):
# clean out folio views etc, whose filenames start with a letter
# rather than a number
cleaned_THoW_list = [element for element in file_list if not
element[-5].isalpha()]
return cleaned_THoW_list
class ImageProcessor(object):
def __init__(self, args):
# Crop images from point CORNER, to size given by DIM
self.CORNER = [1000,1400]
self.DIM = [3062,2041]
# To match the training data, we need to downsample images by a
# factor of 1.7
self.SCALE = 1.7
# Set size of training tiles here (we could pick 224*224 to match the
# expected input size of VGG16 here too)
self.IM_HEIGHT = 300
self.IM_WIDTH = 300
# Set random seed to get the same train-test split when run
self.SEED = 42
random.seed(self.SEED)
self.args = args
def read_raw_from_dir(self, filename):
"""
Define function to read bytes directly from tar by filename.
"""
x = Image.open(filename)
x = x.convert('L')
# makes it greyscale - CLaMM data is already grayscale
y = np.asarray(x.getdata(), dtype='uint8')
return y.reshape((x.size[1], x.size[0]))
def image_oc_crop(self, img):
"""
Makes a crop of an img, with coordinates of the top left corner
top_left_pt and of side lengths "dimensions" using numpy slicing.
"""
lh, lw = self.CORNER
dim_x, dim_y = self.DIM
cropped_img = img[lh:lh+dim_x,lw:lw+dim_y]
return cropped_img
def resample_image(self, img):
"""
Resample scraped images to make them a similar number of pixels to
CLaMM dataset images.
"""
# retain a single image channel, use cubic splines for resampling
resampled = scipy.ndimage.zoom(img, 1/self.SCALE, order=3)
output = resampled.astype('uint8')
return output
def prepare_raw_bytes_for_model(self, input_path):
"""
Put everything together into one function to read, crop & scale data
"""
input_image = self.read_raw_from_dir(input_path)
cropped_input = self.image_oc_crop(input_image)
img = self.resample_image(cropped_input)
return img
def tile_crop(self, array):
"""
function to crop tile_height by tile_width sections from the original
cropped files.
"""
array_height, array_width = array.shape
height_tiles_number = array_height//self.IM_HEIGHT
width_tiles_number = array_width//self.IM_WIDTH
tile_list = []
for i in range(height_tiles_number):
for j in range(width_tiles_number):
new_tile = array[i * self.IM_HEIGHT: (i + 1) * self.IM_HEIGHT,
j * self.IM_WIDTH: (j + 1)* self.IM_WIDTH]
tile_list.append(new_tile)
return tile_list
def write_input_data_to_jpg(self, input_path, output_path, THOW=False):
"""
Read files, process and write out processed files to an external folder,
defined by the argparse args
"""
counter = 0
file_suffix = '*.jpg' if THOW else '*.tif'
file_name = 'THOW' if THOW else 'CLaMM'
# get list of files in the raw data directory
input_files_list = sorted(glob.glob(input_path + file_suffix))
if THOW:
input_files_list = clean_THoW_file_list(input_files_list)
else:
input_files_list = input_files_list[:500]
#check output directory exists, if not create it
if not os.path.exists(output_path):
os.mkdir(output_path)
for element in input_files_list:
image = self.prepare_raw_bytes_for_model(element)
new_tile_list = self.tile_crop(image)
for i, tile in enumerate(new_tile_list):
# define file names for training example
tile_file_name = os.path.join(
output_path,
file_name + str(counter + i) + ".jpg")
# write three copies of the grayscale image to three separate
# layers as the VGG16 net expects an RGB input
tensorized = np.dstack([tile] * 3)
# create image from tensorized array
im = Image.fromarray(tensorized)
# save to path specified in arguments
im.save(tile_file_name)
print(
"Tile with name {} written to disk".format(tile_file_name))
counter += len(new_tile_list)
print("So far {} files written".format(counter))
print("File writing completed")
def process_all_files(self):
print(f'Reading data from {self.args.thow_input_path}, writing to\
{self.args.thow_output_path}')
self.write_input_data_to_jpg(self.args.thow_input_path,
self.args.thow_output_path,
THOW=True)
print(f'Reading data from {self.args.clamm_input_path}, writing to\
{self.args.clamm_output_path}')
self.write_input_data_to_jpg(self.args.clamm_input_path,
self.args.clamm_output_path)
print('All files processed and written to file')
def parse_args():
parser = argparse.ArgumentParser(description='Command line options for '
'processing the data files needed to train the model.')
parser.add_argument('--thow_input_path', type=str, required=True,
help='give the path to the THOW raw files')
parser.add_argument('--thow_output_path', type=str, required=True,
help='path to where we should write the processed THOW tile files')
parser.add_argument('--clamm_input_path', type=str, required=True,
help='give the path to the CLaMM raw files')
parser.add_argument('--clamm_output_path', type=str, required=True,
help='path to where we should write the processed CLaMM tile files')
args = parser.parse_args()
return args
if __name__ == '__main__':
args = parse_args()
processor = ImageProcessor(args)
processor.process_all_files()
| 38.385366
| 80
| 0.648367
| 5,155
| 0.655102
| 0
| 0
| 0
| 0
| 0
| 0
| 3,644
| 0.463083
|
a2ac4d61989a683d4c9f7b828fb2128fcf9a33a2
| 7,934
|
py
|
Python
|
ivy/container/gradients.py
|
Aarsh2001/ivy
|
827164d7d31bd08c5287bbd1ac9ccce588b733bc
|
[
"Apache-2.0"
] | null | null | null |
ivy/container/gradients.py
|
Aarsh2001/ivy
|
827164d7d31bd08c5287bbd1ac9ccce588b733bc
|
[
"Apache-2.0"
] | null | null | null |
ivy/container/gradients.py
|
Aarsh2001/ivy
|
827164d7d31bd08c5287bbd1ac9ccce588b733bc
|
[
"Apache-2.0"
] | null | null | null |
from typing import Optional, Union, List, Dict
# local
import ivy
from ivy.container.base import ContainerBase
# noinspection PyMissingConstructor
class ContainerWithGradients(ContainerBase):
@staticmethod
def static_optimizer_update(
w,
effective_grad,
lr,
inplace=None,
stop_gradients=True,
key_chains: Optional[Union[List[str], Dict[str, str]]] = None,
to_apply: bool = True,
prune_unapplied: bool = False,
map_sequences: bool = False,
) -> ivy.Container:
return ContainerBase.multi_map_in_static_method(
"optimizer_update",
w,
effective_grad,
lr,
inplace=inplace,
stop_gradients=stop_gradients,
key_chains=key_chains,
to_apply=to_apply,
prune_unapplied=prune_unapplied,
map_sequences=map_sequences,
)
def optimizer_update(
self: ivy.Container,
effective_grad,
lr,
inplace=None,
stop_gradients=True,
key_chains: Optional[Union[List[str], Dict[str, str]]] = None,
to_apply: bool = True,
prune_unapplied: bool = False,
map_sequences: bool = False,
) -> ivy.Container:
return self.static_optimizer_update(
self,
effective_grad,
lr,
inplace,
stop_gradients,
key_chains,
to_apply,
prune_unapplied,
map_sequences,
)
@staticmethod
def static_gradient_descent_update(
w,
dcdw,
lr,
inplace=None,
stop_gradients=True,
key_chains: Optional[Union[List[str], Dict[str, str]]] = None,
to_apply: bool = True,
prune_unapplied: bool = False,
map_sequences: bool = False,
) -> ivy.Container:
return ContainerBase.multi_map_in_static_method(
"gradient_descent_update",
w,
dcdw,
lr,
inplace=inplace,
stop_gradients=stop_gradients,
key_chains=key_chains,
to_apply=to_apply,
prune_unapplied=prune_unapplied,
map_sequences=map_sequences,
)
def gradient_descent_update(
self,
dcdw,
lr,
inplace=None,
stop_gradients=True,
key_chains: Optional[Union[List[str], Dict[str, str]]] = None,
to_apply: bool = True,
prune_unapplied: bool = False,
map_sequences: bool = False,
):
return self.static_gradient_descent_update(
self,
dcdw,
lr,
inplace,
stop_gradients,
key_chains,
to_apply,
prune_unapplied,
map_sequences,
)
@staticmethod
def static_lars_update(
w,
dcdw,
lr,
decay_lambda=0,
inplace=None,
stop_gradients=True,
key_chains: Optional[Union[List[str], Dict[str, str]]] = None,
to_apply: bool = True,
prune_unapplied: bool = False,
map_sequences: bool = False,
) -> ivy.Container:
return ContainerBase.multi_map_in_static_method(
"lars_update",
w,
dcdw,
lr,
decay_lambda=decay_lambda,
inplace=inplace,
stop_gradients=stop_gradients,
key_chains=key_chains,
to_apply=to_apply,
prune_unapplied=prune_unapplied,
map_sequences=map_sequences,
)
def lars_update(
self,
dcdw,
lr,
decay_lambda=0,
inplace=None,
stop_gradients=True,
key_chains: Optional[Union[List[str], Dict[str, str]]] = None,
to_apply: bool = True,
prune_unapplied: bool = False,
map_sequences: bool = False,
):
return self.static_lars_update(
self,
dcdw,
lr,
decay_lambda,
inplace,
stop_gradients,
key_chains,
to_apply,
prune_unapplied,
map_sequences,
)
@staticmethod
def static_adam_update(
w,
dcdw,
lr,
mw_tm1,
vw_tm1,
step,
beta1=0.9,
beta2=0.999,
epsilon=1e-7,
inplace=None,
stop_gradients=True,
key_chains: Optional[Union[List[str], Dict[str, str]]] = None,
to_apply: bool = True,
prune_unapplied: bool = False,
map_sequences: bool = False,
) -> ivy.Container:
return ContainerBase.multi_map_in_static_method(
"adam_update",
w,
dcdw,
lr,
mw_tm1=mw_tm1,
vw_tm1=vw_tm1,
step=step,
beta1=beta1,
beta2=beta2,
epsilon=epsilon,
inplace=inplace,
stop_gradients=stop_gradients,
key_chains=key_chains,
to_apply=to_apply,
prune_unapplied=prune_unapplied,
map_sequences=map_sequences,
)
def adam_update(
self,
dcdw,
lr,
mw_tm1,
vw_tm1,
step,
beta1=0.9,
beta2=0.999,
epsilon=1e-7,
inplace=None,
stop_gradients=True,
key_chains: Optional[Union[List[str], Dict[str, str]]] = None,
to_apply: bool = True,
prune_unapplied: bool = False,
map_sequences: bool = False,
):
return self.static_adam_update(
self,
dcdw,
lr,
mw_tm1,
vw_tm1,
step,
beta1,
beta2,
epsilon,
inplace,
stop_gradients,
key_chains,
to_apply,
prune_unapplied,
map_sequences,
)
@staticmethod
def static_lamb_update(
w,
dcdw,
lr,
mw_tm1,
vw_tm1,
step,
beta1=0.9,
beta2=0.999,
epsilon=1e-7,
max_trust_ratio=10,
decay_lambda=0,
inplace=None,
stop_gradients=True,
key_chains: Optional[Union[List[str], Dict[str, str]]] = None,
to_apply: bool = True,
prune_unapplied: bool = False,
map_sequences: bool = False,
) -> ivy.Container:
return ContainerBase.multi_map_in_static_method(
"lamb_update",
w,
dcdw,
lr,
mw_tm1=mw_tm1,
vw_tm1=vw_tm1,
step=step,
beta1=beta1,
beta2=beta2,
epsilon=epsilon,
max_trust_ratio=max_trust_ratio,
decay_lambda=0,
inplace=inplace,
stop_gradients=stop_gradients,
key_chains=key_chains,
to_apply=to_apply,
prune_unapplied=prune_unapplied,
map_sequences=map_sequences,
)
def lamb_update(
self,
dcdw,
lr,
mw_tm1,
vw_tm1,
step,
beta1=0.9,
beta2=0.999,
epsilon=1e-7,
max_trust_ratio=10,
decay_lambda=0,
inplace=None,
stop_gradients=True,
key_chains: Optional[Union[List[str], Dict[str, str]]] = None,
to_apply: bool = True,
prune_unapplied: bool = False,
map_sequences: bool = False,
):
return self.static_lamb_update(
self,
dcdw,
lr,
mw_tm1,
vw_tm1,
step,
beta1,
beta2,
epsilon,
max_trust_ratio,
decay_lambda,
inplace,
stop_gradients,
key_chains,
to_apply,
prune_unapplied,
map_sequences,
)
| 25.511254
| 70
| 0.515755
| 7,783
| 0.980968
| 0
| 0
| 4,262
| 0.537182
| 0
| 0
| 124
| 0.015629
|
a2adbf90bc22cca044acdd78bea2c9355ce557e4
| 2,848
|
py
|
Python
|
desktop/core/ext-py/Mako-1.0.7/test/test_cmd.py
|
kokosing/hue
|
2307f5379a35aae9be871e836432e6f45138b3d9
|
[
"Apache-2.0"
] | 5,079
|
2015-01-01T03:39:46.000Z
|
2022-03-31T07:38:22.000Z
|
desktop/core/ext-py/Mako-1.0.7/test/test_cmd.py
|
zks888/hue
|
93a8c370713e70b216c428caa2f75185ef809deb
|
[
"Apache-2.0"
] | 1,623
|
2015-01-01T08:06:24.000Z
|
2022-03-30T19:48:52.000Z
|
desktop/core/ext-py/Mako-1.0.7/test/test_cmd.py
|
zks888/hue
|
93a8c370713e70b216c428caa2f75185ef809deb
|
[
"Apache-2.0"
] | 2,033
|
2015-01-04T07:18:02.000Z
|
2022-03-28T19:55:47.000Z
|
from __future__ import with_statement
from contextlib import contextmanager
from test import TemplateTest, eq_, raises, template_base, mock
import os
from mako.cmd import cmdline
class CmdTest(TemplateTest):
@contextmanager
def _capture_output_fixture(self, stream="stdout"):
with mock.patch("sys.%s" % stream) as stdout:
yield stdout
def test_stdin_success(self):
with self._capture_output_fixture() as stdout:
with mock.patch("sys.stdin", mock.Mock(
read=mock.Mock(return_value="hello world ${x}"))):
cmdline(["--var", "x=5", "-"])
eq_(stdout.write.mock_calls[0][1][0], "hello world 5")
def test_stdin_syntax_err(self):
with mock.patch("sys.stdin", mock.Mock(
read=mock.Mock(return_value="${x"))):
with self._capture_output_fixture("stderr") as stderr:
with raises(SystemExit):
cmdline(["--var", "x=5", "-"])
assert "SyntaxException: Expected" in \
stderr.write.mock_calls[0][1][0]
assert "Traceback" in stderr.write.mock_calls[0][1][0]
def test_stdin_rt_err(self):
with mock.patch("sys.stdin", mock.Mock(
read=mock.Mock(return_value="${q}"))):
with self._capture_output_fixture("stderr") as stderr:
with raises(SystemExit):
cmdline(["--var", "x=5", "-"])
assert "NameError: Undefined" in stderr.write.mock_calls[0][1][0]
assert "Traceback" in stderr.write.mock_calls[0][1][0]
def test_file_success(self):
with self._capture_output_fixture() as stdout:
cmdline(["--var", "x=5",
os.path.join(template_base, "cmd_good.mako")])
eq_(stdout.write.mock_calls[0][1][0], "hello world 5")
def test_file_syntax_err(self):
with self._capture_output_fixture("stderr") as stderr:
with raises(SystemExit):
cmdline(["--var", "x=5",
os.path.join(template_base, "cmd_syntax.mako")])
assert "SyntaxException: Expected" in stderr.write.mock_calls[0][1][0]
assert "Traceback" in stderr.write.mock_calls[0][1][0]
def test_file_rt_err(self):
with self._capture_output_fixture("stderr") as stderr:
with raises(SystemExit):
cmdline(["--var", "x=5",
os.path.join(template_base, "cmd_runtime.mako")])
assert "NameError: Undefined" in stderr.write.mock_calls[0][1][0]
assert "Traceback" in stderr.write.mock_calls[0][1][0]
def test_file_notfound(self):
with raises(SystemExit, "error: can't find fake.lalala"):
cmdline(["--var", "x=5", "fake.lalala"])
| 39.013699
| 78
| 0.581812
| 2,666
| 0.936096
| 130
| 0.045646
| 150
| 0.052669
| 0
| 0
| 469
| 0.164677
|
a2b000534f69d5e5c990ba8c2baa88de9b69fc99
| 1,920
|
py
|
Python
|
corefacility/core/models/module.py
|
serik1987/corefacility
|
78d84e19403361e83ef562e738473849f9133bef
|
[
"RSA-MD"
] | null | null | null |
corefacility/core/models/module.py
|
serik1987/corefacility
|
78d84e19403361e83ef562e738473849f9133bef
|
[
"RSA-MD"
] | null | null | null |
corefacility/core/models/module.py
|
serik1987/corefacility
|
78d84e19403361e83ef562e738473849f9133bef
|
[
"RSA-MD"
] | null | null | null |
import uuid
from django.db import models
class Module(models.Model):
"""
Defines the information related to the application module that is stored to the
database, not as a Django application
"""
uuid = models.UUIDField(db_index=True, editable=False, primary_key=True, default=uuid.uuid4,
help_text="The UUID provides a quick access to the application during the routing")
parent_entry_point = models.ForeignKey("EntryPoint", null=True, on_delete=models.RESTRICT,
related_name="modules", editable=False,
help_text="List of all modules connected to this entry point")
alias = models.SlugField(editable=False,
help_text="A short name that can be used to identify the module in the app")
name = models.CharField(max_length=128, editable=False, db_index=True,
help_text="The name through which the module is visible in the system")
html_code = models.TextField(null=True, editable=False,
help_text="When the module is visible on the frontend as widget, this field relates"
"to the module HTML code to show")
app_class = models.CharField(max_length=1024, editable=False,
help_text="The python class connected to the module")
user_settings = models.JSONField(help_text="Settings defined by the user and stored in the JSON format")
is_application = models.BooleanField(default=True, editable=False,
help_text="True if the module is application")
is_enabled = models.BooleanField(default=True,
help_text="True if the module has switched on")
class Meta:
unique_together = ["alias", "parent_entry_point"]
| 60
| 117
| 0.621354
| 1,876
| 0.977083
| 0
| 0
| 0
| 0
| 0
| 0
| 713
| 0.371354
|
a2b0828f0ce39bb552f2d2231688d2adacf5b85e
| 1,986
|
py
|
Python
|
sphinx-sources/Examples/Commands/LensFresnel_Convert.py
|
jccmak/lightpipes
|
1a296fe08bdd97fc9a0e11f92bab25c85f68e57d
|
[
"BSD-3-Clause"
] | 132
|
2017-03-15T15:28:46.000Z
|
2022-03-09T00:28:25.000Z
|
sphinx-sources/Examples/Commands/LensFresnel_Convert.py
|
jccmak/lightpipes
|
1a296fe08bdd97fc9a0e11f92bab25c85f68e57d
|
[
"BSD-3-Clause"
] | 63
|
2017-01-26T15:46:55.000Z
|
2022-01-25T04:50:59.000Z
|
sphinx-sources/Examples/Commands/LensFresnel_Convert.py
|
jccmak/lightpipes
|
1a296fe08bdd97fc9a0e11f92bab25c85f68e57d
|
[
"BSD-3-Clause"
] | 37
|
2017-02-17T16:11:38.000Z
|
2022-01-25T18:03:47.000Z
|
from LightPipes import *
import matplotlib.pyplot as plt
def TheExample(N):
fig=plt.figure(figsize=(11,9.5))
ax1 = fig.add_subplot(221)
ax2 = fig.add_subplot(222)
ax3 = fig.add_subplot(223)
ax4 = fig.add_subplot(224)
labda=1000*nm;
size=10*mm;
f1=10*m
f2=1.11111111*m
z=1.0*m
w=5*mm;
F=Begin(size,labda,N);
F=RectAperture(w,w,0,0,0,F);
#1) Using Lens and Fresnel:
F1=Lens(z,0,0,F)
F1=Fresnel(z,F1)
phi1=Phase(F1);phi1=PhaseUnwrap(phi1)
I1=Intensity(0,F1);
x1=[]
for i in range(N):
x1.append((-size/2+i*size/N)/mm)
#2) Using Lens + LensFresnel and Convert:
F2=Lens(f1,0,0,F);
F2=LensFresnel(f2,z,F2);
F2=Convert(F2);
phi2=Phase(F2);phi2=PhaseUnwrap(phi2)
I2=Intensity(0,F2);
x2=[]
newsize=size/10
for i in range(N):
x2.append((-newsize/2+i*newsize/N)/mm)
ax1.plot(x1,phi1[int(N/2)],'k--',label='Lens + Fresnel')
ax1.plot(x2,phi2[int(N/2)],'k',label='LensFresnel + Convert');
ax1.set_xlim(-newsize/2/mm,newsize/2/mm)
ax1.set_ylim(-2,4)
ax1.set_xlabel('x [mm]');
ax1.set_ylabel('phase [rad]');
ax1.set_title('phase, N = %d' %N)
legend = ax1.legend(loc='upper center', shadow=True)
ax2.plot(x1,I1[int(N/2)],'k--',label='Lens+Fresnel')
ax2.plot(x2,I2[int(N/2)], 'k',label='LensFresnel + Convert');
ax2.set_xlim(-newsize/2/mm,newsize/2/mm)
ax2.set_ylim(0,1000)
ax2.set_xlabel('x [mm]');
ax2.set_ylabel('Intensity [a.u.]');
ax2.set_title('intensity, N = %d' %N)
legend = ax2.legend(loc='upper center', shadow=True)
ax3.imshow(I1);ax3.axis('off');ax3.set_title('Intensity, Lens + Fresnel, N = %d' %N)
ax3.set_xlim(int(N/2)-N/20,int(N/2)+N/20)
ax3.set_ylim(int(N/2)-N/20,int(N/2)+N/20)
ax4.imshow(I2);ax4.axis('off');ax4.set_title('Intensity, LensFresnel + Convert, N = %d' %N)
plt.show()
TheExample(100) #100 x 100 grid
TheExample(1000) #1000 x 1000 grid
| 29.641791
| 95
| 0.606244
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 388
| 0.195368
|
a2b0a81878fe2e8c89e23970f4f8db084dca00c6
| 598
|
py
|
Python
|
delira/models/backends/chainer/__init__.py
|
gedoensmax/delira
|
545e2ccbe56ed382d300cf3d00317e9a0e3ab5f6
|
[
"BSD-2-Clause"
] | 1
|
2019-10-03T21:00:20.000Z
|
2019-10-03T21:00:20.000Z
|
delira/models/backends/chainer/__init__.py
|
gedoensmax/delira
|
545e2ccbe56ed382d300cf3d00317e9a0e3ab5f6
|
[
"BSD-2-Clause"
] | null | null | null |
delira/models/backends/chainer/__init__.py
|
gedoensmax/delira
|
545e2ccbe56ed382d300cf3d00317e9a0e3ab5f6
|
[
"BSD-2-Clause"
] | null | null | null |
from delira import get_backends as _get_backends
if "CHAINER" in _get_backends():
from delira.models.backends.chainer.abstract_network import \
AbstractChainerNetwork
from delira.models.backends.chainer.data_parallel import \
DataParallelChainerNetwork
from delira.models.backends.chainer.data_parallel import \
DataParallelChainerOptimizer
from delira.models.backends.chainer.data_parallel import \
ParallelOptimizerUpdateModelParameters
from delira.models.backends.chainer.data_parallel import \
ParallelOptimizerCumulateGradientsHook
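# A hedged sketch of the same guarded-import pattern from a caller's point of
# view; it relies only on names that appear in this file.
def _example_backend_check():  # illustrative only
    from delira import get_backends
    if "CHAINER" in get_backends():
        # The import can only succeed when the chainer backend is installed.
        from delira.models.backends.chainer import AbstractChainerNetwork
        return AbstractChainerNetwork
    return None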
| 42.714286
| 65
| 0.789298
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 9
| 0.01505
|
a2b0f8e387546b569d1ef99efb66f95e406a0935
| 1,071
|
py
|
Python
|
src/deepproblog/examples/Forth/Add/data/extract.py
|
vossenwout/gtadeepproblog
|
65509b740518af422b96e84ef10716e0ac246e75
|
[
"Apache-2.0"
] | 54
|
2021-06-23T08:03:23.000Z
|
2022-03-10T01:02:43.000Z
|
src/deepproblog/examples/Forth/Add/data/extract.py
|
Damzwan/deepproblog
|
56bcf5208e79c17510b5d288068fabc6cd64f3cf
|
[
"Apache-2.0"
] | 2
|
2021-06-30T23:48:25.000Z
|
2022-03-18T10:45:05.000Z
|
src/deepproblog/examples/Forth/Add/data/extract.py
|
Damzwan/deepproblog
|
56bcf5208e79c17510b5d288068fabc6cd64f3cf
|
[
"Apache-2.0"
] | 12
|
2021-06-30T10:47:52.000Z
|
2022-03-09T23:51:48.000Z
|
import re
for train in [2, 4, 8]:
for test in [8, 64]:
for mode in ["train", "test", "dev"]:
with open("train{}_test{}/{}.txt".format(train, test, mode)) as f:
text = f.read()
matches = re.findall("\[([0-9 ]*)\]\t\[([0-9 ]*) (\d) (\d*)\]", text)
text = list()
for match in matches:
res = match[0].strip().split(" ")
digits = match[1].strip().split(" ")
carry = [match[2]]
length = int(match[3])
digit1 = list()
digit2 = list()
for i in range(0, len(digits), 2):
digit1.append(digits[i])
digit2.append(digits[i + 1])
text.append(
"add([{}],[{}],{},[{}]).".format(
*[",".join(l) for l in [digit1, digit2, carry, res]]
)
)
with open("train{}_test{}_{}.txt".format(train, test, mode), "w") as f:
f.write("\n".join(text))
| 38.25
| 83
| 0.388422
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 146
| 0.136321
|
a2b26dec93877fc20d8f5328e080c0557abecb6c
| 16,519
|
py
|
Python
|
app/location/crawler.py
|
maro99/yapen
|
0de7aa9d4b152aadd18511be6e536e89645452d9
|
[
"MIT"
] | 1
|
2019-04-28T12:21:51.000Z
|
2019-04-28T12:21:51.000Z
|
app/location/crawler.py
|
maro99/yapen
|
0de7aa9d4b152aadd18511be6e536e89645452d9
|
[
"MIT"
] | 5
|
2018-07-30T05:44:44.000Z
|
2020-06-05T18:56:41.000Z
|
app/location/crawler.py
|
maro99/yapen
|
0de7aa9d4b152aadd18511be6e536e89645452d9
|
[
"MIT"
] | 5
|
2018-07-23T05:21:41.000Z
|
2018-08-08T05:00:42.000Z
|
import re
import requests
from bs4 import BeautifulSoup
import time
from urllib import parse
from selenium import webdriver
from location.models import Pension, RoomImage, PensionImage, Room, Location, SubLocation
# If the price string does not match the '100,000원' pattern, return 0; otherwise strip the commas and return it as an int.
def get_int_only(string):
a= re.findall( '(\d*,\d*)원.*',string)
if len(a)==0:
result= 0
else:
result = int(re.sub(',','',a[0]))
return result
def room_crawler(soup,room_num,url,pension,count_sec_after_popup,count_sec_after_click,room_picture_url_num):
############################################################
    # Use Selenium to pull the Room model fields and the images that fill the RoomImage model.
    # Start by extracting the room names, which are reused as the button labels.
room_name_list = []
image_table = soup.select('div.roomImageLists')
image_table_lis = image_table[0].select('li')
for index, image_table_li in enumerate(image_table_lis):
if 0 < index < room_num + 1:
room_name_list.append(image_table_li.get_text())
    # Open the page with Selenium.
chromedriver_dir = '/home/nasanmaro/Desktop/projects/yapen/test/selenium_crawling_test/chromedriver'
driver = webdriver.Chrome(chromedriver_dir)
driver.get(url)
time.sleep(count_sec_after_popup)
    # Click the button for each room.
image_table = driver.find_element_by_class_name('roomImageLists')
for room_name_text in room_name_list:
        name = room_name_text  # room name
room_name_button = image_table.find_elements_by_xpath('//li[contains(text(), "{0}") and @class="roomLists"]'.format(room_name_text))
room_name_button[0].click()
        time.sleep(count_sec_after_click)  # pause long enough after the click for the photos to load
        # Grab the page source from the driver while the current room's menu is open.
source = driver.page_source
        # Build a BeautifulSoup object from it.
soup = BeautifulSoup(source, 'lxml')
pensionImagesLists = soup.find(id="pensionImagesLists")
jssorts = pensionImagesLists.select_one('div.jssort07')
jssort = jssorts.select('div')[0]
image_tags = jssort.select('div.p > img')
        # Pre-fill empty defaults so that creating the Room model below cannot fail on missing values.
structure = ""
size = ""
normal_num_poeple = 0
max_num_people = 0
equipments = ""
info = ""
price = 0
extra_charge_head = 0
extra_charge_adult = 0
extra_charge_child = 0
        # Extract the room attributes.
        layer_table_trs = soup.select('div.layerBox > table > tbody > tr')
        # This occasionally raised errors, so skip the block entirely when the selection is empty.
if layer_table_trs:
            td_list1 = layer_table_trs[0].select('td')  # row 1
            structure = td_list1[1].get_text()  # room layout
            size = td_list1[3].get_text(strip=True)  # size
            num_result = re.findall('(\d*)명 / (\d*)명', td_list1[5].get_text())
            normal_num_poeple = int(num_result[0][0])  # standard occupancy
            max_num_people = int(num_result[0][1])  # maximum occupancy
            td_list2 = layer_table_trs[1].select('td')  # row 2
            equipments = td_list2[1].get_text()  # amenities
            td_list3 = layer_table_trs[2].select('td')  # row 3
            info = td_list3[1].get_text()  # room description
            td_list4 = layer_table_trs[3].select(' > td')  # row 4
            ###### Extract the base price.
            # Take only the top-left cell of the price table and store that as the base price.
            price_table_td = td_list4[1].select('table.datePriceTbl > tbody > tr > td')
            # Prices underlined for an extra discount sit inside a p tag, so handle that case separately.
price_p_tag = price_table_td[1].select_one('p')
if price_p_tag == None:
price_p_tag = price_table_td[1]
            # Keep only the digits from a string like '100,000원'.
            price = get_int_only(price_p_tag.get_text(strip=True))  # price
            ###### Extract the extra charges.
            extra_charge_table_trs = td_list4[3].select('table.datePriceTbl > thead > tr')
            extra_charge_head = extra_charge_table_trs[0].get_text(strip=True)  # extra-charge header
extra_charge_adult_str = extra_charge_table_trs[1].select('td')[1].get_text(strip=True)
extra_charge_child_str = extra_charge_table_trs[2].select('td')[1].get_text(strip=True)
extra_charge_baby_str = extra_charge_table_trs[3].select('td')[1].get_text(strip=True)
            # Keep only the digits from a string like '100,000원'.
            extra_charge_adult = get_int_only(extra_charge_adult_str)  # adult
            extra_charge_child = get_int_only(extra_charge_child_str)  # child
            extra_charge_baby = get_int_only(extra_charge_baby_str)  # infant
room,room_created_bool = Room.objects.get_or_create(
pension=pension,
name=name,
structure=structure,
size=size,
normal_num_poeple=normal_num_poeple,
max_num_people=max_num_people,
equipments=equipments,
info=info,
price=price,
extra_charge_head=extra_charge_head,
extra_charge_adult=extra_charge_adult,
extra_charge_child=extra_charge_child,
extra_charge_baby=extra_charge_baby,
)
for index, image_tag in enumerate(image_tags):
image_src = image_tag.get("src") # image_src--------------------->RoomImage객체만들때써라
# print('@@룸 이미지')
# print(image_src)
RoomImage.objects.get_or_create(
room=room,
room_image=image_src,
)
            if index == (room_picture_url_num-1):  # stop once room_picture_url_num images have been collected
break
        # Creating the RoomImage objects (up to three per room) inside this for loop should be enough.
        # print("@@ Room object attributes")
# print(name)
# print(structure)
# print(size)
# print(normal_num_poeple)
# print(max_num_people)
# print(equipments)
# print(info)
# print(price)
#
# print(extra_charge_head)
# print(extra_charge_adult)
# print(extra_charge_child)
# print(extra_charge_baby)
driver.close()
def pension_detail_crawler(sub_location,lowest_price,pension_image_thumbnail,ypidx,discount_rate):
max_room_num = 4
    pension_picture_url_num = 2  # number of pension image URLs to save; must be at least 1
    room_picture_url_num = 2  # number of room image URLs to save; must be at least 1
    count_sec_after_popup = 3  # seconds to sleep after Selenium opens the window
    count_sec_after_click = 2  # seconds to sleep after Selenium clicks each room button
params = {
'ypIdx': ypidx
}
url = "http://www.yapen.co.kr/details?" + parse.urlencode(params)
request = requests.get(url)
response = request.text
soup = BeautifulSoup(response, 'lxml')
    ############## Basic details from the top of the pension_detail page.
# name
name_root = soup.select('div.wrap_1000')
name = name_root[0].select('h3')[0].get_text() # name
# address
table = soup.select('table.pensionTbl')
trs = table[0].select('tr')
tds = trs[0].select('td')
address = tds[0].get_text() # address
result = re.findall('지번 : (.*) ',address)
lat=0
lng=0
tic = 0
    while(lat==0):  # a single request sometimes returns nothing, so retry until a usable value comes back
URL = 'http://maps.googleapis.com/maps/api/geocode/json?sensor=false&language=ko&address={}' \
.format(result)
response = requests.get(URL)
if response:
data = response.json()
            if data.get('results'):  # if results holds anything, assign lat and lng
                lat = data['results'][0]['geometry']['location']['lat']  # latitude
                lng = data['results'][0]['geometry']['location']['lng']  # longitude
tic = tic +1
if tic ==10:
lat = 1
# check_in, check_out
tds2 = trs[1].select('td')
check_in_out = tds2[0].select('span')
check_in = check_in_out[0].get_text() # check_in
check_out = check_in_out[1].get_text() # check_out
# pickup
tds3 = trs[2].select('td')
pickup = tds3[0].get_text(strip=True) # pickup
# room_num
td4 = trs[3].select('td')
number_tags = td4[0].select('span')
room_string = number_tags[0].get_text()
room_num = int(re.search('(\d*)', room_string).group()) # room_num
    # Cap room_num at max_room_num.
if room_num > max_room_num:
room_num = max_room_num
# info
td5 = trs[4].select('td')
infos = td5[0].select('p')
info = ''
for one_info in infos:
info = info + '\n' + one_info.get_text() + '\n' # info
# theme
td6 = trs[5].select('td')
lis = td6[0].select('li')
theme_list = []
for li in lis:
theme_list.append(li.get_text())
    # Store the themes as a single string: 'theme1,theme2,theme3'.
theme = (',').join(theme_list) # theme
    ############### Additional details from the bottom of the pension_detail page.
detailDiv = soup.select('div.detailDiv')[0]
detailsPensionInfoTitle = detailDiv.select('div.detailsPensionInfoTitle')
pension_detail_bellow_dict = dict()
for one_title in detailsPensionInfoTitle:
next_detail_div = one_title.findNext('div')
next_detail = ''
for p in next_detail_div.select('p'):
next_detail = next_detail + p.get_text() + '\n'
        # Not detected when the text is written directly without a p tag.
pension_detail_bellow_dict[one_title.get_text(strip=True)] = next_detail
check_in_out_detail = ""
pickup_detail = ""
gretting = ""
precautions = ""
for key, value in pension_detail_bellow_dict.items():
if key == '입실 / 퇴실시간':
check_in_out_detail = value # check_in_out_detail
elif key == '픽업가능':
pickup_detail = value # pickup_detail
elif key == '펜션소개 및 인사말':
gretting = value # gretting
elif key == '이용 주의사항':
precautions = value # precautions
# print("@@@@PensionObject 속성들")
# print(name)
# print(address)
# print(check_in)
# print(check_out)
# print(room_num)
# print(info)
# print(theme)
#
# print(check_in_out_detail)
# print(pickup_detail)
# print(gretting)
# print(precautions)
pension,pension_created_bool = Pension.objects.get_or_create(
        # Arguments that must be passed in when pension_detail_crawler is called from the full location crawl; they come from location_crawler().
pension_image_thumbnail=pension_image_thumbnail,
lowest_price=lowest_price,
ypidx=ypidx,
sub_location=sub_location,
discount_rate=discount_rate,
        # Attributes crawled inside pension_detail_crawler itself.
name=name,
address=address,
check_in=check_in,
check_out=check_out,
pickup=pickup,
room_num=room_num,
info=info,
theme=theme,
lat=lat,
lng=lng,
check_in_out_detail=check_in_out_detail,
pickup_detail=pickup_detail,
gretting=gretting,
precautions=precautions,
)
    ############## Extract the pension photos to store in PensionImage.
pensionImagesLists = soup.find(id="pensionMainImageSlider")
image_tags = pensionImagesLists.select(" > div img")
for index, image_tag in enumerate(image_tags):
        # The HTML repeats every image twice, so only keep every other one.
        if index % 2 == 0:
            image_src = image_tag.get("src")  # image_src ---> used when creating the PensionImage object
# print('@@@@팬션 이미지')
# print(image_src)
PensionImage.objects.get_or_create(
pension=pension,
pension_image=image_src,
)
if index == (pension_picture_url_num-1)*2: # pension_picture_url_num 장 뽑는 시점에서 break
break
    # Crawl the rooms that belong to this pension and create the Room objects.
room_crawler(soup=soup,
room_num=room_num,
url=url,
pension=pension,
count_sec_after_click=count_sec_after_click,
count_sec_after_popup=count_sec_after_popup,
room_picture_url_num=room_picture_url_num)
# Pulls the five basic fields for every pension listed on a sub-location page.
def sub_location_crawler(location_no, sub_location_no):
params = {
'location': location_no,
'subLocation': sub_location_no,
}
url = "http://www.yapen.co.kr/region?" + parse.urlencode(params)
request = requests.get(url)
response = request.text
soup = BeautifulSoup(response, 'lxml')
title_list = list()
img_file_list = list()
price_list = list()
ypidx_list = list()
discount_rate_list = list()
title_uls = soup.select('ul.dest-place-opt-fea')
for ul in title_uls:
li = ul.select('li')
title_list.append(li[1].get_text())
price_uls = soup.select('ul.dest-place-opt-cast')
for ul in price_uls:
li = ul.select('li')
        price_list.append(get_int_only(li[1].get_text()))  # keep only the digits from strings like '370,000원~'
img_file_divs = soup.select('div.imgBox')
for div in img_file_divs:
img_file_list.append(div.select('img')[0]['src'])
list1 = re.split('/', div.select('img')[0]['src'])
ypidx_list.append(int(list1[5]))
dest_place_pics = soup.select('div.dest-place-pic')
for dest_place_pic in dest_place_pics:
        # dest_place_pic holds one or two divs; when a discount_rate is present there are two divs
        # and the element length is 5, which is what distinguishes the two cases.
if (len(dest_place_pic) == 5):
discount_rate_string = dest_place_pic.select('div')[0].get_text(strip=True)
            # Strip the % sign with a regex.
discount_rate_int = int(re.search('(\d*)', discount_rate_string).group())
discount_rate_list.append(discount_rate_int)
else:
discount_rate_list.append(0)
sub_locations_info_list = [title_list, price_list, img_file_list, ypidx_list, discount_rate_list]
return sub_locations_info_list
# Builds the list of location names.
def location_crawler():
request = requests.get("http://www.yapen.co.kr")
response = request.text
soup = BeautifulSoup(response, 'lxml')
left_menu = soup.select('div.locLayer')
    # Of the 14 regions, keep only 7, excluding the pool-villa and MD-recommendation menus.
selected_left_menu = left_menu[2:4]
for selected_location in selected_left_menu:
        # Extract the region name first.
        location_name = selected_location.select('div.titleStyle')[0].get_text(strip=True)
        # Create the Location object.
        location,__=Location.objects.get_or_create(name=location_name)  # create the Location (region) object
li = selected_location.select('li')
for location_detail in li:
onclick_value = location_detail['onclick'] # regionMove('1.003021','금산/논산');
split_right = onclick_value.split(',')[0]
split_left = onclick_value.split(',')[1]
sub_location_no = re.findall("'(.+)'", split_right)[0]
sub_location_name = re.findall("'(.+)'", split_left)[0]
            if len(re.findall('.*(전체).*', sub_location_name))==1:  # skip the rest of the loop when sub_location_name contains '전체' ("all")
continue
            # Create the SubLocation object.
            sub_location,__= SubLocation.objects.get_or_create(location=location,  # create the SubLocation (sub-region) object
name=sub_location_name,
sub_location_no=sub_location_no)
            # Extract the region code with a regex (the three digits after the decimal point of the sub-location code, e.g. '1.003021').
            location_no = re.findall(".(\d\d\d)", sub_location_no)[0]  # region number
            # Crawl the five basic fields from the sub_location page first (needed to obtain the ypidx used to open each pension detail page).
sub_locations_info_list = sub_location_crawler(location_no, sub_location_no)
for i in range(len(sub_locations_info_list[0])):
                # Crawl each pension: create the Pension object, then crawl its rooms and create the Room objects.
pension_detail_crawler(
sub_location=sub_location,
lowest_price=sub_locations_info_list[1][i], # lowest_price,
pension_image_thumbnail=sub_locations_info_list[2][i], # pension_image_thumbnail
ypidx=sub_locations_info_list[3][i], # ypidx,
discount_rate=sub_locations_info_list[4][i] # discount_rate,
)
for location in Location.objects.all():
sub_location = SubLocation.objects.filter(location=location)
location.pensions_length = len(Pension.objects.filter(sub_location__in=sub_location))
location.save()
for sublocation in SubLocation.objects.all():
sublocation.pensions_length = len(Pension.objects.filter(sub_location=sublocation))
sublocation.save()
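# location_crawler() above is the entry point of the whole pipeline. A minimal,
# hedged driver (it assumes a configured Django project with the location app
# migrated, network access to yapen.co.kr, and the chromedriver path that is
# hard-coded in room_crawler):
def run_full_crawl():  # illustrative only
    # Location -> SubLocation -> Pension -> Room records are created as it runs.
    location_crawler()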
| 36.225877
| 140
| 0.603971
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 6,091
| 0.330943
|
a2b2ffb533dae5272cd3fbc1cefbb22e54b5762b
| 1,181
|
py
|
Python
|
14-semparsing/ucca/scripts/find_constructions.py
|
ariasjose/nn4nlp-code
|
7327ea3e93161afbc8c008e287b646daa802be4d
|
[
"Apache-2.0"
] | null | null | null |
14-semparsing/ucca/scripts/find_constructions.py
|
ariasjose/nn4nlp-code
|
7327ea3e93161afbc8c008e287b646daa802be4d
|
[
"Apache-2.0"
] | null | null | null |
14-semparsing/ucca/scripts/find_constructions.py
|
ariasjose/nn4nlp-code
|
7327ea3e93161afbc8c008e287b646daa802be4d
|
[
"Apache-2.0"
] | null | null | null |
from argparse import ArgumentParser
from ucca import constructions
from ucca.ioutil import read_files_and_dirs
if __name__ == "__main__":
argparser = ArgumentParser(description="Extract linguistic constructions from UCCA corpus.")
argparser.add_argument("passages", nargs="+", help="the corpus, given as xml/pickle file names")
constructions.add_argument(argparser, False)
argparser.add_argument("-v", "--verbose", action="store_true", help="print tagged text for each passage")
args = argparser.parse_args()
for passage in read_files_and_dirs(args.passages):
if args.verbose:
print("%s:" % passage.ID)
extracted = constructions.extract_edges(passage, constructions=args.constructions, verbose=args.verbose)
if any(extracted.values()):
if not args.verbose:
print("%s:" % passage.ID)
for construction, edges in extracted.items():
if edges:
print(" %s:" % construction.description)
for edge in edges:
print(" %s [%s %s]" % (edge, edge.tag, edge.child))
print()
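# A hedged sketch of driving the same extraction programmatically; it reuses
# only the calls made in the __main__ block above, and the caller supplies the
# passage paths and the chosen constructions.
def _example_extract(paths, selected):  # illustrative only
    # `selected` stands in for whatever constructions were chosen on the
    # command line; it is forwarded exactly as in the script body.
    results = []
    for passage in read_files_and_dirs(paths):
        results.append(constructions.extract_edges(passage,
                                                   constructions=selected,
                                                   verbose=False))
    return results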
| 47.24
| 113
| 0.624047
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 215
| 0.182049
|
a2b4cc002608cb98fc1f6000c06a7afefddd34dc
| 3,870
|
py
|
Python
|
multicache/__init__.py
|
bargulg/SimpleCache
|
52f6fd18381e9ccb21194b83288d631d6e2cf28e
|
[
"MIT"
] | 1
|
2017-02-21T14:46:45.000Z
|
2017-02-21T14:46:45.000Z
|
multicache/__init__.py
|
bargulg/multicache
|
52f6fd18381e9ccb21194b83288d631d6e2cf28e
|
[
"MIT"
] | null | null | null |
multicache/__init__.py
|
bargulg/multicache
|
52f6fd18381e9ccb21194b83288d631d6e2cf28e
|
[
"MIT"
] | null | null | null |
import hashlib
import os
import pickle
import tempfile
import zlib
from threading import Lock
from time import time
from multicache.base import BaseCache
try:
from multicache.redis import RedisCache
except ImportError:
pass
lock = Lock()
class DummyCache(BaseCache):
""" Fake cache class to allow a "no cache"
use without breaking anything """
def __init__(self):
self._dict = {}
def get(self, key):
return None
def put(self, key, value, ex=None, ttl=None):
pass
def invalidate(self, key):
pass
class DictCache(BaseCache):
""" Saves data in a dictionary without any persistent storage """
def __init__(self, **kwargs):
self._dict = {}
self.ttl = kwargs.pop('ttl', 3600)
def get(self, key):
ret = self._dict.get(key, None)
if ret is not None and ret[1] > time():
# cache hit
return ret[0]
elif ret is None:
# cache miss
return None
else:
# stale, delete from cache
self.invalidate(key)
return None
def put(self, key, value, ex=None, ttl=None):
with lock:
if ex is None:
if ttl is not None:
ex = ttl + time()
else:
ex = self.ttl + time()
self._dict[key] = value, ex
def invalidate(self, key):
self._dict.pop(key, None)
def get_all_keys(self):
return self._dict.keys()
def get_all_values(self):
return [val[0] for val in self._dict.values() if val[1] >= time()]
def recheck(self):
invalid = []
for key, val in self._dict.items():
if time() > val[1]:
invalid.append(key)
for key in invalid:
self.invalidate(key)
class FileCache(BaseCache):
""" Saves data to a dictionary and files, always saves to both,
only reads files when data isn't in dictionary"""
def __init__(self, path=None, **kwargs):
self._cache = {}
self.ttl = kwargs.pop('ttl', 3600)
if path:
self.path = path
else:
self.path = '{}/multicache'.format(tempfile.gettempdir())
if not os.path.isdir(self.path):
os.mkdir(self.path, 0o700)
def _getpath(self, key):
h = hashlib.new('md5')
h.update(key.encode('utf-8'))
return os.path.join(self.path, h.hexdigest() + '.cache')
def put(self, key, value, ex=None, ttl=None):
with lock:
with open(self._getpath(key), 'wb') as f:
if ex is None:
if ttl is not None:
ex = ttl + time()
else:
ex = self.ttl + time()
f.write(zlib.compress(pickle.dumps((value, ex), -1)))
self._cache[key] = (value, ex)
def get(self, key):
if key in self._cache:
cached = self._cache[key]
if cached[1] > time():
return cached[0]
try:
with open(self._getpath(key), 'rb') as f:
ret = pickle.loads(zlib.decompress(f.read()))
if ret[1] > time():
# cache hit
return ret[0]
# stale cache, invalidate
self.invalidate(key)
return None
except IOError as ex:
if ex.errno == 2: # file does not exist (yet)
return None
else:
raise
def invalidate(self, key):
with lock:
self._cache.pop(key, None)
try:
os.unlink(self._getpath(key))
except OSError as ex:
if ex.errno == 2: # does not exist
pass
else:
raise
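# A hedged usage sketch of the two concrete caches defined above.
def _example_cache_usage():  # illustrative only
    cache = DictCache(ttl=60)        # entries expire 60 seconds after insertion
    cache.put("answer", 42)
    assert cache.get("answer") == 42
    cache.invalidate("answer")
    assert cache.get("answer") is None
    # FileCache stores the same data on disk as zlib-compressed pickles named
    # after the MD5 of the key, under a temp directory by default.
    fcache = FileCache(ttl=60)
    fcache.put("answer", 42)
    return fcache.get("answer")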
| 27.062937
| 74
| 0.509044
| 3,613
| 0.933592
| 0
| 0
| 0
| 0
| 0
| 0
| 447
| 0.115504
|
a2b509c564ad0b2601ed7a285ba7c94de901b242
| 754
|
py
|
Python
|
01_numbers.py
|
fernandobd42/Introduction_Python
|
7a656df1341bda4e657baa146c28b98bef211fc6
|
[
"OLDAP-2.5",
"Python-2.0",
"OLDAP-2.4",
"OLDAP-2.3"
] | 1
|
2016-10-02T00:51:43.000Z
|
2016-10-02T00:51:43.000Z
|
01_numbers.py
|
fernandobd42/Introduction_Python
|
7a656df1341bda4e657baa146c28b98bef211fc6
|
[
"OLDAP-2.5",
"Python-2.0",
"OLDAP-2.4",
"OLDAP-2.3"
] | null | null | null |
01_numbers.py
|
fernandobd42/Introduction_Python
|
7a656df1341bda4e657baa146c28b98bef211fc6
|
[
"OLDAP-2.5",
"Python-2.0",
"OLDAP-2.4",
"OLDAP-2.3"
] | null | null | null |
x = 1  # x gets 1
# print will print (display) the desired value;
print(x)  # result: 1
print(x + 4)  # a variable can be added to a number, as long as the variable has a defined value - result: 5
print(2 * 2)  # a single asterisk is used to multiply - result: 4
print(3 ** 3)  # a double asterisk is used to raise to a power - result: 27
print(5 / 2)  # division with a single slash returns a float - result = 2.5
print(5 // 2)  # division with two slashes returns an integer - result = 2;
print(5 % 2)  # modulo returns the remainder of the division - result = 1;
# NOTE: '=' assigns a value to a variable, whereas '==' compares values;
y = 1  # 1 is assigned to y;
y == 1  # is y equal to 1?
| 53.857143
| 119
| 0.706897
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 653
| 0.843669
|
a2b518c79f5318969c38eb1d484323f66909f1f2
| 3,648
|
py
|
Python
|
schematizer/models/redshift_sql_entities.py
|
Yelp/schematizer
|
035845d27945a05db475f00eb76f59e8825dbaa4
|
[
"Apache-2.0"
] | 86
|
2016-11-17T17:39:13.000Z
|
2021-06-01T15:19:05.000Z
|
schematizer/models/redshift_sql_entities.py
|
tomzhang/schematizer
|
035845d27945a05db475f00eb76f59e8825dbaa4
|
[
"Apache-2.0"
] | 2
|
2016-12-01T20:57:43.000Z
|
2021-09-28T09:26:25.000Z
|
schematizer/models/redshift_sql_entities.py
|
tomzhang/schematizer
|
035845d27945a05db475f00eb76f59e8825dbaa4
|
[
"Apache-2.0"
] | 26
|
2016-11-29T22:38:11.000Z
|
2021-03-02T19:44:17.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
This module contains the internal data structure to hold the information
of redshift SQL schemas.
"""
from __future__ import absolute_import
from __future__ import unicode_literals
from schematizer.models.sql_entities import SQLColumn
from schematizer.models.sql_entities import SQLTable
class RedshiftSQLTable(SQLTable):
"""Internal data structure that represents a redshift sql table.
"""
def __init__(
self,
table_name,
columns=None,
doc=None,
diststyle=None,
**metadata
):
super(RedshiftSQLTable, self).__init__(
table_name,
columns,
doc,
**metadata
)
self.diststyle = diststyle
def __eq__(self, other):
        return (type(other) is RedshiftSQLTable and
self.name == other.name and
self.columns == other.columns and
self.diststyle == other.diststyle and
self.metadata == other.metadata)
@property
def sortkeys(self):
return sorted(
(col for col in self.columns if col.sort_key_order),
key=lambda c: c.sort_key_order
)
@property
def distkey(self):
candidate_distkey = [col for col in self.columns if col.is_dist_key]
if len(candidate_distkey) > 1:
raise ValueError(
"More than one distkey for {table}".format(self.name)
)
if candidate_distkey:
return candidate_distkey[0] # a table should have one distkey
else:
return None
class RedshiftSQLColumn(SQLColumn):
"""Internal data structure that represents a redshift sql column.
It is intended to support sql column definition in redshift.
"""
def __init__(self, column_name, column_type, primary_key_order=None,
sort_key_order=None, is_dist_key=None, encode=None,
is_nullable=True, default_value=None,
attributes=None, doc=None, **metadata):
super(RedshiftSQLColumn, self).__init__(
column_name,
column_type,
primary_key_order,
is_nullable,
default_value,
attributes,
doc,
**metadata
)
self.sort_key_order = sort_key_order
self.is_dist_key = is_dist_key
self.encode = encode
def __eq__(self, other):
        return (type(other) is RedshiftSQLColumn and
self.name == other.name and
self.type == other.type and
self.primary_key_order == other.primary_key_order and
self.sort_key_order == other.sort_key_order and
self.encode == other.encode and
self.is_dist_key == other.is_dist_key and
self.is_nullable == other.is_nullable and
self.default_value == other.default_value and
self.attributes == other.attributes and
self.metadata == other.metadata)
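# A hedged construction sketch using only the constructor arguments declared
# above; the column names are hypothetical and the plain type strings stand in
# for whatever type objects the SQLColumn base class normally receives.
def _example_redshift_table():  # illustrative only
    user_id = RedshiftSQLColumn("user_id", "int", primary_key_order=1,
                                sort_key_order=1, is_dist_key=True)
    created_at = RedshiftSQLColumn("created_at", "timestamp", sort_key_order=2)
    table = RedshiftSQLTable("user_events", columns=[user_id, created_at],
                             diststyle="key")
    # distkey is the single column flagged is_dist_key; sortkeys come back
    # ordered by their sort_key_order.
    return table.distkey, [col.name for col in table.sortkeys]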
| 33.46789
| 76
| 0.625
| 2,751
| 0.754112
| 0
| 0
| 579
| 0.158717
| 0
| 0
| 966
| 0.264803
|
a2b5bf13bb08e8ae97991098e42fc0fd73145597
| 50,707
|
py
|
Python
|
modules/templates/WACOP/config.py
|
mswdresden/AsylumEden
|
a68ee08f9f7031974ec12ec327d00c5d975a740a
|
[
"MIT"
] | 1
|
2017-07-22T18:49:34.000Z
|
2017-07-22T18:49:34.000Z
|
modules/templates/WACOP/config.py
|
mswdresden/AsylumEden
|
a68ee08f9f7031974ec12ec327d00c5d975a740a
|
[
"MIT"
] | null | null | null |
modules/templates/WACOP/config.py
|
mswdresden/AsylumEden
|
a68ee08f9f7031974ec12ec327d00c5d975a740a
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from collections import OrderedDict
from gluon import current
from gluon.storage import Storage
def config(settings):
"""
Template for WA-COP + CAD Cloud Integration
"""
T = current.T
# =========================================================================
# System Settings
#
settings.base.system_name = T("Sahana: Washington Common Operating Picture (WA-COP)")
settings.base.system_name_short = T("Sahana")
# Prepop default
settings.base.prepopulate += ("WACOP", "default/users", "WACOP/Demo")
# Theme (folder to use for views/layout.html)
settings.base.theme = "WACOP"
settings.ui.social_buttons = True
# -------------------------------------------------------------------------
# Self-Registration and User Profile
#
# Users can self-register
settings.security.self_registration = False
# Users need to verify their email
settings.auth.registration_requires_verification = True
# Users need to be approved
settings.auth.registration_requires_approval = True
settings.auth.registration_requests_organisation = True
settings.auth.registration_organisation_required = True
# Approval emails get sent to all admins
settings.mail.approver = "ADMIN"
settings.auth.registration_link_user_to = {"staff": T("Staff")}
settings.auth.registration_link_user_to_default = ["staff"]
settings.auth.registration_roles = {"organisation_id": ["USER"],
}
settings.auth.show_utc_offset = False
settings.auth.show_link = False
# -------------------------------------------------------------------------
# Security Policy
#
settings.security.policy = 7 # Apply Controller, Function and Table ACLs
settings.security.map = True
# -------------------------------------------------------------------------
# L10n (Localization) settings
#
settings.L10n.languages = OrderedDict([
("en", "English"),
("es", "Español"),
])
# Default Language
settings.L10n.default_language = "en"
# Default timezone for users
settings.L10n.utc_offset = "-0800"
# Unsortable 'pretty' date format
settings.L10n.date_format = "%b %d %Y"
# Number formats (defaults to ISO 31-0)
# Decimal separator for numbers (defaults to ,)
settings.L10n.decimal_separator = "."
# Thousands separator for numbers (defaults to space)
settings.L10n.thousands_separator = ","
# Default Country Code for telephone numbers
settings.L10n.default_country_code = 1
# Enable this to change the label for 'Mobile Phone'
settings.ui.label_mobile_phone = "Cell Phone"
# Enable this to change the label for 'Postcode'
settings.ui.label_postcode = "ZIP Code"
settings.msg.require_international_phone_numbers = False
# PDF to Letter
settings.base.paper_size = T("Letter")
# Uncomment this to Translate CMS Series Names
# - we want this on when running s3translate but off in normal usage as we use the English names to lookup icons in render_posts
#settings.L10n.translate_cms_series = True
# Uncomment this to Translate Location Names
#settings.L10n.translate_gis_location = True
# Has scalability issues, but should be OK with our number of records
settings.search.dates_auto_range = True
# -------------------------------------------------------------------------
# GIS settings
#
# Restrict the Location Selector to just certain countries
settings.gis.countries = ("US",)
# Levels for the LocationSelector
levels = ("L1", "L2", "L3")
# Uncomment to pass Addresses imported from CSV to a Geocoder to try and automate Lat/Lon
#settings.gis.geocode_imported_addresses = "google"
# Until we add support to S3LocationSelector to set dropdowns from LatLons
settings.gis.check_within_parent_boundaries = False
# GeoNames username
settings.gis.geonames_username = "mcop"
# Uncomment to hide Layer Properties tool
#settings.gis.layer_properties = False
# Uncomment to display the Map Legend as a floating DIV
settings.gis.legend = "float"
# Uncomment to prevent showing LatLon in Location Represents
settings.gis.location_represent_address_only = "icon"
# Resources which can be directly added to the main map
settings.gis.poi_create_resources = None
# -------------------------------------------------------------------------
# Modules
#
settings.modules = OrderedDict([
# Core modules which shouldn't be disabled
("default", Storage(
name_nice = "Home",
restricted = False, # Use ACLs to control access to this module
access = None, # All Users (inc Anonymous) can see this module in the default menu & access the controller
module_type = None # This item is not shown in the menu
)),
("admin", Storage(
name_nice = "Administration",
#description = "Site Administration",
restricted = True,
access = "|1|", # Only Administrators can see this module in the default menu & access the controller
module_type = None # This item is handled separately for the menu
)),
("appadmin", Storage(
name_nice = "Administration",
#description = "Site Administration",
restricted = True,
module_type = None # No Menu
)),
# ("errors", Storage(
# name_nice = "Ticket Viewer",
# #description = "Needed for Breadcrumbs",
# restricted = False,
# module_type = None # No Menu
# )),
("sync", Storage(
name_nice = "Synchronization",
#description = "Synchronization",
restricted = True,
access = "|1|", # Only Administrators can see this module in the default menu & access the controller
module_type = None # This item is handled separately for the menu
)),
#("translate", Storage(
# name_nice = "Translation Functionality",
# #description = "Selective translation of strings based on module.",
# module_type = None,
#)),
("gis", Storage(
name_nice = "Map",
#description = "Situation Awareness & Geospatial Analysis",
restricted = True,
module_type = 1, # 1st item in the menu
)),
("pr", Storage(
name_nice = "Persons",
description = "Central point to record details on People",
restricted = True,
access = "|1|", # Only Administrators can see this module in the default menu (access to controller is possible to all still)
module_type = None
)),
("org", Storage(
name_nice = "Organizations",
#description = 'Lists "who is doing what & where". Allows relief agencies to coordinate their activities',
restricted = True,
module_type = 10
)),
# All modules below here should be possible to disable safely
("hrm", Storage(
name_nice = "Contacts",
#description = "Human Resources Management",
restricted = True,
module_type = None,
)),
("cms", Storage(
name_nice = "Content Management",
restricted = True,
module_type = 10,
)),
("event", Storage(
name_nice = "Events",
restricted = True,
module_type = 2,
)),
("fire", Storage(
name_nice = "Fire",
restricted = True,
module_type = None,
)),
("police", Storage(
name_nice = "Police",
restricted = True,
module_type = None,
)),
("project", Storage(
name_nice = "Tasks",
restricted = True,
module_type = None,
)),
("doc", Storage(
name_nice = "Documents",
#description = "A library of digital resources, such as photos, documents and reports",
restricted = True,
module_type = None,
)),
("stats", Storage(
name_nice = "Statistics",
restricted = True,
module_type = None
)),
])
# -------------------------------------------------------------------------
# CMS Content Management
#
settings.cms.bookmarks = True
settings.cms.richtext = True
settings.cms.show_tags = True
# -------------------------------------------------------------------------
def cms_post_onaccept(form):
"""
Handle Tags in Create / Update forms
"""
post_id = form.vars.id
db = current.db
s3db = current.s3db
ttable = s3db.cms_tag
ltable = s3db.cms_tag_post
# Delete all existing tags for this post
db(ltable.post_id == post_id).delete()
# Add these tags
tags = current.request.post_vars.get("tags")
if not tags:
return
tags = tags.split(",")
tag_ids = db(ttable.name.belongs(tags)).select(ttable.id,
ttable.name).as_dict(key="name")
for tag in tags:
row = tag_ids.get("tag")
if row:
tag_id = row.get("id")
else:
tag_id = ttable.insert(name=tag)
ltable.insert(post_id = post_id,
tag_id = tag_id,
)
# -------------------------------------------------------------------------
def customise_cms_post_resource(r, tablename):
db = current.db
s3db = current.s3db
table = s3db.cms_post
table.priority.readable = table.priority.writable = True
table.series_id.readable = table.series_id.writable = True
table.status_id.readable = table.status_id.writable = True
method = r.method
if method in ("create", "update"):
# Custom Form
from s3 import S3SQLCustomForm, S3SQLInlineComponent
crud_fields = [(T("Type"), "series_id"),
(T("Priority"), "priority"),
(T("Status"), "status_id"),
(T("Title"), "title"),
(T("Text"), "body"),
(T("Location"), "location_id"),
# Tags are added client-side
S3SQLInlineComponent("document",
name = "file",
label = T("Files"),
fields = [("", "file"),
#"comments",
],
),
]
if r.tablename != "event_incident":
if r.tablename == "event_event":
from gluon import IS_EMPTY_OR
from s3 import IS_ONE_OF
itable = s3db.event_incident
query = (itable.event_id == r.id) & \
(itable.closed == False) & \
(itable.deleted == False)
set = db(query)
f = s3db.event_post.incident_id
f.requires = IS_EMPTY_OR(
IS_ONE_OF(set, "event_incident.id",
f.represent,
orderby="event_incident.name",
sort=True))
crud_fields.insert(0, S3SQLInlineComponent("incident_post",
fields = [("", "incident_id")],
label = T("Incident"),
multiple = False,
))
crud_form = S3SQLCustomForm(*crud_fields
)
# Client support for Tags
appname = r.application
s3 = current.response.s3
scripts_append = s3.scripts.append
if s3.debug:
scripts_append("/%s/static/scripts/tag-it.js" % appname)
else:
scripts_append("/%s/static/scripts/tag-it.min.js" % appname)
scripts_append("/%s/static/themes/WACOP/js/update_tags.js" % appname)
if method == "create":
s3.jquery_ready.append('''wacop_update_tags("")''')
elif method == "update":
ttable = s3db.cms_tag
ltable = s3db.cms_tag_post
if r.tablename == "cms_post":
post_id = r.id
else:
post_id = r.component.id
query = (ltable.post_id == post_id) & \
(ltable.tag_id == ttable.id)
tags = db(query).select(ttable.name)
tags = [tag.name for tag in tags]
tags = ",".join(tags)
s3.jquery_ready.append('''wacop_update_tags("%s")''' % tags)
# Processing Tags
default = s3db.get_config(tablename, "onaccept")
if isinstance(default, list):
onaccept = default
onaccept.append(cms_post_onaccept)
else:
onaccept = [default, cms_post_onaccept]
s3db.configure(tablename,
crud_form = crud_form,
onaccept = onaccept,
)
elif method in ("custom", "datalist", "filter"):
# dataList configuration
from templates.WACOP.controllers import cms_post_list_layout
s3 = current.response.s3
s3.dl_no_header = True
s3db.configure(tablename,
list_fields = ["series_id",
"priority",
"status_id",
"date",
"title",
"body",
"created_by",
"tag.name",
"document.file",
"comment.id",
#"comment.body", # Extra fields come in unsorted, so can't match up to records
#"comment.created_by",
#"comment.created_on",
],
list_layout = cms_post_list_layout,
# Default
#orderby = "cms_post.date desc",
)
if method in ("custom", "filter"):
# Filter Widgets
from s3 import S3DateFilter, \
S3LocationFilter, \
S3OptionsFilter, \
S3TextFilter
if method == "filter":
# Apply filter_vars
get_vars = r.get_vars
for k, v in get_vars.iteritems():
# We only expect a maximum of 1 of these, no need to append
from s3 import FS
s3.filter = (FS(k) == v)
date_filter = S3DateFilter("date",
# If we introduce an end_date on Posts:
#["date", "end_date"],
label = "",
#hide_time = True,
slider = True,
clear_text = "X",
)
date_filter.input_labels = {"ge": "Start Time/Date", "le": "End Time/Date"}
from templates.WACOP.controllers import text_filter_formstyle
filter_widgets = [S3TextFilter(["body",
],
formstyle = text_filter_formstyle,
label = T("Search"),
_placeholder = T("Enter search term…"),
),
S3OptionsFilter("series_id",
label = "",
noneSelectedText = "Type", # T() added in widget
no_opts = "",
),
S3OptionsFilter("priority",
label = "",
noneSelectedText = "Priority", # T() added in widget
no_opts = "",
),
S3OptionsFilter("status_id",
label = "",
noneSelectedText = "Status", # T() added in widget
no_opts = "",
),
S3OptionsFilter("created_by$organisation_id",
label = "",
noneSelectedText = "Source", # T() added in widget
no_opts = "",
),
S3OptionsFilter("tag_post.tag_id",
label = "",
noneSelectedText = "Tag", # T() added in widget
no_opts = "",
),
date_filter,
]
if r.tablename == "event_event" or \
(method == "filter" and get_vars.get("event_post.event_id")):
# Event Profile
filter_widgets.insert(1, S3OptionsFilter("incident_post.incident_id",
label = "",
noneSelectedText = "Incident", # T() added in widget
no_opts = "",
))
user = current.auth.user
if user:
filter_widgets.insert(1, S3OptionsFilter("bookmark.user_id",
label = "",
options = {"*": T("All"),
user.id: T("My Bookmarks"),
},
cols = 2,
multiple = False,
table = False,
))
s3db.configure(tablename,
filter_widgets = filter_widgets,
)
settings.customise_cms_post_resource = customise_cms_post_resource
# -------------------------------------------------------------------------
# Event/Incident Management
#
settings.event.incident_teams_tab = "Units"
# Uncomment to preserve linked Incidents when an Event is deleted
settings.event.cascade_delete_incidents = False
# -------------------------------------------------------------------------
def customise_event_event_resource(r, tablename):
from gluon import A, URL
from s3 import s3_fieldmethod
db = current.db
s3db = current.s3db
# Virtual Fields
etable = s3db.event_event
#append = etable._virtual_methods.append
def event_name(row):
return A(row["event_event.name"],
_href = URL(c="event", f="event",
args=[row["event_event.id"], "custom"],
extension = "", # ensure no .aadata
),
)
#append(Field.Method("name_click", event_name))
etable.name_click = s3_fieldmethod("name_click",
event_name,
# over-ride the default represent of s3_unicode to prevent HTML being rendered too early
represent = lambda v: v,
)
def event_status(row):
if row["event_event.exercise"]:
status = T("Testing")
elif not row["event_event.end_date"]:
status = T("Open")
else:
status = T("Closed")
return status
#append(Field.Method("status", event_status))
etable.status = s3_fieldmethod("status", event_status)
itable = s3db.event_incident
def event_incidents(row):
query = (itable.event_id == row["event_event.id"]) & \
(itable.deleted == False)
incidents = db(query).count()
return incidents
#append(Field.Method("incidents", event_incidents))
etable.incidents = s3_fieldmethod("incidents", event_incidents)
ertable = s3db.event_team
def event_resources(row):
query = (ertable.event_id == row["event_event.id"]) & \
(ertable.deleted == False)
resources = db(query).count()
return resources
#append(Field.Method("resources", event_resources))
etable.resources = s3_fieldmethod("resources", event_resources)
ettable = s3db.event_tag
ttable = s3db.cms_tag
def event_tags(row):
query = (ettable.event_id == row["event_event.id"]) & \
(ettable.deleted == False) & \
(ettable.tag_id == ttable.id)
tags = db(query).select(ttable.name)
if tags:
tags = [t.name for t in tags]
tags = ", ".join(tags)
return tags
else:
return current.messages["NONE"]
#append(Field.Method("tags", event_tags))
etable.tags = s3_fieldmethod("tags", event_tags)
list_fields = [(T("Name"), "name_click"),
(T("Status"), "status"),
(T("Zero Hour"), "start_date"),
(T("Closed"), "end_date"),
(T("City"), "location.location_id.L3"),
(T("State"), "location.location_id.L1"),
(T("Tags"), "tags"),
(T("Incidents"), "incidents"),
(T("Resources"), "resources"),
]
s3db.configure(tablename,
extra_fields = ("name",
"end_date",
"exercise",
),
list_fields = list_fields,
orderby = "event_event.name",
)
settings.customise_event_event_resource = customise_event_event_resource
# -------------------------------------------------------------------------
def customise_event_event_controller(**attr):
s3db = current.s3db
s3 = current.response.s3
# Modify Components
s3db.add_components("event_event",
# Events have just a single Location
event_event_location = {"joinby": "event_id",
"multiple": False,
},
# Incidents are linked to Events, not created from them
# - not a link table though, so can't change the actuation
#event_incident = {"joinby": "event_id",
# },
)
# Custom Browse
from templates.WACOP.controllers import event_Browse, event_Profile
set_method = s3db.set_method
set_method("event", "event",
method = "browse",
action = event_Browse)
# Custom Profile
set_method("event", "event",
method = "custom",
action = event_Profile)
# Custom prep
standard_prep = s3.prep
def custom_prep(r):
# Call standard postp
if callable(standard_prep):
result = standard_prep(r)
cname = r.component_name
if not cname:
f = s3db.event_event.event_type_id
f.readable = f.writable = False
elif cname == "task":
from gluon import IS_EMPTY_OR
from s3 import IS_ONE_OF, S3SQLCustomForm, S3SQLInlineComponent
itable = s3db.event_incident
query = (itable.event_id == r.id) & \
(itable.closed == False) & \
(itable.deleted == False)
set = current.db(query)
f = s3db.event_task.incident_id
f.requires = IS_EMPTY_OR(
IS_ONE_OF(set, "event_incident.id",
f.represent,
orderby="event_incident.name",
sort=True))
crud_form = S3SQLCustomForm(
S3SQLInlineComponent("incident",
fields = [("", "incident_id")],
label = T("Incident"),
multiple = False,
filterby = dict(field = "event_id",
options = r.id,
)
),
"name",
"description",
"source",
"priority",
"pe_id",
"date_due",
"status",
"comments",
)
r.component.configure(crud_form = crud_form,
)
elif r.representation == "popup" and r.get_vars.get("view"):
# Popups for lists in Parent Event of Incident Screen or Event Profile header
# No Title since this is on the Popup
s3.crud_strings["event_event"].title_display = ""
# No create button & Tweak list_fields
if cname == "incident":
list_fields = ["date",
"name",
"incident_type_id",
]
elif cname == "group":
list_fields = ["incident_id",
"group_id",
"status_id",
]
elif cname == "post":
list_fields = ["date",
"series_id",
"priority",
"status_id",
"body",
]
else:
# Shouldn't get here but want to avoid crashes
list_fields = []
r.component.configure(insertable = False,
list_fields = list_fields,
)
return True
s3.prep = custom_prep
# Custom rheader tabs
attr = dict(attr)
attr["rheader"] = wacop_event_rheader
return attr
settings.customise_event_event_controller = customise_event_event_controller
# -------------------------------------------------------------------------
def customise_event_incident_resource(r, tablename):
from gluon import A, URL
from s3 import s3_fieldmethod
s3db = current.s3db
# Virtual Fields
itable = s3db.event_incident
#append = itable._virtual_methods.append
def incident_name(row):
return A(row["event_incident.name"],
_href = URL(c="event", f="incident",
args=[row["event_incident.id"], "custom"],
extension = "", # ensure no .aadata
),
)
#append(Field.Method("name_click", incident_name))
itable.name_click = s3_fieldmethod("name_click",
incident_name,
# over-ride the default represent of s3_unicode to prevent HTML being rendered too early
represent = lambda v: v,
)
def incident_status(row):
if row["event_incident.exercise"]:
status = T("Testing")
elif not row["event_incident.end_date"]:
status = T("Open")
else:
status = T("Closed")
return status
#append(Field.Method("status", incident_status))
itable.status = s3_fieldmethod("status", incident_status)
if r.method == "browse" or r.get_vars.get("browse"):
# Incident Browse
db = current.db
ertable = s3db.event_team
def incident_resources(row):
query = (ertable.event_id == row["event_incident.id"]) & \
(ertable.deleted == False)
resources = db(query).count()
return resources
#append(Field.Method("resources", incident_resources))
itable.resources = s3_fieldmethod("resources", incident_resources)
ettable = s3db.event_tag
ttable = s3db.cms_tag
def incident_tags(row):
query = (ettable.incident_id == row["event_incident.id"]) & \
(ettable.deleted == False) & \
(ettable.tag_id == ttable.id)
tags = db(query).select(ttable.name)
if tags:
tags = [t.name for t in tags]
tags = ", ".join(tags)
return tags
else:
return current.messages["NONE"]
#append(Field.Method("tags", incident_tags))
itable.tags = s3_fieldmethod("tags", incident_tags)
list_fields = [(T("Name"), "name_click"),
(T("Status"), "status"),
(T("Type"), "incident_type_id"),
(T("Zero Hour"), "date"),
(T("Closed"), "end_date"),
(T("City"), "location.location_id.L3"),
(T("State"), "location.location_id.L1"),
(T("Tags"), "tags"),
(T("Resources"), "resources"),
(T("Event"), "event_id"),
]
else:
# Homepage or Event Profile
list_fields = [(T("Name"), "name_click"),
(T("Status"), "status"),
(T("Type"), "incident_type_id"),
"location_id",
(T("Start"), "date"),
]
s3db.configure(tablename,
extra_fields = ("name",
"end_date",
"exercise",
),
list_fields = list_fields,
orderby = "event_incident.name",
)
settings.customise_event_incident_resource = customise_event_incident_resource
# -------------------------------------------------------------------------
def customise_event_incident_controller(**attr):
s3db = current.s3db
response = current.response
s3 = response.s3
# Load normal model to be able to override configuration
table = s3db.event_incident
table.event_id.readable = table.event_id.writable = True
# Custom Browse
from templates.WACOP.controllers import incident_Browse, incident_Profile
set_method = s3db.set_method
set_method("event", "incident",
method = "browse",
action = incident_Browse)
# Custom Profile
set_method("event", "incident",
method = "custom",
action = incident_Profile)
#s3.crud_strings["event_incident"].title_list = T("Browse Incidents")
# Custom prep
standard_prep = s3.prep
def custom_prep(r):
# Call standard postp
if callable(standard_prep):
result = standard_prep(r)
if r.method == "assign":
current.menu.main = ""
elif r.component_name == "task":
from s3 import S3SQLCustomForm
crud_form = S3SQLCustomForm("name",
"description",
"source",
"priority",
"pe_id",
"date_due",
"status",
"comments",
)
r.component.configure(crud_form = crud_form,
)
elif r.representation == "popup":
if not r.component:
if r.get_vars.get("set_event"):
# Popup just used to link to Event
#s3.crud_strings["event_incident"].title_update = T("Add to Event")
from s3 import S3SQLCustomForm
crud_form = S3SQLCustomForm("event_id",
)
s3db.configure("event_incident",
crud_form = crud_form,
)
#elif r.component_name == "post":
# from s3 import S3SQLCustomForm
# crud_form = S3SQLCustomForm("body",
# )
# s3db.configure("cms_post",
# crud_form = crud_form,
# )
return True
s3.prep = custom_prep
# Custom postp
standard_postp = s3.postp
def custom_postp(r, output):
# Call standard postp
if callable(standard_postp):
output = standard_postp(r, output)
if r.interactive and isinstance(output, dict):
if r.method == "assign":
# No Top Menu
current.menu.main = ""
# Custom View to waste less space inside popup
import os
response.view = os.path.join(r.folder,
"modules", "templates",
"WACOP", "views",
"assign.html")
#elif r.component_name == "post":
# # Add Tags - no, do client-side
# output["form"].append()
#else:
# # Summary or Profile pages
# # Additional styles
# s3.external_stylesheets += ["https://cdn.knightlab.com/libs/timeline3/latest/css/timeline.css",
# "https://fonts.googleapis.com/css?family=Merriweather:400,700|Source+Sans+Pro:400,700",
# ]
#if r.method == "summary":
# # Open the Custom profile page instead of the normal one
# from gluon import URL
# from s3 import S3CRUD
# custom_url = URL(args = ["[id]", "custom"])
# S3CRUD.action_buttons(r,
# read_url=custom_url,
# update_url=custom_url)
# # System-wide Alert
# from templates.WACOP.controllers import custom_WACOP
# custom = custom_WACOP()
# output["system_wide"] = custom._system_wide_html()
return output
s3.postp = custom_postp
# Custom rheader tabs
#attr = dict(attr)
#attr["rheader"] = wacop_event_rheader
attr["rheader"] = None
# No sidebar menu
current.menu.options = None
return attr
settings.customise_event_incident_controller = customise_event_incident_controller
# -------------------------------------------------------------------------
def customise_event_human_resource_resource(r, tablename):
from gluon import A, URL
from s3 import s3_fieldmethod
s3db = current.s3db
# Virtual Fields
# Always used from either the Event or Incident context
f = r.function
record_id = r.id
ehrtable = s3db.event_human_resource
hr_represent = ehrtable.human_resource_id.represent
def hr_name(row):
hr_id = row["event_human_resource.human_resource_id"]
return A(hr_represent(hr_id),
_href = URL(c="event", f=f,
args=[record_id, "human_resource", hr_id, "profile"],
),
)
ehrtable.name_click = s3_fieldmethod("name_click",
hr_name,
# over-ride the default represent of s3_unicode to prevent HTML being rendered too early
# @ToDo: Bulk lookups
represent = lambda v: v,
)
s3db.configure(tablename,
#crud_form = crud_form,
extra_fields = ("human_resource_id",
),
list_fields = [(T("Name"), "name_click"),
(T("Title"), "human_resource_id$job_title_id"),
"human_resource_id$organisation_id",
(T("Email"), "human_resource_id$person_id$email.value"),
(T("Phone"), "human_resource_id$person_id$phone.value"),
"status",
(T("Notes"), "comments"),
],
orderby = "event_human_resource.human_resource_id",
)
settings.customise_event_human_resource_resource = customise_event_human_resource_resource
# -------------------------------------------------------------------------
def customise_event_organisation_resource(r, tablename):
from gluon import A, URL
from s3 import s3_fieldmethod
s3db = current.s3db
# Virtual Fields
# Always used from either the Event or Incident context
f = r.function
record_id = r.id
eotable = s3db.event_organisation
org_represent = eotable.organisation_id.represent
def org_name(row):
organisation_id = row["event_organisation.organisation_id"]
return A(org_represent(organisation_id),
_href = URL(c="event", f=f,
args=[record_id, "organisation", organisation_id, "profile"],
),
)
eotable.name_click = s3_fieldmethod("name_click",
org_name,
# over-ride the default represent of s3_unicode to prevent HTML being rendered too early
# @ToDo: Bulk lookups
represent = lambda v: v,
)
s3db.configure(tablename,
#crud_form = crud_form,
extra_fields = ("organisation_id",
),
list_fields = [(T("Name"), "name_click"),
"status",
"comments",
],
orderby = "event_organisation.organisation_id",
)
settings.customise_event_organisation_resource = customise_event_organisation_resource
# -------------------------------------------------------------------------
def customise_event_team_resource(r, tablename):
from gluon import A, URL
from s3 import s3_fieldmethod, S3SQLCustomForm
s3db = current.s3db
ertable = s3db.event_team
#sertable.group_id.label = T("Resource")
# Form
# @ToDo: Have both Team & Event_Team in 1 form
crud_form = S3SQLCustomForm("incident_id",
"group_id",
"status_id",
)
# Virtual Fields
# Always used from either the Event or Incident context
f = r.function
record_id = r.id
group_represent = ertable.group_id.represent
def team_name(row):
group_id = row["event_team.group_id"]
return A(group_represent(group_id),
_href = URL(c="event", f=f,
args=[record_id, "group", group_id, "profile"],
extension = "", # ensure no .aadata
),
)
ertable.name_click = s3_fieldmethod("name_click",
team_name,
# over-ride the default represent of s3_unicode to prevent HTML being rendered too early
# @ToDo: Bulk lookups
represent = lambda v: v,
)
s3db.configure(tablename,
crud_form = crud_form,
extra_fields = ("group_id",
),
list_fields = [(T("Name"), "name_click"),
"status_id",
],
orderby = "pr_group.name",
)
settings.customise_event_team_resource = customise_event_team_resource
# -------------------------------------------------------------------------
def customise_pr_group_resource(r, tablename):
s3db = current.s3db
current.response.s3.crud_strings[tablename] = Storage(
label_create = T("Create Resource"),
title_display = T("Resource Details"),
title_list = T("Resources"),
title_update = T("Edit Resource"),
label_list_button = T("List Resources"),
label_delete_button = T("Delete Resource"),
msg_record_created = T("Resource added"),
msg_record_modified = T("Resource updated"),
msg_record_deleted = T("Resource deleted"),
msg_list_empty = T("No Resources currently registered"))
field = s3db.pr_group.status_id
field.readable = field.writable = True
from s3 import S3SQLCustomForm
crud_form = S3SQLCustomForm((T("Name"), "name"),
"status_id",
"comments",
)
list_fields = [(T("Name"), "name"),
"status_id",
"comments",
]
s3db.configure(tablename,
crud_form = crud_form,
list_fields = list_fields,
)
settings.customise_pr_group_resource = customise_pr_group_resource
# -------------------------------------------------------------------------
def customise_pr_person_controller(**attr):
# Custom Profile
from templates.WACOP.controllers import person_Dashboard
current.s3db.set_method("pr", "person",
method = "dashboard",
action = person_Dashboard)
# No sidebar menu
current.menu.options = None
return attr
settings.customise_pr_person_controller = customise_pr_person_controller
# -------------------------------------------------------------------------
def customise_project_task_resource(r, tablename):
from gluon import A, URL
from s3 import s3_fieldmethod
s3db = current.s3db
# Virtual Fields
# Always used from either the Event or Incident context
f = r.function
record_id = r.id
def task_name(row):
return A(row["project_task.name"],
_href = URL(c="event", f=f,
args=[record_id, "task", row["project_task.id"], "profile"],
),
)
s3db.project_task.name_click = s3_fieldmethod("name_click",
task_name,
# over-ride the default represent of s3_unicode to prevent HTML being rendered too early
represent = lambda v: v,
)
s3db.configure(tablename,
#crud_form = crud_form,
extra_fields = ("name",
),
list_fields = ["status",
(T("Description"), "name_click"),
(T("Created"), "created_on"),
(T("Due"), "date_due"),
],
orderby = "project_task.date_due",
)
settings.customise_project_task_resource = customise_project_task_resource
# =============================================================================
def wacop_event_rheader(r, tabs=[]):
""" EVENT custom resource headers """
if r.representation != "html":
# Resource headers only used in interactive views
return None
from s3 import s3_rheader_resource, S3ResourceHeader
tablename, record = s3_rheader_resource(r)
if tablename != r.tablename:
resource = current.s3db.resource(tablename, id=record.id)
else:
resource = r.resource
rheader = None
rheader_fields = []
if record:
T = current.T
if tablename == "event_event":
if not tabs:
tabs = [(T("Event Details"), None),
(T("Incidents"), "incident"),
(T("Units"), "group"),
(T("Tasks"), "task"),
(T("Updates"), "post"),
]
rheader_fields = [["name"],
["start_date"],
["comments"],
]
elif tablename == "event_incident":
if not tabs:
tabs = [(T("Incident Details"), None),
(T("Units"), "group"),
(T("Tasks"), "task"),
(T("Updates"), "post"),
]
rheader_fields = [["name"],
["date"],
["comments"],
]
rheader = S3ResourceHeader(rheader_fields, tabs)(r,
table=resource.table,
record=record,
)
return rheader
# END =========================================================================
| 42.080498
| 142
| 0.428264
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 14,238
| 0.280773
|
a2b8296c9037d221e852aad4ef00a8219c5bd0cc
| 1,185
|
py
|
Python
|
main.py
|
guysoft/kivy-external-storage-permission
|
a1eefedcabab2e82af948362271a21b4a8b89b56
|
[
"MIT"
] | 1
|
2020-04-07T15:13:12.000Z
|
2020-04-07T15:13:12.000Z
|
main.py
|
guysoft/kivy-external-storage-permission
|
a1eefedcabab2e82af948362271a21b4a8b89b56
|
[
"MIT"
] | null | null | null |
main.py
|
guysoft/kivy-external-storage-permission
|
a1eefedcabab2e82af948362271a21b4a8b89b56
|
[
"MIT"
] | null | null | null |
import kivy
from kivy.app import App
from kivy.uix.button import Button
import android
import os
import time
from android.permissions import Permission, request_permission, check_permission
from kivy.clock import Clock
class MyApp(App):
def second_thread(self, data):
print("starting second thread")
permission_status = check_permission(Permission.WRITE_EXTERNAL_STORAGE)
print(permission_status)
if permission_status is not None and permission_status:
print("got permission")
path = os.environ["SECONDARY_STORAGE"]
test_path = os.path.join(path, "test_yay")
os.makedirs(test_path)
else:
Clock.schedule_once(self.second_thread, 1)
def callback(self, data):
print("Pushed button, running")
print("request permission")
print(request_permission(Permission.WRITE_EXTERNAL_STORAGE))
Clock.schedule_once(self.second_thread, 5)
def build(self):
return Button(text='Touch to test writing to ' + os.environ["SECONDARY_STORAGE"], on_press=self.callback)
if __name__ == '__main__':
MyApp().run()
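# Note on the flow above (added sketch, not part of the original app):
# request_permission() only raises the Android grant dialog and returns right
# away, so the result has to be picked up later -- hence the app polls
# check_permission() via Clock.schedule_once(). A minimal standalone version of
# that polling pattern, using only the helpers already imported here:
#
#     def wait_for_write_permission(on_granted, _dt=0):
#         if check_permission(Permission.WRITE_EXTERNAL_STORAGE):
#             on_granted()
#         else:
#             Clock.schedule_once(lambda dt: wait_for_write_permission(on_granted, dt), 1)
#
#     request_permission(Permission.WRITE_EXTERNAL_STORAGE)
#     wait_for_write_permission(lambda: print("write access granted"))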
| 25.76087
| 113
| 0.672574
| 914
| 0.771308
| 0
| 0
| 0
| 0
| 0
| 0
| 169
| 0.142616
|
a2bc3a31cc03d9c0efb26d0509eb49d178d88baf
| 401
|
py
|
Python
|
my_oop/oop05.py
|
xxwqlee/pylearn
|
6eb9ad61bc68b3d0ca0b093f9876df3105efd98e
|
[
"Apache-2.0"
] | 1
|
2019-03-14T05:04:02.000Z
|
2019-03-14T05:04:02.000Z
|
my_oop/oop05.py
|
xxwqlee/pylearn
|
6eb9ad61bc68b3d0ca0b093f9876df3105efd98e
|
[
"Apache-2.0"
] | null | null | null |
my_oop/oop05.py
|
xxwqlee/pylearn
|
6eb9ad61bc68b3d0ca0b093f9876df3105efd98e
|
[
"Apache-2.0"
] | null | null | null |
"""
Inheritance call relationships
"""
class A:
def a_say(self):
        print('Executing A:', self)
class B(A):
def b_say(self):
        A.a_say(self)  # same effect as the statement below
        super().a_say()  # super() calls the parent-class definition,
        # implicitly passing the current object reference self
        A().a_say()  # direct use of the class: first create a new A instance
        print('Executing B:', self)
a = A()
b = B()
a.a_say()
b.a_say()
print("*" * 50)
b.b_say()  # still references the object instantiated from the subclass
print("*" * 50)
B().b_say()
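# Expected console output of the demo above (a sketch -- the object addresses
# will differ between runs):
#
#     Executing A: <__main__.A object at 0x...>    # a.a_say()
#     Executing A: <__main__.B object at 0x...>    # b.a_say() resolves via A, but self is the B instance
#     **************************************************
#     Executing A: <__main__.B object at 0x...>    # A.a_say(self) inside b_say
#     Executing A: <__main__.B object at 0x...>    # super().a_say() inside b_say
#     Executing A: <__main__.A object at 0x...>    # A().a_say() uses a freshly created A instance
#     Executing B: <__main__.B object at 0x...>
#     **************************************************
#     ... followed by the same four lines again for B().b_say(), with a new B instance.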
| 13.366667
| 44
| 0.506234
| 370
| 0.686456
| 0
| 0
| 0
| 0
| 0
| 0
| 256
| 0.474954
|
a2bc5af81309d6409de30031673fb2592880c8d4
| 8,852
|
py
|
Python
|
python/tests/sbp/utils.py
|
zk20/libsbp
|
041c5055f5db422258ebb3ce3f8e9f6e5d3e5fa9
|
[
"MIT"
] | null | null | null |
python/tests/sbp/utils.py
|
zk20/libsbp
|
041c5055f5db422258ebb3ce3f8e9f6e5d3e5fa9
|
[
"MIT"
] | null | null | null |
python/tests/sbp/utils.py
|
zk20/libsbp
|
041c5055f5db422258ebb3ce3f8e9f6e5d3e5fa9
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# Copyright (C) 2015 Swift Navigation Inc.
# Contact: https://support.swiftnav.com
#
# This source is subject to the license found in the file 'LICENSE' which must
# be be distributed together with this source. All other rights reserved.
#
# THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND,
# EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A PARTICULAR PURPOSE.
"""
Utilities for running YAML-defined unit tests.
"""
import base64
import os.path
import json
import sys
import unittest
import yaml
from sbp.msg import SBP
from sbp.table import dispatch, _SBP_TABLE
HERE = os.path.dirname(__file__)
PYTHON_ROOT = os.path.join(HERE, "..", "..")
SPEC_ROOT = os.path.join(PYTHON_ROOT, "..", "spec", "yaml", "swiftnav", "sbp")
_SPECS = {}
def flatten_array(a):
"""Return a mapping from a yaml array of mappings."""
return dict((next(iter(item.keys())), item[next(iter(item.keys()))]) for item in a)
def load_msg_field_classes(msg):
"""Return a mapping of msg field names to custom classes."""
# any_case is only available on Python 3.6+
try:
from any_case import to_snake_case
except ImportError:
return {}
module_name = msg.__class__.__module__
msg_name = msg.__class__.__name__
if module_name not in _SPECS:
sbp_module_name = module_name.rsplit(".", 1)[-1]
module_filename = os.path.join(SPEC_ROOT, sbp_module_name + ".yaml")
if not os.path.exists(module_filename):
raise RuntimeError(module_filename, msg)
with open(module_filename) as f:
_SPECS[module_name] = yaml.load(f.read(), Loader=yaml.FullLoader)
definitions = flatten_array(_SPECS[module_name]["definitions"])
msg_key = to_snake_case(msg_name).upper()
obj_fields = flatten_array(definitions[msg_key]["fields"])
field_classes = {}
for field_name, field in obj_fields.items():
if field["type"] in definitions:
mod = sys.modules[module_name]
cls = getattr(mod, field["type"])
field_classes[field_name] = cls
return field_classes
def _encoded_string(s):
"""Encode the string-like argument as bytes if suitable"""
return s.encode('ascii') if hasattr(s, 'encode') else s
def _assert_unsorted_equal(a, b):
"""
Perform unittest.TestCase.assertCountEqual.
"""
# pytest does not have a similar feature
# https://github.com/pytest-dev/pytest/issues/5548
# This is intentionally inside the function so that it is not collected as a test class
class UnitTestCase(unittest.TestCase):
def runTest(self):
pass
if not hasattr(unittest.TestCase, "assertCountEqual"):
def assertCountEqual(self, *args, **kw):
return self.assertItemsEqual(*args, **kw) # pylint: disable=no-member
case = UnitTestCase()
case.assertCountEqual(a, b)
def _assert_sbp(sbp, test_case):
"""
    Assert that an SBP message was parsed correctly from a raw packet of data.
Parameters
----------
sbp : :class: `SBP`
SBP message parsed from unit test's raw packet.
test_case : dict
Unit test case parsed from YAML.
"""
assert sbp.crc == int(test_case['crc'], 0), "Invalid crc."
assert sbp.msg_type == int(test_case['msg_type'], 0), "Invalid msg_type."
assert sbp.sender == int(test_case['sender'], 0), "Invalid sender."
assert sbp.length == test_case['length'], "Invalid length."
assert base64.standard_b64encode(sbp.payload) == _encoded_string(test_case['payload']), \
"Invalid payload."
def deep_encode(e, encoding='ascii'):
"""
Encodes all strings using encoding, default ascii.
"""
if isinstance(e, dict):
return dict((i, deep_encode(j, encoding)) for (i, j) in e.items())
elif isinstance(e, list):
return [deep_encode(i, encoding) for i in e]
elif isinstance(e, str):
e = e.encode(encoding)
return e
def field_eq(p, e):
"""
Checks the field values of a parsed message for equality against
some ground truth value.
Parameters
----------
p : object with dict-like attributed access
Parsed field contents.
e : object with dict-like attributed access
Expected field contents.
Returns
----------
True if fields are equal, else False.
"""
if isinstance(e, dict):
return all(field_eq(p[i], j) for (i, j) in iter(e.items()))
elif isinstance(e, list):
return all(field_eq(p[i], j) for (i, j) in enumerate(e))
elif isinstance(e, str) and isinstance(p, bytes) and p.endswith(b'\x00'):
e = e.encode('ascii')
return p == e
def _assert_msg(msg, test_case):
"""
Asserts that the parsed payload of an SBP message has the expected
field values.
Parameters
----------
msg : Parsed SBP message.
Parsed SBP message.
test_case : dict
Unit test case for this message.
"""
assert msg.__class__.__name__ == test_case['name'], (
"test case name {} loaded class name {}".format(test_case['name'], msg.__class__.__name__))
if test_case['fields']:
for field_name, field_value in test_case['fields'].items():
assert field_eq(getattr(msg, field_name), _encoded_string(field_value)), \
"Unequal field values (name: %s): got %r, but expected %r!" \
% (field_name, getattr(msg, field_name), field_value)
def _assert_msg_roundtrip(msg, raw_packet):
"""
Asserts that a msg gets serialized back into binary with the
expected value.
Parameters
----------
msg : Parsed SBP message.
Parsed SBP message.
raw_packet : dict
Unit test case for this message.
"""
encoding = base64.standard_b64encode(msg.to_binary())
assert encoding == _encoded_string(raw_packet)
def _assert_msg_roundtrip_json(msg, raw_json):
"""
Asserts that a msg gets serialized back into JSON with the
expected value, as well as gets serialized from JSON into
an expected object.
"""
to_json = json.loads(msg.to_json())
from_json = json.loads(raw_json)
assert sorted(to_json.items()) == sorted(from_json.items())
assert msg == msg.from_json(raw_json)
def _assert_materialization(msg, sbp, raw_json):
"""Asserts that a message materialized will get serialized into the
right JSON object.
"""
fields = msg['fields'] or dict()
live_msg = _SBP_TABLE[sbp.msg_type](sbp, **fields)
assert isinstance(live_msg.to_json_dict(), dict)
assert live_msg.to_json_dict() == json.loads(raw_json)
fields = deep_encode(fields)
live_msg = _SBP_TABLE[sbp.msg_type](sbp=None, **fields)
assert isinstance(live_msg.to_json_dict(), dict)
assert sorted(live_msg.to_json_dict().keys()) == sorted(live_msg.to_json_dict().keys())
_assert_unsorted_equal(live_msg.to_json_dict(), live_msg.to_json_dict())
assert msg['module']
assert msg['name']
# Locate the classes for any fields that use one from the same
# module as the test case
if not fields:
return
field_class_map = load_msg_field_classes(live_msg)
if not field_class_map:
return
# Instantiate fields as classes and then instantiate msg using those objects
member_fields = {}
for name, value in fields.items():
if name in field_class_map:
assert isinstance(value, dict)
member_fields[name] = field_class_map[name](sbp=None, **value)
else:
member_fields[name] = value
live_msg = _SBP_TABLE[sbp.msg_type](sbp=None, **member_fields)
_assert_unsorted_equal(live_msg.to_json_dict(), json.loads(raw_json))
def _assert_sane_package(pkg_name, pkg):
"""
Sanity check the package collection of tests before actually
running the tests.
Parameters
----------
pkg_name : str
Name of package to test
pkg : dict
Parsed contents of YAML file.
"""
assert len(pkg['tests']) > 0, "Package has no tests!"
def load_test_package(test_filename):
"""
Runs unit tests for message bindings by reading a YAML unit test
specification, parsing a raw packet for each test, and then
asserting that SBP messages and parsed payloads have their intended
values.
Parameters
----------
test_filename : str
Filepath to unit test specifications
pkg_name : str
Name of package to test
"""
pkg_name = os.path.basename(test_filename)
with open(test_filename, 'r') as f:
pkg = yaml.load(f.read(), Loader=yaml.FullLoader)
try:
_assert_sane_package(pkg_name, pkg)
except Exception as e:
raise RuntimeError("Loading {} failed: {}".format(test_filename, e))
return pkg
def assert_package(test_filename):
pkg = load_test_package(test_filename)
for test_case in pkg['tests']:
sbp = SBP.unpack(base64.standard_b64decode(test_case['raw_packet']))
_assert_sbp(sbp, test_case['sbp'])
_assert_msg(dispatch(sbp), test_case['msg'])
_assert_msg_roundtrip(dispatch(sbp), test_case['raw_packet'])
_assert_msg_roundtrip_json(dispatch(sbp), test_case['raw_json'])
_assert_materialization(test_case['msg'], sbp, test_case['raw_json'])
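# A minimal usage sketch (the YAML filename below is hypothetical; the real
# per-message specs live alongside this module and are picked up by the test
# runner):
#
#     if __name__ == "__main__":
#         assert_package(os.path.join(HERE, "test_acquisition.yaml"))
#
# assert_package() round-trips every case in the spec through binary and JSON
# and checks the materialized message fields against the expected values.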
| 32.189091
| 95
| 0.703344
| 257
| 0.029033
| 0
| 0
| 0
| 0
| 0
| 0
| 3,603
| 0.407027
|
a2bd09228744e69177dc7286c70d7e20bc69a6fd
| 2,453
|
py
|
Python
|
train_arg_parser.py
|
DaringDane/Image-Classifier
|
6e6a835bd72453c1ee9c5b57cf4959fc9011971b
|
[
"MIT"
] | null | null | null |
train_arg_parser.py
|
DaringDane/Image-Classifier
|
6e6a835bd72453c1ee9c5b57cf4959fc9011971b
|
[
"MIT"
] | null | null | null |
train_arg_parser.py
|
DaringDane/Image-Classifier
|
6e6a835bd72453c1ee9c5b57cf4959fc9011971b
|
[
"MIT"
] | null | null | null |
import argparse
'''
Example commands for the command line:
- Select directory to save checkpoints in: python train.py data_directory --save_dir save_directory
- Select training architecture: python train.py data_directory --arch "densenet121"
- Set hyperparameters: python train.py data_directory --learnrate 0.005 --hidden_units 2048 --epochs 8
- Use GPU for training: python train.py data_directory --gpu
'''
def get_input_arguments():
parser = argparse.ArgumentParser(description='Set hyperparameters, architecture, train and validation datasets, and save state of the trained image classifier',
usage="python train.py flowers/ --gpu --arch densenet121 --learnrate 0.001 --hidden_units 4096 --epochs 5",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('data_directory', type=str, action='store', help='Directory to train on')
parser.add_argument('--category_to_name', type=str, action='store', dest='category_json', default='cat_to_name.json', help='Maps categories/classes to names from the data')
parser.add_argument('--arch', type=str, dest='arch', default='vgg16', help='Pretrained model architecture options include: vgg16, densenet121, alexnet')
# store the checkpoint in a directory
parser.add_argument('--save_dir', action='store', dest='save_dir', default='checkpoint', type=str, help='Saves checkpoint in the current directory, or write directory to save into')
#parser.add_argument('--save_dir_name', action='store', dest='save_dir_name', default='checkpoint', type=str, help='Choose name for save point directory')
# Set command access to setting Hyperparameters for training with defaults of lr=0.002, Hiddenlayer=4096 units, epochs=5
hyperparameters = parser.add_argument_group('hyperparameters')
    hyperparameters.add_argument('--learnrate', type=float, action='store', default=0.002, help='Learning rate')
hyperparameters.add_argument('--hidden_units', type=int, action='store', default=4096, help='Units in hidden layer')
hyperparameters.add_argument('--epochs', type=int, action='store', default=5, help='Number of epochs')
# activate gpu processing
parser.add_argument('--gpu', action='store_true', dest='use_gpu', default=False, help='run training on gpu or cpu')
in_args = parser.parse_args()
return in_args
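# A minimal sketch of how train.py might consume these arguments (train.py is
# not shown here, so everything below is illustrative only):
#
#     from train_arg_parser import get_input_arguments
#
#     args = get_input_arguments()
#     device = "gpu" if args.use_gpu else "cpu"
#     print(args.data_directory, args.arch, args.learnrate,
#           args.hidden_units, args.epochs, device)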
| 62.897436
| 185
| 0.721565
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,529
| 0.623318
|
a2bd0fd34368e4604144c29b0f69a07f59c44be6
| 12,878
|
py
|
Python
|
ckanext-hdx_org_group/ckanext/hdx_org_group/tests/test_controller/test_member_controller.py
|
alexandru-m-g/hdx-ckan
|
647f1f23f0505fa195601245b758edcaf4d25985
|
[
"Apache-2.0"
] | null | null | null |
ckanext-hdx_org_group/ckanext/hdx_org_group/tests/test_controller/test_member_controller.py
|
alexandru-m-g/hdx-ckan
|
647f1f23f0505fa195601245b758edcaf4d25985
|
[
"Apache-2.0"
] | null | null | null |
ckanext-hdx_org_group/ckanext/hdx_org_group/tests/test_controller/test_member_controller.py
|
alexandru-m-g/hdx-ckan
|
647f1f23f0505fa195601245b758edcaf4d25985
|
[
"Apache-2.0"
] | null | null | null |
'''
Created on Jun 23, 2015
@author: alexandru-m-g
'''
import logging
import mock
import ckan.model as model
import ckan.common as common
import ckan.lib.helpers as h
import ckan.lib.mailer as mailer
import ckanext.hdx_users.controllers.mailer as hdx_mailer
import ckanext.hdx_theme.tests.hdx_test_base as hdx_test_base
import ckanext.hdx_theme.tests.mock_helper as mock_helper
import ckanext.hdx_org_group.controllers.member_controller as member_controller
import ckanext.hdx_org_group.tests as org_group_base
c = common.c
log = logging.getLogger(__name__)
q = None
sort = None
c_dict = None
invited_user = None
class TestMembersController(org_group_base.OrgGroupBaseWithIndsAndOrgsTest):
@classmethod
def _load_plugins(cls):
hdx_test_base.load_plugin('ytp_request hdx_org_group hdx_theme')
@classmethod
def _create_test_data(cls):
super(TestMembersController, cls)._create_test_data(create_datasets=False, create_members=True)
def setup(self):
global q, sort, c_dict
q = None
sort = None
c_dict = None
user_invite_params = None
def _populate_member_names(self, members, users):
ret = [next(user['fullname'] for user in users if user['id'] == member[0]) for member in members]
return ret
@mock.patch('ckanext.hdx_theme.helpers.helpers.c')
@mock.patch('ckanext.hdx_org_group.helpers.organization_helper.c')
@mock.patch('ckanext.hdx_org_group.controllers.member_controller.c')
def test_members(self, member_c, org_helper_c, theme_c):
global sort, q
test_username = 'testsysadmin'
mock_helper.populate_mock_as_c(member_c, test_username)
mock_helper.populate_mock_as_c(org_helper_c, test_username)
mock_helper.populate_mock_as_c(theme_c, test_username)
context = {
'model': model, 'session': model.Session, 'user': 'testsysadmin'}
org = self._get_action('organization_show')(context, {'id': 'hdx-test-org'})
# By default the users should be sorted alphabetically asc
user_controller = MockedHDXOrgMemberController()
user_controller.members('hdx-test-org')
user_list = self._populate_member_names(c_dict['members'], org['users'])
for idx, val in enumerate(user_list):
if idx < len(user_list) - 1 and user_list[idx] and user_list[idx + 1]:
assert user_list[idx] < user_list[idx + 1], "{} should be before {}". \
format(user_list[idx], user_list[idx + 1])
# Sorting alphabetically desc
sort = 'title desc'
user_controller.members('hdx-test-org')
user_list = self._populate_member_names(c_dict['members'], org['users'])
for idx, val in enumerate(user_list):
if idx < len(user_list) - 1 and user_list[idx] and user_list[idx + 1]:
assert user_list[idx] > user_list[idx + 1], "{} should be before {}". \
format(user_list[idx], user_list[idx + 1])
        # Filtering by a search query
q = 'anna'
user_controller.members('hdx-test-org')
user_list = self._populate_member_names(c_dict['members'], org['users'])
assert len(user_list) == 1, "Only one user should be found for query"
assert user_list[0] == 'Anna Anderson2'
@mock.patch('ckanext.hdx_theme.helpers.helpers.c')
@mock.patch('ckanext.hdx_org_group.helpers.organization_helper.c')
@mock.patch('ckanext.hdx_org_group.controllers.member_controller.c')
def test_members_delete_add(self, member_c, org_helper_c, theme_c):
test_username = 'testsysadmin'
mock_helper.populate_mock_as_c(member_c, test_username)
mock_helper.populate_mock_as_c(org_helper_c, test_username)
mock_helper.populate_mock_as_c(theme_c, test_username)
url = h.url_for(
controller='ckanext.hdx_org_group.controllers.member_controller:HDXOrgMemberController',
action='member_delete',
id='hdx-test-org'
)
self.app.post(url, params={'user': 'annaanderson2'}, extra_environ={"REMOTE_USER": "testsysadmin"})
context = {
'model': model, 'session': model.Session, 'user': 'testsysadmin'}
org = self._get_action('organization_show')(context, {'id': 'hdx-test-org'})
user_controller = MockedHDXOrgMemberController()
user_controller.members('hdx-test-org')
user_list = self._populate_member_names(c_dict['members'], org['users'])
deleted_length = len(user_list)
assert 'Anna Anderson2' not in user_list
url = h.url_for(
controller='ckanext.hdx_org_group.controllers.member_controller:HDXOrgMemberController',
action='member_new',
id='hdx-test-org'
)
self.app.post(url, params={'username': 'annaanderson2', 'role': 'editor'},
extra_environ={"REMOTE_USER": "testsysadmin"})
org = self._get_action('organization_show')(context, {'id': 'hdx-test-org'})
assert len(org['users']) == deleted_length + 1, 'Number of members should have increased by 1'
member_anna = next((user for user in org['users'] if user['name'] == 'annaanderson2'), None)
assert member_anna, 'User annaanderson2 needs to be a member of the org'
assert member_anna['capacity'] == 'editor', 'User annaanderson2 needs to be an editor'
# def test_members_invite(self):
#
# original_send_invite = mailer.send_invite
#
# def mock_send_invite(user):
# global invited_user
# invited_user = user
#
# mailer.send_invite = mock_send_invite
#
# context = {
# 'model': model, 'session': model.Session, 'user': 'testsysadmin'}
# url = h.url_for(
# controller='ckanext.hdx_org_group.controllers.member_controller:HDXOrgMemberController',
# action='member_new',
# id='hdx-test-org'
# )
# self.app.post(url, params={'email': 'hdxtestuser123@test.test', 'role': 'editor'},
# extra_environ={"REMOTE_USER": "testsysadmin"})
# org = self._get_action('organization_show')(context, {'id': 'hdx-test-org'})
#
# new_member = next((user for user in org['users'] if 'hdxtestuser123' in user['name']), None)
# assert new_member, 'Invited user needs to be a member of the org'
# assert new_member['capacity'] == 'editor', 'Invited user needs to be an editor'
#
# mailer.send_invite = original_send_invite
#
# @mock.patch('ckanext.hdx_theme.helpers.helpers.c')
# @mock.patch('ckanext.hdx_org_group.helpers.organization_helper.c')
# @mock.patch('ckanext.hdx_org_group.controllers.member_controller.c')
# def test_bulk_members_invite(self, member_c, org_helper_c, theme_c):
# test_username = 'testsysadmin'
# mock_helper.populate_mock_as_c(member_c, test_username)
# mock_helper.populate_mock_as_c(org_helper_c, test_username)
# mock_helper.populate_mock_as_c(theme_c, test_username)
# original_send_invite = mailer.send_invite
#
# def mock_send_invite(user):
# global invited_user
# invited_user = user
#
# mailer.send_invite = mock_send_invite
# context = {'model': model, 'session': model.Session, 'user': test_username}
#
# # removing one member from organization
# url = h.url_for(
# controller='ckanext.hdx_org_group.controllers.member_controller:HDXOrgMemberController',
# action='member_delete',
# id='hdx-test-org'
# )
# self.app.post(url, params={'user': 'johndoe1'}, extra_environ={"REMOTE_USER": test_username})
#
# org = self._get_action('organization_show')(context, {'id': 'hdx-test-org'})
# user_controller = MockedHDXOrgMemberController()
# user_controller.members('hdx-test-org')
# user_list = self._populate_member_names(c_dict['members'], org['users'])
# deleted_length = len(user_list)
# assert 'John Doe1' not in user_list
#
# # bulk adding members
# url = h.url_for(
# controller='ckanext.hdx_org_group.controllers.member_controller:HDXOrgMemberController',
# action='bulk_member_new',
# id='hdx-test-org'
# )
#
# self.app.post(url, params={'emails': 'janedoe3,johndoe1,dan@k.ro', 'role': 'editor'},
# extra_environ={"REMOTE_USER": test_username})
# org = self._get_action('organization_show')(context, {'id': 'hdx-test-org'})
#
# assert len(org['users']) == deleted_length + 2, 'Number of members should have increased by 2'
# new_member = next((user for user in org['users'] if 'johndoe1' in user['name']), None)
# assert new_member, 'Invited user needs to be a member of the org'
# assert new_member['capacity'] == 'editor', 'Invited user needs to be an editor'
#
# # making john doe1 a member back
# self.app.post(url, params={'emails': 'johndoe1', 'role': 'member'},
# extra_environ={"REMOTE_USER": test_username})
# org = self._get_action('organization_show')(context, {'id': 'hdx-test-org'})
# new_member = next((user for user in org['users'] if 'johndoe1' in user['name']), None)
# assert new_member, 'Invited user needs to be a member of the org'
# assert new_member['capacity'] == 'member', 'Invited user needs to be an member'
#
# mailer.send_invite = original_send_invite
@mock.patch('ckanext.ytp.request.controller.request')
@mock.patch('ckanext.hdx_theme.helpers.helpers.c')
@mock.patch('ckanext.hdx_org_group.helpers.organization_helper.c')
@mock.patch('ckanext.hdx_org_group.controllers.member_controller.c')
def test_request_membership(self, member_c, org_helper_c, theme_c, request_c):
test_sysadmin = 'testsysadmin'
test_username = 'johndoe1'
mock_helper.populate_mock_as_c(member_c, test_sysadmin)
mock_helper.populate_mock_as_c(org_helper_c, test_sysadmin)
mock_helper.populate_mock_as_c(theme_c, test_sysadmin)
original_send_invite = mailer.send_invite
original_mail_recipient = hdx_mailer._mail_recipient
def mock_send_invite(user):
global invited_user
invited_user = user
def mock_mail_recipient(recipients_list, subject, body, sender_name, bcc_recipients_list=None, footer=None,
headers={}, sender_email=None):
return True
mailer.send_invite = mock_send_invite
hdx_mailer._mail_recipient = mock_mail_recipient
context = {'model': model, 'session': model.Session, 'user': test_sysadmin}
# removing one member from organization
url = h.url_for(
controller='ckanext.hdx_org_group.controllers.member_controller:HDXOrgMemberController',
action='member_delete',
id='hdx-test-org'
)
self.app.post(url, params={'user': 'johndoe1'}, extra_environ={"REMOTE_USER": test_sysadmin})
org = self._get_action('organization_show')(context, {'id': 'hdx-test-org'})
user_controller = MockedHDXOrgMemberController()
user_controller.members('hdx-test-org')
user_list = self._populate_member_names(c_dict['members'], org['users'])
assert 'John Doe1' not in user_list
# user update - email
self._get_action('user_update')(context, {'id': test_sysadmin, 'email': 'test@sys.admin'})
usr_dict = self._get_action('user_show')(context, {'id': test_sysadmin})
# send a membership request
mock_helper.populate_mock_as_c(member_c, test_username)
mock_helper.populate_mock_as_c(org_helper_c, test_username)
mock_helper.populate_mock_as_c(theme_c, test_username)
request_c.referer = '/organization/wfp'
url = h.url_for('member_request_new')
ret_page = self.app.post(url, params={'organization': 'hdx-test-org', 'role': 'member', 'save': 'save',
'message': 'add me to your organization'},
extra_environ={"REMOTE_USER": test_username})
mailer.send_invite = original_send_invite
hdx_mailer._mail_recipient = original_mail_recipient
class MockedHDXOrgMemberController(member_controller.HDXOrgMemberController):
def _find_filter_params(self):
return q, sort
def _set_c_params(self, params):
global c_dict
c_dict = params
def _get_context(self):
context = {'model': model, 'session': model.Session, 'user': 'testsysadmin'}
return context
def _render_template(self, template_name, group_type):
pass
| 43.802721
| 115
| 0.656701
| 12,252
| 0.95139
| 0
| 0
| 7,161
| 0.556065
| 0
| 0
| 6,070
| 0.471346
|
a2bd228991060d0a29b89ddd1eb606ca0ff8fed6
| 1,044
|
py
|
Python
|
bulletin/factories.py
|
ralphqq/ph-earthquake-dashboard
|
b9a599e92844b13fd1f7e3f54e087ec0ab6bc53a
|
[
"MIT"
] | null | null | null |
bulletin/factories.py
|
ralphqq/ph-earthquake-dashboard
|
b9a599e92844b13fd1f7e3f54e087ec0ab6bc53a
|
[
"MIT"
] | 7
|
2020-06-05T20:14:42.000Z
|
2022-03-02T15:00:30.000Z
|
bulletin/factories.py
|
ralphqq/ph-earthquake-dashboard
|
b9a599e92844b13fd1f7e3f54e087ec0ab6bc53a
|
[
"MIT"
] | null | null | null |
from datetime import timedelta
import random
from django.utils import timezone
import factory
class BulletinFactory(factory.DjangoModelFactory):
class Meta:
model = 'bulletin.Bulletin'
url = factory.Sequence(lambda n: f'https://www.sitepage.com/{n}')
latitude = factory.Faker(
'pydecimal',
right_digits=2,
min_value=-90,
max_value=90
)
longitude = factory.Faker(
'pydecimal',
right_digits=2,
min_value=-180,
max_value=180
)
depth = factory.Faker(
'pydecimal',
right_digits=1,
min_value=0,
max_value=500
)
magnitude = factory.Faker(
'pydecimal',
right_digits=1,
min_value=1,
max_value=10
)
location = factory.Faker('address')
@factory.sequence
def time_of_quake(n):
"""Creates sequence of datetime obj 30 minutes apart."""
td = timedelta(minutes=30)
return timezone.now() - (n * td)
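# A minimal usage sketch (assumes a configured Django test environment for the
# bulletin app; create_batch() and build() are standard factory_boy helpers):
#
#     bulletins = BulletinFactory.create_batch(3)   # persisted, time_of_quake 30 minutes apart
#     draft = BulletinFactory.build()               # unsaved model instance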
| 23.2
| 70
| 0.573755
| 938
| 0.898467
| 0
| 0
| 188
| 0.180077
| 0
| 0
| 159
| 0.152299
|
a2beade77d575c19dad94b1f0e0efaa28bdb3efa
| 792
|
py
|
Python
|
figure2/Initialization.py
|
QianLab/Soft_MOCU
|
516ab0c9fffcde0542576c5c9b20132880ea2dc1
|
[
"MIT"
] | 1
|
2021-02-24T19:33:32.000Z
|
2021-02-24T19:33:32.000Z
|
figure2/Initialization.py
|
QianLab/Soft_MOCU
|
516ab0c9fffcde0542576c5c9b20132880ea2dc1
|
[
"MIT"
] | null | null | null |
figure2/Initialization.py
|
QianLab/Soft_MOCU
|
516ab0c9fffcde0542576c5c9b20132880ea2dc1
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import numpy as np
def pz_theta_model(x, theta_r):
# x can be np.array of size 2 or a list of two np.array x1 and x2
# x = x[0]
a = theta_r[0]
b = theta_r[1]
    pyx = np.exp(x) / (1 + np.exp(x)) * 0.6 + 0.2  # average center
    cx = a * np.exp(-x**2) + b * (np.exp(-(x - 4)**2) + np.exp(-(x + 4)**2))  # additive uncertainty
# pzx_theta = np.exp(x+cx)/(1+np.exp(x+cx))
pzx_theta = pyx+cx
return pzx_theta
py_eq_z = None
def Initialization(xnum = 101, thetanum = 21):
xspace = np.linspace(-4, 4, xnum)
yspace = None
alist = np.linspace(-0.1, 0.1, thetanum)
blist = np.linspace(-0.2, 0.2, thetanum)
thetalist = [np.array([a, b]) for a in alist for b in blist]
return xspace, yspace, thetalist
classnum = 2
multiclass = False
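# A small self-check of the pieces above (added sketch): evaluate the
# additive-uncertainty model on the grid for every candidate theta. With the
# defaults xnum=101 and thetanum=21 there are 21*21 = 441 (a, b) pairs.
if __name__ == '__main__':
    xspace, yspace, thetalist = Initialization()
    pz = np.array([pz_theta_model(xspace, theta_r) for theta_r in thetalist])
    print(pz.shape)  # expected: (441, 101)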
| 26.4
| 87
| 0.597222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 184
| 0.232323
|
a2bef8254bccb013d26eb0c1c08e9ae8163682c2
| 14,153
|
py
|
Python
|
plex_export_watched_history.py
|
chazlarson/plex-watched-tools
|
ef3e34e733ec9555353d695ced582395bdc73480
|
[
"MIT"
] | null | null | null |
plex_export_watched_history.py
|
chazlarson/plex-watched-tools
|
ef3e34e733ec9555353d695ced582395bdc73480
|
[
"MIT"
] | null | null | null |
plex_export_watched_history.py
|
chazlarson/plex-watched-tools
|
ef3e34e733ec9555353d695ced582395bdc73480
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# python3 -m pip install --force -U --user PlexAPI
"""
Metadata to be handled:
* Audiobooks
* Playlists -- https://github.com/pkkid/python-plexapi/issues/551
"""
import copy
import json
import time
import logging
import collections
from urllib.parse import urlparse
import plexapi
import plexapi.video
import plexapi.myplex
import plexapi.server
import plexapi.library
import plexapi.exceptions
PLEX_URL = ""
PLEX_TOKEN = ""
WATCHED_HISTORY = ""
LOG_FILE = ""
BATCH_SIZE = 10000
PLEX_REQUESTS_SLEEP = 0
CHECK_USERS = [
]
LOG_FORMAT = \
"[%(name)s][%(process)05d][%(asctime)s][%(levelname)-8s][%(funcName)-15s]" \
" %(message)s"
LOG_DATE_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
LOG_LEVEL = logging.INFO
plexapi.server.TIMEOUT = 3600
plexapi.server.X_PLEX_CONTAINER_SIZE = 2500
_SHOW_RATING_KEY_GUID_MAPPING = {}
_MOVIE_RATING_KEY_GUID_MAPPING = {}
_EPISODE_RATING_KEY_GUID_MAPPING = {}
logger = logging.getLogger("PlexWatchedHistoryExporter")
SHOW_HISTORY = {
'guid': "",
'title': "",
'watched': False,
'userRating': "",
'episodes': collections.defaultdict(lambda: copy.deepcopy(EPISODE_HISTORY))
}
MOVIE_HISTORY = {
'guid': "",
'title': "",
'watched': False,
'viewCount': 0,
'viewOffset': 0,
'userRating': ""
}
EPISODE_HISTORY = {
'guid': "",
'title': "",
'watched': False,
'viewCount': 0,
'viewOffset': 0,
'userRating': ""
}
def _get_config_str(key):
return plexapi.CONFIG.get(key, default="", cast=str).strip("'").strip('"').strip()
def _load_config():
global PLEX_URL, PLEX_TOKEN, WATCHED_HISTORY, CHECK_USERS, LOG_FILE, LOG_LEVEL
if PLEX_URL == "":
PLEX_URL = _get_config_str("sync.src_url")
if PLEX_TOKEN == "":
PLEX_TOKEN = _get_config_str("sync.src_token")
if WATCHED_HISTORY == "":
WATCHED_HISTORY = _get_config_str("sync.watched_history")
if len(CHECK_USERS) == 0:
config_check_users = _get_config_str("sync.check_users").split(",")
CHECK_USERS = [user.strip() for user in config_check_users if user]
if LOG_FILE == "":
LOG_FILE = _get_config_str("sync.export_log_file")
debug = plexapi.utils.cast(bool, _get_config_str("sync.debug").lower())
if debug:
LOG_LEVEL = logging.DEBUG
def _setup_logger():
logging.Formatter.converter = time.gmtime
logging.raiseExceptions = False
logger.setLevel(logging.DEBUG)
logger.handlers = []
logger.propagate = False
detailed_formatter = logging.Formatter(fmt=LOG_FORMAT,
datefmt=LOG_DATE_FORMAT)
file_handler = logging.FileHandler(filename=LOG_FILE, mode="a+")
file_handler.setFormatter(detailed_formatter)
file_handler.setLevel(LOG_LEVEL)
logger.addHandler(file_handler)
def _cast(func, value):
if value is None:
return func()
if func == str:
return str(value)
if not isinstance(value, func):
raise ValueError(value)
return value
def _get_guid(rating_key_guid_mapping, item):
if item.ratingKey in rating_key_guid_mapping:
item_guid = rating_key_guid_mapping[item.ratingKey]
else:
item_guid = item.guid
rating_key_guid_mapping[item.ratingKey] = item_guid
return item_guid
def _tv_item_iterator(plex_section, start, batch_size):
libtype = "show"
# Get shows that have been fully watched
watched_kwargs = {'show.unwatchedLeaves': False}
items = plex_section.search(
libtype=libtype,
container_start=start,
maxresults=batch_size,
**watched_kwargs
)
for item in items:
logger.debug(f"Fully Watched Show: {item.title}")
yield item
    # Get shows that have not been fully watched but have episodes that have been fully watched
# Searching by episode.viewCount instead of show.viewCount to handle shows with
# episodes that were watched and then unwatched
partially_watched_kwargs = {'show.unwatchedLeaves': True, 'episode.viewCount!=': 0}
items = plex_section.search(
libtype=libtype,
container_start=start,
maxresults=batch_size,
**partially_watched_kwargs
)
for item in items:
logger.debug(f"Partially Watched Show with Fully Watched Episodes: {item.title}")
yield item
    # Get shows that have not been fully watched and have no episodes that have been fully
# watched but have episodes that are in-progress
partially_watched_kwargs = {'show.unwatchedLeaves': True, 'show.viewCount=': 0,
'episode.inProgress': True}
items = plex_section.search(
libtype=libtype,
container_start=start,
maxresults=batch_size,
**partially_watched_kwargs
)
for item in items:
logger.debug(f"Partially Watched Show with Partially Watched Episodes: {item.title}")
yield item
def _movie_item_iterator(plex_section, start, batch_size):
libtype = "movie"
watched_kwargs = {'movie.viewCount!=': 0}
partially_watched_kwargs = {'movie.viewCount=': 0, 'movie.inProgress': True}
items = plex_section.search(
libtype=libtype,
container_start=start,
maxresults=batch_size,
**watched_kwargs
)
for item in items:
yield item
items = plex_section.search(
libtype=libtype,
container_start=start,
maxresults=batch_size,
**partially_watched_kwargs
)
for item in items:
yield item
def _batch_get(plex_section, batch_size):
start = 0
while True:
if start >= plex_section.totalSize:
break
if isinstance(plex_section, plexapi.library.ShowSection):
yield from _tv_item_iterator(plex_section, start, batch_size)
elif isinstance(plex_section, plexapi.library.MovieSection):
yield from _movie_item_iterator(plex_section, start, batch_size)
else:
logger.warning(f"Skipping Un-processable Section: {plex_section.title} [{plex_section.type}]")
return
        start = start + batch_size
def _get_movie_section_watched_history(section, movie_history):
movies_watched_history = _batch_get(section, BATCH_SIZE)
for movie in movies_watched_history:
movie_guid = _get_guid(_MOVIE_RATING_KEY_GUID_MAPPING, movie)
# TODO: Check if reload is necessary
# movie.reload(checkFiles=False)
if urlparse(movie_guid).scheme != 'plex':
continue
if movie.isWatched:
logger.debug(f"Fully Watched Movie: {movie.title} [{movie_guid}]")
movie_history[movie_guid].update({
'guid': _cast(str, movie_guid),
'title': _cast(str, movie.title),
'watched': _cast(bool, movie.isWatched),
'viewCount': _cast(int, movie.viewCount),
'viewOffset': _cast(int, movie.viewOffset),
'userRating': _cast(str, movie.userRating)
})
else:
logger.debug(f"Partially Watched Movie: {movie.title} [{movie_guid}]")
existing_watched = movie_history[movie_guid]['watched']
# Prefer fully watched over partially watched entries
# TODO: Check for userRating & viewOffset too, however this shouldn't ever be
# different since Plex tracks the item via the GUID across libraries/sections
if existing_watched:
continue
movie_history[movie_guid].update({
'guid': _cast(str, movie_guid),
'title': _cast(str, movie.title),
'watched': _cast(bool, movie.isWatched),
'viewCount': _cast(int, movie.viewCount),
'viewOffset': _cast(int, movie.viewOffset),
'userRating': _cast(str, movie.userRating)
})
def _get_show_section_watched_history(section, show_history):
shows_watched_history = _batch_get(section, BATCH_SIZE)
for show in shows_watched_history:
show_guid = _get_guid(_SHOW_RATING_KEY_GUID_MAPPING, show)
# TODO: Check if reload is necessary
# show.reload(checkFiles=False)
if urlparse(show_guid).scheme != 'plex':
continue
show_item_history = show_history[show_guid]
if show.isWatched:
logger.debug(f"Fully Watched Show: {show.title} [{show_guid}]")
show_item_history.update({
'guid': _cast(str, show_guid),
'title': _cast(str, show.title),
'watched': _cast(bool, show.isWatched),
'userRating': _cast(str, show.userRating),
})
for episode in show.episodes(viewCount__gt=0):
episode_guid = _get_guid(_EPISODE_RATING_KEY_GUID_MAPPING, episode)
logger.debug(f"Fully Watched Episode: {episode.title} [{episode_guid}]")
show_item_history['episodes'][episode_guid].update({
'guid': _cast(str, episode_guid),
'title': _cast(str, episode.title),
'watched': _cast(bool, episode.isWatched),
'viewCount': _cast(int, episode.viewCount),
'viewOffset': _cast(int, episode.viewOffset),
'userRating': _cast(str, episode.userRating),
})
else:
logger.debug(f"Partially Watched Show: {show.title} [{show_guid}]")
# Prefer fully watched over partially watched entries
# TODO: Check for userRating & viewOffset too, however this shouldn't ever be
# different since Plex tracks the item via the GUID across libraries/sections
existing_watched = show_item_history['watched']
if existing_watched:
continue
show_item_history.update({
'guid': _cast(str, show_guid),
'title': _cast(str, show.title),
'watched': _cast(bool, show.isWatched),
'userRating': _cast(str, show.userRating),
})
for episode in show.episodes(viewCount__gt=0):
episode_guid = _get_guid(_EPISODE_RATING_KEY_GUID_MAPPING, episode)
logger.debug(f"Fully Watched Episode: {episode.title} [{episode_guid}]")
show_item_history['episodes'][episode_guid].update({
'guid': _cast(str, episode_guid),
'title': _cast(str, episode.title),
'watched': _cast(bool, episode.isWatched),
'viewCount': _cast(int, episode.viewCount),
'viewOffset': _cast(int, episode.viewOffset),
'userRating': _cast(str, episode.userRating),
})
for episode in show.episodes(viewOffset__gt=0):
episode_guid = _get_guid(_EPISODE_RATING_KEY_GUID_MAPPING, episode)
logger.debug(f"Partially Watched Episode: {episode.title} [{episode_guid}]")
show_item_history['episodes'][episode_guid].update({
'guid': _cast(str, episode_guid),
'title': _cast(str, episode.title),
'watched': _cast(bool, episode.isWatched),
'viewCount': _cast(int, episode.viewCount),
'viewOffset': _cast(int, episode.viewOffset),
'userRating': _cast(str, episode.userRating),
})
show_history[show_guid] = show_item_history
def _get_user_server_watched_history(server):
show_history = collections.defaultdict(lambda: copy.deepcopy(SHOW_HISTORY))
movie_history = collections.defaultdict(lambda: copy.deepcopy(MOVIE_HISTORY))
music_history = {}
for section in server.library.sections():
if section.type == "movie":
_get_movie_section_watched_history(section, movie_history)
elif section.type == "show":
_get_show_section_watched_history(section, show_history)
else:
logger.warning(f"Skipping Un-processable Section: {section.title} [{section.type}]")
user_history = {
'show': show_history,
'movie': movie_history,
'music': music_history,
}
return user_history
def main():
_load_config()
_setup_logger()
plex_server = plexapi.server.PlexServer(PLEX_URL, PLEX_TOKEN, timeout=300)
plex_account = plex_server.myPlexAccount()
watched_history = {}
logger.info(f"Starting Export")
plex_users = plex_account.users()
# Owner will be processed separately
logger.info(f"Total Users: {len(plex_users) + 1}")
if not (len(CHECK_USERS) > 0 and plex_account.username not in CHECK_USERS and
plex_account.email not in CHECK_USERS):
logger.info(f"Processing Owner: {plex_account.username}")
user_history = _get_user_server_watched_history(plex_server)
user_history['username'] = plex_account.username
watched_history[plex_account.username] = user_history
for user_index, user in enumerate(plex_users):
if (len(CHECK_USERS) > 0 and user.username not in CHECK_USERS and
user.email not in CHECK_USERS):
continue
logger.info(f"Processing User: {user.username}")
user_server_token = user.get_token(plex_server.machineIdentifier)
try:
user_server = plexapi.server.PlexServer(PLEX_URL, user_server_token, timeout=300)
except plexapi.exceptions.Unauthorized:
# This should only happen when no libraries are shared
logger.warning(f"Skipped User with No Libraries Shared: {user.username}")
continue
user_history = _get_user_server_watched_history(user_server)
user_history['username'] = user.username
watched_history[user.username] = user_history
with open(WATCHED_HISTORY, "w") as watched_history_file:
json.dump(watched_history, watched_history_file, sort_keys=True, indent=4)
logger.info(f"Completed Export")
if __name__ == "__main__":
main()
| 33.458629
| 106
| 0.637603
| 0
| 0
| 2,838
| 0.200523
| 0
| 0
| 0
| 0
| 3,268
| 0.230905
|
a2c10a54ceee9affb03ce15e17008ad6f880f4e9
| 414
|
py
|
Python
|
src/models/product.py
|
superxuu/fastapi_pony_2
|
297ef01cc009a40af891593018565fe5b06b4ee8
|
[
"MIT"
] | 2
|
2020-06-17T09:53:13.000Z
|
2020-10-23T18:20:13.000Z
|
src/models/product.py
|
superxuu/fastapi_pony_2
|
297ef01cc009a40af891593018565fe5b06b4ee8
|
[
"MIT"
] | null | null | null |
src/models/product.py
|
superxuu/fastapi_pony_2
|
297ef01cc009a40af891593018565fe5b06b4ee8
|
[
"MIT"
] | null | null | null |
from datetime import datetime
from decimal import Decimal
from src.models import db, Required, Optional
class Product(db.Entity):
name = Required(str, unique=True)
price = Required(Decimal)
description = Optional(str)
create_time = Required(datetime, default=datetime.now, precision=6)
update_time = Optional(datetime)
def before_update(self):
self.update_time = datetime.now()
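# A minimal usage sketch (assumes `db` from src.models is bound and mapped to a
# database elsewhere, as Pony ORM requires before entities can be used):
#
#     from pony.orm import db_session
#
#     with db_session:
#         Product(name="widget", price=Decimal("9.99"))
#     with db_session:
#         Product.get(name="widget").price = Decimal("12.50")
#         # before_update() fires when the change is flushed and stamps update_time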
| 25.875
| 71
| 0.729469
| 306
| 0.73913
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
a2c13ed5fd70470e8dadf6ddcecfc8a4c03d41b3
| 32,397
|
py
|
Python
|
tgthemer/themer.py
|
eskilop/TgThemer-py
|
3ebb7d1c3c78c32754cee82aa92a6c97ac18f27f
|
[
"MIT"
] | 1
|
2020-05-12T21:33:56.000Z
|
2020-05-12T21:33:56.000Z
|
tgthemer/themer.py
|
eskilop/TgThemer-py
|
3ebb7d1c3c78c32754cee82aa92a6c97ac18f27f
|
[
"MIT"
] | null | null | null |
tgthemer/themer.py
|
eskilop/TgThemer-py
|
3ebb7d1c3c78c32754cee82aa92a6c97ac18f27f
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
from .color import Color
import shutil
import os
std_none = "#FF000000"
class Themer:
def __init__(self, primary=std_none, secondary=std_none, accent=std_none,
mode=None, ttype="dark"):
self.primary = Color(primary)
self.ttype = ttype
if mode is None:
self.mode = 'lighten' if self.ttype == 'dark' else 'darken'
else:
self.mode = mode
if secondary == std_none:
self.secondary = self.primary.lighten(0.5) \
if self.mode == 'lighten' \
else self.primary.lighten(-0.5)
else:
self.secondary = Color(secondary)
self.tertiary = self.secondary.lighten(0.75) \
if self.mode == 'lighten' \
else self.secondary.lighten(-0.75)
self.accent = Color(accent)
self.contents = ""
self.theme_dict = {}
if not os.path.exists("out"):
os.mkdir('out', 0o755)
def _read_file(self, inputfile):
source = open(inputfile+'.attheme', 'r')
contents = source.read()
source.close()
self.contents = contents
self.theme_dict = self._to_dict()
def _to_file(self, contents, outfile):
result = open(outfile+'.attheme', 'w')
result.write(contents)
result.close()
def _accent_text(self, color):
mid = Color("#FF808080")
if color.argb > mid.argb:
return Color('#FF000000')
elif color.argb < mid.argb:
return Color('#FFFFFFFF')
else: # either one will go
return Color('#FF000000')
def _to_string(self, content_dict):
result = ""
for k, v in content_dict.items():
result += "{}={}\n".format(k, v)
return result
def _to_dict(self):
result_dict = {}
pairs = self.contents.split('\n')
for pair in pairs:
if pair != '':
kvpair = pair.split('=')
result_dict[kvpair[0]] = kvpair[1]
return result_dict
def _transform_dict(self, fn):
result = {}
for k, v in self.theme_dict.items():
result[k] = fn(v)
return result
def clear(self):
shutil.rmtree('out', ignore_errors=True)
@property
def human_dict(self):
return self._transform_dict(
lambda x: Color(int(x)).hex
)
@property
def telegram_dict(self):
return self._transform_dict(
lambda x: Color(x).sint
)
@property
def telegram_string(self):
return self._to_string(self.telegram_dict)
@property
def human_string(self):
return self._to_string(self.human_dict)
def generate_android(self, custom=None, out=None):
source = "sources/android/source_dark" \
if self.ttype == 'dark' \
else "sources/android/source_light"
self._read_file(source)
self.theme_dict = self.human_dict
if custom is not None:
for k, v in custom.items():
self.theme_dict[k] = v
else:
def set(key, color):
self.theme_dict[key] = color.hex
pri_text = self._accent_text(self.primary)
sec_text = self._accent_text(self.secondary)
ter_text = self._accent_text(self.tertiary)
acc_text = self._accent_text(self.accent)
acc_icon = acc_text.alpha(-0.5)
title_text = pri_text.alpha(-0.1)
msg_text = pri_text.alpha(-0.2)
info_text = pri_text.alpha(-0.75)
set('windowBackgroundWhite', self.primary)
set('actionBarDefault', self.secondary)
set('actionBarDefaultIcon', sec_text.alpha(-0.1))
set('actionBarDefaultTitle', sec_text.alpha(-0.1))
set('actionBarDefaultSelector', acc_text.alpha(-0.8))
set('actionBarDefaultSearch', sec_text.alpha(-0.2))
set('actionBarDefaultSearchPlaceholder', sec_text.alpha(-0.8))
set('actionBarActionModeDefaultIcon', sec_text.alpha(-0.1))
set('actionBarActionModeDefault', self.secondary)
set('actionBarActionModeDefaultSelector', acc_text.alpha(-0.8))
set('divider', pri_text.alpha(-0.9))
set('emptyListPlaceholder', pri_text.alpha(-0.8))
set('progressCircle', self.accent)
set('chats_nameMessage_threeLines', self.accent.alpha(-0.2))
set('chats_message', msg_text)
set('chats_actionIcon', acc_icon)
set('chats_actionBackground', self.accent)
set('avatar_text', acc_icon)
set('avatar_backgroundSaved', self.accent)
set('chats_unreadCounter', self.accent)
set('chats_unreadCounterText', acc_icon)
set('chats_name', title_text)
set('chats_secretName', self.accent.alpha(-0.15))
set('chats_secretIcon', self.accent.alpha(-0.15))
# set('chats_draft', )
set('chats_pinnedIcon', pri_text.alpha(-0.5))
set('chats_message_threeLines', msg_text)
set('chats_nameMessage', self.accent.alpha(-0.25))
set('chats_attachMessage', msg_text)
set('chats_nameArchived', self.accent.alpha(-0.25))
set('chats_messageArchived', msg_text)
set('chats_actionMessage', msg_text)
set('chats_date', msg_text)
set('chats_pinnedOverlay', Color("#FF000000").alpha(-0.75))
# set('chats_tabletSelectedOverlay')
set('chats_sentCheck', self.accent.alpha(-0.2))
set('chats_sentClock', msg_text)
# set('chats_sentError') chats_sentErrorIcon
set('chats_verifiedCheck', acc_icon)
set('chats_verifiedBackground', self.accent.alpha(-0.2))
set('chats_muteIcon', msg_text)
set('chats_mentionIcon', acc_text)
set('chats_archiveBackground', self.accent)
set('chats_archiveIcon', acc_text)
set('chats_menuName', self.accent.alpha(-0.2))
set('chats_menuPhone', info_text)
# set('chats_menuPhoneCats', ) 'chats_menuCloudBackgroundCats',
# chat_serviceBackground, chats_menuTopShadow
set('avatar_backgroundActionBarBlue', self.secondary)
set('chats_menuItemText', msg_text)
set('windowBackgroundWhiteGrayText3', info_text)
set('windowBackgroundWhiteBlueText3', self.accent.alpha(-0.2))
set('key_graySectionText', info_text)
set('windowBackgroundWhiteBlackText', msg_text)
set('actionBarDefaultArchived', self.secondary)
set('windowBackgroundGrayShadow', Color('#FF000000'))
set('chats_archiveText', acc_text.alpha(-0.25))
set('chats_onlineCircle', self.accent)
set('inAppPlayerTitle', msg_text)
set('inappPlayerPerformer', msg_text)
set('inappPlayerClose', pri_text)
set('returnToCallBackground', self.accent)
set('returnToCallText', acc_text)
set('undo_background', self.secondary)
set('undo_cancelColor', self.accent)
set('undo_infoColor', sec_text)
set('dialogBackground', self.secondary)
# set('dialogBackgroundGray')
set('dialogTextLink', self.accent)
set('dialogLinkSection', self.accent.alpha(-0.25))
set('dialogTextBlue', self.accent)
set('dialogTextBlue2', self.accent)
set('dialogTextBlue3', self.accent)
set('dialogTextBlue4', self.accent)
# dialogTextRed dialogTextRed2
set('dialogTextGray', sec_text)
set('dialogTextGray2', sec_text)
set('dialogTextGray3', sec_text)
set('dialogTextGray4', sec_text)
set('dialogIcon', title_text)
# set('dialogRedIcon')
set('dialogTextHint', sec_text.alpha(-0.1))
set('dialogInputField', self.secondary)
set('dialogInputFieldActivated', self.accent)
set('dialogCheckboxSquareBackground', self.accent)
set('dialogCheckboxSquareCheck', acc_text)
set('dialogCheckboxSquareUnchecked', self.primary)
set('dialogRadioBackground', self.primary)
set('dialogRadioBackgroundChecked', self.accent)
set('dialogProgressCircle', self.accent)
set('dialogButton', self.accent)
set('dialogButtonSelector', acc_text.alpha(-0.8))
set('dialogRoundCheckBox', self.accent)
set('dialogRoundCheckBoxCheck', acc_text)
set('dialogBadgeBackground', self.accent)
set('dialogBadgeText', acc_text)
set('dialogLineProgress', self.accent)
set('dialogLineProgressBackground', self.primary)
# set('dialogGrayLine',
set('dialogSearchBackground', self.secondary)
set('dialogSearchHint', info_text)
set('dialogSearchIcon', msg_text)
set('dialogSearchText', msg_text)
set('dialogFloatingIcon', acc_text)
# dialogShadowLine key_sheet_scrollUp key_sheet_other
set('player_actionBar', self.secondary)
            set('player_actionBarSelector', self.secondary.alpha(-0.7))
set('player_actionBarTitle', sec_text)
set('player_actionBarSubtitle', sec_text)
set('player_actionBarItems', sec_text.alpha(-0.1))
set('player_background', self.primary)
set('player_time', msg_text)
set('player_progressBackground', self.secondary)
# set('key_player_progressCachedBackground')
set('player_progress', self.accent)
set('player_placeholder', sec_text)
set('player_placeholderBackground', self.secondary)
set('player_button', pri_text.alpha(-0.1))
set('player_buttonActive', self.accent)
set('actionBarDefaultSubmenuItem', sec_text)
set('actionBarDefaultSubtitle', sec_text)
set('chat_muteIcon', sec_text)
set('chat_lockIcon', self.accent)
set('chat_inBubble', self.secondary)
set('chat_outBubble', self.tertiary)
set('chat_outBubbleSelected', self.tertiary)
set('chat_serviceText', pri_text)
set('chat_serviceLink', self.accent.alpha(-0.25))
set('chat_serviceIcon', self.tertiary)
set('chat_serviceBackground', self.primary)
set('chat_messageTextIn', sec_text.alpha(-0.15))
set('chat_messageTextOut', ter_text)
set('chat_messageLinkIn', self.accent)
set('chat_mediaTimeText', info_text)
set('chat_outSentCheck', self.accent)
set('chat_outSentCheckSelected', self.accent)
set('chat_mediaSentCheck', msg_text)
set('chat_mediaSentClock', msg_text)
set('chat_outViews', self.accent.alpha(-0.6))
set('chat_outViewsSelected', self.accent.alpha(-0.4))
set('chat_inViews', self.accent.alpha(-0.8))
set('chat_inViewsSelected', self.accent.alpha(-0.6))
# chat_mediaViews
set('chat_outMenu', ter_text.alpha(-0.2))
set('chat_outMenuSelected', ter_text.alpha(-0.3))
set('chat_inMenu', sec_text.alpha(-0.2))
set('chat_inMenuSelected', sec_text.alpha(-0.3))
# set('chat_mediaMenu',)
set('chat_outInstant', self.accent)
set('chat_inInstant', self.accent)
set('chat_inInstantSelected', self.accent.alpha(-0.2))
# calls_callReceivedRedIcon calls_callReceivedGreenIcon chat_SentError
# chat_sentErrorIcon
set('chat_selectedBackground', self.secondary.alpha(-0.6))
set('chat_previewDurationText', info_text)
set('chat_previewGameText', info_text)
set('chat_outPreviewInstantText', self.accent)
set('chat_outPreviewInstantSelectedText', self.accent)
# chat_secretTimeText chat_stickerNameText chat_botProgress chat_mediaTimeBackground
set('chat_botButtonText', self.accent.alpha(-0.25))
set('chat_inForwardedNameText', self.accent.alpha(-0.25))
set('chat_outForwardedNameText', self.accent.alpha(-0.15))
set('chat_inViaBotNameText', self.accent.alpha(-0.25))
set('chat_outViaBotNameText', self.accent.alpha(-0.15))
set('chat_inReplyLine', self.accent.alpha(-0.25))
set('chat_outReplyLine', self.accent.alpha(-0.15))
set('chat_inReplyNameText', self.accent.alpha(-0.25))
set('chat_outReplyNameText', self.accent.alpha(-0.15))
set('chat_inReplyMessageText', self.accent.alpha(-0.25))
set('chat_outReplyMessageText', self.accent.alpha(-0.15))
set('chat_inReplyMediaMessageText', self.accent.alpha(-0.25))
set('chat_outReplyMediaMessageText', self.accent.alpha(-0.15))
set('chat_inReplyMediaMessageSelectedText', self.accent.alpha(-0.25))
set('chat_outReplyMediaMessageSelectedText', self.accent.alpha(-0.15))
set('chat_inPreviewLine', self.accent.alpha(-0.25))
set('chat_outPreviewLine', self.accent.alpha(-0.15))
set('chat_inContactNameText', self.accent.alpha(-0.25))
set('chat_outContactNameText', self.accent.alpha(-0.5))
set('chat_inSiteNameText', self.accent.alpha(-0.25))
set('chat_outSiteNameText', self.accent.alpha(-0.15))
set('chat_inContactPhoneText', self.accent.alpha(-0.25))
set('chat_outContactPhoneText', self.accent.alpha(-0.15))
set('chat_inContactPhoneSelectedText', self.accent.alpha(-0.25))
set('chat_outContactPhoneSelectedText', self.accent.alpha(-0.15))
set('chat_mediaProgress', msg_text)
set('chat_inAudioProgress', sec_text)
set('chat_outAudioProgress', ter_text)
set('chat_inAudioSelectedProgress', sec_text)
set('chat_outAudioSelectedProgress', ter_text)
set('chat_inTimeText', sec_text)
set('chat_outTimeText', ter_text)
set('chat_inTimeSelectedText', sec_text.alpha(-0.1))
set('chat_adminText', msg_text)
set('chat_adminSelectedText', msg_text)
set('chat_outTimeSelectedText', ter_text.alpha(-0.1))
set('chat_outTimeSelectedText', sec_text.alpha(-0.1))
set('chat_inAudioPerfomerText', self.accent.alpha(-0.25))
set('chat_outAudioPerfomerText', self.accent.alpha(-0.25))
set('chat_inAudioPerfomerSelectedText', self.accent.alpha(-0.25))
set('chat_outAudioPerfomerSelectedText', self.accent.alpha(-0.25))
set('chat_inAudioTitleText', sec_text)
set('chat_outAudioTitleText', ter_text)
set('chat_inAudioDurationText', ter_text.alpha(-0.25))
set('chat_outAudioDurationText', sec_text.alpha(-0.25))
set('chat_inAudioDurationSelectedText', ter_text.alpha(-0.25))
set('chat_outAudioDurationSelectedText', sec_text.alpha(-0.25))
set('chat_inAudioSeekbar', self.primary)
set('chat_outAudioSeekbar', self.secondary)
set('chat_inAudioSeekbarSelected', self.primary)
set('chat_outAudioSeekbarSelected', self.secondary)
set('chat_inAudioSeekbarFill', self.accent)
set('chat_outAudioSeekbarFill', self.accent)
set('chat_inAudioCacheSeekbar', self.primary)
set('chat_outAudioCacheSeekbar', self.secondary)
set('chat_inVoiceSeekbar', self.primary)
set('chat_outVoiceSeekbar', self.secondary)
set('chat_inVoiceSeekbarSelected', self.primary)
set('chat_outVoiceSeekbarSelected', self.secondary)
set('chat_inVoiceSeekbarFill', self.accent.alpha(-0.25))
set('chat_outVoiceSeekbarFill', self.accent.alpha(-0.25))
set('chat_inFileNameText', sec_text)
set('chat_outFileNameText', ter_text)
set('chat_inFileInfoText', sec_text)
set('chat_outFileInfoText', ter_text)
set('chat_inFileInfoSelectedText', sec_text)
set('chat_outFileInfoSelectedText', ter_text)
set('chat_inVenueInfoText', self.accent.alpha(-0.25))
set('chat_outVenueInfoText', self.accent.alpha(-0.15))
set('chat_inVenueInfoSelectedText', self.accent.alpha(-0.25))
set('chat_outVenueInfoSelectedText', self.accent.alpha(-0.15))
set('chat_linkSelectBackground', self.accent.alpha(-0.75))
set('chat_textSelectBackground', self.accent.alpha(-0.25))
set('chat_messagePanelBackground', self.secondary)
set('chat_inLoader', self.primary)
set('chat_outLoader', self.secondary)
set('chat_inLoaderSelected', self.primary)
set('chat_outLoaderSelected', self.secondary)
set('chat_inMediaIcon', self.accent)
set('chat_outMediaIcon', self.accent)
set('chat_inMediaIconSelected', self.accent)
set('chat_outMediaIconSelected', self.accent)
set('chat_mediaLoaderPhoto', self.primary)
set('chat_mediaLoaderPhotoIcon', self.accent)
set('chat_mediaLoaderPhotoSelected', self.primary)
set('chat_mediaLoaderPhotoIconSelected', self.accent)
set('chat_outLoaderPhoto', self.secondary)
set('chat_outLoaderPhotoSelected', self.secondary)
set('chat_outLoaderPhotoIcon', self.accent)
set('chat_outLoaderPhotoIconSelected', self.accent)
set('chat_inLoaderPhoto', self.primary)
set('chat_inLoaderPhotoSelected', self.primary)
set('chat_inLoaderPhotoIcon', self.accent)
set('chat_inLoaderPhotoIconSelected', self.accent)
set('chat_outFileIcon', self.accent)
set('chat_outFileSelectedIcon', self.accent)
set('chat_inFileIcon', self.accent)
set('chat_inFileSelectedIcon', self.accent)
set('chat_inContactBackground', self.primary)
set('chat_outContactBackground', self.secondary)
set('chat_inContactIcon', self.accent)
set('chat_outContactIcon', self.accent)
set('chat_inLocationBackground', self.primary)
set('chat_outLocationBackground', self.secondary)
set('chat_inLocationIcon', self.accent)
set('chat_outLocationIcon', self.accent)
# chat_messagePanelShadow' default ok
set('chat_fieldOverlayText', self.accent)
set('chat_messagePanelText', pri_text)
set('chat_messagePanelHint', pri_text.alpha(-0.8))
set('chat_messagePanelSend', self.accent)
set('chat_messagePanelIcons', pri_text.alpha(-0.5))
set('chat_messagePanelVoicePressed', acc_icon)
set('chat_messagePanelVoiceLock', pri_text.alpha(-0.5))
set('chat_messagePanelVoiceLockBackground', self.secondary)
set('chat_messagePanelVoiceBackground', self.accent)
# 'key_chat_messagePanelVoiceLockShadow' ok
set('chat_messagePanelVoiceDelete', msg_text)
set('chat_recordedVoiceBackground', self.accent)
set('chat_recordTime', msg_text)
set('chat_recordVoiceCancel', msg_text)
set('chat_messagePanelVoiceDuration', title_text)
set('contextProgressInner1', self.accent.alpha(-0.5))
set('contextProgressOuter1', self.accent)
set('chat_messagePanelCancelInlineBot', pri_text.alpha(-0.5))
# chat_recordedVoiceDot ok
# chat_messagePanelVoiceShadow ok
set('chat_recordedVoiceProgress', self.accent.alpha(-0.5))
set('chat_recordedVoiceProgressInner', msg_text)
set('chat_recordedVoicePlayPause', msg_text)
set('chat_recordedVoicePlayPausePressed', self.accent.alpha(-0.8))
set('chat_emojiPanelNewTrending', self.accent)
set('chat_emojiPanelBackground', self.secondary)
set('chat_emojiPanelShadowLine', pri_text.alpha(-0.9))
set('chat_emojiPanelEmptyText', self.tertiary)
set('chat_emojiPanelIcon', self.tertiary)
set('chat_emojiPanelIconSelected', self.accent)
set('chat_emojiPanelStickerPackSelector', self.tertiary)
set('chat_emojiPanelBackspace', self.tertiary)
set('chat_emojiPanelTrendingTitle', title_text)
set('chat_emojiPanelTrendingDescription', msg_text)
set('chat_emojiPanelBadgeText', acc_text.alpha(-0.2))
set('chat_emojiPanelBadgeBackground', self.accent)
set('chat_emojiBottomPanelIcon', self.tertiary)
set('chat_emojiSearchIcon', self.tertiary)
set('chat_emojiPanelStickerSetNameHighlight', self.accent)
set('chat_emojiPanelStickerPackSelectorLine', self.accent)
set('chatbotKeyboardButtonText', self.secondary.alpha(-0.25))
set('chatbotKeyboardButtonBackground', self.secondary)
set('chat_topPanelLine', self.accent)
set('chat_topPanelTitle', self.accent)
set('chat_topPanelMessage', sec_text)
set('chat_addContact', self.accent)
set('chat_replyPanelMessage', sec_text)
set('chat_replyPanelIcons', self.accent)
set('chat_replyPanelName', self.accent)
set('chat_searchPanelText', self.accent)
set('chat_searchPanelIcons', self.accent)
set('chat_secretChatStatusText', msg_text)
# chat_stickersHintPanel
set('chat_unreadMessagesStartBackground', self.primary)
set('chat_unreadMessagesStartText', msg_text)
set('chat_botSwitchToInlineText', self.accent.alpha(-0.25))
set('chat_inlineResultIcon', self.accent)
set('windowBackgroundWhiteGrayText2', info_text)
set('windowBackgroundWhiteLinkText', self.accent.alpha(-0.25))
set('chat_gifSaveHintBackground', self.primary)
set('chat_gifSaveHintText', msg_text)
set('chat_attachMediaBanBackground', self.primary)
set('chat_attachMediaBanText', msg_text)
set('chat_goDownButtonCounterBackground', self.accent)
set('chat_goDownButtonCounter', acc_icon)
set('chat_goDownButton', self.secondary)
# chat_goDownButtonShadow ok
set('chat_goDownButtonIcon', sec_text.alpha(-0.2))
set('chat_secretTimerBackground', self.primary)
set('chat_secretTimerText', msg_text)
# chat_attachCameraIcon* ok
# chat_attach*Background ok
# chat_attach*Icon ok
set('chat_attachHideBackground', self.primary)
set('chat_attachHideIcon', pri_text.alpha(-0.25))
set('chat_attachSendBackground', self.accent)
set('chat_attachSendIcon', acc_icon)
# dialogCameraIcon ok
set('avatar_actionBarIconBlue', sec_text)
set('avatar_actionBarSelectorBlue', self.secondary.alpha(-0.8))
set('profile_title', sec_text.alpha(-0.1))
set('avatar_subtitleInProfileBlue', sec_text.alpha(-0.25))
set('actionBarDefaultSubmenuItem', sec_text.alpha(-0.25))
set('listSelectorSDK21', self.secondary.alpha(-0.8))
set('windowBackgroundWhiteValueText', self.accent)
set('windowBackgroundWhiteBlueHeader', self.accent)
set('avatar_backgroundInProfileBlue', self.accent)
set('profile_actionIcon', acc_icon)
set('profile_actionBackground', self.accent)
set('profile_actionPressedBackground', self.accent.alpha(-0.25))
set('switchTrack', self.tertiary)
set('switchTrackChecked', self.accent)
set('radioBackground', self.primary)
set('radioBackgroundChecked', self.accent)
set('windowBackgroundWhiteInputField', self.secondary)
set('windowBackgroundWhiteInputFieldActivated', self.accent)
set('windowBackgroundWhiteBlueText4', self.accent)
set('featuredStickers_addedIcon', self.accent)
set('stickers_menu', pri_text.alpha(-0.5))
set('stickers_menuSelector', pri_text.alpha(-0.7))
set('changephoneinfo_changeText', self.accent)
set('changephoneinfo_image', self.tertiary)
set('profile_creatorIcon', self.accent)
set('profile_verifiedBackground', self.accent)
set('windowBackgroundWhiteLinkSelection', self.accent.alpha(-0.75))
set('windowBackgroundWhiteBlueText', self.accent)
set('windowBackgroundWhiteBlueText2', self.accent)
set('windowBackgroundWhiteBlueButton', self.accent)
set('windowBackgroundWhiteBlueIcon', self.accent)
if self.ttype == 'dark':
set('actionBarActionModeDefaultTop',
self.secondary.lighten(0.25))
set('chats_actionPressedBackground', self.accent.lighten(0.25))
set('avatar_backgroundArchived', self.secondary.lighten(0.5))
set('avatar_backgroundArchivedHidden',
self.secondary.lighten(0.25))
set('chats_unreadCounterMuted', self.secondary.lighten(0.5))
set('chats_archivePinBackground', self.primary.lighten(0.5))
set('chats_menuBackground', self.primary.lighten(-0.25))
set('chats_menuItemIcon', self.tertiary.lighten(1))
set('graySection', self.primary.lighten(-0.25))
set('windowBackgroundGray', self.primary.lighten(-0.25))
set('inappPlayerBackground', self.primary.lighten(0.15))
set('inappPlayerPlayPause', self.secondary.lighten(0.25))
set('dialogTextBlack', sec_text.lighten(-0.1))
set('dialogCheckboxSquareDisabled', self.primary.lighten(-0.1))
set('dialogScrollGlow', sec_text.lighten(0.25))
set('player_actionBarTop', self.secondary.lighten(0.5))
set('chat_wallpaper', self.primary.lighten(-0.25))
set('actionBarDefaultSubmenuBackground',
self.secondary.lighten(-0.25))
set('actionBarDefaultSubmenuItemIcon', self.tertiary.lighten(1))
set('chat_inBubbleSelected', self.secondary.lighten(0.25))
set('chat_inBubbleShadow', self.secondary.lighten(-0.25))
set('chat_outBubbleShadow', self.tertiary.lighten(-0.25))
set('chat_serviceBackgroundSelected', self.primary.lighten(0.25))
set('chat_messageLinkOut', self.accent.lighten(0.25))
set('chat_outSentClock', self.accent.lighten(0.2))
set('chat_inSentCheck', self.accent.lighten(-0.3))
set('chat_inSentClock', self.accent.lighten(-0.2))
set('chat_inSentClockSelected', self.accent.lighten(-0.2))
set('chat_outSentClockSelected', self.accent.lighten(0.1))
set('chat_outInstantSelected', self.accent.lighten(0.2))
set('chat_inPreviewInstantText', self.accent.lighten(0.2))
set('chat_inPreviewInstantSelectedText', self.accent.lighten(0.2))
set('chat_inFileProgress', self.accent.lighten(-0.25))
set('chat_outFileProgress', self.accent.lighten(-0.15))
set('chat_inFileProgressSelected', self.accent.lighten(-0.15))
set('chat_outFileProgressSelected', self.accent.lighten(-0.05))
set('chat_inFileBackground', self.accent.lighten(-0.5))
set('chat_outFileBackground', self.accent.lighten(0.5))
set('chat_inFileBackgroundSelected', self.accent.lighten(-0.5))
set('chat_outFileBackgroundSelected', self.accent.lighten(0.5))
set('chatbotKeyboardButtonBackgroundPressed',
self.secondary.lighten(0.1).alpha(-0.7))
set('chat_topPanelBackground', self.secondary.lighten(-0.2))
set('chat_unreadMessagesStartArrowIcon',
self.primary.lighten(0.75).alpha(-0.2))
set('windowBackgroundWhiteGrayIcon', self.tertiary.lighten(1))
elif self.ttype == 'light':
set('actionBarActionModeDefaultTop',
self.secondary.lighten(-0.25))
set('chats_actionPressedBackground', self.accent.lighten(-0.25))
set('avatar_backgroundArchived', self.secondary.lighten(-0.5))
set('avatar_backgroundArchivedHidden',
self.secondary.lighten(-0.25))
set('chats_unreadCounterMuted', self.secondary.lighten(-0.5))
set('chats_archivePinBackground', self.primary.lighten(-0.5))
set('chats_menuBackground', self.primary.lighten(-0.25))
set('chats_menuItemIcon', self.tertiary.lighten(-1))
set('graySection', self.primary.lighten(0.25))
set('windowBackgroundGray', self.primary.lighten(-0.25))
set('inappPlayerBackground', self.primary.lighten(-0.15))
set('inappPlayerPlayPause', self.secondary.lighten(-0.25))
set('dialogTextBlack', sec_text.lighten(0.1))
set('dialogCheckboxSquareDisabled', self.primary.lighten(0.1))
set('dialogScrollGlow', sec_text.lighten(-0.25))
set('player_actionBarTop', self.secondary.lighten(-0.5))
set('chat_wallpaper', self.primary.lighten(-0.25))
set('actionBarDefaultSubmenuBackground',
self.secondary.lighten(0.25))
set('actionBarDefaultSubmenuItemIcon', self.tertiary.lighten(-1))
set('chat_inBubbleSelected', self.secondary.lighten(-0.25))
set('chat_inBubbleShadow', self.secondary.lighten(0.25))
set('chat_outBubbleShadow', self.tertiary.lighten(0.25))
set('chat_serviceBackgroundSelected',
self.primary.lighten(-0.25))
set('chat_messageLinkOut', self.accent.lighten(-0.25))
set('chat_outSentClock', self.accent.lighten(-0.2))
set('chat_inSentCheck', self.accent.lighten(0.3))
set('chat_inSentClock', self.accent.lighten(0.2))
set('chat_inSentClockSelected', self.accent.lighten(0.2))
set('chat_outSentClockSelected', self.accent.lighten(-0.1))
set('chat_outInstantSelected', self.accent.lighten(-0.2))
set('chat_inPreviewInstantText', self.accent.lighten(-0.2))
set('chat_inPreviewInstantSelectedText',
self.accent.lighten(-0.2))
set('chat_inFileProgress', self.accent.lighten(0.25))
set('chat_outFileProgress', self.accent.lighten(0.15))
set('chat_inFileProgressSelected', self.accent.lighten(0.15))
set('chat_outFileProgressSelected', self.accent.lighten(0.05))
set('chat_inFileBackground', self.accent.lighten(0.5))
set('chat_outFileBackground', self.accent.lighten(-0.5))
set('chat_inFileBackgroundSelected', self.accent.lighten(0.5))
set('chat_outFileBackgroundSelected', self.accent.lighten(-0.5))
set('chatbotKeyboardButtonBackgroundPressed',
self.secondary.lighten(-0.1).alpha(-0.7))
set('chat_topPanelBackground', self.secondary.lighten(0.2))
set('chat_unreadMessagesStartArrowIcon',
self.primary.lighten(-0.75).alpha(-0.2))
set('windowBackgroundWhiteGrayIcon', self.tertiary.lighten(-1))
if out is None:
self._to_file(self.telegram_string, 'out/android')
else:
self._to_file(self.telegram_string, 'out/'+out)
| 52.763844 | 96 | 0.619131 | 32,299 | 0.996944 | 0 | 0 | 419 | 0.012933 | 0 | 0 | 11,905 | 0.367461 |
a2c1f4bdfbf091de32f73f29f4fe1cc1d9bf86e8 | 2,081 | py | Python | 01 Dimensionality Reduction/Tutorial 03 - Unsupervised nonlinear embedding/isomap/dijkstra.py | KateYeon/Business-Anlaytics | 454c1cb1b88499e94eeb5e8a7a32309afb7165e5 | ["MIT"] | null | null | null | 01 Dimensionality Reduction/Tutorial 03 - Unsupervised nonlinear embedding/isomap/dijkstra.py | KateYeon/Business-Anlaytics | 454c1cb1b88499e94eeb5e8a7a32309afb7165e5 | ["MIT"] | null | null | null | 01 Dimensionality Reduction/Tutorial 03 - Unsupervised nonlinear embedding/isomap/dijkstra.py | KateYeon/Business-Anlaytics | 454c1cb1b88499e94eeb5e8a7a32309afb7165e5 | ["MIT"] | null | null | null |
class Graph(object):
"""
A simple undirected, weighted graph
"""
def __init__(self):
self.nodes = set()
self.edges = {}
self.distances = {}
def add_node(self, value):
self.nodes.add(value)
def add_edge(self, from_node, to_node, distance):
self._add_edge(from_node, to_node, distance)
self._add_edge(to_node, from_node, distance)
def _add_edge(self, from_node, to_node, distance):
self.edges.setdefault(from_node, [])
self.edges[from_node].append(to_node)
self.distances[(from_node, to_node)] = distance
def dijkstra(graph, initial_node):
visited = {initial_node: 0}
nodes = set(graph.nodes)
while nodes:
min_node = None
for node in nodes:
if node in visited:
if min_node is None:
min_node = node
elif visited[node] < visited[min_node]:
min_node = node
if min_node is None:
break
nodes.remove(min_node)
cur_wt = visited[min_node]
for edge in graph.edges[min_node]:
wt = cur_wt + graph.distances[(min_node, edge)]
if edge not in visited or wt < visited[edge]:
visited[edge] = wt
return visited
def dijkstra2(graph, initial_node):
visited = {initial_node: 0}
nodes = set(graph.nodes)
while nodes:
min_node = None
for node in nodes:
if node in visited:
if min_node is None:
min_node = node
elif visited[node] < visited[min_node]:
min_node = node
if min_node is None:
break
nodes.remove(min_node)
cur_wt = visited[min_node]
for edge in graph.edges[min_node]:
wt = cur_wt + graph.distances[(min_node, edge)]
if edge not in visited or wt < visited[edge]:
visited[edge] = wt
return visited
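# A minimal usage sketch of the Graph/dijkstra pair above (the node names and
# weights here are illustrative, not from the original file): add_edge() wires
# both directions, and dijkstra() returns {node: shortest distance from start}.
if __name__ == '__main__':
    g = Graph()
    for n in ('a', 'b', 'c'):
        g.add_node(n)
    g.add_edge('a', 'b', 1.0)  # also registers the reverse edge b -> a
    g.add_edge('b', 'c', 2.0)
    g.add_edge('a', 'c', 5.0)
    print(dijkstra(g, 'a'))    # {'a': 0, 'b': 1.0, 'c': 3.0}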
| 26.341772 | 60 | 0.537242 | 626 | 0.300817 | 0 | 0 | 0 | 0 | 0 | 0 | 53 | 0.025469 |
a2c23bf73602d96dd9316f0d033a88b8764a5ac4 | 3,063 | py | Python | fakenet/diverters/fnconfig.py | AzzOnFire/flare-fakenet-ng | bafd7e97b61cd43190dee7f1d2c3f4388488af76 | ["Apache-2.0"] | null | null | null | fakenet/diverters/fnconfig.py | AzzOnFire/flare-fakenet-ng | bafd7e97b61cd43190dee7f1d2c3f4388488af76 | ["Apache-2.0"] | null | null | null | fakenet/diverters/fnconfig.py | AzzOnFire/flare-fakenet-ng | bafd7e97b61cd43190dee7f1d2c3f4388488af76 | ["Apache-2.0"] | null | null | null |
class Config(object):
"""Configuration primitives.
Inherit from or instantiate this class and call configure() when you've got
a dictionary of configuration values you want to process and query.
Would be nice to have _expand_cidrlist() so blacklists can specify ranges.
"""
def __init__(self, config_dict=None, portlists=[]):
if config_dict is not None:
self.configure(config_dict, portlists)
def configure(self, config_dict, portlists=[], stringlists=[]):
"""Parse configuration.
Does three things:
1.) Turn dictionary keys to lowercase
2.) Turn string lists into arrays for quicker access
3.) Expand port range specifications
"""
self._dict = dict((k.lower(), v) for k, v in config_dict.items())
for entry in portlists:
portlist = self.getconfigval(entry)
if portlist:
expanded = self._expand_ports(portlist)
self.setconfigval(entry, expanded)
for entry in stringlists:
stringlist = self.getconfigval(entry)
if stringlist:
expanded = [s.strip() for s in stringlist.split(',')]
self.setconfigval(entry, expanded)
def reconfigure(self, portlists=[], stringlists=[]):
"""Same as configure(), but allows multiple callers to sequentially
apply parsing directives for port and string lists.
For instance, if a base class calls configure() specifying one set of
port lists and string lists, but a derived class knows about further
configuration items that will need to be accessed samewise, this
function can be used to leave the existing parsed data alone and only
re-parse the new port or string lists into arrays.
"""
self.configure(self._dict, portlists, stringlists)
def _expand_ports(self, ports_list):
ports = []
for i in ports_list.split(','):
if '-' not in i:
ports.append(int(i))
else:
l, h = list(map(int, i.split('-')))
ports += list(range(l, h + 1))
return ports
def _fuzzy_true(self, value):
return value.lower() in ['yes', 'on', 'true', 'enable', 'enabled']
def _fuzzy_false(self, value):
return value.lower() in ['no', 'off', 'false', 'disable', 'disabled']
def is_configured(self, opt):
return opt.lower() in list(self._dict.keys())
def is_unconfigured(self, opt):
return not self.is_configured(opt)
def is_set(self, opt):
return (self.is_configured(opt) and
self._fuzzy_true(self._dict[opt.lower()]))
def is_clear(self, opt):
return (self.is_configured(opt) and
self._fuzzy_false(self._dict[opt.lower()]))
def getconfigval(self, opt, default=None):
return self._dict[opt.lower()] if self.is_configured(opt) else default
def setconfigval(self, opt, obj):
self._dict[opt.lower()] = obj
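# A brief, hypothetical usage sketch of the parsing rules documented above:
# the option names are invented for illustration; keys are lower-cased on load,
# entries named in `portlists` have ranges like '8000-8002' expanded to ints,
# and is_set()/is_clear() interpret fuzzy boolean strings such as 'Yes'/'Off'.
if __name__ == '__main__':
    cfg = Config({'RedirectAllTraffic': 'Yes',
                  'BlackListPortsTCP': '80,8000-8002'},
                 portlists=['blacklistportstcp'])
    print(cfg.is_set('redirectalltraffic'))       # True
    print(cfg.getconfigval('blacklistportstcp'))  # [80, 8000, 8001, 8002]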
| 36.903614 | 79 | 0.614757 | 3,062 | 0.999674 | 0 | 0 | 0 | 0 | 0 | 0 | 1,080 | 0.352595 |
a2c331cfd9f663070b5e40ecc3ae373845f2e7c4 | 662 | py | Python | plotly/validators/layout/xaxis/_constraintoward.py | gnestor/plotly.py | a8ae062795ddbf9867b8578fe6d9e244948c15ff | ["MIT"] | 12 | 2020-04-18T18:10:22.000Z | 2021-12-06T10:11:15.000Z | plotly/validators/layout/xaxis/_constraintoward.py | gnestor/plotly.py | a8ae062795ddbf9867b8578fe6d9e244948c15ff | ["MIT"] | 27 | 2020-04-28T21:23:12.000Z | 2021-06-25T15:36:38.000Z | plotly/validators/layout/xaxis/_constraintoward.py | gnestor/plotly.py | a8ae062795ddbf9867b8578fe6d9e244948c15ff | ["MIT"] | 6 | 2020-04-18T23:07:08.000Z | 2021-11-18T07:53:06.000Z |
import _plotly_utils.basevalidators
class ConstraintowardValidator(
_plotly_utils.basevalidators.EnumeratedValidator
):
def __init__(
self,
plotly_name='constraintoward',
parent_name='layout.xaxis',
**kwargs
):
super(ConstraintowardValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop('edit_type', 'plot'),
role=kwargs.pop('role', 'info'),
values=kwargs.pop(
'values',
['left', 'center', 'right', 'top', 'middle', 'bottom']
),
**kwargs
)
| 26.48 | 70 | 0.560423 | 623 | 0.941088 | 0 | 0 | 0 | 0 | 0 | 0 | 110 | 0.166163 |
a2c3c1fae06adac5e17ba36e3e5bcfafc2b96e97 | 4,057 | py | Python | openmdao/core/test/test_group_derivatives.py | jcchin/project_clippy | ed38e11a96848a81c024c5a0e5821bc5db04fdc7 | ["Apache-2.0"] | null | null | null | openmdao/core/test/test_group_derivatives.py | jcchin/project_clippy | ed38e11a96848a81c024c5a0e5821bc5db04fdc7 | ["Apache-2.0"] | null | null | null | openmdao/core/test/test_group_derivatives.py | jcchin/project_clippy | ed38e11a96848a81c024c5a0e5821bc5db04fdc7 | ["Apache-2.0"] | null | null | null |
""" Testing group-level finite difference. """
import unittest
import numpy as np
from openmdao.components.param_comp import ParamComp
from openmdao.core.component import Component
from openmdao.core.group import Group
from openmdao.core.problem import Problem
from openmdao.test.converge_diverge import ConvergeDivergeGroups
from openmdao.test.simple_comps import SimpleCompDerivMatVec
from openmdao.test.util import assert_rel_error
class TestGroupDerivatves(unittest.TestCase):
def test_simple_matvec(self):
class VerificationComp(SimpleCompDerivMatVec):
def jacobian(self, params, unknowns, resids):
raise RuntimeError("Derivative functions on this comp should not run.")
def apply_linear(self, params, unknowns, dparams, dunknowns,
dresids, mode):
raise RuntimeError("Derivative functions on this comp should not run.")
sub = Group()
sub.add('mycomp', VerificationComp())
prob = Problem()
prob.root = Group()
prob.root.add('sub', sub)
prob.root.add('x_param', ParamComp('x', 1.0))
prob.root.connect('x_param.x', "sub.mycomp.x")
sub.fd_options['force_fd'] = True
prob.setup(check=False)
prob.run()
J = prob.calc_gradient(['x_param.x'], ['sub.mycomp.y'], mode='fwd',
return_format='dict')
assert_rel_error(self, J['sub.mycomp.y']['x_param.x'][0][0], 2.0, 1e-6)
J = prob.calc_gradient(['x_param.x'], ['sub.mycomp.y'], mode='rev',
return_format='dict')
assert_rel_error(self, J['sub.mycomp.y']['x_param.x'][0][0], 2.0, 1e-6)
def test_converge_diverge_groups(self):
prob = Problem()
prob.root = Group()
prob.root.add('sub', ConvergeDivergeGroups())
param_list = ['sub.p.x']
unknown_list = ['sub.comp7.y1']
prob.setup(check=False)
prob.run()
J = prob.calc_gradient(param_list, unknown_list, mode='fwd', return_format='dict')
assert_rel_error(self, J['sub.comp7.y1']['sub.p.x'][0][0], -40.75, 1e-6)
J = prob.calc_gradient(param_list, unknown_list, mode='rev', return_format='dict')
assert_rel_error(self, J['sub.comp7.y1']['sub.p.x'][0][0], -40.75, 1e-6)
def test_group_fd(self):
class SimpleComp(Component):
""" A simple component that provides derivatives. """
def __init__(self):
super(SimpleComp, self).__init__()
# Params
self.add_param('x', 2.0)
# Unknowns
self.add_output('y', 0.0)
def solve_nonlinear(self, params, unknowns, resids):
""" Doesn't do much. Just multiply by 3"""
unknowns['y'] = 3.0*params['x']
def jacobian(self, params, unknowns, resids):
"""Analytical derivatives."""
J = {}
J[('y', 'x')] = 3.0
return J
class Model(Group):
""" Simple model to experiment with finite difference."""
def __init__(self):
super(Model, self).__init__()
self.add('px', ParamComp('x', 2.0))
self.add('comp1', SimpleComp())
sub = self.add('sub', Group())
sub.add('comp2', SimpleComp())
sub.add('comp3', SimpleComp())
self.add('comp4', SimpleComp())
self.connect('px.x', 'comp1.x')
self.connect('comp1.y', 'sub.comp2.x')
self.connect('sub.comp2.y', 'sub.comp3.x')
self.connect('sub.comp3.y', 'comp4.x')
self.sub.fd_options['force_fd'] = True
prob = Problem()
prob.root = Model()
prob.setup(check=False)
prob.run()
J = prob.calc_gradient(['px.x'], ['comp4.y'])
assert_rel_error(self, J[0][0], 81.0, 1e-6)
if __name__ == "__main__":
unittest.main()
| 32.456 | 90 | 0.562731 | 3,568 | 0.879468 | 0 | 0 | 0 | 0 | 0 | 0 | 804 | 0.198176 |
a2c3dab8b5c5b5fea5c21366ad80b3c046f70f38 | 2,235 | py | Python | rsbroker/core/user.py | land-pack/RsBroker | d556fda09582e0540cac0eabc163a984e8fc1c44 | ["Apache-2.0"] | null | null | null | rsbroker/core/user.py | land-pack/RsBroker | d556fda09582e0540cac0eabc163a984e8fc1c44 | ["Apache-2.0"] | null | null | null | rsbroker/core/user.py | land-pack/RsBroker | d556fda09582e0540cac0eabc163a984e8fc1c44 | ["Apache-2.0"] | null | null | null |
from __future__ import absolute_import
import ujson
from rsbroker.core.upstream import RTCWebSocketClient
class BaseUserManager(object):
room_to_uid = {}
uid_to_handler = {}
def register(self, obj):
"""
Dispatch all resources the user needs.
:param obj:
:return:
"""
raise NotImplementedError
def unregister(self, obj):
"""
Release all resources when the user checks out.
:param obj:
:return:
"""
raise NotImplementedError
def send(self, request):
"""
Send a message to the room server over the web socket.
:param request: a dict type
Example:
{
'method': 'check_in',
'uid': uid
}
:return: a dict type
Example:
{
'status': '100',
'mid': '1001',
'body': {'info':'check in failure'}
}
"""
raise NotImplementedError
class UserManager(BaseUserManager):
"""
Implements all methods declared by the parent class.
"""
def register(self, obj):
room = obj.room
uid = obj.uid
request = {'method': 'check_in', 'uid': uid}
response = self.send(request)
data = ujson.loads(response)
mid = data.get("mid")
if mid == "1001":
# check in failure
raise ValueError("Check in failure, no source for uid [%s]" % uid)
else:
if room in self.room_to_uid:
self.room_to_uid[room].add(uid)
else:
self.room_to_uid[room] = {uid}
self.uid_to_handler[uid] = obj
def unregister(self, obj):
room = obj.room
uid = obj.uid
request = {'method': 'check_out', 'uid': uid}
response = self.send(request)
data = ujson.loads(response)
mid = data.get("mid")
if mid == '1003':
raise ValueError("Check out failure, the user may already check out!")
else:
if room in self.room_to_uid:
self.room_to_uid[room].remove(uid)
if uid in self.uid_to_handler:
del self.uid_to_handler[uid]
| 25.988372 | 82 | 0.521253 | 2,122 | 0.949441 | 0 | 0 | 0 | 0 | 0 | 0 | 840 | 0.375839 |
a2c3e3fd647b669204ee60f34d14ceb1b5e30f77 | 12,756 | py | Python | src/constraint_solver.py | khairulislam/phys | fc702520fcd3b23022b9253e7d94f878978b4500 | ["MIT"] | null | null | null | src/constraint_solver.py | khairulislam/phys | fc702520fcd3b23022b9253e7d94f878978b4500 | ["MIT"] | null | null | null | src/constraint_solver.py | khairulislam/phys | fc702520fcd3b23022b9253e7d94f878978b4500 | ["MIT"] | null | null | null |
from pgm.pgmplayer import PGMPlayer
import cps_constraints as con
from operator import itemgetter
import uuid
import os
class ConstraintSolver:
def __init__(self, my_con_collector, my_con_scoper, SHOULD_USE_CONSTRAINT_SCOPING=False):
self.con_collector = my_con_collector
self.con_scoper = my_con_scoper
self.SHOULD_PRINT_VARIABLE_TYPES = False
self.SHOULD_USE_CONSTRAINT_SCOPING = SHOULD_USE_CONSTRAINT_SCOPING
self.ENABLE_SCOPER = False
self.pred2pgmvar = {}
self.pgmvar2pred = {}
self.uuid = str(uuid.uuid4())
def solve(self):
#print con.units
#print con.non_unit_variables
#print "Dimensionless:"
#print con.dimensionless_variables
self.pred2pgmvar = {}
self.pgmvar2pred = {}
var2unitproba = {}
for unit in con.units:
fg_filename = "pgm/predict_" + str(unit).replace(" ", "") + self.uuid + ".fg"
player = self.prepare(fg_filename, unit)
pgmvar2proba = player.compute_marginals()
#print {v.name: '%.4f' % (1.0 - p) for v, p in pgmvar2proba.items()}
os.remove(fg_filename)
for pred, pgmvar in self.pred2pgmvar.items():
self.pgmvar2pred[pgmvar] = pred
#print '---------------------'
#print 'Probabilistic Units:'
#print '---------------------'
for v, p in pgmvar2proba.items():
if v.name in self.pgmvar2pred:
(token, name, u) = self.pgmvar2pred[v.name]
#print '%s: %s = %s = %.4f' % (v.name, name, unit, 1.0-p)
if (token, name) in var2unitproba:
var2unitproba[(token, name)].append((unit, 1.0-p))
else:
var2unitproba[(token, name)] = [(unit, 1.0-p)]
#print '---------------------' + '\n'
for v in var2unitproba:
var2unitproba[v].sort(key=itemgetter(1), reverse=True)
if (var2unitproba[v][0][1] == 0.5):
continue
if self.SHOULD_PRINT_VARIABLE_TYPES:
print('%s:\n%s\n' % (v[1], var2unitproba[v]))
con.variable2unitproba = var2unitproba
#con.reset_constraints()
return var2unitproba
def prepare(self, fg_filename, unit):
if self.SHOULD_USE_CONSTRAINT_SCOPING and self.con_scoper.constraint_scope_list:
self.ENABLE_SCOPER = True
player = PGMPlayer(fg_filename)
self.process_nm_constraints(player, unit)
self.process_cu_constraints(player, unit)
self.process_df_constraints(player, unit)
self.process_cf_constraints(player, unit)
self.process_ks_constraints(player, unit)
return player
def process_nm_constraints(self, pgm_player, unit):
for var, nm_con in con.naming_constraints.items():
(lt, lname, unitprobalist) = nm_con
var = con.variables.get((lt.variable, lname))
if var:
nv = 'n'+ str(var)
pv = 'p'+ str(var)
p = 0.0
for (un, pr) in unitprobalist:
if (un == unit):
p = pr
break
pgm_player.add_factor(left=[], right=[nv],
states=[0, 1],
proba=p,
comment=nv + ' = 1')
pgm_player.add_factor(left=[nv], right=[pv],
states=[1, 0, 1, 1],
proba=0.7,
comment=nv + ' -> ' + pv)
#print nv + ': (' + lname + ', ' + str(unit) + ', ' + str(p) + ')'
#print nv + ' -> ' + pv
if (lt.variable, lname, str(unit)) not in self.pred2pgmvar:
self.pred2pgmvar[(lt.variable, lname, str(unit))] = pv
def process_cu_constraints(self, pgm_player, unit):
for var, cu_con in con.computed_unit_constraints.items():
(lt, lname, units, isKnown) = cu_con[0]
var = con.variables.get((lt.variable, lname))
if var:
cv = 'c'+ str(var)
pv = 'p'+ str(var)
p = 0.0
p_fwd = 0.95 if con.found_ros_units else 0.7
no_factor = False
for (t, n, un, isKnown) in cu_con:
if self.ENABLE_SCOPER and self.con_scoper.should_exclude_constraint([t]):
continue
if con.should_exclude_constraint((t, n, un, isKnown)):
no_factor = True
continue
if (unit in un):
p = 1.0 if isKnown else 0.8
if isKnown:
break
if no_factor and p == 0.0:
continue
pgm_player.add_factor(left=[], right=[cv],
states=[0, 1],
proba=p,
comment=cv + ' = 1')
pgm_player.add_factor(left=[cv], right=[pv],
states=[1, 0, 1, 1],
proba=p_fwd,
comment=cv + ' -> ' + pv)
#print cv + ' = 1: (' + lname + ', ' + str(unit) + ', ' + str(p) + ')'
#print cv + ' -> ' + pv
if (lt.variable, lname, str(unit)) not in self.pred2pgmvar:
self.pred2pgmvar[(lt.variable, lname, str(unit))] = pv
for (lt, lname, un, isKnown) in con.derived_cu_constraints:
var = con.variables.get((lt.variable, lname))
if var:
cv = 'c'+ str(var)
pv = 'p'+ str(var)
p = 0.0
p_fwd = 0.95 if con.found_ros_units else 0.7
if (unit == un):
p = 1.0 if isKnown else 0.8
pgm_player.add_factor(left=[], right=[cv],
states=[0, 1],
proba=p,
comment=cv + ' = 1')
pgm_player.add_factor(left=[cv], right=[pv],
states=[1, 0, 1, 1],
proba=p_fwd,
comment=cv + ' -> ' + pv)
#print cv + ' = 1: (' + lname + ', ' + str(unit) + ', ' + str(p) + ')'
#print cv + ' -> ' + pv
if (lt.variable, lname, str(unit)) not in self.pred2pgmvar:
self.pred2pgmvar[(lt.variable, lname, str(unit))] = pv
def process_df_constraints(self, pgm_player, unit):
for (lt, lname, rt, rname, df_type) in con.df_constraints:
if self.ENABLE_SCOPER and self.con_scoper.should_exclude_constraint([lt, rt]):
continue
var1 = con.variables.get((lt.variable, lname))
var2 = con.variables.get((rt.variable, rname))
if var1 and var2 and (var1 != var2):
pv1 = 'p'+ str(var1)
pv2 = 'p'+ str(var2)
pgm_player.add_factor(left=[pv1], right=[pv2],
states=[1, 0, 1, 1],
proba=0.95,
comment=pv1 + ' -> ' + pv2)
pgm_player.add_factor(left=[pv2], right=[pv1],
states=[1, 0, 1, 1],
proba=0.95,
comment=pv2 + ' -> ' + pv1)
#print pv1 + ' -> ' + pv2
#print pv2 + ' -> ' + pv1
if (lt.variable, lname, str(unit)) not in self.pred2pgmvar:
self.pred2pgmvar[(lt.variable, lname, str(unit))] = pv1
if (rt.variable, rname, str(unit)) not in self.pred2pgmvar:
self.pred2pgmvar[(rt.variable, rname, str(unit))] = pv2
else:
if lt.isKnown and (not rt.isKnown):
dv2 = 'd'+ str(var2)
pv2 = 'p'+ str(var2)
p = 0.95 if (lt.units[0] == unit) else 0.0
pgm_player.add_factor(left=[], right=[dv2],
states=[0, 1],
proba=p,
comment=dv2 + ' = 1')
pgm_player.add_factor(left=[dv2], right=[pv2],
states=[1, 0, 1, 1],
proba=0.95,
comment=dv2 + ' -> ' + pv2)
#print dv2 + ' = 1: (' + rname + ', ' + str(unit) + ', ' + str(p) + ')'
#print dv2 + ' -> ' + pv2
if (rt.variable, rname, str(unit)) not in self.pred2pgmvar:
self.pred2pgmvar[(rt.variable, rname, str(unit))] = pv2
elif rt.isKnown and (not lt.isKnown):
dv1 = 'd'+ str(var1)
pv1 = 'p'+ str(var1)
p = 0.95 if (rt.units[0] == unit) else 0.0
pgm_player.add_factor(left=[], right=[dv1],
states=[0, 1],
proba=p,
comment=dv1 + ' = 1')
pgm_player.add_factor(left=[dv1], right=[pv1],
states=[1, 0, 1, 1],
proba=0.95,
comment=dv1 + ' -> ' + pv1)
#print dv1 + ' = 1: (' + lname + ', ' + str(unit) + ', ' + str(p) + ')'
#print dv1 + ' -> ' + pv1
if (lt.variable, lname, str(unit)) not in self.pred2pgmvar:
self.pred2pgmvar[(lt.variable, lname, str(unit))] = pv1
def process_cf_constraints(self, pgm_player, unit):
for (t, name, units, cf_type) in con.conversion_factor_constraints:
var = con.variables.get((t.variable, name))
if var:
fv = 'f'+ str(var)
pv = 'p'+ str(var)
p = 0.0
if (units[0] == unit):
p = 0.95 if (cf_type == con.CF_3) else 0.9
pgm_player.add_factor(left=[], right=[fv],
states=[0, 1],
proba=p,
comment=fv + ' = 1')
pgm_player.add_factor(left=[fv], right=[pv],
states=[1, 0, 1, 1],
proba=0.95,
comment=fv + ' -> ' + pv)
#print fv + ' = 1: (' + name + ', ' + str(unit) + ', ' + str(p) + ')'
#print fv + ' -> ' + pv
if (t.variable, name, str(unit)) not in self.pred2pgmvar:
self.pred2pgmvar[(t.variable, name, str(unit))] = pv
def process_ks_constraints(self, pgm_player, unit):
for var, ks_con in con.known_symbol_constraints.items():
(token, name, units) = ks_con[0]
var = con.variables.get((token.variable, name))
if var:
kv = 'k'+ str(var)
pv = 'p'+ str(var)
p = 0.0
for (t, n, un) in ks_con:
p = 0.0
if (un[0] == unit):
p = 0.95
pgm_player.add_factor(left=[], right=[kv],
states=[0, 1],
proba=p,
comment=kv + ' = 1')
pgm_player.add_factor(left=[kv], right=[pv],
states=[1, 0, 1, 1],
proba=0.95,
comment=kv + ' -> ' + pv)
#print kv + ' = 1: (' + name + ', ' + str(unit) + ', ' + str(p) + ')'
#print kv + ' -> ' + pv
if (token.variable, name, str(unit)) not in self.pred2pgmvar:
self.pred2pgmvar[(token.variable, name, str(unit))] = pv
| 43.835052 | 93 | 0.409768 | 12,632 | 0.990279 | 0 | 0 | 0 | 0 | 0 | 0 | 1,257 | 0.098542 |
a2c43ed1aafc32a3c7c2532f7e7717a9aecd874b | 1,901 | py | Python | cpu_ver/funkyyak/tests/test_util.py | bigaidream-projects/drmad | a4bb6010595d956f29c5a42a095bab76a60b29eb | ["MIT"] | 119 | 2016-02-24T17:20:50.000Z | 2021-05-28T21:35:16.000Z | cpu_ver/funkyyak/tests/test_util.py | LinZichuan/drmad | a4bb6010595d956f29c5a42a095bab76a60b29eb | ["MIT"] | 8 | 2016-02-25T03:13:38.000Z | 2017-09-15T00:54:52.000Z | cpu_ver/funkyyak/tests/test_util.py | LinZichuan/drmad | a4bb6010595d956f29c5a42a095bab76a60b29eb | ["MIT"] | 31 | 2016-03-10T04:57:11.000Z | 2021-05-02T01:00:04.000Z |
import numpy as np
import itertools as it
from funkyyak import grad
from copy import copy
def nd(f, *args):
unary_f = lambda x : f(*x)
return unary_nd(unary_f, args)
def unary_nd(f, x):
eps = 1e-4
if isinstance(x, np.ndarray):
nd_grad = np.zeros(x.shape)
for dims in it.product(*map(range, x.shape)):
nd_grad[dims] = unary_nd(indexed_function(f, x, dims), x[dims])
return nd_grad
elif isinstance(x, tuple):
return tuple([unary_nd(indexed_function(f, list(x), i), x[i])
for i in range(len(x))])
elif isinstance(x, dict):
return {k : unary_nd(indexed_function(f, x, k), v) for k, v in x.iteritems()}
elif isinstance(x, list):
return [unary_nd(indexed_function(f, x, i), v) for i, v in enumerate(x)]
else:
return (f(x + eps/2) - f(x - eps/2)) / eps
def indexed_function(fun, arg, index):
local_arg = copy(arg)
def partial_function(x):
local_arg[index] = x
return fun(local_arg)
return partial_function
def eq_class(dtype):
return float if dtype == np.float64 else dtype
def check_equivalent(A, B):
assert eq_class(type(A)) == eq_class(type(B)),\
"Types are: {0} and {1}".format(eq_class(type(A)), eq_class(type(B)))
if isinstance(A, (tuple, list)):
for a, b in zip(A, B): check_equivalent(a, b)
elif isinstance(A, dict):
assert len(A) == len(B)
for k in A: check_equivalent(A[k], B[k])
else:
if isinstance(A, np.ndarray):
assert A.shape == B.shape, "Shapes are {0} and {1}".format(A.shape, B.shape)
assert np.allclose(A, B, rtol=1e-4, atol=1e-6), "Diffs are: {0}".format(A - B)
def check_grads(fun, *args):
A = nd(fun, *args)
B = tuple([grad(fun, i)(*args) for i in range(len(args))])
check_equivalent(A, B)
def to_scalar(x):
return np.sum(np.sin(x))
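# Usage sketch: check_grads() compares the central-difference gradient from
# nd() with funkyyak's grad() for every positional argument and asserts they
# agree within tolerance. The calls below are illustrative and assume grad()
# supports the numpy primitives involved:
#   check_grads(to_scalar, np.random.randn(4))
#   check_grads(lambda x, y: to_scalar(x * y), np.random.randn(3), np.random.randn(3))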
| 33.350877 | 88 | 0.602315 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 64 | 0.033666 |
a2c46503127480012c6964e3561a31835e11fb15 | 2,772 | py | Python | game_main.py | smarTHh2019/melody_path_finder | 79cf0108afa12dd18be099d2c8c6291be992ff0d | ["MIT"] | null | null | null | game_main.py | smarTHh2019/melody_path_finder | 79cf0108afa12dd18be099d2c8c6291be992ff0d | ["MIT"] | null | null | null | game_main.py | smarTHh2019/melody_path_finder | 79cf0108afa12dd18be099d2c8c6291be992ff0d | ["MIT"] | null | null | null |
import time
import random
import pygame
import pygame.midi
import numpy as np
from typing import Tuple
__author__ = "Thomas Heller"
AV_SIZE = 20
WIN_X = 30 * AV_SIZE
WIN_Y = 30 * AV_SIZE
DIFF_MAX = np.sqrt(WIN_X**2 + WIN_Y**2)
def adapt_avatar_position(event, user_x_pos:int, user_y_pos:int) -> Tuple[int, int]:
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_LEFT:
if user_x_pos >= AV_SIZE:
user_x_pos -= AV_SIZE
if event.key == pygame.K_RIGHT:
if user_x_pos < WIN_X-AV_SIZE:
user_x_pos += AV_SIZE
if event.key == pygame.K_UP:
if user_y_pos >= AV_SIZE:
user_y_pos -= AV_SIZE
if event.key == pygame.K_DOWN:
if user_y_pos < WIN_Y-AV_SIZE:
user_y_pos += AV_SIZE
return user_x_pos, user_y_pos
def calculate_difference(win_position:tuple, user_x_pos:int, user_y_pos:int):
difference = abs(win_position[0] - user_x_pos), abs(win_position[1] - user_y_pos)
return np.sqrt(np.sqrt(difference[0]**2 + difference[1]**2) / DIFF_MAX)
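# Note: calculate_difference() normalises the avatar-to-target Euclidean
# distance by the window diagonal DIFF_MAX, so the returned value lies in
# [0, 1]; main() maps it to a MIDI velocity with int(127 * (1 - difference)),
# making the note louder as the player moves closer to the hidden target.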
def main():
# setup
pygame.init()
pygame.midi.init()
player = pygame.midi.Output(0)
player.set_instrument(0)
current_note = random.randint(60,84)
window = pygame.display.set_mode((WIN_X,WIN_Y))
user_x_pos, user_y_pos = int(WIN_X/2), int(WIN_Y/2)
pos_x = [ii for ii in range(0,WIN_X-AV_SIZE,AV_SIZE)]
pos_y = [ii for ii in range(0,WIN_Y-AV_SIZE,AV_SIZE)]
win_position = (random.choice(pos_x), random.choice(pos_y))
difference = calculate_difference(win_position, user_x_pos, user_y_pos)
player.note_on(current_note, int(127*(1-difference)))
old_time = time.time()
# program loop
running = True
while running:
if win_position == (user_x_pos, user_y_pos):
window.fill((255,255,0))
else:
window.fill((255,255,255))
difference = calculate_difference(win_position, user_x_pos, user_y_pos)
pygame.draw.rect(window,(0,50,255,50),(user_x_pos,user_y_pos,AV_SIZE,AV_SIZE)) # Rect(left, top, width, height)
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
user_x_pos, user_y_pos = adapt_avatar_position(event, user_x_pos, user_y_pos)
pygame.display.flip() # Documentation: Update the full display Surface to the screen
if time.time()-old_time > 1:
player.note_off(current_note)
current_note = random.randint(60,84)
player.note_on(current_note, int(127*(1-difference)))
old_time = time.time()
# teardown
del player
pygame.midi.quit()
if __name__=="__main__":
main()
| 31.862069 | 121 | 0.636364 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 151 | 0.054473 |
a2c538a523c20cc3cd74501dba0bcc96fa5757c1 | 4,806 | py | Python | lib/preproc.py | ayshrv/paperpile-notion | 0fe73aee1e6bfcf3105b9a417182736a285ec797 | ["Apache-2.0"] | null | null | null | lib/preproc.py | ayshrv/paperpile-notion | 0fe73aee1e6bfcf3105b9a417182736a285ec797 | ["Apache-2.0"] | null | null | null | lib/preproc.py | ayshrv/paperpile-notion | 0fe73aee1e6bfcf3105b9a417182736a285ec797 | ["Apache-2.0"] | null | null | null |
from typing import Dict, List
def match(string, candidates) -> str:
for c in candidates:
if c[0].lower() in string.lower() or c[1].lower() in string.lower():
return c
for c in candidates:
if string.lower() in c[0].lower() or string.lower() in c[1].lower():
return c
return [None, None]
def format_entry(entry: Dict[str, str], journals: List[Dict[str, str]], conferences: List[Dict[str, str]]) -> Dict[str, Dict[str, str]]:
""" Produces a dictionary in format column_name: {type: x, value: y} for each value in the entry"""
########## VENUE ################################
conference_tuple = [ [c['short'], c['name']] for c in conferences]
# Select the conference shortname based on proceedings
if entry['Item type'] == 'Journal Article':
if 'Full journal' in entry.keys() and entry['Full journal']:
venue = [j['short'] for j in journals if j['name'] == entry['Full journal'].strip()]
else:
venue = [j['short'] for j in journals if j['name'] == entry['Journal'].strip()]
if not venue:
venue = [entry['Journal'].strip()[:100]]
elif entry['Item type'] == 'Conference Paper':
venue = [
c['short'] for c in conferences if c['name'] == match(
entry['Proceedings title'].strip().replace('{','').replace('}',''), conference_tuple
)[1]]
if not venue:
venue = [entry['Proceedings title'].strip()[:100]]
elif entry['Item type'] == 'Preprint Manuscript':
if "openreview" in entry['URLs'].strip().split(';')[0]:
venue = ["OpenReview"]
else:
venue = [entry['Archive prefix'].strip()]
elif entry['Item type'] == 'Book Chapter':
venue = [entry['Book title'].strip()]
else:
venue = []
# Arxiv links are privileged
links = [x for x in entry['URLs'].strip().split(';')]
arxiv_links = [x for x in links if 'arxiv' in x]
if len(arxiv_links) > 0:
selected_link = arxiv_links[0]
venue.append('arXiv')
else:
selected_link = links[0]
# Multi-select fields don't accept commas (and possibly other punctuation)
for i, v in enumerate(venue):
exclude = set([','])
venue[i] = ''.join(ch for ch in v if ch not in exclude)
###################################################
############## DATE #################################
date = ''
if 'Date published' in entry.keys():
if entry['Date published'].strip() != '':
date = entry['Date published'].strip()
if date == '':
if 'Publication year' in entry.keys():
if entry['Publication year'].strip() != '':
date = entry['Publication year'].strip() + '-01-01'
if len(date) > 10: # YYYY-MM-DD....
date = date[:10]
if len(date) == 4: # YYYY
date = entry['Publication year'].strip() + '-01-01'
if len(date) == 7: # YYYY-MM
date = date + '-01'
if date == '':
date = '2000-01-01'
######################################################
all_labels = [x.strip() for x in entry['Labels filed in'].strip().split(';')]
all_folders = [x.strip() for x in entry['Folders filed in'].strip().split(';')]
if len(all_labels) == 1 and len(all_labels[0]) == 0:
all_labels = []
if len(all_folders) == 1 and len(all_folders[0]) == 0:
all_folders = []
# categories = [x for x in all_labels if ' - ' not in x]
# methods = [x.split(' - ')[1] for x in all_labels if ' - ' in x]
formatted_entry = {
'Item type': {'type': 'select', 'value': entry['Item type'].strip()},
'Authors': {'type': 'multi_select', 'value': entry['Authors'].strip().split(',')},
'Title': {'type': 'title', 'value': entry['Title'].strip().replace('{','').replace('}','')},
'Venues': {'type': 'multi_select', 'value': venue},
'Date': {'type': 'date', 'value': date},
'Link': {'type': 'url', 'value': selected_link},
'Labels': {'type': 'multi_select', 'value': all_labels}, #, 'color': [COLOR_MAP[cat]['color'] for cat in categories]}
'Folders': {'type': 'multi_select', 'value': all_folders}
}
# if "reading-list" in all_labels:
status_value = 'to-be-read'
formatted_entry['Status'] = {'type': 'select', 'value': status_value}
filtered_formatted_entry = formatted_entry.copy()
keys_delete = []
for key, value in filtered_formatted_entry.items():
if value["value"] in ['', "", [], [''], [""]]:
keys_delete.append(key)
for key in keys_delete:
del filtered_formatted_entry[key]
return formatted_entry, filtered_formatted_entry
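# A hedged usage sketch of format_entry(): the entry below is an invented
# Paperpile-style CSV row (field values are not from any real export), and the
# journal/conference lists are left empty. Note that for an arXiv link the
# venue list ends up containing 'arXiv' twice: once from the archive prefix
# and once from the arXiv-link heuristic above.
if __name__ == '__main__':
    example_entry = {
        'Item type': 'Preprint Manuscript',
        'Authors': 'Doe J.,Smith A.',
        'Title': 'An {Example} Title',
        'URLs': 'https://arxiv.org/abs/0000.00000',
        'Archive prefix': 'arXiv',
        'Publication year': '2021',
        'Labels filed in': 'reading-list',
        'Folders filed in': '',
    }
    full, filtered = format_entry(example_entry, journals=[], conferences=[])
    print(filtered['Venues'])  # {'type': 'multi_select', 'value': ['arXiv', 'arXiv']}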
| 38.448 | 136 | 0.52913 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,613 | 0.335622 |
a2c5873d31b6625bca48f32ced06274ab2625243 | 12,181 | py | Python | twitter/Update.py | bhargavz/py-twitter-sentiment-analysis | fc611df592ed607e58c2600bd20fceffa309108c | ["MIT"] | null | null | null | twitter/Update.py | bhargavz/py-twitter-sentiment-analysis | fc611df592ed607e58c2600bd20fceffa309108c | ["MIT"] | null | null | null | twitter/Update.py | bhargavz/py-twitter-sentiment-analysis | fc611df592ed607e58c2600bd20fceffa309108c | ["MIT"] | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# FILE: Update.py
#
# This class provides mechanisms to update, reply to, retweet status
# messages and send direct messages
#
# Copyright by Author. All rights reserved. Not for reuse without
# express permissions.
#
import sys, time, json, logging
from sochi.twitter.Login import Login
from sochi.twitter.TwitterBase import TwitterBase
from sochi.twitter.auth_settings import *
class Update(TwitterBase):
def __init__(self,
name="Update",
logger=None,
args=(),
kwargs={}):
TwitterBase.__init__(self, name=name, logger=logger,
args=args, kwargs=kwargs)
self.update_url ="https://api.twitter.com/1.1/statuses/update.json"
self.retweet_url ="https://api.twitter.com/1.1/statuses/retweet/"
self.direct_url ="https://api.twitter.com/1.1/direct_messages/new.json"
self.status_update = False
self.status_retweet = False
self.direct_message = False
self.max_status_len = 140
self.set_request_type_as_status_update()
##
# Sets the type of request to a status update
#
def set_request_type_as_status_update(self):
if( not self.querying ):
self.status_update = True
self.status_retweet = False
self.direct_message = False
self.clear_request_params()
self.set_request_domain(self.update_url)
##
# Sets the type of request to a retweet request
#
def set_request_type_as_retweet(self):
if( not self.querying ):
self.status_update = False
self.status_retweet = True
self.direct_message = False
self.clear_request_params()
self.set_request_domain(self.retweet_url)
##
# Sets the type of request to direct message
#
def set_request_type_as_direct_message(self):
if( not self.querying ):
self.status_update = False
self.status_retweet = False
self.direct_message = True
self.clear_request_params()
self.set_request_domain(self.direct_url)
##
# Sets the status to be set when the request is made
#
def set_status(self, status=None, doit=False):
if( not self.querying ):
if( status and self.status_update):
status = self._trim_status(status)
self.set_request_param(kw="status", val=status)
if( doit ):
if( self.running ):
self.start_request()
else:
self.make_request()
else:
self.clear_request_params()
##
# Sets whether or not this status message is in reply to another message
#
def set_in_reply_to(self, status_id=None):
if( not self.querying ):
if( status_id and self.status_update):
self.set_request_param(kw="in_reply_to_status_id", val=str(status_id))
else:
self.clear_request_params()
##
# Sets the latitude and longitude
#
def set_location(self, lat=None, lon=None):
if( not self.querying ):
if( lat and lon and self.status_update ):
self.set_request_param(kw="lat", val=str(lat))
self.set_request_param(kw="long", val=str(lon))
else:
self.clear_request_params()
##
# Sets the status to be an @ reply to the specified user
#
def set_at_reply_message(self, username=None, status=None, doit=False):
if( not self.querying ):
if( username and status and self.status_update ):
status = "@"+str(username)+" "+str(status)
status = self._trim_status(status)
self.set_request_param(kw="status", val=status)
if( doit ):
if( self.running ):
self.start_request()
else:
self.make_request()
elif( status ):
self.set_status(status=status,doit=doit)
else:
self.clear_request_params()
##
# Sets a direct message to be sent to a specific user either using
# username or user_id
#
def set_direct_message(self, username=None, user_id=None, status=None, doit=False):
if( not self.querying ):
if( (username or user_id) and status and self.direct_message ):
status = self._trim_status(status)
self.set_request_param(kw="text", val=status)
if( username ):
self.set_request_param(kw="screen_name", val=username)
if( user_id ):
self.set_request_param(kw="user_id", val=user_id)
if( doit ):
if( self.running ):
self.start_request()
else:
self.make_request()
else:
self.clear_request_params()
##
# Will retweet the specified tweet ID
#
def set_retweet(self, tweet_id=None, doit=False):
if( not self.querying ):
if( tweet_id and self.status_retweet ):
url = self.retweet_url+str(tweet_id)+".json"
self.clear_request_params()
self.set_request_domain(url)
if( doit ):
if( self.running ):
self.start_request()
else:
self.make_request()
else:
self.clear_request_params()
##
# Trim the status message to fit 140 character limit of Twitter
#
def _trim_status(self, status=None):
if( status ):
status = unicode(status)
if( len(status) > self.max_status_len ):
mesg = "Status too long, truncated."
self.logger.info(mesg)
mesg = "Old status: \"%s\""%(status)
self.logger.info(mesg)
status = status[:self.max_status_len]
mesg = "New status: \"%s\""%(status)
self.logger.info(mesg)
return status
##
# Basically a cheap version of make_request for a status update
#
def update_status(self):
if( self.running ):
self.start_request()
else:
self.make_request()
##
#
#
def make_request(self):
# this code is not reentrant, don't make the request twice
if( self.querying ):
return
self.querying = True
try:
# this must be a POST request as defined by the "Update" spec
#print "domain",self.get_request_domain()
#print "payload",self.get_request_params()
self.set_request(domain=self.get_request_domain(),
method="POST",
payload=self.get_request_params())
request_results = self._make_request(request=self._request_data)
js = None
if( request_results and request_results.text ):
try:
#print request_results.text
js = request_results.json()
except ValueError, e:
mesg = "JSON ValueError: "+str(e)
self.logger.info(mesg)
js = None
if( js ):
#print json.dumps(js, sort_keys=True, indent=4)
self.put_message(m=js)
self.querying = False
except:
self.querying = False
raise
return
def parse_params(argv):
auth = None
user = None
status = None
direct = None
retweet = None
favorite = None
json = False
limits = False
debug = False
logging = False
pc = 1
while( pc < len(argv) ):
param = argv[pc]
if( param == "-auth"):
pc += 1
auth = argv[pc]
if( param == "-user"):
pc += 1
user = argv[pc]
if( param == "-status"):
pc += 1
status = argv[pc]
if( param == "-s"):
pc += 1
status = argv[pc]
if( param == "-direct"):
pc += 1
direct = argv[pc]
if( param == "-d"):
pc += 1
direct = argv[pc]
if( param == "-retweet"):
pc += 1
retweet = argv[pc]
if( param == "-r"):
pc += 1
retweet = argv[pc]
if( param == "-favorite"):
pc += 1
favorite = argv[pc]
if( param == "-f"):
pc += 1
favorite = argv[pc]
if( param == "-log"):
logging = True
if( param == "-debug"):
debug = True
if( param == "-json"):
json = True
if( param == "-limits"):
limits = True
pc += 1
return {'auth':auth, 'user':user,
'status':status, 'direct':direct, 'retweet':retweet, 'favorite':favorite,
'logging':logging, 'debug':debug, 'json':json, 'limits':limits }
def usage(argv):
print "USAGE: python %s -auth <appname> -user <auth_user> -status \"<message>\" [-direct <username>] [-retweet <tweet_id>] [-log] [-json] "%(argv[0])
sys.exit(0)
def main(argv):
if len(argv) < 4:
usage(argv)
p = parse_params(argv)
print p
twit = Update()
twit.set_user_agent(agent="random")
if( p['logging'] ):
log_fname = twit.get_preferred_logname()
fmt='[%(asctime)s][%(module)s:%(funcName)s():%(lineno)d] %(levelname)s:%(message)s'
logging.basicConfig(filename=log_fname,format=fmt,level=logging.INFO)
log = logging.getLogger("twit_tools")
lg = None
if( not p['auth'] and not p['user'] ):
print "Must have authenticating User and Application!"
usage(argv)
return
if( p['auth'] ):
app = p['auth']
app_keys = TWITTER_APP_OAUTH_PAIR(app=p['auth'])
app_token_fname = TWITTER_APP_TOKEN_FNAME(app=p['auth'])
lg = Login( name="StatusUpdateLogin",
app_name=p['auth'],
app_user=p['user'],
token_fname=app_token_fname)
if( p['debug'] ):
lg.set_debug(True)
## Key and secret for specified application
lg.set_consumer_key(consumer_key=app_keys['consumer_key'])
lg.set_consumer_secret(consumer_secret=app_keys['consumer_secret'])
lg.login()
twit.set_auth_obj(obj=lg)
if( p['retweet'] ):
twit.set_request_type_as_retweet()
twit.set_retweet(tweet_id=p['retweet'])
elif( p['direct'] and p['status']):
twit.set_request_type_as_direct_message()
twit.set_direct_message(status=p['status'],username=p['direct'])
elif( p['status'] ):
twit.set_request_type_as_status_update()
twit.set_status(status=p['status'])
else:
print "Must supply a status message to post!"
return
twit.update_status()
twit.wait_request()
if( twit.messages()>0 ):
m = twit.get_message()
if( m ):
if( p['json'] ):
print json.dumps(m,indent=4,sort_keys=True)
else:
if( "created_at" in m and "user" in m ):
print "At %s, user %s posted:"%(m['created_at'],m['user']['name'])
print m['text'].encode('utf-8')
elif( "error" in m or "errors" in m ):
print "Error response."
else:
print "Not clear what this response was!"
print json.dumps(m,indent=4,sort_keys=True)
else:
print "Nothing returned!"
if( twit.had_warning() ):
print "WARNING:",twit.get_last_warning()
if( twit.had_error() ):
print "ERROR:",twit.get_last_error()
return
if __name__ == '__main__':
main(sys.argv)
| 33.190736 | 153 | 0.531566 | 7,352 | 0.603563 | 0 | 0 | 0 | 0 | 0 | 0 | 2,436 | 0.199984 |
a2c66a4215e8e48df86487ea705d0d4b4b919ca2 | 654 | py | Python | library/utils/time.py | zjjott/html | 68429832d8b022602915a267a62051f4869f430f | ["MIT"] | null | null | null | library/utils/time.py | zjjott/html | 68429832d8b022602915a267a62051f4869f430f | ["MIT"] | null | null | null | library/utils/time.py | zjjott/html | 68429832d8b022602915a267a62051f4869f430f | ["MIT"] | null | null | null |
# coding=utf-8
from __future__ import unicode_literals, absolute_import
from datetime import datetime
from pytz import UTC
from dateutil.parser import parse
fmt = '%Y-%m-%d %H:%M:%S'
utc_fmt = "%Y-%m-%dT%H:%M:%SZ"
def get_utcnow():
at = datetime.utcnow()
at = at.replace(tzinfo=UTC)
return at
def isotime(at=None):
"""Stringify time in ISO 8601 format"""
if not at:
at = datetime.utcnow()
if not at.tzinfo: # naive datetimes are assumed to be UTC
at = at.replace(tzinfo=UTC)
at_utc = at
else: # otherwise convert the timezone to UTC
at_utc = at.astimezone(UTC)
return at_utc.strftime(utc_fmt)
def parse_timestr(timestr):
return parse(timestr)
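# Usage sketch (illustrative values): naive datetimes are treated as UTC and
# rendered with the 'Z' suffix from utc_fmt, aware ones are converted first.
#   isotime(datetime(2020, 1, 1, 12, 30))              -> '2020-01-01T12:30:00Z'
#   isotime(datetime(2020, 1, 1, 12, 30, tzinfo=UTC))  -> '2020-01-01T12:30:00Z'
#   parse_timestr('2020-01-01T12:30:00Z')              -> timezone-aware datetime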
| 21.8 | 56 | 0.652905 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 132 | 0.195266 |
a2ce4992ffbad38fcb41d65444677ff2a942a09e | 5,612 | py | Python | aguas_altas/build_gdb/build_gdb_table.py | PEM-Humboldt/caracterizacion_paisajes_sonoros_ppii | 2b99a69faeb5cc094e582a2b6929ef18bd4a3c4e | ["MIT"] | null | null | null | aguas_altas/build_gdb/build_gdb_table.py | PEM-Humboldt/caracterizacion_paisajes_sonoros_ppii | 2b99a69faeb5cc094e582a2b6929ef18bd4a3c4e | ["MIT"] | null | null | null | aguas_altas/build_gdb/build_gdb_table.py | PEM-Humboldt/caracterizacion_paisajes_sonoros_ppii | 2b99a69faeb5cc094e582a2b6929ef18bd4a3c4e | ["MIT"] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Build Geo Database for PaisajesSonorosTB
----------------------------------------
This script compiles the output of several scripts that must be executed beforehand:
- audio_metadata
- acoustic_indices
- graphical_soundscapes
- soundscape_composition
"""
import numpy as np
import pandas as pd
import glob
import os
#%% Load audio metadata, acoustic indices, graphical soundscapes, manual annotations and environmental data to build the GDB
# Audio metadata
df_metadata = pd.read_csv('../audio_metadata/audio_metadata_lluvias.csv')
# Acoustic indices
df_indices = pd.read_csv('../acoustic_community_characterization/acoustic_indices/dataframes/allsites_acoustic_indices_env.csv')
# Graphical soundscapes
flist = glob.glob('../acoustic_community_characterization/graphical_soundscapes/dataframes/*.csv')
df_graph = pd.DataFrame()
for fname in flist:
aux = pd.read_csv(fname)
aux.drop(columns='hour', inplace=True)
aux = pd.Series(aux.values.ravel(), name=os.path.basename(fname)[0:4])
df_graph = df_graph.append(aux)
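# Illustration of the flattening above (assumed input layout): each per-site CSV is
# expected to hold one row per hour and one column per frequency bin plus an 'hour'
# column. For a toy 2-hour x 3-bin table
#   hour  f1   f2   f3
#      0  0.0  0.1  0.0
#      1  0.2  0.0  0.3
# dropping 'hour' and ravelling row-wise gives [0.0, 0.1, 0.0, 0.2, 0.0, 0.3], stored as
# one row named after the 4-character site code taken from the file name.
# Note: DataFrame.append is used throughout this script; it was removed in pandas 2.0,
# so an older pandas (< 2.0) is assumed.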
# Soundscape components using manual annotations
df_comp = pd.read_csv('../soundscape_composition/dataframes/presence_absence_global_components.csv')
# Environmental data: ANH_to_GXX table linking sensor names to sampling event IDs
df_eventID = pd.read_csv('../env_data/ANH_to_GXX.csv')[['sensor_name', 'eventID']]
#%% Process dataframes to meet GDB criteria
# Compute metadata per site
df_site_metadata = pd.DataFrame()
for site_idx, site in df_metadata.groupby('site'):
site_metadata = pd.Series({'sensor_name': site_idx,
'TASA_MUEST': site['sample.rate'].unique()[0].astype(int),
'RES_BITS': site['bits'].unique()[0].astype(int),
'MICROFONO': 'Audiomoth v1.20',
'REF_GRAB': 'Audiomoth v1.20',
'FECHA_INI': site.date.sort_values().iloc[0][0:10],
'FECHA_FIN': site.date.sort_values().iloc[-1][0:10],
'HORA_INI': site.date.sort_values().iloc[0][11:],
'HORA_FIN': site.date.sort_values().iloc[-1][11:],
'NUM_GRAB': len(site),
'TASA_GRAB': '60 segundos cada 1800 segundos',
'ESTACIONAL': 'Húmedo',
'ALTURA': 1.5
})
df_site_metadata = df_site_metadata.append(site_metadata, ignore_index=True)
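# Note on the date slicing above: assuming 'date' strings of the form
# 'YYYY-MM-DD HH:MM:SS', a hypothetical value '2021-10-11 05:30:00' yields
# '2021-10-11' for FECHA_INI/FECHA_FIN (characters [0:10]) and '05:30:00' for
# HORA_INI/HORA_FIN (characters [11:]).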
# Compute proportion of components per site
df_comp['sensor_name'] = df_comp['fname'].str[0:4]
df_site_comp = pd.DataFrame()
for site_idx, site in df_comp.groupby('sensor_name'):
site_comp = pd.Series({'sensor_name': site_idx,
'GEOFONIA': (site['GEO'].sum()/len(site) * 100).round(3),
'ANTROPOFON': (site['ANT'].sum()/len(site) * 100).round(3),
'BIOFONIA': (site['BIO'].sum()/len(site) * 100).round(3)
})
df_site_comp = df_site_comp.append(site_comp, ignore_index=True)
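# Worked example (hypothetical counts): if a site has 120 annotated recordings and
# geophony (GEO == 1) occurs in 30 of them, GEOFONIA = 30 / 120 * 100 = 25.0; each
# column is therefore the percentage of recordings containing that component.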
# Acoustic indices per site
df_site_indices = pd.DataFrame()
for site_idx, site in df_indices.groupby('sensor_name'):
site_indices = pd.Series({'sensor_name': site_idx,
'ACI_Q25': site.ACI.quantile(q=0.25),
'ACI_Q50': site.ACI.quantile(q=0.5),
'ACI_Q75': site.ACI.quantile(q=0.75),
'ADI_Q25': site.ADI.quantile(q=0.25),
'ADI_Q50': site.ADI.quantile(q=0.5),
'ADI_Q75': site.ADI.quantile(q=0.75),
'NDSI_Q25': site.NDSI.quantile(q=0.25),
'NDSI_Q50': site.NDSI.quantile(q=0.5),
'NDSI_Q75': site.NDSI.quantile(q=0.75),
'BIO_Q25': site.BI.quantile(q=0.25),
'BIO_Q50': site.BI.quantile(q=0.5),
'BIO_Q75': site.BI.quantile(q=0.75),
'AEI_Q25': site.H.quantile(q=0.25),
'AEI_Q50': site.H.quantile(q=0.5),
'AEI_Q75': site.H.quantile(q=0.75),
'NP_Q25': site.NP.quantile(q=0.25),
'NP_Q50': site.NP.quantile(q=0.5),
'NP_Q75': site.NP.quantile(q=0.75),
'SC_Q25': site.SC.quantile(q=0.25),
'SC_Q50': site.SC.quantile(q=0.5),
'SC_Q75': site.SC.quantile(q=0.75),
'HF_Q25': site.Hf.quantile(q=0.25),
'HF_Q50': site.Hf.quantile(q=0.5),
'HF_Q75': site.Hf.quantile(q=0.75),
'HT_Q25': site.Ht.quantile(q=0.25),
'HT_Q50': site.Ht.quantile(q=0.5),
'HT_Q75': site.Ht.quantile(q=0.75),
'ASU': (df_graph.loc[site_idx,:]>0).sum()/df_graph.shape[1]
})
df_site_indices = df_site_indices.append(site_indices, ignore_index=True)
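# Sketch of an equivalent, more compact computation (assuming the column names used
# above); the explicit loop is kept here because it controls the GDB column names directly:
#   df_indices.groupby('sensor_name')[['ACI', 'ADI', 'NDSI', 'BI', 'NP']].quantile([0.25, 0.5, 0.75])
# returns the same per-site quantiles as a MultiIndex frame that can be unstacked and renamed.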
df_eventID.rename(columns={'eventID': 'ID_MUEST_PT'}, inplace=True)
#%% Build GDB
df_gdb = df_eventID.merge(df_site_metadata, on='sensor_name')
df_gdb = df_gdb.merge(df_site_comp, on='sensor_name')
df_gdb = df_gdb.merge(df_site_indices, on='sensor_name')
df_gdb.to_csv('./dataframes/gdb_site.csv', index=False)
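# Sketch of the expected output (hypothetical values): one row per sensor combining the
# merged column groups, e.g.
#   sensor_name  ID_MUEST_PT  TASA_MUEST  ...  GEOFONIA  BIOFONIA  ACI_Q50  ...  ASU
#   G001         ANH_123      48000       ...  12.500    84.300    1520.4   ...  0.37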
| 48.37931
| 128
| 0.541162
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,751
| 0.311954
|