Dataset schema (one row per source file; ranges are min-max across the dataset):

blob_id: string (40 chars) | directory_id: string (40 chars) | path: string (2-616 chars) | content_id: string (40 chars)
detected_licenses: list (0-69 items) | license_type: string (2 classes) | repo_name: string (5-118 chars)
snapshot_id: string (40 chars) | revision_id: string (40 chars) | branch_name: string (4-63 chars)
visit_date: timestamp[us] | revision_date: timestamp[us] | committer_date: timestamp[us]
github_id: int64 (2.91k-686M, nullable) | star_events_count: int64 (0-209k) | fork_events_count: int64 (0-110k)
gha_license_id: string (23 classes) | gha_event_created_at: timestamp[us] | gha_created_at: timestamp[us] | gha_language: string (213 classes)
src_encoding: string (30 classes) | language: string (1 class) | is_vendor: bool | is_generated: bool
length_bytes: int64 (2-10.3M) | extension: string (246 classes) | content: string (2-10.3M chars)
authors: list (1 item) | author_id: string (0-212 chars)
blob_id: e312fb490ae757aa717461c6ed782f2015e967f6 | directory_id: efa04e5831e6defadabf8dc8f506a415c54710f5 | content_id: 95e4fa3eb384fd44a5974ca48c978f06598def83
path: /visitstat/migrations/0001_initial.py
detected_licenses: [] | license_type: no_license | repo_name: fomalhaut88/django-visitstat | branch_name: refs/heads/master
snapshot_id: c3e9e7b6b9643474342e87ba065624602bd742c9 | revision_id: c642d6602e1e0bd4d786493a2a61cfa1dfa60b6c
visit_date: 2021-05-04T13:15:02.949612 | revision_date: 2020-07-29T16:05:19 | committer_date: 2020-07-29T16:05:19
github_id: 120310020 | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 1022 | extension: py
content:
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2018-02-05 12:22
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Visit',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('datetime', models.DateTimeField()),
('ip', models.CharField(max_length=15)),
('method', models.CharField(max_length=6)),
('url', models.CharField(max_length=1000)),
('referer', models.CharField(blank=True, default=None, max_length=1000, null=True)),
('querystring', models.TextField(blank=True, default=None, null=True)),
('status', models.IntegerField()),
('reason', models.CharField(max_length=64)),
],
),
]
authors: ["a.khlebushchev@cashoff.ru"] | author_id: a.khlebushchev@cashoff.ru

blob_id: 2cadfee2afa2e0b3e38f70d81d48f2c2bdfd4658 | directory_id: 6b2a8dd202fdce77c971c412717e305e1caaac51 | content_id: 47e9f815cf1aa2439e447299359df6fe1901dd12
path: /solutions_5636311922769920_0/Python/Adipt/my_code4.py
detected_licenses: [] | license_type: no_license | repo_name: alexandraback/datacollection | branch_name: refs/heads/master
snapshot_id: 0bc67a9ace00abbc843f4912562f3a064992e0e9 | revision_id: 076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf
visit_date: 2021-01-24T18:27:24.417992 | revision_date: 2017-05-23T09:23:38 | committer_date: 2017-05-23T09:23:38
github_id: 84313442 | star_events_count: 2 | fork_events_count: 4 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 1251 | extension: py
content:
t = int(raw_input().strip())
i = 0
while t>0:
t-=1
i+=1
k, c, s = tuple(map(int, raw_input().strip().split()))
"""if c==1:
if k>s:
print "Case #%d:" %(i), "IMPOSSIBLE"
continue
else:
print "Case #%d:" %(i), ' '.join(map(str, range(1, k+1)))
continue
elif s<(int(k/2)+(k%2)):
print "Case #%d:" %(i), "IMPOSSIBLE"
continue
else:
arr = ['2']
while s>0:
temp = int(arr[-1])+2+2*(k**(c-1))
if temp<=(k**c):
arr.append(str(temp))
s-=1
else:
if temp == (k**c)+1:
arr.append(str(temp-1))
break
r = ' '.join(arr)"""
if k>=s+c:
print "Case #%d:" %i, "IMPOSSIBLE"
continue
elif k<=s:
print "Case #%d:" %i, " ".join(map(str, range(1, k+1)))
continue
a = []
b = 1
d = 1
f = 1
while d<=s:
e=c
b = f
while e>1:
e-= 1
b = (b-1)*k+b+1
f+=1
a.append(str(b))
if f>=k:
break
a = ' '.join(a)
print "Case #%d:" %(i), a
authors: ["alexandra1.back@gmail.com"] | author_id: alexandra1.back@gmail.com

blob_id: 9aac6a17890049f6aa5499a52d28bd3164c3e9ff | directory_id: fa6c05962e814b4d9cbb8f41ccf58229150ef9d6 | content_id: ed74f4a4e1e54371736b2b70ded8bd2c48f92a2e
path: /controllers/webswarm/asb/behavior_module.py
detected_licenses: [] | license_type: no_license | repo_name: mikaelbr/SwarmWebots | branch_name: refs/heads/master
snapshot_id: 86dc02ca6ee6403ed8ecd50e599e6f6fe4e7d017 | revision_id: 7a66ff876a2fb338bf6b9cba11a8a3ddfe97a639
visit_date: 2023-08-30T04:58:13.102013 | revision_date: 2012-04-20T11:24:50 | committer_date: 2012-04-20T11:24:50
github_id: 4014348 | star_events_count: 2 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 1273 | extension: py
content:
"""
A module used by the behavior controller. This is the base
layer meant to be extended by different implementations.
All layers must implement their own do() method.
When either left or right wheel speed is set/altered,
the reacted flag will be set, so the controller knows
that this layer has been executed (reacted).
"""
class BehaviorModule(object):
def __init__(self):
self.reacted = False
self.robot = None
self._left_wheel_speed = 0
self._right_wheel_speed = 0
def reset(self):
self._left_wheel_speed = 0
self._right_wheel_speed = 0
self.reacted = False
@property
def left_wheel_speed(self):
return self._left_wheel_speed
@left_wheel_speed.setter
def left_wheel_speed(self, value):
self.reacted = True
self._left_wheel_speed = value
@property
def right_wheel_speed(self):
return self._right_wheel_speed
@right_wheel_speed.setter
def right_wheel_speed(self, value):
self.reacted = True
self._right_wheel_speed = value
def do(self):
"""
Implementation method. Abstract in this case,
        but must be implemented by the subclasses.
"""
pass
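A minimal usage sketch (illustrative addition, not part of the repository file): the hypothetical AvoidObstacle layer shows how a concrete layer extends BehaviorModule; writing a wheel speed through the property setters flips the reacted flag the controller checks.

class AvoidObstacle(BehaviorModule):
    def do(self):
        # Writing through the setters marks this layer as reacted
        self.left_wheel_speed = -5
        self.right_wheel_speed = 5

layer = AvoidObstacle()
layer.do()
assert layer.reacted        # set by the property setters
layer.reset()
assert not layer.reacted    # cleared again for the next cycle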
authors: ["mikaelb@mikaelb.net"] | author_id: mikaelb@mikaelb.net

blob_id: e0aa72c3a4ebf1441caee7340dc892d5993aa0f8 | directory_id: 57acf349a3438613baa1754551ddc1469aed7692 | content_id: 615a0bc4b133ce9dc5e5cddafcda66356b908e2f
path: /Neural_network1_coursera/propagate.py
detected_licenses: [] | license_type: no_license | repo_name: girish010789/Deep_learning.ai | branch_name: refs/heads/master
snapshot_id: 4821df16bd982cb85fb05ac070b8509f34eccd3e | revision_id: 0693c9feaf6fb9bab8835086f7b2581b4e6f0c8c
visit_date: 2020-08-05T20:35:51.288073 | revision_date: 2019-10-04T00:16:29 | committer_date: 2019-10-04T00:16:29
github_id: 212700800 | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 1446 | extension: py
content:
import numpy as np
from dataset import load_dataset
from calc_sigmoid import sigmoid
def propagate(w, b, X, Y):
"""
Implement the cost function and its gradient for the propagation explained above
Arguments:
w -- weights, a numpy array of size (num_px * num_px * 3, 1)
b -- bias, a scalar
X -- data of size (num_px * num_px * 3, number of examples)
Y -- true "label" vector (containing 0 if non-cat, 1 if cat) of size (1, number of examples)
Return:
cost -- negative log-likelihood cost for logistic regression
dw -- gradient of the loss with respect to w, thus same shape as w
db -- gradient of the loss with respect to b, thus same shape as b
Tips:
- Write your code step by step for the propagation. np.log(), np.dot()
"""
m = X.shape[1]
# FORWARD PROPAGATION (FROM X TO COST)
A = sigmoid(np.dot(w.T,X) + b) # compute activation
# cost = -1/m * (np.dot(Y,np.log(A).T) + np.dot((1-Y),np.log(1 - A).T))
cost = np.sum(Y * np.log(A) + (1 - Y) * np.log(1 - A))/(-m) # compute cost
# BACKWARD PROPAGATION (TO FIND GRAD)
dw = (1/m) * np.dot(X,(A-Y).T)
db = (1/m) * np.sum(A-Y)
assert(dw.shape == w.shape)
assert(db.dtype == float)
cost = np.squeeze(cost)
assert(cost.shape == ())
grads = {"dw": dw,
"db": db}
return grads, cost
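A quick shape check (illustrative addition, not from the repository; since calc_sigmoid is not shown here, a local sigmoid is assumed) mirroring the forward and backward steps above on a tiny input:

import numpy as np

def sigmoid(z):
    return 1 / (1 + np.exp(-z))

w = np.zeros((3, 1))                                   # 3 features
b = 0.0
X = np.array([[1.0, 2.0], [3.0, 4.0], [0.5, -1.0]])    # 2 examples
Y = np.array([[1, 0]])

A = sigmoid(np.dot(w.T, X) + b)                        # shape (1, 2)
dw = (1 / X.shape[1]) * np.dot(X, (A - Y).T)           # shape (3, 1), same as w
db = (1 / X.shape[1]) * np.sum(A - Y)                  # scalar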
authors: ["gr959202@c02x1ab9jg5j.vpn.broadcom.net"] | author_id: gr959202@c02x1ab9jg5j.vpn.broadcom.net

blob_id: a316eb324713181356194142ff00bc2577481b9e | directory_id: df6c33d4c960b30e5965ceee1d6cf76d7ff7d7b4 | content_id: db9b0cfa1a56c0b6bccf0cafe96d21de040e802d
path: /general.py
detected_licenses: [] | license_type: no_license | repo_name: selinachua/dbn_xml_scraper | branch_name: refs/heads/master
snapshot_id: 6eef49f4b084206ce5f195141da3bb1a8d3ab7f2 | revision_id: 28cee3c0a5595262a36b4fe35e23c7f5ff5ddc9c
visit_date: 2020-05-09T17:40:46.019697 | revision_date: 2019-04-14T14:19:02 | committer_date: 2019-04-14T14:19:02
github_id: 181310366 | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 703 | extension: py
content:
'''
# Created by:
# Selina Chua
# selina.a.chua@gmail.com
#
# This file contains the class declaration for a general service.
# It contains the information required for any general service.
'''
class GeneralService():
def __init__(self, name, cover, wait, limits, max_ben):
self.name = name
self.cover = cover
self.wait = wait
self.limits = limits
self.max_ben = max_ben
def __str__(self):
string = (
f"--- {self.name} ---\n"
f"Covered = {self.cover}\n"
f"Waiting Period = {self.wait}\n"
f"Limits = {self.limits}\n"
f"Max Benefits = {self.max_ben}\n"
)
return string
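Usage sketch (illustrative addition; the field values below are made up): constructing a service and printing it exercises __str__.

svc = GeneralService("Dental", cover="Yes", wait="2 months",
                     limits="$500/year", max_ben="$1,000")
print(svc)
# --- Dental ---
# Covered = Yes
# Waiting Period = 2 months
# Limits = $500/year
# Max Benefits = $1,000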
authors: ["selina.a.chua@gmail.com"] | author_id: selina.a.chua@gmail.com

blob_id: 88357c0c8134c695dca8dadeb9dba7b97228a29a | directory_id: 82762d776e2400948af54ca2e1bdf282885d922c | content_id: 7b4635bd0aab03aa3ab0d89be36c4fbb70288ae4
path: /914. 卡牌分组.py
detected_licenses: [] | license_type: no_license | repo_name: dx19910707/LeetCode | branch_name: refs/heads/master
snapshot_id: f77bab78bcba2d4002c9662c122b82fc3c9caa46 | revision_id: 624975f767f6efa1d7361cc077eaebc344d57210
visit_date: 2020-03-17T02:50:46.546878 | revision_date: 2019-06-25T09:22:13 | committer_date: 2019-06-25T09:22:13
github_id: 133208490 | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 579 | extension: py
content:
class Solution(object):
def hasGroupsSizeX(self, deck):
"""
:type deck: List[int]
:rtype: bool
40ms, beats: 82.81%
"""
lens = len(deck)
if lens <= 1:
return False
d = {}
for i in deck:
if i in d:
d[i] += 1
else:
d[i] = 1
middle = lens // 2
for i in range(2, middle + 2):
if lens % i == 0:
if all(map(lambda x: x % i == 0, d.values())):
return True
return False
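Example (illustrative addition): [1,2,3,4,4,3,2,1] has every count equal to 2, so group size X = 2 works; [1,1,1,2,2,2,3,3] has counts 3, 3 and 2, which share no common divisor >= 2.

s = Solution()
print(s.hasGroupsSizeX([1, 2, 3, 4, 4, 3, 2, 1]))  # True
print(s.hasGroupsSizeX([1, 1, 1, 2, 2, 2, 3, 3]))  # False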
authors: ["duxi@aircos.com"] | author_id: duxi@aircos.com

blob_id: 71d863a94ff6db89e2905cf7a8bdb64fe5ef4f22 | directory_id: 316a46f8fc4969fe2bbe457107778bddff7dc224 | content_id: 3d567867c20845fcf50723610cf3e1d9af424311
path: /Bookord/Bookord/urls.py
detected_licenses: ["MIT"] | license_type: permissive | repo_name: MahtaFetrat/Bookord | branch_name: refs/heads/main
snapshot_id: 18344eb91273bdfbeb148f93de6d0809ee1d6aae | revision_id: 9f19d9caf777e3ba6a6aa587ddd65738671a9703
visit_date: 2023-07-04T15:54:19.492892 | revision_date: 2021-08-06T17:11:28 | committer_date: 2021-08-06T17:23:31
github_id: 349975663 | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 943 | extension: py
content:
"""Bookord URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from django.urls.conf import include
from user import views
urlpatterns = [
path('', views.homepage_view, name='homepage'),
path('book/', include('core.urls')),
path('user/', include('user.urls')),
path('admin/', admin.site.urls),
]
authors: ["77fetrat@gmail.com"] | author_id: 77fetrat@gmail.com

blob_id: 88b8c6b5252f23faf07d481d2c75f34d80c9e455 | directory_id: 7f2511240539b1327a5a97060fa59f811bdc2889 | content_id: e8d876c67348a46b9591772a47dfbe9a8f346ac3
path: /django_functest/tests/test_utils.py
detected_licenses: [] | license_type: no_license | repo_name: liushilive/django-functest | branch_name: refs/heads/master
snapshot_id: e1080c2e8b9031ba9b271bfd56029f0b77082e5a | revision_id: 8cffd4ae01dd9a004acc0f37088a34ce5b5e0983
visit_date: 2021-01-20T14:16:23.133597 | revision_date: 2018-03-30T07:06:22 | committer_date: 2018-03-30T07:06:22
github_id: 82748549 | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: 2018-03-30T07:06:23 | gha_created_at: 2017-02-22T01:57:10 | gha_language: Python
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 4299 | extension: py
content:
from __future__ import absolute_import, print_function, unicode_literals
import inspect
from unittest import TestCase
from django.contrib.auth import get_user_model
from django_functest import AdminLoginMixin, FuncBaseMixin, FuncSeleniumMixin, FuncWebTestMixin, ShortcutLoginMixin
from .base import ChromeBase, FirefoxBase, PhantomJSBase, WebTestBase
LOGGED_OUT_URL = "/admin/login/?next=/admin/"
class TestShortcutLoginBase(ShortcutLoginMixin):
def setUp(self):
super(TestShortcutLoginBase, self).setUp()
User = get_user_model()
self.user = User.objects.create_superuser("admin", "admin@example.com", "password")
def test_login_succeeds(self):
self.shortcut_login(username=self.user.username, password="password")
self.get_url("admin:index")
self.assertUrlsEqual("/admin/")
def test_login_raises_exception_with_wrong_password(self):
self.assertRaises(ValueError, lambda: self.shortcut_login(username=self.user.username, password="foo"))
def test_logout_succeeds(self):
self.shortcut_login(username=self.user.username, password="password")
self.shortcut_logout()
self.get_url("admin:index")
self.assertUrlsEqual(LOGGED_OUT_URL)
class TestShortcutLoginWebTest(TestShortcutLoginBase, WebTestBase):
pass
class TestShortcutLoginFirefox(TestShortcutLoginBase, FirefoxBase):
pass
class TestShortcutLoginChrome(TestShortcutLoginBase, ChromeBase):
pass
class TestShortcutLoginPhantomJS(TestShortcutLoginBase, PhantomJSBase):
pass
class TestAdminLoginBase(AdminLoginMixin):
def setUp(self):
super(TestAdminLoginBase, self).setUp()
User = get_user_model()
self.user = User.objects.create_superuser("admin", "admin@example.com", "password")
def test_login_succeeds(self):
self.do_login(username="admin", password="password", shortcut=False)
self.get_url("admin:index")
self.assertUrlsEqual("/admin/")
def test_login_shortcut_succeeds(self):
self.do_login(username="admin", password="password", shortcut=True)
self.get_url("admin:index")
self.assertUrlsEqual("/admin/")
def test_login_raises_exception_with_wrong_password(self):
self.assertRaises(ValueError, lambda: self.do_login(username="admin", password="password_2"))
def test_logout_succeeds(self):
self.shortcut_login(username="admin", password="password")
self.do_logout(shortcut=True)
self.get_url("admin:index")
self.assertUrlsEqual(LOGGED_OUT_URL)
def test_logout_shortcut_succeeds(self):
self.shortcut_login(username="admin", password="password")
self.do_logout(shortcut=False)
self.get_url("admin:index")
self.assertUrlsEqual(LOGGED_OUT_URL)
class TestAdminLoginWebTest(TestAdminLoginBase, WebTestBase):
pass
class TestAdminLoginFirefox(TestAdminLoginBase, FirefoxBase):
pass
class TestAdminLoginChrome(TestAdminLoginBase, ChromeBase):
pass
class TestAdminLoginPhantomJS(TestAdminLoginBase, PhantomJSBase):
pass
class TestDocStrings(TestCase):
def test_doc_strings(self):
bad_docstrings = []
for cls in [FuncSeleniumMixin, FuncWebTestMixin]:
for name, member in inspect.getmembers(cls):
if name.startswith('__'):
continue
if hasattr(member, 'im_func'):
member = member.im_func # Python 2 method
member_doc = getattr(member, '__doc__', None)
base_member = getattr(FuncBaseMixin, name, None)
if base_member is not None:
if hasattr(base_member, 'im_func'):
base_member = base_member.im_func # Python 2 method
base_doc = getattr(base_member, '__doc__', None)
if base_doc is not None and member_doc != base_doc:
bad_docstrings.append((cls, name))
if bad_docstrings:
self.fail("The following methods have incorrect or missing docstrings "
"compared to FuncBaseMixin: \n" +
"\n".join("{0}.{1}".format(cls.__name__, name)
for cls, name in bad_docstrings))
authors: ["L.Plant.98@cantab.net"] | author_id: L.Plant.98@cantab.net

blob_id: b0d34e5bfa233e736dd8f800f1f405ae4197c355 | directory_id: b3f74be8c7456cc58969f0767022ff4036ad2594 | content_id: 57ce2fc98fea4c3eaa9c5a6a97639a7f95cca3b1
path: /movement.py
detected_licenses: ["MIT"] | license_type: permissive | repo_name: deadrobots/Create-17 | branch_name: refs/heads/master
snapshot_id: 2cf2abafa7ed1f580a2a64df8c3e8f82b8fb47cd | revision_id: c90c985c75f5994e17b6b9e5f1754e582b2d1310
visit_date: 2021-01-12T03:58:00.038999 | revision_date: 2017-07-09T16:00:00 | committer_date: 2017-07-09T16:00:00
github_id: 81684872 | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 2540 | extension: py
content:
from wallaby import *
from math import pi
def drive_timed(left, right, time):
create_drive_direct(-right, -left)
msleep(time)
create_drive_direct(0, 0)
def spin_cw(power, time):
create_drive_direct(power, -power)
msleep(time)
create_drive_direct(0, 0)
def spin_ccw(power, time):
create_drive_direct(-power, power)
msleep(time)
create_drive_direct(0, 0)
def rotate(power, time):
if power > 0:
spin_ccw(power, time)
else:
spin_cw(abs(power), time)
def split_drive(left, right, time, increments, turnTime):
power = -100
if turnTime < 0:
turnTime = abs(turnTime)
power = abs(power)
if turnTime == 0:
drive_timed(left, right, time)
else:
for _ in range(0, increments):
drive_timed(left, right, int(time / increments))
rotate(power, turnTime)
def split_drive_condition(left, right, min, time, turnTime, condition, state=True):
start = seconds() + time
create_drive_direct(-left, -right)
msleep(min)
while condition() is state:
current = seconds()
if current > start:
print turnTime
start = current + time
rotate(-100, turnTime)
create_drive_direct(left, right)
msleep(min)
create_drive_direct(0, 0)
def drive_conditional(left, right, testFunction, state=True):
create_drive_direct(-right, -left)
while testFunction() is state:
pass
stop()
def drive_forever(left, right):
create_drive_direct(-right, -left)
def stop():
create_stop()
INCH_TO_MIL = 25.4
def drive_distance(distance, speed):
dist_mil = INCH_TO_MIL * distance
time = dist_mil / speed
drive_timed(speed, speed, time)
def rotate_degrees(degrees, speed):
if degrees < 0:
speed = -speed
degrees = abs(degrees)
degrees = degrees * 1.13
set_create_total_angle(0)
drive_forever(-speed, speed)
while abs(get_create_total_angle()) < degrees:
pass
stop()
# diameter_inch = 9
# diameter_mil = diameter_inch * INCH_TO_MIL
# if degrees < 0:
# speed = -speed
# degrees = -degrees
# angle = abs(degrees / 360.0)
# circ = pi * diameter_mil
# drive_mil = angle * circ
# time = drive_mil / speed
# rotate(speed, time)
def drive_accel(speed, time):
for sub_speed in range(0, speed+1, 100):
create_drive_direct(-sub_speed, -sub_speed)
msleep(100)
msleep(time)
create_drive_direct(0, 0)
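Usage sketch (illustrative addition, assuming a connected iRobot Create and the KIPR Wallaby runtime set up elsewhere; the speeds and times are made up):

drive_distance(12, 500)      # roughly 12 inches forward at speed 500
rotate_degrees(90, 200)      # about a 90-degree counter-clockwise turn
drive_timed(300, 300, 1500)  # both wheels at 300 for 1.5 seconds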
authors: ["botball@deadrobots.com"] | author_id: botball@deadrobots.com

blob_id: cde51ef8d77d7eea0628333be77b2fe4484394bf | directory_id: ae0c46da3a3a741a1fdec4cfafe61be60c02708d | content_id: 1b1576fdca3c8109708a9f0c4f54e280012800f1
path: /utils/plot/general.py
detected_licenses: ["MIT"] | license_type: permissive | repo_name: kvanderwijst/IAMDiagnostics | branch_name: refs/heads/main
snapshot_id: 9baae6dea403a74cf41d1e128c9e2d28e434dd56 | revision_id: caf28a69c669e0aea2170827913ba56de163c7e2
visit_date: 2023-04-19T05:25:50.538539 | revision_date: 2021-11-25T14:47:49 | committer_date: 2021-11-25T14:47:49
github_id: 345969617 | star_events_count: 2 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 8512 | extension: py
content:
"""
General utils
- add_legend_item: Adds Plotly legend item manually
"""
import numpy as np
import plotly.io as pio
pio.templates.default = "none"
try:
# Bugfix for Plotly default export size
pio.kaleido.scope.default_width = None
pio.kaleido.scope.default_height = None
except:
pass
GRIDCOLOR = "rgba(.2,.2,.2,.1)"
def add_model_comparison(
fig,
meta,
models,
fig_col,
meta_col,
label_posx=None,
label_width=None,
narrative_left=None,
narrative_right=None,
shared_yaxes=False,
showlegendlines=True,
labelshift=1,
exclude_models=None,
):
"""
Used to create the right part of each figure: the indicator comparison.
This is shared code for each indicator.
fig: Plotly subplot figure
fig_col: typically will be 2 if the right subplot is used
meta_col: column from meta df used to get the indicator values
label_posx [None]: by default, the position (relative to the x-axis of col fig_col)
is calculated automatically based on the spread of indicator values. This can
        be overridden manually if this calculation doesn't work properly.
narrative_left: string for arrow pointing left of median value of indicators
narrative_right: same for right
shared_yaxes: if the left subplot has to share the same y-axis, set to True
showlegendlines: while the legend is always shown, the coloured lines are not always
necessary. Setting this to False hides these lines.
labelshift: shift the labels a bit more ( > 1) or less ( < 1) to the left.
exclude_models: list of model names that should be excluded from this plot
models: by default the normal `models` dataframe, but can be used as override.
"""
if exclude_models is not None:
models = models[~models["Full model"].str.contains(exclude_models)].copy()
models["i"] = np.arange(len(models))
n = models["i"].max()
meta_selection = meta[meta["Stripped model"].isin(models.index)]
# Add legend items
for name, symbol, size in [("Newest", "star", 8), ("Older version", "circle", 4)]:
add_legend_item(
fig,
name,
marker={"symbol": symbol, "size": size, "color": "black"},
legendgroup="Age",
)
# Add shade for 1-sigma range
q0, median, q1 = meta_selection[meta_col].quantile([0.16, 0.5, 0.84])
# mean = meta_selection[meta_col].mean()
fig.add_scatter(
x=[q0, q0, q1, q1],
y=[-1, n + 1, n + 1, -1],
fill="toself",
fillcolor="rgba(0,0,0,.1)",
line_width=0,
mode="lines",
name="16-84th perc.",
row=1,
col=fig_col,
)
# Add lines for median and mean
fig.add_scatter(
x=[median, median],
y=[-1, n + 1],
mode="lines",
line={"color": "#888", "width": 2},
name="Median",
row=1,
col=fig_col,
)
# Calculate position of legend items
vmin, vmax = meta_selection[meta_col].min(), meta_selection[meta_col].max()
label_posx = (
vmin - 0.66 * labelshift * (vmax - vmin) if label_posx is None else label_posx
)
label_width = 0.15 * (vmax - vmin) if label_width is None else label_width
for model, (modeltype, fullmodel, i, color) in models.iterrows():
selection = meta_selection[meta_selection["Stripped model"] == model]
# Add dots and stars
fig.add_scatter(
x=selection[meta_col],
y=[i] * len(selection),
marker={
"color": color,
"opacity": 1,
"symbol": [
"star" if is_newest else "circle"
for is_newest in selection["Newest"]
],
"size": [12 if is_newest else 7 for is_newest in selection["Newest"]],
"line": {"color": "#FFF", "width": 1},
},
mode="markers",
showlegend=False,
row=1,
col=fig_col,
)
# Add legend line
if showlegendlines:
fig.add_scatter(
x=[label_posx - label_width, label_posx],
y=[i, i],
mode="lines",
line={"color": color, "width": 3},
row=1,
col=fig_col,
showlegend=False,
)
# Name of model
fig.add_annotation(
text=model,
x=label_posx,
y=i,
xanchor="left",
row=1,
col=fig_col,
bgcolor="#FFF",
showarrow=False,
)
# Add model type brackets
x_max = meta_selection[meta_col].max()
dx = x_max - meta_selection[meta_col].min()
x_right = 0.05 * dx + x_max # 6% to the right of the most right point
x_width = 0.03 * dx
for modeltype, selection in models.groupby("Type"):
first, last = selection["i"].min(), selection["i"].max()
# Bracket itself
dy = 0.3
fig.add_scatter(
x=[x_right, x_right + x_width, x_right + x_width, x_right],
y=[first - dy, first - dy, last + dy, last + dy],
mode="lines",
line_color="#999",
showlegend=False,
row=1,
col=fig_col,
)
# Name of model type
fig.add_annotation(
x=x_right + 1.25 * x_width,
y=(first + last) / 2,
text=modeltype,
textangle=90,
bgcolor="#FFF",
showarrow=False,
yanchor="middle",
xanchor="left",
row=1,
col=fig_col,
)
# Add narrative arrows
for label, toLeft in [(narrative_left, True), (narrative_right, False)]:
if label is None:
continue
arrowlength = 65
multiplier = -1 if toLeft else 1
fig.add_annotation(
xref=f"x{fig_col}",
yref="paper",
xanchor="center",
yanchor="top",
x=median,
y=-0.08,
ax=arrowlength * multiplier,
ay=0,
xshift=multiplier * 5,
width=arrowlength * 2,
align="right" if toLeft else "left",
text=label,
showarrow=True,
arrowside="start",
)
# Update layout
fig.update_yaxes(
col=None if shared_yaxes else fig_col,
gridcolor=GRIDCOLOR,
tickvals=models["i"],
range=[n + 1, -1],
zeroline=False,
showticklabels=False,
).update_layout(legend={"tracegroupgap": 0, "y": 0.5},)
def add_legend_item(fig, name="", mode="markers", **kwargs):
"""
In Plotly, a legend item can be added manually by adding an empty trace
"""
fig.add_scatter(x=[None], y=[None], name=name, mode=mode, **kwargs)
##################
## Functions required to generate confidence ellipse
##################
def ellipse(a, b, npoints):
x = np.linspace(-a, a, npoints)
y1 = b * np.sqrt(1 - (x / a) ** 2)
y2 = -y1
return np.concatenate([x, x[::-1]]), np.concatenate([y1, y2[::-1]])
def rotate(x, y, theta):
return x * np.cos(theta) - y * np.sin(theta), x * np.sin(theta) + y * np.cos(theta)
def confidence_ellipse(x_values, y_values, nsigma, npoints=300):
# Calculate center of confidence ellipse
mu_x, mu_y = np.mean(x_values), np.mean(y_values)
# Calculate correlation coefficient and covariances
cov_matrix = np.cov([x_values, y_values])
cov_xy = cov_matrix[0, 1]
sigma_x, sigma_y = np.sqrt(cov_matrix[0, 0]), np.sqrt(cov_matrix[1, 1])
rho = cov_xy / (sigma_x * sigma_y)
# Get the x-y points for the default ellipse with a=sqrt(1+rho), b=sqrt(1-rho)
ellipse_x, ellipse_y = ellipse(np.sqrt(1 + rho), np.sqrt(1 - rho), npoints)
# Rotate ellipse 45 degrees counter-clockwise
ellipse_x, ellipse_y = rotate(ellipse_x, ellipse_y, np.pi / 4)
# Scale ellipse horizontally by (2*n*sigma_x) and vertically by (2*n*sigma_y)
# Note: scaling by 2*n*sigma_x means that the x_values (centered around 0) should
# be multiplied by n*sigma_x, not 2*n*sigma_x
ellipse_x = nsigma * sigma_x * ellipse_x
ellipse_y = nsigma * sigma_y * ellipse_y
# Shift ellipse such that its center is situated at the point mu_x, mu_y
ellipse_x += mu_x
ellipse_y += mu_y
return ellipse_x, ellipse_y
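Usage sketch (illustrative addition): drawing a 2-sigma confidence ellipse over correlated samples; plotly.graph_objects is assumed to be available alongside the module above.

import numpy as np
import plotly.graph_objects as go

rng = np.random.default_rng(0)
x = rng.normal(size=500)
y = 0.8 * x + 0.3 * rng.normal(size=500)

ex, ey = confidence_ellipse(x, y, nsigma=2)
fig = go.Figure()
fig.add_scatter(x=x, y=y, mode="markers", name="samples")
fig.add_scatter(x=ex, y=ey, mode="lines", name="2-sigma ellipse")
fig.show()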
authors: ["k.i.vanderwijst@gmail.com"] | author_id: k.i.vanderwijst@gmail.com

blob_id: b0c6a18cbd8dccdb343c2f23d8cb296436d7a3a7 | directory_id: 1ff77c29479170c5fb09aa3bbf06f0026e8f5a13 | content_id: a5d1639a8150a15bf9e69475d54c3cebe959b3dd
path: /W2D4/knn.py
detected_licenses: [] | license_type: no_license | repo_name: nwoch/Artificial-Intelligence | branch_name: refs/heads/master
snapshot_id: 9eb5e74d63db662c886d3eae9fd2e397ca90930e | revision_id: 1dca1d1b1f56803249f5c29e46a55095dea67f6c
visit_date: 2020-04-08T01:39:47.896097 | revision_date: 2018-11-24T06:07:44 | committer_date: 2018-11-24T06:07:44
github_id: 158904110 | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 1934 | extension: py
content:
# -*- coding: utf-8 -*-
"""
Created on Sat Nov 3 19:52:53 2018
@author: Nicole
"""
import numpy as np
import pandas as pd
from scipy.spatial import distance
class Knn:
def __init__(self):
self.x = None
self.y = None
def predict(self, k = 5):
df = pd.read_csv('https://archive.ics.uci.edu/ml/'
'machine-learning-databases/iris/iris.data', header=None)
self.x = df.iloc[0:150, 0:4].values
self.y = df.iloc[0:150, [4]].values
predicted_labels = []
for x in self.x:
# Find distance between x and each point in training set
euclidean_distances = distance.cdist(self.x, x.reshape(1, -1))
euclidean_distances[np.where(euclidean_distances == 0)] = np.nan
k_labels = []
# Find k closest neighbors to x
for i in range(k):
min_index = np.nanargmin(euclidean_distances, axis=0)
euclidean_distances[min_index] = np.nan
k_labels.append(self.y[min_index])
# Choose label which appears the most among the k closest neighbors
unique, counts = np.unique(k_labels, return_counts=True)
k_label_counts = dict(zip(unique, counts))
keys = list(k_label_counts.keys())
values = list(k_label_counts.values())
predicted_labels.append(keys[values.index(max(values))])
return predicted_labels
def calc_classification_error(self, predicted):
errors = 0
for i in range(len(self.y)):
if self.y[i] != predicted[i]:
errors += 1
print("Errors: ", errors, "/", len(self.y))
return errors / len(self.y)
def main():
knn = Knn()
error_rate = knn.calc_classification_error(knn.predict(k=20))
print("Final classification error rate: ", error_rate)
if __name__ == "__main__":
main()
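Usage sketch (illustrative addition): predict() downloads the Iris data itself, so only k needs choosing; the error-rate call mirrors main() above.

knn = Knn()
labels = knn.predict(k=5)
print(labels[:3])                              # e.g. ['Iris-setosa', ...]
print(knn.calc_classification_error(labels))   # fraction of misclassified rows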
authors: ["nwoch101@gmail.com"] | author_id: nwoch101@gmail.com

blob_id: 5b684616d65a8a1c6b38aac44c6267f16b24e5bd | directory_id: 24fb4447def3fab6a64f8e44f21a0371179a248b | content_id: ab43dd051cfb28a7062f4bb7706c7c133fd2cdb8
path: /for_smartphone/try_1003.py
detected_licenses: ["MIT"] | license_type: permissive | repo_name: TakenokoEmpire/mypj | branch_name: refs/heads/main
snapshot_id: 28f3c2d208e6ee139318a8e02c77d842c72eb38f | revision_id: fcc56c1113c88d6611b60d0855d6c79df7add7fb
visit_date: 2023-08-28T14:42:28.148268 | revision_date: 2021-10-29T02:47:32 | committer_date: 2021-10-29T02:47:32
github_id: 407075672 | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 36325 | extension: py
content:
# coding: UTF-8
# This file must sit in the same directory as run.py, or it breaks
# import send_recieve
from send_recieve import SendReceive
import random
import math
import copy
"""
TODO
[Guard against rare bugs]
If the solver ever loops on the same digits, maybe try "running randomly among cond != 9 three times"?
If hit somehow becomes 4, it might be worth adding a fallback that fills in the rest of blow_list
[Making it stronger]
Better position info: when a confirmed hit occurs, go back through past data; if there is a probe round that contains that hit and has exactly one hit, upgrade the position info of the round's other digits from 1 to 2 (probably the best cost/benefit)
Stronger endgame: probe 2 digits at a time (should save about one turn on average, but looks pretty tedious)
Stronger midgame: as in the endgame, prefer digits whose position-info sum is large (will this actually be faster? The situation differs from the endgame, so it is hard to tell)
"""
class AutoPlay():
"""自動解答プログラム"""
def __init__(self, print_mode):
self.print_mode = print_mode
self.ram = []
self.gnum_count = -1
self.ans_list = [0, 0, 0, 0, 0]
self.ans_str = ""
# room_id = input("input room id ->")
# player_id = input("input your name ->")
        # Global state
self.numberinfo = [{"number": j, 0: 0, 1: 0, 2: 0, 3: 0,
4: 0, "cond": 0, "gnum": -1} for j in range(16)]
self.groupinfo = []
self.history = []
self.wholeinfo = {"phase": 0, "turn": 1, "confirmed_blow_and_hit": 0,
"confirmed_hit": 0, "noob": 0, "rough_blow": 0, "special_mode": 0}
self.blow_list = []
self.hit_list = [-1, -1, -1, -1, -1]
self.phase_list = {0: "Early phase", 1: "Early phase",
2: "Early phase", 3: "Middle phase", 4: "FINAL phase"}
self.game_record = []
# {"Early_phase_turns": 0, "Middle_phase_turns": 0, "Final_phase_turns": 0,}
def pos(self, x):
if x > 0:
return True
else:
return False
def zero_checker(self, x):
if x == 0:
return 0
else:
return 1
def curse(self, x):
if x == 3 and self.wholeinfo["special_mode"] >= 1:
return 1
else:
return 100
def fugaku(self, ninfo, ginfo, hist, winfo):
"""
        Automatically generate a guess from the updated data.
        Behavior differs across the early, middle, and final phases.
        * Early phase: 3 turns brute-forcing 0-4, 5-9, and a-e
          Middle phase: search for the five digits that are in the answer
          Final phase: permute the five digits found by the search
"""
"""序盤"""
if winfo["phase"] < 3:
if winfo["phase"] == 0:
return ("01234")
elif winfo["phase"] == 1:
return ("56789")
elif winfo["phase"] == 2:
return ("abcde")
else:
print("unexpected error 001")
exit()
"""中盤"""
elif winfo["phase"] == 3:
            guess_mid_list = [-1, -1, -1, -1, -1]  # holds the midgame algorithm's answer
guess_mid_str = ""
            # # # # Decide which group to probe. Until four misses have been found, prioritize finding misses
# # # if winfo["noob"] < 4:
# # # priority_value = [(1000 - 100 * j["leng"] - (j["blow"]+j["hit"]))* self.zero_checker((j["blow"]+j["hit"])*(j["leng"]-(j["blow"]+j["hit"]))) for j in ginfo]
# # # else:
# # # priority_value = [(1000 - 100 * j["leng"] - 10 * j["hit"] + (j["blow"]+j["hit"]))*self.zero_checker((j["blow"]+j["hit"])*(j["leng"]-(j["blow"]+j["hit"]))) for j in ginfo]
# # # print(priority_value)
            # Decide which group to probe. For simplicity, always prioritize finding misses.
            # zero_checker returns 0 when blow+hit is 0 or when length minus (blow+hit) is 0 (i.e. a group that no longer needs probing)
priority_value = [(1000 - 100 * j["leng"] - (j["blow"]+j["hit"])) * self.zero_checker(
(j["blow"]+j["hit"])*(j["leng"]-(j["blow"]+j["hit"])))*self.curse(j["gnum"]) for j in ginfo]
# if winfo["special_mode"] == 1:
# for j in gnifo:
# if j["gnum"] ==3:
            # If every priority_value were 0, the info_update before this turn should have set phase to 4, so we should never get here.
if sum(priority_value) == 0:
print("error:phaseが更新されていませんby fugaku")
exit()
target_gnum_order = priority_value.index(max(priority_value))
target_gnum = ginfo[target_gnum_order]["gnum"]
default_length = ginfo[target_gnum_order]["leng"]
target_length = max(
math.ceil(default_length / 2), 5 - winfo["noob"])
"""特別モードの中盤への分岐
0~4,5~9,a~e,fそれぞれにblowがある場合(ハズレなし、つまりtarget_length=5の場合)は、特別な処理をする必要がある。
具体的には、確定blowである「f」を「ハズレ」として用い、後で全体のblow数を1追加する。"""
            # The case analysis looked simple, so every pattern is spelled out.
if target_length == 5:
winfo["special_mode"] = 1
print("You are in special mode...")
self.get_gnum()
if target_gnum == 0:
guess_mid_str = "f0123"
self.ram.append({"turn": winfo["turn"], "group_member": [0, 1, 2, 3], "non_group_member": [
4], "group_qty": 1, "gnum0": 0, "gnum1": 3, "gnum2": -1, "guess_mid_list": [15, 0, 1, 2, 3]})
for i in [0, 1, 2, 3]:
ninfo[i]["gnum"] = 3
elif target_gnum == 1:
guess_mid_str = "f5678"
self.ram.append({"turn": winfo["turn"], "group_member": [5, 6, 7, 8], "non_group_member": [
9], "group_qty": 1, "gnum0": 1, "gnum1": 3, "gnum2": -1, "guess_mid_list": [15, 5, 6, 7, 8]})
for i in [5, 6, 7, 8]:
ninfo[i]["gnum"] = 3
else:
guess_mid_str = "fabcd"
self.ram.append({"turn": winfo["turn"], "group_member": [10, 11, 12, 13], "non_group_member": [
14], "group_qty": 1, "gnum0": 2, "gnum1": 3, "gnum2": -1, "guess_mid_list": [15, 10, 11, 12, 13]})
for i in [10, 11, 12, 13]:
ninfo[i]["gnum"] = 3
if target_gnum != 2:
print("message: target_gnumの値がおかしい(続行可能)")
return (guess_mid_str)
"""通常モードの中盤続き"""
# 新たなグループをつくる。探索された側とされなかった側で、2つのグループに分割される。分割後の長さが1の場合、グループ解除(gnum=-1)する。
if target_length == 1:
gnum1 = -1
else:
gnum1 = self.get_gnum()
if default_length - target_length <= 1:
gnum2 = -1
else:
                gnum2 = self.get_gnum()
            # Empty list for collecting the targets' info
            target = []
            # Lists for recording the members
            member = []
            bocchi = []
            # Register the digits to probe and update their gnum in ninfo.
            # The ninfo update should ideally be moved into info_update
for i in range(16):
if ninfo[i]["gnum"] == target_gnum and len(target) < target_length:
target.append(ninfo[i])
ninfo[i]["gnum"] = gnum1
member.append(ninfo[i]["number"])
elif ninfo[i]["gnum"] == target_gnum:
ninfo[i]["gnum"] = gnum2
bocchi.append(ninfo[i]["number"])
            # [Not yet implemented (possible extensions)]
            # As in the endgame, pick digits in descending order of the sum of their position-info values (0, 1, 2) at each position (i.e. digits used by more targets first)
            # Would like to sneak confirmed blows into the leftover slots (probably infeasible, the overhead is huge)
            # Decide which digit goes into which position.
            # To avoid bias, vary which position is decided first depending on the turn
input_order = [(winfo["turn"] + _ - 3) % 5 for _ in range(5)]
for p in input_order:
for k in range(len(target)):
                    if target[k][p] == 2:  # would like to handle == 1 too (not implemented)
pass
else:
guess_mid_list[p] = target[k]["number"]
target.pop(k)
# print(guess_mid_list)
                        break  # this should prevent double registration
# if target != []:
# for p in input_order:
# for k in range(len(target)):
# print("judging"+str(p)+str(k))
            # if target[k][p] == 1: #<- this is 1 here; does that also cover the position-info == 1 case? check later
# pass
# else:
# guess_mid_list[p] = target[k]["number"]
# target.pop(k)
# print("written")
# print(guess_mid_list)
# print(target)
            # break #this should prevent double registration
            # If the above did not settle everything, decide without regard to position info.
if target != []:
for p in input_order:
for k in range(len(target)):
guess_mid_list[p] = target[k]["number"]
target.pop(k)
                        break  # this should prevent double registration
            # Put misses into the leftover slots
            # Build the list of misses
nooblist = []
noobcount = 0
for i in range(16):
if ninfo[i]["cond"] == 9:
nooblist.append(ninfo[i]["number"])
for p in range(5):
if guess_mid_list[p] == -1:
                    # noobcount probably runs past the end of nooblist in this loop
guess_mid_list[p] = ninfo[nooblist[noobcount]]["number"]
noobcount += 1
            # Finally, turn the list-typed guess into a str
for p in range(5):
guess_mid_str += str(hex(guess_mid_list[p]))[2]
            # Leave the info needed by the next info_update in the instance state
self.ram.append({"mode": "normal", "turn": winfo["turn"], "group_member": member, "non_group_member": bocchi, "group_qty": len(
list(filter(self.pos, [gnum1, gnum2]))), "gnum0": target_gnum, "gnum1": gnum1, "gnum2": gnum2, "guess_mid_list": guess_mid_list})
return (guess_mid_str)
"""終盤"""
elif winfo["phase"] == 4:
guess_mid_list = copy.copy(self.hit_list)
guess_mid_str = ""
memb = ginfo[-1]["final_member"]
posi = ginfo[-1]["position"]
hits = 5 - len(memb)
            # Decide the reordering priority: digits with the fewest remaining candidate positions first, i.e. ascending sum of position info.
order_value = []
for i in memb:
memo_order = 0
for p in range(5):
memo_order += ninfo[i][p]
order_value.append([i, memo_order])
order_value.sort(key=lambda x: x[1], reverse=True)
order = [order_value[i][0] for i in range(len(order_value))]
            # When hits == 0, targets = 3 might be fine too (if time allows). With hits == 3 it is a gamble (or confirmed, depending on the position info)
            # Looking at the position info and probing where many candidates remain seems like a good idea...
if hits < 3:
targets = 1
elif hits == 3:
targets = 2
else:
print("error:hit4...?blowの組間違えてない?")
winfo["phase"] -= 1
counter = 0
reg_count = 0
memo_mem = []
memo_pos = []
while counter < 60:
for k in order:
for p in posi:
if reg_count >= targets:
counter += 999
break
if ninfo[k][p] == 2:
pass
else:
guess_mid_list[p] = ninfo[k]["number"]
reg_count += 1
memo_mem.append(k)
memo_pos.append(p)
posi.remove(p)
order.remove(k)
break
counter += 1
            # Build the list of misses
nooblist = []
noobcount = 0
for i in range(16):
if ninfo[i]["cond"] == 9:
nooblist.append(ninfo[i]["number"])
for p in range(5):
if guess_mid_list[p] < 0:
guess_mid_list[p] = nooblist[p]
self.ram.append({"hits": hits, "done_number": memo_mem,
"done_position": memo_pos})
            # Finally, turn the list-typed guess into a str
for p in range(5):
guess_mid_str += str(hex(guess_mid_list[p]))[2]
return (guess_mid_str)
else:
print("unexpected error 101")
exit()
def info_update(self, ninfo, ginfo, hist, winfo):
new_hit_checker = 0
"""序盤の処理"""
if winfo["phase"] < 3:
            # Update the position info (ninfo)
guessa = hist[-1]["guess"]
hita = hist[-1]["hit"]
blowa = hist[-1]["blow"]
            # If hit == 0 in this probe, no digit can be at the same position; set each position's info to 2.
if hita == 0:
for order, num in enumerate(guessa):
ninfo[int(num, base=16)][order] = 2
            # If a hit occurred in this probe, the digit may be at that position. However, if the round's other digits hit and everything else was a blow, that position info would become valid. Mark this state as 1 (the info has no value in this state)
else:
for order, num in enumerate(guessa):
if ninfo[int(num, base=16)][order] < 2:
ninfo[int(num, base=16)][order] = 1
            # Update the rough count of discovered blows
winfo["rough_blow"] += hita + blowa
            # For the probed digits, update their blow potential in ninfo
if hita + blowa == 0:
for num in guessa:
ninfo[int(num, base=16)]["cond"] = 9
else:
for num in guessa:
ninfo[int(num, base=16)]["cond"] = 1
            # Update the group info
group_num = self.get_gnum()
for num in guessa:
ninfo[int(num, base=16)]["gnum"] = group_num
ginfo.append({"gnum": group_num, "leng": 5,
"blow": blowa, "hit": hita})
            # Handle the case where all 5 blows are exhausted by the end of "56789"
if winfo["phase"] < 2 and winfo["rough_blow"] == 5:
for num in range(16):
                    # cond == 0 means the digit has never been probed; once all 5 blows are accounted for, an unprobed digit is a confirmed miss
if ninfo[num]["cond"] == 0:
ninfo[num]["cond"] = 9
winfo["phase"] = 2
            # After "abcde" is done, work out the status of f
elif winfo["phase"] == 2:
if winfo["rough_blow"] == 5:
ninfo[15]["cond"] = 9
elif winfo["rough_blow"] == 4:
ninfo[15]["cond"] = 1
winfo["confirmed_blow_and_hit"] += 1
self.blow_list.append(15)
else:
print("unexpected error 003")
exit()
winfo["phase"] += 1
"""中盤の処理"""
elif winfo["phase"] == 3:
            # Temporarily record hit/blow of the probed group (pre-split) and remove it from ginfo
for i in range(100):
try:
if ginfo[i]["gnum"] == self.ram[0]["gnum0"]:
hit_past = ginfo[i]["hit"]
blow_past = ginfo[i]["blow"]
ginfo.pop(i)
except IndexError:
break
            # Think of the "probe-target group" as two parts: the part that was probed (the front) and the part that was not (the back).
            # Example: with "01234" as the probe target and "012" actually probed, "012" is the front and "34" the back
            """Processing the "front" part"""
            # Currently every digit outside the probe-target group is filled in as a miss (i.e. no confirmed-blow digit from outside the group is mixed in),
            # so the hit/blow of the whole 5-character guess equals the hit/blow of the front.
guessb = self.ram[0]["guess_mid_list"]
memberb = self.ram[0]["group_member"]
hitb = hist[-1]["hit"]
blowb = hist[-1]["blow"]
            # "Special mode" case. We probed the group with the smallest hit+blow, so the probe-target group (01234 etc.) always has hit+blow == 1.
            # f is a confirmed blow, so hitb+blowb is always 1 or 2.
if winfo["special_mode"] == 1:
                # If hitb+blowb is 1, leave special mode; if 2, stay in it (special_mode until the game ends)
if hitb + blowb == 1:
winfo["special_mode"] = 0
                    # In this pass, subtract 1 from hit or blow (because the f inserted as a stand-in miss is a confirmed blow).
"""「hitbが1以上のときはhitbから1引く」としてしまうと、hitが発生したにも関わらず位置情報に2が入力されてしまう場合がある
これを防ぐために、blowbから常に1引くことにする。
blowbが使用される箇所は、以下の3種類
blowb+hitbの形(これは、hitbから引いても変わらないので問題なし)
この回からは絶対に分岐しない位置
ginfoへの登録
おそらく、ginfo以外では問題を起こさないはず。"""
blowb -= 1
                # This loop must run only once, so flip the value here.
winfo["special_mode"] = 2
            # If hit == 0 in this probe, no digit can be at the same position; set each position's info to 2.
if hitb == 0:
for order, num in enumerate(guessb):
ninfo[num][order] = 2
            # If a hit occurred in this probe, the digit may be at that position. However, if the round's other digits hit and everything else was a blow, that position info would become valid and is worth keeping. Mark this state as 1 (the info has no value in this state)
else:
for order, num in enumerate(guessb):
if ninfo[num][order] < 2:
ninfo[num][order] = 1
            # No hits or blows
if hitb + blowb == 0:
for num in memberb:
ninfo[num]["cond"] = 9
            # Everything confirmed as blow-or-better
elif hitb + blowb == len(memberb):
                # Everything confirmed as hits
if blowb == 0:
for num in memberb:
order = guessb.index(num)
ninfo[num]["cond"] = 3
                        # Update position info: set everything to 2 first, then only the hit position to 0
for p in range(5):
ninfo[num][p] = 2
                        ninfo[num][order] = 0  # position info stays 2 even for a confirmed hit; maybe it should be 3?
                        # Update hit_list, blow_list, etc.
self.blow_list.append(num)
self.hit_list[order] = num
winfo["confirmed_hit"] += 1
winfo["confirmed_blow_and_hit"] += 1
new_hit_checker = 1
                # Otherwise (confirmed blows)
else:
for num in memberb:
ninfo[num]["cond"] = 2
self.blow_list.append(num)
winfo["confirmed_blow_and_hit"] += 1
            # Probing continues
else:
for num in memberb:
ninfo[num]["cond"] = 1
            # Register the post-split group in ginfo (only when its size is >= 2)
if len(memberb) >= 2:
ginfo.append({"gnum": self.ram[0]["gnum1"], "leng": len(
memberb), "hit": hitb, "blow": blowb})
"""「裏」の処理"""
# guessb,hitb,blowbの値を更新した後は、「表」の処理と似ているように思うが、いくつか違う点があるので注意。
guessb = self.ram[0]["guess_mid_list"]
memberb = self.ram[0]["non_group_member"]
hitb = hit_past - hitb
blowb = blow_past - blowb
            # The "back" was not actually probed, so its position info cannot be updated.
            # No hits or blows
if hitb + blowb == 0:
for num in memberb:
ninfo[num]["cond"] = 9
            # Everything confirmed as blow-or-better
elif hitb + blowb == len(memberb):
                # Differs from the "front" processing (the back can have no confirmed hits)
                # (confirmed blows)
for num in memberb:
ninfo[num]["cond"] = 2
self.blow_list.append(num)
winfo["confirmed_blow_and_hit"] += 1
            # Probing continues
else:
for num in memberb:
ninfo[num]["cond"] = 1
            # Register the post-split group in ginfo (only when its size is >= 2)
            # Differs from the "front" processing (gnum1 vs gnum2)
if len(memberb) >= 2:
ginfo.append({"gnum": self.ram[0]["gnum2"], "leng": len(
memberb), "hit": hitb, "blow": blowb})
"""共通の処理(中盤終了判定)"""
# 「探索の必要がないグループについては0になる関数」であるpriority_valueを使い、全てのグループについて0になった場合に終了とする。
priority_value = [(1000 - 100 * j["leng"] - (j["blow"]+j["hit"])) * self.zero_checker(
(j["blow"]+j["hit"])*(j["leng"]-(j["blow"]+j["hit"]))) for j in ginfo]
if sum(priority_value) == 0:
winfo["phase"] = 4
                # If we are done, delete all existing ginfo entries and register a group made only of the confirmed blows that are not yet confirmed hits
final_position = []
ginfo = []
gnum = self.get_gnum()
final_member = copy.copy(self.blow_list)
                # If a position is absent from self.hit_list (i.e. -1), add it to final_position and drop it from final_member
for p, num in enumerate(self.hit_list):
if num < 0:
final_position.append(p)
else:
final_member.pop(final_member.index(num))
ginfo.append({"gnum": gnum, "leng": len(final_member), "hit": 0, "blow": len(
final_member), "final_member": final_member, "position": final_position})
                # Register the group info in ninfo
for i in final_member:
ninfo[i]["gnum"] = gnum
"""終盤の処理"""
# 終盤に入った瞬間の処理はすでに終えているので、elifで繋いでOK
elif winfo["phase"] == 4:
hits_before = self.ram[0]["hits"]
hits_after = hist[-1]["hit"]
memb = self.ram[0]["done_number"]
posi = self.ram[0]["done_position"]
"""
hits=3以外のときは、探索数を1とすることを前提に作られている(簡単に拡張できるようにはしてある)。
"""
if hits_after == hits_before:
                # No hit found; update the position info and move on.
for _, i in enumerate(memb):
ninfo[i][posi[_]] = 2
elif hits_after == hits_before + 1:
if len(memb) == 1:
                    # Confirmed hit
ninfo[memb[0]]["cond"] = 3
                    # Update position info: set everything to 2 first, then only the hit position to 0
for p in range(5):
ninfo[memb[0]][p] = 2
                    ninfo[memb[0]][posi[0]] = 0  # are these 3 lines needed?
                    # Update self.hit_list
self.hit_list[posi[0]] = memb[0]
winfo["confirmed_hit"] += 1
new_hit_checker = 1
else:
print("hitが2以上増えてるゾ(その1)")
elif hits_before == 3:
pass
else:
print("hitが2以上増えてるゾ(その2)")
            # Update ginfo. Basically the same as at the end of the midgame
final_position = []
ginfo = []
gnum = self.get_gnum()
final_member = copy.copy(self.blow_list)
            # If a position is absent from self.hit_list (i.e. -1), add it to final_position and drop it from final_member
for p, num in enumerate(self.hit_list):
if num < 0:
final_position.append(p)
else:
final_member.pop(final_member.index(num))
ginfo.append({"gnum": gnum, "leng": len(final_member), "hit": 0, "blow": len(
final_member), "final_member": final_member, "position": final_position})
            # Register the group info in ninfo
for i in final_member:
ninfo[i]["gnum"] = gnum
else:
print("error:phaseがおかしいです。")
exit()
        # Common updates
        # Update the miss count
winfo["noob"] = sum([i == 9 for i in [j["cond"] for j in ninfo]])
        # When a hit occurred, update the position info of every digit
if new_hit_checker == 1:
for p in range(5):
if self.hit_list[p] != -1:
for i in range(16):
ninfo[i][p] = 2
new_hit_checker = 0
        # Update the turn counter
winfo["turn"] += 1
        # ninfo/ginfo are hard to read as dicts, so convert them to lists when printing
nlist = ([list(ninfo[j].values()) for j in range(16)])
glist = ([list(ginfo[p].values()) for p in range(len(ginfo))])
        # Enable the block below to print the final phase in dict form
# if winfo["phase"] == 4:
# print(ninfo)
# print(ginfo)
# else:
if self.print_mode == "on":
print(nlist)
print(glist)
print(winfo)
print(self.hit_list)
print(self.blow_list)
return ninfo, ginfo, hist, winfo
def get_gnum(self):
"""
新たなグループナンバーgnumを得る関数
"""
if self.wholeinfo["phase"] >= 4:
gnum_boost = 50
elif self.wholeinfo["phase"] < 4:
gnum_boost = 0
else:
print("error code 494")
self.gnum_count += 1
return self.gnum_count + gnum_boost
def define_answer(self):
"""
"""
# if type(defined_answer) == str and len(defined_answer) == 5:
# self.ans_str = defined_answer
# return defined_answer
# else:
digit_kari = 0
count = 0
check = 0
while count < 5:
if count == 0:
self.ans_list[count] = random.randint(0, 15)
count += 1
else:
digit_kari = random.randint(0, 15)
for j in range(count):
if self.ans_list[j] == digit_kari:
check = -1
if check == 0:
self.ans_list[count] = digit_kari
count += 1
else:
check = 0
for i in range(5):
self.ans_str += str(hex(self.ans_list[i]))[2]
print("answer:"+self.ans_str) # あらかじめ答えを知りたいときのみ有効化する
def ending(self):
        # Count the turns spent in each phase
used_turn = []
for i in range(len(self.game_record) - 1):
if self.game_record[i] != 3 and self.game_record[i + 1] == 3:
used_turn.append(i + 1)
elif self.game_record[i] != 4 and self.game_record[i + 1] == 4:
used_turn.append(i + 1)
        # Correct for the offset
used_turn[0] += 2
used_turn[1] -= 2
used_turn.append(len(self.game_record) - sum(used_turn))
if self.print_mode == "on":
print()
print("ANSWER: "+self.ans_str)
print("CONGRATULATIONS!!!!")
print(" * + 巛 ヽ")
print(" 〒 ! + 。 + 。")
print(" + 。 | |")
print(" * + / / イヤッッホォォォオオォオウ!")
print(" ∧_∧ / /")
print(" (´∀` / / + 。 + 。 *")
print(" ,- f")
print(" / ュヘ | * + 。 + 。 +")
print(" 〈_} ) |")
print(" / ! + 。 + + *")
print(" ./ ,ヘ |")
print(" ガタン ||| j / | | |||")
print("――――――――――――")
print("You have solved at " +
str(self.wholeinfo["turn"]-1) + " turns!")
print(" Early phase: "+str(used_turn[0]) + " turns")
print(" Middle phase:"+str(used_turn[1]) + " turns")
print(" FINAL phase: "+str(used_turn[2]) + " turns")
print(" T H A N K Y O U F O R P L A Y I N G ! ! !")
        # Append the game record to a separate file
path_w = 'self.game_record.txt'
s = "\n["+self.ans_str + ", " + \
str(sum(used_turn))+", " + str(used_turn)+"]"
with open(path_w, mode='a') as f:
f.write(s)
# exit()
def run(self):
if self.print_mode == "off":
print("Autoplay is on progress...")
while True:
try:
room = random.randint(8400, 8800)
drun = SendReceive(room_id=room)
drun.enter_room()
d2run = SendReceive(room_id=room, player_name="D2")
d2run.enter_room()
break
except KeyError:
pass
drun.get_room()
drun.get_table()
drun.post_hidden(ans="1a2b3")
        # To fix the answer, pass it inside the parentheses of define_answer(); pass "" for a random one
self.define_answer()
d2run.post_hidden(ans=self.ans_str)
drun.get_table()
# 368af
while True:
# drun.post_guess("12345")
drun.post_guess(self.fugaku(
self.numberinfo, self.groupinfo, self.history, self.wholeinfo))
result = drun.get_table()['table'][-1]
self.history.append(result)
            # Update the info
self.numberinfo, self.groupinfo, self.history, self.wholeinfo = self.info_update(
self.numberinfo, self.groupinfo, self.history, self.wholeinfo)
if self.print_mode == "on":
for tt in range(len(self.history)):
print(self.history[tt])
self.game_record.append(self.wholeinfo["phase"])
"""クリア判定"""
if self.history[-1]["hit"] == 5:
self.ending()
"""ここで終了"""
return self.history
if self.print_mode == "on":
print()
print("--------------------------------------------")
print("You are in "+self.phase_list[self.wholeinfo["phase"]])
print("TURN" + str(self.wholeinfo["turn"]))
elif self.print_mode == "off":
if self.wholeinfo["turn"] % 3 == 0:
print("TURN" + str(self.wholeinfo["turn"]))
            # Read the guess input
# guess2 = input("player1 guess ->")
"""ショートカット
中盤まで:3
終盤まで:4
最後まで:5"""
if self.wholeinfo["phase"] < 5:
guess2 = 12345
else:
guess2 = input("player1 guess ->")
if guess2 == "":
guess2 = 12345
d2run.post_guess(guess2)
drun.get_table()
            # Clear self.ram
self.ram = []
authors: ["taka.satoh@keio.jp"] | author_id: taka.satoh@keio.jp

blob_id: 852dc9368213b72f365c73d8188cab2dfbe9cfbe | directory_id: c65705dcc8b8fa5f033873da60dbd5ab85b8e394 | content_id: 4f181926141de8ba344de8aba3374102d6400681
path: /compare_py_web_frameworks/web/migrations/0006_externalapicallmeasurement.py
detected_licenses: [] | license_type: no_license | repo_name: comparepywebframework/ComparePyWebFrameworks | branch_name: refs/heads/master
snapshot_id: 127f56b9afe6c4c6e210b886c05c89f8b3f74f19 | revision_id: 41fbe5e4d639c97b2415c7871084747c72c87ec7
visit_date: 2021-06-23T16:54:48.839532 | revision_date: 2019-07-15T06:27:10 | committer_date: 2019-07-15T06:27:10
github_id: 184575076 | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: 2021-06-10T21:25:19 | gha_created_at: 2019-05-02T12:14:55 | gha_language: HTML
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 750 | extension: py
content:
# Generated by Django 2.1 on 2019-04-29 19:37
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('web', '0005_auto_20190426_1939'),
]
operations = [
migrations.CreateModel(
name='ExternalApiCallMeasurement',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date', models.DateTimeField(auto_now=True)),
('execution_time', models.FloatField()),
('framework', models.CharField(max_length=20)),
],
options={
'db_table': 'external_api_call_measurement',
},
),
]
authors: ["mariuszozdoba@e902ab1f.local"] | author_id: mariuszozdoba@e902ab1f.local

blob_id: 562b5276ac476e24cb34dba56014d146d59cfdc5 | directory_id: 63f6f3f054e7135076f3f55dcccbcd076a960d49 | content_id: 62fe8793957dda1d4d08db2faa5660209fd0fae0
path: /main.py
detected_licenses: [] | license_type: no_license | repo_name: MrMohammadY/FinancialManagement | branch_name: refs/heads/master
snapshot_id: 4691b458888c6db757a1ddb45412eec8603ba9e7 | revision_id: 3b4c78f12612a9d701b4ba4ddf7c8f4ff33808e6
visit_date: 2023-01-07T21:55:19.306927 | revision_date: 2020-11-01T10:02:49 | committer_date: 2020-11-01T10:02:49
github_id: 309067427 | star_events_count: 1 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 4992 | extension: py
content:
|
import os
from create_files import create_folder_user, create_folder_product
from login import login
from Users import User
from Delete_Edit_User import DeleteUser, EditUser
from Delete_Edit_Product import DeleteProduct, EditProduct
from Show_Product_User import show_user, search_user, show_product, \
search_product
from Data import Data
def run():
cwd_usr = os.getcwd() + '\\user'
try:
with open(f'{cwd_usr}/user.json', 'r') as fr:
if len(fr.read()) == 0:
create_folder_user()
except:
create_folder_user()
cwd_data = os.getcwd() + '\\data'
try:
with open(f'{cwd_data}/data.json', 'r') as fr:
if len(fr.read()) == 0:
create_folder_product()
except:
create_folder_product()
def log():
username = input('please enter your username: ').strip()
password = input('please enter your password: ').strip()
result = login(username, password)
if result is not None:
return result[0], result[1], result[2]
def menu():
print(f'1. Create User\n'
f'2. Delete User\n'
f'3. Edit User\n'
f'4. Show User\n'
f'5. Search User\n'
f'6. Create Product\n'
f'7. Delete Product\n'
f'8. Edit Product\n'
f'9. Show Product\n'
f'10. Search Product'
)
try:
        choice = int(input('please choose a number (1-10): '))
except ValueError:
print('Enter a Number')
return menu()
if choice in range(1, 11):
return choice
else:
return menu()
if __name__ == '__main__':
run()
login_user = log()
choice = menu()
if choice == 1:
if login_user[2]:
username = input('please enter your username: ').strip()
password = input('please enter your password: ').strip()
confirm_pass = input('please confirm your password: ').strip()
user_type = int(input('please enter 0(guest) or 1(admin): '))
new = User(username, password, confirm_pass, user_type)
create_user = new.create_user(new.check_pass(),
new.check_username())
new.insert_to_file(create_user)
if choice == 2:
if login_user[2]:
username = input('please enter your username for delete: ').strip()
password = input('please enter your password for delete: ').strip()
delete = DeleteUser(username, password)
delete.delete()
if choice == 3:
if login_user[2]:
username = input('please enter your username for edit: ').strip()
password = input('please enter your password for edit: ').strip()
new_username = input(
'please enter your new username for edit: ').strip()
new_password = input(
'please enter your new password for edit: ').strip()
confirm_pass = input('please confirm your password: ').strip()
user_type = int(input('please enter 0(guest) or 1(admin):'))
edit = EditUser(username, password, new_username,
new_password, confirm_pass, user_type)
edit.edit()
if choice == 4:
show_user()
if choice == 5:
username = input('please enter your username for search: ').strip()
search_user(username)
if choice == 6:
if login_user[2]:
upc = int((input('please enter upc product: ')))
name = input('please enter name product: ').strip()
price = int(input('please enter price product: '))
number = int(input('please enter number product: '))
new = Data(upc, name, price, number)
create_product = new.create_product(new.check_upc(),
new.check_name())
new.insert_to_file(create_product)
if choice == 7:
if login_user[2]:
upc = int((input('please enter upc product to delete: ')))
name = input('please enter name product to delete: ').strip()
delete = DeleteProduct(upc, name)
delete.delete()
if choice == 8:
if login_user[2]:
upc = int((input('please enter upc product: ')))
name = input('please enter name product: ').strip()
new_upc = int((input('please enter upc product for edit: ')))
new_name = input('please enter name product for edit: ').strip()
new_price = int(input('please enter price product for edit: '))
new_number = int(input('please enter number product for edit: '))
edit = EditProduct(upc, name, new_upc,
new_name, new_price, new_number)
edit.edit_pro()
if choice == 9:
show_product()
if choice == 10:
upc = int(input('please enter upc for search: '))
search_product(upc)
authors: ["m.yazdani000@gmail.com"] | author_id: m.yazdani000@gmail.com

blob_id: de8b455b1a89a21b140b39faf0fcf2fd66da035c | directory_id: 2c3ae2052fcf07f7dbe2bb1c1da221df61545b02 | content_id: eadfb92e95fd126663a560b18298550d94908cc7
path: /app/migrations/0023_merge_20180705_1402.py
detected_licenses: [] | license_type: no_license | repo_name: ManuelBilbao/Prode | branch_name: refs/heads/master
snapshot_id: afd1e53c3e43b6fb6aabaddedc31eb2cf1d806c4 | revision_id: 836565c4a272801c2028dcd301bf9567707d7ecb
visit_date: 2020-03-21T23:37:09.622152 | revision_date: 2018-07-06T22:13:01 | committer_date: 2018-07-06T22:13:01
github_id: 139196482 | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 265 | extension: py
content:
# Generated by Django 2.0.6 on 2018-07-05 17:02
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('app', '0004_auto_20180705_1336'),
('app', '0022_auto_20180702_1825'),
]
operations = [
]
authors: ["bilbaomanuel98@gmail.com"] | author_id: bilbaomanuel98@gmail.com

blob_id: 3e9d8ec048755bc1b7308f48bac5f252d8872da9 | directory_id: 2581df170f614c1324b881f0462837ef09d66728 | content_id: 5cd6467385d0d3a25f1c28122063fee88b0cef46
path: /wsgi/openshift/comment/migrations/0003_auto__del_field_comment_submessage.py
detected_licenses: [] | license_type: no_license | repo_name: neudesk/20thst | branch_name: refs/heads/master
snapshot_id: eeb43e2351dab06eec448273d8a137cd1efc7509 | revision_id: 01c30230c2bd981befa9a7b1225df417b726cd1b
visit_date: 2021-01-10T18:53:02.055952 | revision_date: 2014-01-30T23:59:16 | committer_date: 2014-01-30T23:59:16
github_id: null | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 5221 | extension: py
content:
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'Comment.submessage'
db.delete_column(u'comment_comment', 'submessage_id')
# Adding M2M table for field submessage on 'Comment'
m2m_table_name = db.shorten_name(u'comment_comment_submessage')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('from_comment', models.ForeignKey(orm[u'comment.comment'], null=False)),
('to_comment', models.ForeignKey(orm[u'comment.comment'], null=False))
))
db.create_unique(m2m_table_name, ['from_comment_id', 'to_comment_id'])
def backwards(self, orm):
# Adding field 'Comment.submessage'
db.add_column(u'comment_comment', 'submessage',
self.gf('django.db.models.fields.related.ForeignKey')(to=orm['comment.Comment'], null=True, blank=True),
keep_default=False)
# Removing M2M table for field submessage on 'Comment'
db.delete_table(db.shorten_name(u'comment_comment_submessage'))
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'comment.comment': {
'Meta': {'object_name': 'Comment'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_pub': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'message': ('django.db.models.fields.CharField', [], {'max_length': '350'}),
'posted': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'submessage': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'submessage_rel_+'", 'null': 'True', 'to': u"orm['comment.Comment']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['comment']
|
[
"neumerance@neudev.(none)"
] |
neumerance@neudev.(none)
|
cee97c47a3ba17ae565e2903c5d57423032c94ed
|
5c9115e44fda400a669aca27333c7ef8de93d7b4
|
/db_downgrade.py
|
fb699d4d5cb7e8bdb6b3c4538b0ee1006b8c47f6
|
[] |
no_license
|
weijiayun/blog
|
26963cdd0dea87eb3300cc63d83c5062def0e12a
|
2388545c3e06ca63a04e93b94fd9ce4f86d27cc2
|
refs/heads/master
| 2021-01-17T19:17:43.911255
| 2016-06-25T06:39:46
| 2016-06-25T06:39:46
| 59,274,249
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 447
|
py
|
#!/opt/Apps/local/Python/anaconda/bin/python2.7
from migrate.versioning import api
from config import SQLALCHEMY_DATABASE_URI
from config import SQLALCHEMY_MIGRATE_REPO
def dbdowngrade():
v = api.db_version(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
api.downgrade(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO, v - 1)
print 'Current database version: ' + str(api.db_version(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO))
|
[
"jiayun.wei@foxmail.com"
] |
jiayun.wei@foxmail.com
|
9055c72993e785a6a3143bb062dd04af9f5dc404
|
56915113a742e8bc3d8b2260300311e0bb22afa0
|
/django_blog/blog/migrations/0001_initial.py
|
fb0ab4c1c3f28084e6c102c4d8425b1a18da12e3
|
[] |
no_license
|
tenkeyvey/django_blog
|
2bcdf33471ec0686fa3e619d43c9516ee35bd251
|
19ee26db0c852f0d9b2728ca81e845bae889fb6f
|
refs/heads/main
| 2023-06-29T07:33:30.736142
| 2021-07-29T11:56:00
| 2021-07-29T11:56:00
| 390,700,427
| 0
| 1
| null | 2021-07-29T11:56:01
| 2021-07-29T11:25:12
|
Python
|
UTF-8
|
Python
| false
| false
| 558
|
py
|
# Generated by Django 3.2.4 on 2021-07-01 11:13
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Post',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=50)),
('content', models.TextField()),
],
),
]
|
[
"noreply@github.com"
] |
tenkeyvey.noreply@github.com
|
a3075584a19f72349d8cd754df7b70208a2217cd
|
6d3da15e989b8448d15ea0e47cf320d8051a2715
|
/data/gvdb/conversion_scripts/convert_gvdb_to_json.py
|
5c9457d9c80f476f43dfabc31e45f9b9cc3faf1f
|
[
"Apache-2.0"
] |
permissive
|
pitrack/arglinking
|
7881dee77ad883100d88f769253b22d65c207e2e
|
ebc84d15e9a46cd0a65a42ee391e9f249dfeda72
|
refs/heads/master
| 2023-02-16T18:02:54.591148
| 2022-08-06T18:07:45
| 2022-08-06T18:07:45
| 254,181,282
| 23
| 5
|
Apache-2.0
| 2023-02-11T01:17:22
| 2020-04-08T19:27:52
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 5,991
|
py
|
import csv
import sys
import json
def convert_gvdb_to_span(gvdb_dict, role):
if gvdb_dict['startIndex'] != -1:
# +2 is for the ". " added to separate title and body
return ((int(gvdb_dict['startIndex']) + 2,
int(gvdb_dict['endIndex']) + 2,
role,
gvdb_dict['value']))
else:
return None
cgts = convert_gvdb_to_span
def convert_json_to_char_spans(blob, full_text, tok_map, tokenized):
# circumstances
all_roles = []
all_roles.append(cgts(blob['circumstances']['number-of-shots-fired'], "CIR-NUM-SHOTS"))
all_roles.append(cgts(blob['circumstances']['type-of-gun'], "CIR-WEAPON"))
all_roles.append(cgts(blob['date-and-time']['city'], "DAT-CITY"))
all_roles.append(cgts(blob['date-and-time']['clock-time'], "DAT-CLOCK"))
all_roles.append(cgts(blob['date-and-time']['details'], "DAT-LOC"))
all_roles.append(cgts(blob['date-and-time']['time-day'], "DAT-TIME"))
for victim in blob['victim-section']:
all_roles.append(cgts(victim['age'], "VIC-AGE"))
all_roles.append(cgts(victim['name'], "VIC-NAME"))
all_roles.append(cgts(victim['race'], "VIC-RACE"))
break # only look at first one
for shooter in blob['shooter-section']:
all_roles.append(cgts(shooter['age'], "SHO-AGE"))
all_roles.append(cgts(shooter['name'], "SHO-NAME"))
all_roles.append(cgts(shooter['race'], "SHO-RACE"))
break # only look at first one
all_roles = [role for role in all_roles if role is not None]
checksums = [role[3] == full_text[min(role[0], role[1]):max(role[0], role[1])]
for role in all_roles]
checksums_2 = [role[3] == full_text[min(role[0], role[1]) - 2:max(role[0], role[1]) - 2]
for role in all_roles]
for i, (isVerified, isVerified2) in enumerate(zip(checksums, checksums_2)):
if not isVerified and not isVerified2:
old_start = all_roles[i][0]
old_end = all_roles[i][1]
old_role = all_roles[i][2]
old_value = all_roles[i][3].strip()
start = full_text.find(old_value)
if start == -1 or old_value == "":
print ("LOST:\t\t {} [==x]".format(all_roles[i]))
all_roles[i] = None
else:
all_roles[i] = (start, start + len(old_value),
old_role, old_value)
print ("NO:\t\t{} [==>] {}".format(full_text[old_start:old_end], all_roles[i]))
elif not isVerified:
new_start = all_roles[i][0] - 2
new_end = all_roles[i][1] - 2
all_roles[i] = (new_start, new_end,
all_roles[i][2], all_roles[i][3])
print ("MAYBE:\t\t{} [=?=] {}".format(full_text[new_start:new_end],
all_roles[i]))
else:
print ("YES:\t\t{} [===] {}".format(full_text[all_roles[i][0]:all_roles[i][1]],
all_roles[i]))
for i, span in enumerate(all_roles):
if span is None:
continue
old_start = span[0]
old_end = span[1]
old_role = span[2]
old_value = span[3]
if old_value == "":
all_roles[i] = None
elif old_value[0] == " ":
all_roles[i] = (old_start + 1, old_end,
old_role, old_value)
# retokenize
def retokenize(span):
start_idx = int(tok_map[span[0]])
# If you hit an exception here, check the original data, maybe the document is empty?
end_idx = int(tok_map[span[1] - 1]) + 1
return (start_idx, end_idx, span[2], span[3], tokenized[start_idx:end_idx])
return [retokenize(role) for role in all_roles if role is not None]
from spacy.lang.en import English
import numpy as np
nlp = English()
nlp.add_pipe(nlp.create_pipe('sentencizer'))
def process_full_text(text):
text = text.replace('\x96', '-').replace('\x97', '-')
tokenized_text = nlp(text)
sents = list(tokenized_text.sents)
sentences = [[w.text for w in s] for s in sents]
flat_char_offs = [w.idx for s in sents for w in s] + [len(text)]
flat_token_offs = [w.i for s in sents for w in s]
diffs = list(np.diff(flat_char_offs))
char_tok_map = np.repeat(flat_token_offs, diffs)
continuous = [w.text for s in sents for w in s]
return sentences, continuous, char_tok_map
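# Illustrative sketch (not in the original script; the example string is assumed):
# the char_tok_map built above answers "which token covers character i?" in O(1):
#   _, _, cmap = process_full_text("A man was shot.")
#   int(cmap[2])  # character 2 ('m') falls inside token 1 ("man")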
def process_file(file_it, doc_dict, avoid_dict):
num_spans = 0
next(file_it, None)
for i, row in enumerate(file_it):
title = row[-4]
text_body = row[-3].split(title)[-1]
full_text = title.strip() + ". " + text_body.strip()
json_blob = json.loads(row[-2])
new_json = {}
new_json["doc_key"] = str(i)
new_json["date"] = json_blob["date-and-time"]["date"]
if len(new_json["date"]) == 0 or str(new_json["date"])[0] == "{":
continue
new_json["full_text"], continuous, tok_map = process_full_text(full_text)
new_json["spans"] = convert_json_to_char_spans(json_blob, full_text, tok_map, continuous)
if full_text in doc_dict or full_text in avoid_dict:
continue
else:
doc_dict[full_text] = new_json
num_spans += len(new_json["spans"])
return num_spans
test_file = open(sys.argv[2], 'r', encoding='utf-8')
in_file = open(sys.argv[1], 'r', encoding='utf-8')
tsvtest = csv.reader(test_file, delimiter='\t')
tsvin = csv.reader(in_file, delimiter='\t')
test_docs = {}
train_docs = {}
num_test_spans = process_file(tsvtest, test_docs, {})
chron_test_spans = sorted(list(test_docs.values()), key=lambda x:str(x["date"]))
filtered = chron_test_spans
filtered_date = [x["date"] for x in filtered]
filtered_spans = [x["spans"] for x in filtered]
test = open("test.json", 'w+', encoding='utf-8')
dev = open("dev.json", 'w+', encoding='utf-8')
train = open("train.json", 'w+', encoding='utf-8')
for json_blob in filtered[:-1010]:
train.write(json.dumps(json_blob))
train.write("\n")
for json_blob in filtered[-1010:-610]:
dev.write(json.dumps(json_blob))
dev.write("\n")
for json_blob in filtered[-510:-10]:
test.write(json.dumps(json_blob))
test.write("\n")
# The top 10 have clearly incorrect dates, like something in the year 3000
|
[
"paxia@jhu.edu"
] |
paxia@jhu.edu
|
58d9792ff4177d3aea632b38fd8d7443be137527
|
174ef43bc002b2fb31b4b301d97f59d9ff848a51
|
/models/resnet.py
|
384074293df4729ed6479c8363fc4dff3915b3af
|
[] |
no_license
|
chandu-97/Invertible-GAN
|
d181eb465228c7c8e79cb6c049b4a83a1952a16a
|
6afb14c5272d857bbf2f55fab78113eda003c36d
|
refs/heads/master
| 2022-03-14T12:04:55.793902
| 2019-12-17T21:56:10
| 2019-12-17T21:56:10
| 222,968,114
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,107
|
py
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from models.residual_block import ResidualBlock
from utils import WNConv2d
class ResNet(nn.Module):
"""ResNet for scale and translate factors in Real NVP.
Args:
in_channels (int): Number of channels in the input.
mid_channels (int): Number of channels in the intermediate layers.
out_channels (int): Number of channels in the output.
num_blocks (int): Number of residual blocks in the network.
kernel_size (int): Side length of each filter in convolutional layers.
padding (int): Padding for convolutional layers.
double_after_norm (bool): Double input after input BatchNorm.
"""
def __init__(self, in_channels, mid_channels, out_channels,
num_blocks, kernel_size, padding, double_after_norm):
super(ResNet, self).__init__()
self.in_norm = nn.BatchNorm2d(in_channels)
self.double_after_norm = double_after_norm
self.in_conv = WNConv2d(2 * in_channels, mid_channels, kernel_size, padding, bias=True)
self.in_skip = WNConv2d(mid_channels, mid_channels, kernel_size=1, padding=0, bias=True)
self.blocks = nn.ModuleList([ResidualBlock(mid_channels, mid_channels)
for _ in range(num_blocks)])
self.skips = nn.ModuleList([WNConv2d(mid_channels, mid_channels, kernel_size=1, padding=0, bias=True)
for _ in range(num_blocks)])
self.out_norm = nn.BatchNorm2d(mid_channels)
self.out_conv = WNConv2d(mid_channels, out_channels, kernel_size=1, padding=0, bias=True)
def forward(self, x):
x = self.in_norm(x)
if self.double_after_norm:
x *= 2.
x = torch.cat((x, -x), dim=1)
x = F.relu(x)
x = self.in_conv(x)
x_skip = self.in_skip(x)
for block, skip in zip(self.blocks, self.skips):
x = block(x)
x_skip += skip(x)
x = self.out_norm(x_skip)
x = F.relu(x)
x = self.out_conv(x)
return x
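# Minimal shape check (a sketch; the constructor values are assumptions, and
# WNConv2d/ResidualBlock are assumed to preserve spatial size):
#   net = ResNet(in_channels=3, mid_channels=32, out_channels=6, num_blocks=2,
#                kernel_size=3, padding=1, double_after_norm=True)
#   net(torch.randn(1, 3, 32, 32)).shape  # expected: (1, 6, 32, 32)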
|
[
"ee14btech11014@iith.ac.in"
] |
ee14btech11014@iith.ac.in
|
b888d97751c1a71fe356a509e5e43f5733362ec1
|
1362b977fd45dcdc773c836e9895701a20152bba
|
/multilayer/1d/rarefaction.py
|
d2f2f3e724001e4c8584ba01f63f71573d7c5e52
|
[] |
no_license
|
nthakkar/apps
|
4cceacf85e5bdb505f7593fcb7e5c5f4bc5bc371
|
f195821e4c8d153a93062af3ecb0c787ed51207f
|
refs/heads/master
| 2021-01-18T11:59:18.972898
| 2013-08-13T00:28:33
| 2013-08-13T00:28:33
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,240
|
py
|
#!/usr/bin/env python
# encoding: utf-8
r""" Run the suite of tests for the 1d two-layer equations"""
import sys
import clawpack.riemann as riemann
import clawpack.clawutil.runclaw as runclaw
from clawpack.pyclaw.plot import plot
import multilayer as ml
def rarefaction(num_cells,eigen_method,entropy_fix,**kargs):
r"""docstring for oscillatory_wind"""
# Construct output and plot directory paths
prefix = 'ml_e%s_m%s_fix' % (eigen_method,num_cells)
if entropy_fix:
prefix = "".join((prefix,"T"))
else:
prefix = "".join((prefix,"F"))
name = 'all_rare'
outdir,plotdir,log_path = runclaw.create_output_paths(name,prefix,**kargs)
# Redirect loggers
# This is not working for all cases, see comments in runclaw.py
for logger_name in ['io','solution','plot','evolve','f2py','data']:
runclaw.replace_stream_handlers(logger_name,log_path,log_file_append=False)
# Load in appropriate PyClaw version
if kargs.get('use_petsc',False):
import clawpack.petclaw as pyclaw
else:
import clawpack.pyclaw as pyclaw
# =================
# = Create Solver =
# =================
if kargs.get('solver_type','classic') == 'classic':
solver = pyclaw.ClawSolver1D()
else:
raise NotImplementedError('Classic is currently the only supported solver.')
# Solver method parameters
solver.cfl_desired = 0.9
solver.cfl_max = 1.0
solver.max_steps = 5000
solver.fwave = True
solver.kernel_language = 'Fortran'
solver.num_waves = 4
solver.limiters = 3
solver.source_split = 1
# Boundary conditions
solver.bc_lower[0] = 1
solver.bc_upper[0] = 1
solver.aux_bc_lower[0] = 1
solver.aux_bc_upper[0] = 1
# Set the Riemann solver
solver.rp = riemann.rp1_layered_shallow_water
# Set the before step function
solver.before_step = lambda solver,solution:ml.step.before_step(solver,solution)
# Use simple friction source term
solver.step_source = ml.step.friction_source
# ============================
# = Create Initial Condition =
# ============================
num_layers = 2
x = pyclaw.Dimension('x',0.0,1.0,num_cells)
domain = pyclaw.Domain([x])
state = pyclaw.State(domain,2*num_layers,3+num_layers)
state.aux[ml.aux.kappa_index,:] = 0.0
# Set physics data
state.problem_data['g'] = 9.8
state.problem_data['manning'] = 0.0
state.problem_data['rho_air'] = 1.15e-3
state.problem_data['rho'] = [0.95,1.0]
state.problem_data['r'] = state.problem_data['rho'][0] / state.problem_data['rho'][1]
state.problem_data['one_minus_r'] = 1.0 - state.problem_data['r']
state.problem_data['num_layers'] = num_layers
# Set method parameters, this ensures it gets to the Fortran routines
state.problem_data['eigen_method'] = eigen_method
state.problem_data['dry_tolerance'] = 1e-3
state.problem_data['inundation_method'] = 2
state.problem_data['entropy_fix'] = entropy_fix
solution = pyclaw.Solution(state,domain)
solution.t = 0.0
# Set aux arrays including bathymetry, wind field and linearized depths
eta = [0.0,-0.5]
ml.aux.set_jump_bathymetry(solution.state,0.5,[-1.0,-1.0])
ml.aux.set_no_wind(solution.state)
ml.aux.set_h_hat(solution.state,0.5,eta,eta)
# Set sea at rest initial condition with diverging velocities
u_left = [0.0,-0.5]
u_right = [0.0,0.5]
h_hat = [eta[0] - eta[1],eta[1] + 1.0]
q_left = [h_hat[0] * state.problem_data['rho'][0],
u_left[0] * h_hat[0] * state.problem_data['rho'][0],
h_hat[1] * state.problem_data['rho'][1],
u_left[1] * h_hat[1] * state.problem_data['rho'][1]]
q_right = [h_hat[0] * state.problem_data['rho'][0],
u_right[0] * h_hat[0] * state.problem_data['rho'][0],
h_hat[1] * state.problem_data['rho'][1],
u_right[1] * h_hat[1] * state.problem_data['rho'][1]]
ml.qinit.set_riemann_init_condition(state,0.5,q_left,q_right)
# ================================
# = Create simulation controller =
# ================================
controller = pyclaw.Controller()
controller.solution = solution
controller.solver = solver
# Output parameters
controller.output_style = 3
controller.nstepout = 1
controller.num_output_times = 100
controller.write_aux_init = True
controller.outdir = outdir
controller.write_aux = True
# ==================
# = Run Simulation =
# ==================
state = controller.run()
# ============
# = Plotting =
# ============
plot_kargs = {'rho':solution.state.problem_data['rho'],
'dry_tolerance':solution.state.problem_data['dry_tolerance']}
plot(setplot_path="./setplot_drystate.py",outdir=outdir,plotdir=plotdir,
htmlplot=kargs.get('htmlplot',False),iplot=kargs.get('iplot',False),
file_format=controller.output_format,**plot_kargs)
if __name__ == "__main__":
rarefaction(100,2,False,iplot=False,htmlplot=True)
|
[
"kyle.mandli@gmail.com"
] |
kyle.mandli@gmail.com
|
50bb15ab47df9e53950a20c7fb3a9589dfa069c3
|
6197e65fffdd12dee288dc394d6c4a7b0a151172
|
/train.py
|
a5a6fe07819e7250487fd223b6a3102e54aba52e
|
[] |
no_license
|
yuntai/horse
|
0fc7ab868c6e12ac209929bc7bc2753508513533
|
b70f38c14c36331be2518300358f6ca8b802bdca
|
refs/heads/main
| 2022-12-27T03:37:38.935003
| 2020-10-07T07:38:57
| 2020-10-07T07:38:57
| 301,961,007
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,447
|
py
|
import torch
import settings
from torch.utils.data import TensorDataset, DataLoader
import argparse
from dataset import get_datasets
from graph_transformer import GraphTransformer
import numpy as np
import itertools
import torch.nn as nn
from torch import optim
import tqdm
parser = argparse.ArgumentParser(description='Graph Transformer on Horse Racing outcome')
parser.add_argument('--batch_size', type=int, default=128, help='batch size')
parser.add_argument('--max_epoch', type=int, default=1000, help='upper epoch limit')
parser.add_argument('--d_model', type=int, default=640, help='model dimension')
parser.add_argument('--n_layer', type=int, default=14, help='number of total layers')
parser.add_argument('--n_head', type=int, default=10, help='number of heads')
parser.add_argument('--d_inner', type=int, default=3800, help='inner dimension in posFF')
parser.add_argument('--d_embed', type=int, default=128, help='embedding dimension')
parser.add_argument('--final_dim', type=int, default=280, help='final layer hidden dimension')
parser.add_argument('--lr', type=float, default=0.0005, help='initial learning rate (0.0001|5 for adam|sgd)')
parser.add_argument('--clip', type=float, default=0.25, help='gradient clipping')
parser.add_argument('--decay_rate', type=float, default=0.5, help='decay factor when ReduceLROnPlateau is used')
parser.add_argument('--patience', type=int, default=5, help='patience')
parser.add_argument('--lr_min', type=float, default=0.0, help='minimum learning rate during annealing')
parser.add_argument('--dropout', type=float, default=0.03, help='global dropout rate (applies to residual blocks in transformer)')
parser.add_argument('--dropatt', type=float, default=0.0, help='attention probability dropout rate')
parser.add_argument('--final_dropout', type=float, default=0.04, help='final layer dropout rate')
parser.add_argument('--wnorm', action='store_true', help='use weight normalization')
parser.add_argument('--cuda', type=bool, default=True, help='use CUDA')
parser.add_argument('--seed', type=int, default=1234, help='random seed')
parser.add_argument('--scheduler', default='cosine', type=str,
choices=['cosine', 'inv_sqrt', 'dev_perf', 'constant'],
help='lr scheduler to use.')
parser.add_argument('--eta_min', type=float, default=1e-7,
help='min learning rate for cosine scheduler')
args = parser.parse_args()
train_dataset, val_dataset = get_datasets()
train_loader = DataLoader(train_dataset, batch_size=args.batch_size, shuffle=True, drop_last=True)
val_loader = DataLoader(val_dataset, batch_size=args.batch_size, shuffle=False, drop_last=True)
np.random.seed(args.seed)
torch.manual_seed(args.seed)
if torch.cuda.is_available() and args.cuda:
torch.set_default_tensor_type('torch.cuda.FloatTensor')
torch.cuda.manual_seed_all(args.seed)
device = torch.device('cuda' if args.cuda else 'cpu')
model = GraphTransformer(dim=args.d_model, n_layers=args.n_layer, d_inner=args.d_inner, n_toks=[3, 12],
d_embed=args.d_embed, final_dim=args.final_dim, dropout=args.dropout, n_feat=18,
dropatt=args.dropatt, final_dropout=args.final_dropout, n_head=args.n_head,
wnorm=args.wnorm).to(device)
bce_loss = nn.BCEWithLogitsLoss(reduction='none')
loss_func = nn.MSELoss(reduction='none')
def criterion(pred, y, mask, x):
loss = loss_func(pred, y)
res = (loss * mask).sum()/((mask>0).sum())
return res
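# Worked example (tensors assumed, not from the original code): the criterion
# averages squared error over valid entries only, as flagged by the mask:
#   pred = torch.tensor([1., 2., 0.]); y = torch.tensor([1., 4., 9.])
#   mask = torch.tensor([1., 1., 0.])
#   criterion(pred, y, mask, None)  # (0 + 4) / 2 = 2.0; the masked 81 is ignored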
args.max_step = args.max_epoch * len(train_loader)
# initialize optimizer and lr scheduler
optimizer = optim.Adam(model.parameters(), lr=args.lr)
if args.scheduler == 'dev_perf':
scheduler = optim.lr_scheduler.ReduceLROnPlateau(optimizer, factor=args.decay_rate,
patience=args.patience, min_lr=args.lr_min)
elif args.scheduler == 'cosine':
scheduler = optim.lr_scheduler.CosineAnnealingLR(optimizer, args.max_epoch, eta_min=args.eta_min)
# TODO: proper union with val_dataset
max_num_horses = train_dataset.tensors[1].max().long()
def accuracy(pred, y, mask, threshold=0.7):
pred = (torch.sigmoid(pred) > threshold).long()
return (pred * y).sum() / 5. / pred.size(0)
ix = 0
start_epoch = 0
train_step = 0
min_val_loss = float('inf')
for epoch_i in range(start_epoch, args.max_epoch):
losses = []
accs = []
model.train()
with torch.enable_grad():
for _batch in tqdm.tqdm(train_loader):
optimizer.zero_grad()
x, cnt = (b.to(device) for b in _batch)
bsz = x.size(0)
rix = torch.stack([torch.randperm(max_num_horses) for _ in range(bsz)])
# with gather possible?
x = x[torch.arange(bsz)[:,None,None], rix[...,None], torch.arange(x.size(-1))]
mask = (rix < cnt[:,None]).float()
feats, toks, y = x.split([18, 2, 1], dim=-1)
toks = toks.type(torch.long)
pred = model(feats, toks, mask)
loss = criterion(pred, y.squeeze(), mask, x)
#acc = accuracy(pred, y.squeeze(), mask)
#accs.append(acc.detach().cpu())
loss.backward()
losses.append(loss.detach().cpu())
torch.nn.utils.clip_grad_norm_(model.parameters(), args.clip)
optimizer.step()
train_step += 1
model.eval()
with torch.no_grad():
val_losses = []
for _batch in tqdm.tqdm(val_loader):
x, cnt = (b.to(device) for b in _batch)
bsz = x.size(0)
rix = torch.stack([torch.randperm(max_num_horses) for _ in range(bsz)])
# with gather possible?
x = x[torch.arange(bsz)[:,None,None], rix[...,None], torch.arange(x.size(-1))]
mask = (rix < cnt[:,None]).float()
feats, toks, y = x.split([18, 2, 1], dim=-1)
toks = toks.type(torch.long)
pred = model(feats, toks, mask)
loss = criterion(pred, y.squeeze(), mask, x)
val_losses.append(loss.detach().cpu())
scheduler.step()
lr = optimizer.param_groups[0]['lr']
tr_loss = np.array(losses).mean()
val_loss = np.array(val_losses).mean()
if val_loss < min_val_loss:
torch.save(model, 'model.ckpt')
print(f"Saving model ...")
min_val_loss = val_loss
print(f"epoch {epoch_i} loss({tr_loss:.7f}) lr({lr:.7f}) val_loss({val_loss:.7f})")
|
[
"yuntai.kyong@gmail.com"
] |
yuntai.kyong@gmail.com
|
03c3734ef7b7cc25a20cb6c977058b94d348cee0
|
228c54822400aaf033679fa49f2a7475e64b5a73
|
/mriutils/models/modules/metrics.py
|
36f0e7bc141f2b8ccf857090c565e5b3afd7c299
|
[
"MIT"
] |
permissive
|
kuangmeng/MRIUtils
|
3111da0c212b8bd74b4b35bb6a5fbf641c4d28ff
|
3a79e8104071deb0dc17c402ac878f94161d9b4a
|
refs/heads/master
| 2023-06-01T14:51:24.547215
| 2021-06-23T07:22:15
| 2021-06-23T07:22:15
| 322,794,871
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,631
|
py
|
#!/usr/bin/env python
from tensorflow.keras import metrics
from tensorflow.keras import backend as K
class Metric:
def __init__(self, metric = '', customization = None):
self.metric = metrics.categorical_accuracy
if 'dice' in metric:
self.metric = self.dice_score
elif 'iou' in metric:
self.metric = self.iou
elif 'sensitivity' in metric:
self.metric = self.sensitivity
elif 'ppv' in metric:
self.metric = self.ppv
        elif customization is not None:
self.metric = customization
def dice_coef(self, y_true_f, y_pred_f, smooth = 1):
intersection = K.sum(y_true_f * y_pred_f)
return (2. * intersection + smooth) / (K.sum(y_true_f) + K.sum(y_pred_f) + smooth)
def iou_coef(self, y_true_f, y_pred_f, smooth = 1):
intersection = K.sum(y_true_f * y_pred_f)
return (intersection + smooth) / (K.sum(y_true_f) + K.sum(y_pred_f) + smooth - intersection)
def dice_score(self, y_true, y_pred):
ret_loss = 0.0
for index in range(1, y_true.shape[-1]):
y_true_f = K.flatten(y_true[:,:,:,:,index])
y_pred_f = K.flatten(y_pred[:,:,:,:,index])
ret_loss += self.dice_coef(y_true_f, y_pred_f)
return ret_loss / (y_true.shape[-1] - 1)
def iou(self, y_true, y_pred):
ret_loss = 0.0
for index in range(1, y_true.shape[-1]):
y_true_f = K.flatten(y_true[:,:,:,:,index])
y_pred_f = K.flatten(y_pred[:,:,:,:,index])
ret_loss += self.iou_coef(y_true_f, y_pred_f)
return ret_loss / (y_true.shape[-1] - 1)
def S(self, y_true_f, y_pred_f, smooth = 1e-5):
intersection = K.sum(y_true_f * y_pred_f)
return intersection / (K.sum(y_pred_f) + smooth)
def sensitivity(self, y_true, y_pred):
ret_sen = 0.0
for index in range(1, y_true.shape[-1]):
y_true_f = K.flatten(y_true[:,:,:,:,index])
y_pred_f = K.flatten(y_pred[:,:,:,:,index])
ret_sen += self.S(y_true_f, y_pred_f)
return ret_sen / (y_true.shape[-1] - 1)
def ppv_cal(self, y_true_f, y_pred_f, smooth = 1e-5):
intersection = K.sum(y_true_f * y_pred_f)
return intersection / (K.sum(y_true_f) + smooth)
def ppv(self, y_true, y_pred):
ret_ppv = 0.0
for index in range(1, y_true.shape[-1]):
y_true_f = K.flatten(y_true[:,:,:,:,index])
y_pred_f = K.flatten(y_pred[:,:,:,:,index])
ret_ppv += self.ppv_cal(y_true_f, y_pred_f)
return ret_ppv / (y_true.shape[-1] - 1)
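# Hedged usage sketch (the (batch, x, y, z, classes) layout is inferred from the
# slicing above; the inputs are illustrative one-hot masks):
#   import numpy as np
#   y = K.constant(np.eye(2)[np.zeros((1, 4, 4, 4), dtype=int)])
#   Metric(metric='dice').metric(y, y)  # identical masks -> dice score of 1.0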
|
[
"kuangmeng@msn.com"
] |
kuangmeng@msn.com
|
069d23d123f3c38400dbad4b87ff53b3e7047772
|
a5be588253399c19c6f65f42a3d00324aa38a0e3
|
/HW5/Q4.py
|
e4df29d27002190b6f62ae267391f52da4012935
|
[] |
no_license
|
zahrahosseini99/computer-vision-course
|
b7e9e3869552e9395aa3e7ba2db2be2421b1bb0d
|
0c98d95cfc703acf08cbb62f42506d915905f8b2
|
refs/heads/main
| 2023-07-15T04:26:55.527095
| 2021-08-26T08:47:12
| 2021-08-26T08:47:12
| 338,807,780
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 939
|
py
|
import cv2
import numpy as np
def get_45_edges(image):
'''
Returns the image which shows the 45-degree edges.
Parameters:
image (numpy.ndarray): The input image.
Returns:
edges_45 (numpy.ndarray): The 45-degree edges of input image.
'''
kernel = None
edges_45 = image.copy()
    # Write your code here
    kernel = np.array([[2, 1, 0], [1, 0, -1], [0, -1, -2]], np.float32)
    edges_45 = cv2.filter2D(image, -1, kernel)
return edges_45
def get_135_edges(image):
'''
Returns the image which shows the 135-degree edges.
Parameters:
image (numpy.ndarray): The input image.
Returns:
edges_135 (numpy.ndarray): The 135-degree edges of input image.
'''
kernel = None
edges_135 = image.copy()
    # Write your code here
    kernel = np.array([[0, -1, -2], [1, 0, -1], [2, 1, 0]], np.float32)
    edges_135 = cv2.filter2D(image, -1, kernel)
return edges_135
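# Hedged usage sketch (file names are placeholders):
#   img = cv2.imread('input.png', cv2.IMREAD_GRAYSCALE)
#   cv2.imwrite('edges_45.png', get_45_edges(img))
#   cv2.imwrite('edges_135.png', get_135_edges(img))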
|
[
"hosseini99.zahra@gmail.com"
] |
hosseini99.zahra@gmail.com
|
a4c6d9b46b276041c17bd2f1719a9186f8519449
|
35edf8f827bbb380e380c8cf1d0ce177754676e7
|
/Tercero/ISSBC/Practica 5/rdflib/samples-tutorial-old/s6.py
|
c5d7524d77b795f57801dd068d143af4c8f79ad4
|
[] |
no_license
|
IronSenior/PracticasUNI
|
e25182aaee9ceaad07cf2546879836c3fcbaea24
|
b5b2041e529923636eeba426a7efd32269ba6fc0
|
refs/heads/master
| 2021-07-11T01:48:18.915121
| 2020-06-07T00:07:38
| 2020-06-07T00:07:38
| 130,170,973
| 6
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 597
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Wed Mar 18 20:25:07 2015
@author: acalvo
"""
"""
SPARQL Update statements can be applied with :meth:`rdflib.graph.Graph.update`
"""
import rdflib
if __name__=='__main__':
g = rdflib.Graph()
g.load("foaf.rdf")
g.update('''
PREFIX foaf: <http://xmlns.com/foaf/0.1/>
PREFIX dbpedia: <http://dbpedia.org/resource/>
INSERT
{ ?s a dbpedia:Human . }
WHERE
{ ?s a foaf:Person . }
''')
for x in g.subjects(
rdflib.RDF.type, rdflib.URIRef('http://dbpedia.org/resource/Human')):
print x
|
[
"jose.marquez@atlantyasoftware.com"
] |
jose.marquez@atlantyasoftware.com
|
22c3ed54ea958655229e920bed61a6cd380cabb1
|
57cf9b9dca994e1df417b7484c464a2826357254
|
/tipo_conjunto.py
|
7ee392cfe1c48261978bf725b1f0e3cc71769324
|
[] |
no_license
|
academia-szycho/clases
|
f471d9e48bff90c5a09ef78ca1635cbcb8b2d904
|
eac9117ec496ce35d1ed33631219e5b23a966fb3
|
refs/heads/master
| 2023-01-19T07:39:57.924852
| 2020-11-14T18:39:44
| 2020-11-14T18:39:44
| 293,975,311
| 1
| 2
| null | 2020-11-08T06:00:52
| 2020-09-09T02:00:08
|
Python
|
UTF-8
|
Python
| false
| false
| 355
|
py
|
lista = ["banana", "manzana", "naranja", "naranja"]
frutas_sin_repetir = set(lista)
'''
for fruta in lista:
if fruta not in frutas_sin_repetir:
frutas_sin_repetir.append(fruta)
'''
print(frutas_sin_repetir)
frutas_sin_repetir = {"naranja", "frutilla"}
print(frutas_sin_repetir)
frutas_sin_repetir.remove("naranja")
print(frutas_sin_repetir)
|
[
"licefim254@acceptmail.net"
] |
licefim254@acceptmail.net
|
79d221d5ee82105be91e5eb4e356874dc967f295
|
5408da89e75c5a79f858eb2c7159c0e5c3c6f615
|
/test.py
|
cd6d8f3812a756fed15afadd7c3a91836cf2df8e
|
[
"MIT"
] |
permissive
|
Spanfile/Dijkstra
|
848c65de580d494d643a073bd0049e7f54deba3b
|
ea4e7ba68b6b1a7feed29c09f6108db32c57fd17
|
refs/heads/master
| 2020-03-12T02:02:02.861489
| 2018-04-20T17:09:40
| 2018-04-20T17:09:40
| 130,391,460
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,060
|
py
|
from random import randint
from time import perf_counter
from multiprocessing import Process, JoinableQueue, current_process
from progressbar import ProgressBar
from dijkstra import Graph, dijkstra
def generate_random_graph(nodes, edges_per_node, max_weight):
graph = Graph()
for node_index in range(1, nodes + 1):
for edge_index in range(edges_per_node):
weight = randint(1, max_weight)
neighbour = node_index
while neighbour == node_index:
neighbour = randint(0, nodes)
graph.add_edge_with_cost(node_index, neighbour, weight)
return graph
def test(queue, output):
start = 1
edges_per_node = 6
max_weight = 100
pid = current_process().pid
while True:
graph_size, graph_index = queue.get()
if not graph_size:
break
end = graph_size
#print("PID {}: Starting graph_size={}, graph_index={}".format(pid, graph_size, graph_index))
#csv.write("Time,Steps,Length,Nodes\n")
while True:
graph = generate_random_graph(graph_size, edges_per_node, max_weight)
try:
start_time = perf_counter()
moves, weights, steps = dijkstra(graph, start)
end_time = perf_counter()
except:
continue
elapsed = end_time - start_time
path = []
length = weights[end]
current = end
complete = False
while current:
path.append(current)
if current == start:
complete = True
break
current = moves[current]
if not complete:
continue
row = "{},{},{},{},{},{}".format(graph_size, graph_index, elapsed, steps, length, len(path))
#print("PID {}: {}".format(pid, row))
output.put(row)
#csv.write(row + "\n")
break
queue.task_done()
print("PID {}: done".format(pid))
def writer(output):
with open("results.csv", "w") as f, ProgressBar(max_value=5000) as bar:
f.write("Graph size,Test index,Elapsed time,Steps,Path length,Path nodes\n")
i = 0
while True:
row = output.get()
if not row:
break
f.write(row + "\n")
i += 1
bar.update(i)
output.task_done()
def main():
queue = JoinableQueue()
output = JoinableQueue()
processes = [Process(target=test, args=(queue, output)) for index in range(8)]
write_proc = Process(target=writer, args=(output,))
for p in processes:
p.start()
write_proc.start()
for size in range(100, 5100, 100):
for index in range(0, 100):
queue.put((size, index))
queue.join()
output.join()
    # one shutdown sentinel per worker process
    for p in processes:
        queue.put((None, None))
output.put(None)
for p in processes:
p.join()
if __name__ == "__main__":
main()
|
[
"spansdev@gmail.com"
] |
spansdev@gmail.com
|
612580bde3a415ac4d0e860531c600f737b1f3fd
|
1b03fa00961eff21a8b0dea12ae2a78de5275765
|
/M5T1_COATS.py
|
687a3f88e62c391f67a74f68f2b98b1f871c7bb5
|
[] |
no_license
|
coatsb9112/M2T1
|
fd1c27c2a8a38e0820e069f2c489ea6f28a89a3a
|
5f667f1ad6c8a8dcc9754a4032c18eb01cebae33
|
refs/heads/master
| 2021-01-20T11:39:28.172419
| 2017-12-09T19:53:18
| 2017-12-09T19:53:18
| 101,679,082
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 378
|
py
|
# Brandon Coats
# CTI 110
# 10/22/17
# M5T1
import turtle
turtle.shape("turtle")
turtle.forward(50)
turtle.left(90)
turtle.forward(50)
turtle.left(90)
turtle.forward(50)
turtle.left(90)
turtle.forward(50)
turtle.left(90)
turtle.forward(100)
turtle.left(175)
turtle.forward(100)
turtle.left(90)
turtle.forward(10)
turtle.left(90)
turtle.right(2)
|
[
"noreply@github.com"
] |
coatsb9112.noreply@github.com
|
69c2c00b0870c64fb7a2debb3e6aa6790c731a80
|
bfa2796ba38793b88dce8515f01b4b315639d51e
|
/exercise_01/app.py
|
974428428abd7c481ea092df4422115f0d2339b0
|
[] |
no_license
|
EniDev911/Flask
|
c2a4ef45e7eccc10ec924241143d44dcbdb9ecaf
|
2d35af9b2ffe10803d0acdb3b46c27ecb7394fd4
|
refs/heads/master
| 2023-06-08T12:26:57.267470
| 2021-06-24T23:27:17
| 2021-06-24T23:27:17
| 309,044,877
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 353
|
py
|
from flask import Flask
app = Flask(__name__)
@app.route('/')
def index():
return '<h1>Hello World from view function!.</h1>'
@app.route('/user/<name>')
def user(name):
return '<h1>Hello %s welcome to Flask</h1>' % name
@app.route('/profile')
def profile():
return '<h2>Welcome to profile</h2>'
if __name__ == '__main__':
app.run(debug=True)
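# Quick check (assuming the default local dev server): GET
# http://127.0.0.1:5000/user/Ana responds with "<h1>Hello Ana welcome to Flask</h1>".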
|
[
"enidev911@gmail.com"
] |
enidev911@gmail.com
|
7961fd11a9aeb2bf7b63127c7e250a4c4aa7994c
|
155f59e3aa788cdb7d9256e685bfe5446fe6c623
|
/FxStocks/Plot/DemoWithMatplotLib.py
|
2c1c859bd5061dca5f60b697b56466af61cc0d72
|
[] |
no_license
|
fictor/FxFinance
|
81eb2e15cb4d069ffc7b9e3a56edc1c598fa37a1
|
b0ff6d698d707d47c99808568b5527c4461eddf9
|
refs/heads/master
| 2021-04-30T09:11:33.852312
| 2021-01-09T16:19:09
| 2021-01-09T16:19:09
| 121,302,507
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 885
|
py
|
import quandl
import matplotlib.pyplot as plt
import pandas as pd
auth_token = 'QTxeC7MJ_NMJoFhfmMjG'
def MatPlotLibDemo():
# Example Two:
with open('quandlkey.txt', 'r') as quandl_key_file:
auth_token = quandl_key_file.read()
perth_silver = quandl.get("PERTH/SLVR_USD_M", authtoken = auth_token)
# Convert to pandas dataframe:
silver_df = pd.DataFrame(perth_silver)
print(silver_df.head(6))
print(silver_df.tail(6))
plt.subplot(2, 1, 1)
plt.plot(silver_df.index, silver_df['Bid Average'])
plt.title(''' Perth Mint Monthly Prices For Silver (USD) \n''')
plt.xticks(silver_df.index[0::75],[])
plt.xlabel('\n Year')
plt.ylabel('Avg. Bid Price \n')
plt.subplot(2, 1, 2)
plt.plot(silver_df.index, silver_df['Ask Average'])
plt.xlabel('\n Year')
plt.ylabel('Avg. Ask Price \n')
plt.show()
|
[
"felix.clap@gmail.com"
] |
felix.clap@gmail.com
|
146aa11c9c3cb3f4bed5cca81a0563e4850a8408
|
45936e8150b52167fb03d8cae528f99b7d802371
|
/compare/path.py
|
c66c563e407194f51d9779cc4540a9e5f8f35963
|
[
"MIT"
] |
permissive
|
reneschaub/opt_einsum
|
527dbd78ef8a46529debdbb88b7c0238994bc71b
|
8b4ba900eb6e216a9daae9f2d06791cec8049a1f
|
refs/heads/master
| 2021-01-25T08:07:59.813777
| 2018-04-09T07:35:32
| 2018-04-09T07:35:32
| 93,721,957
| 0
| 0
| null | 2017-06-08T07:49:59
| 2017-06-08T07:49:59
| null |
UTF-8
|
Python
| false
| false
| 2,545
|
py
|
import numpy as np
import pandas as pd
import sys, traceback
import timeit
import time
import test_helper as th
from opt_einsum import contract
pd.set_option('display.width', 200)
import resource
rsrc = resource.RLIMIT_DATA
limit = int(1e10)
resource.setrlimit(rsrc, (limit, limit))
test_einsum = False
test_paths = True
opt_path_time = True
term_thresh = 4
tdot=True
#scale_list = [1.0, 1.1, 1.2, 1.3, 1.4, 1.5]
scale_list = [2]
out = []
for key in th.tests.keys():
sum_string, index_size = th.tests[key]
for scale in scale_list:
views = th.build_views(sum_string, index_size, scale=scale)
# At this point lets assume everything works correctly
t = time.time()
opt_path = contract(sum_string, *views, path='optimal', return_path=True)
opt_time = time.time() - t
opp_path = contract(sum_string, *views, path='opportunistic', return_path=True)
if opt_path_time and (len(views) > term_thresh):
print 'Path optimal took %3.5f seconds for %d terms.' % (opt_time, len(views))
# If identical paths lets just skip them
if all(x==y for x, y in zip(opp_path, opt_path)):
break
setup = "import numpy as np; from opt_einsum import contract; \
from __main__ import sum_string, views, opt_path, opp_path, tdot"
opportunistic_string = "contract(sum_string, *views, path=opp_path, tensordot=tdot)"
optimal_string = "contract(sum_string, *views, path=opt_path, tensordot=tdot)"
# Optional test
if test_paths:
opp = contract(sum_string, *views, path=opp_path)
opt = contract(sum_string, *views, path=opt_path)
assert np.allclose(opp, opt)
if test_einsum and test_paths:
assert np.allclose(opp, np.einsum(sum_string, *views))
num_loops = 5
        optimal_time = timeit.timeit(optimal_string, setup=setup, number=num_loops) / num_loops
        opportunistic_time = timeit.timeit(opportunistic_string, setup=setup, number=num_loops) / num_loops
out.append([key, sum_string, scale, optimal_time, opportunistic_time])
df = pd.DataFrame(out)
df.columns = ['Key', 'String', 'Scale', 'Optimal time', 'Opportunistic time']
df['Ratio'] = np.around(df['Opportunistic time']/df['Optimal time'], 2)
df = df.set_index(['Key', 'Scale'])
df = df.sort_index()
print df
print '\nDescription of speedup:'
print df['Ratio'].describe()
print '\nNumber of optimal paths slower than opportunistic paths: %d.' % np.sum(df['Ratio']<0.8)
|
[
"malorian@me.com"
] |
malorian@me.com
|
327232060f0e15512e93eebf1f7d39b82e1718d7
|
926b3c52070f6e309567c8598248fd5c57095be9
|
/src/mmdeploy/mmdeploy/codebase/mmdet/deploy/object_detection.py
|
ede6a08b4e991848bdcc9c02f7fbbeaa39d40a09
|
[
"Apache-2.0"
] |
permissive
|
fengbingchun/PyTorch_Test
|
410f7cd2303707b0141d433fb9d144a961e1f4c8
|
df5c2169f0b699bcd6e74adb4cb0e57f7dcd9348
|
refs/heads/master
| 2023-05-23T16:42:29.711338
| 2023-03-25T11:31:43
| 2023-03-25T11:31:43
| 167,339,907
| 15
| 4
| null | 2023-03-25T11:31:45
| 2019-01-24T09:24:59
|
C++
|
UTF-8
|
Python
| false
| false
| 12,325
|
py
|
# Copyright (c) OpenMMLab. All rights reserved.
from typing import Any, Dict, Optional, Sequence, Tuple, Union
import mmcv
import numpy as np
import torch
from mmcv.parallel import DataContainer
from torch.utils.data import Dataset
from mmdeploy.utils import Task
from mmdeploy.utils.config_utils import get_input_shape, is_dynamic_shape
from ...base import BaseTask
from .mmdetection import MMDET_TASK
def process_model_config(model_cfg: mmcv.Config,
imgs: Union[Sequence[str], Sequence[np.ndarray]],
input_shape: Optional[Sequence[int]] = None):
"""Process the model config.
Args:
model_cfg (mmcv.Config): The model config.
        imgs (Sequence[str] | Sequence[np.ndarray]): Input image(s); accepted
            data types are List[str] and List[np.ndarray].
        input_shape (list[int]): A list of two integers in (width, height)
            format specifying the input shape. Default: None.
Returns:
mmcv.Config: the model config after processing.
"""
from mmdet.datasets import replace_ImageToTensor
cfg = model_cfg.copy()
if isinstance(imgs[0], np.ndarray):
cfg = cfg.copy()
# set loading pipeline type
cfg.data.test.pipeline[0].type = 'LoadImageFromWebcam'
# for static exporting
if input_shape is not None:
cfg.data.test.pipeline[1]['img_scale'] = tuple(input_shape)
transforms = cfg.data.test.pipeline[1]['transforms']
for trans in transforms:
trans_type = trans['type']
if trans_type == 'Resize':
trans['keep_ratio'] = False
elif trans_type == 'Pad':
trans['size_divisor'] = 1
cfg.data.test.pipeline = replace_ImageToTensor(cfg.data.test.pipeline)
return cfg
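# Hedged usage sketch (config path, image name and shape are placeholders):
#   cfg = process_model_config(mmcv.Config.fromfile('faster_rcnn.py'),
#                              ['demo.jpg'], input_shape=(1333, 800))
#   # cfg.data.test.pipeline now carries a fixed img_scale for static export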
@MMDET_TASK.register_module(Task.OBJECT_DETECTION.value)
class ObjectDetection(BaseTask):
def __init__(self, model_cfg: mmcv.Config, deploy_cfg: mmcv.Config,
device: str) -> None:
super().__init__(model_cfg, deploy_cfg, device)
def init_backend_model(self,
model_files: Optional[str] = None,
**kwargs) -> torch.nn.Module:
"""Initialize backend model.
Args:
model_files (Sequence[str]): Input model files.
Returns:
nn.Module: An initialized backend model.
"""
from .object_detection_model import build_object_detection_model
model = build_object_detection_model(
model_files, self.model_cfg, self.deploy_cfg, device=self.device)
return model.eval()
def init_pytorch_model(self,
model_checkpoint: Optional[str] = None,
cfg_options: Optional[Dict] = None,
**kwargs) -> torch.nn.Module:
"""Initialize torch model.
Args:
model_checkpoint (str): The checkpoint file of torch model,
defaults to `None`.
cfg_options (dict): Optional config key-pair parameters.
Returns:
nn.Module: An initialized torch model generated by other OpenMMLab
codebases.
"""
if self.from_mmrazor:
from mmrazor.apis import init_mmdet_model as init_detector
else:
from mmdet.apis import init_detector
model = init_detector(self.model_cfg, model_checkpoint, self.device,
cfg_options)
return model.eval()
def create_input(self,
imgs: Union[str, np.ndarray, Sequence],
input_shape: Sequence[int] = None) \
-> Tuple[Dict, torch.Tensor]:
"""Create input for detector.
Args:
            imgs (str | np.ndarray): Input image(s); accepted data types are
                `str` and `np.ndarray`.
            input_shape (list[int]): A list of two integers in (width, height)
                format specifying the input shape. Defaults to `None`.
Returns:
tuple: (data, img), meta information for the input image and input.
"""
from mmcv.parallel import collate, scatter
from mmdet.datasets.pipelines import Compose
if isinstance(imgs, (str, np.ndarray)):
imgs = [imgs]
dynamic_flag = is_dynamic_shape(self.deploy_cfg)
cfg = process_model_config(self.model_cfg, imgs, input_shape)
        # Drop pad_to_square for static shapes, since a static shape must be
        # fixed before the image is fed to the model.
if not dynamic_flag:
transform = cfg.data.test.pipeline[1]
if 'transforms' in transform:
transform_list = transform['transforms']
for i, step in enumerate(transform_list):
if step['type'] == 'Pad' and 'pad_to_square' in step \
and step['pad_to_square']:
transform_list.pop(i)
break
test_pipeline = Compose(cfg.data.test.pipeline)
data_list = []
for img in imgs:
# prepare data
if isinstance(img, np.ndarray):
# directly add img
data = dict(img=img)
else:
# add information into dict
data = dict(img_info=dict(filename=img), img_prefix=None)
# build the data pipeline
data = test_pipeline(data)
data_list.append(data)
data = collate(data_list, samples_per_gpu=len(imgs))
for k, v in data.items():
# batch_size > 1
if isinstance(v[0], DataContainer):
data[k] = v[0].data
if self.device != 'cpu':
data = scatter(data, [self.device])[0]
return data, data['img']
def visualize(self,
model: torch.nn.Module,
image: Union[str, np.ndarray],
result: list,
output_file: str,
window_name: str,
show_result: bool = False,
score_thr: float = 0.3):
"""Visualize predictions of a model.
Args:
model (nn.Module): Input model.
image (str | np.ndarray): Input image to draw predictions on.
result (list): A list of predictions.
output_file (str): Output file to save drawn image.
window_name (str): The name of visualization window. Defaults to
an empty string.
show_result (bool): Whether to show result in windows, defaults
to `False`.
score_thr (float): The score threshold to display the bbox.
Defaults to 0.3.
"""
show_img = mmcv.imread(image) if isinstance(image, str) else image
output_file = None if show_result else output_file
model.show_result(
show_img,
result=result,
win_name=window_name,
show=show_result,
out_file=output_file,
score_thr=score_thr)
@staticmethod
def run_inference(model: torch.nn.Module,
model_inputs: Dict[str, torch.Tensor]) -> list:
"""Run inference once for a object detection model of mmdet.
Args:
model (nn.Module): Input model.
model_inputs (dict): A dict containing model inputs tensor and
meta info.
Returns:
list: The predictions of model inference.
"""
return model(**model_inputs, return_loss=False, rescale=True)
@staticmethod
def get_partition_cfg(partition_type: str) -> Dict:
"""Get a certain partition config for mmdet.
Args:
partition_type (str): A string specifying partition type.
Returns:
dict: A dictionary of partition config.
"""
from .model_partition_cfg import MMDET_PARTITION_CFG
assert (partition_type in MMDET_PARTITION_CFG), \
f'Unknown partition_type {partition_type}'
return MMDET_PARTITION_CFG[partition_type]
@staticmethod
def get_tensor_from_input(input_data: Dict[str, Any]) -> torch.Tensor:
"""Get input tensor from input data.
Args:
input_data (dict): Input data containing meta info and image
tensor.
Returns:
torch.Tensor: An image in `Tensor`.
"""
img_tensor = input_data['img'][0]
if isinstance(img_tensor, DataContainer):
img_tensor = img_tensor.data[0]
return img_tensor
@staticmethod
def evaluate_outputs(model_cfg: mmcv.Config,
outputs: Sequence,
dataset: Dataset,
metrics: Optional[str] = None,
out: Optional[str] = None,
metric_options: Optional[dict] = None,
format_only: bool = False,
log_file: Optional[str] = None):
"""Perform post-processing to predictions of model.
Args:
model_cfg (mmcv.Config): Model config.
outputs (list): A list of predictions of model inference.
dataset (Dataset): Input dataset to run test.
metrics (str): Evaluation metrics, which depends on
the codebase and the dataset, e.g., "bbox", "segm", "proposal"
for COCO, and "mAP", "recall" for PASCAL VOC in mmdet.
out (str): Output result file in pickle format, defaults to `None`.
metric_options (dict): Custom options for evaluation, will be
kwargs for dataset.evaluate() function. Defaults to `None`.
format_only (bool): Format the output results without perform
evaluation. It is useful when you want to format the result
to a specific format and submit it to the test server. Defaults
to `False`.
log_file (str | None): The file to write the evaluation results.
Defaults to `None` and the results will only print on stdout.
"""
from mmcv.utils import get_logger
logger = get_logger('test', log_file=log_file)
if out:
logger.debug(f'writing results to {out}')
mmcv.dump(outputs, out)
kwargs = {} if metric_options is None else metric_options
if format_only:
dataset.format_results(outputs, **kwargs)
if metrics:
eval_kwargs = model_cfg.get('evaluation', {}).copy()
# hard-code way to remove EvalHook args
for key in [
'interval', 'tmpdir', 'start', 'gpu_collect', 'save_best',
'rule', 'dynamic_intervals'
]:
eval_kwargs.pop(key, None)
eval_kwargs.update(dict(metric=metrics, **kwargs))
logger.info(dataset.evaluate(outputs, **eval_kwargs))
def get_preprocess(self) -> Dict:
"""Get the preprocess information for SDK.
Return:
dict: Composed of the preprocess information.
"""
input_shape = get_input_shape(self.deploy_cfg)
model_cfg = process_model_config(self.model_cfg, [''], input_shape)
preprocess = model_cfg.data.test.pipeline
return preprocess
def get_postprocess(self) -> Dict:
"""Get the postprocess information for SDK.
Return:
dict: Composed of the postprocess information.
"""
postprocess = self.model_cfg.model.test_cfg
if 'rpn' in postprocess:
postprocess['min_bbox_size'] = postprocess['rpn']['min_bbox_size']
if 'rcnn' in postprocess:
postprocess['score_thr'] = postprocess['rcnn']['score_thr']
if 'mask_thr_binary' in postprocess['rcnn']:
postprocess['mask_thr_binary'] = postprocess['rcnn'][
'mask_thr_binary']
return postprocess
def get_model_name(self) -> str:
"""Get the model name.
Return:
str: the name of the model.
"""
assert 'type' in self.model_cfg.model, 'model config contains no type'
name = self.model_cfg.model.type.lower()
return name
|
[
"fengbingchun@163.com"
] |
fengbingchun@163.com
|
43ac288294f7307ef6b7397e48bc66fde0727010
|
5093e1a9ea2c8619965e5e071f3b4a70f3b7dfc0
|
/src/infra/orm/entities/user.py
|
42b82a3429fb7ab76100246bd4126bffde1b0db3
|
[] |
no_license
|
Genial-Ideias/poc_multi_tenant_sqlalchemy
|
891df3ec48ee5cc2bfd85d8a24e2be25b5c0f8a1
|
dc8b5c0f95107b2b2674c4509d8ad3123989c14c
|
refs/heads/main
| 2023-06-03T18:05:25.153427
| 2021-06-14T21:31:26
| 2021-06-14T21:31:26
| 376,933,111
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 365
|
py
|
from sqlalchemy import Column, String, Integer, Boolean
from src.config.database import TenantBase
class User(TenantBase):
__tablename__ = 'users'
id = Column(Integer, primary_key=True, index=True)
email = Column(String, unique=True, index=True)
password = Column(String)
name = Column(String)
is_active = Column(Boolean, default=True)
|
[
"leonardofreitasdev@gmail.com"
] |
leonardofreitasdev@gmail.com
|
dc8e0abea47e97f42d6101bb38afad1d85cc9a18
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_182/ch11_2019_03_01_14_42_49_680696.py
|
986f9bb900d027d3760911518e83cb97aa2593fb
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628
| 2020-12-16T05:21:31
| 2020-12-16T05:21:31
| 306,735,108
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 65
|
py
|
def celsius_para_fahrenheit(x):
    y = (9 * x) / 5 + 32
    return y
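# Quick check (standard conversion values): celsius_para_fahrenheit(0) == 32
# and celsius_para_fahrenheit(100) == 212.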
|
[
"you@example.com"
] |
you@example.com
|
25c701924f1d7d293fb25a0f20ffcea9adec6f4e
|
1d7c4b6ea4c761dffd432cc289ba4f8216543a30
|
/bsc/auth/views.py
|
21e81b4b1c547060041941f53f1bfab01aa33eda
|
[] |
no_license
|
gitter-badger/bsc
|
7f99a457b08d108f1c405df49e065fedf226301e
|
ccc3e1db1213d33407967be17da81ce2d8544b23
|
refs/heads/master
| 2021-01-21T15:43:00.908560
| 2010-10-08T09:50:54
| 2010-10-08T09:50:54
| 48,218,508
| 0
| 0
| null | 2015-12-18T06:28:18
| 2015-12-18T06:28:16
|
CSS
|
UTF-8
|
Python
| false
| false
| 690
|
py
|
from django.shortcuts import render_to_response, redirect
from django.template import RequestContext
from django.contrib.auth import authenticate, login
from django.contrib.auth.models import User
from django.contrib import messages
from bsc.auth.forms import AuthLoginForm
def auth_login(request):
if (request.POST):
user = authenticate(username=request.POST['username'], password=request.POST['password'])
if user is not None:
if user.is_active:
login(request, user)
                # Redirect after a successful login ('/' is an assumed target).
                return redirect('/')
f = AuthLoginForm()
ctx = RequestContext(request, {'form': f.as_table()})
return render_to_response('auth/login_form.html', ctx)
|
[
"rutcreate@gmail.com"
] |
rutcreate@gmail.com
|
ac5ac60276052ded09478de66bf53d6a7da4485b
|
e81e949d4897370cec3c7307f7dbd2054e91087c
|
/main/serializers.py
|
69d89d5c819f95e0288637ca799a4f697a5be8c8
|
[] |
no_license
|
Niteshks247/BookAppointment
|
c72e9520e334b999b60f933e277e152c64f4a806
|
aeafe34ff682e19f9559d88bcd92692cc546e9bb
|
refs/heads/main
| 2023-03-29T12:08:13.636633
| 2021-04-07T19:26:49
| 2021-04-07T19:26:49
| 355,494,924
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,605
|
py
|
from django.contrib.auth.models import User
from rest_framework import serializers
from main.models import *
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ['email']
class patientSerializer(serializers.ModelSerializer):
email = serializers.StringRelatedField()
class Meta:
model = patient
fields = ['email', 'name']
class AppointmentSerializer(serializers.ModelSerializer):
user = patientSerializer()
class Meta:
model = appointment
fields = ['user', 'venue','id']
def to_representation(self, obj):
representation = super().to_representation(obj)
user_representation = representation.pop('user')
for key in user_representation:
representation[key] = user_representation[key]
return representation
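    # Illustrative note (field values assumed): to_representation above flattens
    # the nested user, so {'user': {'email': 'a@b.c', 'name': 'Ann'}, 'venue': 'X',
    # 'id': 1} is rendered as {'email': 'a@b.c', 'name': 'Ann', 'venue': 'X',
    # 'id': 1}; to_internal_value below rebuilds the nested form from flat input.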
def to_internal_value(self, data):
user_internal = {}
for key in patientSerializer.Meta.fields:
if key in data:
user_internal[key] = data.pop(key)
internal = super().to_internal_value(data)
internal['user'] = user_internal
return internal
def update(self, instance, validated_data):
user_data = validated_data.pop('user')
super().update(instance, validated_data)
user = instance.user
for attr, value in user_data.items():
setattr(user, attr, value)
user.save()
return instance
class hospitalSerializer(serializers.ModelSerializer):
class Meta:
model = hospital
fields = ['id', 'name']
|
[
"niteshks247@gmail.com"
] |
niteshks247@gmail.com
|
0f7084ce8ff2a815b81a9be6c27c5c103b3e645e
|
7b9dbd6fadd79a6bf061e1879140ac6d8604c0a6
|
/tests/relaymotortest.py
|
da54e0b4f6fc0abb8994fdfca7b7ed83a11804f1
|
[] |
no_license
|
diegorodriguezv/Dora
|
8743482762a5d182a442e635132994298128ea89
|
209bd7de60d0fdfb7fbc5687db3325ac62fc701e
|
refs/heads/master
| 2020-04-19T09:33:14.924662
| 2017-03-06T22:07:02
| 2017-03-06T22:07:02
| 67,390,700
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,198
|
py
|
import unittest
import time
import logging
from dora.motor.relaymotor import RelayMotor
class TestRelayMotor(unittest.TestCase):
on = 0
off = 0
on_time = 0.0
off_time = 0.0
def setUp(self):
self.motor = RelayMotor(self.count_on, self.count_off, .03)
self.is_first = True
logging.getLogger().setLevel(logging.DEBUG)
def tearDown(self):
self.motor.alive = False
self.motor.control_thread.join()
def count_on(self):
now = time.time()
if self.is_first:
self.start_time = self.last_time = now
self.is_first = False
self.off_time += now - self.last_time
self.last_time = now
elapsed = now - self.start_time
self.on += 1
# print "{:f} off: {:f} ".format(elapsed, self.off_time)
def count_off(self):
now = time.time()
self.on_time += now - self.last_time
self.last_time = now
elapsed = now - self.start_time
self.off += 1
# print "{:f} on: {:f} ".format(elapsed, self.on_time)
def test_init(self):
self.assertEqual(self.motor.throttle, 0)
self.assertEqual(self.on, 0)
self.assertEqual(self.off, 0)
# def test_shape(self):
# precision = 10
# cycles = 3
# throttle = .2
# ones_ok = 0
# zeros_ok = 0
# delta = .2
# self.motor = RelayMotor(self.countOn, self.countOff, 1)
# self.assertEqual(self.on, 0)
# self.assertEqual(self.off, 0)
# self.motor.set_throttle(throttle)
# time.sleep(self.motor.period / precision / 2.0)
# for c in range(cycles):
# for i in range(precision):
# if self.on == c + 1:
# ones_ok += 1
# if float(i) / precision < throttle:
# if self.off == c:
# zeros_ok += 1
# else:
# if self.off == c + 1:
# zeros_ok += 1
# time.sleep(self.motor.period / precision)
# ones_rate = float(ones_ok) / float(cycles * precision)
# ones_error = abs(ones_rate - 1)
# zeros_rate = float(zeros_ok) / float(cycles * precision)
# zeros_error = abs(zeros_rate - 1)
# # print "ones_error: {} zeros_error: {}".format(ones_error, zeros_error)
# self.assertLess(ones_error, delta)
# # self.assertGreater(zeros_error, -delta)
def test_distribution(self, throttle=.0):
now = time.time()
self.start_time = self.last_time = now
elapsed = now - self.start_time
# print "{} start throttle: {} ".format(elapsed, throttle)
cycles = 5
delta = 5 / 100.0
self.assertEqual(self.on, 0)
self.assertEqual(self.off, 0)
self.motor.set_throttle(throttle)
time.sleep((cycles + 1) * self.motor.period)
if throttle == 0:
now = time.time()
elapsed = now - self.start_time
logging.info("{:f} finish throttle: {} on: {} of: {} ont: {:f} oft: {:f} tt: {:f} \
real: {:f} ".format(elapsed, throttle, self.on, self.off, self.on_time, self.off_time, self.on_time + self.off_time, 0))
self.assertTrue(self.on_time == 0)
self.assertTrue(self.off_time == 0)
else:
now = time.time()
if self.off == self.on:
self.off_time += now - self.last_time
else:
self.on_time += now - self.last_time
total_time = self.on_time + self.off_time
self.assertTrue(total_time != 0)
on_rate = self.on_time / total_time
elapsed = now - self.start_time
logging.info("{:f} finish throttle: {} on: {} of: {} ont: {:f} oft: {:f} tt: {} \
real: {} ".format(elapsed, throttle, self.on, self.off, self.on_time, self.off_time, self.on_time + self.off_time,
on_rate))
self.assertGreaterEqual(on_rate, throttle - delta)
self.assertLessEqual(on_rate, throttle + delta)
def test_distribution_005(self):
throttle = 5 / 100.0
self.test_distribution(throttle)
def test_distribution_010(self):
throttle = 10 / 100.0
self.test_distribution(throttle)
def test_distribution_020(self):
throttle = 20 / 100.0
self.test_distribution(throttle)
def test_distribution_025(self):
throttle = 25 / 100.0
self.test_distribution(throttle)
def test_distribution_0333(self):
throttle = 33.3333333 / 100.0
self.test_distribution(throttle)
def test_distribution_050(self):
throttle = 50 / 100.0
self.test_distribution(throttle)
def test_distribution_080(self):
throttle = 80 / 100.0
self.test_distribution(throttle)
def test_distribution_090(self):
throttle = 90 / 100.0
self.test_distribution(throttle)
def test_distribution_095(self):
throttle = 95 / 100.0
self.test_distribution(throttle)
def test_distribution_100(self):
throttle = 100 / 100.0
self.test_distribution(throttle)
|
[
"diegorodriguezv@gmail.com"
] |
diegorodriguezv@gmail.com
|
9117ba1df298fe098fa12d7edc7e6e0021796cb9
|
a64b578a271af2491d3f0ca06ac569dd22246e27
|
/Resource/bxAPI.py
|
2d3f65f6a1f4933f713ba6ebb8f2ccea570ea089
|
[] |
no_license
|
fufu2193/chatb0t1234
|
15ca14d30fb42de1082c16916793a61c09203675
|
9dc57a5a26261b454f7ba799456ce02ae43ae0ba
|
refs/heads/master
| 2022-12-21T22:10:05.184642
| 2019-09-15T11:19:38
| 2019-09-15T11:19:38
| 208,576,798
| 0
| 1
| null | 2022-12-08T14:55:27
| 2019-09-15T10:24:18
|
Python
|
UTF-8
|
Python
| false
| false
| 1,038
|
py
|
import requests  # requires `pip install requests` first
import pprint
## Fetch prices from Bx.in.th
def GetBxPrice(Number_to_get = 5):
data = requests.get('https://bx.in.th/api/').json()
# pp = pprint.PrettyPrinter(indent=3)
# pp.pprint(data)
result = []
for key in list(data.keys())[0:Number_to_get]:
prim_name = data[key]['primary_currency']
sec_name = data[key]['secondary_currency']
change = data[key]['change']
last_price = data[key]['last_price']
volume = data[key]['volume_24hours']
price_data = {
'prim_name' : prim_name,
'sec_name' : sec_name,
'change' : change,
'last_price' : last_price,
'volume' : volume ,
}
result.append(price_data)
# print(prim_name , change , ' : ' , sec_name , ' : ', last_price , ' : ', change , ' : ', volume)
return result
# if __name__ == '__main__':
# print(GetBxPrice())
pp = pprint.PrettyPrinter(indent=3)
|
[
"petchicclub@gmail.com"
] |
petchicclub@gmail.com
|
ee423771b495161e5671e232ae8dccc9f743ee72
|
c0f328a2f595440c3f8674bbfef054009348e66a
|
/pset6/credit.py
|
3f16f5848dbb48205762f651838229e9bb2ba9e1
|
[] |
no_license
|
wilcokuyper/cs50
|
3b314e8fbcbd011671d228298b9cedfe05d57333
|
21ef27fcb0d4b939930f5c746c445666b9a99d74
|
refs/heads/master
| 2021-08-30T11:18:19.014682
| 2017-12-17T17:35:51
| 2017-12-17T17:35:51
| 114,556,084
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,752
|
py
|
def sumOfDigits(number):
    sum = 0
    length = len(number)
    val = int(number)
    # a credit card number is only valid if it contains 13, 15 or 16 digits
    if length == 13 or length == 15 or length == 16:
        # Luhn checksum: sum the digits right to left, doubling every second digit
        for i in range(1, length + 1):
            # peel off the current (rightmost remaining) digit
            currentDigit = int(val % 10)
            # every second digit from the right is doubled
            if i % 2 == 0:
                currentDigit *= 2
                if currentDigit > 9:
                    sum += int(currentDigit % 10) + 1
                else:
                    sum += currentDigit
            else:
                sum += currentDigit
            val = int(val / 10)
    return sum
def typeOfCard(number):
    # the issuer prefix: the first two digits of the card number
    # (integer division avoids float precision loss on 16-digit numbers)
    firstTwoDigits = int(number) // (10 ** (len(number) - 2))
    if firstTwoDigits == 34 or firstTwoDigits == 37:
        return "AMEX"
    elif (firstTwoDigits == 51 or
          firstTwoDigits == 52 or
          firstTwoDigits == 53 or
          firstTwoDigits == 54 or
          firstTwoDigits == 55):
        return "MASTERCARD"
    elif firstTwoDigits // 10 == 4:
        return "VISA"
    else:
        return "INVALID"
def printCardType(number):
# calculatie the sum of the creditcard digits, if it is positive and divisible by 10 it is valid
sum = sumOfDigits(number)
if sum > 0 and sum % 10 == 0:
print(typeOfCard(number))
else:
print("INVALID")
def main():
try:
number = input("Number: ")
printCardType(number)
exit(0)
except ValueError:
print("INVALID")
exit(1)
if __name__ == "__main__":
main()
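# Sample run (4003600000000014 is a hypothetical but Luhn-valid VISA test number):
#   Number: 4003600000000014
#   VISA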
|
[
"wilcokuyper@hotmail.com"
] |
wilcokuyper@hotmail.com
|
b20d5a6493549d6d865fea913a3776a70d7f3533
|
5b8c2e4a1382e495c279e9ff0c97a964d5f8940e
|
/component_testing/calcPWM_Motor.py
|
34c6e78567937b9460cbee34bca9908dabf3b151
|
[] |
no_license
|
ImamMuis/TugasAkhir
|
1f8f087b9e118d3cd02d66d343af6b13728866d2
|
585b7ffb14b498798c61caee9e9624ecc7f939ca
|
refs/heads/master
| 2023-08-21T02:02:28.891325
| 2021-10-23T10:13:23
| 2021-10-23T10:13:23
| 398,738,079
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,683
|
py
|
# PWM value conversion calculations
V_in = 24  # V (maximum voltage for the DC motor)
freq = 50  # Hz (PWM frequency)
PWM_Res = 2 ** 16 - 1  # PWM resolution (16-bit)
def motorCalc(value, selector):
valueError = 0
perioda = 1 / freq * 1000
if selector == "PWM":
PWM_Out = value
if PWM_Out > PWM_Res:
print("Input", value, "PWM terlalu besar!")
print("Input maksimal", PWM_Res, "PWM\n")
valueError = 1
else:
dutyCycle = PWM_Out / PWM_Res
V_Out = dutyCycle * V_in
T_on = dutyCycle * perioda
T_off = perioda - T_on
elif selector == "DUTYCYCLE":
dutyCycle = value / 100
if dutyCycle > 1:
print("Input", str(value) + "% Duty Cycle terlalu besar!")
print("Input maksimal 100% Duty Cycle\n")
valueError = 1
else:
V_Out = dutyCycle * V_in
PWM_Out = dutyCycle * PWM_Res
T_on = dutyCycle * perioda
T_off = perioda - T_on
elif selector == "VOLT":
V_Out = value
if V_Out > V_in:
print("Input", value, "Volt terlalu besar!")
print("Input maksimal", V_in, "Volt\n")
valueError = 1
else:
dutyCycle = V_Out / V_in
PWM_Out = dutyCycle * PWM_Res
T_on = dutyCycle * perioda
T_off = perioda - T_on
else:
print("Parameter 'value' harus PWM, DUTYCYCLE atau VOUT!\n")
valueError = 1
if valueError == 0:
print("Perioda :", round(perioda, 2), "ms")
print("PWM :", round(PWM_Out, 2))
print("Duty Cycle:", round(dutyCycle * 100, 2), "%")
print("Voltage :", round(V_Out, 2), "V")
print("Time On :", round(T_on, 2), "ms")
print("Time Off :", round(T_off, 2), "ms\n")
# Usage:
# motorCalc(26214, "PWM")
# motorCalc(40, "DUTYCYCLE")
# motorCalc(9.6, "VOLT")
|
[
"imuis373@gmail.com"
] |
imuis373@gmail.com
|
ddc567904f52522ee544a175b6283bc8884ff50b
|
34d88082307281333ef4aeeec012a3ff5f8ec06e
|
/Work/removeDuplicatedresult.py
|
b4b1711dfa80276421755ae80c9ac27a03967b34
|
[] |
no_license
|
JKChang2015/Python
|
a6f8b56fa3f9943682470ae57e5ad3266feb47a7
|
adf3173263418aee5d32f96b9ea3bf416c43cc7b
|
refs/heads/master
| 2022-12-12T12:24:48.682712
| 2021-07-30T22:27:41
| 2021-07-30T22:27:41
| 80,747,432
| 1
| 8
| null | 2022-12-08T04:32:06
| 2017-02-02T17:05:19
|
HTML
|
UTF-8
|
Python
| false
| false
| 1,663
|
py
|
# removeDuplicatedresult
# Created by JKChang
# 07/11/2018, 12:07
# Tag:
# Description:
from Work.ontology.ontology_info import entity
def removeDuplicated(res_list):
    priority = {'MTBLS': 0, 'NCBITAXON': 1, 'BTO': 2, 'EFO': 3, 'CHEBI': 4, 'CHMO': 5, 'NCIT': 6, 'PO': 7}
    res = {}
    for enti in res_list:
        term_name = enti.name.lower()
        onto_name = enti.ontoName
        # dict.get() never raises, so use a default instead of try/except;
        # unknown ontologies fall back to a low priority
        prior = priority.get(onto_name, 1000)
        if term_name in res:
            old_prior = priority.get(res[term_name].ontoName, 1000)
            # a smaller number means a higher-priority ontology
            if prior < old_prior:
                res[term_name] = enti
        else:
            res[term_name] = enti
    return list(res.values())
e1 = entity(name="mass spectrometry", iri="www.google.com", obo_ID="NICT123", ontoName="NCIT",
provenance_name="Metabolights", provenance_uri="www.ebi.ac.uk", Zooma_confidence="high")
e2 = entity(name="mass spectrometry", iri="www.google.com", obo_ID="NCBITAXON231", ontoName="NCBITAXON",
provenance_name="Metabolights", provenance_uri="www.ebi.ac.uk", Zooma_confidence="high")
e3 = entity(name="Mass spectrometry", iri="www.google.com", obo_ID="BTO231", ontoName="BTO",
provenance_name="Metabolights", provenance_uri="www.ebi.ac.uk", Zooma_confidence="high")
e4 = entity(name="mass Spectrometry", iri="www.google.com", obo_ID="CHEBI22", ontoName="CHEBI",
provenance_name="Metabolights", provenance_uri="www.ebi.ac.uk", Zooma_confidence="high")
res = [e1, e2, e3, e4]
r = removeDuplicated(res)
print(r)
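# With the sample entities above, all four names lower-case to "mass spectrometry",
# so only e2 survives (NCBITAXON has the highest priority of the four ontologies)
# and the script prints a one-element list.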
|
[
"jkchang2015@gmail.com"
] |
jkchang2015@gmail.com
|
abb3c656035e3dc6d0f7f69e10bc5862b7a67991
|
f7d7d5f24e5c3f80ba21a36215a653da2a386d87
|
/snaketest.py
|
2b42190d44571e8e86c799cff51bc6eb7bfe8e96
|
[] |
no_license
|
lukeh3nderson/snake_game_python
|
2a1ef5946768eb3b5c9ab0f0f0494e1acf4119ba
|
ceb322229aa77aaea2f04fde4aa6bd0be8583f83
|
refs/heads/main
| 2023-01-29T19:42:55.910440
| 2020-12-11T18:00:11
| 2020-12-11T18:00:11
| 313,690,871
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,967
|
py
|
import turtle
import random
import time
delay = 0.1
#score
score = 0
high_score = 0
#Set up the screen
wn = turtle.Screen()
wn.title("Snake by Luke Henderson")
wn.bgcolor("Blue")
wn.setup(width=700, height=700)
wn.tracer(0)
#Snake head
head = turtle.Turtle()
head.speed(1)
head.shape("square")
head.color("Black")
head.penup()
head.goto(0, 0)
head.direction = "stop"
#Snake food
food = turtle.Turtle()
food.speed(0)
food.color("White")
food.shape("circle")
food.penup()
food.goto(0, 100)
segments = []
# Pen
pen = turtle.Turtle()
pen.speed(0)
pen.shape("square")
pen.color("red")
pen.penup()
pen.hideturtle()
pen.goto(0, 300)
pen.write("Score : 0 High Score : 0", align = "center", font = ("Courier", 24, "normal"))
# Fuctions
def go_up():
if head.direction != "down":
head.direction = "up"
def go_down():
if head.direction != "up":
head.direction = "down"
def go_left():
if head.direction != "right":
head.direction = "left"
def go_right():
if head.direction != "left":
head.direction = "right"
def move():
if head.direction == "up":
y = head.ycor()
head.sety(y + 20)
if head.direction == "down":
y = head.ycor()
head.sety(y - 20)
if head.direction == "left":
x = head.xcor()
head.setx(x - 20)
if head.direction == "right":
x = head.xcor()
head.setx(x + 20)
#Setting up keyboard
wn.listen()
wn.onkeypress(go_up, "w")
wn.onkeypress(go_down, "s")
wn.onkeypress(go_left, "a")
wn.onkeypress(go_right, "d")
# Main loop
while True:
wn.update()
# Check for border collision
if head.xcor()>340 or head.xcor()< -340 or head.ycor()>340 or head.ycor()< -340:
time.sleep(1)
head.goto(0,0)
head.direction = "stop"
# Hide the segments
for segment in segments:
segment.goto(1000,1000)
# Clear the segments list
segments.clear()
#Reset the score
score = 0
#Reset the Delay
delay = 0.1
#reset pen
pen.clear()
pen.write("Score: {} High Score: {}".format(score, high_score), align="center", font=("Courier", 24, "normal"))
#Check for collison with food
if head.distance(food) < 20:
# Move food to random location
x = random.randint(-300, 300)
y = random.randint(-300, 300)
food.goto(x,y)
#Add segments
new_segment = turtle.Turtle()
new_segment.speed(0)
new_segment.shape("square")
new_segment.color("grey")
new_segment.penup()
segments.append(new_segment)
#Shorten the delay
delay -=0.001
#Increase score
score += 10
if score > high_score:
high_score = score
pen.clear()
pen.write("Score: {} High Score: {}".format(score, high_score), align="center", font=("Courier", 24, "normal"))
# Move segments to end in reverse order
for index in range(len(segments)-1, 0, -1):
x = segments[index-1].xcor()
y = segments[index-1].ycor()
segments[index].goto(x,y)
# Move segment 0 to where the head is
if len(segments) > 0:
x = head.xcor()
y = head.ycor()
segments[0].goto(x,y)
move()
#Check for head collsion with body
for segment in segments:
if segment.distance(head) < 20:
time.sleep(1)
head.goto(0,0)
head.direction = "stop"
#Hide the segments
for segment in segments:
segment.goto(1000,1000)
#Clear the segments
segments.clear()
#Reset the score
score = 0
#Reset delay
delay = 0.1
#Update score display
pen.clear()
pen.write("Score: {} High Score: {}".format(score, high_score), align="center", font=("Courier", 24, "normal"))
time.sleep(delay)
wn.mainloop()
|
[
"henderln@dukes.jmu.edu"
] |
henderln@dukes.jmu.edu
|
95365343ebd1d22bf9fdeb48fff8e8cbba2971f4
|
2ef867a1f6e823c5e36a0699c11f0f1214d096d8
|
/scripts/train_classifier.py
|
397a279f2c1c98a6e5a8a7645642fe8781bf4841
|
[] |
no_license
|
afcarl/deconvolution_cam
|
55566e618244c7e07e9afb2448a7fe655f67a5da
|
cb41a420d4bdc58bad7acccb25001ebfe966f79f
|
refs/heads/master
| 2020-03-18T16:40:20.805192
| 2017-10-27T16:44:10
| 2017-10-27T16:44:10
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,632
|
py
|
import sys
import math
import time
import gzip
import glob
import pickle
import argparse
import datetime
import torch
import torchvision
import torch.nn as nn
import torch.optim as optim
from torch.autograd import Variable
import torch.backends.cudnn as cudnn
from models import VGGlike_vanilla, VGGlike_upsample
from utils import get_loaders, load_log
parser = argparse.ArgumentParser(description='Kaggle Cdiscounts Training')
parser.add_argument('--gpu', default=1, type=int,
help='which gpu to run')
parser.add_argument('--batch_size', default=128, type=int,
help='size of batches')
parser.add_argument('--epochs', default=500, type=int,
help='number of epochs')
parser.add_argument('--lr', default=0.001, type=float,
help='learning rate')
parser.add_argument('--es_patience', default=3, type=int,
help='early stopping patience')
parser.add_argument('--lr_patience', default=1, type=int,
help='learning rate decay patience')
parser.add_argument('--lr_decay_scale', default=0.1, type=float,
help='how much to scale learning rate on each decay')
parser.add_argument('--load_best', action='store_true',
help='flag to load from checkpoint')
parser.add_argument('--load_last', action='store_true',
help='flag to load from end of training')
parser.add_argument('--model_name', default='vgglike', type=str,
help='name of model for saving/loading weights')
parser.add_argument('--exp_name', default='vanilla', type=str,
help='name of experiment for saving files')
parser.add_argument('--num_workers', default=3, type=int,
help='how many workers to use for data loader')
parser.add_argument('--imsize', default=128, type=int,
help='what size to set images')
args = parser.parse_args()
# set model filenames
MODEL_CKPT = '../models/best_{}_{}_classifier.pth'.format(args.model_name,
args.exp_name)
MODEL_FINL = '../models/last_{}_{}_classifier.pth'.format(args.model_name,
args.exp_name)
# init some training params or load from saved
valid_patience = 0
lr_patience = 0
# load the model
if args.exp_name == 'vanilla':
print('Using vanilla model ...')
net = VGGlike_vanilla()
else:
print('Using upsample model ...')
net = VGGlike_upsample()
# load from previous run,
if args.load_best:
print('Loading from checkpoint ...')
    net.load_state_dict(torch.load(MODEL_CKPT))
# load stats from saved run
STRT_EPOCH, best_val_loss = load_log(model_nm=args.model_name,
exp_nm=args.exp_name, load_best=True)
print('Starting from epoch {}, with best val loss {}'.format(STRT_EPOCH,
best_val_loss))
elif args.load_last:
print('Loading from end of run ...')
net.load_state_dict(torch.load(MODEL_FINL))
# load stats from saved run
STRT_EPOCH, best_val_loss = load_log(args.model_name,
exp_nm=args.exp_name, load_best=False)
print('Starting from epoch {}, with best val loss {}'.format(STRT_EPOCH,
best_val_loss))
else:
STRT_EPOCH, best_val_loss = 0, 10.0
# cuda and GPU settings
if args.gpu == 99:
net = torch.nn.DataParallel(net, device_ids=[0,1]).cuda()
else:
torch.cuda.set_device(args.gpu)
net.cuda()
cudnn.benchmark = True
criterion = nn.CrossEntropyLoss().cuda()
optimizer = optim.Adam(net.parameters(), lr=args.lr)
train_loader, val_loader, len_train = get_loaders(args.batch_size,
args.num_workers,
args.imsize)
# training loop
def train():
net.train()
# keep track of accuracy
total = 0
correct = 0
# keep track of losses
iter_loss = 0.
iter_correct = 0.
num_batch_epoch = 0
for i, data in enumerate(train_loader):
# get the inputs
inputs, labels = data
inputs = Variable(inputs.cuda())
        # `async` is a reserved word since Python 3.7; PyTorch renamed it non_blocking
        labels = Variable(labels.cuda(non_blocking=True))
# zero the parameter gradients
optimizer.zero_grad()
# forward + backward + optimize
outputs = net(inputs)
loss = criterion(outputs, labels)
loss.backward()
optimizer.step()
_, predicted = torch.max(outputs.data, 1)
total += labels.size(0)
correct += predicted.eq(labels.data).cpu().sum()
        iter_loss += loss.item()  # .data[0] indexing fails on 0-dim tensors in newer PyTorch
num_batch_epoch += 1
#print('Processed batch', num_batch_epoch, labels.data)
sys.stdout.write('\r')
multiplier = int((float(i) / (len_train // args.batch_size)) * 10)
sys.stdout.write('B: {:>3}/{:<3} | {:.3} | {:.3}'.format(i,
len_train // args.batch_size,
iter_loss / num_batch_epoch,
100.*correct/total))
#sys.stdout.write('-' * multiplier)
sys.stdout.flush()
avg_loss = iter_loss / num_batch_epoch
print('\n' + 'Train Loss: {:.3} | Train Acc: {:.3}'.format(avg_loss,
100.*correct/total))
return iter_loss / num_batch_epoch
# validation loop
def validate():
net.eval()
# keep track of accuracy
val_total = 0
val_correct = 0
# keep track of losses
val_loss = 0.
val_batch_num = 0
for j, data in enumerate(val_loader):
val_in, val_lab = data
#val_in = torch.from_numpy(val_in).float()
#val_lab = torch.from_numpy(val_lab).long()
val_in = Variable(val_in.cuda(), volatile=True)
        val_lab = Variable(val_lab.cuda(non_blocking=True))
val_out = net(val_in)
v_l = criterion(val_out, val_lab)
        val_loss += v_l.item()
_, val_pred = torch.max(val_out.data, 1)
val_total += val_lab.size(0)
val_correct += val_pred.eq(val_lab.data).cpu().sum()
val_batch_num += 1
avg_vloss = float(val_loss) / val_batch_num
print('Eval Loss: {:.3} | Eval Acc: {:.3}'.format(avg_vloss,
100.*val_correct/val_total))
return val_loss / val_batch_num, 100.*val_correct/val_total
# train the model
try:
print('Training ...')
train_losses = []
valid_losses = []
for e in range(STRT_EPOCH, args.epochs):
print('\n' + 'Epoch {}/{}'.format(e, args.epochs))
start = time.time()
t_l = train()
v_l, v_a = validate()
train_losses.append(t_l)
valid_losses.append(v_l)
# write the losses to a text file
with open('../logs/losses_{}_{}.txt'.format(args.model_name,
args.exp_name), 'a') as logfile:
logfile.write('{},{},{},{}'.format(e, t_l, v_l, v_a) + "\n")
# save the model everytime we get a new best valid loss
if v_l < best_val_loss:
torch.save(net.state_dict(), MODEL_CKPT)
best_val_loss = v_l
valid_patience = 0
lr_patience = 0
# if the validation loss gets worse increment 1 to the patience values
if v_l > best_val_loss:
valid_patience += 1
lr_patience += 1
# if the model doesn't improve by a certain amount of epochs,
# lower learning rate
if lr_patience >= args.lr_patience:
print('Changing learning rate by {}'.format(args.lr_decay_scale))
for params in optimizer.param_groups:
params['lr'] = params['lr'] * args.lr_decay_scale
lr_patience = 0
#LR_DECAY += 5
# start the net from the previous best
#net.load_state_dict(torch.load(MODEL_CKPT))
# if the model stops improving by a certain number epochs, stop
if valid_patience == args.es_patience:
break
print('Time: {}'.format(time.time()-start))
print('Finished Training')
except KeyboardInterrupt:
pass
torch.save(net.state_dict(), MODEL_FINL)
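# Example invocation (hypothetical values; the flags come from the argparse
# definitions at the top of this script):
#   python train_classifier.py --gpu 0 --batch_size 64 --exp_name vanilla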
|
[
"f.muellerklein@gmail.com"
] |
f.muellerklein@gmail.com
|
054b5a380f5c808db9a55ee682fd68ce632c77b9
|
05be6f562f1ac2445c835e717649a70bd747b21a
|
/ScrapyForAndroidDashboard/ScrapyForAndroidDashboard/pipelines.py
|
b7405d29892f1479993edf9e088095c48c7d31f9
|
[
"Apache-2.0"
] |
permissive
|
Kyson/ScrapyForAndroidDashboard
|
a5be0e390203d992c52bbcd4c28c4919b0a55985
|
a77a81564980fc0fde64b534d82991136e82e8ff
|
refs/heads/master
| 2021-01-20T05:14:15.560308
| 2017-04-29T06:01:48
| 2017-04-29T06:01:48
| 89,765,230
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,522
|
py
|
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import json
# ---
# title: hello,hikyson
# tags: [Default]
# category: [Default]
# comments: true
# date: 2014-04-20 22:18:43
# ---
#
# hello,hikyson
#
# <!-- more -->
#
# |Version|Codename|API|Distribution|
# |---|---|---|---|
# |111|222|333|444|
import os
from ScrapyForAndroidDashboard.git_pusher import post_title, local_time_str, post_name, push, post_file_dir
class ScrapyforandroiddashboardPipeline(object):
def process_item(self, item, spider):
# generate md file
divider = "---"
line_feed = "\r\n"
title = post_title
tags = "[android,spider,scrapy]"
category = "[scrapy]"
comments = "true"
date = local_time_str
more = "<!-- more -->"
head = "".join(
[divider, line_feed, "title: ", title, line_feed, "tags: ", tags, line_feed, "category: ", category,
line_feed, "comments: ", comments, line_feed, "date: ", date, line_feed, divider, line_feed])
summary = "This is a post generate by a spider , grab from url: [developer.android.google.cn](developer.android.google.cn)"
updatetime = "Update time: %s" % local_time_str
version_data_dict = json.loads(item["version_data"])
version_chart_url = "https:" + version_data_dict["chart"] + ".png"
# version text
text_version = "".join(
["" % version_chart_url, line_feed, line_feed, "|Codename|API|Distribution|",
line_feed, "|---|---|---|", line_feed])
version_items = version_data_dict["data"]
for version_item in version_items:
api = version_item["api"]
name = version_item["name"]
perc = version_item["perc"]
text_version = text_version + "|" + str(api) + "|" + name + "|" + str(perc) + "|" + line_feed
post = "".join(
[head, line_feed, line_feed, summary, line_feed, updatetime, line_feed, line_feed, more, line_feed,
line_feed, text_version])
for file_name in os.listdir(post_file_dir):
if file_name.find(post_title) >= 0:
os.remove(os.path.join(post_file_dir, file_name))
file_name = os.path.join(post_file_dir, post_name)
        # text mode: writing a str to a file opened with 'wb' raises TypeError on Python 3
        with open(file_name, 'w') as f:
            f.write(post)
push()
return item
|
[
"kysonchao@gmail.com"
] |
kysonchao@gmail.com
|
a425f5725507444a3e99b41db0e6814131a13d9e
|
cd2af363104e2f95f1fa25c7be1d24da9d7dfc74
|
/flask-hello-world/env/bin/wheel
|
8a0beb58ee6f2a04ef7f1ca48ae8d95e76ed3ea3
|
[] |
no_license
|
mnuman/real-python-2
|
27ebcd26bec3c292c4356898675888b68b8957ce
|
84ca78ceb9172a1f65bdcfe5fdda98215b8ed2a5
|
refs/heads/master
| 2021-01-10T06:06:47.968160
| 2016-01-09T16:21:10
| 2016-01-09T16:21:10
| 48,548,690
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 266
|
#!/home/milco/real-python/real-python-2/flask-hello-world/env/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from wheel.tool import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
|
[
"milco.numan@gmail.com"
] |
milco.numan@gmail.com
|
|
29dbd5c2e549839b8b31bd5f7b351b3c5fecf4f5
|
ee60cd0d0a69555df79934af79cf6d9cb54a3294
|
/dictionary.py
|
10a041ab4de2d746ccf7300cc866fcb998731711
|
[] |
no_license
|
rlllzk/pydasar
|
36a0e3312e8ee5e193c3f7efa1b50de00eb55031
|
5d257730e17b058aac75a2c067767427c2d78036
|
refs/heads/master
| 2021-08-03T00:15:43.038123
| 2021-07-20T15:11:21
| 2021-07-20T15:11:21
| 184,442,242
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 313
|
py
|
# dictionary
def main():
    d = {'satu':10, 'dua':20, 'tiga':30}
    # display the dictionary values
print("d['satu']: ", d['satu'])
print("d['dua']: ", d['dua'])
print("d['tiga']: ", d['tiga'])
print("d['dua'] * d['tiga']: ", (d['dua'] * d['tiga']))
if __name__=="__main__":
main()
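# Expected output:
#   d['satu']:  10
#   d['dua']:  20
#   d['tiga']:  30
#   d['dua'] * d['tiga']:  600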
|
[
"rezkysy@gmail.com"
] |
rezkysy@gmail.com
|
14290c30bf5ee52588265a1f4cf3d29f5c862a38
|
7d188086e76d6eb58d3f66a8606b1c18ea228caf
|
/blog/models.py
|
b8a80a22cac911e177ab0924aee7eec61c49a47f
|
[] |
no_license
|
gastonvera/Domus-2.0
|
069db70084cf7fbc0cac86e430ebee8f3d75c396
|
b852df1184a1b54c8262663f39f74609d86fa488
|
refs/heads/master
| 2023-04-23T12:38:17.258479
| 2021-05-19T20:18:46
| 2021-05-19T20:18:46
| 365,814,656
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,802
|
py
|
from django.contrib.auth.models import User
from django.db import models
TPO_CLIENTE = (
('CORPORATIVO', 'CORPORATIVO'),
('INDIVIDUO', 'INDIVIDUO'),
)
ESTADO_PAGO = (
('PAGADO', 'Pagado'),
('PENDIENTE', 'Pendiente'),
)
MEDIO_PAGO = (
('TRANSFERENCIA', 'Transferencia'),
('EFECTIVO', 'Efectivo'),
)
TPO_PROPIEDAD = (
('VENTA', 'Venta'),
('ALQUILER', 'Alquiler'),
)
FRENTE_CONTRAFRENTE = (
('FRENTE', 'Frente'),
('CONTRAFRENTE', 'Contrafrente'),
)
ORIENTACION = (
('ESTE', 'Este'),
('OESTE', 'Oeste'),
('NORTE', 'Norte'),
('SUR', 'Sur'),
)
class JefeAdministracion(models.Model):
nombre = models.ForeignKey(User, on_delete = models.CASCADE)
def __str__(self):
return self.nombre.username
class JefeComercializacion(models.Model):
nombre = models.ForeignKey(User, on_delete = models.CASCADE)
def __str__(self):
return self.nombre.username
class Cajera(models.Model):
nombre = models.ForeignKey(User, on_delete = models.CASCADE)
def __str__(self):
return self.nombre.username
class EmpleadoMarketing(models.Model):
nombre = models.ForeignKey(User, on_delete = models.CASCADE)
def __str__(self):
return self.nombre.username
class Agente(models.Model):
nombre = models.ForeignKey(User, on_delete = models.CASCADE)
def __str__(self):
return self.nombre.username
class Secretaria(models.Model):
nombre = models.ForeignKey(User, on_delete = models.CASCADE)
def __str__(self):
return self.nombre.username
class Propiedades(models.Model):
pid = models.AutoField('PID', primary_key = True, unique = True,
help_text='Identificación de propiedad'
)
foto = models.ImageField('imagen de propiedad', blank = True, null = True)
nombrep = models.CharField('Nombre', max_length = 50, blank = False, null = False)
direccion = models.CharField('direccion', max_length = 100)
superficie = models.CharField('Superficie', max_length=50, blank=False,
help_text='Superficie medida en m2'
)
piso = models.IntegerField('Piso')
dpto = models.CharField('Departamento', max_length=4)
orientacion = models.CharField('orientacion', max_length = 5,
choices = ORIENTACION
)
frente_contra = models.CharField('frente-contrafrente', max_length = 15,
choices = FRENTE_CONTRAFRENTE
)
dormitorios = models.IntegerField('Dormitorios', blank=False)
baños = models.IntegerField('Baños', blank=False)
cocina = models.BooleanField('Cocina')
comedor = models.BooleanField('Comedor')
cochera = models.BooleanField('Cochera')
balcon = models.BooleanField('Balcon')
observaciones = models.TextField('observaciones', max_length = 300,
help_text='Datos adicionales para la propiedad',
blank = True,
null = True
)
expensa = models.DecimalField('Expensa', max_digits = 16, decimal_places = 2,
help_text='Precio aproximado'
)
precio = models.DecimalField('Precio', max_digits = 16, decimal_places = 2,
help_text='Valor neto total de la propiedad/alquiler'
)
tpo_propiedad = models.CharField('Tipo de propiedad', max_length = 15,
choices = TPO_PROPIEDAD
)
disponible = models.BooleanField('Disponible')
class Meta:
verbose_name = "Propiedad"
verbose_name_plural = "Propiedades"
def __str__(self):
nombre = str(self.nombrep)
return nombre
class Citas(models.Model):
identificacion = models.AutoField('ID Cita', primary_key = True, unique = True,
help_text='Identificación de propiedad'
)
nombre_apellido = models.CharField('Nombre cliente', max_length=100, blank=False)
num_cliente = models.IntegerField('Cel./Tel.')
email = models.EmailField('Email', blank = True, null = True)
propiedad = models.ForeignKey(Propiedades, on_delete=models.CASCADE)
fecha_cita = models.DateField('Fecha de cita', blank = True, null=True)
hora_cita = models.TimeField('Hora de cita', blank=True, null=True)
agente = models.ForeignKey(Agente, on_delete = models.CASCADE, blank=True, null=True)
estado_cita = models.BooleanField('Atendido/a', blank=True, null=True)
class Meta:
verbose_name = "Cita"
verbose_name_plural = "Citas"
def __str__(self):
id = str(self.identificacion)
return id
class Venta(models.Model):
id_pay = models.AutoField('ID. Pago', primary_key = True, unique = True)
propiedad = models.ForeignKey(Propiedades, on_delete = models.CASCADE)
medio_pago = models.CharField('Medio de Pago', max_length = 25, choices = MEDIO_PAGO)
nya_cliente = models.CharField('Nombre Cliente', max_length = 100, blank = False, null = False)
dni_cli = models.IntegerField('DNI Cliente')
estado_pago = models.CharField('Estado de Pago', max_length = 20, choices = ESTADO_PAGO)
fecha_creacion = models.DateTimeField('Fecha de creacion', auto_now_add= True, editable=False)
class Meta:
verbose_name = "Venta"
verbose_name_plural = "Ventas"
def __str__(self):
id_pago = str(self.id_pay)
return id_pago
class Cliente(models.Model):
nombre = models.CharField('Nombre', max_length = 50, blank = False, null = False)
apellido = models.CharField('apellido', max_length = 50, blank = False, null = False)
domicilio = models.CharField('domicilio', max_length = 100, blank = False, null = False)
dni = models.IntegerField('DNI Cliente')
tpo_cliente = models.CharField('tipo de cliente', max_length = 20, choices = TPO_CLIENTE)
class Meta:
verbose_name = "Cliente"
verbose_name_plural = "Clientes"
def __str__(self):
return self.nombre
|
[
"vera.gastonn@gmail.com"
] |
vera.gastonn@gmail.com
|
ea2e0d118494909d212b7941c5f48b6e19e3df73
|
41c830ef97809c6b696c02948c99a6c0ef06ac1b
|
/week3/todo/auth_/models.py
|
5d136d8784325d3264d29e4cb06cf5a81bd4403a
|
[] |
no_license
|
sidakinaddd/BFDjango
|
b3523fec9d7ad0064848c9d4817c803e57d186e6
|
331f3e9df1af9927f9d6bc5d693619f256f28d24
|
refs/heads/master
| 2020-12-11T11:19:15.780295
| 2020-04-19T21:54:46
| 2020-04-19T21:54:46
| 233,829,497
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,585
|
py
|
from django.contrib.auth.base_user import AbstractBaseUser
from django.contrib.auth.models import PermissionsMixin, UserManager, AbstractUser
from django.contrib.auth.validators import UnicodeUsernameValidator
from django.core.mail import send_mail
from django.db import models
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
class MyUserManager(UserManager):
def create_editor(self, username, email=None, password=None, **extra_fields):
extra_fields.setdefault('is_staff', False)
extra_fields.setdefault('is_superuser', False)
extra_fields.setdefault('is_editor', True)
return self._create_user(username, email, password, **extra_fields)
class MyUser(AbstractUser):
pass
class MyAbstractUser(AbstractBaseUser,PermissionsMixin):
username = models.CharField(max_length=100, unique=True)
    first_name = models.CharField(max_length=50, blank=True)
last_name = models.CharField(max_length=50, blank=True)
email = models.EmailField(blank=True)
is_staff = models.BooleanField(default=False)
is_active = models.BooleanField(default=False)
    date_joined = models.DateTimeField(default=timezone.now)
objects = UserManager()
EMAIL_FIELD = 'email'
USERNAME_FIELD = 'username'
REQUIRED_FIELDS = ['email']
class Meta:
abstract = True
        verbose_name = _('user')
        verbose_name_plural = _('users')
def get_full_name(self):
full_name = '%s %s' % (self.first_name, self.last_name)
return full_name
def get_short_name(self):
return self.first_name
|
[
"sidakinaddd@gmail.com"
] |
sidakinaddd@gmail.com
|
be478a6cbfe4b853f6711255d424dbda052d3cf6
|
a8b143284c5d887a8c808e3449ab734689a86247
|
/10/day10.py
|
aa9f726066c2faa8604bb35d85f3120eea79b4b8
|
[] |
no_license
|
killmaster/adventofcode2016
|
afd2556ae488aef70401d02ea46d5eb344b1d61a
|
9ce4fe39cbf7479685be29433aa437a8168a8e13
|
refs/heads/master
| 2020-06-16T07:26:54.411904
| 2016-12-23T12:12:27
| 2016-12-23T12:12:27
| 75,234,535
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,203
|
py
|
import re
import collections
rule1 = re.compile(r'value (\d+) goes to (\w+ \d+)')
rule2 = re.compile(r'(\w+ \d+) gives low to (\w+ \d+) and high to (\w+ \d+)')
lines = open('input.txt').readlines()
bots = collections.OrderedDict()
for line in lines:
if rule1.match(line):
botid = rule1.match(line).group(2)
value = int(rule1.match(line).group(1))
if botid in bots:
if 'value1' in bots[botid]:
bots[botid]['value2'] = value
else:
bots[botid]['value1'] = value
else:
bots[botid] = {}
bots[botid]['value1'] = value
if rule2.match(line):
botid = rule2.match(line).group(1)
id1 = rule2.match(line).group(2)
id2 = rule2.match(line).group(3)
if botid not in bots:
bots[botid] = {}
bots[botid]['rule'] = []
bots[botid]['rule'].append(id1)
bots[botid]['rule'].append(id2)
if id1.startswith('output') and id1 not in bots:
bots[id1] = []
if id2.startswith('output') and id2 not in bots:
bots[id2] = []
def give(botid, value):
if 'value1' in bots[botid]:
bots[botid]['value2'] = value
else:
bots[botid]['value1'] = value
#print('{} {}'.format(botid,value))
def part1(bots):
flag = True
found = False
while flag:
flag = False
for k,v in bots.items():
#print('{} {}'.format(k,v))
if 'value2' in v:
flag = True
values = sorted([v.pop('value1'), v.pop('value2')])
#print(v['rule'])
if v['rule'][0].startswith('output'):
bots[v['rule'][0]].append(values[0])
else:
give(v['rule'][0], values[0])
if v['rule'][1].startswith('output'):
bots[v['rule'][1]].append(values[1])
else:
give(v['rule'][1], values[1])
if values == [17,61] and not found:
print(k)
found = True
part1(bots)
print(bots['output 0'][0] * bots['output 1'][0] * bots['output 2'][0])
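# The first print above is the Part 1 answer: the id of the bot that compares
# chips 17 and 61. The final print is Part 2: the product of outputs 0, 1 and 2.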
|
[
"carlosmartins8@gmail.com"
] |
carlosmartins8@gmail.com
|
9779357a90fccd1a63511b35172902fb31492148
|
788444066bdd20ccb7d3c743053eb7a5e6dcae09
|
/topic_urls.py
|
a79370e0e059e6243cda81db005c843fd11be712
|
[
"MIT"
] |
permissive
|
mp314/waste.d
|
b7fb077069fd92db8c3e16cf9ab77442e582a112
|
77604e76467cecd061cc0dbf8d8a5fc141f9d5a3
|
refs/heads/main
| 2023-01-09T18:00:59.960433
| 2020-11-04T08:23:07
| 2020-11-04T08:23:07
| 309,759,883
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 187
|
py
|
from django.conf.urls.defaults import *
urlpatterns = patterns('',
(r'post/$','topic_views.post'),
#(r'(?P<topicid>.*)/$','topic_views.index'),
(r'','topic_views.index'),
)
|
[
"pylkkanen@gmail.com"
] |
pylkkanen@gmail.com
|
eecbf583448148ec9571603ebdab287bf511a52f
|
8a8b0267c4db8847a898ac73ccb6e78e1744e24c
|
/fundamentals/ppf-ex08/ppf-ex09/factorial.py
|
1e76fec5709c74a5e1ba2e52da6db990e882f38d
|
[] |
no_license
|
entirelymagic/Link_Academy
|
41ba890df6793924d186ea94dc8d13b0636c6679
|
844c39ff1281fae8406cd1a0dc06afd357f0bef3
|
refs/heads/master
| 2023-06-07T03:17:00.527924
| 2021-07-03T09:59:25
| 2021-07-03T09:59:25
| 314,755,255
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 167
|
py
|
def factorial(n):
res = 1
for i in range(1,n+1):
res *= i
return res
while True:
n = int(input("Enter number: "))
print(factorial(n))
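# Sample interaction: entering 5 prints 120. Note the loop has no exit
# condition, so the program runs until interrupted (e.g. Ctrl+C).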
|
[
"entirelymagic@gmail.com"
] |
entirelymagic@gmail.com
|
c60099d87e9bf3227dd1b3546e417e7e405aee6f
|
6df24c8e7d80704574dfd21886d66b0930851999
|
/MNIST/train_mnist.py
|
fb22e7ba1d12a08306e846e80532bc14dc555425
|
[] |
no_license
|
Seiya-Yamamoto/example_TensorFlow
|
e2e0a31f6432eccb4ab675d7837cd2eaca9438df
|
ad61cd8a82a166e564e597afd48676d0f058b378
|
refs/heads/master
| 2020-03-17T21:10:55.234397
| 2018-05-19T10:56:24
| 2018-05-19T10:56:24
| 133,947,324
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,264
|
py
|
# -*- coding:utf-8 -*-
from tensorflow.examples.tutorials.mnist import input_data
import tensorflow as tf
def main():
    # Load the object that holds the MNIST data
    mnist = input_data.read_data_sets('data/', one_hot=True)
    # Fetch a mini-batch (size 50) of training images and labels
    # (the training loop below re-fetches a fresh batch every step)
    train_images, train_labels = mnist.train.next_batch(50)
    # Fetch all test images
    test_images = mnist.test.images
    # Fetch all test labels
    test_labels = mnist.test.labels
    # Define the input placeholder
    x = tf.placeholder(tf.float32, [None, 784])
    # Input layer to hidden layer 1
    w_1 = tf.Variable(tf.truncated_normal([784, 1000], stddev=0.1), name='w1')
    b_1 = tf.Variable(tf.zeros([1000]), name='b1')
    h_1 = tf.nn.relu(tf.matmul(x, w_1) + b_1)
    # Hidden layer 1 to hidden layer 2
    w_2 = tf.Variable(tf.truncated_normal([1000, 1000], stddev=0.1), name='w2')
    b_2 = tf.Variable(tf.zeros([1000]), name='b2')
    h_2 = tf.nn.relu(tf.matmul(h_1, w_2) + b_2)
    # Hidden layer 2 to the output layer
    w_3 = tf.Variable(tf.truncated_normal([1000, 10], stddev=0.1), name='w3')
    b_3 = tf.Variable(tf.zeros([10]), name='b3')
    out = tf.nn.softmax(tf.matmul(h_2, w_3) + b_3)
    # Loss function (mean squared error)
    y = tf.placeholder(tf.float32, [None, 10])
    loss = tf.reduce_mean(tf.square(y - out))
    # Training step
    train_step = tf.train.GradientDescentOptimizer(0.5).minimize(loss)
    # Evaluation
    correct = tf.equal(tf.argmax(out, 1), tf.argmax(y, 1))
    accuracy = tf.reduce_mean(tf.cast(correct, tf.float32))
    # Variable initializer
    init = tf.global_variables_initializer()
    with tf.Session() as sess:
        # Run variable initialization
sess.run(init)
for i in range(1500):
step = i + 1
train_images, train_labels = mnist.train.next_batch(50)
sess.run(train_step, feed_dict={x: train_images, y: train_labels})
if step % 10 == 0:
acc_val = sess.run(accuracy, feed_dict={
x: test_images, y: test_labels})
print('Step %d: accuracy = %.2f' % (step, acc_val))
if __name__ == '__main__':
main()
|
[
"linux.yamagen@gmail.com"
] |
linux.yamagen@gmail.com
|
61cc88976873bcfe9505f2d7b64690bb756da0ac
|
060b64eda0f17f975e40d5b48f0e0111462f1670
|
/백준저지/파이썬코드/8-2.py
|
6549b1afc65f43f5c6e0356d392fcb78181b9e5b
|
[] |
no_license
|
LEE-JAEHAK/Algorithm
|
1fb4546e0ee134d13b43c53a4de6b23d313e9086
|
8df48259edf9ae96d56f5d9a24f86bac54bdc12b
|
refs/heads/master
| 2023-05-24T10:37:43.218844
| 2021-06-25T10:17:10
| 2021-06-25T10:17:10
| 304,694,972
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 184
|
py
|
memo = [0] * 100  # memoization table: memo[x] caches fib(x); 0 means "not computed yet"
def fib(x):
if x == 1 or x == 2:
return 1
if memo[x] != 0:
return memo[x]
memo[x] = fib(x-1) + fib(x-2)
return memo[x]
print(fib(90))
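# With memoization the recursion is O(n); fib(90) fits in the table of size 100
# and prints 2880067194370816120.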
|
[
"wogkr101@gmail.com"
] |
wogkr101@gmail.com
|
16edaa87d2d0ab903b85c77d613befdb52240f7b
|
3bee45ec4afdff252007beb9680f254854e61cfe
|
/Squeeze/squeeze.py
|
e963425c675797c8a9611abcbf80be32d40663dd
|
[] |
no_license
|
adelevski/finance_dashboards
|
9aef76c82cd561736ddc45438e5cd1e3257f584a
|
ab15462cf545ade37106c5b357ee3973bc6b9b90
|
refs/heads/main
| 2023-08-18T12:27:54.115696
| 2021-10-22T01:02:54
| 2021-10-22T01:02:54
| 337,586,586
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,820
|
py
|
import os
import yfinance as yf
import pandas as pd
import plotly.graph_objects as go
symbols = ['AAPL']
for filename in os.listdir("datasets/daily"):
symbol = filename.split(".")[0]
df = pd.read_csv(f'datasets/daily/{filename}')
if df.empty:
continue
df['20sma'] = df['Close'].rolling(window=20).mean()
df['std'] = df['Close'].rolling(window=20).std()
df['lowerband'] = df['20sma'] - (2 * df['std'])
df['upperband'] = df['20sma'] + (2 * df['std'])
df['TR'] = abs(df['High']-df['Low'])
df['ATR'] = df['TR'].rolling(window=20).mean()
df['upperKC'] = df['20sma'] + (df['ATR'] * 1.5)
df['lowerKC'] = df['20sma'] - (df['ATR'] * 1.5)
def in_squeeze(df):
return df['lowerband']>df['lowerKC'] and df['upperband']<df['upperKC']
df['squeeze_on'] = df.apply(in_squeeze, axis=1)
if df.iloc[-3]['squeeze_on'] and not df.iloc[-1]['squeeze_on']:
print(f"{symbol} is coming out of the squeeze")
# if symbol in symbols:
# aapl_df = df
# candlestick = go.Candlestick(x=aapl_df['Date'], open=aapl_df['Open'], high=aapl_df['High'], low=aapl_df['Low'], close=aapl_df['Close'])
# upperband = go.Scatter(x=aapl_df['Date'], y=aapl_df['upperband'], name='Upper Bollinger Band', line={'color': 'orange'})
# lowerband = go.Scatter(x=aapl_df['Date'], y=aapl_df['lowerband'], name='Lower Bollinger Band', line={'color': 'orange'})
# upperKC = go.Scatter(x=aapl_df['Date'], y=aapl_df['upperKC'], name='Upper Keltner Channel', line={'color': 'blue'})
# lowerKC = go.Scatter(x=aapl_df['Date'], y=aapl_df['lowerKC'], name='Lower Keltner Channel', line={'color': 'blue'})
# fig = go.Figure(data=[candlestick, upperband, lowerband, upperKC, lowerKC])
# fig.layout.xaxis.type = 'category'
# fig.layout.xaxis.rangeslider.visible = False
# fig.show()
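# Squeeze recap with hypothetical numbers: a bar with lowerband=98 > lowerKC=97
# and upperband=102 < upperKC=103 is "squeeze on" (both Bollinger Bands inside
# the Keltner Channel); the alert fires when squeeze_on was True three bars ago
# but is False on the latest bar.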
|
[
"adelevski@gmail.com"
] |
adelevski@gmail.com
|
b29a4e91beb1a9edf20222c85bb6c9f11c3a5d6e
|
14c26c5ed676f0df4b8e67a0d0ec48155686c7a4
|
/setup.py
|
34215ee5d9675681800c7ea16f126fabd36712b0
|
[
"MIT"
] |
permissive
|
mahi97/RCJRVision
|
b21f85f8ac6a60fda691d9e1a1ed53cabd4abc68
|
0f73d1a1f2ac2eca02bef47699f9c1afdc6c1617
|
refs/heads/master
| 2022-09-03T18:23:31.402983
| 2020-05-07T00:50:33
| 2020-05-07T00:50:33
| 267,239,947
| 0
| 0
|
MIT
| 2020-05-27T06:35:02
| 2020-05-27T06:33:54
| null |
UTF-8
|
Python
| false
| false
| 1,474
|
py
|
"""Setup module."""
from setuptools import setup, find_packages
from os import path
def get_requires():
    """Read requirements.txt."""
    with open("requirements.txt", "r") as f:
        requirements = f.read()
    return list(filter(lambda x: x != "", requirements.split()))
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.md')) as f:
long_description = f.read()
setup(
# Name of Project
# $ pip install RCJRVision
# where it will live on PyPI: https://pypi.org/project/RCJRVision/
name='RCJRVision',
version='1.5',
description='A fast and simple image processing method to detect H S U victims in rescue maze',
long_description=long_description,
long_description_content_type='text/markdown',
url='https://github.com/mhmmdshirazi/RCJRVision',
author='Mohammad Mahdi Shirazi',
author_email='mhmmdshirazi@gmail.com',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
],
packages=['RCJRVision'],
python_requires='>=3.5, <4',
install_requires=get_requires(),
)
|
[
"mhmmdshirazi@gmail.com"
] |
mhmmdshirazi@gmail.com
|
a3a167d6317c1fda6c2a2f5360dd5acfe9c8076b
|
6b2d3a5beaed0f1a52a978d6bba6ee19301f8468
|
/src/pickyoptions/core/configuration/child.py
|
7a0473d8259c90720d2d9eccb859c08904eb69b4
|
[] |
no_license
|
nickmflorin/pickyoptions
|
4fb0e01845414d6a9419f94024672428cbaea1af
|
68e07ac9d1d040cdab2f3e0eefb2fe884a17d875
|
refs/heads/master
| 2022-12-18T03:54:25.004279
| 2020-09-20T18:12:52
| 2020-09-20T18:12:52
| 292,388,986
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,911
|
py
|
import logging
from pickyoptions import settings
from pickyoptions.lib.utils import extends_or_instance_of
from pickyoptions.core.base import Base, BaseMixin
from pickyoptions.core.decorators import raise_with_error
from pickyoptions.core.exceptions import (
PickyOptionsError,
ValueTypeError,
)
logger = logging.getLogger(settings.PACKAGE_NAME)
class ChildMixin(BaseMixin):
abstract_properties = ('parent_cls', )
required_errors = (
'set_error',
'not_set_error',
'required_error',
'not_required_error',
'invalid_type_error',
'invalid_error',
'locked_error',
'not_null_error',
)
def _init(self, parent=None, error_map=None):
self._assigned = False
self._parent = None
if parent is not None:
self.assign_parent(parent)
@property
def field(self):
return self._field
def raise_with_self(self, *args, **kwargs):
kwargs['name'] = self.field
return super(ChildMixin, self).raise_with_self(*args, **kwargs)
def assert_set(self, *args, **kwargs):
if not self.set:
self.raise_not_set(*args, **kwargs)
def assert_not_set(self, *args, **kwargs):
if self.set:
self.raise_set(*args, **kwargs)
def assert_required(self, *args, **kwargs):
if not self.required:
self.raise_not_required(*args, **kwargs)
def assert_not_required(self, *args, **kwargs):
if self.required:
self.raise_required(*args, **kwargs)
@raise_with_error(error='set_error')
def raise_set(self, *args, **kwargs):
"""
Raises an exception to indicate that the `obj:Child` instance is set
when it is not expected to be set.
"""
return self.raise_with_self(*args, **kwargs)
@raise_with_error(error='not_set_error')
def raise_not_set(self, *args, **kwargs):
"""
Raises an exception to indicate that the `obj:Child` instance is not
set when it is expected to be set.
"""
return self.raise_with_self(*args, **kwargs)
@raise_with_error(error='locked_error')
def raise_locked(self, *args, **kwargs):
"""
Raises an exception to indicate that the `obj:Child` instance is
locked and cannot be altered.
"""
return self.raise_with_self(*args, **kwargs)
@raise_with_error(error='required_error')
def raise_required(self, *args, **kwargs):
"""
Raises an exception to indicate that the `obj:Child` instance is
required and does not exist.
"""
return self.raise_with_self(*args, **kwargs)
# TODO: Come up with extensions for the specific object.
# TODO: Is this even being used anymore?
@raise_with_error(error='not_required_error')
def raise_not_required(self, *args, **kwargs):
return self.raise_with_self(*args, **kwargs)
@raise_with_error(error='not_null_error')
def raise_null_not_allowed(self, *args, **kwargs):
return self.raise_with_self(*args, **kwargs)
@raise_with_error(error='invalid_error')
def raise_invalid(self, *args, **kwargs):
"""
Raises an exception to indicate that the `obj:Child` instance is invalid.
"""
return self.raise_with_self(*args, **kwargs)
@raise_with_error(error='invalid_type_error')
def raise_invalid_type(self, *args, **kwargs):
"""
Raises an exception to indicate that the `obj:Child` instance is of
invalid type.
"""
assert 'types' in kwargs
return self.raise_invalid(*args, **kwargs)
@property
def assigned(self):
return self._assigned
@property
def parent(self):
"""
Returns the assigned parent of the `obj:Child` instance if it is
assigned. If the parent is not assigned to the `obj:Child` instance,
an exception will be thrown - so it is necessary to check if the
`obj:Child` is assigned before accessing the parent.
TODO:
----
- What should we do if the parent is changed? We don't have a setter
for parent, but the parent can still be removed/re-assigned. Do we
have to trigger some sort of reconfiguration?
"""
if self._parent is None:
raise PickyOptionsError(
"The %s instance has not been assigned a parent yet."
% self.__class__.__name__
)
return self._parent
def validate_parent(self, parent):
if not extends_or_instance_of(parent, self.parent_cls):
# TODO: Come up with a better error.
raise ValueTypeError(
value=parent,
message="The parent must be of type `{types}`.",
types=self.parent_cls,
)
def remove_parent(self):
"""
Removes the parent from the `obj:Child` instance.
"""
if not self.assigned:
raise PickyOptionsError(
"The %s instance does not have an assigned parent."
% self.__class__.__name__
)
self._parent = None
self._assigned = False
def assign_parent(self, parent):
"""
Assigns an `obj:Parent` (or another object) instance as the parent of
the `obj:Child` instance.
Parameters:
----------
parent: `obj:object`
The parent which we want to assign to this `obj:Child` instance.
            The parent can be arbitrary - it need not be an instance of
            `obj:Parent` or have this `obj:Child` class defined in its
            `child_cls` property. This is because there are cases where we want
to assign a `obj:Parent` to a `obj:Child` but not assign the
`obj:Child` as a child to the `obj:Parent`. Usually, this is because
the `obj:Parent` is a parent to another set of children.
"""
from .parent import Parent
self.validate_parent(parent)
if self.assigned:
raise PickyOptionsError(
"The %s instance already has an assigned parent."
% self.__class__.__name__
)
self._parent = parent
if isinstance(parent, Parent) and isinstance(self, parent.child_cls):
if not parent.has_child(self):
parent.assign_child(self)
else:
logger.debug(
"Not adding child %s instance as a child of the parent, "
"since it is already a child." % self.__class__.__name__
)
self._assigned = True
class Child(ChildMixin, Base):
__abstract__ = True
    def __init__(self, parent=None, **kwargs):
        super(Child, self).__init__()
        # _init is a plain function on the mixin, so `self` must be passed explicitly
        ChildMixin._init(self, parent=parent, **kwargs)
|
[
"nickmflorin@gmail.com"
] |
nickmflorin@gmail.com
|
bf85b4c2e99a6d2c06480e307f6e2aa43e16059b
|
2d688097c44c37084a494f5b34efce64493c7ef7
|
/update_stk_json.py
|
e92478255bc01d38c4b54e9474afe4d0ff80fedf
|
[] |
no_license
|
lukasturcani/chem_tools
|
98d302670abfae963936ded3caf5ffbd1c2191c1
|
75120e1112b920739248882e6d1060a260cae15c
|
refs/heads/master
| 2021-06-04T02:26:00.323449
| 2019-08-04T00:46:26
| 2019-08-04T00:46:26
| 108,840,929
| 2
| 3
| null | 2020-08-28T12:57:21
| 2017-10-30T11:22:59
|
Python
|
UTF-8
|
Python
| false
| false
| 2,635
|
py
|
import json
import argparse
import multiprocessing as mp
import stk
import rdkit.Chem.AllChem as rdkit
def load_dict(filename):
with open(filename, 'r') as f:
content = json.load(f)
return content
def write_dict(d, filename):
with open(filename, 'w') as f:
json.dump(d, f, indent=4)
def convert_bb_counter(counter):
return [[convert_bb(bb), count] for bb, count in counter]
def convert_bb(bb):
new_bb = {'atom_props': {}}
for key, value in bb.items():
if key == 'func_grp':
new_bb['func_groups'] = [value]
        elif key == 'mol_block':
            # presumably a validation step: constructing the StructUnit raises if
            # the mol block is malformed; the instance itself is discarded
            stk.StructUnit(rdkit.MolFromMolBlock(value, removeHs=False))
            new_bb['conformers'] = [[0, value]]
else:
new_bb[key] = value
return new_bb
def convert(key, value):
if key == 'mol_block':
return 'conformers', [[0, value]]
elif key == 'topology':
new_value = value.replace('bb_assignments', 'bb_positions')
return 'topology', new_value
elif key == 'bb_counter':
return key, convert_bb_counter(value)
elif key == 'building_blocks':
return key, [convert_bb(bb) for bb in value]
else:
return key, value
def macromol_func_groups(atom_props):
func_groups = {}
for atom, props in atom_props.items():
if 'fg_id' in props:
fg_id = props['fg_id']
if fg_id not in func_groups:
func_groups[fg_id] = stk.FunctionalGroup(
id_=props['fg_id'],
atom_ids=[],
bonder_ids=[],
deleter_ids=[],
info=props['fg']
)
fg = func_groups[fg_id]
atom_id = int(atom)
fg.atom_ids.append(atom_id)
if 'bonder' in props:
fg.bonder_ids.append(atom_id)
return repr(list(func_groups.values()))
def convert_macromol(macromol):
new = {}
for key, value in macromol.items():
new_key, new_value = convert(key, value)
new[new_key] = new_value
new['func_groups'] = macromol_func_groups(macromol['atom_props'])
return new
def main():
parser = argparse.ArgumentParser()
parser.add_argument('old_json')
parser.add_argument('new_json')
args = parser.parse_args()
pop = load_dict(args.old_json)
with mp.Pool() as pool:
new_pop = pool.map(convert_macromol, pop)
write_dict(new_pop, args.new_json)
if __name__ == '__main__':
main()
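# Command-line usage (file names are placeholders):
#   python update_stk_json.py old_population.json new_population.json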
|
[
"lukasturcani93@gmail.com"
] |
lukasturcani93@gmail.com
|
13a25905f5fd3ae343b1417b4c68ea593bf84b22
|
ccd9f9b59074cc710a7bc94c86efc6ceffd5f56a
|
/poo/article/__init__.py
|
71f5ac48edd1aa941ce4e02af7b8a2dc5dbacba4
|
[] |
no_license
|
krosf-university/oop-python
|
8899d0c4e9e5c780999a59db5a6884f4006eb5b9
|
50cc88d074c67aa2c225d60cadd875d59040c32c
|
refs/heads/master
| 2022-01-15T01:35:29.516182
| 2019-08-14T18:16:31
| 2019-08-14T18:16:31
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 232
|
py
|
from .article import Article, EBook, Book, USB, PsycalArticle, ArticleException
from .author import Author
__all__ = [
"Article",
"EBook",
"Book",
"USB",
"PsycalArticle",
"Author",
"ArticleException",
]
|
[
"rodrigosanabria22@gmail.com"
] |
rodrigosanabria22@gmail.com
|
04540cf1f86fd312a71a789d3edfb1386bd05bf3
|
85a9ffeccb64f6159adbd164ff98edf4ac315e33
|
/pysnmp/CISCO-EPC-GATEWAY-MIB.py
|
ae6b7a3a41c6bc0948a86237d93017c15bfe3978
|
[
"Apache-2.0"
] |
permissive
|
agustinhenze/mibs.snmplabs.com
|
5d7d5d4da84424c5f5a1ed2752f5043ae00019fb
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
refs/heads/master
| 2020-12-26T12:41:41.132395
| 2019-08-16T15:51:41
| 2019-08-16T15:53:57
| 237,512,469
| 0
| 0
|
Apache-2.0
| 2020-01-31T20:41:36
| 2020-01-31T20:41:35
| null |
UTF-8
|
Python
| false
| false
| 28,904
|
py
|
#
# PySNMP MIB module CISCO-EPC-GATEWAY-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-EPC-GATEWAY-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 17:40:03 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ConstraintsIntersection, ConstraintsUnion, ValueRangeConstraint, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ConstraintsIntersection", "ConstraintsUnion", "ValueRangeConstraint", "SingleValueConstraint")
ciscoMgmt, = mibBuilder.importSymbols("CISCO-SMI", "ciscoMgmt")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
ObjectGroup, NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "ObjectGroup", "NotificationGroup", "ModuleCompliance")
IpAddress, Counter64, ModuleIdentity, Bits, Unsigned32, iso, TimeTicks, Integer32, NotificationType, MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier, ObjectIdentity, Gauge32, Counter32 = mibBuilder.importSymbols("SNMPv2-SMI", "IpAddress", "Counter64", "ModuleIdentity", "Bits", "Unsigned32", "iso", "TimeTicks", "Integer32", "NotificationType", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "MibIdentifier", "ObjectIdentity", "Gauge32", "Counter32")
TruthValue, TextualConvention, TimeStamp, DisplayString, TimeInterval = mibBuilder.importSymbols("SNMPv2-TC", "TruthValue", "TextualConvention", "TimeStamp", "DisplayString", "TimeInterval")
ciscoEpcGatewayMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 9, 731))
ciscoEpcGatewayMIB.setRevisions(('2012-02-08 00:00', '2011-05-10 00:00', '2011-03-04 00:00', '2010-06-28 00:00', '2010-05-06 00:00', '2010-04-21 00:00',))
if mibBuilder.loadTexts: ciscoEpcGatewayMIB.setLastUpdated('201202080000Z')
if mibBuilder.loadTexts: ciscoEpcGatewayMIB.setOrganization('Cisco Systems, Inc.')
ciscoEpcGatewayMIBNotifications = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 731, 0))
ciscoEpcGatewayMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 731, 1))
ciscoEpcGatewayStatistics = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 1))
cegOverloadProtectionStats = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 1, 1))
cegBufferStats = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 1, 2))
ciscoEpcGatewayConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 2))
cegOverloadProtectionConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 2, 1))
cegBufferingAgentConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 2, 2))
ciscoEpcGatewayStatus = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 3))
ciscoEpcGatewayNotifMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 4))
cegCongestionIncomingReqDrops = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 1, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cegCongestionIncomingReqDrops.setStatus('current')
cegCongestionLowThresholdReached = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 1, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cegCongestionLowThresholdReached.setStatus('current')
cegCongestionHighThresholdReached = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 1, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cegCongestionHighThresholdReached.setStatus('current')
cegBuffersCreated = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 1, 2, 1), Counter32()).setUnits('buffer').setMaxAccess("readonly")
if mibBuilder.loadTexts: cegBuffersCreated.setStatus('current')
cegBuffersDeleted = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 1, 2, 2), Counter32()).setUnits('buffer').setMaxAccess("readonly")
if mibBuilder.loadTexts: cegBuffersDeleted.setStatus('current')
cegBuffersTimedOut = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 1, 2, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cegBuffersTimedOut.setStatus('current')
cegBufferPacketsEnqueued = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 1, 2, 4), Counter32()).setUnits('packet').setMaxAccess("readonly")
if mibBuilder.loadTexts: cegBufferPacketsEnqueued.setStatus('current')
cegBufferPacketsDequeued = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 1, 2, 5), Counter32()).setUnits('packet').setMaxAccess("readonly")
if mibBuilder.loadTexts: cegBufferPacketsDequeued.setStatus('current')
cegBufferBytesEnqueued = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 1, 2, 6), Counter32()).setUnits('Bytes').setMaxAccess("readonly")
if mibBuilder.loadTexts: cegBufferBytesEnqueued.setStatus('current')
cegBufferBytesDequeued = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 1, 2, 7), Counter32()).setUnits('Bytes').setMaxAccess("readonly")
if mibBuilder.loadTexts: cegBufferBytesDequeued.setStatus('current')
cegBufferRejMemUnavailable = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 1, 2, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cegBufferRejMemUnavailable.setStatus('current')
cegBufferRejLowMem = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 1, 2, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cegBufferRejLowMem.setStatus('current')
cegPacketDropDueToMaxPacketLimit = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 1, 2, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cegPacketDropDueToMaxPacketLimit.setStatus('current')
cegPacketDropDueToMaxBufferLimit = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 1, 2, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cegPacketDropDueToMaxBufferLimit.setStatus('current')
cegCongestionLowThreshold = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 2, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 100)).clone(95)).setUnits('percent').setMaxAccess("readwrite")
if mibBuilder.loadTexts: cegCongestionLowThreshold.setStatus('current')
cegCongestionHighThreshold = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 2, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 100)).clone(100)).setUnits('percent').setMaxAccess("readwrite")
if mibBuilder.loadTexts: cegCongestionHighThreshold.setStatus('current')
cegBufferingAgentEnabled = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 2, 2, 1), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cegBufferingAgentEnabled.setStatus('current')
cegBufferMaxSize = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 2, 2, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(400, 12000)).clone(1024)).setUnits('Bytes').setMaxAccess("readwrite")
if mibBuilder.loadTexts: cegBufferMaxSize.setStatus('current')
cegBufferDiscardDataTime = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 2, 2, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 300)).clone(30)).setUnits('second').setMaxAccess("readwrite")
if mibBuilder.loadTexts: cegBufferDiscardDataTime.setStatus('current')
cegBufferMaxPacketsPerBuffer = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 2, 2, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 15)).clone(5)).setUnits('packet').setMaxAccess("readwrite")
if mibBuilder.loadTexts: cegBufferMaxPacketsPerBuffer.setStatus('current')
cegVersion = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 3, 1), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1, 100))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cegVersion.setStatus('current')
cegActivatedIpv4Bearers = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 3, 2), Gauge32()).setUnits('bearer').setMaxAccess("readonly")
if mibBuilder.loadTexts: cegActivatedIpv4Bearers.setStatus('current')
cegActivatedIpv6Bearers = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 3, 3), Gauge32()).setUnits('bearer').setMaxAccess("readonly")
if mibBuilder.loadTexts: cegActivatedIpv6Bearers.setStatus('current')
cegTotalUsers = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 3, 4), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cegTotalUsers.setStatus('current')
cegTotalIdleUsers = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 3, 5), Gauge32()).setUnits('Users').setMaxAccess("readonly")
if mibBuilder.loadTexts: cegTotalIdleUsers.setStatus('current')
cegTotalSuspendedUsers = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 3, 6), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cegTotalSuspendedUsers.setStatus('current')
cegActivatedIpv4v6Sessions = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 3, 7), Gauge32()).setUnits('sessions').setMaxAccess("readonly")
if mibBuilder.loadTexts: cegActivatedIpv4v6Sessions.setStatus('current')
cegActivatedIpv4v6Bearers = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 3, 10), Gauge32()).setUnits('bearers').setMaxAccess("readonly")
if mibBuilder.loadTexts: cegActivatedIpv4v6Bearers.setStatus('current')
cegActivatedGtpv2SgwSessions = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 3, 11), Gauge32()).setUnits('sessions').setMaxAccess("readonly")
if mibBuilder.loadTexts: cegActivatedGtpv2SgwSessions.setStatus('current')
cegActivatedGtpv2PgwSessions = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 3, 12), Gauge32()).setUnits('sessions').setMaxAccess("readonly")
if mibBuilder.loadTexts: cegActivatedGtpv2PgwSessions.setStatus('current')
cegActivatedGtpv2SPgwSessions = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 3, 13), Gauge32()).setUnits('sessions').setMaxAccess("readonly")
if mibBuilder.loadTexts: cegActivatedGtpv2SPgwSessions.setStatus('current')
cegOverloadProtectionStatus = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 3, 8))
cegCongestionDfpWeight = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 3, 8, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cegCongestionDfpWeight.setStatus('current')
cegCongestionStatus = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 3, 8, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("normal", 1), ("low", 2), ("high", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cegCongestionStatus.setStatus('current')
cegCongestionLowLastOccurTime = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 3, 8, 3), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cegCongestionLowLastOccurTime.setStatus('current')
cegCongestionLowLastDuration = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 3, 8, 4), TimeInterval()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cegCongestionLowLastDuration.setStatus('current')
cegCongestionHighLastOccurTime = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 3, 8, 5), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cegCongestionHighLastOccurTime.setStatus('current')
cegCongestionHighLastDuration = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 3, 8, 6), TimeInterval()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cegCongestionHighLastDuration.setStatus('current')
cegBufferStatus = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 3, 9))
cegActivatedBearers = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 3, 14), Gauge32()).setUnits('Bearers').setMaxAccess("readonly")
if mibBuilder.loadTexts: cegActivatedBearers.setStatus('current')
cegActivatedDedicatedBearers = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 3, 15), Gauge32()).setUnits('Bearers').setMaxAccess("readonly")
if mibBuilder.loadTexts: cegActivatedDedicatedBearers.setStatus('current')
cegActivatedIpv4DedicatedBearers = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 3, 16), Gauge32()).setUnits('Bearers').setMaxAccess("readonly")
if mibBuilder.loadTexts: cegActivatedIpv4DedicatedBearers.setStatus('current')
cegActivatedIpv6DedicatedBearers = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 3, 17), Gauge32()).setUnits('Bearers').setMaxAccess("readonly")
if mibBuilder.loadTexts: cegActivatedIpv6DedicatedBearers.setStatus('current')
cegTotalIdleSessions = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 3, 18), Gauge32()).setUnits('Sessions').setMaxAccess("readonly")
if mibBuilder.loadTexts: cegTotalIdleSessions.setStatus('current')
cegTotalInUseBuffers = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 3, 9, 1), Gauge32()).setUnits('buffer').setMaxAccess("readonly")
if mibBuilder.loadTexts: cegTotalInUseBuffers.setStatus('current')
cegTotalBufferedPackets = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 3, 9, 2), Gauge32()).setUnits('packet').setMaxAccess("readonly")
if mibBuilder.loadTexts: cegTotalBufferedPackets.setStatus('current')
cegTotalBufferedBytes = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 3, 9, 3), Gauge32()).setUnits('Bytes').setMaxAccess("readonly")
if mibBuilder.loadTexts: cegTotalBufferedBytes.setStatus('current')
cegTotalBufferAvailable = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 3, 9, 4), Gauge32()).setUnits('Bytes').setMaxAccess("readonly")
if mibBuilder.loadTexts: cegTotalBufferAvailable.setStatus('current')
cegCongestionHighNotifEnable = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 4, 1), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cegCongestionHighNotifEnable.setStatus('current')
cegCongestionLowNotifEnable = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 4, 2), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cegCongestionLowNotifEnable.setStatus('current')
cegCongestionClearNotifEnable = MibScalar((1, 3, 6, 1, 4, 1, 9, 9, 731, 1, 4, 3), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cegCongestionClearNotifEnable.setStatus('current')
cegCongestionHighThresholdNotif = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 731, 0, 1)).setObjects(("CISCO-EPC-GATEWAY-MIB", "cegVersion"), ("CISCO-EPC-GATEWAY-MIB", "cegCongestionDfpWeight"), ("CISCO-EPC-GATEWAY-MIB", "cegCongestionStatus"), ("CISCO-EPC-GATEWAY-MIB", "cegCongestionHighThreshold"))
if mibBuilder.loadTexts: cegCongestionHighThresholdNotif.setStatus('current')
cegCongestionLowThresholdNotif = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 731, 0, 2)).setObjects(("CISCO-EPC-GATEWAY-MIB", "cegVersion"), ("CISCO-EPC-GATEWAY-MIB", "cegCongestionDfpWeight"), ("CISCO-EPC-GATEWAY-MIB", "cegCongestionStatus"), ("CISCO-EPC-GATEWAY-MIB", "cegCongestionLowThreshold"), ("CISCO-EPC-GATEWAY-MIB", "cegCongestionHighThreshold"))
if mibBuilder.loadTexts: cegCongestionLowThresholdNotif.setStatus('current')
cegCongestionClearedNotif = NotificationType((1, 3, 6, 1, 4, 1, 9, 9, 731, 0, 3)).setObjects(("CISCO-EPC-GATEWAY-MIB", "cegVersion"), ("CISCO-EPC-GATEWAY-MIB", "cegCongestionDfpWeight"), ("CISCO-EPC-GATEWAY-MIB", "cegCongestionStatus"), ("CISCO-EPC-GATEWAY-MIB", "cegCongestionLowThreshold"))
if mibBuilder.loadTexts: cegCongestionClearedNotif.setStatus('current')
ciscoEpcGatewayMIBConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 731, 3))
ciscoEpcGatewayMIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 731, 3, 1))
ciscoEpcGatewayMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 9, 731, 3, 2))
ciscoEpcGatewayMIBCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 731, 3, 1, 1)).setObjects(("CISCO-EPC-GATEWAY-MIB", "cegSystemStatusGrp"), ("CISCO-EPC-GATEWAY-MIB", "cegOverloadProtectionStatsGrp"), ("CISCO-EPC-GATEWAY-MIB", "cegOverloadProtectionConfigGrp"), ("CISCO-EPC-GATEWAY-MIB", "cegOverloadProtectionStatusGrp"), ("CISCO-EPC-GATEWAY-MIB", "cegOverloadProtectionNotifMgmtGrp"), ("CISCO-EPC-GATEWAY-MIB", "cegOverloadProtectionNotifGrp"), ("CISCO-EPC-GATEWAY-MIB", "cegBufferingAgentStatsGrp"), ("CISCO-EPC-GATEWAY-MIB", "cegBufferingAgentConfigGrp"), ("CISCO-EPC-GATEWAY-MIB", "cegBufferingAgentStatusGrp"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoEpcGatewayMIBCompliance = ciscoEpcGatewayMIBCompliance.setStatus('deprecated')
ciscoEpcGatewayMIBComplianceRev1 = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 731, 3, 1, 2)).setObjects(("CISCO-EPC-GATEWAY-MIB", "cegSystemStatusGrp"), ("CISCO-EPC-GATEWAY-MIB", "cegOverloadProtectionStatsGrp"), ("CISCO-EPC-GATEWAY-MIB", "cegOverloadProtectionConfigGrp"), ("CISCO-EPC-GATEWAY-MIB", "cegOverloadProtectionStatusGrp"), ("CISCO-EPC-GATEWAY-MIB", "cegOverloadProtectionNotifMgmtGrp"), ("CISCO-EPC-GATEWAY-MIB", "cegOverloadProtectionNotifGrp"), ("CISCO-EPC-GATEWAY-MIB", "cegSystemStatusGrpSup1"), ("CISCO-EPC-GATEWAY-MIB", "cegBufferingAgentStatsGrp"), ("CISCO-EPC-GATEWAY-MIB", "cegBufferingAgentConfigGrp"), ("CISCO-EPC-GATEWAY-MIB", "cegBufferingAgentStatusGrp"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoEpcGatewayMIBComplianceRev1 = ciscoEpcGatewayMIBComplianceRev1.setStatus('deprecated')
ciscoEPCGatewayMIBComplianceRev2 = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 731, 3, 1, 3)).setObjects(("CISCO-EPC-GATEWAY-MIB", "cegSystemStatusGrp"), ("CISCO-EPC-GATEWAY-MIB", "cegOverloadProtectionConfigGrp"), ("CISCO-EPC-GATEWAY-MIB", "cegOverloadProtectionNotifMgmtGrp"), ("CISCO-EPC-GATEWAY-MIB", "cegOverloadProtectionStatsGrp"), ("CISCO-EPC-GATEWAY-MIB", "cegOverloadProtectionStatusGrp"), ("CISCO-EPC-GATEWAY-MIB", "cegOverloadProtectionNotifGrp"), ("CISCO-EPC-GATEWAY-MIB", "cegSystemStatusGrpSup1"), ("CISCO-EPC-GATEWAY-MIB", "cegSystemStatusGrpSup2"), ("CISCO-EPC-GATEWAY-MIB", "cegBufferingAgentStatsGrp"), ("CISCO-EPC-GATEWAY-MIB", "cegBufferingAgentConfigGrp"), ("CISCO-EPC-GATEWAY-MIB", "cegBufferingAgentStatusGrp"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoEPCGatewayMIBComplianceRev2 = ciscoEPCGatewayMIBComplianceRev2.setStatus('deprecated')
ciscoEpcGatewayMIBComplianceRev3 = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 9, 731, 3, 1, 4)).setObjects(("CISCO-EPC-GATEWAY-MIB", "cegSystemStatusGrp"), ("CISCO-EPC-GATEWAY-MIB", "cegOverloadProtectionStatsGrp"), ("CISCO-EPC-GATEWAY-MIB", "cegOverloadProtectionConfigGrp"), ("CISCO-EPC-GATEWAY-MIB", "cegOverloadProtectionStatusGrp"), ("CISCO-EPC-GATEWAY-MIB", "cegOverloadProtectionNotifMgmtGrp"), ("CISCO-EPC-GATEWAY-MIB", "cegOverloadProtectionNotifGrp"), ("CISCO-EPC-GATEWAY-MIB", "cegSystemStatusGrpSup1"), ("CISCO-EPC-GATEWAY-MIB", "cegSystemStatusGrpSup2"), ("CISCO-EPC-GATEWAY-MIB", "cegSystemStatusGrpSup3"), ("CISCO-EPC-GATEWAY-MIB", "cegBufferingAgentStatsGrp"), ("CISCO-EPC-GATEWAY-MIB", "cegBufferingAgentConfigGrp"), ("CISCO-EPC-GATEWAY-MIB", "cegBufferingAgentStatusGrp"), ("CISCO-EPC-GATEWAY-MIB", "cegBufferingAgentStatsGrpSup1"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoEpcGatewayMIBComplianceRev3 = ciscoEpcGatewayMIBComplianceRev3.setStatus('current')
cegSystemStatusGrp = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 731, 3, 2, 1)).setObjects(("CISCO-EPC-GATEWAY-MIB", "cegVersion"), ("CISCO-EPC-GATEWAY-MIB", "cegActivatedIpv4Bearers"), ("CISCO-EPC-GATEWAY-MIB", "cegActivatedIpv6Bearers"), ("CISCO-EPC-GATEWAY-MIB", "cegTotalUsers"), ("CISCO-EPC-GATEWAY-MIB", "cegTotalSuspendedUsers"), ("CISCO-EPC-GATEWAY-MIB", "cegActivatedIpv4v6Sessions"), ("CISCO-EPC-GATEWAY-MIB", "cegTotalIdleUsers"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cegSystemStatusGrp = cegSystemStatusGrp.setStatus('current')
cegOverloadProtectionStatsGrp = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 731, 3, 2, 2)).setObjects(("CISCO-EPC-GATEWAY-MIB", "cegCongestionIncomingReqDrops"), ("CISCO-EPC-GATEWAY-MIB", "cegCongestionHighThresholdReached"), ("CISCO-EPC-GATEWAY-MIB", "cegCongestionLowThresholdReached"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cegOverloadProtectionStatsGrp = cegOverloadProtectionStatsGrp.setStatus('current')
cegBufferingAgentStatsGrp = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 731, 3, 2, 3)).setObjects(("CISCO-EPC-GATEWAY-MIB", "cegBuffersCreated"), ("CISCO-EPC-GATEWAY-MIB", "cegBufferPacketsEnqueued"), ("CISCO-EPC-GATEWAY-MIB", "cegBufferBytesEnqueued"), ("CISCO-EPC-GATEWAY-MIB", "cegBufferPacketsDequeued"), ("CISCO-EPC-GATEWAY-MIB", "cegBufferBytesDequeued"), ("CISCO-EPC-GATEWAY-MIB", "cegBuffersDeleted"), ("CISCO-EPC-GATEWAY-MIB", "cegBuffersTimedOut"), ("CISCO-EPC-GATEWAY-MIB", "cegBufferRejMemUnavailable"), ("CISCO-EPC-GATEWAY-MIB", "cegBufferRejLowMem"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cegBufferingAgentStatsGrp = cegBufferingAgentStatsGrp.setStatus('current')
cegOverloadProtectionConfigGrp = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 731, 3, 2, 4)).setObjects(("CISCO-EPC-GATEWAY-MIB", "cegCongestionLowThreshold"), ("CISCO-EPC-GATEWAY-MIB", "cegCongestionHighThreshold"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cegOverloadProtectionConfigGrp = cegOverloadProtectionConfigGrp.setStatus('current')
cegBufferingAgentConfigGrp = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 731, 3, 2, 5)).setObjects(("CISCO-EPC-GATEWAY-MIB", "cegBufferingAgentEnabled"), ("CISCO-EPC-GATEWAY-MIB", "cegBufferMaxSize"), ("CISCO-EPC-GATEWAY-MIB", "cegBufferDiscardDataTime"), ("CISCO-EPC-GATEWAY-MIB", "cegBufferMaxPacketsPerBuffer"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cegBufferingAgentConfigGrp = cegBufferingAgentConfigGrp.setStatus('current')
cegBufferingAgentStatusGrp = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 731, 3, 2, 6)).setObjects(("CISCO-EPC-GATEWAY-MIB", "cegTotalInUseBuffers"), ("CISCO-EPC-GATEWAY-MIB", "cegTotalBufferedPackets"), ("CISCO-EPC-GATEWAY-MIB", "cegTotalBufferedBytes"), ("CISCO-EPC-GATEWAY-MIB", "cegTotalBufferAvailable"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cegBufferingAgentStatusGrp = cegBufferingAgentStatusGrp.setStatus('current')
cegOverloadProtectionStatusGrp = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 731, 3, 2, 7)).setObjects(("CISCO-EPC-GATEWAY-MIB", "cegCongestionDfpWeight"), ("CISCO-EPC-GATEWAY-MIB", "cegCongestionStatus"), ("CISCO-EPC-GATEWAY-MIB", "cegCongestionLowLastOccurTime"), ("CISCO-EPC-GATEWAY-MIB", "cegCongestionLowLastDuration"), ("CISCO-EPC-GATEWAY-MIB", "cegCongestionHighLastOccurTime"), ("CISCO-EPC-GATEWAY-MIB", "cegCongestionHighLastDuration"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cegOverloadProtectionStatusGrp = cegOverloadProtectionStatusGrp.setStatus('current')
cegOverloadProtectionNotifMgmtGrp = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 731, 3, 2, 8)).setObjects(("CISCO-EPC-GATEWAY-MIB", "cegCongestionHighNotifEnable"), ("CISCO-EPC-GATEWAY-MIB", "cegCongestionLowNotifEnable"), ("CISCO-EPC-GATEWAY-MIB", "cegCongestionClearNotifEnable"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cegOverloadProtectionNotifMgmtGrp = cegOverloadProtectionNotifMgmtGrp.setStatus('current')
cegOverloadProtectionNotifGrp = NotificationGroup((1, 3, 6, 1, 4, 1, 9, 9, 731, 3, 2, 9)).setObjects(("CISCO-EPC-GATEWAY-MIB", "cegCongestionHighThresholdNotif"), ("CISCO-EPC-GATEWAY-MIB", "cegCongestionLowThresholdNotif"), ("CISCO-EPC-GATEWAY-MIB", "cegCongestionClearedNotif"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cegOverloadProtectionNotifGrp = cegOverloadProtectionNotifGrp.setStatus('current')
cegSystemStatusGrpSup1 = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 731, 3, 2, 10)).setObjects(("CISCO-EPC-GATEWAY-MIB", "cegActivatedIpv4v6Bearers"), ("CISCO-EPC-GATEWAY-MIB", "cegActivatedGtpv2SgwSessions"), ("CISCO-EPC-GATEWAY-MIB", "cegActivatedGtpv2PgwSessions"), ("CISCO-EPC-GATEWAY-MIB", "cegActivatedGtpv2SPgwSessions"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cegSystemStatusGrpSup1 = cegSystemStatusGrpSup1.setStatus('current')
cegSystemStatusGrpSup2 = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 731, 3, 2, 11)).setObjects(("CISCO-EPC-GATEWAY-MIB", "cegActivatedBearers"), ("CISCO-EPC-GATEWAY-MIB", "cegActivatedDedicatedBearers"), ("CISCO-EPC-GATEWAY-MIB", "cegActivatedIpv4DedicatedBearers"), ("CISCO-EPC-GATEWAY-MIB", "cegActivatedIpv6DedicatedBearers"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cegSystemStatusGrpSup2 = cegSystemStatusGrpSup2.setStatus('current')
cegSystemStatusGrpSup3 = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 731, 3, 2, 12)).setObjects(("CISCO-EPC-GATEWAY-MIB", "cegTotalIdleSessions"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cegSystemStatusGrpSup3 = cegSystemStatusGrpSup3.setStatus('current')
cegBufferingAgentStatsGrpSup1 = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 9, 731, 3, 2, 13)).setObjects(("CISCO-EPC-GATEWAY-MIB", "cegPacketDropDueToMaxPacketLimit"), ("CISCO-EPC-GATEWAY-MIB", "cegPacketDropDueToMaxBufferLimit"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
cegBufferingAgentStatsGrpSup1 = cegBufferingAgentStatsGrpSup1.setStatus('current')
mibBuilder.exportSymbols("CISCO-EPC-GATEWAY-MIB", cegCongestionClearedNotif=cegCongestionClearedNotif, cegTotalBufferedPackets=cegTotalBufferedPackets, cegBufferBytesEnqueued=cegBufferBytesEnqueued, cegBufferingAgentConfigGrp=cegBufferingAgentConfigGrp, cegSystemStatusGrpSup1=cegSystemStatusGrpSup1, cegOverloadProtectionStats=cegOverloadProtectionStats, cegSystemStatusGrp=cegSystemStatusGrp, cegCongestionLowThresholdReached=cegCongestionLowThresholdReached, cegSystemStatusGrpSup3=cegSystemStatusGrpSup3, ciscoEpcGatewayNotifMgmt=ciscoEpcGatewayNotifMgmt, cegTotalIdleUsers=cegTotalIdleUsers, cegBufferStatus=cegBufferStatus, cegTotalBufferedBytes=cegTotalBufferedBytes, ciscoEpcGatewayMIBNotifications=ciscoEpcGatewayMIBNotifications, cegCongestionLowThreshold=cegCongestionLowThreshold, cegOverloadProtectionConfig=cegOverloadProtectionConfig, PYSNMP_MODULE_ID=ciscoEpcGatewayMIB, cegActivatedIpv4v6Bearers=cegActivatedIpv4v6Bearers, cegBufferStats=cegBufferStats, cegBufferingAgentStatsGrpSup1=cegBufferingAgentStatsGrpSup1, cegBufferRejMemUnavailable=cegBufferRejMemUnavailable, ciscoEPCGatewayMIBComplianceRev2=ciscoEPCGatewayMIBComplianceRev2, cegTotalIdleSessions=cegTotalIdleSessions, cegTotalInUseBuffers=cegTotalInUseBuffers, cegPacketDropDueToMaxBufferLimit=cegPacketDropDueToMaxBufferLimit, cegActivatedIpv4v6Sessions=cegActivatedIpv4v6Sessions, cegCongestionClearNotifEnable=cegCongestionClearNotifEnable, cegCongestionStatus=cegCongestionStatus, cegCongestionHighLastOccurTime=cegCongestionHighLastOccurTime, ciscoEpcGatewayMIBGroups=ciscoEpcGatewayMIBGroups, cegBuffersTimedOut=cegBuffersTimedOut, cegBufferPacketsDequeued=cegBufferPacketsDequeued, cegBufferMaxSize=cegBufferMaxSize, cegVersion=cegVersion, cegCongestionLowLastDuration=cegCongestionLowLastDuration, cegCongestionHighNotifEnable=cegCongestionHighNotifEnable, cegBufferBytesDequeued=cegBufferBytesDequeued, cegBuffersDeleted=cegBuffersDeleted, cegActivatedIpv4DedicatedBearers=cegActivatedIpv4DedicatedBearers, cegCongestionHighThreshold=cegCongestionHighThreshold, ciscoEpcGatewayMIBObjects=ciscoEpcGatewayMIBObjects, cegOverloadProtectionNotifMgmtGrp=cegOverloadProtectionNotifMgmtGrp, ciscoEpcGatewayMIBComplianceRev1=ciscoEpcGatewayMIBComplianceRev1, ciscoEpcGatewayMIBCompliances=ciscoEpcGatewayMIBCompliances, cegOverloadProtectionConfigGrp=cegOverloadProtectionConfigGrp, cegOverloadProtectionStatusGrp=cegOverloadProtectionStatusGrp, cegActivatedDedicatedBearers=cegActivatedDedicatedBearers, cegBufferPacketsEnqueued=cegBufferPacketsEnqueued, cegActivatedGtpv2PgwSessions=cegActivatedGtpv2PgwSessions, cegTotalUsers=cegTotalUsers, cegBufferingAgentStatsGrp=cegBufferingAgentStatsGrp, cegActivatedIpv6Bearers=cegActivatedIpv6Bearers, ciscoEpcGatewayStatus=ciscoEpcGatewayStatus, cegActivatedIpv4Bearers=cegActivatedIpv4Bearers, cegCongestionHighThresholdNotif=cegCongestionHighThresholdNotif, cegBufferingAgentConfig=cegBufferingAgentConfig, ciscoEpcGatewayMIB=ciscoEpcGatewayMIB, cegActivatedBearers=cegActivatedBearers, cegTotalSuspendedUsers=cegTotalSuspendedUsers, cegBufferingAgentEnabled=cegBufferingAgentEnabled, cegPacketDropDueToMaxPacketLimit=cegPacketDropDueToMaxPacketLimit, cegBufferMaxPacketsPerBuffer=cegBufferMaxPacketsPerBuffer, cegOverloadProtectionStatsGrp=cegOverloadProtectionStatsGrp, ciscoEpcGatewayMIBConformance=ciscoEpcGatewayMIBConformance, ciscoEpcGatewayMIBCompliance=ciscoEpcGatewayMIBCompliance, cegBufferRejLowMem=cegBufferRejLowMem, cegBufferDiscardDataTime=cegBufferDiscardDataTime, 
ciscoEpcGatewayMIBComplianceRev3=ciscoEpcGatewayMIBComplianceRev3, ciscoEpcGatewayStatistics=ciscoEpcGatewayStatistics, cegBuffersCreated=cegBuffersCreated, cegActivatedGtpv2SgwSessions=cegActivatedGtpv2SgwSessions, cegTotalBufferAvailable=cegTotalBufferAvailable, cegOverloadProtectionStatus=cegOverloadProtectionStatus, cegCongestionHighLastDuration=cegCongestionHighLastDuration, cegCongestionLowLastOccurTime=cegCongestionLowLastOccurTime, cegCongestionDfpWeight=cegCongestionDfpWeight, cegCongestionLowNotifEnable=cegCongestionLowNotifEnable, cegBufferingAgentStatusGrp=cegBufferingAgentStatusGrp, cegActivatedIpv6DedicatedBearers=cegActivatedIpv6DedicatedBearers, ciscoEpcGatewayConfig=ciscoEpcGatewayConfig, cegCongestionIncomingReqDrops=cegCongestionIncomingReqDrops, cegCongestionLowThresholdNotif=cegCongestionLowThresholdNotif, cegOverloadProtectionNotifGrp=cegOverloadProtectionNotifGrp, cegActivatedGtpv2SPgwSessions=cegActivatedGtpv2SPgwSessions, cegCongestionHighThresholdReached=cegCongestionHighThresholdReached, cegSystemStatusGrpSup2=cegSystemStatusGrpSup2)
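# Example (illustrative): the cegVersion scalar registered above sits at
# OID 1.3.6.1.4.1.9.9.731.1.3.1, so its instance can be read from an agent
# with a standard tool such as:
#   snmpget -v2c -c public <gateway-host> 1.3.6.1.4.1.9.9.731.1.3.1.0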
|
[
"dcwangmit01@gmail.com"
] |
dcwangmit01@gmail.com
|
6c78a14b6f14f0a105896c9e9244dccdaf40733c
|
9d011587623b1ca590c8c85db6b46b87a6d6e97d
|
/simulation/VFA_v2-2.py
|
7081a54997de09cdb521f58ef793e4b396b8db4b
|
[] |
no_license
|
haugerud/epsSensorNetwork
|
2b26984ca754b0a195711e3fcd800ef48c0a6215
|
e4bc81fe409366e073ab79275beac7e8e5010225
|
refs/heads/master
| 2020-04-10T04:43:47.035480
| 2018-12-07T10:55:13
| 2018-12-07T10:55:13
| 160,807,357
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 14,878
|
py
|
#Add a situation where some nodes can no longer move (broken drone)
#-> define a new list holding the static nodes
#-> or a boolean attribute in the Node class indicating whether the node can move or not.
#Source for the mathematics: https://fr.wikibooks.org/wiki/Math%C3%A9matiques_avec_Python_et_Ruby/Points_en_Python
from math import *
#Source for drawing: https://matplotlib.org/
#https://stackoverflow.com/questions/21519203/plotting-a-list-of-x-y-coordinates-in-python-matplotlib
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
#obstacle class, define a rectangular area or polygon (python polygon intersection with line)
#http://geoexamples.blogspot.com/2014/08/shortest-distance-to-geometry-in.html
#Install libraries : $ sudo pip3 install Shapely
# $ sudo pip3 install descartes
from shapely.geometry import Polygon, Point, LinearRing, LineString
from descartes import PolygonPatch
#library for JSON object
import json
class Node:#Definition of a class Node
"""This class defines a node described by:
- its coordinate on X axis
- its coordinate on Y axis
- its node ID"""
def __init__(self, x, y, id):
self.coord = Point(x,y) # class Point from shapely.geometry
self.id=id
self.s=0 # variable to know for how long the node is stable
self.mobil=True # variable to know if the node is mobile or not (in case of broken node)
    def display(self):#Return the node id and its coordinates as a string
return 'Node '+str(self.id)+'='+str(self.coord.wkt)
    def middle(self, p):
        return Node((self.coord.x+p.coord.x)/2, (self.coord.y+p.coord.y)/2, self.id)
def vector(self, p):
return Vector(p.coord.x-self.coord.x , p.coord.y-self.coord.y)
def distance(self, p):
return self.vector(p).norm()
def translation(self, p):
return Node(self.coord.x+p.x, self.coord.y+p.y, self.id)
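# Quick sanity examples (illustrative): Node(0, 0, 1).distance(Node(3, 4, 2))
# is 5.0, and Node(0, 0, 1).translation(Vector(3, 4)) is a new Node at (3, 4).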
class Vector:#Definition of a class Vector
"""This class defines a vector described by:
- its coordinate on X axis
- its coordinate on Y axis"""
def __init__(self, x, y):
self.x=x
self.y=y
def display(self):
return '('+str(self.x)+';'+str(self.y)+')'
def norm(self):
return hypot(self.x, self.y)
""".hypot(x, y) returns the Euclidean norm, sqrt(x*x + y*y).
This is the length of the vector from the origin to point (x, y)."""
def __add__(self, v):#Method to add 2 vectors
return Vector(self.x+v.x, self.y+v.y)
def VF_sensors(i, j):#Function to calculate the virtual force exerted on a node by a neighboring node
    """This function takes 2 inputs:
    - i: the node on which the force is exerted
    - j: the neighbor node which exerts the force
    It returns a vector Fij_temp"""
Fij_temp = Vector(0,0)#temporary Vector initialized to zero vector
# d_ij = i.distance(j)
d_ij = i.coord.distance(j.coord)
if Cr >= d_ij and d_ij>d_th:#In this case, Si and Sj are too far and an attractive force is exerted by Sj on Si
#print("Node {} is too far from node {}, Cr({}) >= d_ij({}) and d_ij > d_th ({}): Attractive force".format(i.id, j.id, Cr, d_ij, d_th))
Fij_temp.x = (Ka * (d_ij - d_th)) * ((j.coord.x - i.coord.x) / d_ij)
Fij_temp.y = (Ka * (d_ij - d_th)) * ((j.coord.y - i.coord.y) / d_ij)
elif d_ij < d_th:#In this case, Si and Sj are too close and a repulsive force is exerted by Sj on Si
#print("Node {} is too close from node {}, d_ij({}) < d_th ({}): Repulsive force".format(i.id, j.id, d_ij, d_th))
        Fij_temp.x = (Kr * (d_th - d_ij)) * ((i.coord.x - j.coord.x) / d_ij)
        Fij_temp.y = (Kr * (d_th - d_ij)) * ((i.coord.y - j.coord.y) / d_ij)
#If none of the previous conditions are met, the force vector is still null because no force is exerted on node i.
return Fij_temp
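# For reference, the piecewise force model implemented above (a sketch, using
# the coefficients and thresholds defined further down in this file):
#   |Fij| = Ka * (d_ij - d_th)   if d_th < d_ij <= Cr   (attraction)
#   |Fij| = Kr * (d_th - d_ij)   if d_ij < d_th         (repulsion)
#   Fij   = zero vector          otherwise (out of communication range)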
def VF_obstacles(i, j):
    """This function takes 2 inputs:
    - i: the node on which the force is exerted
    - j: the obstacle (a polygon) which exerts the force
    It returns a vector Fiobs_temp"""
Fiobs_temp = Vector(0,0)
d_iobs = i.coord.distance(j) # Distance between point Si (i.coord) and Obsj (a polygon)
    if d_iobs < d_thobs and d_iobs>0:#In this case, Si is too close to the obstacle and a repulsive force is exerted by the obstacle.
# print("Obstacle detected, d_iobs<d_thobs")
pol_ext = LinearRing(j.exterior.coords)
d = pol_ext.project(i.coord)
closest_point = pol_ext.interpolate(d)
        Fiobs_temp.x = (Kr_obs * (d_thobs - d_iobs)) * ((i.coord.x - closest_point.x) / d_iobs)
        Fiobs_temp.y = (Kr_obs * (d_thobs - d_iobs)) * ((i.coord.y - closest_point.y) / d_iobs)
    #else the obstacle is too far away, so no force is exerted on node i and Fiobs_temp stays the zero vector
return Fiobs_temp
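# The nearest-point lookup above can be exercised on its own; a minimal
# sketch with made-up coordinates:
#   ring = LinearRing(Polygon([(0, 0), (2, 0), (2, 2)]).exterior.coords)
#   nearest = ring.interpolate(ring.project(Point(3, 1)))  # closest boundary point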
def Node_translation(i, F, list_o):
"""This function takes 3 inputs:
- i: the node to move
- F: the force that moves the node
- list_o: the obstacle list
It returns a new node that is the result of the input node translation"""
temp = i.translation(F)
dist_init = i.coord.distance(temp.coord)
g = F.x * 1000
h = F.y * 1000
F_temp=Vector(g,h)
projection = i.translation(F_temp)
line = LineString([(i.coord.x, i.coord.y),(projection.coord.x, projection.coord.y)])
dist_min = None
closest_obs = None
for elt in list_o:
difference = line.difference(elt)
if difference.geom_type == 'MultiLineString': # In this case we meet an obstacle on our way
dist = list(difference.geoms)[0].length
if dist_min is None or dist_min > dist:
dist_min = dist
closest_obs = elt
    if dist_min != None and dist_min < dist_init: # If dist_min is not None, we met an obstacle and we need to reduce the translation.
        #print("Reducing the move: initial distance {} > dist_min {}".format(dist_init, dist_min))
ratio = (dist_min * 0.9)/dist_init
F.x = F.x * ratio
F.y = F.y * ratio
temp = i.translation(F)
#We must also verify that the node doesn't move out of our field of interest.
inField=True
x=temp.coord.x
y=temp.coord.y
if temp.coord.x < (-xfield)/2:
x = (-xfield)/2
inField = False
elif temp.coord.x > xfield/2:
x = xfield/2
inField = False
if temp.coord.y < (-yfield)/2:
y = (-yfield)/2
inField = False
elif temp.coord.y > yfield/2:
y = yfield/2
inField = False
if inField == False:
temp=Node(x, y, i.id)
return temp
# As range() does not accept decimal steps, I defined my own generator that does
def frange(start, stop, step):
i = start
while i < stop:
yield i
i += step
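# e.g. list(frange(0, 1, 0.25)) yields [0, 0.25, 0.5, 0.75]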
def Estimate_coverage(list_n):
"""This function takes only 1 input:
- list_n: the list of nodes
It returns the % of covered area"""
covered=0 # number of covered point
total=0 # total number of point
for x in frange(-xfield/2, xfield/2, 0.5):
for y in frange(-yfield/2, yfield/2, 0.5):
test=False
for node in list_n:
# If the distance between the current point and a node is less than the sensing range the point is covered
if node.coord.distance(Point(x,y)) < Sr:
test=True # Change the value of test
break # Now leave the loop, it's useless to verify for the other nodes
# else, the point is uncovered
if test == True: # In this case, the point is covered
covered+=1
total+=1
coverage=(covered/total)*100
return coverage
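# With xfield = yfield = 75 and the 0.5 step used above, the estimate samples
# a roughly 150 x 150 grid, i.e. about 22,500 test points per call.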
#Parameters definition
global Cr #Communication range
global Sr #Sensing range
global L_th # Distance threshold where a node stop to move if its movement is less than this one
global S_th #Time duration after what we consider a node reach its optimal position (use number of iteration, no time units)
global d_th #Distance threshold (= sqrt(3)*Sr)
global Ka #Attraction coefficient
global Kr #Repulsion coefficient
global Kr_obs #Repulsion coefficient for obstacle
global d_thobs #Distance threshold (= sqrt(3)*Sr / 2)
global xfield
global yfield
global xfield_min
global yfield_min
Cr=20
Sr=Cr/2
S_th=10
L_th=0.001
Ka=0.001
Kr=0.3
d_th = sqrt(3)*Sr
Kr_obs = 0.6
d_thobs = d_th / 2
Max_iteration=300
iteration=0
xfield=75
yfield=75
#System definition (field, nodes, obstacles)
#field = Polygon([(xfield/2,yfield/2),(-xfield/2,yfield/2),(xfield/2,-yfield/2),(-xfield/2,-yfield/2)])
list_node=[]
master_node=False
#Initialise each node with the json object
#The first object read will be considered as Master Node
with open("demonstration_input.txt", "r") as file:
for line in file:
rdata = json.loads(line)
        if master_node == False:#we need to center the graph on the master node (0,0)
x_to_center=rdata['la']
y_to_center=rdata['lo']
master_node=True
#Need to convert the data to use them on the map.
x=(rdata['la']-x_to_center)/100
y=(rdata['lo']-y_to_center)/100
temp = Node(x, y, rdata['nr'])
print(temp.display())
list_node.append(temp)
x_to_center=59976109
y_to_center=11043916
#n10.mobil=False
poly0 = Polygon([(-3,8),(-7,6),(-5,11)])
poly1 = Polygon([(8,1),(7,4),(10,3)])
poly2 = Polygon([(3,29),(2,32),(5,28)])
poly3 = Polygon([(-7,-24),(-5,-27),(-7,-28)])
poly4 = Polygon([(0,-8),(0,-11),(3,-5)])
poly5 = Polygon([(-13,29),(-10,28),(-14,27)])
poly6 = Polygon([(-24,12),(-22,11),(-25,10)])
poly7 = Polygon([(34,17),(33,19),(34,15)])
poly8 = Polygon([(-27,-34),(-28,-35),(-27,-33)])
poly9 = Polygon([(-27,-18),(-24,-18),(-26,-15)])
poly10 = Polygon([(18,-31),(16,-32),(17,-29)])
poly11 = Polygon([(14,-18),(15,-16),(13,-15)])
poly12 = Polygon([(22,31),(20,32),(23,29)])
poly13 = Polygon([(30,16),(29,14),(28,15)])
poly14 = Polygon([(25,-3),(25,-1),(23,-0)])
poly15 = Polygon([(32,-24),(30,-21),(33,-22)])
#list_node=[n0, n1, n2, n3, n4, n5, n6, n7, n8, n9, n10, n11, n12, n13]
list_poly=[poly0, poly1, poly2, poly3, poly4, poly5, poly6, poly7, poly8, poly9, poly10, poly11, poly12, poly13, poly14, poly15]
#Plot the initial positions on the graph and legend
#xx, yy = np.meshgrid(np.arange(-25, 26), np.arange(-25,26), sparse=True)
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
ax.set_xlim(-(xfield/2), xfield/2)
ax.set_ylim(-(yfield/2), yfield/2)
"""for elt in list_node:
plt.scatter(elt.coord.x, elt.coord.y, color="#7f7f7f")"""
#Plot obstacles (polygons)
for elt in list_poly:
patch2b = PolygonPatch(elt, fc='#ff7f7f', ec='#ff3232', alpha=1., zorder=2)
ax.add_patch(patch2b)
#Legend display
"""label = "Cr={} | Sr={} | S_th={} | L_th={}\nKr={} | Ka={} | Kr_obs={}".format(Cr, Sr, S_th, L_th, Kr, Ka, Kr_obs)
legend = mpatches.Patch(color='none', label=label)
plt.legend(handles=[legend])"""
#Main loop
while iteration<Max_iteration:
print("Iteration n°", iteration)
test=0#Testing variable, reset at each iteration
for index, i in enumerate(list_node):#For each node in the system
SumF_node=Vector(0,0)#Reset the force sums at each iteration
SumF_obs=Vector(0,0)
if i.s < S_th and i.mobil==True: # If the node isn't stable for a certain amount of time and still mobile, use VF on it.
            for jndex, j in enumerate(list_node):#For each node Sj in the system, compute the force it exerts on Si
                if index!=jndex:#Avoid computing the force exerted by Si on itself.
F_node=VF_sensors(i, j)
SumF_node=SumF_node.__add__(F_node)
            for obs in list_poly:#For each obstacle Oj in the system, compute the force it exerts on Si
F_obs=VF_obstacles(i, obs)
SumF_obs=SumF_obs.__add__(F_obs)
F_tot=SumF_node.__add__(SumF_obs)#Total of the forces exerted on Si (SumF_obs + SumF_node)
            if i.distance(i.translation(F_tot)) < L_th:#Stable? YES if the node would move less than the distance threshold L_th
i.s+=1#Increment the stability timer
else:#Stable ? NO, so translation
i.s=0#Reset stability timer
list_node[index]=Node_translation(list_node[index], F_tot, list_poly)#Move the node to its new position
        elif i.s >= S_th: # The node has been stable for longer than the time threshold, so we stop applying the VF: it has already reached its optimal position.
test+=1 # Increment the testing variable
else:
test+=1 # Still incrementing the testing variable
    #Plot every point on the graph
    for elt in list_node:#elt takes the value of each element in list_node
plt.scatter(elt.coord.x, elt.coord.y, color="#cbcbcb")
#Test
    if test==index+1:#If all nodes are stable, the system is optimized, so leave the loop
break
iteration+=1
if test==index+1:
    print("Optimized after {} iterations".format(iteration))
else:
    print("Not optimized after {} iterations".format(Max_iteration))
print("Area covered at {}% with {} nodes".format(Estimate_coverage(list_node), len(list_node)))
for elt in list_node:
print(elt.display())
#Write the new positions in a JSON object
#JSON object format : {"nr":unint32_t,"cn":boolean,"al":boolean,"la":int32_t,"lo":int32_t}
with open("demonstration_output.txt", "w") as wfile:
for elt in list_node:
        # Note that it is necessary to double any { or } that is not part of a formatting placeholder.
#data='{{"nr":{},"cn":true,"al":false,"la":{},"lo":{}}}'.format(elt.id, int(elt.coord.x), int(elt.coord.y))
x=elt.coord.x*100+x_to_center
y=elt.coord.y*100+y_to_center
# data="{'nr':"+str(elt.id)+",'cn':true,'al':false,'la':"+str(int(elt.coord.x))+",'lo':"+str(int(elt.coord.y))+"}"
data="{'nr':"+str(elt.id)+",'cn':true,'al':false,'la':"+str(int(x))+",'lo':"+str(int(y))+"}"
jsonlike_data = data.replace("'", "\"")
wfile.write(jsonlike_data)
wfile.write("\n")
#Plot the final positions
for index,elt in enumerate(list_node):#elt takes the value of each element in list_node
if elt.mobil == False: # use different color if node is broken
circ = plt.Circle((elt.coord.x, elt.coord.y), radius=Sr, edgecolor='#FFA500', facecolor="none")#Draw a circle around the point, show the sensing range
plt.scatter(elt.coord.x, elt.coord.y, color="#FFA500")
ax.add_patch(circ)
else:
circ = plt.Circle((elt.coord.x, elt.coord.y), radius=Sr, edgecolor='b', facecolor="none")#Draw a circle around the point, show the sensing range
plt.scatter(elt.coord.x, elt.coord.y, color="b")
ax.add_patch(circ)
plt.show()
|
[
"haugerud@hioa.no"
] |
haugerud@hioa.no
|
6d27efdd7d47cfcfddb3c0e52419afd2ec0433e5
|
fba1de0664363c24ef7233c2a3dd423b80cbc61f
|
/download.py
|
f3355b17b1c0c51d088b01d6b8f202287e68a76a
|
[] |
no_license
|
alex-993/web_scraper
|
ff37c005e9a6cfff4d35483f5addf83d4533f625
|
784351e5150d2863d3db694c288dbb8182d20e31
|
refs/heads/master
| 2022-04-05T06:47:53.022205
| 2020-01-30T16:00:47
| 2020-01-30T16:02:02
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 718
|
py
|
import urllib.request
from urllib.error import URLError, HTTPError, ContentTooShortError
def download(url, user_agent='wswp', num_retries=2, charset='utf-8'):
request = urllib.request.Request(url)
request.add_header('User-agent', user_agent)
try:
resp = urllib.request.urlopen(request)
cs = resp.headers.get_content_charset()
if not cs:
cs = charset
html = resp.read().decode(cs)
except (URLError, HTTPError, ContentTooShortError) as e:
print('Download error:', e.reason)
html = None
        if num_retries > 0:
            if hasattr(e, 'code') and 500 <= e.code < 600:
                # retry on 5xx server errors, preserving user_agent and charset
                return download(url, user_agent, num_retries - 1, charset)
return html
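# Minimal usage sketch (the URL is illustrative):
if __name__ == '__main__':
    html = download('http://example.com')
    if html:
        print(html[:200])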
|
[
"alex14.an@gmail.com"
] |
alex14.an@gmail.com
|
63cd814987f8153f78e5743152e0cc25c4c23c63
|
3c8bc614c9f09db5efce54af3cbcaf78e0f48b54
|
/0x00-python-hello_world/8-concat_edges.py
|
f531e57e0ce482b0c1cb28ede6ce2094ad90bd11
|
[] |
no_license
|
davidknoppers/holbertonschool-higher_level_programming
|
7848d301c4bf5c1fa285314392adfb577d6d082f
|
beaf6e5ece426c2086f34763e50c3ce0f56923ac
|
refs/heads/master
| 2021-04-29T10:10:27.071278
| 2017-05-03T02:46:44
| 2017-05-03T02:46:44
| 77,847,936
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 208
|
py
|
#!/usr/bin/python3
str = "Python is an interpreted, interactive, object-oriented programming\
language that combines remarkable power with very clear syntax"
str = str[39:67]+str[107:112]+str[:6]
print(str)
|
[
"david.knoppers@holbertonschool.com"
] |
david.knoppers@holbertonschool.com
|
fcc09a982519bbf5b31efdd28089428f5328ed9d
|
ba0cbdae81c171bd4be7b12c0594de72bd6d625a
|
/MyToontown/Panda3D-1.9.0/direct/leveleditor/LevelLoaderBase.py
|
f5c0b44ab0ad2f0f0900cdabd038db10f55e2cdf
|
[
"BSD-3-Clause",
"BSD-2-Clause"
] |
permissive
|
sweep41/Toontown-2016
|
65985f198fa32a832e762fa9c59e59606d6a40a3
|
7732fb2c27001264e6dd652c057b3dc41f9c8a7d
|
refs/heads/master
| 2021-01-23T16:04:45.264205
| 2017-06-04T02:47:34
| 2017-06-04T02:47:34
| 93,279,679
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,221
|
py
|
import imp
class LevelLoaderBase:
"""
    Base class for LevelLoader,
which you will use to load level editor data in your game.
Refer LevelLoader.py for example.
"""
def __init__(self):
self.defaultPath = None # this should be set in your LevelLoader.py
self.initLoader()
def initLoader(self):
# You should implement this in subclass
        raise NotImplementedError('initLoader() must be implemented in your LevelLoader.py')
def cleanUp(self):
# When you don't need to load any more data, you can call clean up
del base.objectPalette
del base.protoPalette
del base.objectHandler
del base.objectMgr
def loadFromFile(self, fileName, filePath=None):
if filePath is None:
filePath = self.defaultPath
if fileName.endswith('.py'):
fileName = fileName[:-3]
file, pathname, description = imp.find_module(fileName, [filePath])
try:
module = imp.load_module(fileName, file, pathname, description)
return True
except:
            print 'failed to load %s' % fileName
return None
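# Minimal subclass sketch (names and path are illustrative):
# class MyLevelLoader(LevelLoaderBase):
#     def initLoader(self):
#         self.defaultPath = '/path/to/level/files'
# MyLevelLoader().loadFromFile('myLevel.py')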
|
[
"sweep14@gmail.com"
] |
sweep14@gmail.com
|
5c78b2aead19ffa8b1950c6a1dc99dc58910c08e
|
c3d244c51e4c8a318f67872330b0166614fd22b5
|
/Srp/venv/bin/cygdb
|
1dcf69eb01f484725c1027922146feb3c3f87957
|
[] |
no_license
|
siva-28/Srp
|
cf8f38e3360d79a4efd471812b5e1d9ebd7f0001
|
b7797448b883762aae4106ef2d31dafd2fec0ab3
|
refs/heads/master
| 2020-03-27T02:58:34.758865
| 2018-09-06T08:19:06
| 2018-09-06T08:19:06
| 145,830,383
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 256
|
#!/home/siva-m/PycharmProjects/Srp/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from Cython.Debugger.Cygdb import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
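# Typical invocation (illustrative): from a directory holding Cython modules
# built with debugging enabled, e.g.
#   cygdb . -- --args python-dbg my_script.py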
|
[
"smartsiva939@gmail.com"
] |
smartsiva939@gmail.com
|
|
704f013c130662ebe0573fac63f33843058e6de6
|
c4c58279db858356c41e2a90074181fdf0dd6735
|
/Django相关/auth_form/auth_demo/settings.py
|
b9bf9437ab29361b297bd833119f7ec31e7751a3
|
[] |
no_license
|
mowangmo/mini_apps
|
3ae9e269cd6dcfbdc850113962ffac541b03ff8c
|
f5c2d71ea057b9dbdf554bed4d94f5aabab6b2c4
|
refs/heads/master
| 2021-09-07T16:20:32.298619
| 2018-02-26T01:53:04
| 2018-02-26T01:53:04
| 107,953,009
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,239
|
py
|
"""
Django settings for auth_demo project.
Generated by 'django-admin startproject' using Django 1.11.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'v(oph1*5_e^h#i!s&so0@i5^gvjkcjn126=4f8d78@8fbwd*rh'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
#from django.contrib.auth.middleware import AuthenticationMiddleware
ROOT_URLCONF = 'auth_demo.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'auth_demo.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
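# Expire session cookies after 10 seconds (the setting is in seconds;
# Django's default is 1209600, i.e. two weeks).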
SESSION_COOKIE_AGE = 10
|
[
"wangmo@syberos.com"
] |
wangmo@syberos.com
|
c21bb2097509c60695c3b4904a525faa92d6f888
|
b65f7a26db907a5e24671d31bae491ac3b351959
|
/word_pattern.py
|
c045e0c4a00c61f991e8e2225855237b5c6781a5
|
[] |
no_license
|
srivats0603/Algos_Practice
|
3db7dadd910976cd4284a04a4de6e2b4b4ad9a8a
|
1c0e9f579a2dc0bf2c0cac4510dc12c396302825
|
refs/heads/master
| 2020-12-13T14:25:14.976398
| 2020-06-07T13:13:10
| 2020-06-07T13:13:10
| 234,443,685
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,309
|
py
|
class Solution_wordPattern:
def find_num_words(self,str_x,len_str_x):
num_words = 1
space_list = []
for i in range(0,len_str_x):
if str_x[i] == ' ':
space_list.append(i)
num_words = num_words+1
space_list.append(len_str_x+1)
return num_words,space_list
def wordPattern(self, pattern, str_x):
len_pat = len(pattern)
len_str_x = len(str_x)
num_words,space_list = self.find_num_words(str_x,len_str_x)
if len_pat != num_words:
return False
pattern_dict = {pattern[0] : str_x[0:space_list[0]]}
word_list = [str_x[0:space_list[0]]]
for i in range(1,len_pat):
this_word = str_x[(space_list[i-1]+1):space_list[i]]
if (pattern[i] not in pattern_dict.keys()):
if this_word not in word_list:
pattern_dict[pattern[i]] = this_word
word_list.append(this_word)
else:
#print("the patterns for the word {} don't match".format(this_word))
return False
elif (pattern_dict[pattern[i]] == this_word):
#print(pattern_dict[pattern[i]],"matches",this_word, "and")
pass
elif (pattern_dict[pattern[i]] != this_word):
#print("the words for the pattern {} don't match".format(pattern[i]))
return False
#print(pattern_dict)
return True
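# Usage sketch, following the classic word-pattern examples:
if __name__ == '__main__':
    s = Solution_wordPattern()
    print(s.wordPattern("abba", "dog cat cat dog"))   # True
    print(s.wordPattern("abba", "dog cat cat fish"))  # False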
|
[
"shuklasr@utopusinsights.com"
] |
shuklasr@utopusinsights.com
|
a3da4fc993c60e6e717dd3a753c22ba7c5a1debb
|
0e82ad56b79439520c656b45b068c8f6528b1796
|
/Assignment5/GameMaster.py
|
7320a2a8f7c59d224ff71f80ced5ec5c0d75c6e4
|
[
"MIT"
] |
permissive
|
vaibhavi-r/CSE-415
|
a755dec25e836786ce726d14ddaf320ea8212194
|
f5fc90e01aa5a2e1c338b5e9eeccb56c05e39a7c
|
refs/heads/master
| 2021-08-28T04:19:14.913498
| 2017-12-11T06:54:22
| 2017-12-11T06:54:22
| 106,122,569
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,734
|
py
|
'''GameMaster.py
based on code from RunKInARow.py
'''
USE_HTML = True
import Hobgoblin as player1
import Player as player2
from winTesterForK import winTesterForK
if USE_HTML: import gameToHTML
TTT_INITIAL_STATE = \
[[[' ',' ',' '],
[' ',' ',' '],
[' ',' ',' ']], "X"]
K = 3
GAME_TYPE = 'TIC-TAC-TOE'
FIVE_INITIAL_STATE = \
[[['-',' ',' ',' ',' ',' ','-'],
[' ',' ',' ',' ',' ',' ',' '],
[' ',' ',' ',' ',' ',' ',' '],
[' ',' ',' ',' ',' ',' ',' '],
[' ',' ',' ',' ',' ',' ',' '],
[' ',' ',' ',' ',' ',' ',' '],
['-',' ',' ',' ',' ',' ','-']], "X"]
K = 5
GAME_TYPE = "Five in a Row on Seven-by-Seven Board with Corners Forbidden"
INITIAL_STATE = FIVE_INITIAL_STATE
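# A game state is a two-element list [board, whoseTurn]: board is a list of
# row lists holding 'X', 'O', ' ', or '-' (forbidden corner squares), and
# whoseTurn is "X" or "O", as in the initial states defined above.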
TIME_PER_MOVE = 10
N = len(INITIAL_STATE[0]) # height of board
M = len(INITIAL_STATE[0][0]) # width of board
FINISHED = False
def runGame():
currentState = INITIAL_STATE
print('The Gamemaster says, "Players, introduce yourselves."')
print(' (Playing X:) '+player1.introduce())
print(' (Playing O:) '+player2.introduce())
if USE_HTML:
gameToHTML.startHTML(player1.nickname(), player2.nickname(), GAME_TYPE, 1)
try:
p1comment = player1.prepare(INITIAL_STATE, K, 'X', player2.nickname())
except:
        report = 'Player 1 (' + player1.nickname() + ') failed to prepare, and loses by default.'
print(report)
if USE_HTML: gameToHTML.reportResult(report)
report = 'Congratulations to Player 2 ('+player2.nickname()+')!'
print(report)
if USE_HTML: gameToHTML.reportResult(report)
if USE_HTML: gameToHTML.endHTML()
return
try:
p2comment = player2.prepare(INITIAL_STATE, K, 'O', player1.nickname())
except:
        report = 'Player 2 (' + player2.nickname() + ') failed to prepare, and loses by default.'
print(report)
if USE_HTML: gameToHTML.reportResult(report)
report = 'Congratulations to Player 1 ('+player1.nickname()+')!'
print(report)
if USE_HTML: gameToHTML.reportResult(report)
if USE_HTML: gameToHTML.endHTML()
return
print('The Gamemaster says, "Let\'s Play!"')
print('The initial state is...')
currentRemark = "The game is starting."
if USE_HTML: gameToHTML.stateToHTML(currentState)
XsTurn = True
name = None
global FINISHED
FINISHED = False
turnCount = 0
printState(currentState)
while not FINISHED:
who = currentState[1]
if XsTurn:
playerResult = player1.makeMove(currentState, currentRemark, TIME_PER_MOVE)
name = player1.nickname()
XsTurn = False
else:
playerResult = player2.makeMove(currentState, currentRemark, TIME_PER_MOVE)
name = player2.nickname()
XsTurn = True
moveAndState, currentRemark = playerResult
if moveAndState==None:
FINISHED = True; continue
move, currentState = moveAndState
moveReport = "Move is by "+who+" to "+str(move)
print(moveReport)
utteranceReport = name +' says: '+currentRemark
print(utteranceReport)
if USE_HTML: gameToHTML.reportResult(moveReport)
if USE_HTML: gameToHTML.reportResult(utteranceReport)
possibleWin = winTesterForK(currentState, move, K)
if possibleWin != "No win":
FINISHED = True
printState(currentState)
if USE_HTML: gameToHTML.stateToHTML(currentState, finished=True)
print(possibleWin)
if USE_HTML: gameToHTML.reportResult(possibleWin)
if USE_HTML: gameToHTML.endHTML()
return
printState(currentState)
if USE_HTML: gameToHTML.stateToHTML(currentState)
turnCount += 1
#if turnCount == 9: FINISHED=True
printState(currentState)
if USE_HTML: gameToHTML.stateToHTML(currentState)
who = currentState[1]
print("Game over.")
if USE_HTML: gameToHTML.reportResult("Game Over; it's a draw")
if USE_HTML: gameToHTML.endHTML()
def printState(s):
global FINISHED
board = s[0]
who = s[1]
    horizontalBorder = "+" + 3*M*"-" + "+" # width follows the number of columns (M), three characters per cell
print(horizontalBorder)
for row in board:
print("|",end="")
for item in row:
print(" "+item+" ", end="")
print("|")
print(horizontalBorder)
if not FINISHED:
print("It is "+who+"'s turn to move.\n")
runGame()
|
[
"vaibhavi.rangarajan@gmail.com"
] |
vaibhavi.rangarajan@gmail.com
|
21b16f6a2046047120d8a98f0d49823758acea53
|
7d12a2b57b5f72755029e6a71357b2ae749fcc0b
|
/StockInsightPython/predict/GetPredict.py
|
bc727042e9affb3a2c9dd13ee27f0cb7536f2733
|
[] |
no_license
|
DeveloperHailie/StockInsight
|
b06c911e7d9672f02bf167ca5d52fbc4eff3e9b1
|
23bc455fcde82d68991a136bc6b961ad2df29745
|
refs/heads/main
| 2023-06-02T06:03:07.245164
| 2021-06-19T05:39:17
| 2021-06-19T05:39:17
| 310,904,981
| 0
| 3
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,627
|
py
|
import numpy as np
import cv2
from urllib.request import urlopen
import tensorflow as tf
import FinanceDataReader as fdr
from sklearn.preprocessing import MinMaxScaler
import pandas as pd
import csv
import os
import pymysql
import tkinter
from datetime import datetime
import time
import sys
from DBInfo import DB
# Exit immediately if the market is not open
n = time.localtime().tm_wday
n_tm = time.localtime() # get the current time
if(n==5 or n==6) : # if it is Saturday or Sunday
    sys.exit()
# now.tm_hour, now.tm_min
if(n_tm.tm_hour<9 or n_tm.tm_hour>16) : # outside trading hours
    sys.exit()
dbInformation = DB()
# Connect to the DB
conn = pymysql.connect(host=dbInformation.host, port=dbInformation.port,
                       user=dbInformation.user, password=dbInformation.password, db=dbInformation.dbName)
# Create a cursor object
cursor = conn.cursor()
# Function to load the model
def loadModel(model) :
np.set_printoptions(suppress=True)
classifierLoad = tf.keras.models.load_model(model)
return classifierLoad
def predict(code) :
    df = fdr.DataReader(code, '2020') # read data for the given stock code
    source = df.tail(5) # last 5 trading days from the API
source = source[['Open', 'High', 'Low', 'Close', 'Volume']]
source = source.values
transformer = MinMaxScaler()
train = transformer.fit_transform(source)
train = np.array([train])
y = model.predict(train)
y_inverse = transformer.inverse_transform([[0, 0, 0, y[0], 0]])
predict_close = y_inverse[0][3]
return predict_close
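# Note on the inverse transform above (also used in the loop below): the
# MinMaxScaler is fit on five columns (Open, High, Low, Close, Volume), so
# the scalar network output is placed in the Close slot of a dummy row before
# inverse_transform, and column 3 of the result is read back as the price.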
# Set the model path and load the model
model_path = "C:\\StockInsightPython\\predict\\DeepLearningModel.h5"
model = loadModel(model_path)
# All stocks listed on the Korea Exchange (KRX)
df_krx_code = fdr.StockListing('KRX')['Symbol'] # ticker codes
df_krx_name = fdr.StockListing('KRX')['Name'] # company names
# list to collect the csv values
c = list()
check = True
# Compute predicted closing prices with the model
for df_krx_stock in df_krx_code:
    #source holds the last 5 days of data
    if(os.path.isfile("..\\realtimeData\\"+df_krx_stock + ".csv")) : # the realtime csv exists
        check = True
    else : # no realtime csv for this stock
        check = False
    # read the csv
    if(check == True) :
        try :
            df_today = pd.read_csv("..\\realtimeData\\"+df_krx_stock + ".csv", header=None) # today's dataframe (prices)
            len(df_today) # number of csv rows
            df_close_list = df_today[1] # today's Close column
            df_open = df_close_list[0] # current Open (opening price)
            df_close = df_close_list[len(df_today)-1] # current Close (latest price)
            df_volume = df_today[4][len(df_today)-1] # current volume
            # put the csv values into the list
            for x in df_close_list:
                c.append(x) # collect the current-price column
            # sort ascending: c[0] is the low, c[-1] the high
            c.sort()
            df_low = c[0] # Low
            df_high = c[len(c)-1] # High
            df = fdr.DataReader(df_krx_stock, '2020')
            if df.empty : # skip stocks with no data
                continue
            else :
                source = df.tail(5) # last 5 trading days from the API
                source = source[['Open', 'High', 'Low', 'Close', 'Volume']]
                source = source.values
                ## splice today's realtime values in as the most recent row
                source = np.delete(source,0, axis=0)
                source = np.insert(source,4, [df_open, df_high, df_low, df_close, df_volume], axis=0)
                ##
transformer = MinMaxScaler()
train = transformer.fit_transform(source)
train = np.array([train])
y = model.predict(train)
y_inverse = transformer.inverse_transform([[0, 0, 0, y[0], 0]])
predict_close = y_inverse[0][3]
            c = [] # reset for the next stock
except:
predict_close = predict(df_krx_stock)
else :
try:
predict_close = predict(df_krx_stock)
except:
continue
    # update the DB
try:
sql = "UPDATE Stock SET stock_future = '"+str(predict_close)+"' WHERE stock_code ="+(df_krx_stock)
cursor.execute(sql)
conn.commit()
except:
print("error")
conn.close()
|
[
"20181011@sungshin.ac.kr"
] |
20181011@sungshin.ac.kr
|
c1e495c692005ed98eeeddca13ac61b00102c9c9
|
7d946a2105a4f08f97df3c99d6348bbb216a3fc9
|
/assignment2/cs231n/im2col.py
|
f400fef7928dfd517f41674b7f0e4385b075e231
|
[] |
no_license
|
srivatsa96/cs231n
|
c2628d1edd7a30b7e96ec3ecc443b9e8d705990a
|
62ca2fef3e64c2ecee1e5aab34496a993f92d473
|
refs/heads/master
| 2021-07-12T19:31:10.437684
| 2017-10-13T09:26:24
| 2017-10-13T09:26:24
| 105,500,626
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,256
|
py
|
from builtins import range
import numpy as np
def get_im2col_indices(x_shape, field_height, field_width, padding=1, stride=1):
# First figure out what the size of the output should be
N, C, H, W = x_shape
assert (H + 2 * padding - field_height) % stride == 0
assert (W + 2 * padding - field_width) % stride == 0
out_height = (H + 2 * padding - field_height) / stride + 1
out_height = int(out_height)
out_width = (W + 2 * padding - field_width) / stride + 1
out_width = int(out_width)
i0 = np.repeat(np.arange(field_height), field_width)
i0 = np.tile(i0, C)
i1 = stride * np.repeat(np.arange(out_height), out_width)
j0 = np.tile(np.arange(field_width), field_height * C)
j1 = stride * np.tile(np.arange(out_width), out_height)
i = i0.reshape(-1, 1) + i1.reshape(1, -1)
j = j0.reshape(-1, 1) + j1.reshape(1, -1)
k = np.repeat(np.arange(C), field_height * field_width).reshape(-1, 1)
return (k, i, j)
def im2col_indices(x, field_height, field_width, padding=1, stride=1):
""" An implementation of im2col based on some fancy indexing """
# Zero-pad the input
p = padding
x_padded = np.pad(x, ((0, 0), (0, 0), (p, p), (p, p)), mode='constant')
k, i, j = get_im2col_indices(x.shape, field_height, field_width, padding,
stride)
cols = x_padded[:, k, i, j]
C = x.shape[1]
cols = cols.transpose(1, 2, 0).reshape(field_height * field_width * C, -1)
return cols
def col2im_indices(cols, x_shape, field_height=3, field_width=3, padding=1,
stride=1):
""" An implementation of col2im based on fancy indexing and np.add.at """
N, C, H, W = x_shape
H_padded, W_padded = H + 2 * padding, W + 2 * padding
x_padded = np.zeros((N, C, H_padded, W_padded), dtype=cols.dtype)
k, i, j = get_im2col_indices(x_shape, field_height, field_width, padding,
stride)
cols_reshaped = cols.reshape(C * field_height * field_width, -1, N)
cols_reshaped = cols_reshaped.transpose(2, 0, 1)
np.add.at(x_padded, (slice(None), k, i, j), cols_reshaped)
if padding == 0:
return x_padded
return x_padded[:, :, padding:-padding, padding:-padding]
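# Shape sanity check (a small sketch): for a 2x3x4x4 input with 3x3 fields,
# padding 1 and stride 1, each column stacks 3*3*3 = 27 values and there are
# 4*4 output positions per image over 2 images, i.e. 32 columns.
if __name__ == '__main__':
    x = np.random.randn(2, 3, 4, 4)
    cols = im2col_indices(x, 3, 3, padding=1, stride=1)
    assert cols.shape == (27, 32)
    assert col2im_indices(cols, x.shape, 3, 3, padding=1, stride=1).shape == x.shape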
|
[
"srivatsasinha@gmail.com"
] |
srivatsasinha@gmail.com
|
d9cfc49a82bc5c833bfc34078ccb8862441965ff
|
665a0ec78f8b26972868dbc8ac5acbb29482527b
|
/venv/bin/f2py
|
a0f1f9ddf293011510ec9b3985ba6faa502bfcf6
|
[] |
no_license
|
tanjan-sj/Android-Malware-Analysis
|
8f1fd95196b600d8649a9d51941bccf3dd7404da
|
8cfe748d0e39ba6d3d8550f5dd7e0b06951cbaed
|
refs/heads/master
| 2020-03-27T14:23:32.360290
| 2018-08-29T22:00:06
| 2018-08-29T22:00:06
| 146,659,749
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 254
|
#!/home/sj/PycharmProjects/irisMal/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from numpy.f2py.__main__ import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
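# Typical invocation (illustrative): compile a Fortran source into an
# importable extension module, e.g.
#   f2py -c fib.f -m fib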
|
[
"tanjan.sj@gmail.com"
] |
tanjan.sj@gmail.com
|
|
4292ca66d299ecd2c72db2fabefb7a193e512295
|
d1ec14b25b3489993079b36b7971dacb3bd09340
|
/app/trace.py
|
0d808beddb833a01d35ad4f1e5ac790575150058
|
[
"Apache-2.0"
] |
permissive
|
TinlokLee/Django-Celery
|
9cb23fe902302eb5d6ed626dd7ed2a1403c04346
|
320b36ee2091aa876ecf92003af6b8e11b3041ed
|
refs/heads/master
| 2020-04-12T11:11:32.115875
| 2018-12-20T13:01:07
| 2018-12-20T13:01:07
| 162,451,990
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 24,548
|
py
|
# -*- coding: utf-8 -*-
"""Trace task execution.
This module defines how the task execution is traced:
errors are recorded, handlers are applied and so on.
"""
from __future__ import absolute_import, unicode_literals
import logging
import os
import sys
from collections import namedtuple
from warnings import warn
from billiard.einfo import ExceptionInfo
from kombu.exceptions import EncodeError
from kombu.serialization import loads as loads_message
from kombu.serialization import prepare_accept_content
from kombu.utils.encoding import safe_repr, safe_str
from celery import current_app, group, signals, states
from celery._state import _task_stack
from celery.app.task import Context
from celery.app.task import Task as BaseTask
from celery.exceptions import Ignore, InvalidTaskError, Reject, Retry
from celery.five import monotonic, text_t
from celery.utils.log import get_logger
from celery.utils.nodenames import gethostname
from celery.utils.objects import mro_lookup
from celery.utils.saferepr import saferepr
from celery.utils.serialization import (get_pickleable_etype,
get_pickleable_exception,
get_pickled_exception)
# ## ---
# This is the heart of the worker, the inner loop so to speak.
# It used to be split up into nice little classes and methods,
# but in the end it only resulted in bad performance and horrible tracebacks,
# so instead we now use one closure per task class.
# pylint: disable=redefined-outer-name
# We cache globals and attribute lookups, so disable this warning.
# pylint: disable=broad-except
# We know what we're doing...
__all__ = (
'TraceInfo', 'build_tracer', 'trace_task',
'setup_worker_optimizations', 'reset_worker_optimizations',
)
logger = get_logger(__name__)
#: Format string used to log task success.
LOG_SUCCESS = """\
Task %(name)s[%(id)s] succeeded in %(runtime)ss: %(return_value)s\
"""
#: Format string used to log task failure.
LOG_FAILURE = """\
Task %(name)s[%(id)s] %(description)s: %(exc)s\
"""
#: Format string used to log task internal error.
LOG_INTERNAL_ERROR = """\
Task %(name)s[%(id)s] %(description)s: %(exc)s\
"""
#: Format string used to log task ignored.
LOG_IGNORED = """\
Task %(name)s[%(id)s] %(description)s\
"""
#: Format string used to log task rejected.
LOG_REJECTED = """\
Task %(name)s[%(id)s] %(exc)s\
"""
#: Format string used to log task retry.
LOG_RETRY = """\
Task %(name)s[%(id)s] retry: %(exc)s\
"""
log_policy_t = namedtuple(
'log_policy_t', ('format', 'description', 'severity', 'traceback', 'mail'),
)
log_policy_reject = log_policy_t(LOG_REJECTED, 'rejected', logging.WARN, 1, 1)
log_policy_ignore = log_policy_t(LOG_IGNORED, 'ignored', logging.INFO, 0, 0)
log_policy_internal = log_policy_t(
LOG_INTERNAL_ERROR, 'INTERNAL ERROR', logging.CRITICAL, 1, 1,
)
log_policy_expected = log_policy_t(
LOG_FAILURE, 'raised expected', logging.INFO, 0, 0,
)
log_policy_unexpected = log_policy_t(
LOG_FAILURE, 'raised unexpected', logging.ERROR, 1, 1,
)
send_prerun = signals.task_prerun.send
send_postrun = signals.task_postrun.send
send_success = signals.task_success.send
STARTED = states.STARTED
SUCCESS = states.SUCCESS
IGNORED = states.IGNORED
REJECTED = states.REJECTED
RETRY = states.RETRY
FAILURE = states.FAILURE
EXCEPTION_STATES = states.EXCEPTION_STATES
IGNORE_STATES = frozenset({IGNORED, RETRY, REJECTED})
#: set by :func:`setup_worker_optimizations`
_localized = []
_patched = {}
trace_ok_t = namedtuple('trace_ok_t', ('retval', 'info', 'runtime', 'retstr'))
def info(fmt, context):
"""Log 'fmt % context' with severity 'INFO'.
'context' is also passed in extra with key 'data' for custom handlers.
"""
logger.info(fmt, context, extra={'data': context})
def task_has_custom(task, attr):
"""Return true if the task overrides ``attr``."""
return mro_lookup(task.__class__, attr, stop={BaseTask, object},
monkey_patched=['celery.app.task'])
def get_log_policy(task, einfo, exc):
if isinstance(exc, Reject):
return log_policy_reject
elif isinstance(exc, Ignore):
return log_policy_ignore
elif einfo.internal:
return log_policy_internal
else:
if task.throws and isinstance(exc, task.throws):
return log_policy_expected
return log_policy_unexpected
def get_task_name(request, default):
"""Use 'shadow' in request for the task name if applicable."""
# request.shadow could be None or an empty string.
# If so, we should use default.
return getattr(request, 'shadow', None) or default
class TraceInfo(object):
"""Information about task execution."""
__slots__ = ('state', 'retval')
def __init__(self, state, retval=None):
self.state = state
self.retval = retval
def handle_error_state(self, task, req,
eager=False, call_errbacks=True):
store_errors = not eager
if task.ignore_result:
store_errors = task.store_errors_even_if_ignored
return {
RETRY: self.handle_retry,
FAILURE: self.handle_failure,
}[self.state](task, req,
store_errors=store_errors,
call_errbacks=call_errbacks)
def handle_reject(self, task, req, **kwargs):
self._log_error(task, req, ExceptionInfo())
def handle_ignore(self, task, req, **kwargs):
self._log_error(task, req, ExceptionInfo())
def handle_retry(self, task, req, store_errors=True, **kwargs):
"""Handle retry exception."""
# the exception raised is the Retry semi-predicate,
        # and its ``exc`` attribute is the original exception raised (if any).
type_, _, tb = sys.exc_info()
try:
reason = self.retval
einfo = ExceptionInfo((type_, reason, tb))
if store_errors:
task.backend.mark_as_retry(
req.id, reason.exc, einfo.traceback, request=req,
)
task.on_retry(reason.exc, req.id, req.args, req.kwargs, einfo)
signals.task_retry.send(sender=task, request=req,
reason=reason, einfo=einfo)
info(LOG_RETRY, {
'id': req.id,
'name': get_task_name(req, task.name),
'exc': text_t(reason),
})
return einfo
finally:
del tb
def handle_failure(self, task, req, store_errors=True, call_errbacks=True):
"""Handle exception."""
_, _, tb = sys.exc_info()
try:
exc = self.retval
# make sure we only send pickleable exceptions back to parent.
einfo = ExceptionInfo()
einfo.exception = get_pickleable_exception(einfo.exception)
einfo.type = get_pickleable_etype(einfo.type)
task.backend.mark_as_failure(
req.id, exc, einfo.traceback,
request=req, store_result=store_errors,
call_errbacks=call_errbacks,
)
task.on_failure(exc, req.id, req.args, req.kwargs, einfo)
signals.task_failure.send(sender=task, task_id=req.id,
exception=exc, args=req.args,
kwargs=req.kwargs,
traceback=tb,
einfo=einfo)
self._log_error(task, req, einfo)
return einfo
finally:
del tb
def _log_error(self, task, req, einfo):
eobj = einfo.exception = get_pickled_exception(einfo.exception)
exception, traceback, exc_info, sargs, skwargs = (
safe_repr(eobj),
safe_str(einfo.traceback),
einfo.exc_info,
safe_repr(req.args),
safe_repr(req.kwargs),
)
policy = get_log_policy(task, einfo, eobj)
context = {
'hostname': req.hostname,
'id': req.id,
'name': get_task_name(req, task.name),
'exc': exception,
'traceback': traceback,
'args': sargs,
'kwargs': skwargs,
'description': policy.description,
'internal': einfo.internal,
}
logger.log(policy.severity, policy.format.strip(), context,
exc_info=exc_info if policy.traceback else None,
extra={'data': context})
def build_tracer(name, task, loader=None, hostname=None, store_errors=True,
Info=TraceInfo, eager=False, propagate=False, app=None,
monotonic=monotonic, trace_ok_t=trace_ok_t,
IGNORE_STATES=IGNORE_STATES):
"""Return a function that traces task execution.
Catches all exceptions and updates result backend with the
state and result.
If the call was successful, it saves the result to the task result
backend, and sets the task status to `"SUCCESS"`.
If the call raises :exc:`~@Retry`, it extracts
the original exception, uses that as the result and sets the task state
to `"RETRY"`.
If the call results in an exception, it saves the exception as the task
result, and sets the task state to `"FAILURE"`.
Return a function that takes the following arguments:
:param uuid: The id of the task.
:param args: List of positional args to pass on to the function.
:param kwargs: Keyword arguments mapping to pass on to the function.
:keyword request: Request dict.
"""
# noqa: C901
# pylint: disable=too-many-statements
# If the task doesn't define a custom __call__ method
# we optimize it away by simply calling the run method directly,
# saving the extra method call and a line less in the stack trace.
fun = task if task_has_custom(task, '__call__') else task.run
loader = loader or app.loader
backend = task.backend
ignore_result = task.ignore_result
track_started = task.track_started
track_started = not eager and (task.track_started and not ignore_result)
publish_result = not eager and not ignore_result
hostname = hostname or gethostname()
loader_task_init = loader.on_task_init
loader_cleanup = loader.on_process_cleanup
task_on_success = None
task_after_return = None
if task_has_custom(task, 'on_success'):
task_on_success = task.on_success
if task_has_custom(task, 'after_return'):
task_after_return = task.after_return
store_result = backend.store_result
mark_as_done = backend.mark_as_done
backend_cleanup = backend.process_cleanup
pid = os.getpid()
request_stack = task.request_stack
push_request = request_stack.push
pop_request = request_stack.pop
push_task = _task_stack.push
pop_task = _task_stack.pop
_does_info = logger.isEnabledFor(logging.INFO)
resultrepr_maxsize = task.resultrepr_maxsize
prerun_receivers = signals.task_prerun.receivers
postrun_receivers = signals.task_postrun.receivers
success_receivers = signals.task_success.receivers
from celery import canvas
signature = canvas.maybe_signature # maybe_ does not clone if already
def on_error(request, exc, uuid, state=FAILURE, call_errbacks=True):
if propagate:
raise
I = Info(state, exc)
R = I.handle_error_state(
task, request, eager=eager, call_errbacks=call_errbacks,
)
return I, R, I.state, I.retval
def trace_task(uuid, args, kwargs, request=None):
# R - is the possibly prepared return value.
# I - is the Info object.
# T - runtime
# Rstr - textual representation of return value
# retval - is the always unmodified return value.
# state - is the resulting task state.
# This function is very long because we've unrolled all the calls
# for performance reasons, and because the function is so long
        # we want the main variables (I, and R) to stand out visually from
        # the rest of the variables, so breaking PEP8 is worth it ;)
R = I = T = Rstr = retval = state = None
task_request = None
time_start = monotonic()
try:
try:
kwargs.items
except AttributeError:
raise InvalidTaskError(
'Task keyword arguments is not a mapping')
push_task(task)
task_request = Context(request or {}, args=args,
called_directly=False, kwargs=kwargs)
root_id = task_request.root_id or uuid
push_request(task_request)
try:
# -*- PRE -*-
if prerun_receivers:
send_prerun(sender=task, task_id=uuid, task=task,
args=args, kwargs=kwargs)
loader_task_init(uuid, task)
if track_started:
store_result(
uuid, {'pid': pid, 'hostname': hostname}, STARTED,
request=task_request,
)
# -*- TRACE -*-
try:
R = retval = fun(*args, **kwargs)
state = SUCCESS
except Reject as exc:
I, R = Info(REJECTED, exc), ExceptionInfo(internal=True)
state, retval = I.state, I.retval
I.handle_reject(task, task_request)
except Ignore as exc:
I, R = Info(IGNORED, exc), ExceptionInfo(internal=True)
state, retval = I.state, I.retval
I.handle_ignore(task, task_request)
except Retry as exc:
I, R, state, retval = on_error(
task_request, exc, uuid, RETRY, call_errbacks=False)
except Exception as exc:
I, R, state, retval = on_error(task_request, exc, uuid)
except BaseException:
raise
else:
try:
# callback tasks must be applied before the result is
# stored, so that result.children is populated.
# groups are called inline and will store trail
# separately, so need to call them separately
# so that the trail's not added multiple times :(
# (Issue #1936)
callbacks = task.request.callbacks
if callbacks:
if len(task.request.callbacks) > 1:
sigs, groups = [], []
for sig in callbacks:
sig = signature(sig, app=app)
if isinstance(sig, group):
groups.append(sig)
else:
sigs.append(sig)
for group_ in groups:
group_.apply_async(
(retval,),
parent_id=uuid, root_id=root_id,
)
if sigs:
group(sigs, app=app).apply_async(
(retval,),
parent_id=uuid, root_id=root_id,
)
else:
signature(callbacks[0], app=app).apply_async(
(retval,), parent_id=uuid, root_id=root_id,
)
# execute first task in chain
chain = task_request.chain
if chain:
_chsig = signature(chain.pop(), app=app)
_chsig.apply_async(
(retval,), chain=chain,
parent_id=uuid, root_id=root_id,
)
mark_as_done(
uuid, retval, task_request, publish_result,
)
except EncodeError as exc:
I, R, state, retval = on_error(task_request, exc, uuid)
else:
Rstr = saferepr(R, resultrepr_maxsize)
T = monotonic() - time_start
if task_on_success:
task_on_success(retval, uuid, args, kwargs)
if success_receivers:
send_success(sender=task, result=retval)
if _does_info:
info(LOG_SUCCESS, {
'id': uuid,
'name': get_task_name(task_request, name),
'return_value': Rstr,
'runtime': T,
})
# -* POST *-
if state not in IGNORE_STATES:
if task_after_return:
task_after_return(
state, retval, uuid, args, kwargs, None,
)
finally:
try:
if postrun_receivers:
send_postrun(sender=task, task_id=uuid, task=task,
args=args, kwargs=kwargs,
retval=retval, state=state)
finally:
pop_task()
pop_request()
if not eager:
try:
backend_cleanup()
loader_cleanup()
except (KeyboardInterrupt, SystemExit, MemoryError):
raise
except Exception as exc:
logger.error('Process cleanup failed: %r', exc,
exc_info=True)
except MemoryError:
raise
except Exception as exc:
if eager:
raise
R = report_internal_error(task, exc)
if task_request is not None:
I, _, _, _ = on_error(task_request, exc, uuid)
return trace_ok_t(R, I, T, Rstr)
return trace_task
def trace_task(task, uuid, args, kwargs, request={}, **opts):
"""Trace task execution."""
try:
if task.__trace__ is None:
task.__trace__ = build_tracer(task.name, task, **opts)
return task.__trace__(uuid, args, kwargs, request)
except Exception as exc:
return trace_ok_t(report_internal_error(task, exc), None, 0.0, None)
def _trace_task_ret(name, uuid, request, body, content_type,
content_encoding, loads=loads_message, app=None,
**extra_request):
app = app or current_app._get_current_object()
embed = None
if content_type:
accept = prepare_accept_content(app.conf.accept_content)
args, kwargs, embed = loads(
body, content_type, content_encoding, accept=accept,
)
else:
args, kwargs, embed = body
hostname = gethostname()
request.update({
'args': args, 'kwargs': kwargs,
'hostname': hostname, 'is_eager': False,
}, **embed or {})
R, I, T, Rstr = trace_task(app.tasks[name],
uuid, args, kwargs, request, app=app)
return (1, R, T) if I else (0, Rstr, T)
trace_task_ret = _trace_task_ret # noqa: E305
def _fast_trace_task(task, uuid, request, body, content_type,
content_encoding, loads=loads_message, _loc=_localized,
hostname=None, **_):
embed = None
tasks, accept, hostname = _loc
if content_type:
args, kwargs, embed = loads(
body, content_type, content_encoding, accept=accept,
)
else:
args, kwargs, embed = body
request.update({
'args': args, 'kwargs': kwargs,
'hostname': hostname, 'is_eager': False,
}, **embed or {})
R, I, T, Rstr = tasks[task].__trace__(
uuid, args, kwargs, request,
)
return (1, R, T) if I else (0, Rstr, T)
def report_internal_error(task, exc):
_type, _value, _tb = sys.exc_info()
try:
_value = task.backend.prepare_exception(exc, 'pickle')
exc_info = ExceptionInfo((_type, _value, _tb), internal=True)
warn(RuntimeWarning(
'Exception raised outside body: {0!r}:\n{1}'.format(
exc, exc_info.traceback)))
return exc_info
finally:
del _tb
def setup_worker_optimizations(app, hostname=None):
"""Setup worker related optimizations."""
global trace_task_ret
hostname = hostname or gethostname()
    # make sure custom Task.__call__ methods that call super
    # won't mess up the request/task stack.
_install_stack_protection()
# all new threads start without a current app, so if an app is not
# passed on to the thread it will fall back to the "default app",
# which then could be the wrong app. So for the worker
# we set this to always return our app. This is a hack,
# and means that only a single app can be used for workers
# running in the same process.
app.set_current()
app.set_default()
# evaluate all task classes by finalizing the app.
app.finalize()
# set fast shortcut to task registry
_localized[:] = [
app._tasks,
prepare_accept_content(app.conf.accept_content),
hostname,
]
trace_task_ret = _fast_trace_task
from celery.worker import request as request_module
request_module.trace_task_ret = _fast_trace_task
request_module.__optimize__()
def reset_worker_optimizations():
"""Reset previously configured optimizations."""
global trace_task_ret
trace_task_ret = _trace_task_ret
try:
delattr(BaseTask, '_stackprotected')
except AttributeError:
pass
try:
BaseTask.__call__ = _patched.pop('BaseTask.__call__')
except KeyError:
pass
from celery.worker import request as request_module
request_module.trace_task_ret = _trace_task_ret
def _install_stack_protection():
# Patches BaseTask.__call__ in the worker to handle the edge case
# where people override it and also call super.
#
# - The worker optimizes away BaseTask.__call__ and instead
# calls task.run directly.
# - so with the addition of current_task and the request stack
# BaseTask.__call__ now pushes to those stacks so that
# they work when tasks are called directly.
#
# The worker only optimizes away __call__ in the case
# where it hasn't been overridden, so the request/task stack
# will blow if a custom task class defines __call__ and also
# calls super().
if not getattr(BaseTask, '_stackprotected', False):
_patched['BaseTask.__call__'] = orig = BaseTask.__call__
def __protected_call__(self, *args, **kwargs):
stack = self.request_stack
req = stack.top
if req and not req._protected and \
len(stack) == 1 and not req.called_directly:
req._protected = 1
return self.run(*args, **kwargs)
return orig(self, *args, **kwargs)
BaseTask.__call__ = __protected_call__
BaseTask._stackprotected = True
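
# --- Editor's sketch (not part of the celery source): how get_log_policy()
# above dispatches. Reject/Ignore get their dedicated policies, internal
# errors log as CRITICAL, exceptions listed in Task.throws are "expected"
# (INFO, no traceback), and anything else is an unexpected ERROR.
def _demo_log_policy():
    class _Einfo(object):
        internal = False
    class _Task(object):
        throws = (KeyError,)
    assert get_log_policy(_Task(), _Einfo(), Reject()) is log_policy_reject
    assert get_log_policy(_Task(), _Einfo(), KeyError()) is log_policy_expected
    assert get_log_policy(_Task(), _Einfo(), ValueError()) is log_policy_unexpected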
|
[
"noreply@github.com"
] |
TinlokLee.noreply@github.com
|
7e1ae2a09303d86c91821f6b1b682f862e1d061d
|
d42955505163519b01f8abdca398434921269f24
|
/accounts/migrations/0003_auto_20170905_0746.py
|
42905c0e43e4a7a0242d89c02a2ff9a87a7ee85e
|
[] |
no_license
|
k-murata08/tweet_analysis_app
|
fde1b780b8177988875542288d8ee303cb47e637
|
3fd227779e6497e51541220d10e6fb534ad0600b
|
refs/heads/master
| 2022-12-12T01:06:15.336412
| 2017-09-15T07:56:35
| 2017-09-15T07:56:35
| 102,452,623
| 1
| 0
| null | 2022-12-08T00:38:18
| 2017-09-05T08:04:25
|
CSS
|
UTF-8
|
Python
| false
| false
| 389
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-09-05 07:46
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('accounts', '0002_oauthkey'),
]
operations = [
migrations.RenameModel(
old_name='OauthKey',
new_name='OathKey',
),
]
|
[
"murata.kazuma@donuts.ne.jp"
] |
murata.kazuma@donuts.ne.jp
|
20f6e72dddae300aa3184c98ab23b84680089df2
|
8fb5d9c56e10099d4f0a2c6032d241f30c3adc36
|
/0x01-python-if_else_loops_functions/7-islower.py
|
0172706a6aa9f95b25ad98201c9ceeee557debe0
|
[] |
no_license
|
kbetoacero/holbertonschool-higher_level_programming
|
c58a8dfd5cc8995333753f13feefb30ec6c938a7
|
7fc926a16b2a3cf8f9d46868eda73f92f4cafe51
|
refs/heads/master
| 2023-06-14T18:00:19.579279
| 2021-07-14T00:22:43
| 2021-07-14T00:22:43
| 291,780,520
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 129
|
py
|
#!/usr/bin/python3
def islower(c):
if ord(c) >= 97 and ord(c) <= 122:
return True
else:
return False
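
# Editor's note (not part of the original exercise): quick checks of the
# ASCII range test above; ord('a') == 97 and ord('z') == 122.
if __name__ == "__main__":
    assert islower("a") and islower("z")
    assert not islower("A") and not islower("5")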
|
[
"carlos.acero@holbertonschool.com"
] |
carlos.acero@holbertonschool.com
|
be5c87c45174526f8ada272a4e3a067a2cc9c0ff
|
fc2619a1ff0d829d6c5ceaf29658d2252d9a72f8
|
/src/basics/args-order.py
|
a3aaf3022015fc0e0863422a1dcd24b31223faf7
|
[] |
no_license
|
IamManchanda/functional-python
|
c0cc2160e609aa2a2bd23dc8f213cc1c9eb88aa2
|
af6c9f31e18adc22baddff27530fbb0099e2bf1f
|
refs/heads/master
| 2020-05-24T20:58:41.129650
| 2019-07-08T08:49:43
| 2019-07-08T08:49:43
| 187,466,393
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 188
|
py
|
""" Python Functions """
def display_info(*args, instructor="Harry", **kwargs):
return [args, instructor, kwargs]
print( display_info(1, 2, 3, last_name="Manchanda", job="Engineer") )
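# Editor's note (not part of the original file): in the call above, *args
# collects the extra positionals, instructor keeps its default because it is
# not passed by keyword, and **kwargs collects the remaining keyword
# arguments, so it prints [(1, 2, 3), 'Harry', {'last_name': 'Manchanda', 'job': 'Engineer'}].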
|
[
"harmanmanchanda182@gmail.com"
] |
harmanmanchanda182@gmail.com
|
db6ea7991721033afb5df26c7653df2e61182af7
|
0377c6a9b4d692845aa58e98537ff4861473af08
|
/lambda.py
|
d920bcd90fcb1cf9c158030033a606ac46e8507c
|
[] |
no_license
|
ferryvanmaurik/rekognition
|
6dbd78d939817b438172b0438f8d0ad8cd649035
|
f2e6719c19355f02e01d527d7aa69123c63ed148
|
refs/heads/master
| 2021-08-31T14:20:46.443586
| 2017-12-21T16:53:14
| 2017-12-21T16:53:14
| 114,996,638
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,064
|
py
|
from __future__ import print_function
import boto3
from decimal import Decimal
import json
import urllib
print('Loading function')
rekognition = boto3.client('rekognition')
# --------------- Helper Functions to call Rekognition APIs ------------------
def detect_faces(bucket, key):
response = rekognition.detect_faces(Image={"S3Object": {"Bucket": bucket, "Name": key}})
return response
def detect_labels(bucket, key):
response = rekognition.detect_labels(Image={"S3Object": {"Bucket": bucket, "Name": key}})
# Sample code to write response to DynamoDB table 'MyTable' with 'PK' as Primary Key.
# Note: role used for executing this Lambda function should have write access to the table.
#table = boto3.resource('dynamodb').Table('MyTable')
#labels = [{'Confidence': Decimal(str(label_prediction['Confidence'])), 'Name': label_prediction['Name']} for label_prediction in response['Labels']]
#table.put_item(Item={'PK': key, 'Labels': labels})
return response
def index_faces(bucket, key):
    # Note: Collection has to be created upfront. Use CreateCollection API to create a collection.
#rekognition.create_collection(CollectionId='BLUEPRINT_COLLECTION')
response = rekognition.index_faces(Image={"S3Object": {"Bucket": bucket, "Name": key}}, CollectionId="BLUEPRINT_COLLECTION")
return response
# --------------- Main handler ------------------
def lambda_handler(event, context):
'''Demonstrates S3 trigger that uses
Rekognition APIs to detect faces, labels and index faces in S3 Object.
'''
#print("Received event: " + json.dumps(event, indent=2))
# Get the object from the event
bucket = event['Records'][0]['s3']['bucket']['name']
key = urllib.unquote_plus(event['Records'][0]['s3']['object']['key'].encode('utf8'))
try:
# Calls rekognition DetectFaces API to detect faces in S3 object
response = detect_faces(bucket, key)
# Calls rekognition DetectLabels API to detect labels in S3 object
        #response = detect_labels(bucket, key)
        return response
    except Exception as e:
        # NOTE: the original file is truncated at this point; this minimal
        # handler is an editor's reconstruction in the style of the AWS
        # Lambda S3 blueprints, not the author's verbatim code.
        print(e)
        print("Error processing object {} from bucket {}.".format(key, bucket))
        raise e
|
[
"ferry.van.maurik@nike.com"
] |
ferry.van.maurik@nike.com
|
8e9f34d3aeef20f60dcd9993fa14cb2d81573f58
|
800f0b03a69f965cc910ebcbb7ddfacf7be70182
|
/dbase.py
|
db6e1a8b66712fee9184d1f43e03796a4e04e0f2
|
[] |
no_license
|
hellorobo/EBookOfTheDayAPI
|
db33bca8face40bb381f492d485413bc1703c653
|
a43c1b1d8428718a2d6d5ec948455923ed9d8bd2
|
refs/heads/master
| 2020-03-08T06:22:02.657980
| 2018-06-20T21:39:25
| 2018-06-20T21:39:25
| 127,970,217
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 601
|
py
|
import pymongo
class Database():
def __init__(self, connection, database, collection):
self.dbcol = database+"/"+collection
self.connection = pymongo.MongoClient(connection)
self.db = self.connection[database]
self.col = self.db[collection]
print("opened new connection to "+self.dbcol)
def find_record(self, json):
return self.col.find_one(json)
def insert_record(self, json):
return self.col.insert_one(json)
def __del__(self):
print("closing "+self.dbcol+ " connection")
return self.connection.close()
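
# --- Editor's usage sketch (not part of the original file). The connection
# string, database, and collection names below are placeholders.
if __name__ == "__main__":
    db = Database("mongodb://localhost:27017", "ebooks", "books")
    db.insert_record({"title": "Example Book", "day": "2018-06-20"})
    print(db.find_record({"day": "2018-06-20"}))
    del db  # __del__ prints a message and closes the connection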
|
[
"trebor.oknewokaj@gmail.com"
] |
trebor.oknewokaj@gmail.com
|
275dd07d0b26c066736a0b3493b3cbdb2251b1be
|
5a5de156afd9e42cafdf17905a023a37b43b9191
|
/hw session 4/ex1.py
|
9e98e5f38c55552ee4e924470231cc50c6de7ff5
|
[] |
no_license
|
damvip12/duonganhminh-fundamentals-c4e23
|
f240d81e3bbf86f888fcf367d2431c87b398e203
|
157b926b993a8a34c39b7aa81e56328befe59f40
|
refs/heads/master
| 2020-04-02T18:26:47.741271
| 2018-11-26T10:24:05
| 2018-11-26T10:24:05
| 154,700,421
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 865
|
py
|
flock = [5 , 7 , 300 , 90 , 24 , 50 , 75]
print("Hello , my name is Hiep and these are my sheep sizes :" )
print(*flock , sep =", ")
print("Now my biggest sheep has size :", max(flock) , "lets shear it")
s = 8
i = flock.index(max(flock))
flock[i] = s
print("After shearing , here is my flock :" )
print(*flock , sep =", ")
print("MONTH 1:")
increased_flock = [x+50 for x in flock]
print("One month has passed , here is my flock :")
print(*increased_flock, sep =", ")
print("Now my biggest sheep has size :", max(increased_flock) , "lets shear it")
i = increased_flock.index(max(increased_flock))
increased_flock[i] = s
print("After shearing , here is my flock :" )
print(*increased_flock , sep =", ")
summary = sum(increased_flock)
print("My flock has size in total: ", summary)
print("I would get ",summary ," *2$ = ", summary*2 ,"$")
|
[
"noreply@github.com"
] |
damvip12.noreply@github.com
|
fb8adb5e534ce40c5a72575a471c00cca53470c5
|
6ad044e7538c8b87f3992949ad0b0294863fca26
|
/boardapp/views.py
|
e35f6db56cf5d163396ea2dbeae05791900386e7
|
[] |
no_license
|
kawaguchi1102/boardproject
|
195db4b14b1baefd80c4df17111b83f0d5ae1871
|
879066806e7fc62576ba2fedae206606ecd187c9
|
refs/heads/master
| 2020-06-24T04:29:29.670770
| 2019-07-28T15:28:54
| 2019-07-28T15:28:54
| 198,849,580
| 0
| 0
| null | 2019-07-28T15:28:55
| 2019-07-25T14:44:16
|
Python
|
UTF-8
|
Python
| false
| false
| 1,208
|
py
|
from django.shortcuts import render, redirect
from django.contrib.auth import authenticate, login
from django.contrib.auth.models import User
def signupfunc(request):
if request.method == 'POST':
username_post = request.POST['username']
password_post = request.POST['password']
        try:
            User.objects.get(username=username_post)
            # error message: "このユーザーは登録されています" ("this user is already registered")
            return render(request, 'signup.html', {'error': 'このユーザーは登録されています'})
        except User.DoesNotExist:
            # Catch only the "not found" case instead of a bare except, so
            # unrelated errors are not silently swallowed.
            User.objects.create_user(username_post, 'mail@example.com', password_post)
            return render(request, 'signup.html', {'some': 100})
return render(request, 'signup.html', {'some': 100})
def loginfunc(request):
if request.method == 'POST':
username_post = request.POST['username']
password_post = request.POST['password']
user = authenticate(request, username=username_post, password=password_post)
if user is not None:
login(request, user)
return redirect('signup')
else:
return redirect('login')
return render(request, 'login.html')
def listfunc(request):
return render(request, 'list.html')
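
# --- Editor's sketch (hypothetical, not part of this project): a urls.py
# wiring that matches the URL names 'signup' and 'login' that redirect()
# uses above. Commented out because it belongs in a separate module.
# from django.urls import path
# from boardapp import views
#
# urlpatterns = [
#     path('signup/', views.signupfunc, name='signup'),
#     path('login/', views.loginfunc, name='login'),
#     path('list/', views.listfunc, name='list'),
# ]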
|
[
"info@kawaguchi-design.com"
] |
info@kawaguchi-design.com
|
bf1a5d4fbc64933eaee40dc208a104c8b60186a8
|
af6beab37b0b5f2de788e56c1738ecbb11d0f815
|
/build/costmap_2d/cmake/costmap_2d-genmsg-context.py
|
e3c5ad180e2880c5b08917ca486b22b0ba12f41b
|
[] |
no_license
|
ThanhTVBK61/ROS_Navigation
|
37bfaf8fd5965c8f0c8612162ffd489d92cace81
|
66e9764058ac405caf370fa53558958f9d5ae445
|
refs/heads/master
| 2020-12-15T02:46:03.690907
| 2020-01-21T19:38:46
| 2020-01-21T19:38:46
| 234,965,124
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 894
|
py
|
# generated from genmsg/cmake/pkg-genmsg.context.in
messages_str = "/home/tranthanh/cafebot_ws/src/costmap_2d/msg/VoxelGrid.msg"
services_str = ""
pkg_name = "costmap_2d"
dependencies_str = "std_msgs;geometry_msgs;map_msgs"
langs = "gencpp;geneus;genlisp;gennodejs;genpy"
dep_include_paths_str = "costmap_2d;/home/tranthanh/cafebot_ws/src/costmap_2d/msg;std_msgs;/opt/ros/kinetic/share/std_msgs/cmake/../msg;geometry_msgs;/opt/ros/kinetic/share/geometry_msgs/cmake/../msg;map_msgs;/opt/ros/kinetic/share/map_msgs/cmake/../msg;sensor_msgs;/opt/ros/kinetic/share/sensor_msgs/cmake/../msg;nav_msgs;/opt/ros/kinetic/share/nav_msgs/cmake/../msg;actionlib_msgs;/opt/ros/kinetic/share/actionlib_msgs/cmake/../msg"
PYTHON_EXECUTABLE = "/usr/bin/python"
package_has_static_sources = '' == 'TRUE'
genmsg_check_deps_script = "/opt/ros/kinetic/share/genmsg/cmake/../../../lib/genmsg/genmsg_check_deps.py"
|
[
"thanh.tvbk61@gmail.com"
] |
thanh.tvbk61@gmail.com
|
af367f7a5eab4747919a59bbd0e0b2d6adf354b6
|
124a2f096adb7f9b0a67311ee55a1191c1b1c7eb
|
/trader/migrations/0037_auto_20210817_1535.py
|
a2daa564019c3bd76c81044505fa65a2ada1b383
|
[] |
no_license
|
webclinic017/bot-3
|
9913cc785fd5245638a4c811fec8ae0c2dc90a07
|
7c856d237d91e9fa8db8960be2e4d57faa5227bd
|
refs/heads/main
| 2023-08-11T05:34:25.174756
| 2021-09-26T08:13:41
| 2021-09-26T08:13:41
| 426,765,932
| 1
| 0
| null | 2021-11-10T20:24:27
| 2021-11-10T20:24:27
| null |
UTF-8
|
Python
| false
| false
| 577
|
py
|
# Generated by Django 2.2.24 on 2021-08-17 12:35
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('trader', '0036_auto_20210817_1509'),
]
operations = [
migrations.RemoveField(
model_name='variants',
name='amount_perc',
),
migrations.AddField(
model_name='variants',
name='deal_perc',
            # verbose_name: "Процент от баланса на сделку" ("percentage of balance per deal")
            field=models.FloatField(blank=True, null=True, verbose_name='Процент от баланса на сделку'),
),
]
|
[
""
] | |
574923447ef569066205755da06eda7b200eb558
|
6b6e20004b46165595f35b5789e7426d5289ea48
|
/config_app/config_util/config/TransientDirectoryProvider.py
|
5ac6855924fc6b2dd302f1be9d0fe223bee02639
|
[
"Apache-2.0"
] |
permissive
|
anwarchk/quay
|
2a83d0ab65aff6a1120fbf3a45dd72f42211633b
|
23c5120790c619174e7d36784ca5aab7f4eece5c
|
refs/heads/master
| 2020-09-12T18:53:21.093606
| 2019-11-15T19:29:02
| 2019-11-15T19:29:02
| 222,517,145
| 0
| 0
|
Apache-2.0
| 2019-11-18T18:32:35
| 2019-11-18T18:32:35
| null |
UTF-8
|
Python
| false
| false
| 2,233
|
py
|
import os
from shutil import copytree
from backports.tempfile import TemporaryDirectory
from config_app.config_util.config.fileprovider import FileConfigProvider
OLD_CONFIG_SUBDIR = 'old/'
class TransientDirectoryProvider(FileConfigProvider):
""" Implementation of the config provider that reads and writes the data
from/to the file system, only using temporary directories,
deleting old dirs and creating new ones as requested.
"""
def __init__(self, config_volume, yaml_filename, py_filename):
# Create a temp directory that will be cleaned up when we change the config path
# This should ensure we have no "pollution" of different configs:
# no uploaded config should ever affect subsequent config modifications/creations
temp_dir = TemporaryDirectory()
self.temp_dir = temp_dir
self.old_config_dir = None
super(TransientDirectoryProvider, self).__init__(temp_dir.name, yaml_filename, py_filename)
@property
def provider_id(self):
return 'transient'
def new_config_dir(self):
"""
Update the path with a new temporary directory, deleting the old one in the process
"""
self.temp_dir.cleanup()
temp_dir = TemporaryDirectory()
self.config_volume = temp_dir.name
self.temp_dir = temp_dir
self.yaml_path = os.path.join(temp_dir.name, self.yaml_filename)
def create_copy_of_config_dir(self):
"""
Create a directory to store loaded/populated configuration (for rollback if necessary)
"""
if self.old_config_dir is not None:
self.old_config_dir.cleanup()
temp_dir = TemporaryDirectory()
self.old_config_dir = temp_dir
# Python 2.7's shutil.copy() doesn't allow for copying to existing directories,
# so when copying/reading to the old saved config, we have to talk to a subdirectory,
# and use the shutil.copytree() function
copytree(self.config_volume, os.path.join(temp_dir.name, OLD_CONFIG_SUBDIR))
def get_config_dir_path(self):
return self.config_volume
def get_old_config_dir(self):
if self.old_config_dir is None:
            raise Exception('Cannot return an old configuration: no old configuration was saved')
return os.path.join(self.old_config_dir.name, OLD_CONFIG_SUBDIR)
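
# --- Editor's usage sketch (not part of the quay source): the intended
# lifecycle of the provider above. Assumes FileConfigProvider's constructor
# does not require the config files to already exist on disk.
if __name__ == '__main__':
    provider = TransientDirectoryProvider('/unused', 'config.yaml', 'config.py')
    with open(os.path.join(provider.get_config_dir_path(), 'config.yaml'), 'w') as f:
        f.write('SETUP_COMPLETE: false\n')
    provider.create_copy_of_config_dir()  # snapshot current config under old/
    provider.new_config_dir()             # swap to a fresh, empty temp dir
    print(provider.get_old_config_dir())  # rollback snapshot location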
|
[
"jimmy.zelinskie+git@gmail.com"
] |
jimmy.zelinskie+git@gmail.com
|
2a59d24c2f7b50a7d34d28be79e4088ca29e9dce
|
3a2029810870989258ecd3394274f7ae978f2113
|
/resnet-in-tensorflow/old/cifar10_train.py
|
b1f30379198cdc566d5fb6df3a7a75a6c48aa1ce
|
[] |
no_license
|
JudyMRSD/Tensor_Flow_VLR
|
b11eb27ea1409e730a9297ac9302d23f6f4b4695
|
3f57e1437e2d38351feb316b10248b8f6a333145
|
refs/heads/master
| 2021-01-18T17:59:56.723685
| 2017-04-17T04:58:20
| 2017-04-17T04:58:20
| 86,833,350
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 19,360
|
py
|
# Coder: Wenxin Xu
# Github: https://github.com/wenxinxu/resnet_in_tensorflow
# ==============================================================================
from resnet import *
from datetime import datetime
import time
from cifar10_input import *
import pandas as pd
class Train(object):
'''
This Object is responsible for all the training and validation process
'''
def __init__(self):
# Set up all the placeholders
self.placeholders()
def placeholders(self):
'''
        There are five placeholders in total.
        image_placeholder and label_placeholder are for train images and labels
        vali_image_placeholder and vali_label_placeholder are for validation images and labels
        lr_placeholder is for the learning rate. Feeding the learning rate in at each
        training step makes it easy to implement learning rate decay
'''
self.image_placeholder = tf.placeholder(dtype=tf.float32,
shape=[FLAGS.train_batch_size, IMG_HEIGHT,
IMG_WIDTH, IMG_DEPTH])
self.label_placeholder = tf.placeholder(dtype=tf.int32, shape=[FLAGS.train_batch_size])
self.vali_image_placeholder = tf.placeholder(dtype=tf.float32, shape=[FLAGS.validation_batch_size,
IMG_HEIGHT, IMG_WIDTH, IMG_DEPTH])
self.vali_label_placeholder = tf.placeholder(dtype=tf.int32, shape=[FLAGS.validation_batch_size])
self.lr_placeholder = tf.placeholder(dtype=tf.float32, shape=[])
def build_train_validation_graph(self):
'''
This function builds the train graph and validation graph at the same time.
'''
global_step = tf.Variable(0, trainable=False)
validation_step = tf.Variable(0, trainable=False)
        # Logits of training data and validation data come from the same graph. The inference of
        # validation data shares all the weights with train data. This is implemented by passing
        # reuse=True to the variable scopes of the train graph
logits = inference(self.image_placeholder, FLAGS.num_residual_blocks, reuse=False)
vali_logits = inference(self.vali_image_placeholder, FLAGS.num_residual_blocks, reuse=True)
        # The following code calculates the train loss, which consists of the
        # softmax cross entropy and the regularization loss
regu_losses = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)
loss = self.loss(logits, self.label_placeholder)
self.full_loss = tf.add_n([loss] + regu_losses)
predictions = tf.nn.softmax(logits)
self.train_top1_error = self.top_k_error(predictions, self.label_placeholder, 1)
# Validation loss
self.vali_loss = self.loss(vali_logits, self.vali_label_placeholder)
vali_predictions = tf.nn.softmax(vali_logits)
self.vali_top1_error = self.top_k_error(vali_predictions, self.vali_label_placeholder, 1)
self.train_op, self.train_ema_op = self.train_operation(global_step, self.full_loss,
self.train_top1_error)
self.val_op = self.validation_op(validation_step, self.vali_top1_error, self.vali_loss)
def train(self):
'''
This is the main function for training
'''
# For the first step, we are loading all training images and validation images into the
# memory
all_data, all_labels = prepare_train_data(padding_size=FLAGS.padding_size)
vali_data, vali_labels = read_validation_data()
# Build the graph for train and validation
self.build_train_validation_graph()
# Initialize a saver to save checkpoints. Merge all summaries, so we can run all
# summarizing operations by running summary_op. Initialize a new session
saver = tf.train.Saver(tf.all_variables())
summary_op = tf.merge_all_summaries()
init = tf.initialize_all_variables()
sess = tf.Session()
# If you want to load from a checkpoint
if FLAGS.is_use_ckpt is True:
saver.restore(sess, FLAGS.ckpt_path)
print 'Restored from checkpoint...'
else:
sess.run(init)
# This summary writer object helps write summaries on tensorboard
summary_writer = tf.train.SummaryWriter(train_dir, sess.graph)
# These lists are used to save a csv file at last
step_list = []
train_error_list = []
val_error_list = []
print 'Start training...'
print '----------------------------'
for step in xrange(FLAGS.train_steps):
train_batch_data, train_batch_labels = self.generate_augment_train_batch(all_data, all_labels,
FLAGS.train_batch_size)
validation_batch_data, validation_batch_labels = self.generate_vali_batch(vali_data,
vali_labels, FLAGS.validation_batch_size)
# Want to validate once before training. You may check the theoretical validation
# loss first
if step % FLAGS.report_freq == 0:
if FLAGS.is_full_validation is True:
validation_loss_value, validation_error_value = self.full_validation(loss=self.vali_loss,
top1_error=self.vali_top1_error, vali_data=vali_data,
vali_labels=vali_labels, session=sess,
batch_data=train_batch_data, batch_label=train_batch_labels)
vali_summ = tf.Summary()
vali_summ.value.add(tag='full_validation_error',
simple_value=validation_error_value.astype(np.float))
summary_writer.add_summary(vali_summ, step)
summary_writer.flush()
else:
_, validation_error_value, validation_loss_value = sess.run([self.val_op,
self.vali_top1_error,
self.vali_loss],
{self.image_placeholder: train_batch_data,
self.label_placeholder: train_batch_labels,
self.vali_image_placeholder: validation_batch_data,
self.vali_label_placeholder: validation_batch_labels,
self.lr_placeholder: FLAGS.init_lr})
val_error_list.append(validation_error_value)
start_time = time.time()
_, _, train_loss_value, train_error_value = sess.run([self.train_op, self.train_ema_op,
self.full_loss, self.train_top1_error],
{self.image_placeholder: train_batch_data,
self.label_placeholder: train_batch_labels,
self.vali_image_placeholder: validation_batch_data,
self.vali_label_placeholder: validation_batch_labels,
self.lr_placeholder: FLAGS.init_lr})
duration = time.time() - start_time
if step % FLAGS.report_freq == 0:
summary_str = sess.run(summary_op, {self.image_placeholder: train_batch_data,
self.label_placeholder: train_batch_labels,
self.vali_image_placeholder: validation_batch_data,
self.vali_label_placeholder: validation_batch_labels,
self.lr_placeholder: FLAGS.init_lr})
summary_writer.add_summary(summary_str, step)
num_examples_per_step = FLAGS.train_batch_size
examples_per_sec = num_examples_per_step / duration
sec_per_batch = float(duration)
format_str = ('%s: step %d, loss = %.4f (%.1f examples/sec; %.3f ' 'sec/batch)')
print format_str % (datetime.now(), step, train_loss_value, examples_per_sec,
sec_per_batch)
print 'Train top1 error = ', train_error_value
print 'Validation top1 error = %.4f' % validation_error_value
print 'Validation loss = ', validation_loss_value
print '----------------------------'
step_list.append(step)
train_error_list.append(train_error_value)
if step == FLAGS.decay_step0 or step == FLAGS.decay_step1:
FLAGS.init_lr = 0.1 * FLAGS.init_lr
print 'Learning rate decayed to ', FLAGS.init_lr
# Save checkpoints every 10000 steps
if step % 10000 == 0 or (step + 1) == FLAGS.train_steps:
checkpoint_path = os.path.join(train_dir, 'model.ckpt')
saver.save(sess, checkpoint_path, global_step=step)
df = pd.DataFrame(data={'step':step_list, 'train_error':train_error_list,
'validation_error': val_error_list})
df.to_csv(train_dir + FLAGS.version + '_error.csv')
def test(self, test_image_array):
'''
        This function is used to evaluate the test data. Please finish pre-processing in advance
:param test_image_array: 4D numpy array with shape [num_test_images, img_height, img_width,
img_depth]
:return: the softmax probability with shape [num_test_images, num_labels]
'''
num_test_images = len(test_image_array)
num_batches = num_test_images // FLAGS.test_batch_size
remain_images = num_test_images % FLAGS.test_batch_size
print '%i test batches in total...' %num_batches
# Create the test image and labels placeholders
self.test_image_placeholder = tf.placeholder(dtype=tf.float32, shape=[FLAGS.test_batch_size,
IMG_HEIGHT, IMG_WIDTH, IMG_DEPTH])
# Build the test graph
logits = inference(self.test_image_placeholder, FLAGS.num_residual_blocks, reuse=False)
predictions = tf.nn.softmax(logits)
# Initialize a new session and restore a checkpoint
saver = tf.train.Saver(tf.all_variables())
sess = tf.Session()
saver.restore(sess, FLAGS.test_ckpt_path)
print 'Model restored from ', FLAGS.test_ckpt_path
prediction_array = np.array([]).reshape(-1, NUM_CLASS)
# Test by batches
for step in range(num_batches):
if step % 10 == 0:
print '%i batches finished!' %step
offset = step * FLAGS.test_batch_size
test_image_batch = test_image_array[offset:offset+FLAGS.test_batch_size, ...]
batch_prediction_array = sess.run(predictions,
feed_dict={self.test_image_placeholder: test_image_batch})
prediction_array = np.concatenate((prediction_array, batch_prediction_array))
# If test_batch_size is not a divisor of num_test_images
if remain_images != 0:
self.test_image_placeholder = tf.placeholder(dtype=tf.float32, shape=[remain_images,
IMG_HEIGHT, IMG_WIDTH, IMG_DEPTH])
# Build the test graph
logits = inference(self.test_image_placeholder, FLAGS.num_residual_blocks, reuse=True)
predictions = tf.nn.softmax(logits)
test_image_batch = test_image_array[-remain_images:, ...]
batch_prediction_array = sess.run(predictions, feed_dict={
self.test_image_placeholder: test_image_batch})
prediction_array = np.concatenate((prediction_array, batch_prediction_array))
return prediction_array
## Helper functions
def loss(self, logits, labels):
'''
Calculate the cross entropy loss given logits and true labels
:param logits: 2D tensor with shape [batch_size, num_labels]
:param labels: 1D tensor with shape [batch_size]
:return: loss tensor with shape [1]
'''
labels = tf.cast(labels, tf.int64)
cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(logits, labels, name='cross_entropy_per_example')
cross_entropy_mean = tf.reduce_mean(cross_entropy, name='cross_entropy')
return cross_entropy_mean
def top_k_error(self, predictions, labels, k):
'''
Calculate the top-k error
:param predictions: 2D tensor with shape [batch_size, num_labels]
:param labels: 1D tensor with shape [batch_size, 1]
:param k: int
:return: tensor with shape [1]
'''
batch_size = predictions.get_shape().as_list()[0]
        in_top1 = tf.to_float(tf.nn.in_top_k(predictions, labels, k=k))
num_correct = tf.reduce_sum(in_top1)
return (batch_size - num_correct) / float(batch_size)
def generate_vali_batch(self, vali_data, vali_label, vali_batch_size):
'''
If you want to use a random batch of validation data to validate instead of using the
whole validation data, this function helps you generate that batch
:param vali_data: 4D numpy array
:param vali_label: 1D numpy array
:param vali_batch_size: int
:return: 4D numpy array and 1D numpy array
'''
offset = np.random.choice(10000 - vali_batch_size, 1)[0]
vali_data_batch = vali_data[offset:offset+vali_batch_size, ...]
vali_label_batch = vali_label[offset:offset+vali_batch_size]
return vali_data_batch, vali_label_batch
def generate_augment_train_batch(self, train_data, train_labels, train_batch_size):
'''
This function helps generate a batch of train data, and random crop, horizontally flip
and whiten them at the same time
:param train_data: 4D numpy array
:param train_labels: 1D numpy array
:param train_batch_size: int
:return: augmented train batch data and labels. 4D numpy array and 1D numpy array
'''
offset = np.random.choice(EPOCH_SIZE - train_batch_size, 1)[0]
batch_data = train_data[offset:offset+train_batch_size, ...]
batch_data = random_crop_and_flip(batch_data, padding_size=FLAGS.padding_size)
batch_data = whitening_image(batch_data)
batch_label = train_labels[offset:offset+FLAGS.train_batch_size]
return batch_data, batch_label
def train_operation(self, global_step, total_loss, top1_error):
'''
Defines train operations
:param global_step: tensor variable with shape [1]
:param total_loss: tensor with shape [1]
:param top1_error: tensor with shape [1]
:return: two operations. Running train_op will do optimization once. Running train_ema_op
will generate the moving average of train error and train loss for tensorboard
'''
# Add train_loss, current learning rate and train error into the tensorboard summary ops
tf.scalar_summary('learning_rate', self.lr_placeholder)
tf.scalar_summary('train_loss', total_loss)
tf.scalar_summary('train_top1_error', top1_error)
        # The ema object helps calculate the moving average of train loss and train error
ema = tf.train.ExponentialMovingAverage(FLAGS.train_ema_decay, global_step)
train_ema_op = ema.apply([total_loss, top1_error])
tf.scalar_summary('train_top1_error_avg', ema.average(top1_error))
tf.scalar_summary('train_loss_avg', ema.average(total_loss))
opt = tf.train.MomentumOptimizer(learning_rate=self.lr_placeholder, momentum=0.9)
train_op = opt.minimize(total_loss, global_step=global_step)
return train_op, train_ema_op
def validation_op(self, validation_step, top1_error, loss):
'''
Defines validation operations
:param validation_step: tensor with shape [1]
:param top1_error: tensor with shape [1]
:param loss: tensor with shape [1]
:return: validation operation
'''
        # This ema object helps calculate the moving average of validation loss and error
# ema with decay = 0.0 won't average things at all. This returns the original error
ema = tf.train.ExponentialMovingAverage(0.0, validation_step)
ema2 = tf.train.ExponentialMovingAverage(0.95, validation_step)
val_op = tf.group(validation_step.assign_add(1), ema.apply([top1_error, loss]),
ema2.apply([top1_error, loss]))
top1_error_val = ema.average(top1_error)
top1_error_avg = ema2.average(top1_error)
loss_val = ema.average(loss)
loss_val_avg = ema2.average(loss)
# Summarize these values on tensorboard
tf.scalar_summary('val_top1_error', top1_error_val)
tf.scalar_summary('val_top1_error_avg', top1_error_avg)
tf.scalar_summary('val_loss', loss_val)
tf.scalar_summary('val_loss_avg', loss_val_avg)
return val_op
def full_validation(self, loss, top1_error, session, vali_data, vali_labels, batch_data,
batch_label):
'''
        Runs validation on all the 10000 validation images
:param loss: tensor with shape [1]
:param top1_error: tensor with shape [1]
:param session: the current tensorflow session
:param vali_data: 4D numpy array
:param vali_labels: 1D numpy array
:param batch_data: 4D numpy array. training batch to feed dict and fetch the weights
:param batch_label: 1D numpy array. training labels to feed the dict
:return: float, float
'''
num_batches = 10000 // FLAGS.validation_batch_size
order = np.random.choice(10000, num_batches * FLAGS.validation_batch_size)
vali_data_subset = vali_data[order, ...]
vali_labels_subset = vali_labels[order]
loss_list = []
error_list = []
for step in range(num_batches):
offset = step * FLAGS.validation_batch_size
feed_dict = {self.image_placeholder: batch_data, self.label_placeholder: batch_label,
self.vali_image_placeholder: vali_data_subset[offset:offset+FLAGS.validation_batch_size, ...],
self.vali_label_placeholder: vali_labels_subset[offset:offset+FLAGS.validation_batch_size],
self.lr_placeholder: FLAGS.init_lr}
loss_value, top1_error_value = session.run([loss, top1_error], feed_dict=feed_dict)
loss_list.append(loss_value)
error_list.append(top1_error_value)
return np.mean(loss_list), np.mean(error_list)
maybe_download_and_extract()
# Initialize the Train object
train = Train()
# Start the training session
train.train()
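
# --- Editor's sketch (not part of the original script): the arithmetic that
# Train.top_k_error implements, spelled out in plain numpy for k = 1.
_demo_preds = np.array([[0.1, 0.9], [0.8, 0.2], [0.3, 0.7]])
_demo_labels = np.array([1, 0, 0])
_demo_correct = np.sum(_demo_preds.argmax(axis=1) == _demo_labels)  # 2 of 3 right
assert (len(_demo_labels) - _demo_correct) / float(len(_demo_labels)) == 1.0 / 3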
|
[
"jinzhu@Jins-MBP.wv.cc.cmu.edu"
] |
jinzhu@Jins-MBP.wv.cc.cmu.edu
|
882133f97a36e02d9b2be5233fcf40003635be7b
|
ffef2fe37864ca154e0e31b9092df89d50d77f3b
|
/data/LR_dataset.py
|
8db08de82c2d17ed7ad49e9d61d02e15ea3e2a26
|
[
"MIT"
] |
permissive
|
penguin1214/General_Framework_for_SR_Tasks
|
66205b6c041c294c4c74191bb1c62ddc84d071fc
|
4f18bc37d6cb7f1355770a1c321f20a58402e993
|
refs/heads/master
| 2020-03-21T03:08:45.343603
| 2018-08-15T20:41:34
| 2018-08-15T20:41:34
| 138,038,245
| 17
| 3
| null | 2018-07-04T09:22:11
| 2018-06-20T13:42:00
|
Python
|
UTF-8
|
Python
| false
| false
| 1,312
|
py
|
import os.path
import cv2
import numpy as np
import torch
import torch.utils.data as data
import data.common as util
class LRDataset(data.Dataset):
'''
Read LR images only in test phase.
'''
def name(self):
return 'LRDataset'
def __init__(self, opt):
super(LRDataset, self).__init__()
self.opt = opt
self.paths_LR = None
self.LR_env = None # environment for lmdb
# read image list from lmdb or image files
self.LR_env, self.paths_LR = util.get_image_paths(opt['data_type'], opt['dataroot_LR'])
assert self.paths_LR, 'Error: LR paths are empty.'
def __getitem__(self, index):
LR_path = None
# get LR image
LR_path = self.paths_LR[index]
img_LR = util.read_img(self.LR_env, LR_path)
H, W, C = img_LR.shape
# channel conversion
if self.opt['color']:
img_LR = util.channel_convert(C, self.opt['color'], [img_LR])[0]
# HWC to CHW, BGR to RGB, numpy to tensor
if img_LR.shape[2] == 3:
img_LR = img_LR[:, :, [2, 1, 0]]
img_LR = torch.from_numpy(np.ascontiguousarray(np.transpose(img_LR, (2, 0, 1)))).float()
return {'LR': img_LR, 'LR_path': LR_path}
def __len__(self):
return len(self.paths_LR)
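
# --- Editor's sketch (not part of the original file): the layout conversion
# performed in __getitem__ above, demonstrated on a dummy BGR image.
if __name__ == '__main__':
    img = np.zeros((4, 4, 3), dtype=np.float32)  # H x W x C, BGR channel order
    img[..., 0] = 1.0                            # mark the blue channel
    img = img[:, :, [2, 1, 0]]                   # BGR -> RGB channel swap
    t = torch.from_numpy(np.ascontiguousarray(np.transpose(img, (2, 0, 1)))).float()
    assert t.shape == (3, 4, 4)                  # now CHW
    assert t[2].max() == 1.0                     # blue ended up last in RGB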
|
[
"jinglei_yang@outlook.com"
] |
jinglei_yang@outlook.com
|
c97a02f2a96a5d905eb8a17e5ef60aa967f81f26
|
ddf2e85b8e8fda8cbaf92fc79a53abdb962c8bde
|
/tests/violated/basic_routing_stripped/orig.py
|
d4a180eed5e2964cdb78944a50b2c83762b3ceb5
|
[
"Apache-2.0"
] |
permissive
|
p4gauntlet/toz3
|
359bd20bdc8fe2b7ccf3564a90988823d94df078
|
0fddd9e21ac7b80e4a0bf8a4e6b1bdcc01308724
|
refs/heads/master
| 2023-05-11T17:23:10.972917
| 2023-05-09T16:02:56
| 2023-05-09T16:02:56
| 329,900,719
| 4
| 0
|
Apache-2.0
| 2023-02-22T23:28:49
| 2021-01-15T12:03:53
|
Python
|
UTF-8
|
Python
| false
| false
| 24,983
|
py
|
from p4z3 import *
def p4_program(prog_state):
prog_state.declare_global(
Enum( "error", ["NoError", "PacketTooShort", "NoMatch", "StackOutOfBounds", "HeaderTooShort", "ParserTimeout", "ParserInvalidArgument", ])
)
prog_state.declare_global(
P4Extern("packet_in", type_params=[], methods=[P4Declaration("extract", P4Method("extract", type_params=(None, [
"T",]), params=[
P4Parameter("out", "hdr", "T", None),])), P4Declaration("extract", P4Method("extract", type_params=(None, [
"T",]), params=[
P4Parameter("out", "variableSizeHeader", "T", None),
P4Parameter("in", "variableFieldSizeInBits", z3.BitVecSort(32), None),])), P4Declaration("lookahead", P4Method("lookahead", type_params=("T", [
"T",]), params=[])), P4Declaration("advance", P4Method("advance", type_params=(None, []), params=[
P4Parameter("in", "sizeInBits", z3.BitVecSort(32), None),])), P4Declaration("length", P4Method("length", type_params=(z3.BitVecSort(32), []), params=[])), ])
)
prog_state.declare_global(
P4Extern("packet_out", type_params=[], methods=[P4Declaration("emit", P4Method("emit", type_params=(None, [
"T",]), params=[
P4Parameter("in", "hdr", "T", None),])), ])
)
prog_state.declare_global(
P4Declaration("verify", P4Method("verify", type_params=(None, []), params=[
P4Parameter("in", "check", z3.BoolSort(), None),
P4Parameter("in", "toSignal", "error", None),]))
)
prog_state.declare_global(
P4Declaration("NoAction", P4Action("NoAction", params=[], body=BlockStatement([]
) ))
)
prog_state.declare_global(
P4Declaration("match_kind", ["exact", "ternary", "lpm", ])
)
prog_state.declare_global(
P4Declaration("match_kind", ["range", "optional", "selector", ])
)
prog_state.declare_global(
ValueDeclaration("__v1model_version", 20180101, z3_type=z3.BitVecSort(32))
)
prog_state.declare_global(
StructType("standard_metadata_t", prog_state, fields=[("ingress_port", z3.BitVecSort(9)), ("egress_spec", z3.BitVecSort(9)), ("egress_port", z3.BitVecSort(9)), ("instance_type", z3.BitVecSort(32)), ("packet_length", z3.BitVecSort(32)), ("enq_timestamp", z3.BitVecSort(32)), ("enq_qdepth", z3.BitVecSort(19)), ("deq_timedelta", z3.BitVecSort(32)), ("deq_qdepth", z3.BitVecSort(19)), ("ingress_global_timestamp", z3.BitVecSort(48)), ("egress_global_timestamp", z3.BitVecSort(48)), ("mcast_grp", z3.BitVecSort(16)), ("egress_rid", z3.BitVecSort(16)), ("checksum_error", z3.BitVecSort(1)), ("parser_error", "error"), ("priority", z3.BitVecSort(3)), ], type_params=[])
)
prog_state.declare_global(
Enum( "CounterType", ["packets", "bytes", "packets_and_bytes", ])
)
prog_state.declare_global(
Enum( "MeterType", ["packets", "bytes", ])
)
prog_state.declare_global(
P4Extern("counter", type_params=[], methods=[P4Declaration("counter", P4Method("counter", type_params=(None, []), params=[
P4Parameter("none", "size", z3.BitVecSort(32), None),
P4Parameter("none", "type", "CounterType", None),])), P4Declaration("count", P4Method("count", type_params=(None, []), params=[
P4Parameter("in", "index", z3.BitVecSort(32), None),])), ])
)
prog_state.declare_global(
P4Extern("direct_counter", type_params=[], methods=[P4Declaration("direct_counter", P4Method("direct_counter", type_params=(None, []), params=[
P4Parameter("none", "type", "CounterType", None),])), P4Declaration("count", P4Method("count", type_params=(None, []), params=[])), ])
)
prog_state.declare_global(
P4Extern("meter", type_params=[], methods=[P4Declaration("meter", P4Method("meter", type_params=(None, []), params=[
P4Parameter("none", "size", z3.BitVecSort(32), None),
P4Parameter("none", "type", "MeterType", None),])), P4Declaration("execute_meter", P4Method("execute_meter", type_params=(None, [
"T",]), params=[
P4Parameter("in", "index", z3.BitVecSort(32), None),
P4Parameter("out", "result", "T", None),])), ])
)
prog_state.declare_global(
P4Extern("direct_meter", type_params=[
"T",], methods=[P4Declaration("direct_meter", P4Method("direct_meter", type_params=(None, []), params=[
P4Parameter("none", "type", "MeterType", None),])), P4Declaration("read", P4Method("read", type_params=(None, []), params=[
P4Parameter("out", "result", "T", None),])), ])
)
prog_state.declare_global(
P4Extern("register", type_params=[
"T",], methods=[P4Declaration("register", P4Method("register", type_params=(None, []), params=[
P4Parameter("none", "size", z3.BitVecSort(32), None),])), P4Declaration("read", P4Method("read", type_params=(None, []), params=[
P4Parameter("out", "result", "T", None),
P4Parameter("in", "index", z3.BitVecSort(32), None),])), P4Declaration("write", P4Method("write", type_params=(None, []), params=[
P4Parameter("in", "index", z3.BitVecSort(32), None),
P4Parameter("in", "value", "T", None),])), ])
)
prog_state.declare_global(
P4Extern("action_profile", type_params=[], methods=[P4Declaration("action_profile", P4Method("action_profile", type_params=(None, []), params=[
P4Parameter("none", "size", z3.BitVecSort(32), None),])), ])
)
prog_state.declare_global(
P4Declaration("random", P4Method("random", type_params=(None, [
"T",]), params=[
P4Parameter("out", "result", "T", None),
P4Parameter("in", "lo", "T", None),
P4Parameter("in", "hi", "T", None),]))
)
prog_state.declare_global(
P4Declaration("digest", P4Method("digest", type_params=(None, [
"T",]), params=[
P4Parameter("in", "receiver", z3.BitVecSort(32), None),
P4Parameter("in", "data", "T", None),]))
)
prog_state.declare_global(
Enum( "HashAlgorithm", ["crc32", "crc32_custom", "crc16", "crc16_custom", "random", "identity", "csum16", "xor16", ])
)
prog_state.declare_global(
P4Declaration("mark_to_drop", P4Method("mark_to_drop", type_params=(None, []), params=[]))
)
prog_state.declare_global(
P4Declaration("mark_to_drop", P4Method("mark_to_drop", type_params=(None, []), params=[
P4Parameter("inout", "standard_metadata", "standard_metadata_t", None),]))
)
prog_state.declare_global(
P4Declaration("hash", P4Method("hash", type_params=(None, [
"O",
"T",
"D",
"M",]), params=[
P4Parameter("out", "result", "O", None),
P4Parameter("in", "algo", "HashAlgorithm", None),
P4Parameter("in", "base", "T", None),
P4Parameter("in", "data", "D", None),
P4Parameter("in", "max", "M", None),]))
)
prog_state.declare_global(
P4Extern("action_selector", type_params=[], methods=[P4Declaration("action_selector", P4Method("action_selector", type_params=(None, []), params=[
P4Parameter("none", "algorithm", "HashAlgorithm", None),
P4Parameter("none", "size", z3.BitVecSort(32), None),
P4Parameter("none", "outputWidth", z3.BitVecSort(32), None),])), ])
)
prog_state.declare_global(
Enum( "CloneType", ["I2E", "E2E", ])
)
prog_state.declare_global(
P4Extern("Checksum16", type_params=[], methods=[P4Declaration("Checksum16", P4Method("Checksum16", type_params=(None, []), params=[])), P4Declaration("get", P4Method("get", type_params=(z3.BitVecSort(16), [
"D",]), params=[
P4Parameter("in", "data", "D", None),])), ])
)
prog_state.declare_global(
P4Declaration("verify_checksum", P4Method("verify_checksum", type_params=(None, [
"T",
"O",]), params=[
P4Parameter("in", "condition", z3.BoolSort(), None),
P4Parameter("in", "data", "T", None),
P4Parameter("in", "checksum", "O", None),
P4Parameter("none", "algo", "HashAlgorithm", None),]))
)
prog_state.declare_global(
P4Declaration("update_checksum", P4Method("update_checksum", type_params=(None, [
"T",
"O",]), params=[
P4Parameter("in", "condition", z3.BoolSort(), None),
P4Parameter("in", "data", "T", None),
P4Parameter("inout", "checksum", "O", None),
P4Parameter("none", "algo", "HashAlgorithm", None),]))
)
prog_state.declare_global(
P4Declaration("verify_checksum_with_payload", P4Method("verify_checksum_with_payload", type_params=(None, [
"T",
"O",]), params=[
P4Parameter("in", "condition", z3.BoolSort(), None),
P4Parameter("in", "data", "T", None),
P4Parameter("in", "checksum", "O", None),
P4Parameter("none", "algo", "HashAlgorithm", None),]))
)
prog_state.declare_global(
P4Declaration("update_checksum_with_payload", P4Method("update_checksum_with_payload", type_params=(None, [
"T",
"O",]), params=[
P4Parameter("in", "condition", z3.BoolSort(), None),
P4Parameter("in", "data", "T", None),
P4Parameter("inout", "checksum", "O", None),
P4Parameter("none", "algo", "HashAlgorithm", None),]))
)
prog_state.declare_global(
P4Declaration("resubmit", P4Method("resubmit", type_params=(None, [
"T",]), params=[
P4Parameter("in", "data", "T", None),]))
)
prog_state.declare_global(
P4Declaration("recirculate", P4Method("recirculate", type_params=(None, [
"T",]), params=[
P4Parameter("in", "data", "T", None),]))
)
prog_state.declare_global(
P4Declaration("clone", P4Method("clone", type_params=(None, []), params=[
P4Parameter("in", "type", "CloneType", None),
P4Parameter("in", "session", z3.BitVecSort(32), None),]))
)
prog_state.declare_global(
P4Declaration("clone3", P4Method("clone3", type_params=(None, [
"T",]), params=[
P4Parameter("in", "type", "CloneType", None),
P4Parameter("in", "session", z3.BitVecSort(32), None),
P4Parameter("in", "data", "T", None),]))
)
prog_state.declare_global(
P4Declaration("truncate", P4Method("truncate", type_params=(None, []), params=[
P4Parameter("in", "length", z3.BitVecSort(32), None),]))
)
prog_state.declare_global(
P4Declaration("assert", P4Method("assert", type_params=(None, []), params=[
P4Parameter("in", "check", z3.BoolSort(), None),]))
)
prog_state.declare_global(
P4Declaration("assume", P4Method("assume", type_params=(None, []), params=[
P4Parameter("in", "check", z3.BoolSort(), None),]))
)
prog_state.declare_global(
P4Declaration("log_msg", P4Method("log_msg", type_params=(None, []), params=[
P4Parameter("none", "msg", z3.StringSort(), None),]))
)
prog_state.declare_global(
P4Declaration("log_msg", P4Method("log_msg", type_params=(None, [
"T",]), params=[
P4Parameter("none", "msg", z3.StringSort(), None),
P4Parameter("in", "data", "T", None),]))
)
prog_state.declare_global(
ControlDeclaration(P4ParserType("Parser", params=[
P4Parameter("none", "b", "packet_in", None),
P4Parameter("out", "parsedHdr", "H", None),
P4Parameter("inout", "meta", "M", None),
P4Parameter("inout", "standard_metadata", "standard_metadata_t", None),], type_params=[
"H",
"M",]))
)
prog_state.declare_global(
ControlDeclaration(P4ControlType("VerifyChecksum", params=[
P4Parameter("inout", "hdr", "H", None),
P4Parameter("inout", "meta", "M", None),], type_params=[
"H",
"M",]))
)
prog_state.declare_global(
ControlDeclaration(P4ControlType("Ingress", params=[
P4Parameter("inout", "hdr", "H", None),
P4Parameter("inout", "meta", "M", None),
P4Parameter("inout", "standard_metadata", "standard_metadata_t", None),], type_params=[
"H",
"M",]))
)
prog_state.declare_global(
ControlDeclaration(P4ControlType("Egress", params=[
P4Parameter("inout", "hdr", "H", None),
P4Parameter("inout", "meta", "M", None),
P4Parameter("inout", "standard_metadata", "standard_metadata_t", None),], type_params=[
"H",
"M",]))
)
prog_state.declare_global(
ControlDeclaration(P4ControlType("ComputeChecksum", params=[
P4Parameter("inout", "hdr", "H", None),
P4Parameter("inout", "meta", "M", None),], type_params=[
"H",
"M",]))
)
prog_state.declare_global(
ControlDeclaration(P4ControlType("Deparser", params=[
P4Parameter("none", "b", "packet_out", None),
P4Parameter("in", "hdr", "H", None),], type_params=[
"H",]))
)
prog_state.declare_global(
ControlDeclaration(P4Package("V1Switch", params=[
P4Parameter("none", "p", TypeSpecializer("Parser", "H", "M", ), None),
P4Parameter("none", "vr", TypeSpecializer("VerifyChecksum", "H", "M", ), None),
P4Parameter("none", "ig", TypeSpecializer("Ingress", "H", "M", ), None),
P4Parameter("none", "eg", TypeSpecializer("Egress", "H", "M", ), None),
P4Parameter("none", "ck", TypeSpecializer("ComputeChecksum", "H", "M", ), None),
P4Parameter("none", "dep", TypeSpecializer("Deparser", "H", ), None),],type_params=[
"H",
"M",]))
)
prog_state.declare_global(
StructType("ingress_metadata_t", prog_state, fields=[("vrf", z3.BitVecSort(12)), ("bd", z3.BitVecSort(16)), ("nexthop_index", z3.BitVecSort(16)), ], type_params=[])
)
prog_state.declare_global(
HeaderType("ethernet_t", prog_state, fields=[("dstAddr", z3.BitVecSort(48)), ("srcAddr", z3.BitVecSort(48)), ("etherType", z3.BitVecSort(16)), ], type_params=[])
)
prog_state.declare_global(
HeaderType("ipv4_t", prog_state, fields=[("version", z3.BitVecSort(4)), ("ihl", z3.BitVecSort(4)), ("diffserv", z3.BitVecSort(8)), ("totalLen", z3.BitVecSort(16)), ("identification", z3.BitVecSort(16)), ("flags", z3.BitVecSort(3)), ("fragOffset", z3.BitVecSort(13)), ("ttl", z3.BitVecSort(8)), ("protocol", z3.BitVecSort(8)), ("hdrChecksum", z3.BitVecSort(16)), ("srcAddr", z3.BitVecSort(32)), ("dstAddr", z3.BitVecSort(32)), ], type_params=[])
)
prog_state.declare_global(
StructType("metadata", prog_state, fields=[("ingress_metadata", "ingress_metadata_t"), ], type_params=[])
)
prog_state.declare_global(
StructType("headers", prog_state, fields=[("ethernet", "ethernet_t"), ("ipv4", "ipv4_t"), ], type_params=[])
)
prog_state.declare_global(
ControlDeclaration(P4Parser(
name="ParserImpl",
type_params=[],
params=[
P4Parameter("none", "packet", "packet_in", None),
P4Parameter("out", "hdr", "headers", None),
P4Parameter("inout", "meta", "metadata", None),
P4Parameter("inout", "standard_metadata", "standard_metadata_t", None),],
const_params=[],
local_decls=[],
body=ParserTree([
ParserState(name="parse_ipv4", select="accept",
components=[
MethodCallStmt(MethodCallExpr(P4Member("packet", "extract"), ["ipv4_t", ], hdr=P4Member("hdr", "ipv4"), )), ]),
ParserState(name="start", select=ParserSelect([P4Member(P4Member("hdr", "ethernet"), "etherType"), ], [(z3.BitVecVal(2048, 16), "parse_ipv4"), (DefaultExpression(), "accept"), ]),
components=[
MethodCallStmt(MethodCallExpr(P4Member("packet", "extract"), ["ethernet_t", ], hdr=P4Member("hdr", "ethernet"), )), ]),
])
))
)
prog_state.declare_global(
ControlDeclaration(P4Control(
name="egress",
type_params=[],
params=[
P4Parameter("inout", "hdr", "headers", None),
P4Parameter("inout", "meta", "metadata", None),
P4Parameter("inout", "standard_metadata", "standard_metadata_t", None),],
const_params=[],
body=BlockStatement([
MethodCallStmt(MethodCallExpr(P4Member("rewrite_mac_0", "apply"), [], )),]
),
local_decls=[
P4Declaration("NoAction_0", P4Action("NoAction", params=[], body=BlockStatement([]
) )),
P4Declaration("on_miss", P4Action("on_miss", params=[], body=BlockStatement([]
) )),
P4Declaration("rewrite_src_dst_mac", P4Action("rewrite_src_dst_mac", params=[
P4Parameter("none", "smac", z3.BitVecSort(48), None),
P4Parameter("none", "dmac", z3.BitVecSort(48), None),], body=BlockStatement([
AssignmentStatement(P4Member(P4Member("hdr", "ethernet"), "srcAddr"), "smac"),
AssignmentStatement(P4Member(P4Member("hdr", "ethernet"), "dstAddr"), "dmac"),]
) )),
P4Declaration("rewrite_mac_0", P4Table("rewrite_mac", actions=[MethodCallExpr("on_miss", [], ), MethodCallExpr("rewrite_src_dst_mac", [], ), ], key=[(P4Member(P4Member("meta", "ingress_metadata"), "nexthop_index"), "exact"), ], size=32768, default_action=MethodCallExpr("NoAction_0", [], ), immutable=False)), ]
))
)
prog_state.declare_global(
ControlDeclaration(P4Control(
name="ingress",
type_params=[],
params=[
P4Parameter("inout", "hdr", "headers", None),
P4Parameter("inout", "meta", "metadata", None),
P4Parameter("inout", "standard_metadata", "standard_metadata_t", None),],
const_params=[],
body=BlockStatement([
SwitchStatement(P4Member(MethodCallExpr(P4Member("ipv4_fib_0", "apply"), [], ), "action_run"),cases=[("on_miss_2", BlockStatement([
MethodCallStmt(MethodCallExpr(P4Member("ipv4_fib_lpm_0", "apply"), [], )),]
)), ]),]
),
local_decls=[
P4Declaration("NoAction_8", P4Action("NoAction", params=[], body=BlockStatement([]
) )),
P4Declaration("NoAction_9", P4Action("NoAction", params=[], body=BlockStatement([]
) )),
P4Declaration("on_miss_2", P4Action("on_miss", params=[], body=BlockStatement([]
) )),
P4Declaration("on_miss_5", P4Action("on_miss", params=[], body=BlockStatement([]
) )),
P4Declaration("fib_hit_nexthop", P4Action("fib_hit_nexthop", params=[
P4Parameter("none", "nexthop_index", z3.BitVecSort(16), None),], body=BlockStatement([
AssignmentStatement(P4Member(P4Member("meta", "ingress_metadata"), "nexthop_index"), "nexthop_index"),
AssignmentStatement(P4Member(P4Member("hdr", "ipv4"), "ttl"), P4add(P4Member(P4Member("hdr", "ipv4"), "ttl"), z3.BitVecVal(255, 8))),]
) )),
P4Declaration("fib_hit_nexthop_2", P4Action("fib_hit_nexthop", params=[
P4Parameter("none", "nexthop_index", z3.BitVecSort(16), None),], body=BlockStatement([
AssignmentStatement(P4Member(P4Member("meta", "ingress_metadata"), "nexthop_index"), "nexthop_index"),
AssignmentStatement(P4Member(P4Member("hdr", "ipv4"), "ttl"), P4add(P4Member(P4Member("hdr", "ipv4"), "ttl"), z3.BitVecVal(255, 8))),]
) )),
P4Declaration("ipv4_fib_0", P4Table("ipv4_fib", actions=[MethodCallExpr("on_miss_2", [], ), MethodCallExpr("fib_hit_nexthop", [], ), ], key=[(P4Member(P4Member("meta", "ingress_metadata"), "vrf"), "exact"), (P4Member(P4Member("hdr", "ipv4"), "dstAddr"), "exact"), ], size=131072, default_action=MethodCallExpr("NoAction_8", [], ), immutable=False)),
P4Declaration("ipv4_fib_lpm_0", P4Table("ipv4_fib_lpm", actions=[MethodCallExpr("on_miss_5", [], ), MethodCallExpr("fib_hit_nexthop_2", [], ), ], key=[(P4Member(P4Member("meta", "ingress_metadata"), "vrf"), "exact"), (P4Member(P4Member("hdr", "ipv4"), "dstAddr"), "lpm"), ], size=16384, default_action=MethodCallExpr("NoAction_9", [], ), immutable=False)), ]
))
)
prog_state.declare_global(
ControlDeclaration(P4Control(
name="DeparserImpl",
type_params=[],
params=[
P4Parameter("none", "packet", "packet_out", None),
P4Parameter("in", "hdr", "headers", None),],
const_params=[],
body=BlockStatement([
MethodCallStmt(MethodCallExpr(P4Member("packet", "emit"), ["ethernet_t", ], hdr=P4Member("hdr", "ethernet"), )),
MethodCallStmt(MethodCallExpr(P4Member("packet", "emit"), ["ipv4_t", ], hdr=P4Member("hdr", "ipv4"), )),]
),
local_decls=[]
))
)
prog_state.declare_global(
ControlDeclaration(P4Control(
name="verifyChecksum",
type_params=[],
params=[
P4Parameter("inout", "hdr", "headers", None),
P4Parameter("inout", "meta", "metadata", None),],
const_params=[],
body=BlockStatement([
MethodCallStmt(MethodCallExpr("verify_checksum", [ListType("tuple", prog_state, [z3.BitVecSort(4), z3.BitVecSort(4), z3.BitVecSort(8), z3.BitVecSort(16), z3.BitVecSort(16), z3.BitVecSort(3), z3.BitVecSort(13), z3.BitVecSort(8), z3.BitVecSort(8), z3.BitVecSort(32), z3.BitVecSort(32), ]), z3.BitVecSort(16), ], data=[P4Member(P4Member("hdr", "ipv4"), "version"), P4Member(P4Member("hdr", "ipv4"), "ihl"), P4Member(P4Member("hdr", "ipv4"), "diffserv"), P4Member(P4Member("hdr", "ipv4"), "totalLen"), P4Member(P4Member("hdr", "ipv4"), "identification"), P4Member(P4Member("hdr", "ipv4"), "flags"), P4Member(P4Member("hdr", "ipv4"), "fragOffset"), P4Member(P4Member("hdr", "ipv4"), "ttl"), P4Member(P4Member("hdr", "ipv4"), "protocol"), P4Member(P4Member("hdr", "ipv4"), "srcAddr"), P4Member(P4Member("hdr", "ipv4"), "dstAddr"), ], checksum=P4Member(P4Member("hdr", "ipv4"), "hdrChecksum"), condition=z3.BoolVal(True), algo=P4Member("HashAlgorithm", "csum16"), )),]
),
local_decls=[]
))
)
prog_state.declare_global(
ControlDeclaration(P4Control(
name="computeChecksum",
type_params=[],
params=[
P4Parameter("inout", "hdr", "headers", None),
P4Parameter("inout", "meta", "metadata", None),],
const_params=[],
body=BlockStatement([
MethodCallStmt(MethodCallExpr("update_checksum", [ListType("tuple", prog_state, [z3.BitVecSort(4), z3.BitVecSort(4), z3.BitVecSort(8), z3.BitVecSort(16), z3.BitVecSort(16), z3.BitVecSort(3), z3.BitVecSort(13), z3.BitVecSort(8), z3.BitVecSort(8), z3.BitVecSort(32), z3.BitVecSort(32), ]), z3.BitVecSort(16), ], condition=z3.BoolVal(True), data=[P4Member(P4Member("hdr", "ipv4"), "version"), P4Member(P4Member("hdr", "ipv4"), "ihl"), P4Member(P4Member("hdr", "ipv4"), "diffserv"), P4Member(P4Member("hdr", "ipv4"), "totalLen"), P4Member(P4Member("hdr", "ipv4"), "identification"), P4Member(P4Member("hdr", "ipv4"), "flags"), P4Member(P4Member("hdr", "ipv4"), "fragOffset"), P4Member(P4Member("hdr", "ipv4"), "ttl"), P4Member(P4Member("hdr", "ipv4"), "protocol"), P4Member(P4Member("hdr", "ipv4"), "srcAddr"), P4Member(P4Member("hdr", "ipv4"), "dstAddr"), ], algo=P4Member("HashAlgorithm", "csum16"), checksum=P4Member(P4Member("hdr", "ipv4"), "hdrChecksum"), )),]
),
local_decls=[]
))
)
prog_state.declare_global(
InstanceDeclaration("main", TypeSpecializer("V1Switch", "headers", "metadata", ), p=ConstCallExpr("ParserImpl", ), ig=ConstCallExpr("ingress", ), vr=ConstCallExpr("verifyChecksum", ), eg=ConstCallExpr("egress", ), ck=ConstCallExpr("computeChecksum", ), dep=ConstCallExpr("DeparserImpl", ), )
)
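# (Descriptive note, added: the "main" declaration above instantiates the
# V1Switch package wired to the ParserImpl/ingress/egress/checksum/deparser
# blocks declared earlier; get_main_function() below retrieves that instance.)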
var = prog_state.get_main_function()
return var if isinstance(var, P4Package) else None
|
[
"noreply@github.com"
] |
p4gauntlet.noreply@github.com
|
57a53c43996ff1173b87517ec8a8c6faeff2f551
|
da8f4c19d2b7c996623da126adef1153293e1881
|
/code/stock_scrapper/gunicorn-config.py
|
8b1beda80cec557f46aa91be7a96ad63fb689abc
|
[
"MIT"
] |
permissive
|
erickfis/stock_scrapper
|
d7637dd195ba7cab8133121f444c7b76766b93b4
|
7480d7277c3f5530b51df42cb18b1c281521a9e1
|
refs/heads/master
| 2023-06-26T23:59:39.116985
| 2021-07-30T14:09:04
| 2021-07-30T14:09:04
| 388,815,754
| 2
| 1
| null | 2021-07-30T02:41:38
| 2021-07-23T13:45:56
|
Python
|
UTF-8
|
Python
| false
| false
| 150
|
py
|
"""Configuration for gunicorn."""
import os
timeout = 20
bind = f":{os.environ.get('PORT', '8000')}"
worker_class = "uvicorn.workers.UvicornWorker"
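# Hedged usage note (not part of the original file): a config module like this
# is typically consumed on the command line, e.g.
#
#   gunicorn -c gunicorn-config.py main:app
#
# where "main:app" is an assumed application entry point; the UvicornWorker
# class above requires the uvicorn package installed alongside gunicorn.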
|
[
"erickfis@gmail.com"
] |
erickfis@gmail.com
|
2f9d4b19f169370ca5e35a8505672eb1b7b6d700
|
e81d274d6a1bcabbe7771612edd43b42c0d48197
|
/Django/day60(自定义管理器)/demo/02_webapp/booktest/models.py
|
a85c8a71d12b0947243429f766482df7fc35b17c
|
[
"MIT"
] |
permissive
|
ChWeiking/PythonTutorial
|
1259dc04c843382f2323d69f6678b9431d0b56fd
|
1aa4b81cf26fba2fa2570dd8e1228fef4fd6ee61
|
refs/heads/master
| 2020-05-15T00:50:10.583105
| 2016-07-30T16:03:45
| 2016-07-30T16:03:45
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,767
|
py
|
from django.db import models
from django.db.models.manager import Manager
from datetime import date
"""
Custom manager
"""
class BookInfo_Manager(Manager):
def get_queryset(self):
return super().get_queryset().filter(isdelete=0)
def create(self, btitle='xx', bpubdate=None, bread=0, bcomment=0, isdelete=0):
# date.today() as a default argument would be evaluated once at import
# time, so resolve it per call instead
b = BookInfo()
b.btitle = btitle
b.bpubdate = bpubdate if bpubdate is not None else date.today()
b.bread = bread
b.bcomment = bcomment
b.isdelete = isdelete
return b
class BookInfo(models.Model):
btitle = models.CharField(max_length=100)
bpubdate = models.DateField()
bread = models.IntegerField()
bcomment = models.IntegerField()
isdelete = models.BooleanField()
bookinfo_manager1 = Manager()
bookinfo_manager2 = BookInfo_Manager()
def __str__(self):
return self.btitle
"""
@classmethod
def create(cls,btitle='xx',bpubdate=date.today(),bread=0,bcomment=0,isdelete=0):
b = BookInfo()
b.btitle = btitle
b.bpubdate = bpubdate
b.bread = bread
b.bcomment = bcomment
b.isdelete = isdelete
return b
"""
class HeroInfo(models.Model):
hname = models.CharField(max_length=100)
hgender = models.TextField()
hbookinfo = models.ForeignKey(BookInfo)
hcontent = models.CharField(max_length=200)
isdelete = models.BooleanField()
"""
class Meta():
db_table = 'bookinfo22222222222222222222222'
ordering = ['a']
def myhgender(self):
if self.hgender:
return '男'
else:
return '女'
def myhname(self):
return self.hname
myhgender.short_description = '性别'
myhname.short_description = '姓名'
"""
|
[
"1025212779@qq.com"
] |
1025212779@qq.com
|
9cba16904c3d6c84fa17c84c14a4c69a517dcf65
|
c491494017eeecc83ebaaeb74a1f60a9031e3a39
|
/tethysapp/hs_modflow/model.py
|
349df073ba30738475ba481254495b4736e03bf1
|
[] |
no_license
|
c-krew/hs_modflow
|
49f1900631c080a9c3254b277e5dbdcc849afd3b
|
a625ed113f9a4e498fd3712b3b904e67446f0207
|
refs/heads/master
| 2021-10-08T18:33:33.541728
| 2018-12-16T00:04:44
| 2018-12-16T00:04:44
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 27,382
|
py
|
import json
import os
import flopy
from datetime import datetime as dt
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import *
from sqlalchemy.orm import sessionmaker
from django.http import JsonResponse, Http404, HttpResponse
from hs_restclient import HydroShare
from .app import HsModflow as app
Base = declarative_base()
# SQLAlchemy ORM definition for the models table
class Model(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'models'
# Columns
id = Column(Integer, primary_key=True)
resourceid = Column(String)
displayname = Column(String)
modeltype = Column(String)
modelfiles = Column(String)
zoneid = Column(Integer, ForeignKey("zone.id"), nullable=True)
multid = Column(Integer, ForeignKey("mult.id"), nullable=True)
pvalid = Column(Integer, ForeignKey("pval.id"), nullable=True)
bas6id = Column(Integer, ForeignKey("bas6.id"), nullable=True)
disid = Column(Integer, ForeignKey("dis.id"), nullable=True)
disuid = Column(Integer, ForeignKey("disu.id"), nullable=True)
bcf6id = Column(Integer, ForeignKey("bcf6.id"), nullable=True)
lpfid = Column(Integer, ForeignKey("lpf.id"), nullable=True)
hfb6id = Column(Integer, ForeignKey("hfb6.id"), nullable=True)
chdid = Column(Integer, ForeignKey("chd.id"), nullable=True)
fhbid = Column(Integer, ForeignKey("fhb.id"), nullable=True)
welid = Column(Integer, ForeignKey("wel.id"), nullable=True)
mnw1id = Column(Integer, ForeignKey("mnw1.id"), nullable=True)
mnw2id = Column(Integer, ForeignKey("mnw2.id"), nullable=True)
mnwiid = Column(Integer, ForeignKey("mnwi.id"), nullable=True)
drnid = Column(Integer, ForeignKey("drn.id"), nullable=True)
rchid = Column(Integer, ForeignKey("rch.id"), nullable=True)
evtid = Column(Integer, ForeignKey("evt.id"), nullable=True)
ghbid = Column(Integer, ForeignKey("ghb.id"), nullable=True)
gmgid = Column(Integer, ForeignKey("gmg.id"), nullable=True)
lmt6id = Column(Integer, ForeignKey("lmt6.id"), nullable=True)
lmt7id = Column(Integer, ForeignKey("lmt7.id"), nullable=True)
rivid = Column(Integer, ForeignKey("riv.id"), nullable=True)
strid = Column(Integer, ForeignKey("str.id"), nullable=True)
swi2id = Column(Integer, ForeignKey("swi2.id"), nullable=True)
pcgid = Column(Integer, ForeignKey("pcg.id"), nullable=True)
pcgnid = Column(Integer, ForeignKey("pcgn.id"), nullable=True)
nwtid = Column(Integer, ForeignKey("nwt.id"), nullable=True)
pksid = Column(Integer, ForeignKey("pks.id"), nullable=True)
smsid = Column(Integer, ForeignKey("sms.id"), nullable=True)
sfrid = Column(Integer, ForeignKey("sfr.id"), nullable=True)
lakid = Column(Integer, ForeignKey("lak.id"), nullable=True)
gageid = Column(Integer, ForeignKey("gage.id"), nullable=True)
sipid = Column(Integer, ForeignKey("sip.id"), nullable=True)
sorid = Column(Integer, ForeignKey("sor.id"), nullable=True)
de4id = Column(Integer, ForeignKey("de4.id"), nullable=True)
ocid = Column(Integer, ForeignKey("oc.id"), nullable=True)
uzfid = Column(Integer, ForeignKey("uzf.id"), nullable=True)
upwid = Column(Integer, ForeignKey("upw.id"), nullable=True)
subid = Column(Integer, ForeignKey("sub.id"), nullable=True)
swtid = Column(Integer, ForeignKey("swt.id"), nullable=True)
hydid = Column(Integer, ForeignKey("hyd.id"), nullable=True)
hobid = Column(Integer, ForeignKey("hob.id"), nullable=True)
vdfid = Column(Integer, ForeignKey("vdf.id"), nullable=True)
vscid = Column(Integer, ForeignKey("vsc.id"), nullable=True)
drtid = Column(Integer, ForeignKey("drt.id"), nullable=True)
pvlid = Column(Integer, ForeignKey("pvl.id"), nullable=True)
etsid = Column(Integer, ForeignKey("ets.id"), nullable=True)
basid = Column(Integer, ForeignKey("bas.id"), nullable=True)
namid = Column(Integer, ForeignKey("nam.id"), nullable=True)
class zone(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'zone'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class mult(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'mult'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class pval(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'pval'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class bas6(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'bas6'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class dis(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'dis'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class disu(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'disu'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class bcf6(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'bcf6'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class lpf(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'lpf'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class hfb6(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'hfb6'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class chd(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'chd'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class fhb(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'fhb'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class wel(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'wel'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class mnw1(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'mnw1'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class mnw2(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'mnw2'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class mnwi(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'mnwi'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class drn(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'drn'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class rch(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'rch'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class evt(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'evt'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class ghb(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'ghb'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class gmg(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'gmg'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class lmt6(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'lmt6'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class lmt7(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'lmt7'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class riv(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'riv'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class str(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'str'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class swi2(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'swi2'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class pcg(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'pcg'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class pcgn(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'pcgn'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class nwt(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'nwt'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class pks(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'pks'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class sms(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'sms'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class sfr(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'sfr'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class lak(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'lak'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class gage(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'gage'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class sip(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'sip'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class sor(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'sor'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class de4(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'de4'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class oc(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'oc'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class uzf(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'uzf'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class upw(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'upw'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class sub(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'sub'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class swt(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'swt'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class hyd(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'hyd'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class hob(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'hob'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class vdf(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'vdf'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class vsc(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'vsc'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class drt(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'drt'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class pvl(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'pvl'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class ets(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'ets'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class bas(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'bas'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
class nam(Base):
"""
SQLAlchemy Model DB Model
"""
__tablename__ = 'nam'
# Columns
id = Column(Integer, primary_key=True)
data = Column(String)
def init_primary_db(engine, first_time):
"""
Initializer for the primary database.
"""
# Create all the tables
Base.metadata.create_all(engine)
# Add data
if first_time:
# Make session
Session = sessionmaker(bind=engine)
session = Session()
session.commit()
session.close()
def get_all_models():
"""
Get all persisted dams.
"""
# Get connection/session to database
Session = app.get_persistent_store_database('primary_db', as_sessionmaker=True)
session = Session()
# Query for all model records
models = session.query(Model).all()
modellist = [(model.displayname, model.displayname) for model in models]
session.close()
return modellist
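# Refactor sketch (not in the original): the extension -> ORM-class mapping
# below is rebuilt by hand inside save_hs_to_favorites, upload_to_hs,
# save_to_db, load_resource and save_to_db_newentry. Declarative metadata
# already knows every table, so one module-level dict would suffice, e.g.:
#
# PACKAGE_TABLES = {cls.__tablename__: cls
#                   for cls in Base.__subclasses__()
#                   if cls is not Model}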
def save_hs_to_favorites(resourceid, displayname, modeltype):
dbs = {
'zone': zone,
'mult': mult,
'pval': pval,
'bas6': bas6,
'dis': dis,
'disu': disu,
'bcf6': bcf6,
'lpf': lpf,
'hfb6': hfb6,
'chd': chd,
'fhb': fhb,
'wel': wel,
'mnw1': mnw1,
'mnw2': mnw2,
'mnwi': mnwi,
'drn': drn,
'rch': rch,
'evt': evt,
'ghb': ghb,
'gmg': gmg,
'lmt6': lmt6,
'lmt7': lmt7,
'riv': riv,
'str': str,
'swi2': swi2,
'pcg': pcg,
'pcgn': pcgn,
'nwt': nwt,
'pks': pks,
'sms': sms,
'sfr': sfr,
'lak': lak,
'gage': gage,
'sip': sip,
'sor': sor,
'de4': de4,
'oc': oc,
'uzf': uzf,
'upw': upw,
'sub': sub,
'swt': swt,
'hyd': hyd,
'hob': hob,
'vdf': vdf,
'vsc': vsc,
'drt': drt,
'pvl': pvl,
'ets': ets,
'bas': bas,
'nam': nam,
}
Session = app.get_persistent_store_database('primary_db', as_sessionmaker=True)
session = Session()
hs = HydroShare()
app_dir = app.get_app_workspace().path
resourcelist = hs.getResourceFileList(resourceid)
filelist = []
for resource in resourcelist:
url = resource['url'].split("/")
fname = url[-1]
hs.getResourceFile(resourceid, fname, destination=app_dir)
filelist.append(fname)
json.dumps(filelist)
fav = Model(
resourceid=resourceid,
displayname=displayname,
modeltype=modeltype,
modelfiles=filelist
)
# Add the model to the session, commit, and close
session.add(fav)
model = session.query(Model).filter(Model.displayname==displayname).first()
mainid = model.id
for fi in filelist:
ext = fi.split(".")[1]
filepath = os.path.join(app.get_app_workspace().path, fi)
with open(
filepath,
'r'
) as myfile:
data = myfile.read()
json.dumps(data)
tbl = dbs[ext](
data=data,
)
# Add the model to the session, commit, and close
session.add(tbl)
session.commit()
setattr(model, ext + 'id', tbl.id)
session.commit()
os.remove(filepath)
session.close()
return
def upload_to_hs(uploadtype, modelname, resource_name, resource_abstract, resource_key):
dbs = {
'zone':zone,
'mult':mult,
'pval':pval,
'bas6':bas6,
'dis':dis,
'disu':disu,
'bcf6':bcf6,
'lpf':lpf,
'hfb6':hfb6,
'chd':chd,
'fhb':fhb,
'wel':wel,
'mnw1':mnw1,
'mnw2':mnw2,
'mnwi':mnwi,
'drn':drn,
'rch':rch,
'evt':evt,
'ghb':ghb,
'gmg':gmg,
'lmt6':lmt6,
'lmt7':lmt7,
'riv':riv,
'str':str,
'swi2':swi2,
'pcg':pcg,
'pcgn':pcgn,
'nwt':nwt,
'pks':pks,
'sms':sms,
'sfr':sfr,
'lak':lak,
'gage':gage,
'sip':sip,
'sor':sor,
'de4':de4,
'oc':oc,
'uzf':uzf,
'upw':upw,
'sub':sub,
'swt':swt,
'hyd':hyd,
'hob':hob,
'vdf':vdf,
'vsc':vsc,
'drt':drt,
'pvl':pvl,
'ets':ets,
'bas':bas,
'nam':nam
}
hs = HydroShare()
Session = app.get_persistent_store_database('primary_db', as_sessionmaker=True)
session = Session()
fileliststr = session.query(Model).filter(Model.displayname == modelname).first()
filelist = [i for i in fileliststr.modelfiles.strip('{}').split(',')]
mainid = fileliststr.id
resourceid = fileliststr.resourceid
if uploadtype == 'new':
abstract = resource_abstract
title = resource_name
keywords = (i for i in resource_key.split(','))
rtype = 'ModelInstanceResource'
new_resource_id = hs.createResource(rtype, title, keywords=keywords, abstract=abstract)
for fi in filelist:
parts = fi.split(".")
ext_data = session.query(dbs[parts[1]]).filter(dbs[parts[1]].id == mainid).first().data
if uploadtype == 'add':
date = dt.now().strftime("%m-%d-%Y-%X")
filename = "{}_{}.{}".format(parts[0], date, parts[1])
else:
filename = fi
if uploadtype == 'new':
hs.addResourceFile(new_resource_id, ext_data, resource_filename=filename)
elif uploadtype == 'overwrite':
hs.deleteResourceFile(resourceid, filename)
hs.addResourceFile(resourceid, ext_data, resource_filename=filename)
else:
hs.addResourceFile(resourceid, ext_data, resource_filename=filename)
session.close()
return_obj = {'success': True}
return JsonResponse(return_obj)
def save_to_db(resourceid, displayname, modeltype):
dbs = {
'zone':zone,
'mult':mult,
'pval':pval,
'bas6':bas6,
'dis':dis,
'disu':disu,
'bcf6':bcf6,
'lpf':lpf,
'hfb6':hfb6,
'chd':chd,
'fhb':fhb,
'wel':wel,
'mnw1':mnw1,
'mnw2':mnw2,
'mnwi':mnwi,
'drn':drn,
'rch':rch,
'evt':evt,
'ghb':ghb,
'gmg':gmg,
'lmt6':lmt6,
'lmt7':lmt7,
'riv':riv,
'str':str,
'swi2':swi2,
'pcg':pcg,
'pcgn':pcgn,
'nwt':nwt,
'pks':pks,
'sms':sms,
'sfr':sfr,
'lak':lak,
'gage':gage,
'sip':sip,
'sor':sor,
'de4':de4,
'oc':oc,
'uzf':uzf,
'upw':upw,
'sub':sub,
'swt':swt,
'hyd':hyd,
'hob':hob,
'vdf':vdf,
'vsc':vsc,
'drt':drt,
'pvl':pvl,
'ets':ets,
'bas':bas,
'nam':nam,
}
Session = app.get_persistent_store_database('primary_db', as_sessionmaker=True)
session = Session()
app_dir = app.get_app_workspace().path
# app_dir = '/Users/travismcstraw/tethysdev/hs_modflow/tethysapp/hs_modflow/workspaces/app_workspace/'
fileliststr = session.query(Model).filter(Model.displayname == displayname).first()
filelist = [i for i in fileliststr.modelfiles.strip('{}').split(',')]
json.dumps(filelist)
model = session.query(Model).filter(Model.displayname==displayname).first()
mainid = model.id
for fi in filelist:
ext = fi.split(".")[1]
setattr(model, ext + 'id', mainid)
with open(
os.path.join(app_dir, fi),
'r'
) as myfile:
data = myfile.read()
json.dumps(data)
session.query(dbs[ext]).filter(dbs[ext].id==mainid).one().data = data
os.remove(os.path.join(app_dir, fi))
session.commit()
session.close()
return
def load_resource(request):
dbs = {
'zone': zone,
'mult': mult,
'pval': pval,
'bas6': bas6,
'dis': dis,
'disu': disu,
'bcf6': bcf6,
'lpf': lpf,
'hfb6': hfb6,
'chd': chd,
'fhb': fhb,
'wel': wel,
'mnw1': mnw1,
'mnw2': mnw2,
'mnwi': mnwi,
'drn': drn,
'rch': rch,
'evt': evt,
'ghb': ghb,
'gmg': gmg,
'lmt6': lmt6,
'lmt7': lmt7,
'riv': riv,
'str': str,
'swi2': swi2,
'pcg': pcg,
'pcgn': pcgn,
'nwt': nwt,
'pks': pks,
'sms': sms,
'sfr': sfr,
'lak': lak,
'gage': gage,
'sip': sip,
'sor': sor,
'de4': de4,
'oc': oc,
'uzf': uzf,
'upw': upw,
'sub': sub,
'swt': swt,
'hyd': hyd,
'hob': hob,
'vdf': vdf,
'vsc': vsc,
'drt': drt,
'pvl': pvl,
'ets': ets,
'bas': bas,
'nam': nam,
}
app_dir = app.get_app_workspace().path
ex_filelist = os.listdir(app_dir)
for ex_file in ex_filelist:
if ex_file == 'models':
continue
else:
os.remove(os.path.join(app_dir, ex_file))
displayname = request.POST.get('displayname')
Session = app.get_persistent_store_database('primary_db', as_sessionmaker=True)
session = Session()
model = session.query(Model).filter(Model.displayname == displayname).first()
filelist = [i for i in model.modelfiles.strip('{}').split(',')]
for fi in filelist:
ext = fi.split(".")[1]
extid = getattr(model, ext + 'id')
ext_data = session.query(dbs[ext]).filter(dbs[ext].id == extid).first().data
filepath = os.path.join(app.get_app_workspace().path, fi)
# filepath = os.path.join('/Users/travismcstraw/tethysdev/hs_modflow/tethysapp/hs_modflow/workspaces/app_workspace/', fi)
with open(
filepath,
'w'
) as myfile:
myfile.write(ext_data)
session.close()
return_obj = {'success': True, 'filelist': filelist}
return JsonResponse(return_obj)
def save_to_db_newentry(resourceid, displayname, new_display_name, modeltype):
dbs = {
'zone':zone,
'mult':mult,
'pval':pval,
'bas6':bas6,
'dis':dis,
'disu':disu,
'bcf6':bcf6,
'lpf':lpf,
'hfb6':hfb6,
'chd':chd,
'fhb':fhb,
'wel':wel,
'mnw1':mnw1,
'mnw2':mnw2,
'mnwi':mnwi,
'drn':drn,
'rch':rch,
'evt':evt,
'ghb':ghb,
'gmg':gmg,
'lmt6':lmt6,
'lmt7':lmt7,
'riv':riv,
'str':str,
'swi2':swi2,
'pcg':pcg,
'pcgn':pcgn,
'nwt':nwt,
'pks':pks,
'sms':sms,
'sfr':sfr,
'lak':lak,
'gage':gage,
'sip':sip,
'sor':sor,
'de4':de4,
'oc':oc,
'uzf':uzf,
'upw':upw,
'sub':sub,
'swt':swt,
'hyd':hyd,
'hob':hob,
'vdf':vdf,
'vsc':vsc,
'drt':drt,
'pvl':pvl,
'ets':ets,
'bas':bas,
'nam':nam,
}
Session = app.get_persistent_store_database('primary_db', as_sessionmaker=True)
session = Session()
app_dir = app.get_app_workspace().path
# app_dir = '/Users/travismcstraw/tethysdev/hs_modflow/tethysapp/hs_modflow/workspaces/app_workspace/'
fileliststr = session.query(Model).filter(Model.displayname == displayname).first()
filelist = [i for i in fileliststr.modelfiles.strip('{}').split(',')]
json.dumps(filelist)
fav = Model(
resourceid=resourceid,
displayname=new_display_name,
modeltype=modeltype,
modelfiles=filelist
)
# Add the model to the session, commit, and close
session.add(fav)
session.commit()
model = session.query(Model).filter(Model.displayname==new_display_name).first()
mainid = model.id
for fi in filelist:
ext = fi.split(".")[1]
filepath = os.path.join(app.get_app_workspace().path, fi)
with open(
filepath,
'r'
) as myfile:
data = myfile.read()
json.dumps(data)
tbl = dbs[ext](
data=data,
)
# Add the model to the session, commit, and close
session.add(tbl)
session.commit()
setattr(model, ext + 'id', tbl.id)
session.commit()
os.remove(os.path.join(app_dir, fi))
session.commit()
session.close()
return
def run_model(request):
displayname = request.POST.get('displayname')
app_dir = app.get_app_workspace().path
# app_dir = '/Users/travismcstraw/tethysdev/hs_modflow/tethysapp/hs_modflow/workspaces/app_workspace/'
ex_filelist = os.listdir(app_dir)
for ex_file in ex_filelist:
if ex_file == 'models':
continue
elif ex_file.endswith('.nam'):
# endswith avoids an IndexError on names without a "." separator
modelnam = ex_file
break
Session = app.get_persistent_store_database('primary_db', as_sessionmaker=True)
session = Session()
model = session.query(Model).filter(Model.displayname == displayname).one()
model_type = model.modeltype
app_dir = app.get_app_workspace().path
# app_dir = '/Users/travismcstraw/tethysdev/hs_modflow/tethysapp/hs_modflow/workspaces/app_workspace/'
# first lets load an existing model
ml = flopy.modflow.Modflow.load(modelnam, model_ws=app_dir, verbose=False,
check=False, exe_name=os.path.join(app_dir, 'models', model_type))
ml.run_model()
mfl = flopy.utils.MfListBudget(os.path.join(app_dir, modelnam.split(".")[0] + ".lst"))
df_flux, df_vol = mfl.get_dataframes()
dict_flux = df_flux.to_dict()
dict_vol = df_vol.to_dict()
flux_results = {}
vol_results = {}
for key1 in dict_flux:
for key2 in dict_flux[key1]:
datetime = key2.strftime('%Y-%m-%d %X')
flux_results.setdefault(datetime, {})[key1] = dict_flux[key1][key2]
for key1 in dict_vol:
for key2 in dict_vol[key1]:
datetime = key2.strftime('%Y-%m-%d %X')
# bug fix: volume results must be read from dict_vol, not dict_flux
vol_results.setdefault(datetime, {})[key1] = dict_vol[key1][key2]
return_obj = {'success': True, 'vol': vol_results, "flux": flux_results}
return JsonResponse(return_obj)
|
[
"coreykrewson@gmail.com"
] |
coreykrewson@gmail.com
|
7fe7a2d3a65e3c80c48c513dbca332369b6f1feb
|
8dcddd65561a5c0cca8e0727d4df260ac9a17062
|
/pycat
|
8b81ddf3eb89bf8a14f78703ed46f15babe3b056
|
[] |
no_license
|
lverweijen/pytools
|
edb08a462530b4275389a514a57ec88fd0228b3d
|
bad12f57ee1d5be6eaed23d047e9a9614da77284
|
refs/heads/master
| 2021-01-10T07:43:33.093956
| 2016-02-09T18:51:08
| 2016-02-09T18:51:08
| 47,980,400
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,369
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Utility to display part of a Python source file.
Usage:
pycat <filename> <identifier>
Dependencies:
astunparse or meta
Unsupported:
astor, astmonkey, macropy
Bugs:
meta (v0.4.1) can't handle some assignments. Use astunparse instead.
"""
__author__ = __maintainer__ = "Laurent Verweijen"
__license__ = "GPL3"
import sys
import ast
INDENT_WIDTH = 2
try:
import astunparse
def unparse(node):
"""Undo the effect of ast.parse using astunparse."""
return astunparse.unparse(node).strip()
except ImportError:
import meta
def unparse(node):
"""Undo the effect of ast.parse using meta.
Install astunparse for a stabler version.
"""
try:
return meta.dump_python_source(node).strip()
except Exception as e:
print('meta failed to unparse node:', repr(e))
if hasattr(node, 'targets'):
targets = ", ".join(target.id for target in node.targets)
return "{} = BUG <{}>".format(targets, repr(e))
else:
return "BUG <{}>".format(repr(e))
def main():
"""Program entry point."""
with open(sys.argv[1]) as code_file:
parsed = ast.parse(code_file.read())
t = DocumentationVisitor(sys.argv[2].split('.'))
t.visit(parsed)
class DocumentationVisitor(ast.NodeVisitor):
"""Visits the document looking for the required parts."""
def __init__(self, keywords):
"""Create instance."""
self.keywords = keywords
def visit(self, node):
"""Try to match. If a partial match is found delegate to a submatch."""
if hasattr(node, 'name') and node.name == self.keywords[0]:
if len(self.keywords) == 1:
try:
from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import TerminalFormatter
print(highlight(unparse(node), PythonLexer(),
TerminalFormatter()))
except ImportError:
print(unparse(node))
else:
dv = DocumentationVisitor(self.keywords[1:])
dv.visit(node)
else:
self.generic_visit(node)
if __name__ == "__main__":
main()
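# Hedged usage example (not part of the original script), assuming a file
# "example.py" that defines class Foo with a method bar:
#
#   ./pycat example.py Foo.bar
#
# prints only the source of Foo.bar, syntax-highlighted when pygments is
# available and as plain text otherwise.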
|
[
"lverweijen"
] |
lverweijen
|
|
8135d4bbab19fd1ba6a3d4ba541a7fddfe935954
|
5a281cb78335e06c631181720546f6876005d4e5
|
/swift-2.21.0/swift/container/replicator.py
|
ea18fbd962d46eb609540a60bd1167faf07b0d60
|
[
"Apache-2.0"
] |
permissive
|
scottwedge/OpenStack-Stein
|
d25b2a5bb54a714fc23f0ff0c11fb1fdacad85e8
|
7077d1f602031dace92916f14e36b124f474de15
|
refs/heads/master
| 2021-03-22T16:07:19.561504
| 2020-03-15T01:31:10
| 2020-03-15T01:31:10
| 247,380,811
| 0
| 0
|
Apache-2.0
| 2020-03-15T01:24:15
| 2020-03-15T01:24:15
| null |
UTF-8
|
Python
| false
| false
| 16,737
|
py
|
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import itertools
import json
from collections import defaultdict
from eventlet import Timeout
from swift.container.sync_store import ContainerSyncStore
from swift.container.backend import ContainerBroker, DATADIR, SHARDED
from swift.container.reconciler import (
MISPLACED_OBJECTS_ACCOUNT, incorrect_policy_index,
get_reconciler_container_name, get_row_to_q_entry_translator)
from swift.common import db_replicator
from swift.common.storage_policy import POLICIES
from swift.common.swob import HTTPOk, HTTPAccepted
from swift.common.exceptions import DeviceUnavailable
from swift.common.http import is_success
from swift.common.utils import Timestamp, majority_size, get_db_files
class ContainerReplicator(db_replicator.Replicator):
server_type = 'container'
brokerclass = ContainerBroker
datadir = DATADIR
default_port = 6201
def __init__(self, conf, logger=None):
super(ContainerReplicator, self).__init__(conf, logger=logger)
self.reconciler_cleanups = self.sync_store = None
def report_up_to_date(self, full_info):
reported_key_map = {
'reported_put_timestamp': 'put_timestamp',
'reported_delete_timestamp': 'delete_timestamp',
'reported_bytes_used': 'bytes_used',
'reported_object_count': 'count',
}
for reported, value_key in reported_key_map.items():
if full_info[reported] != full_info[value_key]:
return False
return True
def _gather_sync_args(self, replication_info):
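# The extra fields appended below when multiple storage policies are
# configured are read back on the remote end by
# ContainerReplicatorRpc._parse_sync_args (positional args 7..9).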
parent = super(ContainerReplicator, self)
sync_args = parent._gather_sync_args(replication_info)
if len(POLICIES) > 1:
sync_args += tuple(replication_info[k] for k in
('status_changed_at', 'count',
'storage_policy_index'))
return sync_args
def _handle_sync_response(self, node, response, info, broker, http,
different_region=False):
if is_success(response.status):
remote_info = json.loads(response.data.decode('ascii'))
if incorrect_policy_index(info, remote_info):
status_changed_at = Timestamp.now()
broker.set_storage_policy_index(
remote_info['storage_policy_index'],
timestamp=status_changed_at.internal)
sync_timestamps = ('created_at', 'put_timestamp',
'delete_timestamp')
if any(info[key] != remote_info[key] for key in sync_timestamps):
broker.merge_timestamps(*(remote_info[key] for key in
sync_timestamps))
if 'shard_max_row' in remote_info:
# Grab remote's shard ranges, too
self._fetch_and_merge_shard_ranges(http, broker)
return super(ContainerReplicator, self)._handle_sync_response(
node, response, info, broker, http, different_region)
def _sync_shard_ranges(self, broker, http, local_id):
# TODO: currently the number of shard ranges is expected to be _much_
# less than normal objects so all are sync'd on each cycle. However, in
# future there should be sync points maintained much like for object
# syncing so that only new shard range rows are sync'd.
shard_range_data = broker.get_all_shard_range_data()
if shard_range_data:
if not self._send_replicate_request(
http, 'merge_shard_ranges', shard_range_data, local_id):
return False
self.logger.debug('%s synced %s shard ranges to %s',
broker.db_file, len(shard_range_data),
'%(ip)s:%(port)s/%(device)s' % http.node)
return True
def _choose_replication_mode(self, node, rinfo, info, local_sync, broker,
http, different_region):
if 'shard_max_row' in rinfo:
# Always replicate shard ranges to new-enough swift
shard_range_success = self._sync_shard_ranges(
broker, http, info['id'])
else:
shard_range_success = False
self.logger.warning(
'%s is unable to replicate shard ranges to peer %s; '
'peer may need upgrading', broker.db_file,
'%(ip)s:%(port)s/%(device)s' % node)
if broker.sharding_initiated():
if info['db_state'] == SHARDED and len(
broker.get_objects(limit=1)) == 0:
self.logger.debug('%s is sharded and has nothing more to '
'replicate to peer %s',
broker.db_file,
'%(ip)s:%(port)s/%(device)s' % node)
else:
# Only print the scary warning if there was something that
# didn't get replicated
self.logger.warning(
'%s is able to shard -- refusing to replicate objects to '
'peer %s; have shard ranges and will wait for cleaving',
broker.db_file,
'%(ip)s:%(port)s/%(device)s' % node)
self.stats['deferred'] += 1
return shard_range_success
success = super(ContainerReplicator, self)._choose_replication_mode(
node, rinfo, info, local_sync, broker, http,
different_region)
return shard_range_success and success
def _fetch_and_merge_shard_ranges(self, http, broker):
response = http.replicate('get_shard_ranges')
if is_success(response.status):
broker.merge_shard_ranges(json.loads(
response.data.decode('ascii')))
def find_local_handoff_for_part(self, part):
"""
Look through devices in the ring for the first handoff device that was
identified during job creation as available on this node.
:returns: a node entry from the ring
"""
nodes = self.ring.get_part_nodes(part)
more_nodes = self.ring.get_more_nodes(part)
for node in itertools.chain(nodes, more_nodes):
if node['id'] in self._local_device_ids:
return node
return None
def get_reconciler_broker(self, timestamp):
"""
Get a local instance of the reconciler container broker that is
appropriate to enqueue the given timestamp.
:param timestamp: the timestamp of the row to be enqueued
:returns: a local reconciler broker
"""
container = get_reconciler_container_name(timestamp)
if self.reconciler_containers and \
container in self.reconciler_containers:
return self.reconciler_containers[container][1]
account = MISPLACED_OBJECTS_ACCOUNT
part = self.ring.get_part(account, container)
node = self.find_local_handoff_for_part(part)
if not node:
raise DeviceUnavailable(
'No mounted devices found suitable to Handoff reconciler '
'container %s in partition %s' % (container, part))
broker = ContainerBroker.create_broker(
os.path.join(self.root, node['device']), part, account, container,
logger=self.logger, put_timestamp=timestamp,
storage_policy_index=0)
if self.reconciler_containers is not None:
self.reconciler_containers[container] = part, broker, node['id']
return broker
def feed_reconciler(self, container, item_list):
"""
Add queue entries for rows in item_list to the local reconciler
container database.
:param container: the name of the reconciler container
:param item_list: the list of rows to enqueue
:returns: True if successfully enqueued
"""
try:
reconciler = self.get_reconciler_broker(container)
except DeviceUnavailable as e:
self.logger.warning('DeviceUnavailable: %s', e)
return False
self.logger.debug('Adding %d objects to the reconciler at %s',
len(item_list), reconciler.db_file)
try:
reconciler.merge_items(item_list)
except (Exception, Timeout):
self.logger.exception('UNHANDLED EXCEPTION: trying to merge '
'%d items to reconciler container %s',
len(item_list), reconciler.db_file)
return False
return True
def dump_to_reconciler(self, broker, point):
"""
Look for object rows for objects updates in the wrong storage policy
in broker with a ``ROWID`` greater than the rowid given as point.
:param broker: the container broker with misplaced objects
:param point: the last verified ``reconciler_sync_point``
:returns: the last successful enqueued rowid
"""
max_sync = broker.get_max_row()
misplaced = broker.get_misplaced_since(point, self.per_diff)
if not misplaced:
return max_sync
translator = get_row_to_q_entry_translator(broker)
errors = False
low_sync = point
while misplaced:
batches = defaultdict(list)
for item in misplaced:
container = get_reconciler_container_name(item['created_at'])
batches[container].append(translator(item))
for container, item_list in batches.items():
success = self.feed_reconciler(container, item_list)
if not success:
errors = True
point = misplaced[-1]['ROWID']
if not errors:
low_sync = point
misplaced = broker.get_misplaced_since(point, self.per_diff)
return low_sync
def _post_replicate_hook(self, broker, info, responses):
if info['account'] == MISPLACED_OBJECTS_ACCOUNT:
return
try:
self.sync_store.update_sync_store(broker)
except Exception:
self.logger.exception('Failed to update sync_store %s' %
broker.db_file)
point = broker.get_reconciler_sync()
if not broker.has_multiple_policies() and info['max_row'] != point:
broker.update_reconciler_sync(info['max_row'])
return
max_sync = self.dump_to_reconciler(broker, point)
success = responses.count(True) >= majority_size(len(responses))
if max_sync > point and success:
# to be safe, only slide up the sync point with a majority on
# replication
broker.update_reconciler_sync(max_sync)
def cleanup_post_replicate(self, broker, orig_info, responses):
if broker.sharding_required():
# despite being a handoff, since we're sharding we're not going to
# do any cleanup so we can continue cleaving - this is still
# considered "success"
self.logger.debug(
'Not deleting db %s (requires sharding, state %s)',
broker.db_file, broker.get_db_state())
return True
return super(ContainerReplicator, self).cleanup_post_replicate(
broker, orig_info, responses)
def delete_db(self, broker):
"""
Ensure that reconciler databases are only cleaned up at the end of the
replication run.
"""
if (self.reconciler_cleanups is not None and
broker.account == MISPLACED_OBJECTS_ACCOUNT):
# this container shouldn't be here, make sure it's cleaned up
self.reconciler_cleanups[broker.container] = broker
return
if self.sync_store:
try:
# DB is going to get deleted. Be preemptive about it
self.sync_store.remove_synced_container(broker)
except Exception:
self.logger.exception('Failed to remove sync_store entry %s' %
broker.db_file)
return super(ContainerReplicator, self).delete_db(broker)
def replicate_reconcilers(self):
"""
Ensure any items merged to reconciler containers during replication
are pushed out to correct nodes and any reconciler containers that do
not belong on this node are removed.
"""
self.logger.info('Replicating %d reconciler containers',
len(self.reconciler_containers))
for part, reconciler, node_id in self.reconciler_containers.values():
self.cpool.spawn_n(
self._replicate_object, part, reconciler.db_file, node_id)
self.cpool.waitall()
# wipe out the cache to disable the bypass in delete_db
cleanups = self.reconciler_cleanups
self.reconciler_cleanups = self.reconciler_containers = None
self.logger.info('Cleaning up %d reconciler containers',
len(cleanups))
for reconciler in cleanups.values():
self.cpool.spawn_n(self.delete_db, reconciler)
self.cpool.waitall()
self.logger.info('Finished reconciler replication')
def run_once(self, *args, **kwargs):
self.reconciler_containers = {}
self.reconciler_cleanups = {}
self.sync_store = ContainerSyncStore(self.root,
self.logger,
self.mount_check)
rv = super(ContainerReplicator, self).run_once(*args, **kwargs)
if any([self.reconciler_containers, self.reconciler_cleanups]):
self.replicate_reconcilers()
return rv
class ContainerReplicatorRpc(db_replicator.ReplicatorRpc):
def _db_file_exists(self, db_path):
return bool(get_db_files(db_path))
def _parse_sync_args(self, args):
parent = super(ContainerReplicatorRpc, self)
remote_info = parent._parse_sync_args(args)
if len(args) > 9:
remote_info['status_changed_at'] = args[7]
remote_info['count'] = args[8]
remote_info['storage_policy_index'] = args[9]
return remote_info
def _get_synced_replication_info(self, broker, remote_info):
"""
Sync the remote_info storage_policy_index if needed and return the
newly synced replication info.
:param broker: the database broker
:param remote_info: the remote replication info
:returns: local broker replication info
"""
info = broker.get_replication_info()
if incorrect_policy_index(info, remote_info):
status_changed_at = Timestamp.now().internal
broker.set_storage_policy_index(
remote_info['storage_policy_index'],
timestamp=status_changed_at)
info = broker.get_replication_info()
return info
def _abort_rsync_then_merge(self, db_file, old_filename):
if super(ContainerReplicatorRpc, self)._abort_rsync_then_merge(
db_file, old_filename):
return True
# if the local db has started sharding since the original 'sync'
# request then abort object replication now; instantiate a fresh broker
# each time this check is performed so as to get the latest state
broker = ContainerBroker(db_file)
return broker.sharding_initiated()
def _post_rsync_then_merge_hook(self, existing_broker, new_broker):
# Note the following hook will need to change to using a pointer and
# limit in the future.
new_broker.merge_shard_ranges(
existing_broker.get_all_shard_range_data())
def merge_shard_ranges(self, broker, args):
broker.merge_shard_ranges(args[0])
return HTTPAccepted()
def get_shard_ranges(self, broker, args):
return HTTPOk(headers={'Content-Type': 'application/json'},
body=json.dumps(broker.get_all_shard_range_data()))
|
[
"Wayne Gong@minbgong-winvm.cisco.com"
] |
Wayne Gong@minbgong-winvm.cisco.com
|
413330fcb08d0e45e153a765303e131cb0d38ace
|
68bca3f840d11cb3e611e3b2227a857f170a5cf9
|
/imio/dms/mail/browser/viewlets.py
|
edd484d647808c331e995d21bd34b2c4d154d09c
|
[] |
no_license
|
gotcha/imio.dms.mail
|
8e2e6e465313ef02e16dc890a7aaba03a55ed9aa
|
4bde3665d67ebd8a6ebca31fd1a497806c64fce6
|
refs/heads/master
| 2021-01-17T18:08:26.411910
| 2016-06-01T11:39:29
| 2016-06-01T11:39:29
| 60,174,237
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,965
|
py
|
# -*- coding: utf-8 -*-
from zope.component import getUtility
from zope.intid.interfaces import IIntIds
from zc.relation.interfaces import ICatalog
from plone.app.layout.viewlets import ViewletBase
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
from collective.contact.widget.interfaces import IContactContent
from collective.task.browser.viewlets import TaskParentViewlet
class ContactContentBackrefsViewlet(ViewletBase):
#def update(self):
# super(ContactContentBackrefsViewlet, self).update()
def backrefs(self):
# indirection method, kept separate so it is easier to override
return sorted(self.find_relations(), key=lambda obj: obj.created(), reverse=True)
def find_relations(self, from_attribute=None, from_interfaces_flattened=None):
"""
Parameters:
- from_attribute: schema attribute string
- from_interfaces_flattened: Interface class (only one)
"""
ret = []
catalog = getUtility(ICatalog)
intids = getUtility(IIntIds)
query = {'to_id': intids.getId(self.context)}
if from_attribute is not None:
query['from_attribute'] = from_attribute
if from_interfaces_flattened is not None:
query['from_interfaces_flattened'] = from_interfaces_flattened
for relation in catalog.findRelations(query):
            # we skip relations between contacts (already shown);
            # nevertheless, what about heldposition references for a person: subquery?
if IContactContent.providedBy(relation.from_object):
continue
            # PERFORMANCE TEST TO DO: use the objects directly, or use the path as a request in the portal_catalog to find the brain
ret.append(relation.from_object)
return ret
index = ViewPageTemplateFile("templates/contactcontent_backrefs.pt")
class DMSTaskParentViewlet(TaskParentViewlet):
display_above_element = False
|
[
"s.geulette@imio.be"
] |
s.geulette@imio.be
|
dea57509913619d26d849ff19edfbe19e5a3a580
|
8e6530405b5bba192c5a2855eee9d0adb14574fa
|
/widget.py
|
e5b4fff1b74263952367bd0feeb88c939eaeb2bb
|
[] |
no_license
|
kstep/ajenti-mpd
|
3b07c1527a45b90ac980e5998b41971215f7f8d9
|
666676fe08d01a5c0e6fd478c7c496e4ee0ebd02
|
refs/heads/master
| 2023-08-18T01:55:15.321386
| 2014-09-03T17:10:30
| 2014-09-03T17:10:30
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 407
|
py
|
from ajenti.api import plugin
from ajenti.api.sensors import Sensor
from ajenti.plugins.dashboard.api import DashboardWidget
from ajenti.ui.binder import Binder
@plugin
class MpdWidget (DashboardWidget):
name = _('MPD Status')
icon = 'music'
def init(self):
self.append(self.ui.inflate('mpd:widget'))
Binder(Sensor.find('mpd').value(),
self.find('mpd')).populate()
|
[
"me@kstep.me"
] |
me@kstep.me
|
767c1e70373576b3d4094240c8a8600259f84576
|
26e91aead18d0fad6f5ce8fc4adf7d8e05a2f07f
|
/byceps/services/ticketing/barcode_service.py
|
559b9b6769e392dfbe4934e9227ba03bcb05f85b
|
[
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
leathe/byceps
|
40c1f8a1aab3521fcac45d88eab6364d448d4e67
|
cd0c618af63fed1cd7006bb67da46eac0ddbb1c7
|
refs/heads/master
| 2020-12-02T09:02:51.087511
| 2019-12-14T17:00:22
| 2019-12-14T17:00:22
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,595
|
py
|
"""
byceps.services.ticketing.barcode_service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Render Code 128 (set B) barcodes as SVG images.
This implementation only supports code set B.
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from jinja2 import Template
# As seen on https://en.wikipedia.org/wiki/Code_128#Bar_code_widths
#
# (Value, 128B, Widths)
VALUES_CHARS_WIDTHS = [
( 0, ' ', '212222' ),
( 1, '!', '222122' ),
( 2, '"', '222221' ),
( 3, '#', '121223' ),
( 4, '$', '121322' ),
( 5, '%', '131222' ),
( 6, '&', '122213' ),
( 7, '\'', '122312' ),
( 8, '(', '132212' ),
( 9, ')', '221213' ),
( 10, '*', '221312' ),
( 11, '+', '231212' ),
( 12, ',', '112232' ),
( 13, '-', '122132' ),
( 14, '.', '122231' ),
( 15, '/', '113222' ),
( 16, '0', '123122' ),
( 17, '1', '123221' ),
( 18, '2', '223211' ),
( 19, '3', '221132' ),
( 20, '4', '221231' ),
( 21, '5', '213212' ),
( 22, '6', '223112' ),
( 23, '7', '312131' ),
( 24, '8', '311222' ),
( 25, '9', '321122' ),
( 26, ':', '321221' ),
( 27, ';', '312212' ),
( 28, '<', '322112' ),
( 29, '=', '322211' ),
( 30, '>', '212123' ),
( 31, '?', '212321' ),
( 32, '@', '232121' ),
( 33, 'A', '111323' ),
( 34, 'B', '131123' ),
( 35, 'C', '131321' ),
( 36, 'D', '112313' ),
( 37, 'E', '132113' ),
( 38, 'F', '132311' ),
( 39, 'G', '211313' ),
( 40, 'H', '231113' ),
( 41, 'I', '231311' ),
( 42, 'J', '112133' ),
( 43, 'K', '112331' ),
( 44, 'L', '132131' ),
( 45, 'M', '113123' ),
( 46, 'N', '113321' ),
( 47, 'O', '133121' ),
( 48, 'P', '313121' ),
( 49, 'Q', '211331' ),
( 50, 'R', '231131' ),
( 51, 'S', '213113' ),
( 52, 'T', '213311' ),
( 53, 'U', '213131' ),
( 54, 'V', '311123' ),
( 55, 'W', '311321' ),
( 56, 'X', '331121' ),
( 57, 'Y', '312113' ),
( 58, 'Z', '312311' ),
( 59, '[', '332111' ),
( 60, '\\', '314111' ),
( 61, ']', '221411' ),
( 62, '^', '431111' ),
( 63, '_', '111224' ),
( 64, '`', '111422' ),
( 65, 'a', '121124' ),
( 66, 'b', '121421' ),
( 67, 'c', '141122' ),
( 68, 'd', '141221' ),
( 69, 'e', '112214' ),
( 70, 'f', '112412' ),
( 71, 'g', '122114' ),
( 72, 'h', '122411' ),
( 73, 'i', '142112' ),
( 74, 'j', '142211' ),
( 75, 'k', '241211' ),
( 76, 'l', '221114' ),
( 77, 'm', '413111' ),
( 78, 'n', '241112' ),
( 79, 'o', '134111' ),
( 80, 'p', '111242' ),
( 81, 'q', '121142' ),
( 82, 'r', '121241' ),
( 83, 's', '114212' ),
( 84, 't', '124112' ),
( 85, 'u', '124211' ),
( 86, 'v', '411212' ),
( 87, 'w', '421112' ),
( 88, 'x', '421211' ),
( 89, 'y', '212141' ),
( 90, 'z', '214121' ),
( 91, '{', '412121' ),
( 92, '|', '111143' ),
( 93, '}', '111341' ),
( 94, '~', '131141' ),
( 95, 'DEL', '114113' ),
( 96, 'FNC_3', '114311' ),
( 97, 'FNC_2', '411113' ),
( 98, 'Shift_A', '411311' ),
( 99, 'Code_C', '113141' ),
(100, 'FNC_4', '114131' ),
(101, 'Code_A', '311141' ),
(102, 'FNC_1', '411131' ),
(103, 'Start_A', '211412' ),
(104, 'Start_B', '211214' ),
(105, 'Start_C', '211232' ),
(106, 'Stop', '2331112'),
]
VALUES_TO_WIDTHS = {value: width for value, _, width in VALUES_CHARS_WIDTHS}
CHARS_TO_VALUES = {char: value for value, char, _ in VALUES_CHARS_WIDTHS}
SVG_TEMPLATE = Template('''
<svg xmlns="http://www.w3.org/2000/svg" width="{{ image_width }}" height="{{ image_height }}" viewBox="0 0 {{ image_width }} {{ image_height }}">
<rect width="{{ image_width }}" height="{{ image_height }}" fill="white"/>
{%- for bar_x, bar_width in bars %}
<rect x="{{ bar_x }}" width="{{ bar_width }}" height="{{ image_height }}"/>
{%- endfor %}
</svg>
'''.strip())
def render_svg(text, *, thickness=3):
values = list(_generate_values(text))
bar_widths = list(_generate_bars(values, thickness))
return _generate_svg(bar_widths)
def _generate_values(text):
check_digit_calculation_values = []
# start symbol
start_symbol_value = _to_value('Start_B')
yield start_symbol_value
check_digit_calculation_values.append(start_symbol_value)
text_values = list(map(_to_value, text))
yield from text_values
check_digit_calculation_values.extend(text_values)
# check digit symbol
check_digit_value = _calculate_check_digit_value(
check_digit_calculation_values
)
yield check_digit_value
# stop symbol
stop_symbol_value = _to_value('Stop')
yield stop_symbol_value
def _to_value(char):
return CHARS_TO_VALUES[char]
def _calculate_check_digit_value(values):
# Important: *Both* the start code *and* the
# first encoded symbol are in position 1.
symbol_products_sum = sum(
max(1, position) * value for position, value in enumerate(values)
)
return symbol_products_sum % 103
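# Worked example (illustrative, not from the original module): for the text
# 'A' the summed values are Start_B (104, position 1) and 'A' (33, position 1),
# so the check digit value is (1 * 104 + 1 * 33) % 103 = 137 % 103 = 34 ('B').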
def _generate_bars(values, thickness):
for value in values:
for width in VALUES_TO_WIDTHS[value]:
bar_width = int(width) * thickness
yield bar_width
def _generate_svg(bar_widths, *, image_height=100):
image_width = sum(bar_widths)
x = 0
# Calculate where the individual bars are positioned
# horizontally and how wide they are.
bar_positions_and_widths = list(
_calculate_bar_positions_and_widths(x, bar_widths)
)
# Render template.
return SVG_TEMPLATE.render(
image_width=image_width,
image_height=image_height,
bars=bar_positions_and_widths,
)
def _calculate_bar_positions_and_widths(start_x, bar_widths):
"""Yield a (horizontal position, width) pair for each bar."""
x = start_x
draw_bar = True
for width in bar_widths:
if draw_bar:
yield x, width
draw_bar = not draw_bar
x += width
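# Minimal usage sketch (an assumption, not part of the original module);
# the text must use code set B characters and the output path is made up.
if __name__ == '__main__':
    svg = render_svg('TICKET-0001', thickness=2)
    with open('barcode.svg', 'w') as f:
        f.write(svg)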
|
[
"homework@nwsnet.de"
] |
homework@nwsnet.de
|
0bc79cb00c697176464895afec40839910b3ffad
|
8c85ec94d1d7cfd5c0df2053b46321ff8e006c9c
|
/cm.py
|
89bd9ab07cf5223d2b182157acfb7afe2575023a
|
[] |
no_license
|
onikazu/Doppel
|
c63ade484dd4aaec31a6147a0355412532907324
|
4eef6867f6aa94fec6a4f25f535179055e5ef110
|
refs/heads/master
| 2020-04-12T13:45:54.053763
| 2018-12-20T05:30:29
| 2018-12-20T05:30:29
| 162,531,021
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 146
|
py
|
import cv2
cap = cv2.VideoCapture(0)  # the argument is the camera device number
while True:
    ret, frame = cap.read()
    if not ret:  # stop when no frame can be read
        break
    print(frame.shape)
cap.release()
|
[
"kazuki.masumura@adminnoMacBook-puro-8.local"
] |
kazuki.masumura@adminnoMacBook-puro-8.local
|
14e4c8929a0590efb9cd81847f91e95ae862b8a5
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_110/ch4_2020_10_07_12_11_33_920910.py
|
a8fd249a14f02121d81fa5213aecbdbb654e4af1
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628
| 2020-12-16T05:21:31
| 2020-12-16T05:21:31
| 306,735,108
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 177
|
py
|
def classifica_idade(idade):
    if idade <= 11:
        print("crianca")
    elif 12 <= idade <= 17:
        print("adolescente")
    else:
        print("adulto")
|
[
"you@example.com"
] |
you@example.com
|
6db3e27cddc408c3fa4ad706ac9ab210f5fe9b00
|
eb05fd62ad7085dbe37e93db1beeb423ccbd1dbe
|
/5.10-shellsort.py
|
5fcfad8fd3bc286627ada9406a048413391e4234
|
[] |
no_license
|
hechty/datastructure
|
cb552099f329fa7c2a500b24b62380d279cdc90f
|
32adfe893b384e7a004fc045b9c6dbf1870ba7a0
|
refs/heads/master
| 2021-01-20T09:41:43.998692
| 2017-06-03T03:45:12
| 2017-06-03T03:45:12
| 90,276,738
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,296
|
py
|
#! /usr/bin/env python3
from time import perf_counter  # time.clock was removed in Python 3.8
from random import randint
def insertion_sort(alist):
    ordered_list = alist[:1]
    for i in range(len(alist) - 1):
        new_data = alist[i + 1]
        ordered_list = insertion(new_data, ordered_list)
    alist[:] = ordered_list  # assign in place so the caller's list is actually sorted
def insertion(new_data, ordered_list):
    posi = 0
    for i in range(len(ordered_list) - 1, -1, -1):
        if new_data >= ordered_list[i]:
            posi = i + 1
            break
    ordered_list.insert(posi, new_data)
    return ordered_list
def shellsort(alist):
    gap = len(alist) // 2
    while gap > 0:
        for startposi in range(gap):
            for i in range(startposi + gap, len(alist), gap):
                currposi = i
                # test the bound first so a negative index never wraps around
                while currposi - gap >= 0 and alist[currposi] < alist[currposi - gap]:
                    alist[currposi], alist[currposi - gap] = alist[currposi - gap], alist[currposi]
                    currposi = currposi - gap
        gap = gap // 2
    return alist
ls = [randint(0, 1000) for _ in range(1000)]
start = perf_counter()
for i in range(10):
    insertion_sort(ls)
end = perf_counter()
print("insertion_sort runningtime:", end - start)
print(ls[:15])
start = perf_counter()
for i in range(10):
    shellsort(ls)
end = perf_counter()
print("shell_sort runningtime:", end - start)
print(ls[:15])
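# Note: after the first of the ten passes the list is already sorted, so both
# timings mostly measure best-case input; sorting a fresh copy per pass
# (e.g. insertion_sort(ls[:])) would give a fairer comparison.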
|
[
"hchw1230@qq.com"
] |
hchw1230@qq.com
|
06f183d8e03559a241d25b8ab307285ffe3613c2
|
19b701d35c1f566ac4e1823708603889924c91cb
|
/routes.py
|
fcf6bc61692426954d14232f4c4a3bc3c19b004d
|
[] |
no_license
|
peterdorsi/made-with-angular
|
dae89649c824f949a3eaf54991f38399612e684d
|
96981448bd05868b097a83a219ce1261855ee010
|
refs/heads/master
| 2021-01-22T19:13:45.849537
| 2017-09-05T01:15:47
| 2017-09-05T01:15:47
| 102,416,384
| 0
| 0
| null | 2017-09-05T00:59:47
| 2017-09-05T00:59:47
| null |
UTF-8
|
Python
| false
| false
| 5,746
|
py
|
from flask import Flask, render_template, request, redirect
import requests
# import requests_toolbelt.adapters.appengine
import json
from flask_sslify import SSLify
app = Flask(__name__)
sslify = SSLify(app)
# Use the App Engine Requests adapter. This makes sure that Requests uses
# URLFetch.
# requests_toolbelt.adapters.appengine.monkeypatch()
@app.route("/")
def index():
categories = {
"google": {"name": "By Google", "sites": []},
"books-reference": {"name": "Books & Reference", "sites": []},
"business": {"name": "Business", "sites": []},
"communication": {"name": "Communication", "sites": []},
"education": {"name": "Education", "sites": []},
"entertainment": {"name": "Entertainment", "sites": []},
"finance": {"name": "Finance", "sites": []},
"health-fitness": {"name": "Health & Fitness", "sites": []},
"lifestyle": {"name": "Lifestyle", "sites": []},
"media-video": {"name": "Media & Video", "sites": []},
"music-audio": {"name": "Music & Audio", "sites": []},
"news-magazines": {"name": "News & Magazines", "sites": []},
"photography": {"name": "Photography", "sites": []},
"productivity": {"name": "Productivity", "sites": []},
"shopping": {"name": "Shopping", "sites": []},
"social": {"name": "Social", "sites": []},
"sports": {"name": "Sports", "sites": []},
"tools": {"name": "Tools", "sites": []},
"travel-local": {"name": "Travel & Local", "sites": []},
"transportation": {"name": "Transportation", "sites": []},
"weather": {"name": "Weather", "sites": []},
"community": {"name": "From the Community", "sites": []}
}
r = requests.get('https://s3.amazonaws.com/madewithangular.com/projects.json')
projects = json.loads(r.text)
for project in reversed(projects):
for tag in project['tags']:
categories[tag]["sites"].append(project)
return render_template('index.html', categories=categories)
@app.route("/categories/<category>")
def categories(category):
categories = {
"google": {"name": "By Google", "sites": []},
"books-reference": {"name": "Books & Reference", "sites": []},
"business": {"name": "Business", "sites": []},
"communication": {"name": "Communication", "sites": []},
"education": {"name": "Education", "sites": []},
"entertainment": {"name": "Entertainment", "sites": []},
"finance": {"name": "Finance", "sites": []},
"health-fitness": {"name": "Health & Fitness", "sites": []},
"lifestyle": {"name": "Lifestyle", "sites": []},
"media-video": {"name": "Media & Video", "sites": []},
"music-audio": {"name": "Music & Audio", "sites": []},
"news-magazines": {"name": "News & Magazines", "sites": []},
"photography": {"name": "Photography", "sites": []},
"productivity": {"name": "Productivity", "sites": []},
"shopping": {"name": "Shopping", "sites": []},
"social": {"name": "Social", "sites": []},
"sports": {"name": "Sports", "sites": []},
"tools": {"name": "Tools", "sites": []},
"travel-local": {"name": "Travel & Local", "sites": []},
"transportation": {"name": "Transportation", "sites": []},
"weather": {"name": "Weather", "sites": []},
"community": {"name": "From the Community", "sites": []}
}
r = requests.get('https://s3.amazonaws.com/madewithangular.com/projects.json')
projects = json.loads(r.text)
for project in reversed(projects):
for tag in project['tags']:
if tag == category:
categories[tag]["sites"].append(project)
return render_template('category.html', category=categories[category])
@app.route("/sites/<site>")
def sites(site):
r = requests.get('https://s3.amazonaws.com/madewithangular.com/projects.json')
projects = json.loads(r.text)
s = {}
for project in projects:
if project['slug'] == site:
s = project
print s
return render_template('site.html', site=s)
@app.route("/about")
def about():
return render_template('about.html')
@app.route("/sitemap.xml")
def sitemap():
categories = {
"google": {"name": "By Google", "sites": []},
"books-reference": {"name": "Books & Reference", "sites": []},
"business": {"name": "Business", "sites": []},
"communication": {"name": "Communication", "sites": []},
"education": {"name": "Education", "sites": []},
"entertainment": {"name": "Entertainment", "sites": []},
"finance": {"name": "Finance", "sites": []},
"health-fitness": {"name": "Health & Fitness", "sites": []},
"lifestyle": {"name": "Lifestyle", "sites": []},
"media-video": {"name": "Media & Video", "sites": []},
"music-audio": {"name": "Music & Audio", "sites": []},
"news-magazines": {"name": "News & Magazines", "sites": []},
"photography": {"name": "Photography", "sites": []},
"productivity": {"name": "Productivity", "sites": []},
"shopping": {"name": "Shopping", "sites": []},
"social": {"name": "Social", "sites": []},
"sports": {"name": "Sports", "sites": []},
"tools": {"name": "Tools", "sites": []},
"travel-local": {"name": "Travel & Local", "sites": []},
"transportation": {"name": "Transportation", "sites": []},
"weather": {"name": "Weather", "sites": []},
"community": {"name": "From the Community", "sites": []}
}
r = requests.get('https://s3.amazonaws.com/madewithangular.com/projects.json')
projects = json.loads(r.text)
return render_template('sitemap.xml', categories=categories, projects=projects), {'Content-Type': 'application/xml'}
@app.route("/.well-known/acme-challenge/992pNK1lUhKrHf3iIWuASHa9Wq-5-MtDsHnDS25JU5E")
def challenge():
return '992pNK1lUhKrHf3iIWuASHa9Wq-5-MtDsHnDS25JU5E.JOWRB-Zapoo_KR7LqPnalfsmcjZbBDdjBvaVR213WYg'
if __name__ == "__main__":
app.run(debug=True)
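# Refactoring sketch (an assumption, not the original code): index(),
# categories() and sitemap() each rebuild the same category dict; a shared
# factory would keep the three views from drifting apart.
#
# CATEGORY_NAMES = {"google": "By Google", "business": "Business"}  # etc.
#
# def make_categories():
#     return {slug: {"name": name, "sites": []}
#             for slug, name in CATEGORY_NAMES.items()}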
|
[
"lpolepeddi@gmail.com"
] |
lpolepeddi@gmail.com
|
454a94a13985924e0e27c3014dde5c053e7d4662
|
ff5c2c9f390da92e083eff64e4ea12483d6bf1e9
|
/mit601/ch4_hanoi_tower.py
|
919261f044905ee7f2dc7e53a40cedefc33ebbcb
|
[] |
no_license
|
MI-7/python
|
5942a713c7dea355ac6a39df87e129c33ca0e262
|
df0f1fac9bd6af27a49086026e31595ee7b3926e
|
refs/heads/master
| 2021-01-12T09:50:11.963456
| 2017-01-17T13:38:05
| 2017-01-17T13:38:05
| 76,270,209
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 437
|
py
|
# towers of hanoi
steps = 0
# move n levels of tower from pole A to pole B, using pole C as spare
def move(n, A, B, C):
    global steps  # declared once for the whole function
    if n == 1:
        print "move 1 from ", A, " to ", B
        steps += 1
    else:
        move(n-1, A, C, B)
        print "move ", n, " from ", A, " to ", B
        steps += 1
        move(n-1, C, B, A)
move(5, 'a', 'b', 'c')
print "total steps: ", steps
|
[
"leonchan@chenliangdeiMac-2.local"
] |
leonchan@chenliangdeiMac-2.local
|
5a73c4dea7915f11e4b8c87c2ccec2c0b32b93f2
|
9aa0a487f5ec98b752b079a4cc283223ac1d9299
|
/src/pathbuild.py
|
8c4076e2b7f9a2e7d4ba0bbfa3b8645255b0a56d
|
[] |
no_license
|
subsetOfInsanity/tourent-plans
|
8e8e5430f6ef1de04caf01ed8308ee321da6601e
|
3014d794b09b1eb56a249e8e8fd812bea13d1080
|
refs/heads/master
| 2020-12-24T12:52:26.334462
| 2012-10-27T16:40:43
| 2012-10-27T16:40:43
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,615
|
py
|
#!/usr/bin/env python
# encoding: utf-8
"""
pathbuild.py
creates paths around various places (TSP)
uses google maps api for distance measurement and
point-to-point route creation
Created by Benjamin Fields on 2012-01-28.
Copyright (c) 2012 . All rights reserved.
"""
import sys
import os
import unittest
import urllib2
import random
from simplejson import loads
import numpy as np
class PathBuild:
uri = "http://maps.googleapis.com/maps/api/distancematrix/json?origins={points_list}&destinations={points_list}&sensor=false"
def __init__(self, points):
self.points = points
self.dists = None
def get_matrix(self):
if not self.dists:
points_as_str = '|'.join([','.join([lat,lon]) for (lat,lon) in self.points])
print "opening", PathBuild.uri.format(points_list=points_as_str)
raw_matrix = loads(urllib2.urlopen(PathBuild.uri.format(points_list=points_as_str)).read())['rows']
print raw_matrix
self.dists = np.zeros((len(self.points),len(self.points)))
for idx, row in enumerate(raw_matrix):
print "adding", row, "to", idx
self.dists[idx] =[item['distance']['value'] for item in row['elements'] if item['status']=='OK']
return self.dists
def get_random_path(self):
return random.sample(self.points, len(self.points))
def get_optimal_path(self):
"""
tsp here
"""
return self.get_random_path()
class PathBuildTests(unittest.TestCase):
def setUp(self):
pass
if __name__ == '__main__':
unittest.main()
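# Greedy nearest-neighbour sketch (an assumption; the project's TSP is not
# implemented yet). get_optimal_path could walk the matrix from get_matrix():
#
# def get_nearest_neighbour_path(self):
#     dists = self.get_matrix()
#     order, unvisited = [0], set(range(1, len(self.points)))
#     while unvisited:
#         nxt = min(unvisited, key=lambda j: dists[order[-1]][j])
#         order.append(nxt)
#         unvisited.remove(nxt)
#     return [self.points[i] for i in order]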
|
[
"me@benfields.net"
] |
me@benfields.net
|
a70a5b7c8d235f92204efb35d6b0f4c4767e6633
|
7d192e7a63ce7f4e294ea0f2b6639aecddcc7167
|
/models.py
|
023e9609e2a722a04f26f325b5d90ac180b33803
|
[] |
no_license
|
WaterCountry/BottleBlog
|
2a48149301733354d686caaf3fb438783d45d87f
|
2cf19a1d63045cd6972d35cabebb1cf00474a37d
|
refs/heads/master
| 2020-05-26T06:12:46.695588
| 2019-06-05T14:34:14
| 2019-06-05T14:34:14
| 188,132,501
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,352
|
py
|
from pony.orm import *
from datetime import date,datetime
db=Database('sqlite', filename="blog.sqlite", create_db=True)
class User(db.Entity):
name=Required(str)
nick=Required(str)
password=Required(str)
email=Required(str,unique=True)
regdate=Required(date)
blogs=Set("Blog")
photos=Set("Photo")
class Blog(db.Entity):
title=Required(str)
content=Required(str)
update=Required(date)
author=Required(User)
class Photo(db.Entity):
name=Required(str)
ext=Required(str)
url=Required(str)
size=Required(str)
update=Required(date)
author=Required(User)
class Program(db.Entity):
title=Required(str)
code=Required(str)
level=Required(str)
update=Required(date)
sql_debug(True)
db.generate_mapping(create_tables=True)
today=datetime.now()
@db_session
def populate_database():
if select(u for u in User).count()>0:
addtable_photo()
addtable_program()
return
u1=User(name='zhou',nick='fun',password='123',email='zhou@bottle.com',regdate=today)
u2=User(name='shen',nick='阳光',password='123',email='shen@bottle.com',regdate=today)
Blog(title='Every day',content='Meet a better self every day !',update=today,author=u1)
Blog(title='每一天',content='每一天遇见更好的自己!',update=today,author=u2)
@db_session
def addtable_program():
if select(p for p in Program).count()>0:
return
    Program(title='a+b', code='a=1\n'
                              'b=2\n'
                              'c=a+b\n'
                              'print(c)\n',
            level='0', update=today)
@db_session
def addtable_photo():
if select(p for p in Photo).count()>0:
return
Photo(name='1',ext='jpg',url='/store/1.jpg',size='557kb',update=today,author=User[1])
Photo(name='2',ext='jpg',url='/store/2.jpg',size='40kb',update=today,author=User[1])
populate_database()
@db_session
def check_login(username,password):
loginuser = select(u for u in User if u.name == username and u.password == password).first()
return loginuser
@db_session
def update_upload():
ps=select(p for p in Photo)
if ps.count()>0:
for p in ps:
p.url='/static'+p.url
@db_session
def del_upload():
ps=select(p for p in Photo)
for p in ps:
p.delete()
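# Usage sketch (illustrative): the seed data above creates user 'zhou' with
# password '123', so check_login('zhou', '123') returns that User entity and
# check_login('zhou', 'wrong') returns None.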
|
[
"33112897@qq.com"
] |
33112897@qq.com
|
bcfc8ca641c899996b681c98278808626a82b869
|
fc4bd93298961d886f59ac8f75cfe713b127eaa4
|
/prog10.py
|
90f953415a309cef9a631d20337c6ec2eb742925
|
[] |
no_license
|
Kamaljeetsingh02/pythonProject1
|
39f13c2226a80c6bff6795288b40efb29a4bc86d
|
e9fd64350c436a99d134ccf1b02008f84cdf10d1
|
refs/heads/master
| 2023-07-28T21:40:04.943198
| 2021-09-20T06:20:55
| 2021-09-20T06:20:55
| 408,332,618
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 464
|
py
|
# def sum(*sum):
# # return result
# # for x in num:
# # result += (x)
# # sum(1,2)
opt=int(input("plz enter 1 for add,2 for subtract,3 for multiply and 4 for divide"))
a=[]
b=int(input("please en no"))
a.append(int(b))
while b:
b = input("please en no")
if b:
a.append(int(b))
print(a)
def calculator(opt,*args):
res=0
for arg in args:
if(opt==1):
res +=arg
return res
calculator()
|
[
"kamalrai98765@gmail.com"
] |
kamalrai98765@gmail.com
|