blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2 values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23 values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 213 values | src_encoding stringclasses 30 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 2 10.3M | extension stringclasses 246 values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7abf345a31d0ddef1b30276521f26fa294a9afe5 | 36932de19a1217e1329f66de8f6c598775177a7b | /job_source_resolver/job_source_resolver/urls.py | f689607792447e2768db2604c7964c71c6307c22 | [] | no_license | JMahal0/job_source_resolver | 1ac40a0517b143f8d30cc0265a90f1b8b46cd278 | 7286f92d461286b5298b721fe480a85194aa89db | refs/heads/main | 2023-03-04T21:24:48.333858 | 2021-02-09T09:00:17 | 2021-02-09T09:00:17 | 337,210,248 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 254 | py | from django.contrib import admin
from django.urls import path, include
from resolver import views
# URL routes for the job_source_resolver project.
urlpatterns = [
    path('admin/', admin.site.urls),  # built-in Django admin
    path('', views.index, name='Index'),  # landing page
    path('jobsource', views.job_source, name='Job Source')  # job-source lookup view
]
| [
"tajm27@gmail.com"
] | tajm27@gmail.com |
82bd0903b237c435b32e78a3cffc5904f9cd8530 | 66aab3cd312f3f97467cfc0f5d6d36b2e2c350d5 | /billing/migrations/0002_auto_20200612_1811.py | 97ee1a3d9ef1efb3ad4eec2820bf1241000546b3 | [] | no_license | AlohaOttawa/FoodMenu | 511680ee715c1e8fefc3566ac34d26c6cc1b0bbd | ca084dce2a97a15ccdddfb49e604a0b6a1416da9 | refs/heads/master | 2022-11-28T06:53:41.782337 | 2020-08-11T22:16:14 | 2020-08-11T22:16:14 | 268,637,998 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 601 | py | # Generated by Django 3.0.4 on 2020-06-12 22:11
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: make BillingProfile.user a nullable
    one-to-one link that is set to NULL when the referenced user is deleted."""

    # Must run after the initial billing migration and the (swappable) user model.
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('billing', '0001_initial'),
    ]
    operations = [
        migrations.AlterField(
            model_name='billingprofile',
            name='user',
            field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL),
        ),
    ]
| [
"61981451+AlohaOttawa@users.noreply.github.com"
] | 61981451+AlohaOttawa@users.noreply.github.com |
37258d2856640a46044547ebbdfce25554c51def | ab3af215296c12b5e1dcfb35c5ac2a7c08094da7 | /circular_linked_list.py | 1c7f8c8af85a9714be8dae215b5da58f811c2026 | [] | no_license | MVReddy/CircularLinkedList | e42c36f647a711e9862cb532ca5589d305094e6f | b502724fae0ac6817699cce8f76dc591c8e2effb | refs/heads/master | 2021-01-15T13:18:18.914633 | 2015-06-09T09:31:20 | 2015-06-09T09:31:20 | 37,123,096 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 808 | py | """
This is a simple python script to identify given linked list is
Circular or not.
"""
class Node(object):
    """A singly linked list node: a payload plus a link to its successor."""

    def __init__(self, value, n=None):
        # Keep the payload and the (optional) successor reference.
        self.value = value
        self.next = n
def create_list():
    """Build the circular list 1 -> 2 -> ... -> 8 -> 1 and return its head."""
    tail = Node(8)
    head = tail
    # Prepend the remaining values so the list reads 1..8 from the head.
    for value in (7, 6, 5, 4, 3, 2, 1):
        head = Node(value, head)
    # Close the cycle: the last node points back at the first.
    tail.next = head
    return head
def is_circular(head):
    """Return True if the singly linked list starting at *head* contains a cycle.

    Uses Floyd's tortoise-and-hare cycle detection: a slow pointer advances one
    node per step, a fast pointer two; they can only meet (by identity) if the
    list loops back on itself.

    Fixes over the original:
    - compare node *identity* (`is`), not `.value` — duplicate values in an
      acyclic list previously produced false positives;
    - guard `fast.next` before dereferencing, so an acyclic list returns False
      instead of raising AttributeError on None;
    - removed the debug print inside the loop.
    """
    if head is None:
        return False
    slow = head
    fast = head
    while fast is not None and fast.next is not None:
        slow = slow.next
        fast = fast.next.next
        if slow is fast:  # pointers met -> a cycle exists
            return True
    return False  # fast ran off the end -> no cycle
if __name__ == "__main__":
node = create_list()
print(is_circular(node)) | [
"venkatareddy.mulam@techmahindra.com"
] | venkatareddy.mulam@techmahindra.com |
e0687c401b2a2f41a0d216891d993b549b11f61b | 51a37b7108f2f69a1377d98f714711af3c32d0df | /src/leetcode/P3639.py | c1a00d09259b34ab68d74d1c8881c2f67e11c842 | [] | no_license | stupidchen/leetcode | 1dd2683ba4b1c0382e9263547d6c623e4979a806 | 72d172ea25777980a49439042dbc39448fcad73d | refs/heads/master | 2022-03-14T21:15:47.263954 | 2022-02-27T15:33:15 | 2022-02-27T15:33:15 | 55,680,865 | 7 | 1 | null | null | null | null | UTF-8 | Python | false | false | 541 | py | class Solution:
def isBipartite(self, graph):
n = len(graph)
c = [-1] * n
ret = [True]
def color(x, w):
for y in graph[x]:
if c[y] == -1:
c[y] = 1 - w
color(y, 1 - w)
if not ret[0]:
return
elif c[y] != 1 - w:
ret[0] = False
return
for i in range(n):
if c[i] == -1:
color(i, 0)
return ret[0]
| [
"stupidchen@foxmail.com"
] | stupidchen@foxmail.com |
e00f59a705bf22b90151588bdfa1d862ad0cf9ce | 7479d9b5988f3ab7fbfd4d2aa5966e7d395999fa | /Edge Detection/EdgeDetection.py | 2d81149c8d98b66f8c539b691ef3c47856cc58a8 | [] | no_license | kuro8bit/Image-Processing | f532c3353b3bb3a5770b8fad444138b1a3d1afaf | c1d3ff29e3bdf3d30b9b0f9c887d05887061c193 | refs/heads/master | 2023-01-13T10:22:27.328403 | 2023-01-10T18:15:42 | 2023-01-10T18:15:42 | 158,499,137 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,840 | py | import numpy as np
import cv2
import time
# input image filepath
filepath = 'images/input.png'
# Edge Detection Class
class edge:
# available filters (some with horizontal(_h) and vertical(_v) direction)
filters = {
'diff_h' : np.array(([0, 0, 0],
[0,-1, 1],
[0, 0, 0])),
'diff_v' : np.array(([0, 0, 0],
[0,-1, 0],
[0, 1, 0])),
'prewitt_h' : np.array([[-1,0,1],
[-1,0,1],
[-1,0,1]]),
'prewitt_v' : np.array([[-1,-1,-1],
[0,0,0],
[1,1,1]]),
'roberts_h' : np.array([[0,0,0],
[0,0,1],
[0,-1,0]]),
'roberts_v' : np.array([[0,0,0],
[0,1,0],
[0,0,-1]]),
'sobel_h' : np.array([[-1,0,1],
[-2,0,2],
[-1,0,1]]),
'sobel_v' : np.array([[-1,-2,-1],
[0,0,0],
[1,2,1]]),
'laplacian4' : np.array([[0, 1, 0],
[1,-4, 1],
[0, 1, 0]]),
'laplacian8' : np.array([[1, 1, 1],
[1,-8, 1],
[1, 1, 1]]),
}
def detection(img, filtername):
# get Horizontal/Vertical mask of filter or Laplacian
mask1 = edge.filters[filtername] if(filtername.startswith('laplacian')) else edge.filters[filtername + '_h']
mask2 = np.zeros((3,3)) if(filtername.startswith('laplacian')) else edge.filters[filtername + '_v']
M,N = img.shape[:2] # image size
imgf = np.zeros((M,N), dtype=np.uint8) # output image
for x in range(0,M):
for y in range(0,N):
x0, y0 = max(x-1, 0), max(y-1,0) # image start row/col indexes
xf, yf = min(x+1, M-1), min(y+1, N-1) # image final row/col indexes
u0, v0 = 0 if(x>0) else 1, 0 if(y>0) else 1 # mask start row/col indexes
uf, vf = 2 if(x<M-1) else 1, 2 if(y<N-1) else 1 # mask final row/col indexes
# multiply mask to image
tmp1 = np.multiply(img[x0:xf+1, y0:yf+1], mask1[u0:uf+1,v0:vf+1])
tmp2 = np.multiply(img[x0:xf+1, y0:yf+1], mask2[u0:uf+1,v0:vf+1])
# sum of all values
g1 = np.sum(tmp1)
g2 = np.sum(tmp2)
# square root (g1^2 + g2^2)
g = int(np.sqrt(g1*g1+g2*g2))
# set value g in output image g E [0,255]
imgf[x,y] = max(min(g, 255), 0)
return imgf
# --- driver: run every filter on the input image, display and save results ---
# read input image as grayscale
img = cv2.imread(filepath, 0)  # flag 0 = IMREAD_GRAYSCALE; returns None if the file is missing
# display input image
cv2.namedWindow('Input Image')
cv2.imshow('Input Image', img)
# list of all available filters
filters = ['diff', 'prewitt', 'roberts', 'sobel', 'laplacian4', 'laplacian8']
for flt in filters:
    t0 = time.time() # start time
    img1 = edge.detection(img, flt) # calculate edges
    t1 = time.time() # end time
    # display output image of current filter (window title includes runtime)
    title = flt + ' (time: {:.2f}s)'.format(t1-t0)
    cv2.namedWindow(title)
    cv2.imshow(title, img1)
    # save output image
    cv2.imwrite('images/output-' + flt + '.png', img1)
# block until a key is pressed in an image window, then close all windows
cv2.waitKey()
cv2.destroyAllWindows()
| [
"noreply@github.com"
] | kuro8bit.noreply@github.com |
a09e85a9b007d8d8f1876c8ad3143999c4e42bf6 | 099b57613250ae0a0c3c75cc2a9b8095a5aac312 | /leetcode/Graph/Union并查集/500. 朋友圈(并查集).py | 771d42fa8d7df2fd6876c58eea3fff89d96ca4bb | [] | no_license | MitsurugiMeiya/Leetcoding | 36e41c8d649b777e5c057a5241007d04ad8f61cd | 87a6912ab4e21ab9be4dd6e90c2a6f8da9c68663 | refs/heads/master | 2022-06-17T19:48:41.692320 | 2020-05-13T16:45:54 | 2020-05-13T16:45:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,487 | py | class UF:
parent = {}
cnt = 0
def __init__(self, M):
# 我们要理解,为什么这里self.parent只构造到n位
# 因为这是本题的特性,因为例如本题的3*3 矩阵,其实这只有3个人
n = len(M)
for i in range(n):
self.parent[i] = i
# 我们每创建一个新的父亲,就把父亲的数量,self.cnt +1
self.cnt += 1
def find(self, x):
while x != self.parent[x]:
x = self.parent[x]
return x
def union(self, p, q):
if self.connected(p, q):
return
self.parent[self.find(p)] = self.find(q)
# 我们找到两个最高节点,然后让一个指向另外一个,这个时候self.cnt -= 1
# 父节点变小
self.cnt -= 1
def connected(self, p, q):
return self.find(p) == self.find(q)
class Solution:
def findCircleNum(self, M):
n = len(M)
uf = UF(M)
for i in range(n):
for j in range(n):
# [i][j] == 1, 那就说 i,j有朋友关系,意思就是他们共属于同一个父亲
if M[i][j] == 1:
uf.union(i, j)
return uf.cnt
"""
时间复杂度:O(n^3) 访问整个矩阵一次,并查集操作需要最坏O(n) 的时间。
空间复杂度:O(n),parent 大小为 n。
https://leetcode-cn.com/problems/friend-circles/solution/mo-ban-ti-bing-cha-ji-python3-by-fe-lucifer-2/
""" | [
"yifu3@ualberta.ca"
] | yifu3@ualberta.ca |
97122be51c1ffb6ff79dbc7974cf4f02820372a7 | 49a6cc88f56cfd7c928c1f474a6df8ecc35d04f8 | /setup.py | 86260dbc4e0b9b1871e3d286d5863cac1122de8b | [] | no_license | bergkampben/discoverfy | 49f924cf7dbbd0b09f60b50e9696bf9ae6492c47 | 59bc2e97c2995e263c5aa820666612c82e381952 | refs/heads/master | 2022-12-10T08:15:56.460686 | 2018-09-17T14:49:59 | 2018-09-17T14:49:59 | 259,083,735 | 0 | 0 | null | 2022-12-07T21:39:59 | 2020-04-26T16:50:47 | Python | UTF-8 | Python | false | false | 487 | py | """Discoverfy python package configuration."""
from setuptools import setup
setup(
name='discoverfy',
version='0.1.0',
packages=['discoverfy'],
include_package_data=True,
install_requires=[
'Flask==0.12.2',
'html5validator==0.2.8',
'pycodestyle==2.3.1',
'pydocstyle==2.0.0',
'pylint==1.8.1',
'nodeenv==1.2.0',
'sh==1.12.14',
'arrow==0.10.0',
'requests==2.18.4',
'apscheduler'
],
)
| [
"asofian@umich.edu"
] | asofian@umich.edu |
ab9badddec57b1934b7655a1a401667740fc1840 | 7a6d3ce61ff2972246972800807caf21f134c4c4 | /tests/test_parse.py | 97150fb7c14aaba41e4118e494d2b913383156dc | [] | no_license | mbryla/training-python-basics | 090f886e1a4568deaece83a28f5912279f963b46 | 0bd2378168a5323e88ce556c099f36737ab38d7c | refs/heads/master | 2021-05-30T22:16:52.610683 | 2015-10-14T16:54:47 | 2015-10-14T16:54:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,999 | py | from unittest import TestCase, skipUnless
from functions.parse import TESTED, Parse
__author__ = 'MKVJ48'
SKIP_MESSAGE = 'not_yet_implemented'
@skipUnless('words_count' in TESTED, SKIP_MESSAGE)
class TestWordsCount(TestCase):
    """Case-sensitive behaviour of Parse.count_words."""

    def test_words_count_simple(self):
        sentence = 'To Sherlock Holmes she is always the woman'
        counted = Parse.count_words(['Sherlock', 'woman'], sentence)
        self.assertDictEqual({'Sherlock': 1, 'woman': 1}, counted)

    def test_words_count_simple_with_dot(self):
        sentence = ('I have seldom heard him mention her under any other name.'
                    'In his eyes she eclipses and predominates the whole of he'
                    'r sex. It was not that he felt any emotion akin to love f'
                    'or Irene Adler. All emotions, and that one particularly, '
                    'were abhorrent to his cold, precise but admirably balance'
                    'd mind.')
        counted = Parse.count_words(['and', 'her', 'Irene'], sentence)
        self.assertDictEqual({'and': 2, 'her': 2, 'Irene': 1}, counted)
@skipUnless('words_count_case_insensitive' in TESTED, SKIP_MESSAGE)
class TestWordsCountCaseSensitive(TestCase):
    """Case-insensitive behaviour of Parse.count_words (third argument False)."""

    def test_words_count_simple(self):
        sentence = 'To Sherlock Holmes she is always the woman'
        counted = Parse.count_words(['sherlock', 'woman'], sentence, False)
        self.assertDictEqual({'sherlock': 1, 'woman': 1}, counted)

    def test_words_count_simple_with_dot(self):
        sentence = ('I have seldom heard him mention her under any other name.'
                    'In his eyes she eclipses and predominates the whole of he'
                    'r sex. It was not that he felt any emotion akin to love f'
                    'or Irene Adler. All emotions, and that one particularly, '
                    'were abhorrent to his cold, precise but admirably balance'
                    'd mind.')
        counted = Parse.count_words(['and', 'Her', 'irene'], sentence, False)
        self.assertDictEqual({'and': 2, 'Her': 2, 'irene': 1}, counted)
| [
"brylamat@gmail.com"
] | brylamat@gmail.com |
5fedfa2c0bded81ccf8927ae6cc9219e37655bfa | 062a7ecf904b75d45ae08f0b50282d0396b9d5a0 | /scratch11/ex04.py | f80fb51af2b01902381538f91b7147caf380e524 | [] | no_license | chojiwon1727/lab_python | 3ce0da871a01dc3509ed25d4d8552672a7f62917 | e997e4f86de8ccebc614f8f0a16849f047728e67 | refs/heads/master | 2020-12-12T01:59:14.630941 | 2020-01-15T06:37:58 | 2020-01-15T06:37:58 | 234,015,167 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,618 | py | from sklearn.metrics import classification_report, confusion_matrix
from scratch11.ex03 import train_test_split, MyScaler, MYKnnClassifier
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
if __name__ == '__main__':
    # 1. iris data
    col_name = ['sepal_length', 'sepal_width', 'petal_length', 'petal_width', 'Class']
    iris = pd.read_csv('iris.csv', header=None, names=col_name)
    # print(iris.shape)
    # print(iris.head())
    # Scatter plots of feature pairs against the Class label, via the DataFrame
    iris_by_class = iris.groupby('Class')
    for name, group in iris_by_class:
        # print(name, len(group))
        plt.scatter(group['sepal_length'], group['sepal_width'], label=name)  # -> one scatter series per class
    plt.legend()
    plt.xlabel('sepal_length')
    plt.ylabel('sepal_width')
    plt.show()
    for name, group in iris_by_class:
        # print(name, len(group))
        plt.scatter(group['petal_length'], group['petal_width'], label=name)  # -> one scatter series per class
    plt.legend()
    plt.xlabel('petal_length')
    plt.ylabel('petal_width')
    plt.show()
    # NOTE(review): iloc[:, 0:3] keeps only the first three features —
    # petal_width is excluded from the classifier; confirm this is intended.
    iris_point = iris.iloc[:,0:3].to_numpy()
    iris_label = iris.iloc[:,4].to_numpy()
    point_train, point_test, label_train, label_test = train_test_split(iris_point, iris_label, test_size=0.2)
    # Fit the scaler on the training split only, then apply it to both splits.
    scaler = MyScaler()
    scaler.fit(point_train)
    point_train = scaler.transform(point_train)
    point_test = scaler.transform(point_test)
    knn = MYKnnClassifier(5)
    knn.fit(point_train, label_train)
    pred = knn.predict(point_test)
    report = classification_report(label_test, pred)
    confusion = confusion_matrix(label_test, pred)
    # print('iris data')
    # print(np.mean(label_test == pred))
    # print(confusion)
    # print(report)
    # 2. Wisconsin breast-cancer data
    wisc = pd.read_csv('wisc_bc_data.csv')
    # print(wisc.head())
    # Column 0 is an id, column 1 the diagnosis label, the rest are features.
    wisc_point = wisc.iloc[:,2:].to_numpy()
    wisc_label = wisc.iloc[:, 1].to_numpy()
    point_train, point_test, label_train, label_test = train_test_split(wisc_point, wisc_label, test_size=0.2)
    scaler = MyScaler()
    scaler.fit(point_train)
    point_train = scaler.transform(point_train)
    point_test = scaler.transform(point_test)
    knn = MYKnnClassifier()
    knn.fit(point_train, label_train)
    pred = knn.predict(point_test)
    report = classification_report(label_test, pred)
    confusion = confusion_matrix(label_test, pred)
    # print('cancer data')
    # print(np.mean(label_test == pred))
    # print(confusion)
    # print(report)
| [
"56914237+chojiwon1727@users.noreply.github.com"
] | 56914237+chojiwon1727@users.noreply.github.com |
d0f57b7b7e0be92eaac31a2372c1b34ae089990d | e6da5f6d36c2976e2a31e364ffedec23ad3e8973 | /gis_test1/settings/deploy.py | c302993b0022220333defc40ae867aa9b1dbca11 | [] | no_license | SeoMinJong/gis_test1 | 7e8a2aa3c3b5b0f96fa1b8af91a171eab81bfbc6 | 5ab704d522f579b86c61a2b67df96752873770d0 | refs/heads/master | 2023-07-30T04:19:22.233362 | 2021-09-28T01:59:47 | 2021-09-28T01:59:47 | 382,846,029 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 685 | py | from .base import *
def read_secret(secret_name):
    """Return the content of the Docker secret file /run/secrets/<secret_name>.

    Surrounding whitespace and newlines are stripped from the value.
    Fix: the file handle was previously opened and never closed; a context
    manager now guarantees it is released.

    Raises FileNotFoundError if the secret has not been mounted.
    """
    with open('/run/secrets/' + secret_name) as secret_file:
        # .strip() is equivalent to the original .lstrip().rstrip() chain.
        return secret_file.read().strip()
# All sensitive values are sourced from Docker secrets via read_secret().
SECRET_KEY = read_secret('DJANGO_SECRET_KEY')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
# NOTE(review): "*" accepts any Host header; restrict to the real domain(s)
# before exposing this deployment publicly.
ALLOWED_HOSTS = ["*"]
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'django',
        'USER': read_secret('MARIADB_USER'),
        'PASSWORD': read_secret('MARIADB_PASSWORD'),
        'HOST': 'mariadb',  # docker-compose service name
        'PORT': '3306',
    }
}
| [
"tjalswhd113@naver.com"
] | tjalswhd113@naver.com |
903a8a93f9a81e2b437cba9a74911c2df4bbae29 | 389dfef2204fd925cff51a3f39d626e86159fbe3 | /pairs_generator.py | d16ffc0e343b297a2724395fdbd1f9ab369d854c | [] | no_license | DanielSeehausen/pseudo_smart_random_pairing | e18c8728d5903d069e9f94e053d5ea49e5b39d21 | f51a043bb046dc96feb7d43d62c8aaa757ebc70a | refs/heads/master | 2020-07-23T09:03:33.898337 | 2017-09-05T18:37:21 | 2017-09-05T18:37:21 | 94,354,398 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 278 | py | def true_random_assign_pairs(arr):
print([arr.pop(random.randrange(len(arr))) for _ in range(2)])
if len(arr) > 1:
random_assign(arr)
else:
print(arr)
def get_all_pairs(matchee, arr):
return {matcher: 0 for matcher in arr if matchee != matcher}
| [
"Daniel.Seehausen@gmail.com"
] | Daniel.Seehausen@gmail.com |
ff1c9844fbad63c900a19d26273c2232d85ac465 | 51b82ba56d63f4ed9d705a4b3ffa2423509a6d31 | /caloriemeter/asgi.py | ba05a5c38ef277adcab2e352180ebbd61f2c0a7d | [] | no_license | Riya-Regmi/Calorie-Counter-Django-python-project | eff22a46daeb9327ca6831de00ccfaa580a984f0 | f63e8042e2c2122003db467c6e3b5af7d6c8c60b | refs/heads/master | 2022-11-19T06:02:11.183579 | 2020-07-22T10:09:55 | 2020-07-22T10:09:55 | 281,634,460 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 401 | py | """
ASGI config for caloriemeter project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
# Point Django at the project settings unless the environment overrides it.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'caloriemeter.settings')
# Module-level ASGI callable used by servers such as Daphne or Uvicorn.
application = get_asgi_application()
| [
"riyaregmi19@gmail.com"
] | riyaregmi19@gmail.com |
a307d1e3e929d878090e28d9d79600c1101e651e | c91e63b1f688aa4efd8db66eda2aff11f2391ed5 | /python/helpers/typeshed/stdlib/@python2/site.pyi | c77c9397f61235cd1a6fc8609638fda26a5c0785 | [
"Apache-2.0",
"MIT"
] | permissive | Mattlk13/intellij-community | c49d9b2742b61e98370ffb8604611db1e680d2e7 | 43b95d766e988814033b202ebe97a0ad53df59ee | refs/heads/master | 2023-08-28T16:52:36.973221 | 2022-01-27T14:23:41 | 2022-02-11T13:43:51 | 83,334,899 | 0 | 0 | Apache-2.0 | 2023-03-08T04:13:59 | 2017-02-27T17:00:46 | null | UTF-8 | Python | false | false | 381 | pyi | from typing import Iterable, List
# Type stub for the stdlib `site` module (Python 2 branch of typeshed);
# bodies are intentionally `...` — only signatures matter here.
PREFIXES: List[str]
ENABLE_USER_SITE: bool | None
USER_SITE: str | None
USER_BASE: str | None
def main() -> None: ...
def addsitedir(sitedir: str, known_paths: Iterable[str] | None = ...) -> None: ...
def getsitepackages(prefixes: Iterable[str] | None = ...) -> List[str]: ...
def getuserbase() -> str: ...
def getusersitepackages() -> str: ...
| [
"intellij-monorepo-bot-no-reply@jetbrains.com"
] | intellij-monorepo-bot-no-reply@jetbrains.com |
e5603077f853e29050a73a1802510f6255dc8019 | 2adb627cfd97cb3b56ac85f2c1613e64ec8aff2f | /manage.py | fd154b60cac1c1218751f73a59576d89475b3a16 | [] | no_license | AthifSaheer/techbyheart-machine-test | 28307a103e694965df965aefc8576cd558204585 | 2cc74e4b9a5dfb3e0a73b1b36b245b8bb92d2b9b | refs/heads/main | 2023-08-31T20:12:13.082225 | 2021-10-23T01:35:48 | 2021-10-23T01:35:48 | 420,179,253 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 667 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Run administrative tasks."""
    # Default settings module; a deployment may override it via the environment.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'techbyheart.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    # Dispatch to the subcommand named on the CLI (runserver, migrate, ...).
    execute_from_command_line(sys.argv)
if __name__ == '__main__':
    main()
| [
"liteboook@gmail.com"
] | liteboook@gmail.com |
a89e7f7411dc4a3e1f3a484b0f92a7d10fc29d16 | 96342f566349db101ecc4a2965331ace6303dc5a | /5.Overlap.py | ad12f7ce13203a0f0bd8a3812b769d4a3fcae62a | [] | no_license | vinodsundaram/Practise-Python | 7fa5e0e2075b73b8fa3826e0d0c6cbe3792a14e8 | 0195b21de0d3ff5f8f35dfe0b1c03eb4cf19c364 | refs/heads/master | 2021-01-12T15:54:16.455655 | 2017-08-22T16:57:43 | 2017-08-22T16:57:43 | 71,901,752 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 563 | py | # -*- coding: utf-8 -*-
"""
Created on Tue Oct 25 19:50:04 2016
@author: Vinod
"""
# Intersection of two fixed lists, preserving first-seen order and uniqueness.
a = [1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89]
b = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]
overlap = list()
for item in a:
    if item in b and item not in overlap:  # membership check keeps the result duplicate-free
        overlap.append(item)
print(overlap)
## Single line random
import random
a= [random.randrange(1,100) for i in range(10)]
b= [random.randrange(1,100) for i in range(10)]
print(a)
print(b)
common=[]
# NOTE(review): unlike the loop above, this comprehension keeps duplicates
# (one entry per matching pair), so `common` may repeat values.
common=[(item1) for item1 in a for item2 in b if item1==item2]
print(common)
"noreply@github.com"
] | vinodsundaram.noreply@github.com |
c7112ce6756cca20d41b9968367c7e41d3461727 | c1ce8cdeff0a7ef98a5d229c2adecdab2af4d1b2 | /xinke.py | 82b062d6e8a4661e4616bfb28df4cae44d7f80ea | [] | no_license | wuruxue/python | f20f04b3239e7db41d44290e7aad16262258ae1f | bd0ba8b72c7231851db0d1f4547cb6dbf7dead83 | refs/heads/master | 2020-04-08T11:23:42.164248 | 2018-12-25T06:41:19 | 2018-12-25T06:41:19 | 159,304,291 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,312 | py | import os
from chardet import detect
#a = os.path.isfile(r"C:\Users\Administrator\Desktop\root\a.txt")
#print(a)
root_path = os.getcwd()
dir_count,file_count = 0,0
for root,dirs,files in os.walk(root_path):
#print(root)
if not os.path.isfile(root):
dir_count += 1
for f in files:
#print(f)
if os.path.isfile(os.path.join(root,f)):
file_count == 1
#print(os.path.join(root,f))
#print(dir_count-1)
print(dir_count-1,"folders")
print(file_count,"files")
# with open("a.txt","rb") as fp:
# encode = detect(fp.read())['encoding']
# print("ENCODING:",encode)
# #print(detect(fp.read()))
# # for f in fp:
# # print(f)
# line_count,blank_count = 0,0
# with open("a.txt",'r',encoding=encode) as fp:
# while True:
# line = fp.readline()
# if not line:
# break
# line_count += 1
# if len(line.strip()) == 0:
# blank_count += 1
# print(line_count,"lines(",blank_count,"blanks)")
# # root_path = os.getcwd()
# # offset = len(root_path.split("\\"))
# # #print(offset)
# # #print(root_path.split("\\"))
# # for root,files,dirs in os.walk(root_path):
# # current_dir = root.split("\\")
# # indent_level = len(current_dir) - offset
# # print(indent_level*"\t",current_dir[-1])
# # for f in dirs:
# # print("\t"*(indent_level+1),f)
| [
"noreply@github.com"
] | wuruxue.noreply@github.com |
ef3c2b3e595325d085987d6e071d1f68922d3a5d | f674eaaf46491ab376199238143e18837d3f85c0 | /thesis/figures/friction/coefficientOfFriction.py | 9944ff4ee49977026b4d8cbaafe329fb721d2bb9 | [] | no_license | filiphl/master-thesis | 7402dafb3d2f3ec70f61ec97cd7a235405fc40fc | 54aa0d1d49765c82c4dcaea149f2c2cf00f54bec | refs/heads/master | 2021-06-18T14:09:08.652591 | 2017-06-19T08:39:48 | 2017-06-19T08:39:48 | 66,485,624 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,767 | py | from matplotlib import pyplot as plt
import numpy as np
from matplotlib import rc
# SMOOTH
def smoother(degree, function):
    """Smooth *function* with a weighted moving average of half-width *degree*.

    Near the ends the window half-width is shrunk so it never reaches outside
    the data; the first and last samples are copied through unchanged.
    Python 2 code (uses xrange).
    """
    smoothed = np.zeros(len(function))
    smoothed[0] = function[0]
    # Ramp-up region: grow the window gradually away from the left edge.
    for i in xrange(1,degree+1):
        smoothed[i] = smooth(i,function, i)
    # Interior: full window of half-width `degree` (overwrites part of the ramp).
    for i in xrange(degree, len(function)-degree):
        smoothed[i] = smooth(degree,function,i)
    # Ramp-down region at the right edge, addressed with negative indices.
    for i in xrange(2,degree+2):
        smoothed[-i] = smooth(i-1,function, -i)
    smoothed[-1] = function[-1]
    return smoothed
def smooth(degree, function, atIndex):
    """Weighted average of function[atIndex-degree : atIndex+degree+1].

    The weight ramps up towards the centre of the window (roughly triangular
    kernel); `dividor` accumulates the weight total for normalisation.
    NOTE(review): the weight bookkeeping below is fiddly and the resulting
    kernel is not obviously symmetric — confirm the intended shape before
    reusing this elsewhere.
    """
    value = 0.0
    dividor = 0.0
    localCoeffisient = 1.0
    for i in xrange(-degree, degree+1):
        dividor += localCoeffisient
        value += localCoeffisient*function[atIndex+i]
        if i < 0:
            localCoeffisient += 1
        if i == 0:
            localCoeffisient -= 1
        if i > 0:
            localCoeffisient -= 1
            localCoeffisient +=1
    return value/dividor
def decay(x, c, a):
    """Exponential relaxation from 1 at x[0] towards the asymptote *a*.

    Element-wise over the numpy array *x*; *c* sets the decay rate.
    """
    tail = np.exp(c * (x[0] - x))
    return tail + a * (1 - tail)
# Synthetic friction trace: a linear loading ramp followed by an exponential
# drop from the static to the kinetic friction level.
N = 2000
N2 = N/2  # integer midpoint under Python 2 division (file uses xrange -> Py2)
x = np.linspace(0,2,N+1)
y = np.linspace(0,2,N+1)
y[N2:] = decay(x[N2:], 50, 0.7)  # decay towards 70% of the peak after the midpoint
y=smoother(7,y)  # triangle-smooth the kink at the transition
# --- figure styling: LaTeX text rendering and serif fonts for the thesis ---
rc('font',**{'family':'sans-serif','sans-serif':['Helvetica']})
rc('text', usetex=True)
plt.rc('text', usetex=True)
plt.rc('font', family='serif', size=16)
# Line colours as hex and as matching 0-1 RGB tuples (for interpolation below).
lc = ["#5FA38E", "#3F5F7F"]
LC = [(95/255.,163/255.,142/255.), (63/255.,95/255.,127/255.)]
fig,ax = plt.subplots(1,1, figsize=(10,5))
# Dashed guide lines at the static (max) and kinetic (final) friction levels.
l1 = [max(y) , max(y)]
l2 = [y[-1], y[-1]]
ax.plot([x[N2/4], x[N2]], l1,
    '--',
    color='#666666',
    linewidth=3)
ax.plot([x[N2/4], x[N2+N2/8]], l2,
    '--',
    color='#666666',
    linewidth=3)
# Blend window: `less` samples before and `more` after the midpoint are drawn
# in colours interpolated between the two line colours.
less = 15
more = 3
d = float(less+more)
ax.plot(x[3:N2-less], y[3:N2-less],
    '-',
    color=lc[0],
    linewidth=4,
    label='Loading')
plt.hold('on')  # NOTE: plt.hold is deprecated/removed in modern matplotlib
for i in xrange(less+more):
    ax.plot(x[N2-less+i:N2-less+i+2], y[N2-less+i:N2-less+i+2],
        '-',
        color=( (i/d) * (LC[1][0]-LC[0][0]) + LC[0][0], (i/d) * (LC[1][1]-LC[0][1]) + LC[0][1], (i/d) * (LC[1][2]-LC[0][2]) + LC[0][2] ),
        linewidth=4,
        )
ax.plot(x[N2+more:], y[N2+more:],
    '-',
    color=lc[1],
    linewidth=4,
    label='Sliding')
# Annotate the static (F_s) and kinetic (F_k) friction levels.
plt.text(0.1, 0.96, r'$F_s$', fontsize=27)
plt.text(0.1, 0.67, r'$F_k$', fontsize=27)
ax.legend(loc=1, fontsize=25)
ax.grid()
ax.set_ylabel(r"Friction force", fontsize=30)
ax.set_xlabel(r"Time" , fontsize=30)
ax.xaxis.set_label_coords(0.5, -0.05)
ax.yaxis.set_label_coords(-0.025, 0.5)
ax.set_ylim([0,max(y)*1.1])
#------------------------------------------------------------------------------#
ax.set_xticks([])
ax.set_yticks([])
#ax.set_yticks([0.7, 1.0])
#ax.set_yticklabels([r'$F_k$', r'$F_s$'])
xmin, xmax = ax.get_xlim()
ymin, ymax = ax.get_ylim()
# removing the default axis on all sides:
for side in ['bottom','right','top','left']:
    ax.spines[side].set_visible(False)
# get width and height of axes object to compute
# matching arrowhead length and width
dps = fig.dpi_scale_trans.inverted()
bbox = ax.get_window_extent().transformed(dps)
width, height = bbox.width, bbox.height
# manual arrowhead width and length
hw = 1./30.*(ymax-ymin)
hl = 1./50.*(xmax-xmin)
lw = 2.5 # axis line width
ohg = 0.3 # arrow overhang
# compute matching arrowhead length and width
yhw = hw/(ymax-ymin)*(xmax-xmin)* height/width
yhl = hl/(xmax-xmin)*(ymax-ymin)* width/height
# draw x and y axis as arrows replacing the hidden spines
ax.arrow(xmin, 0, xmax-xmin, 0., fc='#333333', ec='#333333', lw = lw,
    head_width=hw, head_length=hl, overhang = ohg,
    length_includes_head= True, clip_on = False)
ax.arrow(0, ymin, 0., ymax-ymin, fc='#333333', ec='#333333', lw = lw,
    head_width=yhw, head_length=yhl, overhang = ohg,
    length_includes_head= True, clip_on = False)
plt.savefig('steadySlide.pdf')
plt.show()
| [
"filiphenriklarsen@gmail.com"
] | filiphenriklarsen@gmail.com |
508a42fbc1cf17886a1ad5e49ccb90f22ad05950 | afa8adc5861bc9e5cfc87cd012a62394d6f8c207 | /doctor/views.py | 8373cfac92db85a4a488f82020b84c709a3af61e | [] | no_license | ahmedshaabanix/AttendanceForStudent | d92254fe585291147648b940c314213ff880ee56 | b2ffcaadd256bb991771274a9cb839c8853d16e1 | refs/heads/main | 2023-02-12T08:30:43.277746 | 2021-01-05T17:41:57 | 2021-01-05T17:41:57 | 327,072,924 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,807 | py | from django.shortcuts import render,redirect
from django.contrib.auth.decorators import login_required
import socket
from .models import *
import pyqrcode
import png
from django.http import JsonResponse,HttpResponse
# from django.http import JsonResponse
# from .models import Attendance, exmaple
from django.core import serializers
############## Generate QR ####################################################################
def generate_qr(request):
    """Create a lecture record from the POSTed form, encode its details into
    a QR code image (uca-url.png), persist the QR text, and redirect to the
    report page.

    NOTE(review): assumes POST contains 'password', 'lecture_number' and
    'subjects'; missing keys raise KeyError / TypeError — confirm the form
    always supplies them.
    """
    host_name = socket.gethostname()
    host_ip = socket.gethostbyname(host_name)  # IP is embedded in the QR so clients can reach this host
    password = request.POST['password']
    lecture_number = request.POST['lecture_number']
    # save subject in database
    subject = request.POST.get('subjects')
    # NOTE(review): Model.save() returns None, so `usersubjects` is always None.
    usersubjects = Lecture_date(subject_id = int(subject),lecture_number = lecture_number).save()
    #lecture_id, department and level data
    #values_list('id')[0][0] for returning an integer value not a QuerySet
    # NOTE(review): this filter can match several rows if the same
    # subject/lecture_number pair is submitted twice; [0][0] then picks the
    # oldest one — confirm uniqueness is enforced elsewhere.
    lecture_id = Lecture_date.objects.filter(subject_id = int(subject),lecture_number = lecture_number).values_list('id')[0][0]
    print(lecture_id)
    department = Departments.objects.filter(id = int(subject)).values_list('name')[0][0]
    print(department)
    level = Subjects.objects.filter(id = int(subject)).values_list('level')[0][0]
    print(level)
    # create qr: fields are joined with '&' and decoded again by the scanner app
    qr_code_text = password+ "&"+ host_ip+ "&" +str(lecture_id) + "&" + department + "&" + str(level)
    url = pyqrcode.create(qr_code_text)
    url.png('uca-url.png',scale= 40)
    url.show(scale=40)
    #save QR data in database
    qr_text = Qr_code( qr_code_text = qr_code_text )
    qr_text.save()
    return redirect('report')
#############################################################################################################
####### render html pages ###################################################################################
@login_required
def home(request):
    """Render the doctor home page with the subject list for the QR form."""
    context = {
        'subjects': Subjects.objects.all(),
        'title': 'home',
    }
    return render(request, 'doctor/home.html', context)
#@login_required
def report(request):
    """Render the one-time attendance report page (login check disabled above)."""
    return render(request, 'doctor/onetime_report.html',{'title':'report'})
@login_required
def final_reports(request):
    """Render the cumulative attendance reports page."""
    return render(request, 'doctor/final_reports.html',{'title':'reports'})
#############################################################################################################
########### show_attendance ##################################################################################
from accounts.models import Student
from .models import Attendance
from django.views.decorators.http import require_http_methods
@login_required
@require_http_methods(["GET"])
def show_attendance(request):
    """Return id/name of students marked present (status == 1) as JSON.

    NOTE(review): `data` is built from the *last* matching queryset only, and
    the view raises NameError when no row has status == 1 — it looks like all
    matches were meant to be accumulated; confirm and fix upstream.
    """
    attendance = Attendance.objects.all()
    for student in attendance:
        if student.student_id and student.status == 1:
            student_attendnce = Student.objects.all().filter(id = student.student_id).values('id','name')
            print(student_attendnce)
    data = list(student_attendnce)
    return JsonResponse(data,safe=False)
@login_required
@require_http_methods(["GET"])
def show_absence(request):
    """Return id/name of students marked absent (status == 0) as JSON.

    NOTE(review): same defect as show_attendance — only the last matching
    queryset is serialized, and NameError is raised when nothing matches.
    """
    attendance = Attendance.objects.all()
    for student in attendance:
        if student.student_id and student.status == 0:
            student_absence = Student.objects.all().filter(id = student.student_id).values('id','name')
            print(student_absence)
    data = list(student_absence)
    return JsonResponse(data,safe=False)
### show All#################################
@login_required
@require_http_methods(["GET"])
def show_all(request):
    """Return every student wrapped in a {'data': [...]} JSON payload.

    NOTE(review): `data` is a list of Student model *instances*, which
    JsonResponse cannot serialize — this likely raises TypeError at runtime;
    consider `.values('id', 'name')` instead.
    """
    attendance = Student.objects.all()
    data = list(attendance)
    data = {
        'data' : data
    }
    return JsonResponse(data,safe=False)
| [
"gabr40641@gmail.com"
] | gabr40641@gmail.com |
32bed7a2c873250a9542737059bba91719dd5bd3 | f8ecf2d7a4f3c74286ae2ea7ef592548c7a3b267 | /backtrace_subset.py | 639a643d366dfedd8a2f515939109a2661d0ab85 | [] | no_license | fatterZhang/leetcodeComments | beebec976465986f15ddf119f58fbe7702e55839 | 1535a92c2e141f86ce6c5a784d7d55ad05ed044f | refs/heads/master | 2021-03-25T13:11:09.816365 | 2020-04-30T02:39:19 | 2020-04-30T02:39:19 | 247,621,299 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 786 | py | # -*- coding: utf-8 -*-#
# Name: subset
# Author: ARCHI
# Date: 2020/4/27
# Description: 回溯法求,数组所有子集
# 参考链接:https://labuladong.gitbook.io/algo/suan-fa-si-wei-xi-lie/zi-ji-pai-lie-zu-he#er-zu-he
# -------------------------------------------------------------------------------
from typing import List
# Module-level accumulator shared by getSubset/backtrace.
rst = []
def getSubset(nums: List[int]) -> List[List[int]]:
    """Return every subset of *nums* (the power set) in DFS order.

    The accumulator is cleared first, so repeated calls no longer stack
    results from earlier invocations, and the empty input now correctly
    yields [[]] (the original early-returned [] without the empty subset).
    """
    rst.clear()
    backtrace(nums, 0, [])
    return rst
def backtrace(nums: List[int], start: int, path: List[int]):
    """Depth-first subset enumeration: record the current path, then extend it
    with each element at index >= start, backtracking after each recursion."""
    rst.append(list(path))
    for idx in range(start, len(nums)):
        path.append(nums[idx])
        backtrace(nums, idx + 1, path)
        path.pop()
if __name__ == "__main__":
    # Demo: print the power set of [0, 1, 2, 3].
    print(getSubset(list(range(4))))
| [
"1210188542@qq.com"
] | 1210188542@qq.com |
9375160007447a6592f95441903e9a34e36f570f | f5e6f9301d0b7ec43a16a2ddcec41f538e16676a | /project/app/db.py | e8c20700e29b6626ee7ae350e2775824de4a5ec7 | [] | no_license | daisuke8000/fastapi-tdd-docker | e4b4950bd7674db616c4de67b96b7e283b14d6db | 507ef5d6fb958e54fbceca59ad749a0cfb8654b0 | refs/heads/master | 2023-06-22T13:41:44.268574 | 2021-07-23T03:36:54 | 2021-07-23T03:36:54 | 384,939,690 | 0 | 0 | null | 2021-07-23T08:26:55 | 2021-07-11T12:13:59 | Python | UTF-8 | Python | false | false | 1,074 | py | import logging
import os
from fastapi import FastAPI
from tortoise import Tortoise, run_async
from tortoise.contrib.fastapi import register_tortoise
log = logging.getLogger("uvicorn")
# Aerich (migration tool) configuration: reuses the app's DATABASE_URL and
# registers both the application models and aerich's own tracking model.
TORTOISE_ORM = {
    "connections": {"default": os.environ.get("DATABASE_URL")},
    "apps": {
        "models": {
            "models": ["app.models.tortoise", "aerich.models"],
            "default_connection": "default",
        },
    },
}
def init_db(app: FastAPI) -> None:
    """Attach Tortoise ORM to the FastAPI app (schemas are created elsewhere)."""
    database_url = os.environ.get("DATABASE_URL")
    register_tortoise(
        app,
        db_url=database_url,
        modules={"models": ["app.models.tortoise"]},
        generate_schemas=False,
        add_exception_handlers=True,
    )
async def generate_schema() -> None:
    """Connect to the database, create tables for the app models, then disconnect."""
    log.info("Initializing Tortoise...")
    database_url = os.environ.get("DATABASE_URL")
    await Tortoise.init(db_url=database_url, modules={"models": ["models.tortoise"]})
    log.info("Generating database schema via Tortoise")
    await Tortoise.generate_schemas()
    await Tortoise.close_connections()
if __name__ == "__main__":
    # One-off schema generation when this module is executed directly.
    run_async(generate_schema())
| [
"daisuke12108000@gmail.com"
] | daisuke12108000@gmail.com |
2356cdb59ac87dc7e430cfd173d9cad9d664a40e | 8f4d46ebd1d61616f9cb50f1d7bf423399e3f614 | /run.py | cd7a1fb8f5cd5e5171be3e2df7cc49d116444559 | [
"Apache-2.0"
] | permissive | nwinds/demoServer | b089a0d9ea825bb86133d30b79bcfc9a10527f49 | 73f8f6547bcede5e348bff205d8e48bbd84cf7e1 | refs/heads/master | 2021-01-01T05:12:32.590055 | 2016-04-25T07:22:19 | 2016-04-25T07:22:19 | 56,697,039 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,307 | py | import os.path
import tornado.httpserver
import tornado.ioloop
import tornado.options
import tornado.web
import tornado.wsgi
from tornado.options import define, options
define("port", default=8000, help="run on the given port", type=int)
class IndexHandler(tornado.web.RequestHandler):
    """Serve the landing page."""
    def get(self):
        # Render templates/index.html (template_path is set in __main__).
        self.render('index.html')
class FormPageHandler(tornado.web.RequestHandler):
    """Handle the /poem form POST by redirecting the client to /gateway."""
    def post(self):
        # The submitted form fields are currently ignored; the handler just
        # forwards the browser to the /gateway redirect route.
        # TODO: read self.get_argument('url') etc. and handle the URL.
        self.redirect('gateway', permanent=True)
if __name__ == '__main__':
    tornado.options.parse_command_line()
    # wsgi.WSGIApplication
    # Route table: landing page, poem form handler, and a static redirect to
    # the external gateway; templates are loaded from ./templates.
    # NOTE(review): wrapping a tornado.wsgi.WSGIApplication in tornado's own
    # HTTPServer is unusual — tornado.web.Application is the native choice;
    # confirm this is intentional.
    app = tornado.wsgi.WSGIApplication(
        handlers=[(r'/', IndexHandler), (r'/poem', FormPageHandler),
        (r"/gateway", tornado.web.RedirectHandler,
        dict(url="https://gateway/index.php")),
        ],
        template_path=os.path.join(os.path.dirname(__file__), "templates")
    )
    # Serve on the --port option (default 8000) until interrupted.
    http_server = tornado.httpserver.HTTPServer(app)
    http_server.listen(options.port)
    tornado.ioloop.IOLoop.instance().start()
| [
"namingwinds@gmail.com"
] | namingwinds@gmail.com |
49cdfb3b2bfc20d46dcb661d3aab44bb1c8e426b | abca56cc77dac9184a6a92bdc6af1059f470d8e1 | /bin/wheel | e259bf47fe9b6c3f855276313bb95c2654bf98d5 | [] | no_license | maxpeng041/prop-64-analysis | 1b5c47fcc9eab6e492f274b50ad5f56d6bec5a2d | c405b6382bc4b8b38b3a500956a1c7ed4f6c7c0c | refs/heads/master | 2021-10-10T23:13:24.507109 | 2019-01-18T20:36:33 | 2019-01-18T20:36:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 255 | #!/Users/ziyuanpeng/Code/first-python-notebook/bin/python3.6
# -*- coding: utf-8 -*-
import re
import sys
from wheel.tool import main
if __name__ == '__main__':
    # setuptools-style console-script shim: normalize argv[0]
    # ("wheel-script.py" / "wheel.exe" -> "wheel") before delegating to the
    # wheel CLI, and exit with its return code.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"maxpeng041@gmail.com"
] | maxpeng041@gmail.com | |
0c3823a879ea6b77ccda558c0705ef0127df2273 | de28d889d79bddf8e8aff45988a0f67a9bb012fa | /models.py | 4240ac64cc61e02ff84696e6e601fcad6727c74f | [] | no_license | smarandi/python-jwt-boilerplate | e8a4b390096195fe707e12eb204782a182dfffb8 | 3eb6cb8b10e9af41b5814c30998358f55b55c557 | refs/heads/master | 2020-04-28T16:35:24.138814 | 2019-03-13T12:31:36 | 2019-03-13T12:31:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,661 | py |
from run import db
from passlib.hash import pbkdf2_sha256 as sha256
class UserModel(db.Model):
    """Application user persisted in the `users` table."""
    __tablename__ = 'users'
    id = db.Column(db.Integer, primary_key = True)
    username = db.Column(db.String(120), unique = True, nullable = False)
    # PBKDF2-SHA256 hash (see generate_hash), never the plaintext password.
    password = db.Column(db.String(120), nullable = False)
    def save_to_db(self):
        """Insert this user and commit immediately."""
        db.session.add(self)
        db.session.commit()
    @staticmethod
    def generate_hash(password):
        """Hash a plaintext password with PBKDF2-SHA256."""
        return sha256.hash(password)
    @staticmethod
    def verify_hash(password, hash):
        """Check a plaintext password against a stored PBKDF2-SHA256 hash."""
        return sha256.verify(password, hash)
    @classmethod
    def find_by_username(cls, username):
        """Return the first user with this username, or None."""
        return cls.query.filter_by(username = username).first()
    @classmethod
    def return_all(cls):
        """Return all users as {'users': [{'username', 'password'}, ...]}.

        NOTE(review): this exposes password hashes to the caller; restrict to
        admin/debug use.
        """
        def to_json(x):
            return {
                'username': x.username,
                'password': x.password
            }
        # Comprehension over cls (not the hard-coded class name) so subclasses
        # query their own rows.
        return {'users': [to_json(user) for user in cls.query.all()]}
    @classmethod
    def delete_all(cls):
        """Delete every user row; return a status-message dict."""
        try:
            num_rows_deleted = db.session.query(cls).delete()
            db.session.commit()
            return {'message': '{} row(s) deleted'.format(num_rows_deleted)}
        except Exception:
            # Narrowed from a bare `except:`; roll back so the session
            # remains usable after a failed bulk delete.
            db.session.rollback()
            return {'message': 'Something went wrong'}
class RevokedTokenModel(db.Model):
    """Blacklisted JWT, identified by its `jti` (JWT ID) claim."""
    __tablename__ = 'revoked_tokens'
    id = db.Column(db.Integer, primary_key = True)
    # JWT ID claim of the revoked token.
    jti = db.Column(db.String(120))
    def add(self):
        # Persist this revocation immediately.
        db.session.add(self)
        db.session.commit()
    @classmethod
    def is_jti_blacklisted(cls, jti):
        # True if any revoked-token row carries this jti.
        query = cls.query.filter_by(jti = jti).first()
        return bool(query)
"samuel.marandi@evivehealth.com"
] | samuel.marandi@evivehealth.com |
c47893545684fda31f3a7369647c734237e7c93c | f1488619d32e72360deb25c2ae709fdb299e3269 | /dvae/bayesian_optimization/sparse_gp.py | 027ced2b7b0937ffcbd5a36dfb06c04f8b344f7f | [
"MIT"
] | permissive | animeshbchowdhury/DAGNN | 5cf0794d56ff4d989da36e4deab87e9536ddcad3 | 02062bd2b24c6a23ef1fa8093d082df72ece98cd | refs/heads/main | 2023-04-29T10:47:36.050410 | 2021-05-07T17:41:11 | 2021-05-07T17:41:11 | 365,308,808 | 0 | 0 | MIT | 2021-05-07T17:36:10 | 2021-05-07T17:36:10 | null | UTF-8 | Python | false | false | 14,800 | py | ##
# This class represents a node within the network
#
from __future__ import print_function
# import sys
# import os
# sys.path.append('%s/Theano-master/' % os.path.dirname(os.path.realpath(__file__)))
import pdb
import theano
import theano.tensor as T
from sparse_gp_theano_internal import *
import scipy.stats as sps
import scipy.optimize as spo
import numpy as np
import sys
import time
from tqdm import tqdm
def casting(x):
    """Return a fresh ndarray copy of *x* in Theano's configured float dtype."""
    return np.array(x, dtype=theano.config.floatX)
def global_optimization(grid, lower, upper, function_grid, function_scalar, function_scalar_gradient):
    """Minimize a scalar function: coarse grid search, then L-BFGS-B refinement.

    grid          -- (n, d) array of candidate points.
    lower/upper   -- per-dimension bounds for the local optimizer.
    function_grid -- evaluates all rows of a matrix at once.
    function_scalar / function_scalar_gradient -- evaluate a single (1, d) point.
    Returns (x_optimal, y_opt): the refined minimizer and its objective value.
    """
    grid_values = function_grid(grid)
    best = grid_values.argmin()

    # Refine the best grid point with gradient-based local optimization.
    X_initial = grid[ best : (best + 1), : ]
    def objective(X):
        X = casting(X)
        X = X.reshape((1, grid.shape[ 1 ]))
        value = function_scalar(X)
        gradient_value = function_scalar_gradient(X).flatten()
        # np.float was removed in NumPy 1.24; use the builtin float / float64.
        return float(value), gradient_value.astype(np.float64)
    lbfgs_bounds = zip(lower.tolist(), upper.tolist())
    x_optimal, y_opt, opt_info = spo.fmin_l_bfgs_b(objective, X_initial, bounds = list(lbfgs_bounds), iprint = 0, maxiter = 150)
    x_optimal = x_optimal.reshape((1, grid.shape[ 1 ]))

    return x_optimal, y_opt
def adam_theano(loss, all_params, learning_rate = 0.001):
    """Build Theano shared-variable update rules implementing Adam for *loss*.

    Returns a list of (shared_variable, new_value) pairs suitable for the
    ``updates`` argument of ``theano.function``.
    """
    b1 = 0.9      # exponential decay for the first moment estimate
    b2 = 0.999    # exponential decay for the second moment estimate
    e = 1e-8      # numerical-stability epsilon
    # NOTE(review): gamma is never used below — confirm it can be removed.
    gamma = 1 - 1e-8
    updates = []
    all_grads = theano.grad(loss, all_params)
    alpha = learning_rate
    t = theano.shared(casting(1.0))  # global step counter
    for theta_previous, g in zip(all_params, all_grads):
        m_previous = theano.shared(np.zeros(theta_previous.get_value().shape, dtype=theano.config.floatX))
        v_previous = theano.shared(np.zeros(theta_previous.get_value().shape, dtype=theano.config.floatX))
        m = b1 * m_previous + (1 - b1) * g # (Update biased first moment estimate)
        v = b2 * v_previous + (1 - b2) * g**2 # (Update biased second raw moment estimate)
        m_hat = m / (1 - b1**t) # (Compute bias-corrected first moment estimate)
        v_hat = v / (1 - b2**t) # (Compute bias-corrected second raw moment estimate)
        theta = theta_previous - (alpha * m_hat) / (T.sqrt(v_hat) + e) #(Update parameters)
        updates.append((m_previous, m))
        updates.append((v_previous, v))
        updates.append((theta_previous, theta) )
        # NOTE(review): the step-counter update is appended once per parameter;
        # duplicate updates of the same shared variable look unintended — confirm.
        updates.append((t, t + 1.))
    return updates
class SparseGP:
    """Wrapper around the Theano ``Sparse_GP`` graph: holds the shared input
    means/variances and targets, and exposes training (L-BFGS or ADAM),
    prediction, and expected-improvement-based batch selection."""
    # The training_targets are the Y's which in the case of regression are real numbers in the case of binary
    # classification are 1 or -1 and in the case of multiclass classification are 0, 1, 2,.. n_class - 1
    def __init__(self, input_means, input_vars, training_targets, n_inducing_points):
        # Shared variables let the compiled Theano functions swap data in-place.
        self.input_means = theano.shared(value = input_means.astype(theano.config.floatX), borrow = True, name = 'X')
        self.input_vars = theano.shared(value = input_vars.astype(theano.config.floatX), borrow = True, name = 'X')
        self.original_training_targets = theano.shared(value = training_targets.astype(theano.config.floatX), borrow = True, name = 'y')
        self.training_targets = self.original_training_targets
        self.n_points = input_means.shape[ 0 ]
        self.d_input = input_means.shape[ 1 ]
        self.sparse_gp = Sparse_GP(n_inducing_points, self.n_points, self.d_input, self.input_means, self.input_vars, self.training_targets)
        self.set_for_prediction = False
        # Lazily compiled by predict() on first use.
        self.predict_function = None
    def initialize(self):
        # Delegate parameter initialization to the underlying graph.
        self.sparse_gp.initialize()
    def setForTraining(self):
        self.sparse_gp.setForTraining()
    def setForPrediction(self):
        self.sparse_gp.setForPrediction()
    def get_params(self):
        return self.sparse_gp.get_params()
    def set_params(self, params):
        self.sparse_gp.set_params(params)
    def getEnergy(self):
        """Return the scalar variational energy (symbolic) of the model."""
        self.sparse_gp.compute_output()
        return self.sparse_gp.getContributionToEnergy()[ 0, 0 ]
    def predict(self, means_test, vars_test):
        """Return predicted values for test means/variances, compiling the
        prediction function on first call and restoring training mode after."""
        self.setForPrediction()
        means_test = means_test.astype(theano.config.floatX)
        vars_test = vars_test.astype(theano.config.floatX)
        if self.predict_function is None:
            self.sparse_gp.compute_output()
            predictions = self.sparse_gp.getPredictedValues()
            X = T.matrix('X', dtype = theano.config.floatX)
            Z = T.matrix('Z', dtype = theano.config.floatX)
            self.predict_function = theano.function([ X, Z ], predictions, givens = { self.input_means: X, self.input_vars: Z })
        predicted_values = self.predict_function(means_test, vars_test)
        self.setForTraining()
        return predicted_values
    # This trains the network via LBFGS as implemented in scipy (slow but good for small datasets)
    def train_via_LBFGS(self, input_means, input_vars, training_targets, max_iterations = 500):
        """Fit all parameters with scipy's L-BFGS-B on the negative energy."""
        # We initialize the network and get the initial parameters
        input_means = input_means.astype(theano.config.floatX)
        input_vars = input_vars.astype(theano.config.floatX)
        training_targets = training_targets.astype(theano.config.floatX)
        self.input_means.set_value(input_means)
        self.input_vars.set_value(input_vars)
        self.original_training_targets.set_value(training_targets)
        self.initialize()
        self.setForTraining()
        X = T.matrix('X', dtype = theano.config.floatX)
        Z = T.matrix('Z', dtype = theano.config.floatX)
        y = T.matrix('y', dtype = theano.config.floatX)
        e = self.getEnergy()
        energy = theano.function([ X, Z, y ], e, givens = { self.input_means: X, self.input_vars: Z, self.training_targets: y })
        all_params = self.get_params()
        energy_grad = theano.function([ X, Z, y ], T.grad(e, all_params), \
            givens = { self.input_means: X, self.input_vars: Z, self.training_targets: y })
        initial_params = theano.function([ ], all_params)()
        params_shapes = [ s.shape for s in initial_params ]
        # scipy works on one flat vector, so params are (de)vectorized around
        # each objective evaluation.
        def de_vectorize_params(params):
            ret = []
            for shape in params_shapes:
                if len(shape) == 2:
                    ret.append(params[ : np.prod(shape) ].reshape(shape))
                    params = params[ np.prod(shape) : ]
                elif len(shape) == 1:
                    ret.append(params[ : np.prod(shape) ])
                    params = params[ np.prod(shape) : ]
                else:
                    ret.append(params[ 0 ])
                    params = params[ 1 : ]
            return ret
        def vectorize_params(params):
            return np.concatenate([ s.flatten() for s in params ])
        def objective(params):
            # Negated because fmin_l_bfgs_b minimizes but we maximize energy.
            params = de_vectorize_params(params)
            self.set_params(params)
            energy_value = energy(input_means, input_vars, training_targets)
            gradient_value = energy_grad(input_means, input_vars, training_targets)
            return -energy_value, -vectorize_params(gradient_value)
        # We create a theano function that evaluates the energy
        initial_params = vectorize_params(initial_params)
        x_opt, y_opt, opt_info = spo.fmin_l_bfgs_b(objective, initial_params, bounds = None, iprint = 1, maxiter = max_iterations)
        self.set_params(de_vectorize_params(x_opt))
        return y_opt
    def train_via_ADAM(self, input_means, input_vars, training_targets, input_means_test, input_vars_test, test_targets, \
        max_iterations = 500, minibatch_size = 4000, learning_rate = 1e-3, ignoroe_variances = True):
        """Fit parameters with mini-batch ADAM, reporting train/test RMSE and
        log-likelihood per epoch.

        NOTE(review): the `ignoroe_variances` [sic] flag is accepted but never
        read — confirm whether variance handling was meant to be optional.
        """
        input_means = input_means.astype(theano.config.floatX)
        input_vars = input_vars.astype(theano.config.floatX)
        training_targets = training_targets.astype(theano.config.floatX)
        n_data_points = input_means.shape[ 0 ]
        # Seed the shared variables with one random mini-batch for initialization.
        selected_points = np.random.choice(n_data_points, n_data_points, replace = False)[ 0 : min(n_data_points, minibatch_size) ]
        self.input_means.set_value(input_means[ selected_points, : ])
        self.input_vars.set_value(input_vars[ selected_points, : ])
        self.original_training_targets.set_value(training_targets[ selected_points, : ])
        print('Initializing network')
        sys.stdout.flush()
        self.setForTraining()
        self.initialize()
        X = T.matrix('X', dtype = theano.config.floatX)
        Z = T.matrix('Z', dtype = theano.config.floatX)
        y = T.matrix('y', dtype = theano.config.floatX)
        e = self.getEnergy()
        all_params = self.get_params()
        print('Compiling adam updates')
        sys.stdout.flush()
        process_minibatch_adam = theano.function([ X, Z, y ], -e, updates = adam_theano(-e, all_params, learning_rate), \
            givens = { self.input_means: X, self.input_vars: Z, self.original_training_targets: y })
        # Main loop of the optimization
        print('Training')
        sys.stdout.flush()
        n_batches = int(np.ceil(1.0 * n_data_points / minibatch_size))
        pbar = tqdm(range(max_iterations))
        for j in pbar:
            # Reshuffle the training set every epoch.
            suffle = np.random.choice(n_data_points, n_data_points, replace = False)
            input_means = input_means[ suffle, : ]
            input_vars = input_vars[ suffle, : ]
            training_targets = training_targets[ suffle, : ]
            for i in range(n_batches):
                minibatch_data_means = input_means[ i * minibatch_size : min((i + 1) * minibatch_size, n_data_points), : ]
                minibatch_data_vars = input_vars[ i * minibatch_size : min((i + 1) * minibatch_size, n_data_points), : ]
                minibatch_targets = training_targets[ i * minibatch_size : min((i + 1) * minibatch_size, n_data_points), : ]
                start = time.time()
                current_energy = process_minibatch_adam(minibatch_data_means, minibatch_data_vars, minibatch_targets)
                elapsed_time = time.time() - start
                #print('Epoch: {}, Mini-batch: {} of {} - Energy: {} Time: {}'.format(j, i, n_batches, current_energy, elapsed_time))
                #sys.stdout.flush()
            # Epoch metrics: RMSE and Gaussian log-likelihood on the test set...
            pred, uncert = self.predict(input_means_test, input_vars_test)
            test_error = np.sqrt(np.mean((pred - test_targets)**2))
            test_ll = np.mean(sps.norm.logpdf(pred - test_targets, scale = np.sqrt(uncert)))
            # ...and on the (shuffled) training set, predicted batch by batch.
            pred = np.zeros((0, 1))
            uncert = np.zeros((0, uncert.shape[ 1 ]))
            for i in range(n_batches):
                minibatch_data_means = input_means[ i * minibatch_size : min((i + 1) * minibatch_size, n_data_points), : ]
                minibatch_data_vars = input_vars[ i * minibatch_size : min((i + 1) * minibatch_size, n_data_points), : ]
                pred_new, uncert_new = self.predict(minibatch_data_means, minibatch_data_vars)
                pred = np.concatenate((pred, pred_new), 0)
                uncert = np.concatenate((uncert, uncert_new), 0)
            training_error = np.sqrt(np.mean((pred - training_targets)**2))
            training_ll = np.mean(sps.norm.logpdf(pred - training_targets, scale = np.sqrt(uncert)))
            pbar.set_description('Epoch {}, Train error: {:.4f} Test error: {:.4f} Test ll: {:.4f}'.format(j, training_error, test_error, test_ll))
            sys.stdout.flush()
            #print('Train error: {:.4f} Train ll: {:.4f}'.format(training_error, training_ll))
            #sys.stdout.flush()
    def get_incumbent(self, grid, lower, upper):
        """Return the minimum predicted mean over the search space (the
        incumbent value used by expected improvement)."""
        self.sparse_gp.compute_output()
        m, v = self.sparse_gp.getPredictedValues()
        X = T.matrix('X', dtype = theano.config.floatX)
        # Variances are clamped to zero: only the predictive mean is optimized.
        function_grid = theano.function([ X ], m, givens = { self.input_means: X, self.input_vars: 0 * X })
        function_scalar = theano.function([ X ], m[ 0, 0 ], givens = { self.input_means: X, self.input_vars: 0 * X })
        function_scalar_gradient = theano.function([ X ], T.grad(m[ 0, 0 ], self.input_means), \
            givens = { self.input_means: X, self.input_vars: 0 * X })
        return global_optimization(grid, lower, upper, function_grid, function_scalar, function_scalar_gradient)[ 1 ]
    def optimize_ei(self, grid, lower, upper, incumbent):
        """Return the single point maximizing log expected improvement."""
        X = T.matrix('X', dtype = theano.config.floatX)
        log_ei = self.sparse_gp.compute_log_ei(X, incumbent)
        # Negated: global_optimization minimizes, EI is maximized.
        function_grid = theano.function([ X ], -log_ei)
        function_scalar = theano.function([ X ], -log_ei[ 0, 0 ])
        function_scalar_gradient = theano.function([ X ], -T.grad(log_ei[ 0, 0 ], X))
        return global_optimization(grid, lower, upper, function_grid, function_scalar, function_scalar_gradient)[ 0 ]
    def batched_greedy_ei(self, q, lower, upper, mean, std, n_samples = 1, sample='normal'):
        """Greedily select a batch of q points by repeatedly maximizing the
        averaged log expected improvement conditioned on the points chosen so
        far. Returns a (q, d) array of selected inputs."""
        self.setForPrediction()
        grid_size = 10000
        # Candidate grid: Gaussian around `mean` or uniform within the bounds.
        if sample == 'normal':
            grid = casting(mean + np.random.randn(grid_size, self.d_input) * std)
        elif sample == 'uniform':
            grid = casting(lower + np.random.rand(grid_size, self.d_input) * (upper - lower))
        incumbent = self.get_incumbent(grid, lower, upper)
        X_numpy = self.optimize_ei(grid, lower, upper, incumbent)
        randomness_numpy = casting(0 * np.random.randn(X_numpy.shape[ 0 ], n_samples).astype(theano.config.floatX))
        randomness = theano.shared(value = randomness_numpy.astype(theano.config.floatX), name = 'randomness', borrow = True)
        X = theano.shared(value = X_numpy.astype(theano.config.floatX), name = 'X', borrow = True)
        x = T.matrix('x', dtype = theano.config.floatX)
        log_ei = self.sparse_gp.compute_log_averaged_ei(x, X, randomness, incumbent)
        function_grid = theano.function([ x ], -log_ei)
        function_scalar = theano.function([ x ], -log_ei[ 0 ])
        function_scalar_gradient = theano.function([ x ], -T.grad(log_ei[ 0 ], x))
        # We optimize the ei in a greedy manner
        print("Batch greedy EI selection...")
        pbar = tqdm(range(1, q))
        for i in pbar:
            # Each new point is appended to the shared X so the next EI
            # evaluation conditions on all previously selected points.
            new_point = global_optimization(grid, lower, upper, function_grid, function_scalar, function_scalar_gradient)[ 0 ]
            X_numpy = casting(np.concatenate([ X_numpy, new_point ], 0))
            randomness_numpy = casting(0 * np.random.randn(X_numpy.shape[ 0 ], n_samples).astype(theano.config.floatX))
            X.set_value(X_numpy)
            randomness.set_value(randomness_numpy)
            #print(i, X_numpy)
            #print(i, new_point)
        m, v = self.predict(X_numpy, 0 * X_numpy)
        print("Predictive mean at selected points:\n", m.flatten())
        return X_numpy
| [
"veronika.thost@ibm.com"
] | veronika.thost@ibm.com |
1c86de5b1579680f98739fe49ee6fb08a7b50c53 | 6fdfe2f08732c8a329c2540e00e9a97c1e3e25b0 | /Study/login_test.py | 499cf79e422a1a373e444e191508915c8abe5275 | [] | no_license | yuanjunling/UnittestCaseDemo | 33814ed9afef1b32c05580d33e6808370032bd61 | 464e322b07228480ba3d38658238adb742b3bc13 | refs/heads/master | 2021-05-16T20:34:28.085907 | 2020-07-20T09:19:26 | 2020-07-20T09:19:26 | 250,458,972 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 364 | py | import requests,json
# Smoke test: POST login credentials to the user service and decode the reply.
session = requests.session()

request_headers = {
    "Content-Type": "application/json"
}

# Pre-hashed password payload for the test account.
login_payload = {
    "userAccount": 151001,
    "userPassword": "a047a095710f46c941d771898c893eb5"
}

url = "http://106.75.37.93/user/login"
res = session.post(url=url, json=login_payload, headers=request_headers).json()
| [
"admin@890903"
] | admin@890903 |
f30d5d9533c725600b95a340256eba65211935d5 | 93173b80d84f317eb7f698e826881b09d5c55f28 | /temp.py | 446a51a98d1049b4edaf070b7ef29f1d0bb43dbb | [] | no_license | ZhdanovichTimofey/Project-astrofight.-Command-B | 8a99835ee309eebe63cbbb49addf1a6fdda1a5ac | 492d0cde0b47956ca6393efe1fa8d9c644d5cddd | refs/heads/main | 2023-02-10T21:48:47.093787 | 2021-01-06T15:21:02 | 2021-01-06T15:21:02 | 314,564,492 | 0 | 1 | null | 2020-12-18T06:44:53 | 2020-11-20T13:37:56 | Python | UTF-8 | Python | false | false | 4,424 | py | import model
import numpy as np
import pygame
import sys
import tkinter as t
import pygame.locals
class Player:
    """State for one participant: turn flag, score, remaining mistakes, and
    the sequence of constellation names visited so far."""

    def __init__(self, turn):
        self.turn = turn
        self.score = 0
        self.mistakes = 3
        # Visited constellation names (fixed-width unicode strings, 13 chars).
        self.path = np.array([], dtype='<U13')
pygame.init()
FPS = 20
# Constellation adjacency graph loaded from the data file; pick a random
# start/stop pair for this round.
stell_graph = model.Graph('Data.txt')
start_3str, stop_3str = stell_graph.rnd_start_stop()
current = stell_graph.constellations[start_3str]
stop = stell_graph.constellations[stop_3str]
window = pygame.display.set_mode((800, 670))
pygame.display.set_caption('ASTROWARS')
def win_blit(window, master_file_name, name_file_name, start, stop, lasts):
    """Draw the main game screen: background, title banner, the start/stop
    constellation names, and each player's most recent move.

    lasts holds 0-2 strings (each player's latest constellation); only the
    first two entries are drawn.
    """
    screen = pygame.image.load(master_file_name)
    info = pygame.image.load(name_file_name)
    window.blit(screen, (0, 150))
    window.blit(info, (70, 0))
    f = pygame.font.Font(None, 48)
    yellow = (251, 243, 0)
    window.blit(f.render(start, True, yellow), (150, 186))
    window.blit(f.render(stop, True, yellow), (94, 237))
    # Draw whichever "last move" labels exist, instead of probing lasts[0]
    # and lasts[1] under try/except IndexError as before.
    for text, pos in zip(lasts[:2], ((150, 370), (530, 370))):
        window.blit(f.render(text, True, yellow), pos)
def get_text(lasts, start_3str, stop_3str):
    """Run a small event loop collecting typed letters until Enter is pressed.

    Returns the typed word, or 'EXIT' if the window is closed. Redraws the
    board and the partially typed word every frame. Uses the module-level
    `window`, `clock` and `FPS`.
    """
    applicant = ''
    font = pygame.font.Font(None, 52)
    # NOTE(review): `done` is never set to True — the loop only exits via the
    # return statements below; confirm the flag can be removed.
    done = False
    while not done:
        for event in pygame.event.get():
            if event.type == pygame.locals.KEYDOWN:
                if event.unicode.isalpha():
                    applicant += event.unicode
                elif event.key == pygame.locals.K_BACKSPACE:
                    applicant = applicant[:-1]
                elif event.key == pygame.locals.K_RETURN:
                    return applicant
            elif event.type == pygame.QUIT:
                return 'EXIT'
        win_blit(window, 'master.jpg', 'name.png', start_3str, stop_3str, lasts)
        # Draw the in-progress word centered near the bottom of the screen.
        applicant_text = font.render(applicant, True, (251, 243, 0))
        rect = applicant_text.get_rect()
        rect.center = (400, 620)
        window.blit(applicant_text, rect)
        clock.tick(FPS)
        pygame.display.flip()
def special_event(window, file_name):
    """Flash a full-screen banner image (mistake / win screens) immediately."""
    banner = pygame.image.load(file_name)
    window.blit(banner, (0, 0))
    pygame.display.update()
# Initial draw before the first turn.
win_blit(window, 'master.jpg', 'name.png', start_3str, stop_3str, [])
pygame.display.flip()
clock = pygame.time.Clock()
finished = False
player1 = Player(True)
player2 = Player(False)
while not finished:
    # Gather each player's most recent move (if any) for display.
    lasts = []
    try:
        lasts.append(player1.path[len(player1.path) - 1])
    except IndexError:
        pass
    try:
        lasts.append(player2.path[len(player2.path) - 1])
    except IndexError:
        pass
    # Mark the current constellation as visited so it can't be reused.
    current.mark = 1
    if player1.turn:
        current_player = player1
    else:
        current_player = player2
    applicant_str = get_text(lasts, start_3str, stop_3str)
    if applicant_str == 'EXIT':
        finished = True
        continue
    # A valid move must name an unvisited neighbour of the current node.
    applicant = stell_graph.is_neighbours(current, applicant_str)
    if applicant:
        if applicant.mark:
            # Already visited: costs a mistake and flashes the error screen.
            current_player.mistakes -= 1
            special_event(window, 'mistake.jpg')
            clock.tick(1)
        else:
            current = applicant
            current_player.path = np.append(current_player.path,
                                            current.names[0])
            player1.turn = not player1.turn
            player2.turn = not player2.turn
            print('Meow')
    else:
        # Not a neighbour (or unknown name): also costs a mistake.
        current_player.mistakes -= 1
        special_event(window, 'mistake.jpg')
        clock.tick(1)
    # Out of mistakes: the other player wins.
    if not current_player.mistakes:
        if current_player is player1:
            special_event(window, 'pl2win.jpg')
            clock.tick(0.2)
            finished = 1
        else:
            special_event(window, 'pl1win.jpg')
            clock.tick(0.2)
            finished = 1
    # Reached the target constellation: the mover wins.
    if current is stop:
        if current_player is player1:
            special_event(window, 'pl1win.jpg')
            clock.tick(0.2)
            finished = 1
        else:
            special_event(window, 'pl2win.jpg')
            clock.tick(0.2)
            finished = 1
    pygame.display.update()
    clock.tick(FPS)
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            finished = True
pygame.quit() | [
"reva.sk@phystech.edu"
] | reva.sk@phystech.edu |
e63dd89cc048415ede1333d985917602fd401912 | 46a93de665323f81824806359e350cc07ea43dae | /backend/src/api.py | 90d260c6db4a33df4df587e1eed4b5be6285ffad | [] | no_license | ardeshirsaadat/coffee_shop | f8f4cb124150b29548c9fc0004d8ac703c194846 | 48c16714c784db447b5659a5eacbc3b4716cbb9f | refs/heads/master | 2023-02-26T02:12:21.086030 | 2021-02-04T15:29:26 | 2021-02-04T15:29:26 | 335,502,643 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,395 | py | import os
from flask import Flask, request, jsonify, abort
from sqlalchemy import exc
import json
from flask_cors import CORS
from .database.models import db_drop_and_create_all, setup_db, Drink
from .auth.auth import AuthError, requires_auth
app = Flask(__name__)
setup_db(app)
CORS(app)
'''
@TODO uncomment the following line to initialize the datbase
!! NOTE THIS WILL DROP ALL RECORDS AND START YOUR DB FROM SCRATCH
!! NOTE THIS MUST BE UNCOMMENTED ON FIRST RUN
'''
# WARNING: left uncommented, so every application start wipes and recreates
# the database.
db_drop_and_create_all()
# ROUTES
'''
@TODO implement endpoint
GET /drinks
it should be a public endpoint
it should contain only the drink.short() data representation
returns status code 200 and json {"success": True, "drinks": drinks} where drinks is the list of drinks
or appropriate status code indicating reason for failure
'''
@app.route('/drinks', methods=['GET'])
def get_drinks():
    """Public endpoint: list all drinks in short form.

    Returns 200 with {"success": True, "drinks": [...]}, or aborts 422 on
    failure.
    """
    try:
        drinks_objects = Drink.query.order_by(Drink.id).all()
        drinks = [drink.short() for drink in drinks_objects]
        return jsonify({
            'success': True,
            'drinks': drinks
        }), 200
    except Exception:
        # Narrowed from BaseException so KeyboardInterrupt/SystemExit propagate.
        abort(422)
'''
@TODO implement endpoint
GET /drinks-detail
it should require the 'get:drinks-detail' permission
it should contain the drink.long() data representation
returns status code 200 and json {"success": True, "drinks": drinks} where drinks is the list of drinks
or appropriate status code indicating reason for failure
'''
@app.route('/drinks-detail', methods=['GET'])
@requires_auth('get:drinks-detail')
def get_drinks_detail(payload):
    """Protected endpoint (get:drinks-detail): list all drinks in long form."""
    try:
        drinks_objects = Drink.query.order_by(Drink.id).all()
        drinks = [drink.long() for drink in drinks_objects]
        return jsonify({
            'success': True,
            'drinks': drinks,
        }), 200
    except AuthError:
        # Re-raise so the registered AuthError handler builds the response;
        # the previous abort(AuthError) passed a class where abort() expects
        # an integer status code.
        raise
    except Exception:
        abort(422)
'''
@TODO implement endpoint
POST /drinks
it should create a new row in the drinks table
it should require the 'post:drinks' permission
it should contain the drink.long() data representation
returns status code 200 and json {"success": True, "drinks": drink} where drink an array containing only the newly created drink
or appropriate status code indicating reason for failure
'''
@app.route('/drinks', methods=['POST'])
@requires_auth('post:drinks')
def add_drinks(payload):
    """Protected endpoint (post:drinks): create a drink and return it in
    long form as {"success": True, "drinks": [drink]}."""
    try:
        body = request.get_json()
        title = body.get('title', None)
        # The model stores the recipe column as a JSON list, so wrap the
        # submitted recipe object in a list before serializing.
        recipe_new = [body.get('recipe', None)]
        new_drink = Drink(title=title, recipe=json.dumps(recipe_new))
        Drink.insert(new_drink)
        return jsonify({
            'success': True,
            'drinks': [Drink.query.filter(Drink.title == title).first().long()]
        }), 200
    except AuthError:
        # Let the registered AuthError handler build the response.
        raise
    except Exception:
        abort(422)
'''
@TODO implement endpoint
PATCH /drinks/<id>
where <id> is the existing model id
it should respond with a 404 error if <id> is not found
it should update the corresponding row for <id>
it should require the 'patch:drinks' permission
it should contain the drink.long() data representation
returns status code 200 and json {"success": True, "drinks": drink} where drink an array containing only the updated drink
or appropriate status code indicating reason for failure
'''
@app.route('/drinks/<int:id>', methods=['PATCH'])
@requires_auth('patch:drinks')
def update_drink(payload, id):
    """Protected endpoint (patch:drinks): update title and/or recipe of
    drink <id> and return it in long form."""
    drink_object_to_update = Drink.query.filter(Drink.id == id).first()
    if drink_object_to_update is None:
        # Per the spec above, an unknown <id> is a 404 (was 422).
        abort(404)
    try:
        body = request.get_json()
        title = body.get('title', None)
        recipe = body.get('recipe', None)
        # Only overwrite fields that were actually supplied.
        if title is not None:
            drink_object_to_update.title = title
        if recipe is not None:
            drink_object_to_update.recipe = json.dumps(recipe)
        drink_object_to_update.update()
        return jsonify({
            'success': True,
            'drinks': [Drink.query.filter(Drink.id == id).first().long()]
        }), 200
    except AuthError:
        # Let the registered AuthError handler build the response.
        raise
    except Exception:
        abort(422)
'''
@TODO implement endpoint
DELETE /drinks/<id>
where <id> is the existing model id
it should respond with a 404 error if <id> is not found
it should delete the corresponding row for <id>
it should require the 'delete:drinks' permission
returns status code 200 and json {"success": True, "delete": id} where id is the id of the deleted record
or appropriate status code indicating reason for failure
'''
@app.route('/drinks/<int:id>', methods=['DELETE'])
@requires_auth('delete:drinks')
def delete_drink(payload, id):
    """Protected endpoint (delete:drinks): delete drink <id>."""
    drink_object_to_delete = Drink.query.filter(Drink.id == id).first()
    if drink_object_to_delete is None:
        # Per the spec above, an unknown <id> is a 404 (was 422).
        abort(404)
    try:
        drink_object_to_delete.delete()
        return jsonify({
            'success': True,
            # The spec above requires the deleted id under the "delete" key;
            # "drinks" is kept as well for backward compatibility.
            'delete': id,
            'drinks': id
        }), 200
    except AuthError:
        # Let the registered AuthError handler build the response.
        raise
    except Exception:
        abort(404)
# Error Handling
'''
Example error handling for unprocessable entity
'''
@app.errorhandler(422)
def unprocessable(error):
    """Render 422 (unprocessable entity) aborts as the standard JSON shape."""
    return jsonify({
        "success": False,
        "error": 422,
        "message": "unprocessable"
    }), 422
'''
@TODO implement error handlers using the @app.errorhandler(error) decorator
each error handler should return (with approprate messages):
jsonify({
"success": False,
"error": 404,
"message": "resource not found"
}), 404
'''
'''
@TODO implement error handler for 404
error handler should conform to general task above
'''
@app.errorhandler(404)
def bad_request(error):
    """Render 404 aborts as JSON.

    Per the spec above, the 404 message is "resource not found";
    "bad request" is the 400 message and was used here by mistake.
    """
    return jsonify({
        "success": False,
        "error": 404,
        "message": "resource not found"
    }), 404
'''
@TODO implement error handler for AuthError
error handler should conform to general task above
'''
@app.errorhandler(AuthError)
def handle_auth_error(error):
    """Render an AuthError as JSON, mirroring the other handlers' shape."""
    return jsonify({
        "success": False,
        # Match the other handlers: "error" carries the HTTP status code and
        # "message" the error payload; the original had the two swapped.
        "error": error.status_code,
        "message": error.error
    }), error.status_code
| [
"ardeshirsaadat@gmail.com"
] | ardeshirsaadat@gmail.com |
f92beff1f0298c50018c7592ee189f185dca5c4e | 0629087901f26f77f6a1f8f11425340715f18bb2 | /rectangle_calculator.py | bd0f4b1377199f2db2721ca78c025cf4dbc22fe9 | [] | no_license | Douglass-Jeffrey/ICS3U-Unit2-02-Python | 47058cc5d27fd265938a9e07b7443c86868c0578 | 168f9ebd35ad37ef229cfd6d0cc5e98624bb4d9c | refs/heads/master | 2020-07-27T14:47:45.605642 | 2019-09-20T21:55:11 | 2019-09-20T21:55:11 | 209,129,598 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 596 | py | #!/usr/bin/env python3
# Created by: Douglass Jeffrey
# Created on: September 2019
# This program calculates the area and perimeter of a rectangle
# with user input
def main():
    """Prompt for rectangle dimensions (mm) and print its area and perimeter."""
    # input
    length = int(input("Enter length of the rectangle (mm): "))
    width = int(input("Enter width of the rectangle (mm): "))

    # process & output
    print("")
    print("Area is {}mm2".format(length * width))
    print("Perimeter is {}mm".format(2 * (length + width)))
# Run the calculator only when executed as a script.
if __name__ == "__main__":
    main()
| [
"ubuntu@ip-172-31-19-198.ec2.internal"
] | ubuntu@ip-172-31-19-198.ec2.internal |
e4a64bf109862acc5d6c94db2370558dfead2208 | 67c32563fad4813f87cfb3f79b09218f93fde244 | /intake/tests/models/test_prebuilt_pdf_bundle.py | 96330965d777aa37f19248f52e1afb99663e0e37 | [] | no_license | codefordayton/intake | a810685a492b4937d8b094e1e105b53c8a93f012 | 02df2716f4d7622a01127762eaf9d741f44c871c | refs/heads/develop | 2021-01-21T19:06:11.541418 | 2017-07-26T10:09:24 | 2017-07-26T10:09:24 | 92,113,879 | 0 | 1 | null | 2017-05-23T01:08:12 | 2017-05-23T01:08:11 | null | UTF-8 | Python | false | false | 1,778 | py | from django.test import TestCase
from user_accounts.tests.factories import FakeOrganizationFactory
from intake.tests.factories import FormSubmissionWithOrgsFactory
from intake import models
class TestPrebuiltPDFBundle(TestCase):
    """Unit tests for the PrebuiltPDFBundle model."""
    def test_default_attributes(self):
        # A fresh bundle has no PDF, links back to its organization, holds the
        # applications it was given, and stringifies as "Unbuilt ...".
        fake_org = FakeOrganizationFactory()
        subs = FormSubmissionWithOrgsFactory.create_batch(
            4, organizations=[fake_org], answers={})
        sub_ids = [sub.id for sub in subs]
        fake_apps = models.Application.objects.filter(
            form_submission__id__in=sub_ids)
        prebuilt = models.PrebuiltPDFBundle(organization=fake_org)
        prebuilt.save()
        prebuilt.applications.add(*fake_apps)
        self.assertFalse(prebuilt.pdf)
        self.assertEqual(prebuilt.organization, fake_org)
        self.assertEqual(set(prebuilt.applications.all()), set(fake_apps))
        self.assertIn('Unbuilt', str(prebuilt))
    def test_set_pdf_to_bytes(self):
        # set_bytes stores the payload in the pdf FileField under a
        # per-organization filename.
        prebuilt = models.PrebuiltPDFBundle(
            organization=FakeOrganizationFactory())
        bytes_ = b'zomg'
        prebuilt.set_bytes(bytes_)
        prebuilt.save()
        expected_filename = 'org-1_newapps'
        # pull from db to ensure changes persist
        fetched = models.PrebuiltPDFBundle.objects.first()
        self.assertIn(expected_filename, fetched.pdf.name)
        self.assertEqual(bytes_, fetched.pdf.read())
    def test_set_pdf_to_empty_bytes(self):
        # Empty payload leaves the pdf field falsy.
        prebuilt = models.PrebuiltPDFBundle(
            organization=FakeOrganizationFactory())
        bytes_ = b''
        prebuilt.set_bytes(bytes_)
        prebuilt.save()
        # pull from db to ensure changes persist
        fetched = models.PrebuiltPDFBundle.objects.first()
        self.assertFalse(fetched.pdf)
"jennifermarie@users.noreply.github.com"
] | jennifermarie@users.noreply.github.com |
4d958559230bf83817d75a92d948de49ce033433 | 9360c502531b94e73239f2aed9aedeaf9a8745fd | /Analyse exploratoire.py | 6494b30235787bda577b02c60e7880b086d45812 | [] | no_license | Marigleta/Simplon | 4371129d1161eb4ee1b5bab52525923cf42c2db2 | f333d6094ac3b9a05545a7b58fef847a9168207e | refs/heads/main | 2023-05-06T22:37:44.460788 | 2021-05-26T18:27:53 | 2021-05-26T18:27:53 | 355,127,621 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,029 | py | #!/usr/bin/env python
# coding: utf-8
# In[4]:
import pandas as pd
fandango=pd.read_csv("fandango_scores.csv")
fandango
# In[5]:
norm_reviews=fandango[["FILM", "RT_user_norm", "Metacritic_user_nom", "IMDB_norm", "Fandango_Ratingvalue", "Fandango_Stars"]]
# In[6]:
norm_reviews.head()
# In[4]:
norm_reviews.isna()
# In[2]:
norm_reviews.head()
# In[6]:
norm_reviews.isna().sum()
# In[7]:
import matplotlib.pyplot as plt
from numpy import arange
fig =plt.figure()
ax = fig.add_subplot(1, 1, 1)
plt.show()
# In[8]:
import matplotlib.pyplot as plt
ax.plot(fandango["FILM"], fandango["RT_user_norm"], fandango["Metacritic_user_nom"], fandango["IMDB_norm"], fandango["Fandango_Ratingvalue"], fandango["Fandango_Stars"])
plt.show()
# In[9]:
pyplot.hist[]
# In[ ]:
# graph avec barres
# In[23]:
import matplotlib.pyplot as plt
import numpy as np
fig, ax = plt.subplots()
num_cols = ['RT_user_norm', 'Metacritic_user_nom', 'IMDB_norm','Fandango_Ratingvalue', 'Fandango_Stars']
bar_heights = norm_reviews.loc[norm_reviews["FILM"]=='Avengers: Age of Ultron (2015)'][num_cols].values
#print(bar_heights)
#print(bar_heights[0])
bar_positions = np.array([1,2,3,4,5]) # the label locations
width = 0.50 # the width of the bars
rects1 = ax.bar(bar_positions, bar_heights[0], width)
# Add some text for labels, title and custom x-axis tick labels, etc.
ax.set_ylabel('Note moyenne')
ax.set_xlabel('Sources de notation')
ax.set_title('Moyenne des notes utilisateurs pour le film Avengers: Age of Ultron(2015)')
plt.xticks(bar_positions,num_cols, rotation=90)
#ax.set_xticklabels(num_cols)
ax.legend()
fig.tight_layout()
plt.show()
# In[ ]:
import matplotlib.pyplot as plt
import numpy as np
fig, ax = plt.subplots()
num_cols = ['RT_user_norm', 'Metacritic_user_nom', 'IMDB_norm','Fandango_Ratingvalue', 'Fandango_Stars']
bar_heights = norm_reviews.loc[norm_reviews["FILM"]=='Avengers: Age of Ultron (2015)'][num_cols].values
#print(bar_heights)
#print(bar_heights[0])
bar_positions = np.array([1,2,3,4,5]) # the label locations
width = 0.50 # the width of the bars
rects1 = ax.bar(bar_positions, bar_heights[0], width)
# Add some text for labels, title and custom x-axis tick labels, etc.
ax.set_ylabel('Note moyenne')
ax.set_xlabel('Sources de notation')
ax.set_title('Moyenne des notes utilisateurs pour le film Avengers: Age of Ultron(2015)')
plt.xticks(bar_positions,num_cols, rotation=90)
#ax.set_xticklabels(num_cols)
ax.legend()
fig.tight_layout()
plt.show()
# In[31]:
import matplotlib.pyplot as plt
import numpy as np
fig, ax = plt.subplots()
num_cols = ['RT_user_norm', 'Metacritic_user_nom', 'IMDB_norm','Fandango_Ratingvalue', 'Fandango_Stars']
bar_heights = norm_reviews.loc[norm_reviews["FILM"]=='Avengers: Age of Ultron (2015)'][num_cols].values
#print(bar_heights)
#print(bar_heights[0])
bar_positions = np.array([1,2,3,4,5]) # the label locations
width = 0.50 # the width of the bars
rects1 = ax.barh(bar_positions, bar_heights[0], width)
# Add some text for labels, title and custom x-axis tick labels, etc.
ax.set_ylabel('Note moyenne')
ax.set_xlabel('Sources de notation')
ax.set_title('Moyenne des notes utilisateurs pour le film Avengers: Age of Ultron(2015)')
plt.xticks(bar_positions,num_cols, rotation=90)
#ax.set_xticklabels(num_cols)
ax.legend()
fig.tight_layout()
plt.show()
# In[39]:
import matplotlib.pyplot as plt
import numpy as np
figure_=plt.figure(figsize=[15,10])
ax1=figure_.add_subplot(1,2,1)
ax1.scatter(norm_reviews['Fandango_Ratingvalue'], norm_reviews['RT_user_norm'] )
ax1.set_xlabel( 'Fandango_Ratingvalue ')
ax1.set_ylabel( 'RT_user_norm ')
ax2=figure_.add_subplot(1,2,2, sharey=ax1)
ax2.scatter(norm_reviews['RT_user_norm'], norm_reviews['Fandango_Ratingvalue'] )
ax2.set_xlabel( 'RT_user_norm ')
ax2.set_ylabel( 'Fandango_Ratingvalue')
plt.show()
# In[41]:
import matplotlib.pyplot as plt
import numpy as np
figure_=plt.figure(figsize=[15,10])
ax1=figure_.add_subplot(1,2,1)
ax1.scatter(norm_reviews['Fandango_Ratingvalue'], norm_reviews['RT_user_norm'] )
ax1.set_xlabel( 'Fandango_Ratingvalue ')
ax1.set_ylabel( 'RT_user_norm ')
figure_=plt.figure(figsize=[15,10])
ax1=figure_.add_subplot(1,2,2)
ax1.scatter(fandango['Fandango_Ratingvalue'], fandango['RottenTomatoes'] )
ax1.set_xlabel( 'Fandango ')
ax1.set_ylabel( 'RottenTomatoes')
# In[26]:
#intervertir les axes (question 4)
figure_=plt.figure(figsize=[15,10])
ax1=figure_.add_subplot(1,2,1)
ax1.scatter(norm_reviews['Fandango_Ratingvalue'], norm_reviews['RT_user_norm'] )
ax1.set_xlabel( 'Fandango_Ratingvalue ')
ax1.set_ylabel( 'RT_user_norm ')
ax2=figure_.add_subplot(1,2,2, sharey=ax1)
ax2.scatter(norm_reviews['RT_user_norm'], norm_reviews['Fandango_Ratingvalue'] )
ax2.set_xlabel( 'RT_user_norm ')
ax2.set_ylabel( 'Fandango_Ratingvalue')
plt.show()
# In[27]:
#comparaison des correlations
fig = plt.figure(figsize=(15,10))
ax1 = fig.add_subplot(3,3,1)
ax2 = fig.add_subplot(3,3,2)
ax3 = fig.add_subplot(3,3,3)
ax1.scatter(fandango["Fandango_Ratingvalue"], fandango["RT_user_norm"],color="red")
ax1.set_xlabel("Fandango")
ax1.set_ylabel("RottenTomatoes")
ax1.set_xlim(0,5)
ax2.scatter(fandango["Fandango_Ratingvalue"], fandango["Metacritic_user_nom"],color="green")
ax2.set_xlabel("Fandango")
ax2.set_ylabel("Metacritic")
ax2.set_xlim(0,5)
ax3.scatter(fandango["Fandango_Ratingvalue"],fandango["IMDB_norm"],color="blue")
ax3.set_xlabel("Fandango")
ax3.set_ylabel("IMDB")
ax3.set_xlim(0,5)
plt.show()
# comparaison des correlations: de ces graphs nous pouvons deduire que la correlation des evaluations entre
# fandango et rotten tomatoes est la plus forte.
# In[28]:
# comparaison des histogrammes
fig=plt.figure('fig')
ax1=fig.add_subplot(2,2,1)
ax2=fig.add_subplot(2,2,2)
ax3=fig.add_subplot(2,2,3)
ax4=fig.add_subplot(2,2,4)
ax1.hist(norm_reviews['Fandango_Ratingvalue'],bins=20,range=(0,5))
ax2.hist(norm_reviews['RT_user_norm'],bins=20,range=(0,5))
ax3.hist(norm_reviews['Metacritic_user_nom'],bins=20,range=(0,5))
ax4.hist(norm_reviews['IMDB_norm'],bins=20,range=(0,5))
ax4.set_ylim(0,50)
ax3.set_ylim(0,50)
ax2.set_ylim(0,50)
ax1.set_ylim(0,50)
ax1.set_title('Fandango_Ratingvalue')
ax2.set_title('RT_user_norm')
ax3.set_title('Metacritic_user_nom')
ax4.set_title('IMDB_norm')
fig.subplots_adjust(wspace=None,hspace=0.5)
plt.show
# dans ces graphs, on remarque des ressemblances entre les evaluation que nous pouvons trouver dans ces
# differents sites: les piques pour les quatres sites se situent autour de "quatre".
#
# In[29]:
#diagrames à boites
import matplotlib.pyplot as plt
num_cols=['RT_user_norm','Metacritic_user_nom','IMDB_norm','Fandango_Ratingvalue']
fig.ax = plt.subplots
plt.boxplot(norm_reviews[num_cols])
ax.set_xticklabels(num_cols, rotation=90)
ax.set_ylim(0,50)
plt.show()
# # nous pouvons voir de ce graph que les reviews dans le cas de la colonne 'RT_user_norm' de 2,5 jusqu'à 4,
# #ce qui est le cas des autres websites
# In[ ]:
| [
"Marigleta"
] | Marigleta |
075729fc5141abc16128074dd9b5ca0c3afc0c3c | e214a310c1d2430242824d386a149dec63035c99 | /6024.py | 2abb0f0fbb13a00fac98d108aa6c7aae26ad929c | [] | no_license | JIWON1923/CodeUp_basic100 | 3704feeb4fdafc8a946eb99b84aa86d3786fa331 | 81a72eaf62f636032a8ce75560d0302bd37c36ea | refs/heads/main | 2023-04-03T19:36:05.989830 | 2021-04-13T12:38:19 | 2021-04-13T12:38:19 | 347,349,150 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 45 | py | str, num = input().split()
print (str + num)
| [
"noreply@github.com"
] | JIWON1923.noreply@github.com |
385f2663830a41939366cc1c6ae07330af7caa45 | aaa4eb09ebb66b51f471ebceb39c2a8e7a22e50a | /Lista 06/exercício 07.py | 7c604351ca6dbdf257a20dd0fad89ae1db55590c | [
"MIT"
] | permissive | Brenda-Werneck/Listas-CCF110 | c0a079df9c26ec8bfe194072847b86b294a19d4a | 271b0930e6cce1aaa279f81378205c5b2d3fa0b6 | refs/heads/main | 2023-09-03T09:59:05.351611 | 2021-10-17T00:49:03 | 2021-10-17T00:49:03 | 411,115,920 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 329 | py | #Escreva um algoritmo que armazene em um vetor o quadrado dos números ímpares no intervalo fechado de 1 a 20. Após isso, o algoritmo deve escrever todos os valores armazenados.
vetor = []
for i in range(21):
quadrado = i ** 2
vetor.append(quadrado)
for i in range(21):
if i % 2 == 1:
print(vetor[i]) | [
"89711195+Brenda-Werneck@users.noreply.github.com"
] | 89711195+Brenda-Werneck@users.noreply.github.com |
5aec1cc5416619cddfec817247ff56252302b835 | 576a25f30d67f29ef83ad9a493b80816822bcee6 | /Output/stats.py | 85cf24fc4ddd8d8b819ce68cd198a8bc0a32b763 | [] | no_license | yorozultd/hun-eng | c001736a260fd19b3e36777d3f8ff29b778b573d | d922b48ea05bb8c1676ba8dad727d0487934d103 | refs/heads/master | 2022-02-28T03:05:36.020076 | 2019-11-15T15:48:52 | 2019-11-15T15:48:52 | 208,487,065 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 827 | py | import xml.etree.ElementTree as ET
import json
path = './xmlDataSanitized.xml'
tree=ET.parse(path);
root = tree.getroot()
products =root.findall('product')
print("Total Number of Products : "+ str(len(products)))
stat={}
categories=0
for product in products:
if product.find('category') != None :
if stat.get(product.find('category').text.split('-')[0].strip()) == None :
stat[product.find('category').text.split('-')[0].strip()]=1 ;
categories+=1;
else :
stat[product.find('category').text.split('-')[0].strip()]=stat[product.find('category').text.split('-')[0].strip()]+1
print("Total Number of Categories : "+ str(categories))
stat= sorted(stat.items(), key = lambda kv:(kv[1], kv[0]),reverse=True)
with open('result.json', 'w') as fp:
json.dump(stat, fp) | [
"ch34tc0d3@pop-os.localdomain"
] | ch34tc0d3@pop-os.localdomain |
5fc846f4e95988a233b9edc7a92b3c1f81dd76f9 | 72e497722c5033d15c200436faa39eb574557159 | /backend/todo_api/urls.py | 35f8edd77995fe77fc1ef188c64d216e89aba6ef | [] | no_license | jhefreyzz/todo | 8db820e7b904a132998c0e44685cf37fad491224 | 5ab17a40cf959694ad405137cff7dc288e374f3f | refs/heads/master | 2020-04-14T08:37:35.510293 | 2019-01-01T13:42:19 | 2019-01-01T13:42:19 | 163,739,545 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 799 | py | """todo_api URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import include, path
urlpatterns = [
path('admin/', admin.site.urls),
path('api/', include('todos.urls'))
]
| [
"jhefreysajot@gmail.com"
] | jhefreysajot@gmail.com |
a8579ed8f40715b4fd4a9295b637676a7ce56de3 | 00e0deb938e17401fa4b94c60080abbcc8ecfc62 | /Django/bookAuth/bookAuth/urls.py | e5b77b03398fbe020ed56df907484556f9f86926 | [] | no_license | Jacobgives/My_Projects | a818a91c8db7b541e6ecdaf1a6856525ca153aa9 | 1788e0fa27b202386d6daa1fed25fc820405dd50 | refs/heads/master | 2021-08-07T05:41:51.426224 | 2017-11-07T16:06:31 | 2017-11-07T16:06:31 | 104,389,185 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 758 | py | """bookAuth URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
]
| [
"jacobgives01@gmail.com"
] | jacobgives01@gmail.com |
c3465e10e85bb0295360983bd61ab81020a95c4a | aee6e20a3a6c602f1464e72fd0a6c4f684e5edcd | /ArticleProj/ArticleApp/migrations/0001_initial.py | fed8cdcda6ea132962f95de36222ee520933bfa7 | [] | no_license | Ataulla/article | fc117d8b7081b42fca8d4f383fd00c8f831c87a2 | 00f6ed2d865cda72cb4abafcd344a120ebf2dbf9 | refs/heads/master | 2021-01-13T08:19:15.473797 | 2016-10-24T13:29:53 | 2016-10-24T13:29:53 | 71,789,919 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 973 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Article',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=200)),
('author', models.CharField(max_length=200)),
('publication_date', models.DateTimeField(verbose_name=b'publication date')),
('category', models.CharField(max_length=200)),
('hero_image_name', models.CharField(max_length=200)),
('optional_image_name', models.CharField(max_length=200)),
('body_text', models.TextField()),
],
options={
},
bases=(models.Model,),
),
]
| [
"ataulla@gmail.com"
] | ataulla@gmail.com |
0bf5ad29b1dcadc95381dd3c70d9454c0dd71bec | 191a669fa6933e803efddd3b99cb8eff603e4091 | /psdf_main/helpers.py | a71f4c23aa4c58abadc404d170fdcda5dabdfdbe | [] | no_license | AbbasHaiderAbidi/PSDF_main | cbbd2369bf06baed8cb875b025225115345b26ec | 2ffc6a1de79280f4ab5b3f94ac20f9f31618ec0e | refs/heads/master | 2023-09-03T15:06:24.664724 | 2021-11-01T05:58:58 | 2021-11-01T05:58:58 | 339,616,886 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,614 | py | from .helper_imports import *
def useronline(request):
if (request.session.has_key('user')):
return True
else:
return False
def adminonline(request):
if (request.session.has_key('user') and request.session.has_key('admin')):
return True
else:
return False
def auditoronline(request):
if (request.session.has_key('auditor')):
return True
else:
return False
def proj_of_user(request, projid):
if useronline(request):
if (projects.objects.get(id = projid).userid.username == request.session['user']):
return True
else:
return False
else:
return False
def getuser(request, username):
return users.objects.filter(username = request.session['user'])[:1].get()
def projectofuser(request, username,projid):
if projects.objects.get(id = projid).userid == getuser(request, username):
return True
else:
return False
def oops(request):
return render(request, 'psdf_main/404.html')
def getadmin_id():
userobj = users.objects.filter(admin = True)[:1]
for user in userobj:
return user.id
def userDetails(username):
user = {}
userobj = users.objects.filter(username = username)[:1]
for user1 in userobj:
user['id'] = user1.id
user['username'] = user1.username
user['password'] = user1.password
user['nodal'] = user1.nodal
user['contact'] = user1.contact
user['address'] = user1.address
user['utilname'] = user1.utilname
user['region'] = user1.region
user['lastlogin'] = user1.lastlogin
user['reqdate'] = user1.reqdate
user['aprdate'] = user1.aprdate
user['admin'] = user1.admin
user['auditor'] = user1.auditor
user['active'] = user1.active
if user1.notification:
user['notifications'] = user1.notification.split(']*[')[1:]
else:
user['notifications'] = ""
if user1.tpd:
user['temp_boq'] = yaml.load(user1.tpd, yaml.FullLoader)
else:
user['temp_boq'] = ''
user['activate'] = user1.activate
return user
def projectDetails(projid):
proj = {}
proj1 = projects.objects.get(id = projid)
sub_boq = boqdata.objects.filter(project = proj1, boqtype = '1')
approved_boq = boqdata.objects.filter(project = proj1, boqtype = '2')
if proj1:
proj['id'] = proj1.id
proj['name'] = proj1.name
proj['dprsubdate'] = proj1.dprsubdate
proj['amt_asked'] = proj1.amt_asked
proj['amt_released'] = proj1.amt_released
proj['schedule'] = proj1.schedule
proj['fundcategory'] = proj1.fundcategory
proj['quantum'] = proj1.quantumOfFunding
proj['status'] = proj1.status
proj['remark'] = proj1.remark
proj['submitted_boq'] = sub_boq
# proj['submitted_boq'] = get_boq_details(proj1.submitted_boq)
proj['submitted_boq_Gtotal'] = Gtotal
proj['user_username'] = proj1.userid.username
proj['user_nodal'] = proj1.userid.nodal
proj['user_region'] = proj1.userid.region
proj['user_utilname'] = proj1.userid.utilname
proj['user_contact'] = proj1.userid.contact
proj['user_address'] = proj1.userid.address
proj['user_reqdate'] = proj1.userid.reqdate
proj['user_aprdate'] = proj1.userid.reqdate
proj['user_lastlogin'] = proj1.userid.lastlogin
proj['user_active'] = proj1.userid.active
return proj
else:
return False
def get_boq_details(submitted_boq):
print(submitted_boq)
# print()
eachboq = submitted_boq[1:-1].replace("\'", "\"").replace("}, {","}&%#{").split('&%#')
abc = []
for boq in eachboq :
print(boq)
one_boq = json.loads(boq)
# attrlist = boq.split(', ')
# one_boq={}
# for attr in attrlist:
# # print(attr)
# attrname = attr.split(':')[0][1:-1]
# attrvalue = attr.split(':')[1][:-1]
# one_boq[attrname] = attrvalue
# # print(one_boq)
abc.append(one_boq)
return abc
def get_Gtotal_list(abc):
item_Gtotal = {}
Gtotal_list = []
for boq in abc:
if boq['itemname'] in item_Gtotal.keys():
item_Gtotal[boq['itemname']] = item_Gtotal[boq['itemname']] + boq['itemcost']
else:
item_Gtotal[boq['itemname']] = boq['itemcost']
for key, value in item_Gtotal.items():
Gtotal_list.append({'itemname':key, 'grandtotal':value})
return Gtotal_list
def get_Gtotal(abc):
item_Gtotal = {}
Gtotal_list = []
totalval = 0
for boq in abc:
if boq['itemname'] in item_Gtotal.keys():
item_Gtotal[boq['itemname']] = item_Gtotal[boq['itemname']] + boq['itemcost']
else:
item_Gtotal[boq['itemname']] = boq['itemcost']
for key, value in item_Gtotal.items():
totalval = totalval + value
return totalval
def boq_grandtotal(givenboq):
Gtotal = 0
for boq in givenboq:
Gtotal = Gtotal + float(boq.itemqty)*float(boq.unitcost)
return Gtotal
# def temp_projectDetails(projid):
# proj = {}
# proj1 = temp_projects.objects.get(id = projid)
# if proj1:
# proj['id'] = proj1.id
# proj['name'] = proj1.proname
# proj['dprsubdate'] = proj1.dprsubdate
# proj['amt_asked'] = proj1.amountasked
# proj['deny'] = proj1.deny
# proj['schedule'] = proj1.schedule
# proj['remark'] = proj1.remark
# proj['removed'] = proj1.removed
# proj['submitted_boq'] = get_boq_details(proj1.submitted_boq)
# proj['submitted_boq_Gtotal'] = get_Gtotal_list(proj['submitted_boq'])
# proj['user_username'] = proj1.userid.username
# proj['user_nodal'] = proj1.userid.nodal
# proj['user_region'] = proj1.userid.region
# proj['user_utilname'] = proj1.userid.utilname
# proj['user_contact'] = proj1.userid.contact
# proj['user_address'] = proj1.userid.address
# proj['user_reqdate'] = proj1.userid.reqdate
# proj['user_aprdate'] = proj1.userid.reqdate
# proj['user_lastlogin'] = proj1.userid.lastlogin
# proj['user_active'] = proj1.userid.active
# return proj
# else:
# return False
def pen_users(request):
if adminonline(request):
penuser = users.objects.filter(activate = False)
return penuser
else:
return oops(request)
def pen_users_num(request):
if adminonline(request):
penuser = pen_users(request)
if penuser:
return penuser.count()
else:
return 0
else:
return oops(request)
def get_all_users(request):
if adminonline(request):
usersobj = users.objects.filter(admin = False)
allusers = []
for userobj in usersobj:
allusers.append(userDetails(userobj.username))
return allusers
else:
return False
def isfloat(value):
try:
float(value)
return True
except:
return False
def isnum(value):
try:
int(value)
return True
except:
return False
def smkdir(dir_path):
try:
if not os.path.exists(dir_path):
os.makedirs(dir_path)
return True
except:
return False
def sremove(filepath):
try:
os.remove(filepath)
return True
except OSError:
return False
def srmdir(filename):
try:
shutil.rmtree(filename, ignore_errors=True)
return True
except OSError:
return False
def handle_uploaded_file(path, f):
try:
destination = open(path, 'wb+')
for chunk in f.chunks():
destination.write(chunk)
destination.close()
return True
except:
return False
def handle_download_file(filepath, request):
print("DOWNLOAD STARTED")
if os.path.exists(filepath):
print("EXISTS")
with open(filepath,'rb') as fh:
response = HttpResponse(fh.read(), content_type = "application/adminupload")
response['Content-Disposition'] = 'inline;filename =' + filepath.split('/')[-1]
return response
else:
print("DOES NOT EXISTS")
return oops(request)
# def getTempProjects(request):
# if adminonline(request):
# temp_project_list = []
# temp_project = temp_projects.objects.all().exclude(deny = True)
# for proj in temp_project:
# temp_project_list.append(temp_projectDetails(proj.id))
# return temp_project_list
# return False
def sanitize(str0):
str1 = str0.replace(",","")
str2 = str1.replace(":","")
str3 = str2.replace("/","")
str4 = str3.replace("]["," ")
return str4
def username_sanitize(str0):
str1 = sanitize(str0)
str2 = str1.replace(" ","")
return str2
def getTempProjects_user(request, userid):
if useronline(request):
temp_project_list = []
temp_project = temp_projects.objects.filter(userid = userid).exclude(deny = True)
for proj in temp_project:
proj.submitted_boq = get_boq_details(proj.submitted_boq)
proj.submitted_boq_Gtotal = get_Gtotal_list(proj.submitted_boq)
return temp_project_list
return False
def full_admin_context(request):
if adminonline(request):
# return {'user':userDetails(request.session['user']), 'nopendingusers' : users.objects.filter(activate = False).count(), 'nopendingprojects' : temp_projects.objects.all().count()}
context = {'user':userDetails(request.session['user']) , 'nopendingusers' : pen_users_num(request), 'nopendingprojects' : temp_projects.objects.all().exclude(deny = True).count()}
context['tesgprojects'] = projects.objects.filter(status = '1', deny = False)
context['appraisal_projects'] = projects.objects.filter(status = '2', deny = False)
context['monitoring_projects'] = projects.objects.filter(status = '3', deny = False)
context['noTESG'] = context['tesgprojects'].count()
context['noappr'] = context['appraisal_projects'].count()
context['nomon'] = context['monitoring_projects'].count()
return context
else:
return {}
def full_auditor_context(request):
if auditoronline(request):
context = {'user':userDetails('auditor')}
context['pending_projects'] = temp_projects.objects.all()
context['all_projs'] = projects.objects.all()
context['tesgs'] = TESG_admin.objects.all()
context['apprs'] = Appraisal_admin.objects.all()
context['monis'] = Monitoring_admin.objects.all()
return context
else:
return {}
def full_user_context(request):
if useronline(request):
context = {'user':userDetails(request.session['user'])}
context['tesgprojects'] = projects.objects.filter(status = '1', userid = users.objects.get(id = context['user']['id']), deny = False)
context['appraisal_projects'] = projects.objects.filter(status = '2', userid = users.objects.get(id = context['user']['id']), deny = False)
context['monitoring_projects'] = projects.objects.filter(status = '3', userid = users.objects.get(id = context['user']['id']), deny = False)
context['noTESG'] = context['tesgprojects'].count()
context['noappr'] = context['appraisal_projects'].count()
context['nomon'] = context['monitoring_projects'].count()
userobj = users.objects.get(id = context['user']['id'])
projectobj = temp_projects.objects.filter(userid = userobj, deny = False)
context['projectobj']= projectobj
context['noprojobj']= projectobj.count()
return context
else:
return {}
def notification(userid, notification):
project_user = users.objects.get(id = userid)
project_user.notification = str(project_user.notification) + ']*[' + str(notification)
project_user.save(update_fields=['notification'])
def get_TESG_id(request,tesgnum, projid):
if adminonline(request):
return TESG_master.objects.filter(tesgnum = TESG_admin.objects.filter(TESG_no = int(tesgnum))[:1].get(), project = projects.objects.get(id = projid))[:1].get().id
else:
return oops(request)
def emp_check(celldata):
if celldata == '' or celldata == ' ' or celldata == ' ' or celldata == None:
return True
else:
return False | [
"abbashaider2131995@gmail.com"
] | abbashaider2131995@gmail.com |
534ba0dcf7a9aeeb061afae9ac534fbab3a2f718 | 8ed1b95f8f2394ebe0d856dd98405ad2e30485d9 | /src/feature_extract/check_features.py | a2363c0281c207e1a5d81f298a0e03e38fb409f8 | [
"MIT"
] | permissive | j592213697/Depression-Identification | 7c6052b2841c51fcac1eeed906248b6b837faa3a | 31b5e6f44ecd6a87b1a181fcd9e8388edb15a176 | refs/heads/master | 2020-03-14T20:28:06.541618 | 2017-05-05T05:37:27 | 2017-05-05T05:37:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 976 | py | import pandas as pd
def get_test_data():
test_data = pd.read_csv('data/classification_data/dev_split.csv')
#print test_data
test = test_data['Participant_ID'].tolist()
#print test
#test.append(video)
clm_d = pd.read_csv('data/disc_nondisc/discriminative_CLM.csv')
covarep_d = pd.read_csv('data/disc_nondisc/discriminative_COVAREP.csv')
liwc_d = pd.read_csv('data/disc_nondisc/discriminative_LIWC.csv')
clm_nd = pd.read_csv('data/disc_nondisc/nondiscriminative_CLM.csv')
covarep_nd = pd.read_csv('data/disc_nondisc/nondiscriminative_COVAREP.csv')
liwc_nd = pd.read_csv('data/disc_nondisc/nondiscriminative_LIWC.csv')
for key in test:
if not ((clm_nd['video'] == key).any() ):
print "visual ",key
if not ((covarep_nd['video'] == key).any() ):
print "acoustic ", key
#print key
if not((liwc_nd['video'] == key).any()):
print "liwc ", key
get_test_data()
| [
"kamalakumar.indhu@gmail.com"
] | kamalakumar.indhu@gmail.com |
1d0ffab25ac94e98b8f050128f92921206c82111 | 1eb0213140ada1c48edc5fb97b439d6556e6c3a9 | /0x04-python-more_data_structures/9-multiply_by_2.py | 9888234f55ffbf8f0f04fb4116bee93857f1096a | [] | no_license | HeimerR/holbertonschool-higher_level_programming | 53d2a3c536fd9976bb7fea76dd2ecf9a6ba3297e | 892c0f314611c0a30765cf673e8413dbee567a2d | refs/heads/master | 2020-05-18T02:24:11.829328 | 2020-04-30T03:59:04 | 2020-04-30T03:59:04 | 184,112,468 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 270 | py | #!/usr/bin/python3
def multiply_by_2(a_dictionary):
if a_dictionary is not None:
aux = a_dictionary.copy()
for k, v in sorted(aux.items()):
val = v * 2
tuple_aux = {k: val}
aux.update(tuple_aux)
return aux
| [
"732@holbertonschool.com"
] | 732@holbertonschool.com |
4a92105125c6c47d36ad1b16a4d42bbca9b1f117 | 601999cfbbd44292520284dbc69c16d4a1d0955a | /jingdong/JD_summaries/JD_summaries/middlewares.py | 6f303e05748059a3d042bcefcc1de0193b62685a | [] | no_license | SwimmingFish96/py2_spider | 1a8cd7e97bf472894192b7d93e5648083f778556 | c8c263d9343406aa5c285a404f2b75facd6efec1 | refs/heads/master | 2022-02-20T17:43:55.345922 | 2017-07-01T11:33:19 | 2017-07-01T11:33:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,885 | py | # -*- coding: utf-8 -*-
# Define here the models for your spider middleware
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/spider-middleware.html
from scrapy import signals
class JdSummariesSpiderMiddleware(object):
# Not all methods need to be defined. If a method is not defined,
# scrapy acts as if the spider middleware does not modify the
# passed objects.
@classmethod
def from_crawler(cls, crawler):
# This method is used by Scrapy to create your spiders.
s = cls()
crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
return s
def process_spider_input(response, spider):
# Called for each response that goes through the spider
# middleware and into the spider.
# Should return None or raise an exception.
return None
def process_spider_output(response, result, spider):
# Called with the results returned from the Spider, after
# it has processed the response.
# Must return an iterable of Request, dict or Item objects.
for i in result:
yield i
def process_spider_exception(response, exception, spider):
# Called when a spider or process_spider_input() method
# (from other spider middleware) raises an exception.
# Should return either None or an iterable of Response, dict
# or Item objects.
pass
def process_start_requests(start_requests, spider):
# Called with the start requests of the spider, and works
# similarly to the process_spider_output() method, except
# that it doesn’t have a response associated.
# Must return only requests (not items).
for r in start_requests:
yield r
def spider_opened(self, spider):
spider.logger.info('Spider opened: %s' % spider.name)
| [
"3071938610@qq.com"
] | 3071938610@qq.com |
23031e0e20c5400a098bda6953c93140ef4f20e8 | a6e004e96506911a58238ed21e30f4cfd0c12148 | /project/utils/utils.py | e40be178d3ef6541e2ed20b64b9d9a7e94917cb6 | [] | no_license | MuZiLiHui/HybridFeatureSelection | c257f444dd889ef8e0d0f6ce626ad61de4cb76e6 | 18aefa1762e235125ec54bd31f8a8e86e75194e8 | refs/heads/master | 2020-12-02T03:21:09.983280 | 2018-06-21T23:17:39 | 2018-06-21T23:17:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 673 | py | import time
from project.utils.io import save_pickle
from project.utils.io import load_pickle
def calculate_time_lapse(function, *args, **kwargs):
start = time.time()
result = function(*args, **kwargs)
lapse = time.time() - start
return lapse, result
class ResultMixin(object):
"""Mixin generico de resultados.
Atributos:
- PATH: (string) Dirección en la que se guardará el resultado.
"""
def save(self):
filename = self.PATH + self.__str__()
save_pickle(filename, self)
def load(self):
filename = self.PATH + self.__str__()
obj = load_pickle(filename)
return obj if obj else self
| [
"daniel.matos@pucp.pe"
] | daniel.matos@pucp.pe |
525d22141a592c5bad7e3c9ec2c9dc7adca5fcb0 | 6ab3d02c6b5426cd122b3d3c7b31faee7ea917d4 | /DP_subsetsum.py | 63e52ba7a55d50d5251646b8159442d9577f5fe2 | [] | no_license | AishwaryalakshmiSureshKumar/DS-Algo | e54967ed24c641059fe15b286359f1b71141eeff | a624b29182c92b5fa8017aae597eb4ad2475deae | refs/heads/main | 2023-04-21T17:17:10.342833 | 2021-04-18T18:03:57 | 2021-04-18T18:03:57 | 356,888,335 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 615 | py | #code
def subsetSum(n, val, m):
    """Print 'YES' if some subset of val[0:n] sums exactly to m, else 'NO'.

    Standard 0/1 subset-sum dynamic program: T[i][j] is True when some
    subset of the first i values sums to j.

    Bug fix: the original recurrence was ``T[i][j] = T[i-1][j-val[i]]``,
    which forces element i into the subset whenever it fits, losing the
    "skip element i" branch (e.g. n=2, val=[5, 3], m=5 wrongly printed
    NO).  It also aliased row i-1 onto row i for n == 1 via T[-1].  Using
    an (n+1) x (m+1) table fixes both.
    """
    T = [[False] * (m + 1) for _ in range(n + 1)]
    for i in range(n + 1):
        T[i][0] = True  # the empty subset always sums to 0
    for i in range(1, n + 1):
        for j in range(1, m + 1):
            if j < val[i - 1]:
                # Element i is too large for target j: inherit without it.
                T[i][j] = T[i - 1][j]
            else:
                # Either skip element i, or use it and solve j - val[i-1].
                T[i][j] = T[i - 1][j] or T[i - 1][j - val[i - 1]]
    if T[n][m]:
        print('YES')
    else:
        print('NO')
# Driver: for each test case read n and the value list, then check whether
# some subset sums to the largest value (clamped at 0, matching the
# original hand-rolled scan that started from maxx = 0).
case = int(input())
for i in range(case):
    n = int(input())
    val = list(map(int, input().split()))
    # Idiom fix: the original scanned the list manually to find the
    # maximum; max([0, *val]) is equivalent (including the 0 floor).
    maxx = max([0, *val])
    subsetSum(n, val, maxx)
| [
"noreply@github.com"
] | AishwaryalakshmiSureshKumar.noreply@github.com |
0963622ace28ea6c4ec5df7b4a460444a5def95f | f79fcc48f20625bc30a0e68faeeeb90c0ffdfcda | /Queues/ReverseFirstKInQueue.py | 65690f3ebc2c356bd4a1fc92f429c5c7673619fc | [] | no_license | KurinchiMalar/DataStructures | 10e6e99e73451d88e932edff8ae40a231ed0f444 | db14efa24b98bb0f5121ffcf02340a85d63fb2bb | refs/heads/master | 2021-01-21T04:31:45.138640 | 2016-06-29T02:51:16 | 2016-06-29T02:51:16 | 48,610,802 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 993 | py | '''
Given an integer k and a queue of integers, how do you reverse the order of the first k elements of the queue, leaving
the other elements in the same relative order?
For eg. if k = 4 and queue has the elements [10,20,30,40,50,60,70,80,90] the output should be [40,30,20,10,50,60,70,80,90]
'''
from Stacks import Stack
import Queue
# Time Complexity : O(n)
# Space Complexity : O(n)
def reverse_k_elements(queue, k):
    """Reverse the order of the first *k* items of *queue* in place.

    Time O(n), space O(n): the first k items are pushed onto a stack
    (which reverses them when popped back), then the untouched tail is
    rotated behind the reversed prefix.  Intermediate states are printed.
    """
    holding = Stack.Stack()
    # Move the first k items onto the stack.
    for _ in range(k):
        holding.push(queue.dequeue().get_data())
    holding.print_stack()
    queue.print_queue()
    # Popping re-enqueues them in reversed order.
    while holding.size > 0:
        queue.enqueue(holding.pop().get_data())
    queue.print_queue()
    # Rotate the remaining items so the tail keeps its relative order.
    for _ in range(queue.size - k):
        queue.enqueue(queue.dequeue().get_data())
    queue.print_queue()
queue.enqueue(queue.dequeue().get_data())
queue.print_queue()
def create_queue(Ar, queue):
    """Enqueue every element of *Ar* onto *queue* and return the queue."""
    for item in Ar:
        queue.enqueue(item)
    return queue
# Demo: build a 9-element queue and reverse its first 5 elements,
# expecting [50,40,30,20,10,60,70,80,90].
Ar = [10,20,30,40,50,60,70,80,90]
queue = Queue.Queue()
queue = create_queue(Ar,queue)
reverse_k_elements(queue,5)
"kurinchimalar.n@gmail.com"
] | kurinchimalar.n@gmail.com |
03b9c005465cd8808120ac82b7a3c858de5579e3 | bf0dec26b2459f1cc134173e8fb83cfc84fcee9b | /Issue.py | cc5923e36ce69a54c646d70a764290a5a74e0d67 | [] | no_license | Sgrygorczuk/TurkSystemDB | b0153d68cfb67d4685de7474650ba24b3706ef45 | ef0754a70da07fa232c138ba132cb6b3206ca7e4 | refs/heads/master | 2020-06-28T14:30:13.241132 | 2017-12-05T05:48:46 | 2017-12-05T05:48:46 | 200,255,617 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,423 | py | import jsonIO
class Issue:
    """An issue record persisted through the jsonIO-backed ``issue_db``.

    Constructing an Issue with a real ``referred_id`` immediately appends
    a matching row to the database; the sentinel value ``'Nan'`` creates
    an in-memory placeholder that can be filled later via ``load_db``.
    """

    db = "issue_db"  # jsonIO table name backing this class

    def __init__(self, referred_id='Nan', issue_desc="", admin_review="", date_resolved="", resolved=False):
        self.id = 'Nan'
        # new_issue() may be called again later to persist the record
        self.new_issue(referred_id, issue_desc, admin_review, date_resolved, resolved)

    def new_issue(self, referred_id, issue_desc, admin_review="", date_resolved="", resolved=False):
        """Populate the instance and, for a real referred_id, persist a new row."""
        self.set_all(referred_id, issue_desc, admin_review, date_resolved, resolved)
        # 'Nan' is the placeholder sentinel: skip the database in that case
        if referred_id != 'Nan':
            self.id = jsonIO.get_last_id(self.db)
            # Idiom fix: identity comparison for None (was ``== None``).
            if self.id is None:
                self.id = 0  # first row ever created
            else:
                self.id += 1  # last + 1 for the new row
            jsonIO.add_row(self.db, self.get_all())

    def set_all(self, referred_id, issue_desc, admin_review, date_resolved, resolved, modify_db=0):
        """Set every field on the instance; optionally write the row back."""
        self.referred_id = referred_id  # if it a new_project, ref_id = project_id
        self.issue_desc = issue_desc  # new user, blacklisted, rating, rejected, balance, quit team, quit user
        self.admin_review = admin_review  # admin's decision and explanation
        self.date_resolved = date_resolved  # date the admin resolved this (used for blacklisted user, 1 year after)
        self.resolved = resolved  # true/false
        if modify_db:
            jsonIO.set_row(self.db, self.get_all())

    def load_db(self, id):
        """Load row *id* from the database into this instance.

        Returns the raw row dict on success, or an empty list when the
        row does not exist (the instance is left untouched in that case).
        """
        array = jsonIO.get_row(self.db, id)
        if array:
            self.id = id
            self.dump(array)
            return array
        else:
            return []

    def dump(self, dict):
        """Copy the fields of a row dict onto this instance."""
        self.set_all(dict["referred_id"], dict["issue_desc"], dict["admin_review"], dict["date_resolved"], dict["resolved"])

    # --- accessors -------------------------------------------------------
    def get_id(self):
        return self.id

    def get_referred_id(self):
        return self.referred_id

    def get_issue_desc(self):
        return self.issue_desc

    def get_admin_review(self):
        return self.admin_review

    def get_date_resolved(self):
        return self.date_resolved

    def get_resolved(self):
        return self.resolved

    def get_next_issue(self):
        """Return the id of the oldest unresolved issue in the database.

        Bug fix: the field name was misspelled "resovled", which can never
        match the "resolved" key written by get_all(), so the lookup
        always failed.
        """
        return jsonIO.find_id(self.db, "resolved", False)

    def get_all(self):
        """Return this issue as the row dict stored in the database."""
        return {"id": self.id, "referred_id": self.referred_id, "issue_desc": self.issue_desc,
                "admin_review": self.admin_review, "date_resolved": self.date_resolved, "resolved": self.resolved}

    # --- mutators: each updates the instance and the backing row ---------
    def set_id(self, id):
        jsonIO.set_value(self.db, self.id, "id", id)
        self.id = id
        return 1

    def set_referred_id(self, referred_id):
        self.referred_id = referred_id
        jsonIO.set_value(self.db, self.id, "referred_id", referred_id)
        return 1

    def set_issue_desc(self, issue_desc):
        self.issue_desc = issue_desc
        jsonIO.set_value(self.db, self.id, "issue_desc", issue_desc)
        return 1

    def set_admin_review(self, admin_review):
        self.admin_review = admin_review
        jsonIO.set_value(self.db, self.id, "admin_review", admin_review)
        return 1

    def set_date_resolved(self, date_resolved):
        self.date_resolved = date_resolved
        jsonIO.set_value(self.db, self.id, "date_resolved", date_resolved)
        return 1

    def set_resolved(self, resolved):
        self.resolved = resolved
        jsonIO.set_value(self.db, self.id, "resolved", resolved)
        return 1

    def remove(self):
        """Delete the backing row and this instance."""
        jsonIO.del_row(self.db, self.id)
        print (self.id, ' was destroyed.')
        del self
        return 1
"k.eun121@gmail.com"
] | k.eun121@gmail.com |
f20b03ea9f1d88a2917a2b85bbb9d054631dfcf4 | b935f118a730130b7111e8a2d4a8e7fabc5be069 | /plugins/hipchat/hipchat.py | 522c73c9771ba091a6f4679744ea4cedb4237e25 | [
"Apache-2.0"
] | permissive | jeanpralo/alerta-contrib | e1e8d6f098a85032ca7fc34d144d59532332b30b | 67f6105ab753f5f6b7e505bbd9b092f5a2199a56 | refs/heads/master | 2021-01-16T20:27:12.540073 | 2015-08-29T17:17:49 | 2015-08-29T17:17:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,736 | py |
import json
import requests
from alerta.app import app
from alerta.plugins import PluginBase
LOG = app.logger
HIPCHAT_URL = 'https://api.hipchat.com/v2'
HIPCHAT_ROOM = 'room' # Room Name or Room API ID
HIPCHAT_API_KEY = 'INSERT_API_KEY' # Room Notification Token
class SendRoomNotification(PluginBase):
    """Alerta plugin that posts non-repeated alerts to a HipChat room."""

    def pre_receive(self, alert):
        """No-op hook: forward the alert unmodified."""
        return alert

    def post_receive(self, alert):
        """Send a colour-coded room notification for *alert*.

        Repeated alerts are skipped.  Raises RuntimeError when the
        HipChat API cannot be reached.
        """
        if alert.repeat:
            return

        url = '%s/room/%s/notification' % (HIPCHAT_URL, HIPCHAT_ROOM)

        summary = "<b>[%s] %s %s - <i>%s on %s</i></b> <a href=\"http://try.alerta.io/#/alert/%s\">%s</a>" % (
            alert.status.capitalize(), alert.environment, alert.severity.capitalize(), alert.event, alert.resource,
            alert.id, alert.get_id(short=True)
        )

        # Map alert severity to a HipChat card colour (default: green).
        severity_colors = {
            'critical': "red",
            'major': "purple",
            'minor': "yellow",
            'warning': "gray",
        }
        color = severity_colors.get(alert.severity, "green")

        payload = {
            "color": color,
            "message": summary,
            "notify": True,
            "message_format": "html"
        }

        LOG.debug('HipChat payload: %s', payload)

        headers = {
            'Authorization': 'Bearer ' + HIPCHAT_API_KEY,
            'Content-type': 'application/json'
        }

        try:
            r = requests.post(url, data=json.dumps(payload), headers=headers, timeout=2)
        except Exception as e:
            # Bug fix: the original passed (format, e) as two RuntimeError
            # arguments (logging-style), so the message was never formatted.
            raise RuntimeError("HipChat connection error: %s" % e)

        LOG.debug('HipChat response: %s - %s', r.status_code, r.text)
| [
"nick.satterly@guardian.co.uk"
] | nick.satterly@guardian.co.uk |
81e1a69690f1c4fa4716970b0f68d6bae4f2b0cb | b0ee373987313a540e53a2b964e14cac728e0ce3 | /raytracer/matrix.py | 6344ec759ce9790f32a670546a8f9ed9bee9d70a | [] | no_license | ozy/tracey | 2af621895cecdff66920887eddaec42280997f6b | 841d278e36ac35e25a74b9f04fb6c211cd0fff6f | refs/heads/master | 2022-01-25T03:39:21.727267 | 2019-07-20T20:47:26 | 2019-07-20T20:47:26 | 197,979,141 | 0 | 0 | null | 2019-07-20T20:42:16 | 2019-07-20T20:42:16 | null | UTF-8 | Python | false | false | 245 | py | from collections import UserList
class Matrix(UserList):
    """A width x length grid stored as a list of row lists.

    ``fill_with`` is an optional ``(callable, *args)`` tuple; each cell is
    initialised with ``callable(*args)``.  It defaults to filling with 0.
    """

    def __init__(self, width, length, fill_with=None):
        factory = fill_with or (lambda: 0,)
        # Bug fix: the original called the factory once per ROW and
        # replicated the result with ``[value] * width`` (plus a shallow
        # .copy()), so every cell in a row shared the same object — wrong
        # for mutable fill values.  Call the factory once per cell instead.
        self.data = [
            [factory[0](*factory[1:]) for _ in range(width)]
            for _ in range(length)
        ]
"btaskaya33@gmail.com"
] | btaskaya33@gmail.com |
4ca04415ca82f6c78a49f2c05e33bea128a35396 | 7ecfc46560944bd327ff206b4300a77a36c34ba8 | /homeassistant/components/nam/const.py | a9d044f2c1d1674244cc0ee5aa4d72d5cae01b4c | [
"Apache-2.0"
] | permissive | joshs85/core | 228eb9f34a362431a56b9eb61f2c8f3f8516b0c6 | 1661de5c19875205c77ee427dea28909ebbbec03 | refs/heads/dev | 2023-07-27T10:22:48.813114 | 2021-08-17T00:12:45 | 2021-08-17T00:12:45 | 334,783,818 | 0 | 0 | Apache-2.0 | 2021-08-17T00:27:21 | 2021-02-01T00:00:07 | Python | UTF-8 | Python | false | false | 7,366 | py | """Constants for Nettigo Air Monitor integration."""
from __future__ import annotations
from datetime import timedelta
from typing import Final
from homeassistant.components.sensor import (
STATE_CLASS_MEASUREMENT,
SensorEntityDescription,
)
from homeassistant.const import (
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
CONCENTRATION_PARTS_PER_MILLION,
DEVICE_CLASS_CO2,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_PRESSURE,
DEVICE_CLASS_SIGNAL_STRENGTH,
DEVICE_CLASS_TEMPERATURE,
DEVICE_CLASS_TIMESTAMP,
PERCENTAGE,
PRESSURE_HPA,
SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
TEMP_CELSIUS,
)
# Suffixes appended to a sensor's base key to address the individual
# particulate-matter size fractions reported by one physical sensor.
SUFFIX_P0: Final = "_p0"
SUFFIX_P1: Final = "_p1"
SUFFIX_P2: Final = "_p2"
SUFFIX_P4: Final = "_p4"

# Keys under which the individual sensor readings are exposed; these match
# the `key` fields of the SensorEntityDescription entries below.
ATTR_BME280_HUMIDITY: Final = "bme280_humidity"
ATTR_BME280_PRESSURE: Final = "bme280_pressure"
ATTR_BME280_TEMPERATURE: Final = "bme280_temperature"
ATTR_BMP280_PRESSURE: Final = "bmp280_pressure"
ATTR_BMP280_TEMPERATURE: Final = "bmp280_temperature"
ATTR_DHT22_HUMIDITY: Final = "dht22_humidity"
ATTR_DHT22_TEMPERATURE: Final = "dht22_temperature"
ATTR_HECA_HUMIDITY: Final = "heca_humidity"
ATTR_HECA_TEMPERATURE: Final = "heca_temperature"
ATTR_MHZ14A_CARBON_DIOXIDE: Final = "mhz14a_carbon_dioxide"
ATTR_SDS011: Final = "sds011"
ATTR_SDS011_P1: Final = f"{ATTR_SDS011}{SUFFIX_P1}"
ATTR_SDS011_P2: Final = f"{ATTR_SDS011}{SUFFIX_P2}"
ATTR_SHT3X_HUMIDITY: Final = "sht3x_humidity"
ATTR_SHT3X_TEMPERATURE: Final = "sht3x_temperature"
ATTR_SIGNAL_STRENGTH: Final = "signal"
ATTR_SPS30: Final = "sps30"
ATTR_SPS30_P0: Final = f"{ATTR_SPS30}{SUFFIX_P0}"
ATTR_SPS30_P1: Final = f"{ATTR_SPS30}{SUFFIX_P1}"
ATTR_SPS30_P2: Final = f"{ATTR_SPS30}{SUFFIX_P2}"
ATTR_SPS30_P4: Final = f"{ATTR_SPS30}{SUFFIX_P4}"
ATTR_UPTIME: Final = "uptime"

# Integration-level constants.
DEFAULT_NAME: Final = "Nettigo Air Monitor"
DEFAULT_UPDATE_INTERVAL: Final = timedelta(minutes=6)
DOMAIN: Final = "nam"
MANUFACTURER: Final = "Nettigo"

# (old unique_id suffix, new sensor key) pairs used to migrate legacy
# entity registry entries to the current naming scheme.
MIGRATION_SENSORS: Final = [
    ("temperature", ATTR_DHT22_TEMPERATURE),
    ("humidity", ATTR_DHT22_HUMIDITY),
]
# One SensorEntityDescription per supported hardware reading; each `key`
# matches the corresponding ATTR_* constant above.
SENSORS: Final[tuple[SensorEntityDescription, ...]] = (
    SensorEntityDescription(
        key=ATTR_BME280_HUMIDITY,
        name=f"{DEFAULT_NAME} BME280 Humidity",
        native_unit_of_measurement=PERCENTAGE,
        device_class=DEVICE_CLASS_HUMIDITY,
        state_class=STATE_CLASS_MEASUREMENT,
    ),
    SensorEntityDescription(
        key=ATTR_BME280_PRESSURE,
        name=f"{DEFAULT_NAME} BME280 Pressure",
        native_unit_of_measurement=PRESSURE_HPA,
        device_class=DEVICE_CLASS_PRESSURE,
        state_class=STATE_CLASS_MEASUREMENT,
    ),
    SensorEntityDescription(
        key=ATTR_BME280_TEMPERATURE,
        name=f"{DEFAULT_NAME} BME280 Temperature",
        native_unit_of_measurement=TEMP_CELSIUS,
        device_class=DEVICE_CLASS_TEMPERATURE,
        state_class=STATE_CLASS_MEASUREMENT,
    ),
    SensorEntityDescription(
        key=ATTR_BMP280_PRESSURE,
        name=f"{DEFAULT_NAME} BMP280 Pressure",
        native_unit_of_measurement=PRESSURE_HPA,
        device_class=DEVICE_CLASS_PRESSURE,
        state_class=STATE_CLASS_MEASUREMENT,
    ),
    SensorEntityDescription(
        key=ATTR_BMP280_TEMPERATURE,
        name=f"{DEFAULT_NAME} BMP280 Temperature",
        native_unit_of_measurement=TEMP_CELSIUS,
        device_class=DEVICE_CLASS_TEMPERATURE,
        state_class=STATE_CLASS_MEASUREMENT,
    ),
    SensorEntityDescription(
        key=ATTR_HECA_HUMIDITY,
        name=f"{DEFAULT_NAME} HECA Humidity",
        native_unit_of_measurement=PERCENTAGE,
        device_class=DEVICE_CLASS_HUMIDITY,
        state_class=STATE_CLASS_MEASUREMENT,
    ),
    SensorEntityDescription(
        key=ATTR_HECA_TEMPERATURE,
        name=f"{DEFAULT_NAME} HECA Temperature",
        native_unit_of_measurement=TEMP_CELSIUS,
        device_class=DEVICE_CLASS_TEMPERATURE,
        state_class=STATE_CLASS_MEASUREMENT,
    ),
    SensorEntityDescription(
        key=ATTR_MHZ14A_CARBON_DIOXIDE,
        name=f"{DEFAULT_NAME} MH-Z14A Carbon Dioxide",
        native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
        device_class=DEVICE_CLASS_CO2,
        state_class=STATE_CLASS_MEASUREMENT,
    ),
    # Particulate-matter sensors have no standard device class; they use
    # the "blur" icon instead.
    SensorEntityDescription(
        key=ATTR_SDS011_P1,
        name=f"{DEFAULT_NAME} SDS011 Particulate Matter 10",
        native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
        icon="mdi:blur",
        state_class=STATE_CLASS_MEASUREMENT,
    ),
    SensorEntityDescription(
        key=ATTR_SDS011_P2,
        name=f"{DEFAULT_NAME} SDS011 Particulate Matter 2.5",
        native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
        icon="mdi:blur",
        state_class=STATE_CLASS_MEASUREMENT,
    ),
    SensorEntityDescription(
        key=ATTR_SHT3X_HUMIDITY,
        name=f"{DEFAULT_NAME} SHT3X Humidity",
        native_unit_of_measurement=PERCENTAGE,
        device_class=DEVICE_CLASS_HUMIDITY,
        state_class=STATE_CLASS_MEASUREMENT,
    ),
    SensorEntityDescription(
        key=ATTR_SHT3X_TEMPERATURE,
        name=f"{DEFAULT_NAME} SHT3X Temperature",
        native_unit_of_measurement=TEMP_CELSIUS,
        device_class=DEVICE_CLASS_TEMPERATURE,
        state_class=STATE_CLASS_MEASUREMENT,
    ),
    SensorEntityDescription(
        key=ATTR_SPS30_P0,
        name=f"{DEFAULT_NAME} SPS30 Particulate Matter 1.0",
        native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
        icon="mdi:blur",
        state_class=STATE_CLASS_MEASUREMENT,
    ),
    SensorEntityDescription(
        key=ATTR_SPS30_P1,
        name=f"{DEFAULT_NAME} SPS30 Particulate Matter 10",
        native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
        icon="mdi:blur",
        state_class=STATE_CLASS_MEASUREMENT,
    ),
    SensorEntityDescription(
        key=ATTR_SPS30_P2,
        name=f"{DEFAULT_NAME} SPS30 Particulate Matter 2.5",
        native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
        icon="mdi:blur",
        state_class=STATE_CLASS_MEASUREMENT,
    ),
    SensorEntityDescription(
        key=ATTR_SPS30_P4,
        name=f"{DEFAULT_NAME} SPS30 Particulate Matter 4.0",
        native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
        icon="mdi:blur",
        state_class=STATE_CLASS_MEASUREMENT,
    ),
    SensorEntityDescription(
        key=ATTR_DHT22_HUMIDITY,
        name=f"{DEFAULT_NAME} DHT22 Humidity",
        native_unit_of_measurement=PERCENTAGE,
        device_class=DEVICE_CLASS_HUMIDITY,
        state_class=STATE_CLASS_MEASUREMENT,
    ),
    SensorEntityDescription(
        key=ATTR_DHT22_TEMPERATURE,
        name=f"{DEFAULT_NAME} DHT22 Temperature",
        native_unit_of_measurement=TEMP_CELSIUS,
        device_class=DEVICE_CLASS_TEMPERATURE,
        state_class=STATE_CLASS_MEASUREMENT,
    ),
    # Diagnostic entities: disabled by default in the entity registry.
    SensorEntityDescription(
        key=ATTR_SIGNAL_STRENGTH,
        name=f"{DEFAULT_NAME} Signal Strength",
        native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
        device_class=DEVICE_CLASS_SIGNAL_STRENGTH,
        entity_registry_enabled_default=False,
        state_class=STATE_CLASS_MEASUREMENT,
    ),
    SensorEntityDescription(
        key=ATTR_UPTIME,
        name=f"{DEFAULT_NAME} Uptime",
        device_class=DEVICE_CLASS_TIMESTAMP,
        entity_registry_enabled_default=False,
    ),
)
| [
"noreply@github.com"
] | joshs85.noreply@github.com |
847cdbbd6b26df96e6d469bbed90d7705e42b96b | 570c80464aaef29b76dbf7836c407da772eb301f | /main/helpers.py | d028b420335ea430143d51b9449dac9900f8663b | [] | no_license | YukkuriC/django_fillin_oj | afcf35d7ee093691f95e65791229c33640dbad34 | 7e5e1d87633a6ccbef20288c485f200384d1f7d6 | refs/heads/master | 2020-06-04T05:48:42.384254 | 2019-07-11T07:01:55 | 2019-07-11T07:01:55 | 191,894,297 | 3 | 0 | null | 2019-06-16T16:17:52 | 2019-06-14T07:12:34 | Python | UTF-8 | Python | false | false | 5,799 | py | import os
from django.db import models
from django.dispatch import receiver
from django.contrib import admin, messages
from django.shortcuts import redirect
from django.http import JsonResponse, HttpRequest
from django.utils import timezone
from django.core import mail
from django.conf import settings
from django.template import loader
from django.shortcuts import render
from usr_sys import models as usr_models
import json
def auto_admin(model_pool):
'''一行注册admin'''
for md_name in dir(model_pool):
md = getattr(model_pool, md_name)
if isinstance(md, models.base.ModelBase):
tmp = []
for name, field in md.__dict__.items():
if isinstance(field, models.query_utils.DeferredAttribute):
tmp.append(name)
class AutoAdmin(admin.ModelAdmin):
list_display = tmp
try:
admin.site.register(md, AutoAdmin)
except admin.sites.AlreadyRegistered:
pass
def set_autodelete(local_dict, model, field):
'''
使FileField自动清理文件
'''
def auto_delete_file_on_delete(sender, instance, **kwargs):
file_field = getattr(instance, field, None)
if file_field:
if os.path.isfile(file_field.path):
os.remove(file_field.path)
def auto_delete_file_on_change(sender, instance, **kwargs):
if not instance.pk:
return
try:
old_file = getattr(model.objects.get(pk=instance.pk), field)
except model.DoesNotExist:
return
new_file = getattr(instance, field)
if not old_file == new_file:
if os.path.isfile(old_file.path):
os.remove(old_file.path)
del1 = '%s_%s_del1' % (model.__name__, field)
del2 = '%s_%s_del2' % (model.__name__, field)
local_dict[del1] = auto_delete_file_on_delete
local_dict[del2] = auto_delete_file_on_change
models.signals.post_delete.connect(local_dict[del1], model)
models.signals.pre_save.connect(local_dict[del2], model)
def show_date(date):
""" 显示日期 """
try:
d_show = date.timetuple()
except:
return '-'
d_now = timezone.now().timetuple()
if d_show[:3] == d_now[:3]: # 同一天显示时间
pattern = "%H:%M"
if d_show[3:5] == d_now[3:5]: # 时分相同
pattern += ':%S'
elif d_show[0] == d_now[0]: # 同年显示月日
pattern = "%m{M}%d{D}"
else: # 不同年显示年月
pattern = "%Y{Y}%m{M}"
return date.strftime(pattern).format(Y='年', M='月', D='日')
if 'user system':
def login_required(req_yes, req_email=True, target=None):
if target == None:
target = '/login/' if req_yes else '/home/'
def decorator(func):
def wrap(req, *a, **kw):
if bool(req.session.get('userid')) == req_yes:
if req_yes and req_email and not get_user(
req).email_validated:
return redirect('/validate/')
return func(req, *a, **kw)
return redirect(target)
return wrap
return decorator
def get_user(request, update_login=False):
try:
user = usr_models.User.objects.get(id=request.session['userid'])
except:
return None
user.login_datetime = timezone.now()
user.save()
return user
def set_user(request, user):
request.session['userid'] = user.id
request.session['username'] = user.name
if user.is_admin:
request.session['username'] += ' (管理员)'
def send_valid_email(user, request, type='valid'):
if type == 'forgotpw': # 忘记密码
mail_class = usr_models.UserResetPwMail
mail_name = 'userresetpwmail'
else: # 新用户激活
mail_class = usr_models.UserMailCheck
mail_name = 'usermailcheck'
# create email checker
if hasattr(user, mail_name):
checker = getattr(user, mail_name)
if timezone.now() < checker.send_time + timezone.timedelta(
minutes=settings.EMAIL_VALID_RESEND_MINUTES):
messages.warning(request, '邮件发送过于频繁')
return False
else:
checker = mail_class()
checker.user = user
checker.send_time = timezone.now()
checker.activate()
# send email
expire_time = checker.send_time + timezone.timedelta(
days=settings.EMAIL_VALID_LAST_DAYS)
http = 'https' if request.is_secure() else 'http'
host = request.META['HTTP_HOST']
if type == 'forgotpw':
link = '%s://%s/forgotpasswd/%s' % (http, host, checker.check_hash)
title = 'PyFillin OJ重设密码'
template_name = 'email/resetpasswd.html'
else:
link = '%s://%s/validate/%s' % (http, host, checker.check_hash)
title = 'PyFillin OJ激活邮件'
template_name = 'email/activation.html'
html_content = loader.render_to_string(template_name, locals())
mail.send_mail(
title,
html_content,
settings.EMAIL_HOST_USER, [user.stu_code + '@pku.edu.cn'],
html_message=html_content)
messages.info(request, '邮件发送成功')
return True
if 'pages':
def sorry(request,
code=404,
title='Oops...',
text=['你来到了', '一片没有知识的', '荒原']):
if isinstance(text, str):
text = [text]
return render(request, 'sorry.html', locals(), status=code)
| [
"799433638@qq.com"
] | 799433638@qq.com |
9aefade78806c988cb28e7a2ffcebd4f578c3432 | 55333fd7ec8d2667a885c21256d894716d3b2c22 | /scripts/GMLParser.py | 51805ebdd802b318372c146fceffffa368c41428 | [] | no_license | jura-g/MultiUAV_Simulator | ffece72747069b5bcada9b8fe64c5d84397d777b | 17a918c6270010d582cec308cea0438d3741e1ea | refs/heads/master | 2022-12-06T01:15:12.259199 | 2020-09-03T14:13:55 | 2020-09-03T14:13:55 | 254,403,809 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,717 | py | from osgeo import ogr
from osgeo.osr import SpatialReference
import json
class GMLParser:
    """Collect WGS84 coordinates from the geometries stored in a GML file.

    All methods whose names start with "getPoints" return a list of lists
    of coordinates: MULTI* geometries may contain several parcels (several
    coordinate lists), while POINT / LINESTRING / POLYGON contribute
    exactly one.  Points are (longitude, latitude) tuples.
    """

    def __init__(self):
        self.points_dict = {}

    def getPointsFromMultipolygon(self, geometry):
        """Return one outer-ring coordinate list per polygon."""
        polygonCount = geometry.GetGeometryCount()
        points = []
        for i in range(polygonCount):
            polygon = geometry.GetGeometryRef(i)
            points.append(self.getPointsFromPolygon(polygon)[0])
        return points

    def getPointsFromMultilinestring(self, geometry):
        """Return the coordinate lists of every contained line string."""
        # NOTE(review): originally marked "not sure" — untested geometry type.
        lineStringCount = geometry.GetGeometryCount()
        points = []
        for i in range(lineStringCount):
            lineString = geometry.GetGeometryRef(i)
            points.append(self.getPointsFromLineString(lineString)[0])
        return [points]

    def getPointsFromPolygon(self, geometry):
        """Return the points of the polygon's outer linear ring."""
        linearRing = geometry.GetGeometryRef(0)
        points = linearRing.GetPoints()
        return [points]

    def getPointsFromLineString(self, geometry):
        """Return the points of the line string's first sub-geometry."""
        # NOTE(review): originally marked "not sure" — untested geometry type.
        line = geometry.GetGeometryRef(0)
        points = line.GetPoints()
        return [points]

    def getPointsFromMultipoint(self, geometry):
        """Return all points of the multipoint as one coordinate list."""
        # NOTE(review): originally marked "not sure" — untested geometry type.
        points = geometry.GetPoints()
        return [points]

    def getPointFromPoint(self, geometry):
        """Return the single point of a POINT geometry."""
        # Bug fix: the OGR Geometry accessors are GetX()/GetY();
        # the original called the non-existent getX()/getY().
        point = (geometry.GetX(), geometry.GetY())
        return [[point]]

    def getPoints(self, geometry):
        """Dispatch on the geometry type; return -1 for unknown types."""
        gtype = geometry.GetGeometryType()
        name = geometry.GetGeometryName()
        if gtype == 6 and name == "MULTIPOLYGON":
            return self.getPointsFromMultipolygon(geometry)
        elif gtype == 5 and name == "MULTILINESTRING":
            return self.getPointsFromMultilinestring(geometry)
        elif gtype == 4 and name == "MULTIPOINT":
            return self.getPointsFromMultipoint(geometry)
        elif gtype == 3 and name == "POLYGON":
            return self.getPointsFromPolygon(geometry)
        elif gtype == 2 and name == "LINESTRING":
            return self.getPointsFromLineString(geometry)
        elif gtype == 1 and name == "POINT":
            return self.getPointFromPoint(geometry)
        else:
            print("GMLParser: Unrecognized geometry type: ", name)
            return -1

    def getCoordinatesDictionary(self):
        """Return the dict built by the last parse() call."""
        return self.points_dict

    def parse(self, GMLfile):
        """Read *GMLfile*, reproject every feature to WGS84 and collect points.

        Fills ``points_dict`` with one "Layer-i Feature-j" entry per
        feature plus a flat "coordinates" entry holding all parcels.
        """
        ogr.RegisterAll()
        inSource = ogr.Open(GMLfile)
        self.points_dict = {}
        for layerIndex in range(inSource.GetLayerCount()):
            inLayer = inSource.GetLayer(layerIndex)
            inLayer.ResetReading()  # not necessary, ensures iterating from beginning
            for featureIndex in range(inLayer.GetFeatureCount()):
                feature = inLayer.GetNextFeature()
                geometry = feature.GetGeometryRef()
                targetReference = SpatialReference()
                targetReference.ImportFromEPSG(4326)  # WGS84
                geometry.TransformTo(targetReference)
                points = self.getPoints(geometry)
                entryName = "Layer-" + str(layerIndex) + " Feature-" + str(featureIndex)
                self.points_dict[entryName] = points
                # Bug fix: dict.has_key() was removed in Python 3 (the rest
                # of this file already uses py3 print()); use ``in``.
                if 'coordinates' in self.points_dict:
                    self.points_dict['coordinates'] = self.points_dict['coordinates'] + points
                else:
                    self.points_dict['coordinates'] = points

    def exportToJSON(self):
        """Dump the collected coordinates to a JSON file in the CWD."""
        with open('WGS84_coordinates_from_GML.json', 'w') as file:
            json.dump(self.points_dict, file, indent=4)
if __name__ == '__main__':
    # Sample inputs used during development; only the first one is active.
    inSource = "/home/ivan/Downloads/katastarski_plan_CESTICE.gml"
    # inSource = /home/ivan/Downloads/Building_9620123VK0192B.gml"
    # inSource = "/home/ivan/Downloads/Building_9531109VK0193B.gml"
    # inSource = "/home/ivan/Downloads/Building_9642901VK3794B.gml"
    # NOTE(review): hard-coded absolute path — breaks on other machines.
    parser = GMLParser()
    parser.parse(inSource)
    print(parser.getCoordinatesDictionary())
| [
"ivan.pavlak3@gmail.com"
] | ivan.pavlak3@gmail.com |
d8b199029d370056116fe675100bb43ed608dd91 | 91e31243d4f7f6610fc7bcf4b54ef54432ba9baf | /directory-creation/path2json.py | aef4a5333b9ac0d3dce555cb394d35b8e6013866 | [] | no_license | sdhutchins/code-haven | a7c1bda18796f33ba993a9194808901cc2dfeabb | cab72c086e42ec2af22f8b67afb9c2eeb607a6cd | refs/heads/master | 2020-06-16T13:54:24.331907 | 2019-07-30T18:05:10 | 2019-07-30T18:05:10 | 195,599,675 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,537 | py | # -*- coding: utf-8 -*-
"""
File Name:
Description: This script creates a shell directory structure similar to the path
inserted into the script.
Author: shutchins2
Date Created: Wed Apr 5 14:20:49 2017
Project Name:
"""
import os
import json
def path_to_dict(path):
    """Recursively describe *path* as a nested name/type/children dict.

    Files become ``{'name': ..., 'type': 'file'}``; directories also get
    a ``'children'`` list.  ``.git`` entries are skipped.
    """
    entry = {'name': os.path.basename(path)}
    if not os.path.isdir(path):
        entry['type'] = "file"
        return entry
    entry['type'] = "directory"
    entry['children'] = [
        path_to_dict(os.path.join(path, child))
        for child in os.listdir(path)
        if child != '.git'
    ]
    return entry
def save_to_json(path, jsonfilename):
    """Serialise the directory tree rooted at *path* to ``<jsonfilename>.json``."""
    tree = path_to_dict(path)
    with open(jsonfilename + '.json', 'w') as outfile:
        json.dump(tree, outfile, indent=4)
def json_to_project(jsonfile):
    """Load and return the directory-tree dict stored in *jsonfile*."""
    with open(jsonfile, 'r') as f:
        return json.load(f)
def write_file(filename):
    """Create (or truncate) *filename* as an empty file.

    Idiom fix: use a context manager so the handle is closed even if
    something between open() and close() raises.
    """
    with open(filename, "w"):
        pass
# Recreate the directory skeleton described by example.json, two levels
# deep (root children and grandchildren only — deeper nodes are ignored).
pathdict = json_to_project('example.json')
rootdir = pathdict['name']
os.makedirs(rootdir, exist_ok=True)
for key in pathdict['children']:
    if key['type'] == 'directory':
        topdir = os.path.join(rootdir, key['name'])
        os.makedirs(topdir, exist_ok=True)
        # NOTE(review): the inner loop rebinds `key`, shadowing the outer
        # loop variable — works only because nothing reads it afterwards.
        for key in key['children']:
            if key['type'] == 'directory':
                subdir = os.path.join(topdir, key['name'])
                os.makedirs(subdir, exist_ok=True)
            elif key['type'] == 'file':
                write_file(os.path.join(topdir, key['name']))
    elif key['type'] == 'file':
        write_file(os.path.join(rootdir, key['name']))
| [
"sdhutchins@outlook.com"
] | sdhutchins@outlook.com |
ea9b2459a9d48926e1f7b9b548bc3141bafc951f | c3ec150169c7da6a3d8edd942538a3741e8d5c48 | /app.py | 897d45b9f9697f43c4ad8749daa59d5b9ba17b5d | [] | no_license | Apophus/socialapp | c588a098395ceb964d2e3d7223d14e0ad3a3d61a | 644cb82001b7502607d6984a16ed3fdcd7b627d9 | refs/heads/master | 2020-04-02T13:21:26.700587 | 2016-07-11T21:02:11 | 2016-07-11T21:02:11 | 62,619,856 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,508 | py | #!usr/bin/python
from flask import Flask, g, render_template, redirect, url_for, flash
from flask.ext.login import LoginManager
import models
import forms
# Server settings used by app.run() under __main__.
DEBUG = True
PORT = 8000
HOST = '0.0.0.0'

app = Flask(__name__)
# NOTE(review): hard-coded secret key — move to an env var before deploying.
app.secret_key ='hard to guess'

# Flask-Login setup; unauthenticated users are redirected to the
# 'login' view (not defined in this file — TODO confirm it exists).
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'login'
@login_manager.user_loader
def load_user(userid):
    """Flask-Login callback: return the User with *userid*, or None if absent."""
    try:
        return models.User.get(models.User.id == userid)
    except models.DoesNotExist:
        return None
@app.before_request
def before_request():
    """Connect to the database before each request."""
    # Bug fix: the original used ``@app.before_request()`` — calling the
    # decorator with no arguments raises a TypeError at import time.
    g.db = models.DATABASE
    g.db.connect()
@app.after_request
def after_request(response):
    """Close the database connection after each request and pass the response on."""
    # Bug fix: Flask passes the outgoing response to after_request
    # handlers; the original took no parameter yet returned an undefined
    # ``response`` name, raising NameError on every request.
    g.db.close()
    return response
@app.route('/register', methods=['GET', 'POST'])
def register():
    """Register a new account; on success redirect to the index page."""
    form = forms.RegisterForm()
    if form.validate_on_submit():
        flash("you are registered", "success")
        # Bug fix: the model helper is ``create_user`` (see the bootstrap
        # call under __main__); ``create_User`` raised AttributeError.
        models.User.create_user(
            username=form.username.data,
            email=form.email.data,
            password=form.password.data
        )
        return redirect(url_for('index'))
    return render_template('register.html', form=form)
@app.route('/')
def index():
    """Placeholder landing page."""
    greeting = 'Hey'
    return greeting
if __name__ == '__main__':
    # Create tables, then seed an admin account before starting the server.
    models.initialize()
    # NOTE(review): this runs on every start — presumably create_user
    # raises once the account already exists; confirm it is idempotent.
    models.User.create_user(
        username="Larrisa",
        email="larrisa@gmail.com",
        password='password',
        admin=True
    )
    app.run(debug=DEBUG, port=PORT, host=HOST)
"bkilel12@gmail.com"
] | bkilel12@gmail.com |
0cd9a166c4c8af939bbc531934e7642245708d0b | 976cd1a0a67b94aeeb2eeb88c665475f2f7c6336 | /Cursovaya/mysite/orders/migrations/0001_initial.py | d37c7f64c7754d938eaf725751cc8785890272f3 | [] | no_license | AygulAzizova/recommendation_system_apriori | 7fc75374a14decc32cc2a22b294a418755cdeb50 | 54d444c08b465b6f999a36f2a6264b962664c316 | refs/heads/master | 2022-11-10T09:08:19.390543 | 2019-03-29T18:03:19 | 2019-03-29T18:03:19 | 178,451,072 | 0 | 1 | null | 2022-10-29T07:03:45 | 2019-03-29T17:43:11 | Python | UTF-8 | Python | false | false | 1,875 | py | # Generated by Django 2.0.4 on 2018-05-02 12:06
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated Django migration creating the Order and OrderItem
    # tables (verbose names are in Russian).  Do not edit by hand.

    initial = True

    dependencies = [
        ('catalog', '0005_auto_20180501_1158'),
    ]

    operations = [
        migrations.CreateModel(
            name='Order',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('first_name', models.CharField(max_length=50)),
                ('last_name', models.CharField(max_length=50)),
                ('email', models.EmailField(max_length=254)),
                ('created', models.DateTimeField(auto_now_add=True, verbose_name='Создан')),
                ('updated', models.DateTimeField(auto_now=True, verbose_name='Обновлен')),
                ('paid', models.BooleanField(default=False, verbose_name='Оплачен')),
            ],
            options={
                'verbose_name': 'Заказ',
                'verbose_name_plural': 'Заказы',
                'ordering': ('-created',),
            },
        ),
        migrations.CreateModel(
            name='OrderItem',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('price', models.DecimalField(decimal_places=2, max_digits=10, verbose_name='Цена')),
                ('quantity', models.PositiveIntegerField(default=1, verbose_name='Количество')),
                ('order', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='items', to='orders.Order')),
                ('tour', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='order_items', to='catalog.Tour')),
            ],
        ),
    ]
| [
"aygulazizova8@gmail.com"
] | aygulazizova8@gmail.com |
1a82a7750a74dbbc00bab268edae24184b44176d | 9ede52351cb6753864e10b1df9c4f3a72e2b7a6d | /Code/Make_Graphs.py | ccb501316e44759712c9dc376eb207dbff7cbbee | [] | no_license | dmfeuerman/Stock_Bot | ca651f2f45c3261a515e5501adb4dded4a932374 | 7b833ca83650e27a8e2273e6da047fe870eb73de | refs/heads/master | 2023-03-16T06:18:09.720724 | 2021-03-16T20:47:46 | 2021-03-16T20:47:46 | 346,143,837 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 531 | py | import pandas as pd
import matplotlib.pyplot as plt
def _plot_transposed_csv(csv_path, pdf_path):
    """Load *csv_path* (indexed by Date), transpose, plot and save to *pdf_path*."""
    df = pd.read_csv(csv_path, index_col='Date')
    df = df.T
    df.plot(legend=True)
    plt.savefig(pdf_path)


def Graph_Count():
    """Plot the totals CSV and save the figure as a PDF."""
    _plot_transposed_csv('/outfiles/Sorted_data/Graph_total.csv',
                         "/home/dylan/Documents/StockBot/outfiles/Sorted_data/Graph.pdf")


def Graph_Price():
    """Plot the price data and save the figure as a PDF.

    NOTE(review): this was a byte-for-byte copy of Graph_Count (same CSV,
    same output path) — deduplicated via the helper; presumably one of the
    two was meant to read a different file/column.  TODO confirm.
    """
    _plot_transposed_csv('/outfiles/Sorted_data/Graph_total.csv',
                         "/home/dylan/Documents/StockBot/outfiles/Sorted_data/Graph.pdf")
| [
"dmfeuerman@loyola.edu"
] | dmfeuerman@loyola.edu |
3b1c725e110cc52229ac85127b2ebf8be1c19994 | 3aa5b2b1a7b6d3ee66ad6ccebf157363c5f910a0 | /Testcases/NetApp_OEM_Chinese/test_cp_5_NetAppHCI/test_cp_5_dashboard_chinese.py | 68c4da876c34d85642d0be348df265d07dde0e98 | [] | no_license | 2sumanthk/PythonAutomationAIQCA | fd04c0b0ea5617c41298ee31d51e3f6fdafdcc6a | 60aae6e5a9de9e1eb2ee3d8e507beaf640576d94 | refs/heads/master | 2023-01-05T02:56:54.685107 | 2020-10-19T06:06:16 | 2020-10-19T06:06:16 | 297,239,630 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 857 | py | from functions.base_functions_zh import *
import pytest
import allure
from functions.ExcelFunctions import ReadWriteExcel
# Reading data from excel
# NOTE(review): hard-coded absolute Windows path — breaks on any other
# machine; consider a path relative to the repository root.
data_read = ReadWriteExcel(
    "C://Users//Sumanth//PycharmProjects//PythonTesting//Resources//configurations//testdata//Test_data.xlsx")
# filename to be used in screen shot
test_file_name = os.path.basename(__file__)
def test_netapp_hci_dashboard_one():
driver.get(properties_china('health_check_url'))
maximize_window()
sleep(2)
actual_output = find_element("dashboard_card_title_NetAppHCI_xpath")
expected_output = properties_china('check_point5_NetAppHCI_string')
print("Element Returned Test: ", actual_output.text)
highlight_element(actual_output)
capture_screenshot(test_file_name)
assert actual_output.text == expected_output
test_netapp_hci_dashboard_one() | [
"2sumanthk@gmail.com"
] | 2sumanthk@gmail.com |
f64dc8b8c4c99f1ad42711b9cde1f13acf3c8314 | 46ee4291335b92547d95baaf90a0747b5862b861 | /packages/pathfinder/package.py | 8986347c2aea50da6302c12c7ca291e7284c4bbe | [] | no_license | pkondamudi/proxy-apps-spack | 90e87dad19cb2e9c0e12f676f8bd6533447994da | cdfd29033b69c011821e2eb6bfde27cc06d0b522 | refs/heads/master | 2021-01-21T22:02:14.074001 | 2017-06-22T20:01:18 | 2017-06-22T20:01:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,313 | py | ##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
from os import listdir
class Pathfinder(MakefilePackage):
"""Signature search."""
homepage = "https://mantevo.org/packages/"
url = "http://mantevo.org/downloads/releaseTarballs/miniapps/PathFinder/PathFinder_1.0.0.tgz"
version('1.0.0', '374269e8d42c305eda3e392444e22dde')
depends_on('openmpi')
build_targets = ['--directory=PathFinder_ref']
def edit(self, spec, prefix):
makefile = FileFilter('PathFinder_ref/Makefile')
makefile.filter('CC=.*', 'CC=cc')
def install(self, spec, prefix):
# Manual installation
mkdir(prefix.bin)
mkdir(join_path(prefix, 'generatedData'))
mkdir(join_path(prefix, 'scaleData'))
install('PathFinder_ref/PathFinder.x', prefix.bin)
for f in listdir(join_path(self.build_directory, 'generatedData')):
install('generatedData/{}'.format(f), join_path(prefix, 'generatedData'))
for f in listdir(join_path(self.build_directory, 'scaleData')):
install('scaleData/{}'.format(f), join_path(prefix, 'scaleData'))
| [
"bhomerding@jlselogin1.ftm.alcf.anl.gov"
] | bhomerding@jlselogin1.ftm.alcf.anl.gov |
9e28302446039124857680f45fcc730c89a193f7 | a5edf77af0c51f5137ab5c7bc38712d3feed4919 | /SVM/svm2.py | 815bfe24070c66511e6e7a957165c555f80e5420 | [] | no_license | Sumitsami14/NAI | 4be4c5b130cd3b5b3de820669040fb2e9605d97f | a3805ca3063b8e836b3b12c8aec1f9b9d91f766a | refs/heads/main | 2023-02-21T21:47:58.438178 | 2021-01-29T19:58:00 | 2021-01-29T19:58:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,760 | py | #Autorzy : Jan Rygulski , Dominika Stryjewska
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn import svm
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier
'''
Program ma za zadanie klasyfikować dane za pomocą SVM i drzew decyzyjnych.
Baza która postanowilikśmy poddać klasyfikacji są owoce.
zbiór danych został zebrany samodzielnie.
zbiór danych zawiera:
-kolor owocu
- masa
- wysokość
- szerokość
- gatunek
'''
# definiujemy nazwy dla column
colnames = ["class", "mass", "width", "height", "color"]
#odczytujemy dane z pliku
dataset = pd.read_csv('fruit_data.txt', header=None, names=colnames)
print(dataset)
dataset = dataset.replace({"class": {"apple": 1,
"mandarin": 2,
"orange": 3,
"green apple": 4,
"lemon": 5,
"tomato": 6,
"banana": 7,
"pearl": 8}})
dataset = dataset.replace({"color": {"red": 1,
"orange": 2,
"green": 3,
"yellow": 4}})
print(dataset)
sns.heatmap(dataset.corr())
plt.title('Correlation')
plt.show()
X = dataset.iloc[:, 2:4].values
y = dataset.iloc[:, 0].values
X1 = dataset.iloc[:, 1:].values
X_train, X_test, y_train, y_test = train_test_split(
X1, y, test_size=0.30, random_state=0)
params = {'random_state': 0, 'max_depth': 4}
classifier = DecisionTreeClassifier(**params)
classifier.fit(X_train, y_train)
X_new_fruit = np.array([[116, 6.3, 7.7, 3]])
prediction = classifier.predict(X_new_fruit)
print(prediction)
print("dokładność zestawu testowego : ", classifier.score(X_test, y_test))
# SVC
svc = svm.SVC(kernel='linear', C=1, gamma=1).fit(X, y)
# create a mesh to plot in
h = 0.02 # step size in the mesh
x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
np.arange(y_min, y_max, h))
plt.subplot(1, 1, 1)
Z = svc.predict(np.c_[xx.ravel(), yy.ravel()])
# Put the result into a color plot
Z = Z.reshape(xx.shape)
plt.contourf(xx, yy, Z, cmap=plt.cm.Paired, alpha=0.8)
# Plot also the training points
plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.Paired)
plt.xlabel('width')
plt.ylabel('height')
plt.xlim(xx.min(), xx.max())
plt.ylim(yy.min(), yy.max())
plt.title('SVC with Linear kernel')
plt.show()
| [
"noreply@github.com"
] | Sumitsami14.noreply@github.com |
e0bfafd57b7eba7ffdb19e13857be41828c5d360 | 5878062d93a3e9cf150127aa1b4a1f3531183f72 | /train.py | 68efe4d3ee5e933a9ed8484276fe77b1cb3f2685 | [] | no_license | abhijeeetkumar/siamFCR | 52c4d2a57ca60c9ea5924070ba3dbd0919811913 | deb35af3f9094c13c1b8f705830e5ebd765d8482 | refs/heads/master | 2023-04-08T17:08:30.001479 | 2021-04-16T03:16:27 | 2021-04-16T03:16:27 | 352,776,215 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,548 | py | import os
import sys
import torch
from torch.utils.data import DataLoader
from got10k.datasets import ImageNetVID, GOT10k, VOT
from pairwise import Pairwise
from siamfc import TrackerSiamFC
if __name__ == '__main__':
# setup dataset
name = 'VOT'
assert name in ['VID', 'GOT-10k', 'VOT']
if name == 'GOT-10k':
root_dir = 'data/GOT-10k'
seq_dataset = GOT10k(root_dir, subset='train')
elif name == 'VID':
root_dir = 'data/ILSVRC'
seq_dataset = ImageNetVID(root_dir, subset=('train', 'val'))
elif name == 'VOT':
root_dir = 'dataset/data/vot2018/'
seq_dataset = VOT(root_dir)
pair_dataset = Pairwise(seq_dataset)
#setup data loader
cuda = torch.cuda.is_available()
loader = DataLoader(
pair_dataset, batch_size=8, shuffle=True,
pin_memory=cuda, drop_last=True, num_workers=4)
#setup tracker
tracker = TrackerSiamFC()
print('tracker created')
#path for saving checkpoints
net_dir = 'network/siamfc' #siamFCR
if not os.path.exists(net_dir):
os.makedirs(net_dir)
#training loop
epoch_num = 50
for epoch in range(epoch_num):
for step, batch in enumerate(loader):
loss = tracker.step(batch, backward=True, update_lr=(step ==0))
if step % 20 == 0:
print('Epoch [{}][{}/{}]: Loss: {:.3f}'.format(
epoch + 1, step + 1, len(loader), loss))
sys.stdout.flush()
#save checkpoint
net_path = os.path.join(net_dir, 'model_e%d.pth' % (epoch + 1))
torch.save(tracker.net.state_dict(), net_path)
| [
"abhijeet.kumar@psu.edu"
] | abhijeet.kumar@psu.edu |
608aeac97db4c8e1695b7f3d1f937739a8d47049 | 7728ce58c771c4f643b27c2854f13fa4587a7c91 | /color.py | 7168ef5a9536039c0e25326308b2c366c3c1b629 | [
"MIT"
] | permissive | albedo-geo/Image-to-Text | b2a9cbffc25c532fb72695b91110340c00c5e048 | 7c34f9805aee6d9aa3d7bbcefbd84c2977c4f305 | refs/heads/master | 2023-07-12T11:22:46.960942 | 2021-08-25T04:08:14 | 2021-08-25T04:08:14 | 399,685,265 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,344 | py | import numpy as np
import cv2
from PIL import Image, ImageFont, ImageDraw, ImageFilter
import random
from pathlib import Path
import time
from tqdm import tqdm
def get_alphabet(choice) -> str:
"""get the alphabet used to print on the output image"""
if choice == 'uppercase':
return 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
elif choice == 'lowercase':
return 'abcdefghijklmnopqrstuvwxyz'
elif choice == 'alphabet':
return get_alphabet('uppercase') + get_alphabet('lowercase')
elif choice == 'number':
return '0123456789'
elif choice == 'alphanumeric':
return get_alphabet('alphabet') + get_alphabet('number')
elif choice == 'symbol':
return r'~!@#$%^&*()-_=+[]{}\|;:,<.>/?"'
elif choice == 'random':
return get_alphabet('alphanumeric') + get_alphabet('symbol')
def get_background(choice: str, origin, width, height) -> Image.Image:
"""generate a canvas to print"""
if choice == 'transparent':
# 4-channel
return Image.fromarray(np.uint8(np.zeros((height, width, 4))))
elif choice == 'black':
return Image.fromarray(np.uint8(np.zeros((height, width, 3))))
elif choice == 'white':
return Image.fromarray(np.uint8(np.ones((height, width, 3)) * 255))
elif choice == 'mean':
mean = np.mean(np.array(origin)[:])
return Image.fromarray(np.uint8(np.ones((height, width, 3)) * mean))
elif choice.startswith('origin'):
opacity = float(choice[-1]) / 10
canvas = origin.resize((width, height), Image.BICUBIC).filter(
ImageFilter.GaussianBlur(25)
)
canvas = np.array(canvas)
canvas = np.uint8(canvas[:, :, 0:3] * opacity)
return Image.fromarray(canvas)
def color(
input: str,
output: str = None,
rows: int = 100,
alphabet='uppercase',
background='origin7',
out_height: int = None,
scale: float = None,
):
"""output colorful text picture"""
input_path = Path(input)
# the original image
origin = Image.open(input_path)
width, height = origin.size
print(f'input size: {origin.size}')
# text amount of the output image
text_rows = rows
text_cols = round(width / (height / text_rows) * 1.25) # char height-width ratio
origin_ref_np = cv2.resize(
np.array(origin), (text_cols, text_rows), interpolation=cv2.INTER_AREA
)
origin_ref = Image.fromarray(origin_ref_np)
# font properties
fontsize = 17
font = ImageFont.truetype('courbd.ttf', fontsize)
char_width = 8.88
char_height = 11
# output size depend on the rows and cols
canvas_height = round(text_rows * char_height)
canvas_width = round(text_cols * char_width)
# a canvas used to draw texts on it
canvas = get_background(background, origin, canvas_width, canvas_height)
print(f'canvas size: {canvas.size}')
# start drawing
since = time.time()
print(f'Start transforming {input_path.name}')
draw = ImageDraw.Draw(canvas)
charlist = get_alphabet(alphabet)
length = len(charlist)
for i in tqdm(range(text_cols)):
for j in range(text_rows):
x = round(char_width * i)
y = round(char_height * j - 4)
char = charlist[random.randint(0, length - 1)]
color = origin_ref.getpixel((i, j))
draw.text((x, y), char, fill=color, font=font)
# resize the reproduct if necessary
if out_height: # height goes first
canvas_height = out_height
canvas_width = round(width * canvas_height / height)
canvas = canvas.resize((canvas_width, canvas_height), Image.BICUBIC)
elif scale:
canvas_width = round(width * scale)
canvas_height = round(height * scale)
canvas = canvas.resize((canvas_width, canvas_height), Image.BICUBIC)
# output filename
if output:
output_path = Path(output)
else:
output_path = input_path.with_name(
f'{input_path.stem}_{canvas_width}x{canvas_height}_D{text_rows}_{background}.png'
)
canvas.save(output_path)
print(f'Transformation completed. Saved as {output_path.name}.')
print(f'Output image size: {canvas_width}x{canvas_height}')
print(f'Text density: {text_cols}x{text_rows}')
print(f'Elapsed time: {time.time() - since:.4} second(s)')
| [
"noreply@github.com"
] | albedo-geo.noreply@github.com |
3c6382a662d9ee9bc19ec515c982e30fc01b5944 | 2f61a8cb2200e64adc835c31d8457c3949cd93e0 | /tests/test_action_list_teams.py | 0710484ae7b7f1a9e6466415b3cfa70d37dfa6f0 | [
"Apache-2.0"
] | permissive | syncurity-exchange/stackstorm-github | 8b53907f920bd76fcf8a915f6db7da3e99c9b68c | 9449332a8cd9e5af98e60d564a2b42e2359ef687 | refs/heads/master | 2021-02-13T10:27:32.027838 | 2019-10-31T11:09:30 | 2019-10-31T11:09:30 | 244,687,873 | 0 | 1 | Apache-2.0 | 2020-03-04T02:00:06 | 2020-03-03T16:35:47 | null | UTF-8 | Python | false | false | 995 | py | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# from mock import MagicMock
from github_base_action_test_case import GitHubBaseActionTestCase
from list_teams import ListTeamsAction
class ListTeamsActionTestCase(GitHubBaseActionTestCase):
__test__ = True
action_cls = ListTeamsAction
| [
"jon.middleton@pulsant.com"
] | jon.middleton@pulsant.com |
3edd11b131a4342745ed19f4bd4d890254793c45 | b67dd7bfd64a6c2becffe6cdbbba9c05b033e4a6 | /python/test/core/test_core.py | 274d18e1dfd85fc8cee390df73e0ae0afd69af9b | [
"MIT"
] | permissive | bo-rc/Open3D | b0eee76cb66535ca0ef9f7a56441d19e2f4353da | e7cad94c7a7c63d07da5bc8637b3bb65e698a5ff | refs/heads/master | 2023-03-31T17:11:27.342134 | 2021-04-07T22:23:58 | 2021-04-07T22:23:58 | 343,340,362 | 0 | 0 | NOASSERTION | 2021-04-07T22:23:59 | 2021-03-01T08:22:13 | C++ | UTF-8 | Python | false | false | 45,709 | py | # ----------------------------------------------------------------------------
# - Open3D: www.open3d.org -
# ----------------------------------------------------------------------------
# The MIT License (MIT)
#
# Copyright (c) 2020 www.open3d.org
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
# ----------------------------------------------------------------------------
import open3d as o3d
import numpy as np
import pytest
import tempfile
import sys
import os
sys.path.append(os.path.dirname(os.path.realpath(__file__)) + "/..")
from open3d_test import list_devices
def list_dtypes():
return [
o3d.core.Dtype.Float32,
o3d.core.Dtype.Float64,
o3d.core.Dtype.Int16,
o3d.core.Dtype.Int32,
o3d.core.Dtype.Int64,
o3d.core.Dtype.UInt8,
o3d.core.Dtype.UInt16,
o3d.core.Dtype.Bool,
]
def list_non_bool_dtypes():
return [
o3d.core.Dtype.Float32,
o3d.core.Dtype.Float64,
o3d.core.Dtype.Int16,
o3d.core.Dtype.Int32,
o3d.core.Dtype.Int64,
o3d.core.Dtype.UInt8,
o3d.core.Dtype.UInt16,
]
def to_numpy_dtype(dtype: o3d.core.Dtype):
conversions = {
o3d.core.Dtype.Float32: np.float32,
o3d.core.Dtype.Float64: np.float64,
o3d.core.Dtype.Int16: np.int16,
o3d.core.Dtype.Int32: np.int32,
o3d.core.Dtype.Int64: np.int64,
o3d.core.Dtype.UInt8: np.uint8,
o3d.core.Dtype.UInt16: np.uint16,
o3d.core.Dtype.Bool: bool, # np.bool deprecated
}
return conversions[dtype]
@pytest.mark.parametrize("dtype", list_dtypes())
@pytest.mark.parametrize("device", list_devices())
def test_creation(dtype, device):
# Shape takes tuple, list or o3d.core.SizeVector
t = o3d.core.Tensor.empty((2, 3), dtype, device=device)
assert t.shape == o3d.core.SizeVector([2, 3])
t = o3d.core.Tensor.empty([2, 3], dtype, device=device)
assert t.shape == o3d.core.SizeVector([2, 3])
t = o3d.core.Tensor.empty(o3d.core.SizeVector([2, 3]), dtype, device=device)
assert t.shape == o3d.core.SizeVector([2, 3])
# Test zeros and ones
t = o3d.core.Tensor.zeros((2, 3), dtype, device=device)
np.testing.assert_equal(t.cpu().numpy(), np.zeros((2, 3), dtype=np.float32))
t = o3d.core.Tensor.ones((2, 3), dtype, device=device)
np.testing.assert_equal(t.cpu().numpy(), np.ones((2, 3), dtype=np.float32))
# Automatic casting of dtype.
t = o3d.core.Tensor.full((2,), False, o3d.core.Dtype.Float32, device=device)
np.testing.assert_equal(t.cpu().numpy(),
np.full((2,), False, dtype=np.float32))
t = o3d.core.Tensor.full((2,), 3.5, o3d.core.Dtype.UInt8, device=device)
np.testing.assert_equal(t.cpu().numpy(), np.full((2,), 3.5, dtype=np.uint8))
@pytest.mark.parametrize("shape", [(), (0,), (1,), (0, 2), (0, 0, 2),
(2, 0, 3)])
@pytest.mark.parametrize("dtype", list_dtypes())
@pytest.mark.parametrize("device", list_devices())
def test_creation_special_shapes(shape, dtype, device):
o3_t = o3d.core.Tensor.full(shape, 3.14, dtype, device=device)
np_t = np.full(shape, 3.14, dtype=to_numpy_dtype(dtype))
np.testing.assert_allclose(o3_t.cpu().numpy(), np_t)
def test_dtype():
dtype = o3d.core.Dtype.Int32
assert dtype.byte_size() == 4
assert "{}".format(dtype) == "Int32"
def test_device():
device = o3d.core.Device()
assert device.get_type() == o3d.core.Device.DeviceType.CPU
assert device.get_id() == 0
device = o3d.core.Device("CUDA", 1)
assert device.get_type() == o3d.core.Device.DeviceType.CUDA
assert device.get_id() == 1
device = o3d.core.Device("CUDA:2")
assert device.get_type() == o3d.core.Device.DeviceType.CUDA
assert device.get_id() == 2
assert o3d.core.Device("CUDA", 1) == o3d.core.Device("CUDA:1")
assert o3d.core.Device("CUDA", 1) != o3d.core.Device("CUDA:0")
assert o3d.core.Device("CUDA", 1).__str__() == "CUDA:1"
def test_size_vector():
# List
sv = o3d.core.SizeVector([-1, 2, 3])
assert "{}".format(sv) == "SizeVector[-1, 2, 3]"
# Tuple
sv = o3d.core.SizeVector((-1, 2, 3))
assert "{}".format(sv) == "SizeVector[-1, 2, 3]"
# Numpy 1D array
sv = o3d.core.SizeVector(np.array([-1, 2, 3]))
assert "{}".format(sv) == "SizeVector[-1, 2, 3]"
# Empty
sv = o3d.core.SizeVector()
assert "{}".format(sv) == "SizeVector[]"
sv = o3d.core.SizeVector([])
assert "{}".format(sv) == "SizeVector[]"
sv = o3d.core.SizeVector(())
assert "{}".format(sv) == "SizeVector[]"
sv = o3d.core.SizeVector(np.array([]))
assert "{}".format(sv) == "SizeVector[]"
# Not integer: thorws exception
with pytest.raises(RuntimeError):
sv = o3d.core.SizeVector([1.9, 2, 3])
with pytest.raises(RuntimeError):
sv = o3d.core.SizeVector([-1.5, 2, 3])
# 2D list exception
with pytest.raises(RuntimeError):
sv = o3d.core.SizeVector([[1, 2], [3, 4]])
# 2D Numpy array exception
with pytest.raises(RuntimeError):
sv = o3d.core.SizeVector(np.array([[1, 2], [3, 4]]))
# Garbage input
with pytest.raises(RuntimeError):
sv = o3d.core.SizeVector(["foo", "bar"])
@pytest.mark.parametrize("dtype", list_dtypes())
@pytest.mark.parametrize("device", list_devices())
def test_tensor_constructor(dtype, device):
# Numpy array
np_t = np.array([[0, 1, 2], [3, 4, 5]], dtype=to_numpy_dtype(dtype))
o3_t = o3d.core.Tensor(np_t, device=device)
np.testing.assert_equal(np_t, o3_t.cpu().numpy())
# 2D list
li_t = [[0, 1, 2], [3, 4, 5]]
np_t = np.array(li_t, dtype=to_numpy_dtype(dtype))
o3_t = o3d.core.Tensor(li_t, dtype, device)
np.testing.assert_equal(np_t, o3_t.cpu().numpy())
# 2D list, inconsistent length
li_t = [[0, 1, 2], [3, 4]]
with pytest.raises(Exception):
o3_t = o3d.core.Tensor(li_t, dtype, device)
# Automatic casting
np_t_double = np.array([[0., 1.5, 2.], [3., 4., 5.]])
np_t_int = np.array([[0, 1, 2], [3, 4, 5]])
o3_t = o3d.core.Tensor(np_t_double, o3d.core.Dtype.Int32, device)
np.testing.assert_equal(np_t_int, o3_t.cpu().numpy())
# Special strides
np_t = np.random.randint(10, size=(10, 10))[1:10:2, 1:10:3].T
o3_t = o3d.core.Tensor(np_t, o3d.core.Dtype.Int32, device)
np.testing.assert_equal(np_t, o3_t.cpu().numpy())
# Boolean
np_t = np.array([True, False, True], dtype=np.bool)
o3_t = o3d.core.Tensor([True, False, True], o3d.core.Dtype.Bool, device)
np.testing.assert_equal(np_t, o3_t.cpu().numpy())
o3_t = o3d.core.Tensor(np_t, o3d.core.Dtype.Bool, device)
np.testing.assert_equal(np_t, o3_t.cpu().numpy())
@pytest.mark.parametrize("device", list_devices())
def test_arange(device):
# Full parameters.
setups = [(0, 10, 1), (0, 10, 1), (0.0, 10.0, 2.0), (0.0, -10.0, -2.0)]
for start, stop, step in setups:
np_t = np.arange(start, stop, step)
o3_t = o3d.core.Tensor.arange(start,
stop,
step,
dtype=None,
device=device)
np.testing.assert_equal(np_t, o3_t.cpu().numpy())
# Only stop.
for stop in [1.0, 2.0, 3.0, 1, 2, 3]:
np_t = np.arange(stop)
o3_t = o3d.core.Tensor.arange(stop, dtype=None, device=device)
np.testing.assert_equal(np_t, o3_t.cpu().numpy())
# Only start, stop (step = 1).
setups = [(0, 10), (0, 10), (0.0, 10.0), (0.0, -10.0)]
for start, stop in setups:
np_t = np.arange(start, stop)
# Not full parameter list, need to specify device by kw.
o3_t = o3d.core.Tensor.arange(start, stop, dtype=None, device=device)
np.testing.assert_equal(np_t, o3_t.cpu().numpy())
# Type inference: int -> int.
o3_t = o3d.core.Tensor.arange(0, 5, dtype=None, device=device)
np_t = np.arange(0, 5)
assert o3_t.dtype == o3d.core.Dtype.Int64
np.testing.assert_equal(np_t, o3_t.cpu().numpy())
# Type inference: int, float -> float.
o3_t = o3d.core.Tensor.arange(0, 5.0, dtype=None, device=device)
np_t = np.arange(0, 5)
assert o3_t.dtype == o3d.core.Dtype.Float64
np.testing.assert_equal(np_t, o3_t.cpu().numpy())
# Type inference: float, float -> float.
o3_t = o3d.core.Tensor.arange(0.0, 5.0, dtype=None, device=device)
np_t = np.arange(0, 5)
assert o3_t.dtype == o3d.core.Dtype.Float64
np.testing.assert_equal(np_t, o3_t.cpu().numpy())
# Type inference: explicit type.
o3_t = o3d.core.Tensor.arange(0.0,
5.0,
dtype=o3d.core.Dtype.Int64,
device=device)
np_t = np.arange(0, 5)
assert o3_t.dtype == o3d.core.Dtype.Int64
np.testing.assert_equal(np_t, o3_t.cpu().numpy())
def test_tensor_from_to_numpy():
# a->b copy; b, c share memory
a = np.ones((2, 2))
b = o3d.core.Tensor(a)
c = b.numpy()
c[0, 1] = 200
r = np.array([[1., 200.], [1., 1.]])
np.testing.assert_equal(r, b.numpy())
np.testing.assert_equal(r, c)
# a, b, c share memory
a = np.array([[1., 1.], [1., 1.]])
b = o3d.core.Tensor.from_numpy(a)
c = b.numpy()
a[0, 0] = 100
c[0, 1] = 200
r = np.array([[100., 200.], [1., 1.]])
np.testing.assert_equal(r, a)
np.testing.assert_equal(r, b.numpy())
np.testing.assert_equal(r, c)
# Special strides
ran_t = np.random.randint(10, size=(10, 10)).astype(np.int32)
src_t = ran_t[1:10:2, 1:10:3].T
o3d_t = o3d.core.Tensor.from_numpy(src_t) # Shared memory
dst_t = o3d_t.numpy()
np.testing.assert_equal(dst_t, src_t)
dst_t[0, 0] = 100
np.testing.assert_equal(dst_t, src_t)
np.testing.assert_equal(dst_t, o3d_t.numpy())
src_t[0, 1] = 200
np.testing.assert_equal(dst_t, src_t)
np.testing.assert_equal(dst_t, o3d_t.numpy())
def test_tensor_to_numpy_scope():
src_t = np.array([[10., 11., 12.], [13., 14., 15.]])
def get_dst_t():
o3d_t = o3d.core.Tensor(src_t) # Copy
dst_t = o3d_t.numpy()
return dst_t
dst_t = get_dst_t()
np.testing.assert_equal(dst_t, src_t)
@pytest.mark.parametrize("dtype", list_non_bool_dtypes())
@pytest.mark.parametrize("device", list_devices())
def test_binary_ew_ops(dtype, device):
a = o3d.core.Tensor(np.array([4, 6, 8, 10, 12, 14]),
dtype=dtype,
device=device)
b = o3d.core.Tensor(np.array([2, 3, 4, 5, 6, 7]),
dtype=dtype,
device=device)
np.testing.assert_equal((a + b).cpu().numpy(),
np.array([6, 9, 12, 15, 18, 21]))
np.testing.assert_equal((a - b).cpu().numpy(), np.array([2, 3, 4, 5, 6, 7]))
np.testing.assert_equal((a * b).cpu().numpy(),
np.array([8, 18, 32, 50, 72, 98]))
np.testing.assert_equal((a / b).cpu().numpy(), np.array([2, 2, 2, 2, 2, 2]))
a = o3d.core.Tensor(np.array([4, 6, 8, 10, 12, 14]),
dtype=dtype,
device=device)
a += b
np.testing.assert_equal(a.cpu().numpy(), np.array([6, 9, 12, 15, 18, 21]))
a = o3d.core.Tensor(np.array([4, 6, 8, 10, 12, 14]),
dtype=dtype,
device=device)
a -= b
np.testing.assert_equal(a.cpu().numpy(), np.array([2, 3, 4, 5, 6, 7]))
a = o3d.core.Tensor(np.array([4, 6, 8, 10, 12, 14]),
dtype=dtype,
device=device)
a *= b
np.testing.assert_equal(a.cpu().numpy(), np.array([8, 18, 32, 50, 72, 98]))
a = o3d.core.Tensor(np.array([4, 6, 8, 10, 12, 14]),
dtype=dtype,
device=device)
a //= b
np.testing.assert_equal(a.cpu().numpy(), np.array([2, 2, 2, 2, 2, 2]))
@pytest.mark.parametrize("device", list_devices())
def test_to(device):
a = o3d.core.Tensor(np.array([0.1, 1.2, 2.3, 3.4, 4.5,
5.6]).astype(np.float32),
device=device)
b = a.to(o3d.core.Dtype.Int32)
np.testing.assert_equal(b.cpu().numpy(), np.array([0, 1, 2, 3, 4, 5]))
assert b.shape == o3d.core.SizeVector([6])
assert b.strides == o3d.core.SizeVector([1])
assert b.dtype == o3d.core.Dtype.Int32
assert b.device == a.device
@pytest.mark.parametrize("device", list_devices())
def test_unary_ew_ops(device):
src_vals = np.array([0, 1, 2, 3, 4, 5]).astype(np.float32)
src = o3d.core.Tensor(src_vals, device=device)
rtol = 1e-5
atol = 0
np.testing.assert_allclose(src.sqrt().cpu().numpy(),
np.sqrt(src_vals),
rtol=rtol,
atol=atol)
np.testing.assert_allclose(src.sin().cpu().numpy(),
np.sin(src_vals),
rtol=rtol,
atol=atol)
np.testing.assert_allclose(src.cos().cpu().numpy(),
np.cos(src_vals),
rtol=rtol,
atol=atol)
np.testing.assert_allclose(src.neg().cpu().numpy(),
-src_vals,
rtol=rtol,
atol=atol)
np.testing.assert_allclose(src.exp().cpu().numpy(),
np.exp(src_vals),
rtol=rtol,
atol=atol)
@pytest.mark.parametrize("device", list_devices())
def test_getitem(device):
np_t = np.array(range(24)).reshape((2, 3, 4))
o3_t = o3d.core.Tensor(np_t, device=device)
np.testing.assert_equal(o3_t[:].cpu().numpy(), np_t[:])
np.testing.assert_equal(o3_t[0].cpu().numpy(), np_t[0])
np.testing.assert_equal(o3_t[0, 1].cpu().numpy(), np_t[0, 1])
np.testing.assert_equal(o3_t[0, :].cpu().numpy(), np_t[0, :])
np.testing.assert_equal(o3_t[0, 1:3].cpu().numpy(), np_t[0, 1:3])
np.testing.assert_equal(o3_t[0, :, :-2].cpu().numpy(), np_t[0, :, :-2])
np.testing.assert_equal(o3_t[0, 1:3, 2].cpu().numpy(), np_t[0, 1:3, 2])
np.testing.assert_equal(o3_t[0, 1:-1, 2].cpu().numpy(), np_t[0, 1:-1, 2])
np.testing.assert_equal(o3_t[0, 1:3, 0:4:2].cpu().numpy(), np_t[0, 1:3,
0:4:2])
np.testing.assert_equal(o3_t[0, 1:3, 0:-1:2].cpu().numpy(), np_t[0, 1:3,
0:-1:2])
np.testing.assert_equal(o3_t[0, 1, :].cpu().numpy(), np_t[0, 1, :])
# Slice out-of-range
np.testing.assert_equal(o3_t[1:6].cpu().numpy(), np_t[1:6])
np.testing.assert_equal(o3_t[2:5, -10:20].cpu().numpy(), np_t[2:5, -10:20])
np.testing.assert_equal(o3_t[2:2, 3:3, 4:4].cpu().numpy(), np_t[2:2, 3:3,
4:4])
np.testing.assert_equal(o3_t[2:20, 3:30, 4:40].cpu().numpy(),
np_t[2:20, 3:30, 4:40])
np.testing.assert_equal(o3_t[-2:20, -3:30, -4:40].cpu().numpy(),
np_t[-2:20, -3:30, -4:40])
# Slice the slice
np.testing.assert_equal(o3_t[0:2, 1:3, 0:4][0:1, 0:2, 2:3].cpu().numpy(),
np_t[0:2, 1:3, 0:4][0:1, 0:2, 2:3])
@pytest.mark.parametrize("device", list_devices())
def test_setitem(device):
np_ref = np.array(range(24)).reshape((2, 3, 4))
np_t = np_ref.copy()
o3_t = o3d.core.Tensor(np_t, device=device)
np_fill_t = np.random.rand(*np_t[:].shape)
o3_fill_t = o3d.core.Tensor(np_fill_t, device=device)
np_t[:] = np_fill_t
o3_t[:] = o3_fill_t
np.testing.assert_equal(o3_t.cpu().numpy(), np_t)
np_t = np_ref.copy()
o3_t = o3d.core.Tensor(np_t, device=device)
np_fill_t = np.random.rand(*np_t[0].shape)
o3_fill_t = o3d.core.Tensor(np_fill_t, device=device)
np_t[0] = np_fill_t
o3_t[0] = o3_fill_t
np.testing.assert_equal(o3_t.cpu().numpy(), np_t)
np_t = np_ref.copy()
o3_t = o3d.core.Tensor(np_t, device=device)
np_fill_t = np.random.rand(*np_t[0, 1].shape)
o3_fill_t = o3d.core.Tensor(np_fill_t, device=device)
np_t[0, 1] = np_fill_t
o3_t[0, 1] = o3_fill_t
np.testing.assert_equal(o3_t.cpu().numpy(), np_t)
np_t = np_ref.copy()
o3_t = o3d.core.Tensor(np_t, device=device)
np_fill_t = np.random.rand(*np_t[0, :].shape)
o3_fill_t = o3d.core.Tensor(np_fill_t, device=device)
np_t[0, :] = np_fill_t
o3_t[0, :] = o3_fill_t
np.testing.assert_equal(o3_t.cpu().numpy(), np_t)
np_t = np_ref.copy()
o3_t = o3d.core.Tensor(np_t, device=device)
np_fill_t = np.random.rand(*np_t[0, 1:3].shape)
o3_fill_t = o3d.core.Tensor(np_fill_t, device=device)
np_t[0, 1:3] = np_fill_t
o3_t[0, 1:3] = o3_fill_t
np.testing.assert_equal(o3_t.cpu().numpy(), np_t)
np_t = np_ref.copy()
o3_t = o3d.core.Tensor(np_t, device=device)
np_fill_t = np.random.rand(*np_t[0, :, :-2].shape)
o3_fill_t = o3d.core.Tensor(np_fill_t, device=device)
np_t[0, :, :-2] = np_fill_t
o3_t[0, :, :-2] = o3_fill_t
np.testing.assert_equal(o3_t.cpu().numpy(), np_t)
np_t = np_ref.copy()
o3_t = o3d.core.Tensor(np_t, device=device)
np_fill_t = np.random.rand(*np_t[0, 1:3, 2].shape)
o3_fill_t = o3d.core.Tensor(np_fill_t, device=device)
np_t[0, 1:3, 2] = np_fill_t
o3_t[0, 1:3, 2] = o3_fill_t
np.testing.assert_equal(o3_t.cpu().numpy(), np_t)
np_t = np_ref.copy()
o3_t = o3d.core.Tensor(np_t, device=device)
np_fill_t = np.random.rand(*np_t[0, 1:-1, 2].shape)
o3_fill_t = o3d.core.Tensor(np_fill_t, device=device)
np_t[0, 1:-1, 2] = np_fill_t
o3_t[0, 1:-1, 2] = o3_fill_t
np.testing.assert_equal(o3_t.cpu().numpy(), np_t)
np_t = np_ref.copy()
o3_t = o3d.core.Tensor(np_t, device=device)
np_fill_t = np.random.rand(*np_t[0, 1:3, 0:4:2].shape)
o3_fill_t = o3d.core.Tensor(np_fill_t, device=device)
np_t[0, 1:3, 0:4:2] = np_fill_t
o3_t[0, 1:3, 0:4:2] = o3_fill_t
np.testing.assert_equal(o3_t.cpu().numpy(), np_t)
np_t = np_ref.copy()
o3_t = o3d.core.Tensor(np_t, device=device)
np_fill_t = np.random.rand(*np_t[0, 1:3, 0:-1:2].shape)
o3_fill_t = o3d.core.Tensor(np_fill_t, device=device)
np_t[0, 1:3, 0:-1:2] = np_fill_t
o3_t[0, 1:3, 0:-1:2] = o3_fill_t
np.testing.assert_equal(o3_t.cpu().numpy(), np_t)
np_t = np_ref.copy()
o3_t = o3d.core.Tensor(np_t, device=device)
np_fill_t = np.random.rand(*np_t[0, 1, :].shape)
o3_fill_t = o3d.core.Tensor(np_fill_t, device=device)
np_t[0, 1, :] = np_fill_t
o3_t[0, 1, :] = o3_fill_t
np.testing.assert_equal(o3_t.cpu().numpy(), np_t)
np_t = np_ref.copy()
o3_t = o3d.core.Tensor(np_t, device=device)
np_fill_t = np.random.rand(*np_t[0:2, 1:3, 0:4][0:1, 0:2, 2:3].shape)
o3_fill_t = o3d.core.Tensor(np_fill_t, device=device)
np_t[0:2, 1:3, 0:4][0:1, 0:2, 2:3] = np_fill_t
o3_t[0:2, 1:3, 0:4][0:1, 0:2, 2:3] = o3_fill_t
np.testing.assert_equal(o3_t.cpu().numpy(), np_t)
@pytest.mark.parametrize(
"dim",
[0, 1, 2, (), (0,), (1,), (2,), (0, 1), (0, 2), (1, 2), (0, 1, 2), None])
@pytest.mark.parametrize("keepdim", [True, False])
@pytest.mark.parametrize("device", list_devices())
def test_reduction_sum(dim, keepdim, device):
np_src = np.array(range(24)).reshape((2, 3, 4))
o3_src = o3d.core.Tensor(np_src, device=device)
np_dst = np_src.sum(axis=dim, keepdims=keepdim)
o3_dst = o3_src.sum(dim=dim, keepdim=keepdim)
np.testing.assert_allclose(o3_dst.cpu().numpy(), np_dst)
@pytest.mark.parametrize("shape_and_axis", [
    ((), ()),
    ((0,), ()),
    ((0,), (0)),
    ((0, 2), ()),
    ((0, 2), (0)),
    ((0, 2), (1)),
])
@pytest.mark.parametrize("keepdim", [True, False])
@pytest.mark.parametrize("device", list_devices())
def test_reduction_special_shapes(shape_and_axis, keepdim, device):
    """Sum over scalar / zero-sized shapes must agree with numpy."""
    shape, axis = shape_and_axis
    reference = np.array(np.random.rand(*shape))
    tensor = o3d.core.Tensor(reference, device=device)
    # Round-trip first: the special shape itself must survive conversion.
    np.testing.assert_equal(tensor.cpu().numpy(), reference)
    expected = reference.sum(axis=axis, keepdims=keepdim)
    actual = tensor.sum(dim=axis, keepdim=keepdim).cpu().numpy()
    np.testing.assert_equal(actual, expected)
@pytest.mark.parametrize(
    "dim",
    [0, 1, 2, (), (0,), (1,), (2,), (0, 1), (0, 2), (1, 2), (0, 1, 2), None])
@pytest.mark.parametrize("keepdim", [True, False])
@pytest.mark.parametrize("device", list_devices())
def test_reduction_mean(dim, keepdim, device):
    """Mean reduction must agree with numpy for every axis combination."""
    reference = np.arange(24, dtype=np.float32).reshape((2, 3, 4))
    tensor = o3d.core.Tensor(reference, device=device)
    expected = reference.mean(axis=dim, keepdims=keepdim)
    actual = tensor.mean(dim=dim, keepdim=keepdim).cpu().numpy()
    np.testing.assert_allclose(actual, expected)
@pytest.mark.parametrize(
    "dim",
    [0, 1, 2, (), (0,), (1,), (2,), (0, 1), (0, 2), (1, 2), (0, 1, 2), None])
@pytest.mark.parametrize("keepdim", [True, False])
@pytest.mark.parametrize("device", list_devices())
def test_reduction_prod(dim, keepdim, device):
    """Product reduction must agree with numpy for every axis combination."""
    reference = np.arange(24).reshape((2, 3, 4))
    tensor = o3d.core.Tensor(reference, device=device)
    expected = reference.prod(axis=dim, keepdims=keepdim)
    actual = tensor.prod(dim=dim, keepdim=keepdim).cpu().numpy()
    np.testing.assert_allclose(actual, expected)
@pytest.mark.parametrize(
    "dim",
    [0, 1, 2, (), (0,), (1,), (2,), (0, 1), (0, 2), (1, 2), (0, 1, 2), None])
@pytest.mark.parametrize("keepdim", [True, False])
@pytest.mark.parametrize("device", list_devices())
def test_reduction_min(dim, keepdim, device):
    """Min reduction over shuffled data must agree with numpy."""
    flat = np.array(range(24))
    np.random.shuffle(flat)
    reference = flat.reshape((2, 3, 4))
    tensor = o3d.core.Tensor(reference, device=device)
    expected = reference.min(axis=dim, keepdims=keepdim)
    actual = tensor.min(dim=dim, keepdim=keepdim).cpu().numpy()
    np.testing.assert_allclose(actual, expected)
@pytest.mark.parametrize(
    "dim",
    [0, 1, 2, (), (0,), (1,), (2,), (0, 1), (0, 2), (1, 2), (0, 1, 2), None])
@pytest.mark.parametrize("keepdim", [True, False])
@pytest.mark.parametrize("device", list_devices())
def test_reduction_max(dim, keepdim, device):
    """Max reduction over shuffled data must agree with numpy."""
    flat = np.array(range(24))
    np.random.shuffle(flat)
    reference = flat.reshape((2, 3, 4))
    tensor = o3d.core.Tensor(reference, device=device)
    expected = reference.max(axis=dim, keepdims=keepdim)
    actual = tensor.max(dim=dim, keepdim=keepdim).cpu().numpy()
    np.testing.assert_allclose(actual, expected)
@pytest.mark.parametrize("dim", [0, 1, 2, None])
@pytest.mark.parametrize("device", list_devices())
def test_reduction_argmin_argmax(dim, device):
    """argmin/argmax must return the same indices as numpy."""
    flat = np.array(range(24))
    np.random.shuffle(flat)
    reference = flat.reshape((2, 3, 4))
    tensor = o3d.core.Tensor(reference, device=device)
    np.testing.assert_allclose(tensor.argmin(dim=dim).cpu().numpy(),
                               reference.argmin(axis=dim))
    np.testing.assert_allclose(tensor.argmax(dim=dim).cpu().numpy(),
                               reference.argmax(axis=dim))
@pytest.mark.parametrize("device", list_devices())
def test_advanced_index_get_mixed(device):
    """Reads through mixed slice/list indexing must match numpy semantics."""
    base = np.array(range(24)).reshape((2, 3, 4))
    t_base = o3d.core.Tensor(base, device=device)
    np.testing.assert_equal(t_base[1, 0:2, [1, 2]].cpu().numpy(),
                            base[1, 0:2, [1, 2]])

    # Subtle differences between slice and list
    grid = np.array([0, 100, 200, 300, 400, 500, 600, 700, 800]).reshape(3, 3)
    t_grid = o3d.core.Tensor(grid, device=device)
    np.testing.assert_equal(t_grid[1, 2].cpu().numpy(), grid[1, 2])
    np.testing.assert_equal(t_grid[[1, 2]].cpu().numpy(), grid[[1, 2]])
    np.testing.assert_equal(t_grid[(1, 2)].cpu().numpy(), grid[(1, 2)])
    np.testing.assert_equal(t_grid[(1, 2), [1, 2]].cpu().numpy(),
                            grid[(1, 2), [1, 2]])

    # Complex case: interleaving slice and advanced indexing
    big = np.array(range(120)).reshape((2, 3, 4, 5))
    t_big = o3d.core.Tensor(big, device=device)
    np.testing.assert_equal(t_big[1, [[1, 2], [2, 1]], 0:4:2, [3, 4]].cpu().numpy(),
                            big[1, [[1, 2], [2, 1]], 0:4:2, [3, 4]])
@pytest.mark.parametrize("device", list_devices())
def test_advanced_index_set_mixed(device):
    """Assignment through mixed slice/list indexing must match numpy."""
    np_src = np.array(range(24)).reshape((2, 3, 4))
    o3_src = o3d.core.Tensor(np_src, device=device)
    np_fill = np.array(([[100, 200], [300, 400]]))
    o3_fill = o3d.core.Tensor(np_fill, device=device)
    np_src[1, 0:2, [1, 2]] = np_fill
    o3_src[1, 0:2, [1, 2]] = o3_fill
    np.testing.assert_equal(o3_src.cpu().numpy(), np_src)

    # Complex case: interleaving slice and advanced indexing
    np_src = np.array(range(120)).reshape((2, 3, 4, 5))
    o3_src = o3d.core.Tensor(np_src, device=device)
    fill_shape = np_src[1, [[1, 2], [2, 1]], 0:4:2, [3, 4]].shape
    np_fill_val = np.random.randint(5000, size=fill_shape).astype(np_src.dtype)
    # Construct the fill tensor on the target device like every other tensor
    # in this test (the original left it on the default device).
    o3_fill_val = o3d.core.Tensor(np_fill_val, device=device)
    o3_src[1, [[1, 2], [2, 1]], 0:4:2, [3, 4]] = o3_fill_val
    np_src[1, [[1, 2], [2, 1]], 0:4:2, [3, 4]] = np_fill_val
    np.testing.assert_equal(o3_src.cpu().numpy(), np_src)
@pytest.mark.parametrize("np_func_name,o3_func_name", [("sqrt", "sqrt"),
                                                       ("sin", "sin"),
                                                       ("cos", "cos"),
                                                       ("negative", "neg"),
                                                       ("exp", "exp"),
                                                       ("abs", "abs"),
                                                       ("floor", "floor"),
                                                       ("ceil", "ceil"),
                                                       ("round", "round"),
                                                       ("trunc", "trunc")])
@pytest.mark.parametrize("device", list_devices())
def test_unary_elementwise(np_func_name, o3_func_name, device):
    """Unary elementwise ops must match numpy, out-of-place and in-place."""
    np_t = np.array([-3.4, -2.6, -1.5, 0, 1.4, 2.6, 3.5]).astype(np.float32)
    o3_t = o3d.core.Tensor(np_t, device=device)

    # Compute the numpy reference once, suppressing warnings locally with a
    # context manager instead of np.seterr (which mutates the global numpy
    # error state and leaks into every subsequently-run test).
    with np.errstate(invalid='ignore'):  # e.g. sqrt of negative is nan
        np_expected = getattr(np, np_func_name)(np_t)

    # Test non-in-place version
    np.testing.assert_allclose(getattr(o3_t, o3_func_name)().cpu().numpy(),
                               np_expected,
                               rtol=1e-7,
                               atol=1e-7)

    # Test in-place version (the rounding family has no in-place variant)
    if o3_func_name not in ["floor", "ceil", "round", "trunc"]:
        o3_func_name_inplace = o3_func_name + "_"
        getattr(o3_t, o3_func_name_inplace)()
        np.testing.assert_allclose(o3_t.cpu().numpy(),
                                   np_expected,
                                   rtol=1e-7,
                                   atol=1e-7)
@pytest.mark.parametrize("device", list_devices())
def test_logical_ops(device):
    """logical_and/or/xor on boolean tensors must match numpy."""
    lhs = np.array([True, False, True, False])
    rhs = np.array([True, True, False, False])
    o3_lhs = o3d.core.Tensor(lhs, device=device)
    o3_rhs = o3d.core.Tensor(rhs, device=device)
    # Same operands, all three binary logical operations.
    for method_name, np_func in (("logical_and", np.logical_and),
                                 ("logical_or", np.logical_or),
                                 ("logical_xor", np.logical_xor)):
        actual = getattr(o3_lhs, method_name)(o3_rhs).cpu().numpy()
        np.testing.assert_equal(actual, np_func(lhs, rhs))
@pytest.mark.parametrize("device", list_devices())
def test_comparision_ops(device):
    """Elementwise comparison operators must match numpy."""
    lhs = np.array([0, 1, -1])
    rhs = np.array([0, 0, 0])
    o3_lhs = o3d.core.Tensor(lhs, device=device)
    o3_rhs = o3d.core.Tensor(rhs, device=device)
    # All six comparison operators against the numpy reference.
    np.testing.assert_equal((o3_lhs > o3_rhs).cpu().numpy(), lhs > rhs)
    np.testing.assert_equal((o3_lhs >= o3_rhs).cpu().numpy(), lhs >= rhs)
    np.testing.assert_equal((o3_lhs < o3_rhs).cpu().numpy(), lhs < rhs)
    np.testing.assert_equal((o3_lhs <= o3_rhs).cpu().numpy(), lhs <= rhs)
    np.testing.assert_equal((o3_lhs == o3_rhs).cpu().numpy(), lhs == rhs)
    np.testing.assert_equal((o3_lhs != o3_rhs).cpu().numpy(), lhs != rhs)
@pytest.mark.parametrize("device", list_devices())
def test_non_zero(device):
    """nonzero(as_tuple=True) must return the same index tuple as numpy."""
    matrix = np.array([[3, 0, 0], [0, 4, 0], [5, 6, 0]])
    expected_tuple = np.nonzero(matrix)
    actual_tuple = o3d.core.Tensor(matrix, device=device).nonzero(as_tuple=True)
    for expected_axis, actual_axis in zip(expected_tuple, actual_tuple):
        np.testing.assert_equal(expected_axis, actual_axis.cpu().numpy())
@pytest.mark.parametrize("device", list_devices())
def test_boolean_advanced_indexing(device):
    """Boolean-mask indexing (both read and write) must match numpy."""
    np_a = np.array([1, -1, -2, 3])
    o3_a = o3d.core.Tensor(np_a, device=device)
    np_a[np_a < 0] = 0
    o3_a[o3_a < 0] = 0
    np.testing.assert_equal(np_a, o3_a.cpu().numpy())

    np_x = np.array([[0, 1], [1, 1], [2, 2]])
    np_row_sum = np.array([1, 2, 4])
    np_y = np_x[np_row_sum <= 2, :]
    o3_x = o3d.core.Tensor(np_x, device=device)
    # Keep the mask tensor on the same device as the indexed tensor, like
    # every other tensor in this file (the original constructed it on the
    # default device).
    o3_row_sum = o3d.core.Tensor(np_row_sum, device=device)
    o3_y = o3_x[o3_row_sum <= 2, :]
    np.testing.assert_equal(np_y, o3_y.cpu().numpy())
@pytest.mark.parametrize("device", list_devices())
def test_scalar_op(device):
    """Tensor-scalar arithmetic, logical and comparison operators.

    Exercises the named methods (add, sub, ...), the operator overloads
    (+, -, ...) and the in-place variants (add_, +=, ...), with int, float
    and bool scalars on the right-hand side (and on the left where a
    reflected overload exists).
    """
    # +
    a = o3d.core.Tensor.ones((2, 3), o3d.core.Dtype.Float32, device=device)
    b = a.add(1)
    np.testing.assert_equal(b.cpu().numpy(), np.full((2, 3), 2))
    b = a + 1
    np.testing.assert_equal(b.cpu().numpy(), np.full((2, 3), 2))
    b = 1 + a
    np.testing.assert_equal(b.cpu().numpy(), np.full((2, 3), 2))
    b = a + True
    np.testing.assert_equal(b.cpu().numpy(), np.full((2, 3), 2))
    # +=
    a = o3d.core.Tensor.ones((2, 3), o3d.core.Dtype.Float32, device=device)
    a.add_(1)
    np.testing.assert_equal(a.cpu().numpy(), np.full((2, 3), 2))
    a += 1
    np.testing.assert_equal(a.cpu().numpy(), np.full((2, 3), 3))
    a += True
    np.testing.assert_equal(a.cpu().numpy(), np.full((2, 3), 4))
    # -
    a = o3d.core.Tensor.ones((2, 3), o3d.core.Dtype.Float32, device=device)
    b = a.sub(1)
    np.testing.assert_equal(b.cpu().numpy(), np.full((2, 3), 0))
    b = a - 1
    np.testing.assert_equal(b.cpu().numpy(), np.full((2, 3), 0))
    b = 10 - a
    np.testing.assert_equal(b.cpu().numpy(), np.full((2, 3), 9))
    b = a - True
    np.testing.assert_equal(b.cpu().numpy(), np.full((2, 3), 0))
    # -=
    a = o3d.core.Tensor.ones((2, 3), o3d.core.Dtype.Float32, device=device)
    a.sub_(1)
    np.testing.assert_equal(a.cpu().numpy(), np.full((2, 3), 0))
    a -= 1
    np.testing.assert_equal(a.cpu().numpy(), np.full((2, 3), -1))
    a -= True
    np.testing.assert_equal(a.cpu().numpy(), np.full((2, 3), -2))
    # *
    a = o3d.core.Tensor.full((2, 3), 2, o3d.core.Dtype.Float32, device=device)
    b = a.mul(10)
    np.testing.assert_equal(b.cpu().numpy(), np.full((2, 3), 20))
    b = a * 10
    np.testing.assert_equal(b.cpu().numpy(), np.full((2, 3), 20))
    b = 10 * a
    np.testing.assert_equal(b.cpu().numpy(), np.full((2, 3), 20))
    b = a * True
    np.testing.assert_equal(b.cpu().numpy(), np.full((2, 3), 2))
    # *=
    a = o3d.core.Tensor.full((2, 3), 2, o3d.core.Dtype.Float32, device=device)
    a.mul_(10)
    np.testing.assert_equal(a.cpu().numpy(), np.full((2, 3), 20))
    a *= 10
    np.testing.assert_equal(a.cpu().numpy(), np.full((2, 3), 200))
    a *= True
    np.testing.assert_equal(a.cpu().numpy(), np.full((2, 3), 200))
    # /
    a = o3d.core.Tensor.full((2, 3), 20, o3d.core.Dtype.Float32, device=device)
    b = a.div(2)
    np.testing.assert_equal(b.cpu().numpy(), np.full((2, 3), 10))
    b = a / 2
    np.testing.assert_equal(b.cpu().numpy(), np.full((2, 3), 10))
    b = a // 2
    np.testing.assert_equal(b.cpu().numpy(), np.full((2, 3), 10))
    b = 10 / a
    np.testing.assert_equal(b.cpu().numpy(), np.full((2, 3), 0.5))
    # NOTE(review): 10 // 20 would be 0 under integer floor division; the
    # expected 0.5 pins Open3D's `//` on float tensors to true division.
    b = 10 // a
    np.testing.assert_equal(b.cpu().numpy(), np.full((2, 3), 0.5))
    b = a / True
    np.testing.assert_equal(b.cpu().numpy(), np.full((2, 3), 20))
    # /=
    a = o3d.core.Tensor.full((2, 3), 20, o3d.core.Dtype.Float32, device=device)
    a.div_(2)
    np.testing.assert_equal(a.cpu().numpy(), np.full((2, 3), 10))
    a /= 2
    np.testing.assert_equal(a.cpu().numpy(), np.full((2, 3), 5))
    # NOTE(review): same as above — 5 //= 2 is expected to give 2.5, not 2.
    a //= 2
    np.testing.assert_equal(a.cpu().numpy(), np.full((2, 3), 2.5))
    a /= True
    np.testing.assert_equal(a.cpu().numpy(), np.full((2, 3), 2.5))
    # logical_and
    a = o3d.core.Tensor([True, False], device=device)
    np.testing.assert_equal(
        a.logical_and(True).cpu().numpy(), np.array([True, False]))
    np.testing.assert_equal(
        a.logical_and(5).cpu().numpy(), np.array([True, False]))
    np.testing.assert_equal(
        a.logical_and(False).cpu().numpy(), np.array([False, False]))
    np.testing.assert_equal(
        a.logical_and(0).cpu().numpy(), np.array([False, False]))
    # logical_and_
    a = o3d.core.Tensor([True, False], device=device)
    a.logical_and_(True)
    np.testing.assert_equal(a.cpu().numpy(), np.array([True, False]))
    a = o3d.core.Tensor([True, False], device=device)
    a.logical_and_(5)
    np.testing.assert_equal(a.cpu().numpy(), np.array([True, False]))
    a = o3d.core.Tensor([True, False], device=device)
    a.logical_and_(False)
    np.testing.assert_equal(a.cpu().numpy(), np.array([False, False]))
    a.logical_and_(0)
    np.testing.assert_equal(a.cpu().numpy(), np.array([False, False]))
    # logical_or
    a = o3d.core.Tensor([True, False], device=device)
    np.testing.assert_equal(
        a.logical_or(True).cpu().numpy(), np.array([True, True]))
    np.testing.assert_equal(
        a.logical_or(5).cpu().numpy(), np.array([True, True]))
    np.testing.assert_equal(
        a.logical_or(False).cpu().numpy(), np.array([True, False]))
    np.testing.assert_equal(
        a.logical_or(0).cpu().numpy(), np.array([True, False]))
    # logical_or_
    a = o3d.core.Tensor([True, False], device=device)
    a.logical_or_(True)
    np.testing.assert_equal(a.cpu().numpy(), np.array([True, True]))
    a = o3d.core.Tensor([True, False], device=device)
    a.logical_or_(5)
    np.testing.assert_equal(a.cpu().numpy(), np.array([True, True]))
    a = o3d.core.Tensor([True, False], device=device)
    a.logical_or_(False)
    np.testing.assert_equal(a.cpu().numpy(), np.array([True, False]))
    a.logical_or_(0)
    np.testing.assert_equal(a.cpu().numpy(), np.array([True, False]))
    # logical_xor
    a = o3d.core.Tensor([True, False], device=device)
    np.testing.assert_equal(
        a.logical_xor(True).cpu().numpy(), np.array([False, True]))
    np.testing.assert_equal(
        a.logical_xor(5).cpu().numpy(), np.array([False, True]))
    np.testing.assert_equal(
        a.logical_xor(False).cpu().numpy(), np.array([True, False]))
    np.testing.assert_equal(
        a.logical_xor(0).cpu().numpy(), np.array([True, False]))
    # logical_xor_
    a = o3d.core.Tensor([True, False], device=device)
    a.logical_xor_(True)
    np.testing.assert_equal(a.cpu().numpy(), np.array([False, True]))
    a = o3d.core.Tensor([True, False], device=device)
    a.logical_xor_(5)
    np.testing.assert_equal(a.cpu().numpy(), np.array([False, True]))
    a = o3d.core.Tensor([True, False], device=device)
    a.logical_xor_(False)
    np.testing.assert_equal(a.cpu().numpy(), np.array([True, False]))
    a.logical_xor_(0)
    np.testing.assert_equal(a.cpu().numpy(), np.array([True, False]))
    # gt
    dtype = o3d.core.Dtype.Float32
    a = o3d.core.Tensor([-1, 0, 1], dtype=dtype, device=device)
    np.testing.assert_equal((a.gt(0)).cpu().numpy(),
                            np.array([False, False, True]))
    np.testing.assert_equal((a > 0).cpu().numpy(),
                            np.array([False, False, True]))
    # gt_
    a = o3d.core.Tensor([-1, 0, 1], dtype=dtype, device=device)
    a.gt_(0)
    np.testing.assert_equal(a.cpu().numpy(), np.array([False, False, True]))
    # lt
    dtype = o3d.core.Dtype.Float32
    a = o3d.core.Tensor([-1, 0, 1], dtype=dtype, device=device)
    np.testing.assert_equal((a.lt(0)).cpu().numpy(),
                            np.array([True, False, False]))
    np.testing.assert_equal((a < 0).cpu().numpy(),
                            np.array([True, False, False]))
    # lt_
    a = o3d.core.Tensor([-1, 0, 1], dtype=dtype, device=device)
    a.lt_(0)
    np.testing.assert_equal(a.cpu().numpy(), np.array([True, False, False]))
    # ge
    dtype = o3d.core.Dtype.Float32
    a = o3d.core.Tensor([-1, 0, 1], dtype=dtype, device=device)
    np.testing.assert_equal((a.ge(0)).cpu().numpy(),
                            np.array([False, True, True]))
    np.testing.assert_equal((a >= 0).cpu().numpy(),
                            np.array([False, True, True]))
    # ge_
    a = o3d.core.Tensor([-1, 0, 1], dtype=dtype, device=device)
    a.ge_(0)
    np.testing.assert_equal(a.cpu().numpy(), np.array([False, True, True]))
    # le
    dtype = o3d.core.Dtype.Float32
    a = o3d.core.Tensor([-1, 0, 1], dtype=dtype, device=device)
    np.testing.assert_equal((a.le(0)).cpu().numpy(),
                            np.array([True, True, False]))
    np.testing.assert_equal((a <= 0).cpu().numpy(),
                            np.array([True, True, False]))
    # le_
    a = o3d.core.Tensor([-1, 0, 1], dtype=dtype, device=device)
    a.le_(0)
    np.testing.assert_equal(a.cpu().numpy(), np.array([True, True, False]))
    # eq
    dtype = o3d.core.Dtype.Float32
    a = o3d.core.Tensor([-1, 0, 1], dtype=dtype, device=device)
    np.testing.assert_equal((a.eq(0)).cpu().numpy(),
                            np.array([False, True, False]))
    np.testing.assert_equal((a == 0).cpu().numpy(),
                            np.array([False, True, False]))
    # eq_
    a = o3d.core.Tensor([-1, 0, 1], dtype=dtype, device=device)
    a.eq_(0)
    np.testing.assert_equal(a.cpu().numpy(), np.array([False, True, False]))
    # ne
    dtype = o3d.core.Dtype.Float32
    a = o3d.core.Tensor([-1, 0, 1], dtype=dtype, device=device)
    np.testing.assert_equal((a.ne(0)).cpu().numpy(),
                            np.array([True, False, True]))
    np.testing.assert_equal((a != 0).cpu().numpy(),
                            np.array([True, False, True]))
    # ne_
    a = o3d.core.Tensor([-1, 0, 1], dtype=dtype, device=device)
    a.ne_(0)
    np.testing.assert_equal(a.cpu().numpy(), np.array([True, False, True]))
@pytest.mark.parametrize("device", list_devices())
def test_all_any(device):
    """all()/any() reductions on boolean tensors, including the empty one."""
    mixed = o3d.core.Tensor([False, True, True, True],
                            dtype=o3d.core.Dtype.Bool,
                            device=device)
    assert mixed.any()
    assert not mixed.all()

    all_true = o3d.core.Tensor([True, True, True, True],
                               dtype=o3d.core.Dtype.Bool,
                               device=device)
    assert all_true.all()

    # Empty tensor: all() is vacuously true, any() is false.
    empty = o3d.core.Tensor([], dtype=o3d.core.Dtype.Bool, device=device)
    assert empty.all()
    assert not empty.any()
@pytest.mark.parametrize("device", list_devices())
def test_allclose_isclose(device):
    """allclose()/isclose() must honor absolute tolerance like numpy."""
    lhs = o3d.core.Tensor([1, 2], device=device)
    rhs = o3d.core.Tensor([1, 3], device=device)
    assert not lhs.allclose(rhs)
    np.testing.assert_allclose(lhs.isclose(rhs).cpu().numpy(),
                               np.array([True, False]))
    assert lhs.allclose(rhs, atol=1)
    np.testing.assert_allclose(lhs.isclose(rhs, atol=1).cpu().numpy(),
                               np.array([True, True]))

    # Test cases from
    # https://numpy.org/doc/stable/reference/generated/numpy.allclose.html
    for lhs_vals, rhs_vals, expected in (
        ([1e10, 1e-7], [1.00001e10, 1e-8], False),
        ([1e10, 1e-8], [1.00001e10, 1e-9], True),
        ([1e10, 1e-8], [1.0001e10, 1e-9], False),
    ):
        lhs = o3d.core.Tensor(lhs_vals, device=device)
        rhs = o3d.core.Tensor(rhs_vals, device=device)
        assert bool(lhs.allclose(rhs)) == expected
@pytest.mark.parametrize("device", list_devices())
def test_issame(device):
    """issame() is identity of storage, not elementwise equality."""
    dtype = o3d.core.Dtype.Float32
    first = o3d.core.Tensor.ones((2, 3), dtype, device=device)
    second = o3d.core.Tensor.ones((2, 3), dtype, device=device)
    # Equal values, distinct storage.
    assert first.allclose(second)
    assert not first.issame(second)

    # An alias shares storage.
    alias = first
    assert first.allclose(alias)
    assert first.issame(alias)

    # Two identical slices are views of the same storage.
    view_a = first[:, 0:2]
    view_b = first[:, 0:2]
    assert view_a.allclose(view_b)
    assert view_a.issame(view_b)
@pytest.mark.parametrize("device", list_devices())
def test_item(device):
    """item() must return a native Python scalar of the matching type."""
    o3_t = o3d.core.Tensor.ones(
        (2, 3), dtype=o3d.core.Dtype.Float32, device=device) * 1.5
    assert o3_t[0, 0].item() == 1.5
    assert isinstance(o3_t[0, 0].item(), float)

    o3_t = o3d.core.Tensor.ones(
        (2, 3), dtype=o3d.core.Dtype.Float64, device=device) * 1.5
    assert o3_t[0, 0].item() == 1.5
    assert isinstance(o3_t[0, 0].item(), float)

    # Integer dtypes truncate the 1.5 multiplier to 1.
    o3_t = o3d.core.Tensor.ones(
        (2, 3), dtype=o3d.core.Dtype.Int32, device=device) * 1.5
    assert o3_t[0, 0].item() == 1
    assert isinstance(o3_t[0, 0].item(), int)

    o3_t = o3d.core.Tensor.ones(
        (2, 3), dtype=o3d.core.Dtype.Int64, device=device) * 1.5
    assert o3_t[0, 0].item() == 1
    assert isinstance(o3_t[0, 0].item(), int)

    o3_t = o3d.core.Tensor.ones((2, 3),
                                dtype=o3d.core.Dtype.Bool,
                                device=device)
    # Identity check (`is True`) instead of `== True` (flake8 E712).
    assert o3_t[0, 0].item() is True
    assert isinstance(o3_t[0, 0].item(), bool)
@pytest.mark.parametrize("device", list_devices())
def test_save_load(device):
    """Round-trip tensors through .npy files in both directions.

    Covers regular, scalar and zero-sized tensors, the failure modes
    (ragged arrays, Fortran order, unsupported dtypes), and non-contiguous
    numpy inputs which must load as contiguous tensors.
    """
    with tempfile.TemporaryDirectory() as temp_dir:
        file_name = f"{temp_dir}/tensor.npy"
        o3_tensors = [
            o3d.core.Tensor([[1, 2], [3, 4]],
                            dtype=o3d.core.Dtype.Float32,
                            device=device),
            o3d.core.Tensor(3.14, dtype=o3d.core.Dtype.Float32, device=device),
            o3d.core.Tensor.ones((0,),
                                 dtype=o3d.core.Dtype.Float32,
                                 device=device),
            o3d.core.Tensor.ones((0, 0),
                                 dtype=o3d.core.Dtype.Float32,
                                 device=device),
            o3d.core.Tensor.ones((0, 1, 0),
                                 dtype=o3d.core.Dtype.Float32,
                                 device=device)
        ]
        np_tensors = [
            np.array([[1, 2], [3, 4]], dtype=np.float32),
            np.array(3.14, dtype=np.float32),
            np.ones((0,), dtype=np.float32),
            np.ones((0, 0), dtype=np.float32),
            np.ones((0, 1, 0), dtype=np.float32)
        ]
        for o3_t, np_t in zip(o3_tensors, np_tensors):
            # Open3D -> Open3D.
            o3_t.save(file_name)
            o3_t_load = o3d.core.Tensor.load(file_name)
            np.testing.assert_equal(o3_t_load.cpu().numpy(), np_t)

            # Open3D -> Numpy.
            np_t_load = np.load(file_name)
            # Compare against the reference array. (The original compared
            # np_t_load with itself, which could never fail.)
            np.testing.assert_equal(np_t_load, np_t)

            # Numpy -> Open3D.
            np.save(file_name, np_t)
            o3_t_load = o3d.core.Tensor.load(file_name)
            np.testing.assert_equal(o3_t_load.cpu().numpy(), np_t)

        # Ragged tensor: exception.
        np_t = np.array([[1, 2, 3], [4, 5]], dtype=np.dtype(object))
        np.save(file_name, np_t)
        with pytest.raises(RuntimeError):
            o3_t_load = o3d.core.Tensor.load(file_name)

        # Fortran order: exception.
        np_t = np.array([[1, 2, 3], [4, 5, 6]])
        np_t = np.asfortranarray(np_t)
        np.save(file_name, np_t)
        with pytest.raises(RuntimeError):
            o3_t_load = o3d.core.Tensor.load(file_name)

        # Unsupported dtype: exception.
        np_t = np.array([[1, 2, 3], [4, 5, 6]], dtype=np.uint32)
        np.save(file_name, np_t)
        with pytest.raises(RuntimeError):
            o3_t_load = o3d.core.Tensor.load(file_name)

        # Non-contiguous numpy array: loaded tensor must be contiguous.
        np_t = np.arange(24).reshape(2, 3, 4)
        assert np_t.flags['C_CONTIGUOUS']
        np_t = np_t[0:2:1, 0:3:2, 0:4:2]
        assert not np_t.flags['C_CONTIGUOUS']
        np.save(file_name, np_t)
        o3_t_load = o3d.core.Tensor.load(file_name)
        assert o3_t_load.is_contiguous()
        np.testing.assert_equal(o3_t_load.cpu().numpy(), np_t)
| [
"noreply@github.com"
] | bo-rc.noreply@github.com |
23f21683714939b63d13f3a7a5925a64aa3ce38d | 44bad44c25b040b334911b555c573a03bcdad6a6 | /qit/lmap.py | d8a8b67494b975360158efe63cc8b4e51a799aa7 | [] | no_license | Quantum-Machine-Learning-Initiative/Deep-Learning---Information-theory | 5bb37ef073b2dcf89dfeb84adf37856c6c829748 | 073bcf596e81dda5e325a73cd967a49821c27b94 | refs/heads/master | 2021-09-03T03:44:39.897599 | 2018-01-05T08:57:57 | 2018-01-05T08:57:57 | 113,054,810 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,930 | py | # -*- coding: utf-8 -*-
"""
Linear maps (:mod:`qit.lmap`)
=============================
Bounded finite-dimensional linear maps are represented using :class:`lmap` class instances.
In addition to the matrix representing the map, they contain
the dimension vectors of the domain and codomain vector spaces.
All the usual scalar-map and map-map arithmetic operators are
provided, including the exponentiation of maps by integers.
.. currentmodule:: qit.lmap.lmap
Utilities
---------
.. autosummary::
remove_singletons
is_compatible
is_ket
Linear algebra
--------------
.. autosummary::
conj
transpose
ctranspose
trace
norm
reorder
Non-member functions:
.. currentmodule:: qit.lmap
.. autosummary::
tensor
"""
# Ville Bergholm 2008-2011
from __future__ import division, absolute_import, print_function, unicode_literals
from copy import copy, deepcopy
import numpy as np
import scipy.sparse as ssp
from .base import tol
__all__ = ['numstr_to_array', 'array_to_numstr', 'lmap', 'tensor']
def numstr_to_array(s):
    """Utility, converts a numeric string to the corresponding array."""
    zero = ord('0')
    return np.array([ord(ch) - zero for ch in s])
def array_to_numstr(s):
    """Utility, converts an integer array to the corresponding numeric string."""
    zero = ord('0')
    return "".join(chr(x + zero) for x in s)
class lmap(object):
    """Bounded linear maps between tensor products of finite-dimensional Hilbert spaces.

    Contains both the order-2 tensor and the dimensional information.

    TODO Another possible interpretation of lmap would be to
    treat each subsystem as an index, with the subsystems within dim{1} and dim{2}
    corresponding to contravariant and covariant indices, respectively?

    Variables:
    data:  ndarray of tensor data
    dim:   tuple of input and output dimension tuples, big-endian: ((out), (in))

    Base class of state.
    """
    # TODO def __format__(self, format_spec)
    # TODO linalg efficiency: copy vs. view

    def __init__(self, s, dim=None):
        """Construct an lmap.

        s:    ndarray OR valid initializer for ndarray OR lmap instance
              A copy is made unless s is an ndarray.
        dim:  2-tuple containing the output and input subsystem dimensions
              stored in tuples:  dim == ((out), (in)).
              If dim, (out) or (in) is None, the corresponding dimensions
              are inferred from s.

        calling syntax                     resulting dim
        ==============                     =============
        lmap(rand(a))                      ((a,), (1,))      1D array default: ket vector
        lmap(rand(a), ((1,), None))        ((1,), (a,))      bra vector given as a 1D array
        lmap(rand(a,b))                    ((a,), (b,))      2D array, all dims inferred
        lmap(rand(4,b), ((2, 2), None))    ((2, 2), (b,))    2D array, output: two qubits
        lmap(rand(a,6), (None, (3, 2)))    ((a,), (3, 2))    2D array, input: qutrit+qubit
        lmap(rand(6,6), ((3, 2), (2, 3)))  ((3, 2), (2, 3))  2D array, all dims given

        lmap(A)             (A is an lmap) copy constructor
        lmap(A, dim)        (A is an lmap) copy constructor, redefine the dimensions
        """
        # initialize the ndarray part
        if isinstance(s, lmap):
            # copy constructor
            self.data = deepcopy(s.data)
            defdim = s.dim  # copy the dimensions too, unless redefined
        else:
            if ssp.isspmatrix(s):
                # TODO FIXME handle sparse matrices properly
                # TODO lmap constructor, mul/add, tensor funcs must be able to handle both dense and sparse arrays.
                s = s.todense()
            # valid array initializer
            self.data = np.asarray(s)  # NOTE that if s is an ndarray it is not copied here

            # into a 2d array (reshape instead of in-place resize: resize()
            # raises ValueError on modern numpy when the array is referenced
            # elsewhere, e.g. when the caller passed in their own ndarray)
            if self.data.ndim == 0:
                # scalar
                self.data = self.data.reshape((1, 1))
            elif self.data.ndim == 1:
                # vector, ket by default
                self.data = self.data.reshape((self.data.size, 1))
            elif self.data.ndim > 2:
                raise ValueError('Array dimension must be <= 2.')
            # now self.data.ndim == 2, always

            # is it a bra given as a 1D array?
            if dim and dim[0] == (1,):
                self.data = self.data.reshape((1, self.data.size))

            # infer default dims from data (wrap them in tuples!)
            defdim = tuple([(k,) for k in self.data.shape])

        # set the dimensions
        if dim is None:
            # infer both dimensions from s
            dim = (None, None)

        self.dim = []
        for k in range(len(dim)):
            if dim[k] is None:
                # not specified, use default
                self.dim.append(defdim[k])
            else:
                self.dim.append(tuple(dim[k]))
        self.dim = tuple(self.dim)

        # check dimensions
        if self.data.shape != tuple(map(np.prod, self.dim)):
            raise ValueError('Dimensions of the array do not match the combined dimensions of the subsystems.')

    def __repr__(self):
        """Display the lmap in a neat format."""
        out = ''
        # is it a vector? (a map with a singleton domain or codomain dimension)
        sh = self.data.shape
        if 1 in sh:
            # vector
            # ket or bra?
            if sh[1] == 1:
                # let scalars be kets too
                dim = self.dim[0]
                is_ket = True
            else:
                dim = self.dim[1]
                is_ket = False

            # loop over all vector elements
            printed = 0
            d = np.prod(dim)
            for ind in range(d):
                # TODO with sparse arrays we could do better
                # sanity check, do not display lmaps with hundreds of terms
                if ind >= 128 or printed >= 20:
                    out += ' ...'
                    break

                temp = self.data.flat[ind]
                # make sure there is something to print
                if abs(temp) < tol:
                    continue

                printed += 1
                if abs(temp.imag) < tol:
                    # just the real part
                    out += ' {0:+.4g}'.format(temp.real)
                elif abs(temp.real) < tol:
                    # just the imaginary part
                    out += ' {0:+.4g}j'.format(temp.imag)
                else:
                    # both
                    out += ' +({0:.4g}{1:+.4g}j)'.format(temp.real, temp.imag)  #' +' + str(temp)

                # ket or bra symbol
                temp = array_to_numstr(np.unravel_index(ind, dim))
                if is_ket:
                    out += ' |' + temp + '>'
                else:
                    out += ' <' + temp + '|'
        else:
            # matrix
            out = self.data.__repr__()

        out += '\ndim: ' + str(self.dim[0]) + ' <- ' + str(self.dim[1])
        return out

    # utilities

    def _inplacer(self, inplace):
        """Utility for implementing inplace operations.

        Functions using this should begin with s = self._inplacer(inplace)
        and end with return s
        """
        if inplace:
            return self
        else:
            return deepcopy(self)

    def remove_singletons(self):
        """Eliminate unnecessary singleton dimensions.

        NOTE: changes the object itself!
        """
        dd = []
        for d in self.dim:
            temp = tuple([x for x in d if x > 1])
            if len(temp) == 0:
                temp = (1,)
            dd.append(temp)
        self.dim = tuple(dd)
        return

    def is_compatible(self, t):
        """True iff the lmaps have equal dimensions and can thus be added."""
        if not isinstance(t, lmap):
            raise TypeError('t is not an lmap.')
        return self.dim == t.dim

    def is_ket(self):
        """True if the lmap is a ket."""
        return self.data.shape[1] == 1

    # linear algebra

    def conj(self):
        """Complex conjugate."""
        s = copy(self)  # preserves the type, important for subclasses
        s.data = np.conj(self.data)  # copy
        return s

    def transpose(self):
        """Transpose."""
        s = copy(self)
        s.dim = (s.dim[1], s.dim[0])  # swap dims
        s.data = self.data.transpose().copy()
        return s

    def ctranspose(self):
        """Hermitian conjugate."""
        s = copy(self)
        s.dim = (s.dim[1], s.dim[0])  # swap dims
        s.data = np.conj(self.data).transpose()  # view to a copy
        return s

    def __mul__(self, t):
        """Multiplication of lmaps by lmaps and scalars."""
        # must be able to handle sparse data
        if isinstance(t, lmap):
            if self.dim[1] != t.dim[0]:
                raise ValueError('The dimensions do not match.')
            else:
                s = copy(self)
                s.dim = (self.dim[0], t.dim[1])
                s.data = self.data.dot(t.data)
        else:
            # t is a scalar
            s = copy(self)
            s.data = self.data * t
        return s

    def __rmul__(self, t):
        """Multiplication of lmaps by scalars, reverse."""
        # scalars commute, lmaps already handled by __mul__
        return self.__mul__(t)

    def __div__(self, t):
        """Division of lmaps by scalars from the right."""
        s = copy(self)
        s.data = self.data / t
        return s

    def __truediv__(self, t):
        """Division of lmaps by scalars from the right."""
        s = copy(self)
        s.data = self.data / t
        return s

    def __add__(self, t):
        """Addition of lmaps."""
        if not self.is_compatible(t):
            raise ValueError('The lmaps are not compatible.')
        s = copy(self)
        s.data = self.data + t.data
        return s

    def __sub__(self, t):
        """Subtraction of lmaps."""
        if not self.is_compatible(t):
            raise ValueError('The lmaps are not compatible.')
        s = copy(self)
        s.data = self.data - t.data
        return s

    def __pow__(self, n):
        """Exponentiation of lmaps by integer scalars."""
        if self.dim[0] != self.dim[1]:
            raise ValueError('The dimensions do not match.')
        s = copy(self)
        s.data = np.linalg.matrix_power(self.data, n)
        return s

    def __imul__(self, t):
        """In-place multiplication of lmaps by scalars from the right."""
        self.data *= t
        return self

    def __itruediv__(self, t):
        """In-place division of lmaps by scalars from the right."""
        self.data /= t
        return self

    def __iadd__(self, t):
        """In-place addition of lmaps."""
        if not self.is_compatible(t):
            raise ValueError('The lmaps are not compatible.')
        self.data += t.data
        return self

    def __isub__(self, t):
        """In-place subtraction of lmaps."""
        if not self.is_compatible(t):
            raise ValueError('The lmaps are not compatible.')
        self.data -= t.data
        return self

    def trace(self):
        """Trace of the lmap.

        The trace is only properly defined if self.dim[0] == self.dim[1].
        """
        if not np.array_equal(self.dim[0], self.dim[1]):
            raise ValueError('Trace not defined for non-endomorphisms.')
        return np.trace(self.data)

    def norm(self):
        """Matrix norm of the lmap."""
        return np.linalg.norm(self.data)

    # subsystem ordering

    def reorder(self, perm, inplace=False):
        """Change the relative order of the input and/or output subsystems.

        Returns a copy of the lmap with permuted subsystem order.

        A permutation can be either None (do nothing), a pair (a, b) of subsystems to be swapped,
        or a tuple containing a full permutation of the subsystems.
        Two subsystems to be swapped must be in decreasing order so as not
        to mistake the full identity permutation (0, 1) for a swap.

        reorder((None, (2, 1, 0)))   ignore first index, reverse the order of subsystems in the second
        reorder(((5, 2), None))      swap the subsystems 2 and 5 in the first index, ignore the second

        NOTE: The full permutations are interpreted in the same sense as
        numpy.transpose() understands them, i.e. the permutation
        tuple is the new ordering of the old subsystem indices.
        This is the inverse of the mathematically more common "one-line" notation.
        """
        s = self._inplacer(inplace)

        orig_d = s.data.shape  # original dimensions
        total_d = []
        total_perm = []
        last_used_index = 0
        newdim = list(s.dim)

        # loop over indices
        for k, this_perm in enumerate(perm):
            # avoid a subtle problem with the input syntax, (0, 1) must not be understood as swap!
            if this_perm is not None and tuple(this_perm) == (0, 1):
                this_perm = None

            # requested permutation for this index
            if this_perm is None:
                # no change
                # let the dimensions vector be, lump all subsystems in this index into one
                this_dim = (orig_d[k],)
                this_perm = np.array([0])
                this_n = 1
            else:
                this_dim = np.array(s.dim[k])   # subsystem dims
                this_perm = np.array(this_perm)  # requested permutation for this index
                this_n = len(this_dim)          # number of subsystems

                temp = np.arange(this_n)  # identity permutation
                if len(this_perm) == 2:
                    # swap two subsystems
                    temp[this_perm] = this_perm[::-1]
                    this_perm = temp
                else:
                    # full permutation
                    if len(set(temp) ^ set(this_perm)) != 0:
                        raise ValueError('Invalid permutation.')

                # reorder the dimensions vector
                newdim[k] = tuple(this_dim[this_perm])

            # big-endian ordering
            total_d.extend(this_dim)
            total_perm.extend(last_used_index + this_perm)
            last_used_index += this_n

        # tensor into another tensor which has one index per subsystem, permute dimensions, back into a tensor with the original number of indices
        s.dim = tuple(newdim)
        s.data = s.data.reshape(total_d).transpose(total_perm).reshape(orig_d)
        return s
def tensor(*arg):
"""Tensor product of lmaps."""
data = 1
dout = []
din = []
for k in arg:
# concatenate dimensions
dout += k.dim[0]
din += k.dim[1]
# kronecker product of the data
data = np.kron(data, k.data)
s = lmap(data, (tuple(dout), tuple(din)))
return s
| [
"vitomichele.leli@skoltech.ru"
] | vitomichele.leli@skoltech.ru |
6f165bd3430da5e23e11c2ec05a94fc5c3b910fc | d271deb821403207688d485d642f1dc8faf3502d | /aliens/settings.py | f434c0657354486f76f3e864518fe42904888fd5 | [] | no_license | Arturo0911/py_lab | 1e003ff4e0ebe8c7cd03f3580a1f301ab732dbe4 | 8d79f1218f9a240d6e75edf5092231e6cdc653cc | refs/heads/master | 2022-11-08T13:03:07.391121 | 2020-06-30T22:30:00 | 2020-06-30T22:30:00 | 276,221,886 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 145 | py | class settings():
def __init__(self):
self.screen_width = 800
self.screen_height = 500
self.bg_color = (230,230,230) | [
"anegreiross@ooutlook.com"
] | anegreiross@ooutlook.com |
a45723ce8923e306d911a695fc641f343edcae0f | 8645e3c453a860c0afe6d64e1efaa6dbc19fa131 | /APIs/Heap/Heap.py | 34553c8d660b5968e92bb8e72ee938cb5866e3e3 | [] | no_license | vaiarrm/InterviewCodes | 614a61143e2d7498996f46cbf898252dc92538ef | 3b0ef3964e79b3f3c85d3cbeb9abb518f2c90f44 | refs/heads/master | 2021-01-12T14:06:40.306789 | 2016-11-04T17:13:05 | 2016-11-04T17:13:05 | 69,568,882 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,867 | py | # -*- coding: utf-8 -*-
"""
Created on Tue Nov 1 11:03:13 2016
@author: vaibhavsharma
"""
from collections import deque
class BinHeap(object):
def __init__(self):
self.heapLst = deque()
self.heapLst.append(0)
self.size = 0
def insert(self,item):
self.heapLst.append(item)
self.size += 1
self.swim(len(self.heapLst)-1)
def swim(self,i):
while i // 2 > 0:
if self.heapLst[i] < self.heapLst[i//2]:
self.heapLst[i],self.heapLst[i//2] = self.heapLst[i//2],self.heapLst[i]
i = i // 2
else:
break
def sink(self,i):
#print self.heapLst, i,self.size,"in sink start"
while i*2 <= self.size:
if i*2 + 1 > self.size:
index = 2*i
elif self.heapLst[2*i] < self.heapLst[2*i+1]:
index = 2*i
else:
index = 2 * i+1
#print self.heapLst, self.size,index,"in sink"
if self.heapLst[i] > self.heapLst[index]:
self.heapLst[i],self.heapLst[index] = self.heapLst[index],self.heapLst[i]
i = index
#print self.heapLst, i,"in sink"
def minVal(self):
if self.size == 0:
raise ValueError()
self.heapLst[1],self.heapLst[len(self.heapLst)-1] = self.heapLst[len(self.heapLst)-1],self.heapLst[1]
toRet = self.heapLst.pop()
self.size -= 1
self.sink(1)
return toRet
def __str__(self):
return str(self.heapLst)
def __repr__(self):
return str(self.heapLst)
b = BinHeap()
b.insert(5)
print b
b.insert(10)
print b
b.insert(15)
print b
b.insert(1)
print b
print b.minVal()
#print b
print b.minVal()
#print b
print b.minVal()
#print b
print b.minVal()
#print b
| [
"vaibhav.s.sharma@outlook.com"
] | vaibhav.s.sharma@outlook.com |
d1ea3dfd849246836a3e8246235627cde6c0ee87 | 86cc17a69213569af670faed7ad531cb599b960d | /hunter24.py | 730ac3fef8a685c5e82de2b8e09948cbbdacc764 | [] | no_license | LakshmikanthRavi/guvi-lux | ed1c389e27a9ec62e0fd75c140322563f68d311a | 5c29f73903aa9adb6484c76103edf18ac165259e | refs/heads/master | 2020-04-15T05:07:19.743874 | 2019-08-13T08:53:00 | 2019-08-13T08:53:00 | 164,409,489 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 231 | py | k,l=map(int,input().split())
g=list(map(int,input().split()))
fact=1
c=[]
for i in range(0,k-1):
for j in range(i+1,k):
if g[i]+g[j]==l:
c=1
break
if c==1:
print("YES")
else:
print("NO")
| [
"noreply@github.com"
] | LakshmikanthRavi.noreply@github.com |
6ddcfad6a35f76a927cb6e83b266b9c704622adc | b43e6ce94c3824db7cdf3df2fb286ee8c638b4d2 | /install.py | 6b56d05652f1286b646c85c4a3f8635e1db31e61 | [] | no_license | anyonecancode/dotfiles | 6b60f0c4523e5805a3aabe4fc69e083cbf22df3a | aed41b4a7faf8babc5c695a0166f088fd9a8f98f | refs/heads/master | 2020-03-28T19:01:51.102142 | 2017-01-23T20:24:11 | 2017-01-23T20:24:11 | 5,887,347 | 0 | 0 | null | 2017-01-23T20:24:12 | 2012-09-20T14:00:44 | Shell | UTF-8 | Python | false | false | 1,689 | py | #!/usr/bin/env python
from os import mkdir
from os import rename
from os import symlink
from os.path import expanduser
from os.path import islink
from os.path import exists
VERSION = '0.1.0'
DESC = 'Creates symlinks from the home directory to the dotfiles repo. TODO: ZSH stuff'
FORMAT = '%(asctime)s %(levelname)s %(message)s'
HOME = expanduser('~')
DIR = 'dotfiles'
BACKUPDIR = 'dotfiles_old'
files = ['vimrc', 'vim', 'zshrc', 'oh-my-zsh', 'irssi', 'tmux.conf', 'lynx.cfg', 'ackrc']
def main():
makeBackupDir()
filesToBackup = {}
filesToLink = {}
for f in files:
filePath = HOME + '/.' + f
if not exists(filePath):
filesToLink[f] = filePath
else:
if not islink(filePath):
filesToLink[f] = filePath
backupCurrent(f, filePath)
if (len(filesToLink) > 0):
fileList = ', .'.join(filesToLink)
print 'These files are not yet symlinked to your dotfiles: .' + fileList
print 'Moving them to ' + HOME + '/' + BACKUPDIR
print 'Creating symlinks to version in your dotfiles repo'
for key in filesToLink:
createSymlink(key, filesToLink[key])
print 'Done!'
else:
print 'All dotfiles already symlinked. Nothing to do.'
def makeBackupDir():
backup = HOME + '/' + BACKUPDIR
if not exists(backup):
print 'Creating backup directory ' + backup
mkdir(backup)
def backupCurrent(fileName, filePath):
dest = HOME + '/' + BACKUPDIR + '/.' + fileName
rename(filePath, dest)
def createSymlink(fileName, filePath):
src = HOME + '/' + DIR + '/' + fileName
dest = HOME + '/.' + fileName
symlink(src, dest)
if __name__ == '__main__':
main()
| [
"pschwei1@gmail.com"
] | pschwei1@gmail.com |
d7d6fd270e4c91f332031efd49b11ed83c277491 | 80bcb71937fdb99bd64bc554efaa65997b37ce33 | /articles/urls/base_urls.py | 88c9d2d8f33e59b3110216f3873f717f0508b9d7 | [
"MIT"
] | permissive | audiolion/tango-articles | b7454f57b7a2d1e3eec1c9aee71d9e37394cf38d | 79d574600e870e938f080eef8b028c038814cb25 | refs/heads/master | 2021-01-17T22:00:05.862668 | 2016-06-29T21:02:26 | 2016-06-29T21:02:26 | 62,227,072 | 0 | 0 | null | 2016-06-29T13:15:16 | 2016-06-29T13:15:16 | null | UTF-8 | Python | false | false | 366 | py |
from django.conf.urls import patterns, url
from django.views.generic import ListView
from articles.models import Destination
urlpatterns = patterns(
'',
url(
regex=r'^$',
view=ListView.as_view(
queryset=Destination.objects.all(),
template_name='articles/index.html'
),
name="article_index"
)
)
| [
"tim.baxter@cerner.com"
] | tim.baxter@cerner.com |
d61b8e01fe0e146724b52c034d77d2e5169c2257 | 268c6ea0b1e0a34547de6958fbc7af78cf6efeaf | /mosaicTest.py | ef090f993a8bd9adb14cb09fefc9a9c91931bf8f | [] | no_license | rpeng/makeup-hackathon | e3afc3e6f70eb3ce78399f0c028b23779f5627e1 | 1526097479ea9d4b168ba53adb3c04a5a0c7ae52 | refs/heads/master | 2021-01-18T14:22:22.780577 | 2014-05-20T04:17:03 | 2014-05-20T04:17:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,544 | py | from image_processing import process_image
from webmakeup import get_image_from_url
import glob
import os
#RefImagePath = "testing/image.jpeg"
RefImagePath = "testing/ericProf.jpg"
CpntImageDir = "testing/ericPhotos"
FileFormats = ["jpg", "jpeg", "gif", "bmp", "png"]
MosaicFilename = "eric"
def LoadRefImage():
return open(RefImagePath, "rb").read()
def LoadCpntImages():
cpntImageStreams = []
filenames = []
for fmt in FileFormats:
filenames.extend(glob.glob(CpntImageDir+'/*.'+fmt))
for file in filenames:
cpntImageStreams.append(open(file, "rb").read())
return cpntImageStreams
def Test():
refImageStream = LoadRefImage()
cpntImageStreams = LoadCpntImages()
mosaic = process_image(refImageStream, cpntImageStreams)
outFile = open(MosaicFilename+".jpg", "wb")
outFile.write(mosaic)
outFile.close()
def FacebookTest():
refImageStream = get_image_from_url(r'http://blogs-images.forbes.com/jonbruner/files/2011/07/facebook_logo.jpg')
cpntImageStreams = LoadCpntImages()
mosaic = process_image(refImageStream, cpntImageStreams)
outFile = open("facebook.jpg", "wb")
outFile.write(mosaic)
outFile.close()
def HackTest():
refImageStream = get_image_from_url(r'https://fbcdn-profile-a.akamaihd.net/hprofile-ak-snc6/211085_364348883660343_553562280_n.jpg')
cpntImageStreams = LoadCpntImages()
mosaic = process_image(refImageStream, cpntImageStreams)
outFile = open("hack.jpg", "wb")
outFile.write(mosaic)
outFile.close()
| [
"contact@eric-langlois.ca"
] | contact@eric-langlois.ca |
44074bfcef80010bf2d8c28e42fb99818a225bf7 | 72541f87379e3c69347abbebc626b1d2941f1604 | /RFID/Store/migrations/0001_initial.py | 09e984d316a6f256c616728aba134f169f7b86cf | [] | no_license | Siwadol0408/RFID-new- | 2c32473ecf65bb1d0ad6fc4963b481fc9914c124 | 7eecf1b8299cdc122dc64efbbf6a125badf8d822 | refs/heads/main | 2023-07-18T04:10:20.205418 | 2021-09-01T02:04:29 | 2021-09-01T02:04:29 | 401,900,299 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 791 | py | # Generated by Django 3.2.6 on 2021-08-21 15:56
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Object',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('tag_id', models.IntegerField(default=0)),
('object_name', models.CharField(max_length=200)),
('add_date', models.DateTimeField()),
('status', models.CharField(choices=[('อยู่', 'อยู่'), ('ไม่อยู่', 'ไม่อยู่')], default='อยู่', max_length=20)),
],
),
]
| [
"earthzamag@gmaail.com"
] | earthzamag@gmaail.com |
8057e4c12af3e4985c2ddbfb1319e1c914d49d16 | bae08323817c364e9fd1731ed2890be861e5aab3 | /evolutionary.py | a26fa70a97a43cb2cfb6f44b9c6d9d2881470f31 | [] | no_license | dvdalilue/qap_optimizations | 1bf1fbb782f09b6a63bf78c659ad31abbd2624ee | 525cc8c9a560f89d451835c62bec6f63678e7e8a | refs/heads/master | 2020-03-19T02:39:02.354676 | 2018-06-06T23:38:59 | 2018-06-06T23:38:59 | 135,648,038 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,999 | py | import random
import operator
import local_search as local
from solution import Solution
a_random = random.SystemRandom()
def crossover(parent_1, parent_2):
n = parent_1.n # == parent_2.n
offspring_1 = parent_1.copy()
offspring_2 = parent_2.copy()
unequal_facilities = [[],[]]
for i in xrange(0, n):
if offspring_1.permutation[i] != offspring_2.permutation[i]:
unequal_facilities[0].append(i)
unequal_facilities[1].append(i)
n_unequal = len(unequal_facilities[0]) # == len(unequal_facilities[1])
unequal_facilities[0] = a_random.sample(unequal_facilities[0], n_unequal)
unequal_facilities[1] = a_random.sample(unequal_facilities[1], n_unequal)
for i in xrange(0, n_unequal-1, 2):
# Assign random facilities to offspring 1
offspring_1.exchangeFacilities(
unequal_facilities[0][i],
unequal_facilities[0][i+1])
# Assign random facilities to offspring 2
offspring_2.exchangeFacilities(
unequal_facilities[1][i],
unequal_facilities[1][i+1])
return (offspring_1, offspring_2)
def crossover_mutant(parent_1, parent_2):
n = parent_1.n # == parent_2.n
offspring_1 = parent_1.copy()
offspring_2 = parent_2.copy()
mutant = parent_1.copy()
unequal_facilities = [[],[],[]]
for i in xrange(0, n):
if offspring_1.permutation[i] != offspring_2.permutation[i]:
unequal_facilities[0].append(i)
unequal_facilities[1].append(i)
n_unequal = len(unequal_facilities[0]) # == len(unequal_facilities[1])
unequal_facilities[0] = a_random.sample(unequal_facilities[0], n_unequal)
unequal_facilities[1] = a_random.sample(unequal_facilities[1], n_unequal)
unequal_facilities[2] = a_random.sample(unequal_facilities[1], n_unequal)
for i in xrange(0, n_unequal-1, 2):
# Assign random facilities to offspring 1
offspring_1.exchangeFacilities(
unequal_facilities[0][i],
unequal_facilities[0][i+1])
# Assign random facilities to offspring 2
offspring_2.exchangeFacilities(
unequal_facilities[1][i],
unequal_facilities[1][i+1])
# Assign random facilities to mutant
mutant.exchangeFacilities(
unequal_facilities[2][i],
unequal_facilities[2][i+1])
return (offspring_1, offspring_2, mutant)
def genetic(parents, generations, local_s):
n = len(parents)
gen_number = 0
while gen_number < generations:
new_generation = []
for i in xrange(0, n-1):
(of1, of2, mut) = crossover_mutant(parents[i],parents[i+1])
new_generation.append(of1)
new_generation.append(of2)
new_generation.append(mut)
map(local_s, new_generation)
new_generation.sort(key=operator.attrgetter('cost'))
parents = a_random.sample(new_generation[:n], n)
gen_number += 1
return parents | [
"dvdalilue@gmail.com"
] | dvdalilue@gmail.com |
a6bea6c892ecefd888c3dd10a0502aad8836ba84 | abacbf9798f089cd43fd50c2d577de50cca806d8 | /venv/Lib/site-packages/lux/extensions/auth/rest/user.py | f50a5de48ae9f86ae1d30a2a5ec70192bcb2d3fe | [] | no_license | Sarveshr49/ProInternSML | f2bfd82905dd185d82830d4758d69ee2b23f71fb | 2ac09e31ebe54dbecd46935818b089a4b8428354 | refs/heads/master | 2023-08-11T17:36:16.387236 | 2021-10-16T18:23:04 | 2021-10-16T18:23:04 | 373,503,730 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,292 | py | from pulsar import Http401, MethodNotAllowed
from lux.core import route, GET_HEAD
from lux.extensions.rest import RestRouter, RestField, user_permissions
from lux.forms import get_form_class
from . import RestModel
full_name = RestField(
'full_name',
displayName='Name',
field=('first_name', 'last_name', 'username', 'email')
)
class UserModel(RestModel):
authenticated = False
@classmethod
def create(cls, exclude=None, fields=None,
id_field='username',
repr_field='full_name',
authenticated=False,
**kw):
exclude = exclude or ('password',)
fields = list(fields or ())
fields.extend((
full_name,
RestField('groups', model='groups')
))
model = cls(
'user',
id_field=id_field,
repr_field=repr_field,
exclude=exclude,
fields=fields,
**kw
)
model.authenticated = authenticated
return model
def create_model(self, request, instance, data, session=None):
'''Override create model so that it calls the backend method
'''
return request.cache.auth_backend.create_user(request, **data)
def get_instance(self, request, *args, **kwargs):
"""When authenticated is True return the current user or
raise Http401
"""
if self.authenticated:
user = request.cache.user
if not user.is_authenticated():
raise Http401('Token')
return self.instance(user)
return super().get_instance(request, *args, **kwargs)
class UserRest(RestRouter):
"""Rest view for the authenticated user
Read, Updates and other update-type operations only
"""
model = UserModel.create(
url='user',
updateform='user-profile',
hidden=('id', 'oauth'),
exclude=('password', 'type'),
authenticated=True
)
def get(self, request):
"""Get the authenticated user
"""
user = self.model.get_instance(request)
data = self.model.tojson(request, user)
return self.json_response(request, data)
def patch(self, request):
"""Update authenticated user and/or user profile
"""
user = self.model.get_instance(request)
model = self.model
form_class = get_form_class(request, model.updateform)
if not form_class:
raise MethodNotAllowed
form = form_class(request, data=request.body_data())
if form.is_valid(exclude_missing=True):
user = model.update_model(request, user, form.cleaned_data)
data = model.tojson(request, user)
else:
data = form.tojson()
return self.json_response(request, data)
@route('permissions', method=['get', 'head', 'options'])
def get_permissions(self, request):
"""Check permissions the authenticated user has for a
given action.
"""
if request.method == 'OPTIONS':
request.app.fire('on_preflight', request, methods=GET_HEAD)
return request.response
permissions = user_permissions(request)
return self.json_response(request, permissions)
| [
"sarveshragade@gmail.com"
] | sarveshragade@gmail.com |
7fe2ce4f60cc0a49b416d9c0ba9f68755a39b821 | c20943fd460c1017fd9f4e291ab1d231d651ca43 | /venv/bin/pyrsa-sign | 3f64c6cde995f8ff26e208822d368f6837d708eb | [] | no_license | Free-apples/VisaCheckerWebsite | f107f1f1ce89e969ac18d32d13d2203d214d33c3 | 831f79f89dd392bd0c0c25f46b511c33774dae49 | refs/heads/main | 2023-02-21T22:40:57.648820 | 2021-01-26T21:59:45 | 2021-01-26T21:59:45 | 332,868,420 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 257 | #!/Users/meganfreedman/PycharmProjects/pythonProject2/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from rsa.cli import sign
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(sign())
| [
"megan@freedman.co.nz"
] | megan@freedman.co.nz | |
1a7f5ba4ed1e47739729d838023ab286fd14b45f | 7f66a90e3034af7f1ff1b8e777e90912e4e8f30e | /ShortMyUrl/main/admin.py | b963a5f8417f6ea8e26e20ceedf32f67147230d1 | [] | no_license | Beketx/URLShortenerDRF | 73b62c8a6491a70857c56f433e880b77f6c23528 | db83218b65aca3a20d70b300c138aad87101eb0a | refs/heads/master | 2023-08-18T15:53:33.055281 | 2020-05-26T09:52:47 | 2020-05-26T09:52:47 | 260,903,383 | 0 | 0 | null | 2021-09-22T18:57:47 | 2020-05-03T11:59:38 | Python | UTF-8 | Python | false | false | 102 | py | from django.contrib import admin
from main.models import Model_Short
admin.site.register(Model_Short) | [
"beketsk@gmail.com"
] | beketsk@gmail.com |
dfd40b1a094a889a8b8b43dba66b85694fe27ee6 | 6519de5c5b92c55270fcd71262bc4187b9d7cfb5 | /hashblast.py | ac2e58513df51f6d441a79e9779f015d0e538747 | [] | no_license | kensorrells/HashBlast | 6801fa5611049e387ce7cdc2107f61e346d685b4 | 2da6e94eaf5040cc633e8d615ce8954d650eb859 | refs/heads/master | 2020-06-17T11:18:12.046868 | 2019-07-17T12:34:16 | 2019-07-17T12:34:16 | 195,908,411 | 0 | 0 | null | 2019-07-17T12:34:17 | 2019-07-09T01:16:59 | Python | UTF-8 | Python | false | false | 6,763 | py | #python3
import hashlib
#Character list for decryption
chrList = ['A','B','C','D','E','F','G','H','I','J','K','L','M','N',
'O','P','Q','R','S','T','U','V','W','X','Y','Z','a','b',
'c','d','e','f','g','h','i','j','k','l','m','n','o','p',
'q','r','s','t','u','v','w','x','y','z',' ','1','2','3',
'4','5','6','7','8','9','0']
#Variables for user input
prgFunction = ''
encryptType = ''
inputMsg = ''
#Variables for encryption
currentLetterNum = 0
currentLetter = ''
#Variable for decryption
alphaCounter = 0
conversionHolder = 0
block = ''
i = 0
x = 0
currentAlpha = 0
#Variable for output
finalMessage = ''
print('Would you like to encrypt or decrypt?')
prgFunction = input(str(':'))
if prgFunction == 'encrypt':
print('What form of encryption would you like to do?')
encryptType = input(str(':'))
print('What is the message?')
inputMsg = input(str(':'))
while currentLetterNum +1 <= len(inputMsg):
if encryptType =='md5':
currentLetter = inputMsg[currentLetterNum]
currentLetter = hashlib.md5(currentLetter.encode()).hexdigest()
elif encryptType == 'sha1':
currentLetter = inputMsg[currentLetterNum]
currentLetter = hashlib.sha1(currentLetter.encode()).hexdigest()
elif encryptType == 'sha224':
currentLetter = inputMsg[currentLetterNum]
currentLetter = hashlib.sha224(currentLetter.encode()).hexdigest()
elif encryptType == 'sha256':
currentLetter = inputMsg[currentLetterNum]
currentLetter = hashlib.sha256(currentLetter.encode()).hexdigest()
elif encryptType == 'sha384':
currentLetter = inputMsg[currentLetterNum]
currentLetter = hashlib.sha384(currentLetter.encode()).hexdigest()
elif encryptType == 'sha512':
currentLetter = inputMsg[currentLetterNum]
currentLetter = hashlib.sha512(currentLetter.encode()).hexdigest()
else:
print('ERROR: Invalid Type')
break
finalMessage += currentLetter
currentLetterNum += 1
elif prgFunction == 'decrypt':
print('What format is your message encrypted in?')
encryptType = input(str(':'))
print('What is the secret code?')
inputMsg = input(str(':'))
while x <= len(inputMsg):
if encryptType == 'md5':
while i+x <= x+31:
currentAlpha = x + i
block += inputMsg[currentAlpha]
i += 1
i = 0
while alphaCounter <= 62:
conversionHolder = chrList[alphaCounter]
conversionHolder = hashlib.md5(conversionHolder.encode()).hexdigest()
if block == conversionHolder:
finalMessage += chrList[alphaCounter]
alphaCounter = 100
else:
alphaCounter += 1
alphaCounter = 0
block = ''
x += 32
if x >= len(inputMsg):
print(finalMessage)
elif encryptType == 'sha1':
while i+x <= x+39:
currentAlpha = x + i
block += inputMsg[currentAlpha]
i += 1
i = 0
while alphaCounter <= 62:
conversionHolder = chrList[alphaCounter]
conversionHolder = hashlib.sha1(conversionHolder.encode()).hexdigest()
if block == conversionHolder:
finalMessage += chrList[alphaCounter]
alphaCounter = 100
else:
alphaCounter += 1
alphaCounter = 0
block = ''
x += 40
if x >= len(inputMsg):
print(finalMessage)
elif encryptType == 'sha224':
while i+x <= x+55:
currentAlpha = x + i
block += inputMsg[currentAlpha]
i += 1
i = 0
while alphaCounter <= 62:
conversionHolder = chrList[alphaCounter]
conversionHolder = hashlib.sha224(conversionHolder.encode()).hexdigest()
if block == conversionHolder:
finalMessage += chrList[alphaCounter]
alphaCounter = 100
else:
alphaCounter += 1
alphaCounter = 0
block = ''
x += 56
if x >= len(inputMsg):
print(finalMessage)
elif encryptType == 'sha256':
while i+x <= x+63:
currentAlpha = x + i
block += inputMsg[currentAlpha]
i += 1
i = 0
while alphaCounter <= 62:
conversionHolder = chrList[alphaCounter]
conversionHolder = hashlib.sha256(conversionHolder.encode()).hexdigest()
if block == conversionHolder:
finalMessage += chrList[alphaCounter]
alphaCounter = 100
else:
alphaCounter += 1
alphaCounter = 0
block = ''
x += 64
if x >= len(inputMsg):
print(finalMessage)
elif encryptType == 'sha384':
while i+x <= x+95:
currentAlpha = x + i
block += inputMsg[currentAlpha]
i += 1
i = 0
while alphaCounter <= 62:
conversionHolder = chrList[alphaCounter]
conversionHolder = hashlib.sha224(conversionHolder.encode()).hexdigest()
if block == conversionHolder:
finalMessage += chrList[alphaCounter]
alphaCounter = 100
else:
alphaCounter += 1
alphaCounter = 0
block = ''
x += 96
if x >= len(inputMsg):
print(finalMessage)
elif encryptType == 'sha512':
while i+x <= x+127:
currentAlpha = x + i
block += inputMsg[currentAlpha]
i += 1
i = 0
while alphaCounter <= 62:
conversionHolder = chrList[alphaCounter]
conversionHolder = hashlib.sha256(conversionHolder.encode()).hexdigest()
if block == conversionHolder:
finalMessage += chrList[alphaCounter]
alphaCounter = 100
else:
alphaCounter += 1
alphaCounter = 0
block = ''
x += 128
if x >= len(inputMsg):
print(finalMessage)
else:
print('ERROR: Invalid program function')
print(finalMessage)
| [
"noreply@github.com"
] | kensorrells.noreply@github.com |
6ca5de7b034be586122402ff4c89be7c2acd2e45 | 152476469e822fbad4aae1698529caf8d401cb33 | /single-spiking/test.py | 1200a6a4ce43b2b47fcc4f8d3cd9b96f93ee042d | [] | no_license | Fulin-Wei/snn-seizure-prediction | 0c2a01d66bf3708a2b6514242297a69b709189ff | d031f1d9c5938674d4bb3aeef341f6fc76fc4b73 | refs/heads/master | 2022-02-07T07:38:08.119336 | 2019-07-19T18:36:33 | 2019-07-19T18:36:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,944 | py | import numpy as np
import sys
from brian2 import *
e_const = 2.718282
def TranslateJsonFile(filename, mode = 'data'):
rfile = open(filename,'r')
rfiledataset = eval(rfile.read())
rfile.close()
ansarr = []
loopnum = len(rfiledataset[0])
for i in range(loopnum):
arr = []
for j in range(4):
arr += rfiledataset[j][i]
ansarr.append(arr)
ansarr = np.array(ansarr)
return ansarr
start_scope()
tau = 20*ms
weight_file = open('weight.txt', 'r')
testing_file = 'traditional-testing.json'
dataset = TranslateJsonFile(testing_file, 'data')
num, input_num = dataset.shape
ansset = [0 for i in range(num//2)] + [1 for i in range(num - num//2)]
epoch = num
t_run = 100*ms
t_run_test = 100*ms
threshold_alpha = 2.5
gamma = 1
sigma = 0.5
kappa = -0.4
#=======================================
eqs = '''
dv/dt = ((e_const) ** (-1*ms/tau)- 1) * v / (1*ms) : 1
thrval : 1
'''
Ginput = PoissonGroup(input_num,dataset[0,:]/tau)
hidden_num = 10
Ghidden = NeuronGroup(hidden_num, model = eqs, threshold = 'v > thrval', reset = 'v -= thrval', method = 'exact')
Ghidden.thrval = threshold_alpha * np.sqrt(3 / input_num)
output_num = 2
Goutput = NeuronGroup(output_num, model = eqs, threshold = 'v > thrval', reset = 'v -= thrval', method = 'exact')
Goutput.thrval = threshold_alpha * np.sqrt(3 / hidden_num)
Sih = Synapses(Ginput, Ghidden, model = 'w : 1', on_pre = 'v_post += w')
Sih.connect(condition = True)
Sih.w = 2 * np.sqrt(3 / input_num) * np.random.random(input_num * hidden_num) - np.sqrt(3 / input_num)
Sho = Synapses(Ghidden, Goutput, model = 'w : 1', on_pre = 'v_post += w')
Sho.connect(condition = True)
Sho.w = 2 * np.sqrt(3 / hidden_num) * np.random.random(hidden_num * output_num) - np.sqrt(3 / hidden_num)
Shh = Synapses(Ghidden, Ghidden, model = 'w : 1', on_pre = 'v_post += w')
Shh.connect(condition = True)
Shh.w = kappa + np.zeros(hidden_num*hidden_num)
for i in range(hidden_num):
Shh.w[i + hidden_num*i] = 0
Soo = Synapses(Goutput, Goutput, model = 'w : 1', on_pre = 'v_post += w')
Soo.connect(condition = True)
Soo.w = kappa + np.zeros(output_num*output_num)
for i in range(output_num):
Shh.w[i + output_num*i] = 0
SpikeMinput = SpikeMonitor(Ginput, None, record = True)
StateMhidden = StateMonitor(Ghidden, 'v', record = True)
SpikeMhidden = SpikeMonitor(Ghidden, 'v', record = True)
StateMoutput = StateMonitor(Goutput, 'v', record = True)
SpikeMoutput = SpikeMonitor(Goutput, 'v', record = True)
wih = np.array(list(map(float, weight_file.readline().split(','))))
who = np.array(list(map(float, weight_file.readline().split(','))))
thidden = np.array(list(map(float, weight_file.readline().split(','))))
toutput = np.array(list(map(float, weight_file.readline().split(','))))
store()
delta_sum = 0
delta_cnt = 0
ans_correct = 0
for i in range(num):
restore()
Sih.w = wih
Sho.w = who
Ghidden.thrval = thidden
Goutput.thrval = toutput
# set test data
index = i
ta = dataset[index,:] / tau
Ginput.rates = ta
run(t_run_test)
spike_count = SpikeMoutput.count
if SpikeMoutput.num_spikes == 0:
print('%d : dead' %(i))
continue
else:
spike_count /= SpikeMoutput.num_spikes
ans = np.zeros(2)
ans[ansset[index]] = 1
if spike_count[0] > spike_count[1] and ans[0] == 1:
ans_correct += 1
elif spike_count[0] < spike_count[1] and ans[1] == 1:
ans_correct += 1
delta_output = spike_count - ans
print('%d : Error = ' %(i), abs(delta_output[0]))
delta_sum += abs(delta_output[0])
delta_cnt += 1
print('%d of %d samples are correct' %(ans_correct,epoch))
print('%d out of %d tries are dead' %(epoch-delta_cnt,epoch))
print('error rates are %f' %(delta_sum/delta_cnt))
| [
"noreply@github.com"
] | Fulin-Wei.noreply@github.com |
1ba09c146545ffdbffb7415629daec8ed063d961 | be0f3dfbaa2fa3d8bbe59229aef3212d032e7dd1 | /Gauss_v45r10p1/Gen/DecFiles/options/11110005.py | ae4a0998f7cff16a635857ff1662ef26f4d93e53 | [] | no_license | Sally27/backup_cmtuser_full | 34782102ed23c6335c48650a6eaa901137355d00 | 8924bebb935b96d438ce85b384cfc132d9af90f6 | refs/heads/master | 2020-05-21T09:27:04.370765 | 2018-12-12T14:41:07 | 2018-12-12T14:41:07 | 185,989,173 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,054 | py | # file /home/hep/ss4314/cmtuser/Gauss_v45r10p1/Gen/DecFiles/options/11110005.py generated: Wed, 25 Jan 2017 15:25:18
#
# Event Type: 11110005
#
# ASCII decay Descriptor: {[B0 -> (tau- -> pi- pi- pi+ nu_tau) mu+]cc, [B_s0 -> (tau+ -> pi- pi+ pi+ anti-nu_tau ) mu-]cc}
#
from Gaudi.Configuration import *
importOptions( "$DECFILESROOT/options/B2XTau.py" )
from Configurables import Generation
Generation().EventType = 11110005
Generation().SampleGenerationTool = "SignalRepeatedHadronization"
from Configurables import SignalRepeatedHadronization
Generation().addTool( SignalRepeatedHadronization )
Generation().SignalRepeatedHadronization.ProductionTool = "PythiaProduction"
from Configurables import ToolSvc
from Configurables import EvtGenDecay
ToolSvc().addTool( EvtGenDecay )
ToolSvc().EvtGenDecay.UserDecayFile = "$DECFILESROOT/dkfiles/Bd_mutau,pipipinu=DecProdCut,TightCut,tauolacleo.dec"
Generation().SignalRepeatedHadronization.CutTool = "DaughtersInLHCbAndWithDaughAndBCuts"
Generation().SignalRepeatedHadronization.SignalPIDList = [ 511,-511 ]
| [
"slavomirastefkova@b2pcx39016.desy.de"
] | slavomirastefkova@b2pcx39016.desy.de |
c0261aa02a69237783b528ccebe6f84a4bba754a | aca9b78a6fc46cf9c0a51be4c0caf694c7d3b4c6 | /PyCTP_Client/PyCTP_ClientCore/Utils.py | d569b32ab68f5416b65e166b21a41c7de4a24114 | [] | no_license | 15137359541/PyCTP-master | cd3842ef6a7dfe0f9abb888ee40ce5e53473318c | 417617c232cdb274c8dec4dbc80ed6e015b1affe | refs/heads/master | 2020-03-29T08:16:19.438755 | 2018-09-21T03:04:04 | 2018-09-21T03:04:04 | 149,702,241 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 23,206 | py | # -*- coding: utf-8 -*-
"""
Created on Wed Jul 20 08:46:13 2016
@author: YuWanying
"""
import os
import time
import DBManager
import chardet
import re
PyCTP_Trade_API_print = False # PyCTP_Trade_API类打印控制
Strategy_print = False # Strategy类打印控制
# 创建套接字,全局变量
socket_file_description = None
# 对CTP_API返回的dict结构内部的元素编码从bytes转换为utf-8,该方法也适用于单个变量的格式转换
def code_transform(data):
# 传入参数为list
if isinstance(data, list):
list_output = []
for i_dict in data:
if isinstance(i_dict, dict): # k是个dict
data_output = {}
for j_key in i_dict: # j是dict内部单个元素的key
data_output[j_key] = code_transform(i_dict[j_key])
list_output.append(data_output)
return list_output
# 传入参数为dict
elif isinstance(data, dict):
data_output = {}
for i in data:
data_output[i] = code_transform(data[i])
return data_output
# 传入参数为单个变量
elif isinstance(data, bytes):
# print(">>>Utils.conde_transform() data =", data, type(data))
return data.decode('gbk', 'ignore')
# return data.decode('gb18030')
# try:
# # data.decode('gbk')
# data.decode('utf-8')
# except:
# print(">>>Utils.conde_transform() data =", data, type(data))
# finally:
# # return data.decode('gbk')
# return data.decode('utf-8')
# return data.decode()
else:
return data
# 传入合约代码,返回品种代码,非品种代码则返回空字符串''
def extract_commodity_id(instrument_id):
if isinstance(instrument_id, str):
if re.match(r'[a-zA-Z][0-9]{3,4}$', instrument_id) is not None:
return instrument_id[:1]
elif re.match(r'[a-zA-Z][a-zA-Z][0-9]{3,4}$', instrument_id) is not None:
return instrument_id[:2]
else:
return ''
else:
return ''
# 打印主菜单
def print_menu():
time.sleep(0.5)
print('===========================')
print('|请输入您的操作编号:')
print('|【qe】查询交易所信息')
print('|【qi】查询合约信息')
print('|【qa】查询账户信息')
print('|【qc】查询账户资金')
print('|【qp】查询账户持仓')
print('|【qo】查询委托记录')
print('|【qt】查询交易记录')
print('|【qm】查询行情')
print('|【sm】订阅行情')
print('|【i】报单')
print('|【a】撤单')
print('|【s】保存数据到本地')
print('|【e】退出')
print('===========================')
# 打印交易员登录、交易员登录
def print_select_admin_trader():
time.sleep(0.5)
print('===========================')
print('|请输入您的操作编号')
print('|【1】管理员登录')
print('|【2】交易员登录')
print('|【q】退出')
print('===========================')
def print_select_trader_user_manager():
time.sleep(0.5)
print('===========================')
print('|请输入您的操作编号')
print('|【1】交易员管理')
print('|【2】期货账户管理')
print('|【q】退出')
print('===========================')
# 打印管理员管理菜单,管理员权限
def print_trader_manager():
time.sleep(0.5)
print('===========================')
print('|请输入您的操作编号')
print('|【1】查看交易员')
print('|【2】增加交易员')
print('|【3】删除交易员')
print('|【4】修改交易员')
print('|【q】退出')
print('===========================')
# 打印交易员管理菜单,管理员权限
def print_user_manager():
time.sleep(0.5)
print('===========================')
print('|请输入您的操作编号')
print('|【1】查看期货账户')
print('|【2】增加期货账户')
print('|【3】删除期货账户')
print('|【4】修改期货账户')
print('|【q】退出')
print('===========================')
# 打印交易员一般操作菜单,非管理员权限
def print_trader_menu():
time.sleep(0.5)
print('===========================')
print('|请输入您的操作编号')
print('|【1】查看所有期货账户')
print('|【2】账户查询')
print('|【3】持仓查询')
print('|【4】报单查询')
print('|【5】成交查询')
print('|【6】报单')
print('|【7】撤单')
print('|【8】订阅行情')
print('|【9】退订行情')
print('|【10】创建交易策略')
print('|【11】修改交易策略')
print('|【12】删除交易策略')
print('|【13】查询交易策略')
print('|【q】退出')
print('===========================')
# Check whether a futures account belongs to the given trader.
def trader_include_user(ctp_manager, trader_id, user_id):
    """Return the user object for ``user_id`` when it is owned by ``trader_id``.

    Returns None when the account is unknown to the database or when no user
    in the manager's list matches both the account id and the trader id.
    """
    if ctp_manager.get_mdb().get_user(user_id) is None:
        print("trader_include_user()数据库中不存在该期货账号", user_id)
        return None
    return next(
        (candidate for candidate in ctp_manager.get_list_user()
         if candidate.get_user_id().decode() == user_id
         and candidate.get_trader_id().decode() == trader_id),
        None,
    )
# Human/console interaction loop.
def gui(ctp_manager):
    """Run the interactive console for the CTP manager.

    Presents an admin/trader login menu and dispatches console commands to
    ``ctp_manager``: trader and futures-account CRUD, account/position/order
    queries, order insert/cancel, market-data (un)subscription, and strategy
    CRUD.  Blocks until the operator quits.

    NOTE(review): console input is parsed with ``eval`` in many branches
    below, which executes arbitrary expressions typed at the prompt — this is
    only acceptable for a fully trusted operator console.
    """
    from PyCTP_Market import PyCTP_Market_API
    import CTPManager
    # ctp_manager.get_mdb() = DBManager.DBManger()  # the DBManager.DBManger() was already created before the main program
    while True:
        print_select_admin_trader()
        v = input()
        if v == '1': # admin login
            print("请输入管理员账号")
            v_admin_id = input()
            print("请输入管理员密码")
            v_password = input()
            # Re-prompt from the top when the admin credentials do not check out.
            if not ctp_manager.get_mdb().check_admin(v_admin_id, v_password):
                continue
            time.sleep(0.5)
            while True:
                print_select_trader_user_manager()
                v = input()
                if v == '1': # enter trader management
                    while True:
                        print_trader_manager()
                        v = input()
                        if v == '1': # view traders
                            print("请输入要查看的交易员ID,查看所有请直接回车")
                            v = input()
                            print(ctp_manager.get_mdb().get_trader(v))
                            time.sleep(0.5)
                        elif v == '2': # create a trader
                            print("请输入交易员信息:{'trader_id': 'xxx', 'trader_name': 'xxx', 'password': 'xxx', 'is_active': '1'}")
                            try:
                                v = eval(input()) # convert the console-input str into a dict
                                ctp_manager.get_mdb().create_trader(v)
                            except SyntaxError as e:
                                print("输入错误,请重新输入,错误信息:", e)
                            time.sleep(0.5)
                        elif v == '3': # delete a trader
                            print("请输入交易员账号")
                            v = input()
                            ctp_manager.get_mdb().delete_trader(v)
                            time.sleep(0.5)
                        elif v == '4': # update a trader
                            # NOTE(review): unlike the create branch, this eval is not
                            # wrapped in try/except, so a typo raises out of the loop.
                            print("请输入交易员信息:{'trader_id': 'xxx', 'trader_name': 'xxx', 'password': 'xxx', 'is_active': '1'}")
                            v = eval(input()) # convert the console-input str into a dict
                            ctp_manager.get_mdb().update_trader(v)
                            time.sleep(0.5)
                        elif v == 'q': # back
                            break
                        else:
                            print("输入错误,请重新输入")
                            time.sleep(0.5)
                elif v == '2': # enter futures-account management
                    while True:
                        print_user_manager()
                        v = input()
                        if v == '1': # view futures accounts
                            print("请输入要查看的期货账号,查看所有请直接回车")
                            v = input()
                            print(ctp_manager.get_mdb().get_user(v))
                            time.sleep(0.5)
                        elif v == '2': # create a futures account
                            print(
                                "请输入期货账户信息:{'trader_id': 'xxx', 'user_id': 'xxx', 'user_name': 'xxx', 'password': 'xxx', 'front_address': 'xxx'}")
                            try:
                                v = eval(input()) # convert the console-input str into a dict
                                ctp_manager.get_mdb().create_user(v)
                            except SyntaxError as e:
                                print("输入错误,请重新输入,错误信息:", e)
                            time.sleep(0.5)
                        elif v == '3': # delete a futures account
                            print("请输入期货账号")
                            v = input()
                            ctp_manager.get_mdb().delete_user(v)
                            time.sleep(0.5)
                        elif v == '4': # update a futures account
                            print(
                                "请输入期货账户信息:{'trader_id': 'xxx', 'user_id': 'xxx', 'user_name': 'xxx', 'password': 'xxx', 'front_address': 'xxx'}")
                            try:
                                v = eval(input()) # convert the console-input str into a dict
                                ctp_manager.get_mdb().update_user(v)
                            except SyntaxError as e:
                                print("输入错误,请重新输入,错误信息:", e)
                            time.sleep(0.5)
                        elif v == 'q': # back
                            break
                        else:
                            print("输入错误,请重新输入")
                            time.sleep(0.5)
                elif v == 'q': # back to the login menu
                    break
                else:
                    print("输入错误,请重新输入")
                    time.sleep(0.5)
        elif v == '2': # trader login
            # Verify the trader account and password.
            print("请输入交易员账号")
            input_trader_id = input()
            print("请输入交易员密码")
            v_password = input()
            if not ctp_manager.get_mdb().check_trader(input_trader_id, v_password):
                continue
            time.sleep(0.5)
            # Record the trader login in the TraderLoginLog database collection.
            ctp_manager.get_mdb().update_trader_login_status(input_trader_id)
            while True:
                print_trader_menu() # print the trader operations menu (non-admin privilege)
                v = input()
                if v == '1': # list all futures accounts under this trader
                    print(ctp_manager.get_mdb().get_user_id(input_trader_id)) # the argument is a Trader-class instance
                    pass
                elif v == '2': # account query
                    print("请输入期货账号")
                    input_user_id = input()
                    obj_user = trader_include_user(ctp_manager, input_trader_id, input_user_id)
                    if obj_user is not None:
                        print(obj_user.get_trade().QryTradingAccount())
                        continue
                elif v == '3': # position query
                    print("请输入期货账号")
                    input_user_id = input()
                    obj_user = trader_include_user(ctp_manager, input_trader_id, input_user_id)
                    if obj_user is not None:
                        print(obj_user.get_trade().QryInvestorPosition())
                        continue
                elif v == '4': # order query
                    print("请输入期货账号")
                    input_user_id = input()
                    obj_user = trader_include_user(ctp_manager, input_trader_id, input_user_id)
                    if obj_user is not None:
                        print(code_transform(obj_user.get_trade().QryOrder()))
                        continue
                elif v == '5': # trade (fill) query
                    print("请输入期货账号")
                    input_user_id = input()
                    obj_user = trader_include_user(ctp_manager, input_trader_id, input_user_id)
                    if obj_user is not None:
                        print(code_transform(obj_user.get_trade().QryTrade()))
                elif v == '6': # insert an order
                    print("请输入期货账号")
                    input_user_id = input()
                    obj_user = trader_include_user(ctp_manager, input_trader_id, input_user_id)
                    if obj_user is not None:
                        input_example = {'InstrumentID': b'cu1609',
                                         'CombOffsetFlag': b'0',
                                         'Direction': b'0',
                                         'VolumeTotalOriginal': 2,
                                         'LimitPrice': 39000.00,
                                         'OrderRef': b'101',
                                         'CombHedgeFlag': b'1'}
                        print("请输入报单参数,例:", input_example)
                        try:
                            input_order_insert = eval(input()) # convert the console-input str into a dict
                            obj_user.get_trade().OrderInsert(input_order_insert)
                        except SyntaxError as e:
                            print("输入错误,请重新输入,错误信息:", e)
                elif v == '7': # cancel an order
                    print("请输入期货账号")
                    input_user_id = input()
                    obj_user = trader_include_user(ctp_manager, input_trader_id, input_user_id)
                    if obj_user is not None:
                        input_example = {'ExchangeID': b'SHFE',
                                         'OrderRef': b'101',
                                         'OrderSysID': b'       46'}
                        print("请输入撤单参数,例:", input_example)
                        try:
                            input_order_insert = eval(input()) # convert the console-input str into a dict
                            obj_user.get_trade().OrderAction(input_order_insert)
                        except SyntaxError as e:
                            print("输入错误,请重新输入,错误信息:", e)
                elif v == '8': # subscribe to market data
                    input_example = {'合约列表': [b'cu1610', b'cu1611'], 'user_id': '800658', 'strategy_id': '01'}
                    print("请输入订阅行情参数参数,例:", input_example)
                    input_arguments = input()
                    try:
                        print("input_arguments=", input_arguments)
                        # print("input_arguments['合约列表']=", input_arguments['合约列表'])
                        input_arguments = eval(input_arguments) # convert the console-input str into a dict
                    except SyntaxError as e:
                        print("输入错误,请重新输入,错误信息:", e)
                        continue
                    input_list_instrument_id = input_arguments['合约列表']
                    input_user_id = input_arguments['user_id']
                    obj_user = trader_include_user(ctp_manager, input_trader_id, input_user_id)
                    if obj_user is None:
                        continue
                    input_user_id = input_arguments['user_id']
                    input_strategy_id = input_arguments['strategy_id']
                    ctp_manager.get_md().sub_market(input_list_instrument_id, input_user_id, input_strategy_id)
                elif v == '9': # unsubscribe from market data
                    input_example = {'合约列表': [b'cu1610', b'cu1611'], 'user_id': '800658', 'strategy_id': '01'}
                    print("请输入退订行情参数,例:", input_example)
                    input_arguments = input()
                    try:
                        print("input_arguments=", input_arguments)
                        # print("input_arguments['合约列表']=", input_arguments['合约列表'])
                        input_arguments = eval(input_arguments) # convert the console-input str into a dict
                    except SyntaxError as e:
                        print("输入错误,请重新输入,错误信息:", e)
                        continue
                    input_list_instrument_id = input_arguments['合约列表']
                    input_user_id = input_arguments['user_id']
                    obj_user = trader_include_user(ctp_manager, input_trader_id, input_user_id)
                    if obj_user is None:
                        continue
                    input_user_id = input_arguments['user_id']
                    input_strategy_id = input_arguments['strategy_id']
                    ctp_manager.get_md().un_sub_market(input_list_instrument_id, input_user_id, input_strategy_id)
                elif v == '10': # create a trading strategy
                    input_example = {'trader_id': '1601',
                                     'user_id': '800658',
                                     'strategy_id': '01',
                                     'order_algorithm': '01',
                                     'list_instrument_id': ['cu1611', 'cu1610']}
                    print("请输入创建策略的参数,例:", input_example)
                    input_arguments = input()
                    try:
                        input_arguments = eval(input_arguments) # convert the console-input str into a dict
                    except SyntaxError as e:
                        print("输入错误,请重新输入,错误信息:", e)
                        continue
                    ctp_manager.create_strategy(input_arguments)
                    ctp_manager.get_mdb().create_strategy(input_arguments)
                elif v == '11': # modify a trading strategy
                    # NOTE(review): this branch parses the input but never applies it —
                    # the update call appears to be unimplemented.
                    input_example = {'trader_id': '1601',
                                     'user_id': '800658',
                                     'strategy_id': '01',
                                     'order_algorithm': '01',
                                     'list_instrument_id': ['cu1611', 'cu1610']}
                    print("请输入修改策略的参数,例:", input_example)
                    input_arguments = input()
                    try:
                        input_arguments = eval(input_arguments) # convert the console-input str into a dict
                    except SyntaxError as e:
                        print("输入错误,请重新输入,错误信息:", e)
                        continue
                elif v == '12': # delete a trading strategy
                    input_example = {'trader_id': '1601', 'user_id': '063802', 'strategy_id': '01'}
                    print("请输入删除策略的参数,例:", input_example)
                    input_arguments = input()
                    try:
                        input_arguments = eval(input_arguments) # convert the console-input str into a dict
                    except SyntaxError as e:
                        print("输入错误,请重新输入,错误信息:", e)
                        continue
                    # Call the manager instance's strategy-deletion method.
                    ctp_manager.delete_strategy(input_arguments)
                    # Delete the strategy record from the database.
                    ctp_manager.get_mdb().delete_strategy(input_arguments['user_id'], input_arguments['strategy_id'])
                elif v == '13': # query trading strategies
                    input_example1 = {} # query all strategies under this trader
                    input_example2 = {'user_id': '800658'} # query all strategies of one futures account under this trader
                    input_example3 = {'user_id': '800658', 'strategy_id': '01'} # query one specific strategy of one futures account under this trader
                    print("请输入查询策略的参数")
                    print("例:", input_example1, "查询交易员名下所有的策略")
                    print("例:", input_example2, "查询交易员名下指定期货账户的所有策略")
                    print("例:", input_example3, "查询交易员名下指定期货账户的指定交易策略")
                    input_arguments = input()
                    try:
                        input_arguments = eval(input_arguments) # convert the console-input str into a dict
                    except SyntaxError as e:
                        print("输入错误,请重新输入,错误信息:", e)
                        continue
                    input_arguments['trader_id'] = input_trader_id
                    output_v = ctp_manager.get_mdb().get_strategy(input_arguments)
                    if not output_v:
                        print("不存在交易策略")
                    else:
                        print("策略数量=", len(output_v))
                        print(output_v)
                elif v == 'q': # log out
                    break
                else:
                    print("输入错误,请重新输入")
                    time.sleep(0.5)
        elif v == 'q': # quit the console
            break
        else:
            print("输入错误,请重新输入")
            time.sleep(0.5)
# Stream-file path management: ensure a directory exists before use.
def make_dirs(path):
    """Create the directory tree ``path`` when it does not exist.

    Returns:
        True when the directory tree was newly created, False when the path
        already existed.
    """
    if os.path.exists(path):
        return False
    # exist_ok=True closes the check-then-create race: a concurrent creator
    # between the exists() check and makedirs() no longer raises.
    os.makedirs(path, exist_ok=True)
    return True
if __name__ == '__main__':
    # Ad-hoc smoke checks for extract_commodity_id with valid and malformed
    # instrument ids.  (The final line previously carried stray non-Python
    # garbage fused onto it, which broke the file's syntax; removed.)
    print(" cu1707", extract_commodity_id(' cu1707'))
    print("IF17071", extract_commodity_id('IF17071'))
    print("T1707", extract_commodity_id('T1707'))
    print("T17071", extract_commodity_id('T17071'))
    print("SR709", extract_commodity_id('SR709'))
    print("SR1709", extract_commodity_id('SR1709'))
    print("SR17091", extract_commodity_id('SR17091'))
    print("i1709", extract_commodity_id('i1709'))
    print("i17091", extract_commodity_id('i17091'))
    print("i170912", extract_commodity_id('i170912'))
    print("i17 1", extract_commodity_id('i17 1'))
    print("i17&", extract_commodity_id('i17&'))
"1715338780@qq.com"
] | 1715338780@qq.com |
c9ea14a259c77d78f844e5eb3259582690b4fdba | 777a237f1728cdb4735a08021622ec0b49df218b | /assign3/test/compare_compression.py | 0d0cc73d044532f49957268dac35d3cb2d888e19 | [] | no_license | joshs333/cs1501 | 4a0e4be533e90a0fb3f3894a13518874dadd921d | 4b99811d84c8ad85937fdde3ddda96e8b7dcae65 | refs/heads/master | 2022-03-15T03:40:09.083798 | 2019-11-22T04:21:00 | 2019-11-22T04:21:00 | 216,253,179 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 405 | py | #!/usr/bin/env python
import subprocess
import glob
import os
import shlex
import sys
if __name__ == "__main__":
files = [os.path.split(f)[1] for f in glob.glob("src/*")]
for file in files:
src = "src/%s"%(file)
comp = "comp/%s"%(file)
src_size = os.stat(src).st_size
comp_size = os.stat(comp).st_size
print("%s\t%3.2f\t%3.2f"%(file, src_size, comp_size))
| [
"joshs333@live.com"
] | joshs333@live.com |
4041b1110a3cf9e9b1683121a4a0e9906da47de9 | 3c000380cbb7e8deb6abf9c6f3e29e8e89784830 | /venv/Lib/site-packages/cobra/modelimpl/bgp/bgppeerkeepalive1qtr.py | 6759bad5eb7c592428c5059694db794b13ec2576 | [] | no_license | bkhoward/aciDOM | 91b0406f00da7aac413a81c8db2129b4bfc5497b | f2674456ecb19cf7299ef0c5a0887560b8b315d0 | refs/heads/master | 2023-03-27T23:37:02.836904 | 2021-03-26T22:07:54 | 2021-03-26T22:07:54 | 351,855,399 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 21,383 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2020 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class BgpPeerKeepAlive1qtr(Mo):
"""
Mo doc not defined in techpub!!!
"""
meta = StatsClassMeta("cobra.model.bgp.BgpPeerKeepAlive1qtr", "BGP Peer Keepalive")
counter = CounterMeta("keepaliveRcvd", CounterCategory.COUNTER, "packets", "Number of Keepalive Messages Received")
counter._propRefs[PropCategory.IMPLICIT_LASTREADING] = "keepaliveRcvdLast"
counter._propRefs[PropCategory.IMPLICIT_CUMULATIVE] = "keepaliveRcvdCum"
counter._propRefs[PropCategory.IMPLICIT_PERIODIC] = "keepaliveRcvdPer"
counter._propRefs[PropCategory.IMPLICIT_MIN] = "keepaliveRcvdMin"
counter._propRefs[PropCategory.IMPLICIT_MAX] = "keepaliveRcvdMax"
counter._propRefs[PropCategory.IMPLICIT_AVG] = "keepaliveRcvdAvg"
counter._propRefs[PropCategory.IMPLICIT_SUSPECT] = "keepaliveRcvdSpct"
counter._propRefs[PropCategory.IMPLICIT_BASELINE] = "keepaliveRcvdBase"
counter._propRefs[PropCategory.IMPLICIT_THRESHOLDED] = "keepaliveRcvdThr"
counter._propRefs[PropCategory.IMPLICIT_TREND_BASE] = "keepaliveRcvdTrBase"
counter._propRefs[PropCategory.IMPLICIT_TREND] = "keepaliveRcvdTr"
counter._propRefs[PropCategory.IMPLICIT_RATE] = "keepaliveRcvdRate"
meta._counters.append(counter)
counter = CounterMeta("keepaliveSent", CounterCategory.COUNTER, "packets", "Number of Keepalive Messages Sent")
counter._propRefs[PropCategory.IMPLICIT_LASTREADING] = "keepaliveSentLast"
counter._propRefs[PropCategory.IMPLICIT_CUMULATIVE] = "keepaliveSentCum"
counter._propRefs[PropCategory.IMPLICIT_PERIODIC] = "keepaliveSentPer"
counter._propRefs[PropCategory.IMPLICIT_MIN] = "keepaliveSentMin"
counter._propRefs[PropCategory.IMPLICIT_MAX] = "keepaliveSentMax"
counter._propRefs[PropCategory.IMPLICIT_AVG] = "keepaliveSentAvg"
counter._propRefs[PropCategory.IMPLICIT_SUSPECT] = "keepaliveSentSpct"
counter._propRefs[PropCategory.IMPLICIT_BASELINE] = "keepaliveSentBase"
counter._propRefs[PropCategory.IMPLICIT_THRESHOLDED] = "keepaliveSentThr"
counter._propRefs[PropCategory.IMPLICIT_TREND_BASE] = "keepaliveSentTrBase"
counter._propRefs[PropCategory.IMPLICIT_TREND] = "keepaliveSentTr"
counter._propRefs[PropCategory.IMPLICIT_RATE] = "keepaliveSentRate"
meta._counters.append(counter)
meta.moClassName = "bgpBgpPeerKeepAlive1qtr"
meta.rnFormat = "CDbgpBgpPeerKeepAlive1qtr"
meta.category = MoCategory.STATS_CURRENT
meta.label = "current BGP Peer Keepalive stats in 1 quarter"
meta.writeAccessMask = 0x8008020040001
meta.readAccessMask = 0x8008020040001
meta.isDomainable = False
meta.isReadOnly = True
meta.isConfigurable = False
meta.isDeletable = False
meta.isContextRoot = True
meta.parentClasses.add("cobra.model.bgp.PeerEntryStats")
meta.superClasses.add("cobra.model.stats.Item")
meta.superClasses.add("cobra.model.stats.Curr")
meta.superClasses.add("cobra.model.bgp.BgpPeerKeepAlive")
meta.rnPrefixes = [
('CDbgpBgpPeerKeepAlive1qtr', False),
]
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "cnt", "cnt", 16212, PropCategory.REGULAR)
prop.label = "Number of Collections During this Interval"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("cnt", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "keepaliveRcvdAvg", "keepaliveRcvdAvg", 53518, PropCategory.IMPLICIT_AVG)
prop.label = "Number of Keepalive Messages Received average value"
prop.isOper = True
prop.isStats = True
meta.props.add("keepaliveRcvdAvg", prop)
prop = PropMeta("str", "keepaliveRcvdBase", "keepaliveRcvdBase", 53513, PropCategory.IMPLICIT_BASELINE)
prop.label = "Number of Keepalive Messages Received baseline"
prop.isOper = True
prop.isStats = True
meta.props.add("keepaliveRcvdBase", prop)
prop = PropMeta("str", "keepaliveRcvdCum", "keepaliveRcvdCum", 53514, PropCategory.IMPLICIT_CUMULATIVE)
prop.label = "Number of Keepalive Messages Received cumulative"
prop.isOper = True
prop.isStats = True
meta.props.add("keepaliveRcvdCum", prop)
prop = PropMeta("str", "keepaliveRcvdLast", "keepaliveRcvdLast", 53512, PropCategory.IMPLICIT_LASTREADING)
prop.label = "Number of Keepalive Messages Received current value"
prop.isOper = True
prop.isStats = True
meta.props.add("keepaliveRcvdLast", prop)
prop = PropMeta("str", "keepaliveRcvdMax", "keepaliveRcvdMax", 53517, PropCategory.IMPLICIT_MAX)
prop.label = "Number of Keepalive Messages Received maximum value"
prop.isOper = True
prop.isStats = True
meta.props.add("keepaliveRcvdMax", prop)
prop = PropMeta("str", "keepaliveRcvdMin", "keepaliveRcvdMin", 53516, PropCategory.IMPLICIT_MIN)
prop.label = "Number of Keepalive Messages Received minimum value"
prop.isOper = True
prop.isStats = True
meta.props.add("keepaliveRcvdMin", prop)
prop = PropMeta("str", "keepaliveRcvdPer", "keepaliveRcvdPer", 53515, PropCategory.IMPLICIT_PERIODIC)
prop.label = "Number of Keepalive Messages Received periodic"
prop.isOper = True
prop.isStats = True
meta.props.add("keepaliveRcvdPer", prop)
prop = PropMeta("str", "keepaliveRcvdRate", "keepaliveRcvdRate", 53523, PropCategory.IMPLICIT_RATE)
prop.label = "Number of Keepalive Messages Received rate"
prop.isOper = True
prop.isStats = True
meta.props.add("keepaliveRcvdRate", prop)
prop = PropMeta("str", "keepaliveRcvdSpct", "keepaliveRcvdSpct", 53519, PropCategory.IMPLICIT_SUSPECT)
prop.label = "Number of Keepalive Messages Received suspect count"
prop.isOper = True
prop.isStats = True
meta.props.add("keepaliveRcvdSpct", prop)
prop = PropMeta("str", "keepaliveRcvdThr", "keepaliveRcvdThr", 53520, PropCategory.IMPLICIT_THRESHOLDED)
prop.label = "Number of Keepalive Messages Received thresholded flags"
prop.isOper = True
prop.isStats = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("avgCrit", "avg-severity-critical", 2199023255552)
prop._addConstant("avgHigh", "avg-crossed-high-threshold", 68719476736)
prop._addConstant("avgLow", "avg-crossed-low-threshold", 137438953472)
prop._addConstant("avgMajor", "avg-severity-major", 1099511627776)
prop._addConstant("avgMinor", "avg-severity-minor", 549755813888)
prop._addConstant("avgRecovering", "avg-recovering", 34359738368)
prop._addConstant("avgWarn", "avg-severity-warning", 274877906944)
prop._addConstant("cumulativeCrit", "cumulative-severity-critical", 8192)
prop._addConstant("cumulativeHigh", "cumulative-crossed-high-threshold", 256)
prop._addConstant("cumulativeLow", "cumulative-crossed-low-threshold", 512)
prop._addConstant("cumulativeMajor", "cumulative-severity-major", 4096)
prop._addConstant("cumulativeMinor", "cumulative-severity-minor", 2048)
prop._addConstant("cumulativeRecovering", "cumulative-recovering", 128)
prop._addConstant("cumulativeWarn", "cumulative-severity-warning", 1024)
prop._addConstant("lastReadingCrit", "lastreading-severity-critical", 64)
prop._addConstant("lastReadingHigh", "lastreading-crossed-high-threshold", 2)
prop._addConstant("lastReadingLow", "lastreading-crossed-low-threshold", 4)
prop._addConstant("lastReadingMajor", "lastreading-severity-major", 32)
prop._addConstant("lastReadingMinor", "lastreading-severity-minor", 16)
prop._addConstant("lastReadingRecovering", "lastreading-recovering", 1)
prop._addConstant("lastReadingWarn", "lastreading-severity-warning", 8)
prop._addConstant("maxCrit", "max-severity-critical", 17179869184)
prop._addConstant("maxHigh", "max-crossed-high-threshold", 536870912)
prop._addConstant("maxLow", "max-crossed-low-threshold", 1073741824)
prop._addConstant("maxMajor", "max-severity-major", 8589934592)
prop._addConstant("maxMinor", "max-severity-minor", 4294967296)
prop._addConstant("maxRecovering", "max-recovering", 268435456)
prop._addConstant("maxWarn", "max-severity-warning", 2147483648)
prop._addConstant("minCrit", "min-severity-critical", 134217728)
prop._addConstant("minHigh", "min-crossed-high-threshold", 4194304)
prop._addConstant("minLow", "min-crossed-low-threshold", 8388608)
prop._addConstant("minMajor", "min-severity-major", 67108864)
prop._addConstant("minMinor", "min-severity-minor", 33554432)
prop._addConstant("minRecovering", "min-recovering", 2097152)
prop._addConstant("minWarn", "min-severity-warning", 16777216)
prop._addConstant("periodicCrit", "periodic-severity-critical", 1048576)
prop._addConstant("periodicHigh", "periodic-crossed-high-threshold", 32768)
prop._addConstant("periodicLow", "periodic-crossed-low-threshold", 65536)
prop._addConstant("periodicMajor", "periodic-severity-major", 524288)
prop._addConstant("periodicMinor", "periodic-severity-minor", 262144)
prop._addConstant("periodicRecovering", "periodic-recovering", 16384)
prop._addConstant("periodicWarn", "periodic-severity-warning", 131072)
prop._addConstant("rateCrit", "rate-severity-critical", 36028797018963968)
prop._addConstant("rateHigh", "rate-crossed-high-threshold", 1125899906842624)
prop._addConstant("rateLow", "rate-crossed-low-threshold", 2251799813685248)
prop._addConstant("rateMajor", "rate-severity-major", 18014398509481984)
prop._addConstant("rateMinor", "rate-severity-minor", 9007199254740992)
prop._addConstant("rateRecovering", "rate-recovering", 562949953421312)
prop._addConstant("rateWarn", "rate-severity-warning", 4503599627370496)
prop._addConstant("trendCrit", "trend-severity-critical", 281474976710656)
prop._addConstant("trendHigh", "trend-crossed-high-threshold", 8796093022208)
prop._addConstant("trendLow", "trend-crossed-low-threshold", 17592186044416)
prop._addConstant("trendMajor", "trend-severity-major", 140737488355328)
prop._addConstant("trendMinor", "trend-severity-minor", 70368744177664)
prop._addConstant("trendRecovering", "trend-recovering", 4398046511104)
prop._addConstant("trendWarn", "trend-severity-warning", 35184372088832)
prop._addConstant("unspecified", None, 0)
meta.props.add("keepaliveRcvdThr", prop)
prop = PropMeta("str", "keepaliveRcvdTr", "keepaliveRcvdTr", 53522, PropCategory.IMPLICIT_TREND)
prop.label = "Number of Keepalive Messages Received trend"
prop.isOper = True
prop.isStats = True
meta.props.add("keepaliveRcvdTr", prop)
prop = PropMeta("str", "keepaliveRcvdTrBase", "keepaliveRcvdTrBase", 53521, PropCategory.IMPLICIT_TREND_BASE)
prop.label = "Number of Keepalive Messages Received trend baseline"
prop.isOper = True
prop.isStats = True
meta.props.add("keepaliveRcvdTrBase", prop)
prop = PropMeta("str", "keepaliveSentAvg", "keepaliveSentAvg", 53539, PropCategory.IMPLICIT_AVG)
prop.label = "Number of Keepalive Messages Sent average value"
prop.isOper = True
prop.isStats = True
meta.props.add("keepaliveSentAvg", prop)
prop = PropMeta("str", "keepaliveSentBase", "keepaliveSentBase", 53534, PropCategory.IMPLICIT_BASELINE)
prop.label = "Number of Keepalive Messages Sent baseline"
prop.isOper = True
prop.isStats = True
meta.props.add("keepaliveSentBase", prop)
prop = PropMeta("str", "keepaliveSentCum", "keepaliveSentCum", 53535, PropCategory.IMPLICIT_CUMULATIVE)
prop.label = "Number of Keepalive Messages Sent cumulative"
prop.isOper = True
prop.isStats = True
meta.props.add("keepaliveSentCum", prop)
prop = PropMeta("str", "keepaliveSentLast", "keepaliveSentLast", 53533, PropCategory.IMPLICIT_LASTREADING)
prop.label = "Number of Keepalive Messages Sent current value"
prop.isOper = True
prop.isStats = True
meta.props.add("keepaliveSentLast", prop)
prop = PropMeta("str", "keepaliveSentMax", "keepaliveSentMax", 53538, PropCategory.IMPLICIT_MAX)
prop.label = "Number of Keepalive Messages Sent maximum value"
prop.isOper = True
prop.isStats = True
meta.props.add("keepaliveSentMax", prop)
prop = PropMeta("str", "keepaliveSentMin", "keepaliveSentMin", 53537, PropCategory.IMPLICIT_MIN)
prop.label = "Number of Keepalive Messages Sent minimum value"
prop.isOper = True
prop.isStats = True
meta.props.add("keepaliveSentMin", prop)
prop = PropMeta("str", "keepaliveSentPer", "keepaliveSentPer", 53536, PropCategory.IMPLICIT_PERIODIC)
prop.label = "Number of Keepalive Messages Sent periodic"
prop.isOper = True
prop.isStats = True
meta.props.add("keepaliveSentPer", prop)
prop = PropMeta("str", "keepaliveSentRate", "keepaliveSentRate", 53544, PropCategory.IMPLICIT_RATE)
prop.label = "Number of Keepalive Messages Sent rate"
prop.isOper = True
prop.isStats = True
meta.props.add("keepaliveSentRate", prop)
prop = PropMeta("str", "keepaliveSentSpct", "keepaliveSentSpct", 53540, PropCategory.IMPLICIT_SUSPECT)
prop.label = "Number of Keepalive Messages Sent suspect count"
prop.isOper = True
prop.isStats = True
meta.props.add("keepaliveSentSpct", prop)
prop = PropMeta("str", "keepaliveSentThr", "keepaliveSentThr", 53541, PropCategory.IMPLICIT_THRESHOLDED)
prop.label = "Number of Keepalive Messages Sent thresholded flags"
prop.isOper = True
prop.isStats = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("avgCrit", "avg-severity-critical", 2199023255552)
prop._addConstant("avgHigh", "avg-crossed-high-threshold", 68719476736)
prop._addConstant("avgLow", "avg-crossed-low-threshold", 137438953472)
prop._addConstant("avgMajor", "avg-severity-major", 1099511627776)
prop._addConstant("avgMinor", "avg-severity-minor", 549755813888)
prop._addConstant("avgRecovering", "avg-recovering", 34359738368)
prop._addConstant("avgWarn", "avg-severity-warning", 274877906944)
prop._addConstant("cumulativeCrit", "cumulative-severity-critical", 8192)
prop._addConstant("cumulativeHigh", "cumulative-crossed-high-threshold", 256)
prop._addConstant("cumulativeLow", "cumulative-crossed-low-threshold", 512)
prop._addConstant("cumulativeMajor", "cumulative-severity-major", 4096)
prop._addConstant("cumulativeMinor", "cumulative-severity-minor", 2048)
prop._addConstant("cumulativeRecovering", "cumulative-recovering", 128)
prop._addConstant("cumulativeWarn", "cumulative-severity-warning", 1024)
prop._addConstant("lastReadingCrit", "lastreading-severity-critical", 64)
prop._addConstant("lastReadingHigh", "lastreading-crossed-high-threshold", 2)
prop._addConstant("lastReadingLow", "lastreading-crossed-low-threshold", 4)
prop._addConstant("lastReadingMajor", "lastreading-severity-major", 32)
prop._addConstant("lastReadingMinor", "lastreading-severity-minor", 16)
prop._addConstant("lastReadingRecovering", "lastreading-recovering", 1)
prop._addConstant("lastReadingWarn", "lastreading-severity-warning", 8)
prop._addConstant("maxCrit", "max-severity-critical", 17179869184)
prop._addConstant("maxHigh", "max-crossed-high-threshold", 536870912)
prop._addConstant("maxLow", "max-crossed-low-threshold", 1073741824)
prop._addConstant("maxMajor", "max-severity-major", 8589934592)
prop._addConstant("maxMinor", "max-severity-minor", 4294967296)
prop._addConstant("maxRecovering", "max-recovering", 268435456)
prop._addConstant("maxWarn", "max-severity-warning", 2147483648)
prop._addConstant("minCrit", "min-severity-critical", 134217728)
prop._addConstant("minHigh", "min-crossed-high-threshold", 4194304)
prop._addConstant("minLow", "min-crossed-low-threshold", 8388608)
prop._addConstant("minMajor", "min-severity-major", 67108864)
prop._addConstant("minMinor", "min-severity-minor", 33554432)
prop._addConstant("minRecovering", "min-recovering", 2097152)
prop._addConstant("minWarn", "min-severity-warning", 16777216)
prop._addConstant("periodicCrit", "periodic-severity-critical", 1048576)
prop._addConstant("periodicHigh", "periodic-crossed-high-threshold", 32768)
prop._addConstant("periodicLow", "periodic-crossed-low-threshold", 65536)
prop._addConstant("periodicMajor", "periodic-severity-major", 524288)
prop._addConstant("periodicMinor", "periodic-severity-minor", 262144)
prop._addConstant("periodicRecovering", "periodic-recovering", 16384)
prop._addConstant("periodicWarn", "periodic-severity-warning", 131072)
prop._addConstant("rateCrit", "rate-severity-critical", 36028797018963968)
prop._addConstant("rateHigh", "rate-crossed-high-threshold", 1125899906842624)
prop._addConstant("rateLow", "rate-crossed-low-threshold", 2251799813685248)
prop._addConstant("rateMajor", "rate-severity-major", 18014398509481984)
prop._addConstant("rateMinor", "rate-severity-minor", 9007199254740992)
prop._addConstant("rateRecovering", "rate-recovering", 562949953421312)
prop._addConstant("rateWarn", "rate-severity-warning", 4503599627370496)
prop._addConstant("trendCrit", "trend-severity-critical", 281474976710656)
prop._addConstant("trendHigh", "trend-crossed-high-threshold", 8796093022208)
prop._addConstant("trendLow", "trend-crossed-low-threshold", 17592186044416)
prop._addConstant("trendMajor", "trend-severity-major", 140737488355328)
prop._addConstant("trendMinor", "trend-severity-minor", 70368744177664)
prop._addConstant("trendRecovering", "trend-recovering", 4398046511104)
prop._addConstant("trendWarn", "trend-severity-warning", 35184372088832)
prop._addConstant("unspecified", None, 0)
meta.props.add("keepaliveSentThr", prop)
prop = PropMeta("str", "keepaliveSentTr", "keepaliveSentTr", 53543, PropCategory.IMPLICIT_TREND)
prop.label = "Number of Keepalive Messages Sent trend"
prop.isOper = True
prop.isStats = True
meta.props.add("keepaliveSentTr", prop)
prop = PropMeta("str", "keepaliveSentTrBase", "keepaliveSentTrBase", 53542, PropCategory.IMPLICIT_TREND_BASE)
prop.label = "Number of Keepalive Messages Sent trend baseline"
prop.isOper = True
prop.isStats = True
meta.props.add("keepaliveSentTrBase", prop)
prop = PropMeta("str", "lastCollOffset", "lastCollOffset", 111, PropCategory.REGULAR)
prop.label = "Collection Length"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("lastCollOffset", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "repIntvEnd", "repIntvEnd", 110, PropCategory.REGULAR)
prop.label = "Reporting End Time"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("repIntvEnd", prop)
prop = PropMeta("str", "repIntvStart", "repIntvStart", 109, PropCategory.REGULAR)
prop.label = "Reporting Start Time"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("repIntvStart", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
    def __init__(self, parentMoOrDn, markDirty=True, **creationProps):
        # This MO has no naming properties (meta.rnFormat above is a fixed
        # string), so an empty naming-value list is forwarded to the generic
        # Mo constructor along with the parent and any creation properties.
        namingVals = []
        Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"bkhoward@live.com"
] | bkhoward@live.com |
5f929a1ba13042372d1b5faccfcbda40cef3f219 | d452c00a98b085ae6248270843d587029775e9ca | /todo/todo/urls.py | 0cf6eaab54da3a13c56b96eb710f771c24c82f3b | [] | no_license | osmanshaon/todolist_django | 2313693476383abb38241367368259406f13d29a | 1af999f7e07040991912c032e5b5c15a9c97026e | refs/heads/main | 2023-02-07T14:29:06.055911 | 2021-01-03T00:25:34 | 2021-01-03T00:25:34 | 326,295,071 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 811 | py | """todo URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
path('admin/', admin.site.urls),
path('', include('tasks.urls'))
] | [
"noreply@github.com"
] | osmanshaon.noreply@github.com |
f8c1961002d5684aaac8b9146edcc8141815f3c3 | 006341ca12525aa0979d6101600e78c4bd9532ab | /CMS/Zope-3.2.1/Dependencies/zope.schema-Zope-3.2.1/zope.schema/tests/test_equality.py | 15ddc59bdbef08a1510e8324cf74ff18b85f5b80 | [
"ZPL-2.1",
"Python-2.0",
"ICU",
"LicenseRef-scancode-public-domain",
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference",
"ZPL-2.0"
] | permissive | germanfriday/code-examples-sandbox | d0f29e20a3eed1f8430d06441ac2d33bac5e4253 | 4c538584703754c956ca66392fdcecf0a0ca2314 | refs/heads/main | 2023-05-30T22:21:57.918503 | 2021-06-15T15:06:47 | 2021-06-15T15:06:47 | 377,200,448 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,183 | py | ##############################################################################
#
# Copyright (c) 2001, 2002 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Field equality tests
$Id: test_equality.py 26567 2004-07-16 06:58:27Z srichter $
"""
from unittest import TestCase, TestSuite, makeSuite
from zope.schema import Text, Int
class FieldEqualityTests(TestCase):
equality = [
'Text(title=u"Foo", description=u"Bar")',
'Int(title=u"Foo", description=u"Bar")',
]
def test_equality(self):
for text in self.equality:
self.assertEquals(eval(text), eval(text))
def test_suite():
return TestSuite(
[makeSuite(FieldEqualityTests)])
| [
"chris@thegermanfriday.com"
] | chris@thegermanfriday.com |
ecfad2b7a5f1e8fc3bac4be71079e14f1ede8d63 | e5a511e346f5be8a82fe9cb2edf457aa7e82859c | /PythonNEW/List/TwoListSimultaneously.py | 0dfa4f6fd4f78d93eaef150d9da5496a709b24a6 | [] | no_license | nekapoor7/Python-and-Django | 8397561c78e599abc8755887cbed39ebef8d27dc | 8fa4d15f4fa964634ad6a89bd4d8588aa045e24f | refs/heads/master | 2022-10-10T20:23:02.673600 | 2020-06-11T09:06:42 | 2020-06-11T09:06:42 | 257,163,996 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 168 | py | """Write a Python program to iterate over two lists simultaneously."""
l1 = list(input().split())
l2 = list(input().split())
for l1,l2 in zip(l1,l2):
print(l1,l2) | [
"neha.kapoor070789@gmail.com"
] | neha.kapoor070789@gmail.com |
60e3c2d1dbc4073fc7d1e547ae44adb9cae2aeeb | f65be296b831982b187cb3c3a1c82740fec15b5a | /ineco_bpe/purchase_requisition.py | 229d471699fc983da29a8663afeac5ff9902e655 | [] | no_license | nitikarnh/bpe_module | ab05af81f7dae10129ec584233423d4e5c3c7f3d | 6b1057495b277dc69023554d5d4e7bf172ba07c1 | refs/heads/master | 2020-05-21T16:40:05.291099 | 2017-10-24T09:11:01 | 2017-10-24T09:11:01 | 64,814,809 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19,504 | py | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2014 INECO Part., Ltd. (<http://www.ineco.co.th>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from datetime import datetime
from openerp.osv import fields, osv
from openerp.tools.translate import _
from openerp.tools import DEFAULT_SERVER_DATE_FORMAT, DEFAULT_SERVER_DATETIME_FORMAT
import openerp.addons.decimal_precision as dp
import time
class ineco_job_type(osv.osv):
_name = 'ineco.job.type'
_description = "Job Type"
_columns = {
'name': fields.char('Description', size=128,required=True),
}
_sql_constraints = [
('name_unique', 'unique (name)', 'Job Type must be unique!')
]
class purchase_requisition(osv.osv):
def _get_purchase_order(self, cr, uid, ids, context=None):
result = {}
for po in self.pool.get('purchase.order').browse(cr, uid, ids, context=context):
result[po.requisition_id.id] = True
return result.keys()
def _get_requisition_line(self, cr, uid, ids, context=None):
result = {}
for line in self.pool.get('purchase.requisition.line').browse(cr, uid, ids, context=context):
result[line.requisition_id.id] = True
return result.keys()
def _get_ready_product (self,cr,uid,ids,name,arg,context=None):
res = {}
for pr in self.browse(cr, uid, ids):
res[pr.id] = {
'rfq_ready': False
}
sql = """
select count(*) from purchase_requisition_line prl
where requisition_id = %s and (rfq_ready = False or rfq_ready is null)
"""
cr.execute(sql % (pr.id))
output = cr.fetchone()
if output and output[0] == 0.0:
if pr.state == 'cancel':
res[pr.id]['rfq_ready'] = False
else:
res[pr.id]['rfq_ready'] = True
else:
res[pr.id]['rfq_ready'] = False
return res
_inherit = "purchase.requisition"
_columns = {
'user_approve_id': fields.many2one('res.users','Approval By', required=True, track_visibility='onchange'),
'date_approve': fields.datetime('Date Approval', track_visibility='onchange'),
'user_checked_id': fields.many2one('res.users','Checked By', required=True, track_visibility='onchange'),
'date_checked': fields.datetime('Date Checked', track_visibility='onchange'),
'type_of_requirement': fields.selection([('normal','Normal'),('urgent','Urgent'),('shutdown','Shutdown')], 'Type of Requirement', required=True),
'additional_requirement_manual': fields.boolean('Manual'),
'additional_requirement_certificate': fields.boolean('Certificate'),
'additional_requirement_other': fields.boolean('Other'),
'additional_other': fields.char('Other',size=128),
'job_type_id': fields.many2one('ineco.job.type','Type of Order',required=True, track_visibility='onchange', ondelete='restrict'),
'rfq_ready': fields.function(_get_ready_product, method=True, type='boolean', string="RFQ Ready",
store={
'purchase.requisition': (lambda self, cr, uid, ids, c={}: ids, [], 10),
'purchase.requisition.line': (_get_requisition_line, [], 10),
'purchase.order': (_get_purchase_order, [], 10),
},
multi='_rfq_ready'),
}
_defaults = {
'additional_requirement_manual': False,
'additional_requirement_certificate': False,
'additional_requirement_other': False,
'type_of_requirement': 'normal',
'ordering_date': fields.date.context_today , #time.strftime('%Y-%m-%d'),
'name': '/',
}
_order = 'ordering_date desc, name desc'
def create(self, cr, uid, vals, context=None):
if vals.get('name','/')=='/':
vals['name'] = self.pool.get('ir.sequence').get(cr, uid, 'purchase.order.requisition') or '/'
vals['ordering_date'] = time.strftime("%Y-%m-%d")
context = dict(context or {}, mail_create_nolog=True)
order = super(purchase_requisition, self).create(cr, uid, vals, context=context)
#self.message_post(cr, uid, [order], body=_("RFQ created"), context=context)
return order
def copy(self, cr, uid, id, default=None, context=None):
if default is None:
default = {}
template = self.browse(cr, uid, id, context=context)
default['date_approve'] = False
default['date_checked'] = False
return super(purchase_requisition, self).copy(cr, uid, id, default=default, context=context)
def _prepare_purchase_order(self, cr, uid, requisition, supplier, context=None):
supplier_pricelist = supplier.property_product_pricelist_purchase
emp_ids = self.pool.get('hr.employee').search(cr, uid, [('user_id','=',uid)])
employee = self.pool.get('hr.employee').browse(cr, uid, emp_ids)
user_checked_id = False
user_approve_id = False
if employee.parent_id and employee.parent_id.user_id :
user_approve_id = employee.parent_id.user_id.id
if employee.coach_id and employee.coach_id.user_id :
user_checked_id = employee.coach_id.user_id.id
return {
'name': self.pool.get('ir.sequence').get(cr, uid, 'purchase.order.temp'),
'origin': requisition.name,
'date_order': requisition.date_end or fields.datetime.now(),
'partner_id': supplier.id,
'pricelist_id': supplier_pricelist.id,
'currency_id': supplier_pricelist and supplier_pricelist.currency_id.id or requisition.company_id.currency_id.id,
'location_id': requisition.procurement_id and requisition.procurement_id.location_id.id or requisition.picking_type_id.default_location_dest_id.id,
'company_id': requisition.company_id.id,
'fiscal_position': supplier.property_account_position and supplier.property_account_position.id or False,
'requisition_id': requisition.id,
'notes': requisition.description,
'picking_type_id': requisition.picking_type_id.id,
'user_approve_id': user_approve_id,
'user_checked_id': user_checked_id,
'payment_term_id': supplier.property_supplier_payment_term and supplier.property_supplier_payment_term.id or False,
}
def _prepare_purchase_order_line(self, cr, uid, requisition, requisition_line, purchase_id, supplier, context=None):
if context is None:
context = {}
po_line_obj = self.pool.get('purchase.order.line')
product_uom = self.pool.get('product.uom')
product = requisition_line.product_id
default_uom_po_id = product.uom_po_id.id
ctx = context.copy()
ctx['tz'] = requisition.user_id.tz
date_order = requisition.ordering_date and fields.date.date_to_datetime(self, cr, uid, requisition.ordering_date, context=ctx) or fields.datetime.now()
qty = product_uom._compute_qty(cr, uid, requisition_line.product_uom_id.id, requisition_line.product_qty, default_uom_po_id)
supplier_pricelist = supplier.property_product_pricelist_purchase and supplier.property_product_pricelist_purchase.id or False
vals = po_line_obj.onchange_product_id(
cr, uid, [], supplier_pricelist, product.id, qty, default_uom_po_id,
supplier.id, date_order=date_order,
fiscal_position_id=supplier.property_account_position,
date_planned=requisition_line.schedule_date,
name=False, price_unit=False, state='draft', context=context)['value']
vals.update({
'order_id': purchase_id,
'product_id': product.id,
'account_analytic_id': requisition.account_analytic_id.id,
'name': requisition_line.note or '-',
})
return vals
def button_check(self,cr,uid,ids,context=None):
for pr in self.browse(cr,uid,ids):
pr.write({'user_checked_id': uid,'date_checked': time.strftime('%Y-%m-%d %H:%M:%S')})
def tender_reset(self, cr, uid, ids, context=None):
self.write(cr, uid, ids, {'state': 'draft',
'date_approve': False,
'date_checked': False})
for p_id in ids:
# Deleting the existing instance of workflow for PO
self.delete_workflow(cr, uid, [p_id])
self.create_workflow(cr, uid, [p_id])
return True
#Approve PR
def tender_in_progress(self, cr, uid, ids, context=None):
return self.write(cr, uid, ids, {'state': 'in_progress', 'user_approve_id': uid, 'date_approve': time.strftime('%Y-%m-%d %H:%M:%S')}, context=context)
def tender_open(self, cr, uid, ids, context=None):
for data in self.browse(cr, uid, ids):
if not data.purchase_ids:
raise osv.except_osv('Warning!', 'You not have any RFQ or Purchase Order.')
return self.write(cr, uid, ids, {'state': 'open'}, context=context)
def onchange_user_id_old(self, cr, uid, ids, user_id, context=None):
""" Changes UoM and name if product_id changes.
@param user_id: User
@return: Dictionary of changed values
"""
value = {'user_approve_id': False,'user_checked_id': False}
group = self.pool.get('res.groups').browse(cr, uid, [54])
domain_approve_ids = [x.id for x in group.users]
domain_approve_ids.remove(1)
domain = {}
if user_id:
emp_ids = self.pool.get('hr.employee').search(cr, uid, [('user_id','=',user_id)])
employee = self.pool.get('hr.employee').browse(cr, uid, emp_ids)
if employee.parent_id and employee.parent_id.user_id :
value.update({'user_approve_id': employee.parent_id.user_id.id })
if employee.coach_id and employee.coach_id.user_id :
value.update({'user_checked_id': employee.coach_id.user_id.id })
if employee.department_id:
domain = {'account_analytic_id': ['|','|',('department_id', '=', employee.department_id.id),
('parent_id.department_id','=', employee.department_id.id),
('project','=',True),('close','=',False)],
'user_approve_id': [('id','in',domain_approve_ids)]}
return {'value': value, 'domain': domain}
def onchange_user_id(self, cr, uid, ids, user_id, context=None):
""" Changes UoM and name if product_id changes.
@param user_id: User
@return: Dictionary of changed values
"""
value = {'user_approve_id': False,'user_checked_id': False}
group = self.pool.get('res.groups').browse(cr, uid, [54])
domain_approve_ids = [x.id for x in group.users]
domain_approve_ids.remove(1)
domain_check_ids = []
domain = {}
if user_id:
emp_ids = self.pool.get('hr.employee').search(cr, uid, [('user_id','=',user_id)])
employee = self.pool.get('hr.employee').browse(cr, uid, emp_ids)
domain_approve_ids = []
if employee.parent_id and employee.parent_id.user_id :
value.update({'user_approve_id': employee.parent_id.user_id.id })
domain_approve_ids.append(employee.parent_id.user_id.id)
domain_check_ids.append(employee.parent_id.user_id.id)
if employee.coach_id and employee.coach_id.user_id :
value.update({'user_checked_id': employee.coach_id.user_id.id })
domain_check_ids.append(employee.coach_id.user_id.id)
if employee.department_id:
domain = {'account_analytic_id': ['|','&','&',('department_id', '=', employee.department_id.id),('project','=',True),('close','=',False),
'&','&',('department_id', '=', False),('project','=',True),('close','=',False)] ,
'user_approve_id': [('id','in',domain_approve_ids)],
'user_checked_id': [('id','in',domain_check_ids)],
}
return {'value': value, 'domain': domain}
def generate_po(self, cr, uid, ids, context=None):
"""
Generate all purchase order based on selected lines, should only be called on one tender at a time
"""
po = self.pool.get('purchase.order')
poline = self.pool.get('purchase.order.line')
id_per_supplier = {}
for tender in self.browse(cr, uid, ids, context=context):
if tender.state == 'done':
raise osv.except_osv(_('Warning!'), _('You have already generate the purchase order(s).'))
confirm = False
#check that we have at least confirm one line
for po_line in tender.po_line_ids:
#Change This Line
if po_line.state not in ['cancel'] :
confirm = True
break
if not confirm:
raise osv.except_osv(_('Warning!'), _('You have no line selected for buying.'))
#check for complete RFQ
for quotation in tender.purchase_ids:
if (self.check_valid_quotation(cr, uid, quotation, context=context)):
#use workflow to set PO state to confirm
po.signal_workflow(cr, uid, [quotation.id], 'purchase_confirm')
#get other confirmed lines per supplier
for po_line in tender.po_line_ids:
#only take into account confirmed line that does not belong to already confirmed purchase order
if po_line.state == 'confirmed' and po_line.order_id.state in ['draft', 'sent', 'bid']:
if id_per_supplier.get(po_line.partner_id.id):
id_per_supplier[po_line.partner_id.id].append(po_line)
else:
id_per_supplier[po_line.partner_id.id] = [po_line]
#generate po based on supplier and cancel all previous RFQ
ctx = dict(context or {}, force_requisition_id=True)
for supplier, product_line in id_per_supplier.items():
#copy a quotation for this supplier and change order_line then validate it
quotation_id = po.search(cr, uid, [('requisition_id', '=', tender.id), ('partner_id', '=', supplier)], limit=1)[0]
vals = self._prepare_po_from_tender(cr, uid, tender, context=context)
new_po = po.copy(cr, uid, quotation_id, default=vals, context=context)
#duplicate po_line and change product_qty if needed and associate them to newly created PO
for line in product_line:
vals = self._prepare_po_line_from_tender(cr, uid, tender, line, new_po, context=context)
poline.copy(cr, uid, line.id, default=vals, context=context)
#use workflow to set new PO state to confirm
po.signal_workflow(cr, uid, [new_po], 'purchase_confirm')
#cancel other orders
self.cancel_unconfirmed_quotations(cr, uid, tender, context=context)
#set tender to state done
self.signal_workflow(cr, uid, [tender.id], 'done')
return True
class purchase_requisition_line(osv.osv):
def _get_ready_product (self,cr,uid,ids,name,arg,context=None):
res = {}
for line in self.browse(cr, uid, ids):
res[line.id] = {
'rfq_ready': False
}
if line.product_id:
sql = """
select product_id from purchase_order po
join purchase_order_line pol on po.id = pol.order_id
where requisition_id = %s and product_id = %s and po.state not in ('cancel')
"""
cr.execute(sql % (line.requisition_id.id, line.product_id.id))
output = cr.fetchone()
if output and output[0]:
res[line.id]['rfq_ready'] = True
else:
res[line.id]['rfq_ready'] = False
return res
def _get_purchase_order(self, cr, uid, ids, context=None):
result = {}
for po in self.pool.get('purchase.order').browse(cr, uid, ids, context=context):
for line in po.requisition_id.line_ids:
result[line.id] = True
return result.keys()
def _get_requisition(self, cr, uid, ids, context=None):
result = {}
for pr in self.pool.get('purchase.requisition').browse(cr, uid, ids, context=context):
for line in pr.line_ids:
result[line.id] = True
return result.keys()
_inherit = "purchase.requisition.line"
_description = "Purchase Requisition Line"
_columns = {
'cost': fields.float('Price Unit', digits=(12,4)),
'note': fields.char('Note', size=254),
'rfq_ready': fields.function(_get_ready_product, method=True, type='boolean', string="RFQ Ready",
store={
'purchase.requisition.line': (lambda self, cr, uid, ids, c={}: ids, [], 10),
'purchase.requisition': (_get_requisition, [], 10),
'purchase.order': (_get_purchase_order, [], 10),
},
multi='_rfq_ready'),
}
_defaults = {
'cost': 1.0000,
'note': False,
}
def onchange_product_id(self, cr, uid, ids, product_id, product_uom_id, parent_analytic_account, analytic_account, parent_date, date, context=None):
""" Changes UoM and name if product_id changes.
@param name: Name of the field
@param product_id: Changed product_id
@return: Dictionary of changed values
"""
value = {'product_uom_id': ''}
domain = {}
if product_id:
prod = self.pool.get('product.product').browse(cr, uid, product_id, context=context)
value = {'product_uom_id': prod.uom_id.id, 'product_qty': 1.0,'cost': prod.standard_price or 0.0}
domain = {'product_uom_id': [('category_id','=',prod.uom_id.category_id.id)]}
if not analytic_account:
value.update({'account_analytic_id': parent_analytic_account})
if not date:
value.update({'schedule_date': parent_date})
return {'value': value,'domain':domain}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| [
"thitithup@gmail.com"
] | thitithup@gmail.com |
7a270ed4e75773a84514a7764d32ef29877cf62d | 6ad7476e5375af9f76bd062816561b5d2179ce65 | /日常练习/ex4.py | 6730cd42df9899e85f105ceb12eb8145e1d0392f | [] | no_license | on195594/python | 252893a6ef4d4fa386f27c05e01e64af98c6e63f | 7e78344caf92f1a44305e430cc1da12a4a1a6b96 | refs/heads/master | 2021-05-10T18:52:46.568185 | 2018-01-20T02:32:34 | 2018-01-20T02:32:34 | 118,136,913 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 550 | py | cars = 100
space_in_a_car = 4
drivers = 30
passengers = 90
cars_not_driven = cars - drivers
cars_driven = drivers
carpool_capacity = cars_driven * space_in_a_car
average_passengers_per_car = passengers / cars_driven
print('There are',cars,'cars available.')
print('There are only',drivers,'drivers available.')
print('There will be',cars_not_driven,'empty cars today.')
print('We can transport',carpool_capacity,'people today.')
print('We have',passengers,'to carpoll today.')
print('We need to put about',average_passengers_per_car,'in each car.')
| [
"on195594@yahoo.com"
] | on195594@yahoo.com |
9609f6507779e812c1f898f8fd28a67585677e35 | 8e7a897cf27e470197edb2043b18e6e34692c523 | /scripts/drug_scraper.py | 9c0856eb4b8cbb6ddfe67d6fb8eda36594f238d0 | [] | no_license | peggybustamante/python-samples | 3541710b179aa35806ff3ab7d91b4186c86d4f55 | a0e677398966b0837ce9a3335776706d87d15787 | refs/heads/master | 2016-09-03T07:27:41.653259 | 2015-01-06T01:03:21 | 2015-01-06T01:03:21 | 28,839,877 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,593 | py | #!/usr/bin/env python
"""
scraper for Health Widget FDA recalls:
http://www.fda.gov/AJAX/DRUGS
"""
import urllib
import json
import sys
from BeautifulSoup import BeautifulSoup
fda_list = []
URL = 'http://www.fda.gov/AJAX/DRUGS/'
try:
web_cnx = urllib.urlopen(URL)
html = web_cnx.read()
# sys.exit()
except IOError:
print "Error: can\'t find file or read data"
sys.exit()
else:
print "Written content in the file successfully"
# Parse the HTML into a form that's easy to use
soup = BeautifulSoup(html)
table = soup.find(id='Drugs')
rows = table.findAll('tr')
headers = rows[0].findAll('th')
# Extract the column names and add them to a list
columns = []
for header in headers:
columns.append(header.text)
text_file = open("../data/drug_recalls.json", "w")
text_file.write('{"success":{"total":24808,"results":')
for row in rows[1:]:
# Extract data points from the table row
data = row.findAll('td')
# Pluck out the text of each field and store in a separate variable
recall_date = data[0].text
prodname = data[1].text
prodname = prodname.replace(" ","")
recall_url = data[1].find('a')['href']
description = data[2].text
description = description.replace(" ","")
reason = data[3].text
company = data[4].text
#drop into dictionary
fda_list.append({'recall_date':recall_date,'prodname':prodname,'recall_url':recall_url,'description':description,'reason':reason,'company':company})
#turn into json object and write to file
print>>text_file,json.dumps(fda_list)
text_file.write('}}')
text_file.close()
| [
"peggybustamante@Peggys-MacBook-Air.local"
] | peggybustamante@Peggys-MacBook-Air.local |
5a31f4f2368cf80d1e54ce6cde3cc4df5db704c4 | db5a2adf2da8efe6aae6b6d8f93e085f12b2a986 | /portfolio/urls.py | 1f39c9941b639bba16ce0f510b7d20d2498cf250 | [] | no_license | DmitriiGrekov/portfolio_backend | cea951968551004277f7f677758d890e77ed5225 | 977f1cd3245da0dbac8787a774f286a1248b815f | refs/heads/master | 2023-08-16T00:26:55.093400 | 2021-10-23T17:58:43 | 2021-10-23T17:58:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 350 | py | from django.contrib import admin
from django.urls import path
from django.urls.conf import include
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
path('admin/', admin.site.urls),
path('api/', include('main.urls')),
]
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) | [
"grekovdima7@gmail.com"
] | grekovdima7@gmail.com |
e77767f3b34ac0cceb230f1f9fe183fc12d42c3e | 377c0b5fe3434013a7026d750c6891135c3e1227 | /test/test_methods.py | ba2f5c0a17ff703db6946d718c186f282ad8def9 | [] | no_license | nahimilega/testing_mirror | 80736264e2b7ec37ef197efbcf798100673b7b57 | 80b30a451d8e2e2c3bd7c4dbd11999d27b71356a | refs/heads/master | 2023-07-11T03:57:30.300455 | 2021-08-12T13:08:28 | 2021-08-12T13:17:40 | 396,765,494 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 616 | py | from test_utils import perform_test
import os
if __name__ == '__main__':
parms = {
'method': ['1', '2', '3', '4', '5', '6'],
'verbose': ['3']
}
ref_file_name = "../ref/test_methods.ref"
test_name = 'test_methods'
field_to_compare = ['unfold']
perform_test(parms, ref_file_name, test_name, field_to_compare)
## For plotting
command_str = "../build/RooUnfoldTest ploterrors=2"
os.system(command_str)
command_str = "../build/RooUnfoldTest ploterrors=1"
os.system(command_str)
command_str = "../build/RooUnfoldTest plotparms=2"
os.system(command_str) | [
"archit18221@iiitd.ac.in"
] | archit18221@iiitd.ac.in |
5e52c4ff136799858643059ebd31b635207d611c | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/303/usersdata/279/83542/submittedfiles/testes.py | 3a38cac1f44b845560cd1e275ba2832493e38db4 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 378 | py | # -*- coding: utf-8 -*-
#COMECE AQUI ABAIXO
while (True) :
while(True) :
n=int(input("digite um numero inteiro positivo"))
if (n>=0) :
break
f=1
for i in range (2,n+1,1) :
f *= i
print("%d!= %d"%(n,f))
opt=input('deseja continuar?[s ou n]')
if(opt=='n'):
print('\n\nate breve')
break
| [
"rafael.mota@ufca.edu.br"
] | rafael.mota@ufca.edu.br |
5fb99139e8655db68ecfcb3c5bc147bf2c229044 | 38869340d12b858113df4005537a07efc45545f1 | /test_multiplier.py | a760af21333e9ec286e88b62a55a88f13195c871 | [
"MIT"
] | permissive | beepscore/argparse | c765c345f334d84d61feea99cfb11557ade3e40e | 6eeba617bcc263d85030bb24dd3e2f9253d741c8 | refs/heads/master | 2021-01-01T15:30:09.950746 | 2013-04-21T20:04:32 | 2013-04-21T20:04:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,198 | py | #!/usr/bin/env python3
import multiplier
import unittest
class TestMultiplier(unittest.TestCase):
multiplicand_index = 0
multiplier_index = 1
expected_result_index = 2
test_datas = [
[0,0,0],
[1,0,0],
[0,1,0],
[1,1,1],
[2,1,2],
[1,2,2],
[2,3,6],
[6,7,42],
[-6,7,-42],
[12,-5,-60],
]
def setUp(self):
pass
def test_multiply(self):
for test_data in self.test_datas:
# module multiplier, class Multiplier, method multiply(a, b)
result = multiplier.Multiplier.multiply(test_data[self.multiplicand_index],
test_data[self.multiplier_index])
self.assertEqual(test_data[self.expected_result_index], result,
'multiply({}, {}) expected {} but got {}'.format(test_data[self.multiplicand_index],
test_data[self.multiplier_index],
test_data[self.expected_result_index],
result))
def test_multiply_iterative(self):
for test_data in self.test_datas:
# module multiplier, class Multiplier, method multiply_iterative(a, b)
result = multiplier.Multiplier.multiply_iterative(test_data[self.multiplicand_index],
test_data[self.multiplier_index])
self.assertEqual(test_data[self.expected_result_index], result,
'multiply_iterative({}, {}) expected {} but got {}'.format(test_data[self.multiplicand_index],
test_data[self.multiplier_index],
test_data[self.expected_result_index],
result))
if __name__ == "__main__": unittest.main()
| [
"support@beepscore.com"
] | support@beepscore.com |
fdeb50ed56695c6bec4052990f11f0eed7e19b01 | e2c1665c4a13c66bcc79c4bf4bff21e591e8b6da | /review/urls.py | 72812fb0c0742b86099def7de243d936b618f20a | [] | no_license | kim-yejin20/13-watchandchill-backend | 22d557b8ff799dbb481d3eba8bb5c4f4150a927a | 25ddb5705a0db48c16886856374431148da79d09 | refs/heads/main | 2023-01-06T08:33:33.862892 | 2020-11-02T01:48:43 | 2020-11-02T01:48:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 284 | py | from django.urls import path
from .views import (
ReviewView,
StarRatingView,
MovieRatingView
)
urlpatterns = [
path('/<int:movie_id>',MovieRatingView.as_view()),
path('/rating', StarRatingView.as_view()),
path('/get', ReviewView.as_view())
]
| [
"jin11241124@gmail.com"
] | jin11241124@gmail.com |
ba71c968ef470a11726fa3259b5e4cc50eb6c3cb | 9d7bf169e2604c009b76bf8667d69dae616a7fba | /ProbeSearch.spec | 889ab4f4b1251f2d776abb2b0f0454085c89c38d | [
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-other-permissive"
] | permissive | earthinversion/ProbeSearch-Desktop-Application | 80fbedb49bfda29bb058b040714d61fd1dba228b | ae08caa1360196ed87c1c6fd810a437e20996aae | refs/heads/master | 2023-03-17T07:21:45.943982 | 2021-03-28T05:06:03 | 2021-03-28T05:06:03 | 350,330,028 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,136 | spec | # -*- mode: python ; coding: utf-8 -*-
import sys ; sys.setrecursionlimit(sys.getrecursionlimit() * 5)
block_cipher = None
a = Analysis(['probeSearch.py'],
pathex=['/Users/utpalkumar50/Downloads/ProbeSearchTest'],
binaries=[('/System/Library/Frameworks/Tk.framework/Tk', 'tk'), ('/System/Library/Frameworks/Tcl.framework/Tcl', 'tcl')],
datas=[('icons/*.svg', 'icons/.'), ('*.ui', '.'), ('*.yml', '.')],
hiddenimports=[],
hookspath=[],
runtime_hooks=[],
excludes=[],
win_no_prefer_redirects=False,
win_private_assemblies=False,
cipher=block_cipher,
noarchive=False)
pyz = PYZ(a.pure, a.zipped_data,
cipher=block_cipher)
exe = EXE(pyz,
a.scripts,
a.binaries,
a.zipfiles,
a.datas,
[],
name='ProbeSearch',
debug=False,
bootloader_ignore_signals=False,
strip=False,
upx=True,
upx_exclude=[],
runtime_tmpdir=None,
console=True , icon='icons/myicon.ico')
| [
"utpalkumar50@gmail.com"
] | utpalkumar50@gmail.com |
b6770849ca9f3fe85e001992391269fccaf5f00a | a714ebd86f3b33d848d1b31010b66e298251c44b | /legacy/bin/bin/upgrade_ms_python_language_server.py | bf7e08e0f64ec9bd60cf59483dfe6a448f9fc619 | [] | no_license | lbolla/dotfiles | 6257c415cc0e211c897cf7ed6fe5f212700b0244 | 176be09f3c4bc99dfc90a9d8a568a27314cb19bd | refs/heads/master | 2023-08-17T17:25:53.722263 | 2023-08-15T15:36:41 | 2023-08-15T15:36:41 | 67,174 | 8 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,775 | py | #!/usr/bin/env python3
from packaging import version
import re
import os
import shutil
import subprocess
import tempfile
from urllib.request import urlopen
from urllib.parse import urlencode
import xml.etree.ElementTree as ET
VERSION_RE = re.compile(r'^.+(\d+\.\d+\.\d+)\.nupkg$')
base_url = 'https://pvsc.blob.core.windows.net/python-language-server-stable'
params = {
'restype': 'container',
'comp': 'list',
'prefix': 'Python-Language-Server-linux-x64',
}
url = f'{base_url}?{urlencode(params)}'
with urlopen(url) as rs:
body = rs.read()
root = ET.fromstring(body)
blob_urls = sorted([
n.text for n in root.findall('./Blobs/Blob/Url') if n.text])
versions = []
for blob_url in blob_urls:
match = VERSION_RE.match(blob_url)
if match:
v = version.parse(match.groups()[0])
versions.append((v, blob_url))
latest_url = sorted(versions, reverse=True)[0][1]
latest_fname = latest_url.rsplit('/', 1)[1]
fname = os.path.join(tempfile.gettempdir(), latest_fname)
if not os.path.exists(fname):
print('Downloading', latest_url)
subprocess.check_call(
['curl', '-O', latest_url], cwd=tempfile.gettempdir())
destdir = os.path.join(tempfile.gettempdir(), 'mspyls')
if os.path.exists(destdir):
print('Clearing', destdir)
shutil.rmtree(destdir)
print('Extracting', fname)
subprocess.check_call(['unzip', '-d', destdir, fname])
print('Fixing permissions')
subprocess.check_call([
'chmod', '+x', os.path.join(destdir, 'Microsoft.Python.LanguageServer')])
print('Install')
if os.path.exists('/opt/mspyls'):
subprocess.check_call(['sudo', 'mv', '/opt/mspyls', '/opt/mspyls-old'])
subprocess.check_call(['sudo', 'mv', '-f', destdir, '/opt'])
print('Installed', fname)
| [
"lbolla@gmail.com"
] | lbolla@gmail.com |
3919dc1e448884fa1d00738b2ce3c698a89f2f7a | c3ca05c5569393b10f8ebf735884291646bac1e8 | /snippets/snippets/settings.py | 0ceb7c46f472b06b236db4bddcd296158619264f | [] | no_license | weeksghost/snippets | 318582bd8a9fa7603c4997fe25620170dc93377b | 99e118109779cde3b5c3d057baba34dbf3188cc6 | refs/heads/master | 2021-01-10T01:36:31.848065 | 2016-11-02T21:41:31 | 2016-11-02T21:41:31 | 36,667,194 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,900 | py | """
Django settings for snippets project.
Generated by 'django-admin startproject' using Django 1.8.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'b(srdp)_6o8i)3n8kxkdtkg-r)j!vxx6lr2k-_n!o%4&c&j)ws'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django_extensions',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'snippets.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'snippets.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
# Silence the RuntimeWarning emitted by the SQLite backend
# (django.db.backends.sqlite3.base, line 57) when saving naive datetimes.
# NOTE: the Python 2 ``exceptions`` module was removed in Python 3;
# RuntimeWarning is a builtin on both versions, so reference it directly
# instead of importing ``exceptions`` (which crashes at import on Python 3).
import warnings

warnings.filterwarnings('ignore',
                        category=RuntimeWarning,
                        module='django.db.backends.sqlite3.base',
                        lineno=57)
| [
"emarty@broadway.com"
] | emarty@broadway.com |
679ca5decf66912c6107ac392fa5ffc6ae1957ae | e2e16c3854f4881f80d020f3b2f690104d207b70 | /online_dict_server.py | b03a0a8aa4abfe4a0620140cf1d46fa5180d1252 | [] | no_license | hjj194535/online_dict | 3b060c143d56bfbac3b86a0c44f200c2edcaa9e1 | c924b38d8a1d2a19a6aecfb4099f72a9ec24717e | refs/heads/master | 2020-07-27T19:02:18.771637 | 2019-09-18T10:42:00 | 2019-09-18T10:42:00 | 209,191,407 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,335 | py | from socket import *
from multiprocessing import Process
import signal,sys
from time import sleep
from online_dict.dict_db import *
ADDR = ('127.0.0.1',8888)
class Server:
    """Per-connection request handler for the online-dictionary TCP server.

    The wire protocol is one space-separated text line per request,
    dispatched on the first token:

        R <user> <passwd>  register a user      -> b'Succ' / b'fail'
        L <user> <passwd>  log a user in        -> b'Succ' / b'fail'
        Q <user> <word>    look a word up       -> b'Succ <meaning>' / b'fail'
        H <user>           fetch query history  -> one send per record, then b'##'
        C                  client is disconnecting
    """

    def __init__(self, sockfd):
        # Listening socket; accepted connections are served by request().
        self.sockfd = sockfd
        # Database-access layer from online_dict.dict_db.
        self.user = User()

    def do_listen(self):
        """Bind the listening socket to ADDR and start listening."""
        # SO_REUSEADDR lets the server restart without waiting out TIME_WAIT.
        self.sockfd.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)
        self.sockfd.bind(ADDR)
        self.sockfd.listen(5)

    def res_msg(self, res, c):
        """Report a boolean outcome to client socket *c*."""
        if res:
            c.send(b'Succ')
        else:
            c.send(b'fail')

    def request(self, c):
        """Serve one client connection until it closes or sends 'C'.

        Runs inside a dedicated child process (see main()); terminates that
        process with sys.exit() when the client is done.

        NOTE(review): malformed requests (missing fields) would raise
        IndexError on data_list[1]/[2]; the protocol defines no error reply
        for that case, so behavior is kept as-is.
        """
        while True:
            # NOTE(review): 4086 looks like a typo for 4096; kept as-is since
            # every protocol message comfortably fits either way.
            data = c.recv(4086).decode()
            print(data)
            data_list = data.split(' ')
            # An empty read means the peer hung up; 'C' is an explicit goodbye.
            if not data or data_list[0] == 'C':
                c.close()
                sys.exit()
            elif data_list[0] == 'R':
                res = self.user.do_register(data_list[1], data_list[2])
                self.res_msg(res, c)
            elif data_list[0] == 'L':
                res = self.user.do_login(data_list[1], data_list[2])
                self.res_msg(res, c)
            elif data_list[0] == 'Q':
                res = self.user.do_query(data_list[1], data_list[2])
                if res:
                    msg = 'Succ %s' % res
                    c.send(msg.encode())
                else:
                    c.send(b'fail')
            elif data_list[0] == 'H':
                print(data_list)
                name = data_list[1]
                res = self.user.get_history(name)
                for i in res:
                    msg = "%s %-16s %s" % i
                    c.send(msg.encode())
                    # Crude pacing: presumably keeps consecutive records from
                    # being coalesced into one TCP segment -- TODO confirm.
                    sleep(1)
                # Terminator so the client knows the history listing is done.
                c.send(b'##')
#大家网络
def main():
    """Start the dictionary server: listen on ADDR and fork one child
    process per accepted client connection."""
    s = socket()
    server = Server(s)
    server.do_listen()
    # Reap zombie children automatically instead of wait()ing on them.
    signal.signal(signal.SIGCHLD,signal.SIG_IGN)
    # Accept client connections in a loop.
    print("Listen the port 8888")
    while True:
        try:
            c,addr = s.accept()
            print("Connect from",addr)
        except KeyboardInterrupt:
            # Ctrl-C: close the database connection before exiting.
            server.user.db_close()
            sys.exit('服务端退出')
        except Exception as e:
            # Transient accept() failure: log it and keep serving.
            print(e)
            continue
        # Serve this client in a dedicated child process.
        p = Process(target=server.request,args=(c,))
        p.start()
if __name__ == '__main__':
    main()
| [
"670890875@qq.com"
] | 670890875@qq.com |
6a9d533176f0afcae6ab99714b965d09410152aa | 5950bc5239d294a88bcbfe221280799245afb1bd | /salary_paid_problem.py | 2fca0c37aca1468b14dc80b8d7aa8303a9de00b2 | [] | no_license | YaswanthKumarKaja/Janani_Swaroopa | ec738571324362a908368f46f29a17ea43c909eb | 48f5cc0a81525571c07d912ff53882445a58906e | refs/heads/master | 2022-11-18T09:06:59.925880 | 2020-07-14T16:58:36 | 2020-07-14T16:58:36 | 279,643,233 | 0 | 0 | null | 2020-07-14T16:54:02 | 2020-07-14T16:54:01 | null | UTF-8 | Python | false | false | 2,748 | py | '''Taxation Woes
In a country, there are N slabs for Income tax which are common for all age groups and genders. As an income tax officer, investigating a case,
you have the amount of tax paid by each employee of an organization.
Considering the income tax slabs and rebates offered, you need to find the total amount paid by the organization in salaries to the employees
to match it with the amount reported by the organization in its filed Income tax Returns.
Information regarding the income tax slabs, rebate amount and the income tax paid by each employee of the organization will be provided.
Rebate amount is subtracted from the total salary of each employee. Tax is calculated on the remaining amount. You need to calculate the sum
of total salary paid to the employees in that year.
Constraints
Number of tax slabs = Number of percentage on tax slabs
0<= Rebate, tax paid, slab <=1000000
Input Format
First Line will provide the Amount in each slab, separate by space (' ')
Second Line will provide the percentage of tax applied on each slab. Number of values in this line will be same as that in line one, separate by space (' ')
Third Line will provide the Rebate considered
Fourth line will provide the tax paid by each employee, separate by space (' ')
Output
Total Salary paid by the organization to its employees
Example Input
300000 600000 900000
10 20 30
100000
90000 150000 210000 300000
Output
5300000
Explanation
Slabs and tax percentage indicate that for salary:
Between 0 - 300000, tax is 0%
Between 300001 - 600000, tax is 10%
Between 600001 - 900000, tax is 20%
Greater than 900001, tax is 30%
First, we exclude the rebate from the salary of each employee. This will be the taxable component of salary.
Upon, taxable salary apply the slab and tax percentage logic. Upon computation, one finds that employees
are paid amounts 1000000, 1200000, 1400000, 1700000 respectively, as salaries
. So, the total salary paid to all employees in that year will be 5300000.'''
# Reconstruct each employee's gross salary from the income tax they paid,
# given progressive tax slabs, and print the organization's total payroll.
slab=list(map(int,input().split()))      # slab upper bounds; income up to slab[0] is tax-free
per=list(map(int,input().split()))       # tax percentage for each slab above the first
rebate=int(input())                      # deducted from salary before tax is computed
emp_intax=list(map(int,input().split())) # tax paid by each employee
total=0
emp_sal=[0]*len(emp_intax)
for i in range(len(emp_intax)):
    # The taxable salary covers at least the whole tax-free first slab.
    emp_sal[i]+=slab[0]
    emp_tax=emp_intax[i]
    for j in range(1,len(slab)):
        # Tax owed if slab j were completely filled.
        max_slab_tax=(slab[j]-slab[j-1])*per[j-1]/100
        if max_slab_tax<=emp_tax:
            # Slab fully used: add its whole width to the salary.
            emp_sal[i]+=(slab[j]-slab[j-1])
            emp_tax-=max_slab_tax
        else:
            # Slab partially used: invert tax = width * rate / 100.
            curr_slab = emp_tax*100/per[j-1]
            emp_sal[i]+=curr_slab
            emp_tax-=curr_slab
    # Any tax still unaccounted for comes from the open-ended top bracket.
    if emp_tax>0:
        emp_sal[i]+=emp_tax*100/per[-1]
    # The rebate was excluded before taxing, so add it back per employee.
    total+=emp_sal[i]+rebate
print(int(total))
| [
"noreply@github.com"
] | YaswanthKumarKaja.noreply@github.com |
cc0040785639bb20fd84f983bd3f29829efec656 | 238aa46846a84ac62c0d8a3d9996778ac31165a4 | /NumPy/learningNumPy.py | 4e0fb0ec8a47f11e0844417e9e267f4d6f5257c6 | [
"MIT"
] | permissive | Ryanho84/Data_Analysis_by_python | 13871c964c419dd9e72d5eab3831844ab793494f | fa167d81aaacaab80c79f5ac6b18c1d24c3810e2 | refs/heads/master | 2020-07-04T14:16:04.345041 | 2019-09-04T10:12:27 | 2019-09-04T10:12:27 | 202,309,045 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,819 | py | """
Numpy是python语言的一个扩展程序库,支持大量的维度数组
和矩阵运算,此外也针对数组运算提供大量的数学函数库。
* 一个强大的N维数组对象ndarray
* 广播功能函数
* 整合C/C++/Fortran代码的工具
* 线性代数、傅里叶变换、随机数生成等
"""
"""
numpy.array:
numpy.array(object, dtype = None, copy = True,
order = None, subok = False, ndmin = 0)
object:数组或嵌套的数列
dtype:数组元素的数据类型,可选
copy:对象是否需要复制,可选
order:创建数组的样式,C为行方向,F为列方向,A为任意方向(default)
subok:默认返回一个与基类类型一致的数组
ndmin:指定生成数组的最小维度
dtype :
数据类型对象用来描述与数组对应的内存区域如何使用,依赖于下面几个方面:
* 数据的类型:整数/浮点数/...
* 数据的大小:不同类型的数据对应的内存字节数
* 数据的字节顺序:小端法/大端法
* 在结构化类型情况下:字段的名称、每个字段的数据类型和每个字段所取得内存块得部分
* 如果数据类型是子数组,它得形状和数据类型
numpy.dtype(object, align, copy)
object : 要转换为的数据类型对象
align : 如果为true,填充字段使其类似C得结构体
copy : 复制dtype对象,如果为false,则是对内置数据类型对象得引用
numpy.asarray:
numpy.asarray(a, dtype = None, order = None)
a : 任意形式的输入参数,可以是列表、元组、元组的元组、多维数组等
dtype : 数据类型
order : C/F
"""
#coding=utf-8
import numpy as np
from numpy import *
print("========dnarray object==========")
a = np.array([1,2,3])
b = np.array([[1,2,3],[4,5,6],[7,8,9]])
#对b中的[1,1]元素复制为10.下标从0开始
b[1,1] = 10
"""
Error:index is out of bounds for axis 0 with size 3
b[3,4] = 100
"""
"""
Error : index is out of bounds for axis 1 with size 3
b[2,4] = 100
"""
c = np.array([1,2,3,4,5], ndmin = 2)
d = np.array([1,2,3], dtype = complex)
e = np.array([[1,2,3],[4,5,6],[9,6,4]],dtype = float)
print(a.shape) #输出(3,) 前一个表示一个线性数组中元素的个数 后一个表示维数,即有多少个axis
print(b.shape)
print(a.dtype)
print(b)
print(c) #注意输出的是[[1,2,3,4,5]] 这是一个二维数组
print(d)
print(e)
print("=========dtype object==========")
#使用标量类型
dt1 = np.dtype(np.complex)
f = np.array([1,2,3], dtype = dt1)
print(f)
print(dt1)
"""
int8,int16,int32,int64四种数据类型可以使用字符串'i1' 'i2' 'i4' 'i8'代替
同理:
uint8/16/32/64可以用'u1/2/4/8'代替
float16/32/64可以用'f2/4/8'代替
complex64/128可以用'c8/16'代替
每一个内建类型都有一个唯一定义它得字符代码:
b bool
i int
u uint
f float
c complex
m timedelta
M datetime
O python Object
S,a array
U Unicode
V void
"""
dt2 = np.dtype('i8')
print(dt2)
"""
>/<字节顺序标注
>表示采用大端法存储(高位组放最前面)
<表示采用小端法存储(低位组放最前面)
"""
dt3 = np.dtype('>i4')
print(dt3)
g = np.array([1,2,3], dtype = dt3)
print(g)
"""
结构化数类型,即结构体
"""
dt4 = np.dtype([('age', np.int8)])
print(dt4)
h = np.array([(10,),(20,),(30,)],dtype = dt4)
print(h)
#类型字段名可以同于存取实际得age列
print(h['age'])
student = np.dtype([('name','S20'),('age', 'i1'),('marks','f4')])
print(student)
i = np.array([('abc',21,50),('xyz',18,75),('opq',22,80)],dtype = student)
print(i)
print('==========ndarray attribute=========')
"""
Numpy数组的维数成为秩rank,一维数组秩为1,二维数组秩为2
每一个线性的数组成为一个轴axis,也就是维度。 轴的数量就是秩
对于一个二维数组,有两个维度,就是有两个轴,可以声明axis
axis = 0,表示沿着第0轴进行操作,即对每一列进行操作
axis = 1,表示沿着第1轴进行操作,即对每一行进行操作
ndarray.ndim 秩,维度的数量
ndarray.shape 数组的维度,对于二维数组(矩阵),显示n行m列
ndarray.size 数组元素的总个数,相当于n*m
ndarray.dtype 对象数组类型
ndarray.itemsize 对象中每个元素的大小,以字节为单位
ndarray.flags 对象的内存信息
ndarray.real 元素的实部
ndarray.imag 元素的虚部
ndarray.data 一般不用
"""
#ndarray.ndim 秩,即轴的数量或维度的数量
j = np.arange(24)
print(j.ndim)
print(j)
j1 = j.reshape(2,4,3) #2*4*3 = 24
print(j1.ndim)
print(j1)
#ndarray.shape 数组的维度,返回一个元组,这个元组的长度就是维度的数目,即ndim
k = np.array([[1,2,3],[4,5,6]])
print(k)
print(k.shape)
#reshape 调整数组的维度和大小
l = np.array([[1,2,3],[4,5,6]])
l1 = l.reshape(3,2)
print(l)
print(l1)
#ndarray.itemsize 以字节形式返回数组中每个元素的大小
m = np.array([1,2,3,4,5], dtype = np.int8)
print(m.itemsize)
m1 = np.array([1,2,3,4,5], dtype = np.float64)
print(m1.itemsize)
"""
student = np.dtype([('name','S20'),('age', 'i1'),('marks','f4')])
i = np.array([('abc',21,50),('xyz',18,75),('opq',22,80)],dtype = student)
"""
print(i.itemsize)
"""
ndarray.flags:
C_CONTIGUOUS(C) 数据是在一个单一的C风格的连续段中
F_CONTIGUOUS(F) 数据是在一个单一的Fortran风格的连续段中
OWNDATA(O) 数据拥有它所使用的内存或从另一个对象中借用他
WRITEABLE(W) 数据区域可以被写入,将该值设置为flase。则数据为只读
ALIGNED(A) 数据和所有元素都适当地对齐到硬件上
UPDATEIFCOPY(U) 这个数组是其他数组的一个副本,当这个数组被释放时,原数组地内容将被更新
"""
n = np.array([1,2,3,4,5])
print(n.flags)
print("==========create array===========")
"""
numpy.empty(shape, dtype, order = 'C')
shape : 数组维度
dtype : 数据类型
order : C/F, C代表行优先, F代表列优先, 这些指在计算机内存中的存储元素的顺序
"""
#数组元素结果为随机数,未初始化
o = np.empty((3,2), dtype = int)
print(o)
"""
numpy.zeros(shape, dtype, order = 'C')
"""
#创建shape大小的数组,数组元素用0填充
#默认为浮点数
p = np.zeros(5)
print(p)
p1 = np.zeros((5,), dtype = np.int)
print(p1)
p2 = np.zeros((2,2),dtype = [('x', 'i4'), ('y','i4')])
print(p2)
"""
numpy.ones(shape, dtype, order = 'C')
"""
#创建shape大小的数组,数组元素用1填充
q = np.ones(5)
print(q)
q1 = np.ones((2,2), dtype = int)
print(q1)
"""
random.randn(size) 创建服从X-N(0,1)的正态分布随机数组
size可以是1维的 m
2维的 m,n
3维的 m,n,o
...
"""
r = random.randn(2,3,4)
print(r)
"""
randint([low,high], size)
创建[low, high]范围之间的size大小的数组
"""
s = random.randint(100,200,(3,3))
print(s)
print("============create array using exist array============")
t = [1,2,3]
#将列表转换为ndarray
t1 = np.asarray(t)
print(t1)
#将元组转换为ndarray
t2 = (1,2,3)
t3 = np.asarray(t2)
print(t3)
#将元组列表转化为ndarray
t4 = [(1,2,3),(4,5,6),(5,6,9)]
t5 = np.asarray(t4)
print(t5)
"""
Error:setting an array element with a sequence, 矩阵的列没有对齐,需要将没对齐的数据补齐
t6 = [[1,2,3],[4,5,],[7,8,9]]
t7 = np.asarray(t6, dtype = float)
"""
#并指定类型
t6 = np.asarray(t4, dtype = float)
print(t6)
print("====the difference between array and asarray====")
"""
1. 参数个数不同, array最多5个参数, asarray最多三个参数
2. array和asarray都可以将结构数据转化为ndarray,但是主要区别就是当数据源是ndarray时,
array仍然会copy出一个副本,占用新的内存,但asarray不会。
"""
#example 1:
print("example 1 :")
data1=[[1,1,1],[1,1,1],[1,1,1]]
arr2=np.array(data1)
arr3=np.asarray(data1)
data1[1][1]=2
print('data1:\n',data1)
print('arr2:\n',arr2)
print('arr3:\n',arr3)
#example 2:
print("example 2 :")
arr1=np.ones((3,3))
arr2=np.array(arr1)
arr3=np.asarray(arr1)
arr1[1]=2
print('arr1:\n',arr1)
print('arr2:\n',arr2)
print('arr3:\n',arr3)
print("===========create array from range===========")
"""
numpy.arange(start, stop, step, dtype):
创建数值范围并返回ndarray对象
start : 起始值,默认为0
stop : 终止值
step : 步长,默认为1
dtype : 数据类型
似乎只能创建一维向量
"""
#[0, 5)
u = np.arange(5)
print(u)
u1 = np.arange(5, dtype = float)
print(u1)
u2 = np.arange(10, 20, 2, dtype = complex)
print(u2)
"""
numpy.linspace(start, stop, num = 50, endpoint = True, retstep = False, dtype = None)
用于创建一个一维数组,数组是由一个等差数列构成的
start : 序列的起始值
stop : 序列的终止指,如果endpoint为true,则该值包含在数列中,即决定是半开半闭还是全闭区间
num : 要生成的等步长的样本数量,默认为50
endpoint : 该值为true时,数列中包含stop,否则不包含,默认为true
retstep : 如果为true,生成的数组会显示间距,否则不显示
dtype : 数据类型
"""
v = np.linspace(1, 10, 10)
print(v)
v1 = np.linspace(1, 1, 10)
print(v1)
v2 = np.linspace(10, 20, 6, endpoint = False)
print(v2)
v3 = np.linspace(1, 10, 10, retstep = True)
print(v3)
v4 = np.linspace(1, 10, 10).reshape((10, 1))
print(v4)
"""
numpy.logspace(start, stop, num = 50, endpoint = True, base = 10.0, dtype = None)
创建一个等比数列,并返回一个ndarray对象
start : 序列的初始值为base^start
stop : 序列的终止值为base^stop
num : 要生成的等步长的样本数量,默认为50
endpoint : 同linspace()
base : 对数log的底数
dtype : 数据类型
"""
w = np.logspace(1.0, 2.0, num = 20)
print(w)
#区间[2^0, 2^9],num = 10的等比数列
w1 = np.logspace(0, 9, 10, base = 2)
print(w1)
print("===============slice and index=============")
"""
ndarray可以通过索引或切片来访问和修改,操作与python中的list一样
ndarray可以基于0 - n的下标进行索引,切片可以通过slice函数,并设置start
, stop, step参数进行
"""
x = np.arange(10, dtype = 'i1')
#slice(start, stop, step)
xs = slice(2, 7, 2)
print(x[xs])
# Or index directly with the [start:stop:step] syntax.
xs1 = x[2:7:2]
print(xs1)
"""
[n], 返回该索引对应的单个元素
[n:], 返回从n起始的所有元素
[n:m], 返回从n到m的所有元素
[n:m:s], 返回从n到m, 间隔为s的所有元素
"""
x2 = np.arange(100) #[0,1,2,3,...,99]
x2_6 = x2[6] # single element at index 6
print(x2_6)
x2_2_ = x2[2:] # everything from index 2 onwards
print(x2_2_)
x2_2_5 = x2[2:5] # values at indices 2..4 (step defaults to 1)
print(x2_2_5)
"""
多为数组也可以切割
"""
x3 = np.array([[1,2,3], [3,4,5], [4,5,6]])
print(x3)
# print("slicing starting from row index 1:")
print(x3[1:]) # think of a 2-D array as an array of rows
"ryanho2013@163.com"
] | ryanho2013@163.com |
85ee619c75fe4e001557036234067afc4b23a9db | 991c70026dc6c2f1c1a026f0036b1fcc48b7dea6 | /solution/__main__.py | 54a54c9134533922eeda8f1b6d351fd88c6e9633 | [] | no_license | boostcampaitech2/image-classification-level1-08 | 1cdd74ea7bb75e84b31f5623d7cca52abfc3ace9 | 909375589e0ed76a6aea9e7ad912ff4bc379f6ff | refs/heads/master | 2023-07-21T10:27:31.052436 | 2021-09-04T15:16:14 | 2021-09-04T15:16:14 | 397,467,509 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,239 | py | import sys
from importlib import import_module
from argparse import ArgumentParser
HUGGING_RUN_SCRIPTS = {
"image_classification": "run_image_classification",
"mask_image_modeling": "run_mask_image_model",
"test_module": "run_test_module"
}
JISOO_RUN_SCRIPTS = {
"train": "train",
"test": "test",
}
CNN_ENGINE_RUN_SCRIPTS = {
"train": "train",
}
MOON_ENGINE_RUN_SCRIPTS = {
"train": "train",
}
LIBRARY_MAP = {
"hugging": HUGGING_RUN_SCRIPTS,
"jisoo": JISOO_RUN_SCRIPTS,
"cnn_engine": CNN_ENGINE_RUN_SCRIPTS,
"moon": MOON_ENGINE_RUN_SCRIPTS,
}
def main(args: ArgumentParser):
script_list = LIBRARY_MAP.get(args.module, None)
if script_list is None:
raise AttributeError
module = import_module(args.module)
script_name = script_list.get(args.script, None)
script = getattr(module, script_name)
sys.argv = sys.argv[-2:]
if hasattr(script, 'main'):
script.main()
if __name__ == '__main__':
parser = ArgumentParser()
parser.add_argument('-m', '--module', default='hugging')
parser.add_argument('-s', '--script', default='image_classification')
parser.add_argument('-c', '--config')
args = parser.parse_args()
main(args)
| [
"jinmang2@gmail.com"
] | jinmang2@gmail.com |
cea5677ccbb0d2267bdf0008be9c86c843017b59 | daa8ed992d97d74f4c2deaff6a8951e24c12cf69 | /lab_vgg16/models/resnet_PW.py | dc85e426fc3ecc746dcca4fd11a538cce2b7aad2 | [] | no_license | SpeagleYao/Symmetry-Property | 280f75c46d4aa53fda15ea4af04df2b5ac20051b | 94090b4086892a9e92f69dcbdc56d0ab620c715d | refs/heads/master | 2023-03-08T19:29:13.179054 | 2021-02-26T06:15:48 | 2021-02-26T06:15:48 | 341,729,857 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,874 | py | '''ResNet in PyTorch.
For Pre-activation ResNet, see 'preact_resnet.py'.
Reference:
[1] Kaiming He, Xiangyu Zhang, Shaoqing Ren, Jian Sun
Deep Residual Learning for Image Recognition. arXiv:1512.03385
'''
import torch
import torch.nn as nn
from torch.nn import Parameter
import torch.nn.functional as F
class FeatLinear(nn.Module):
    """Linear layer that returns its weight matrix alongside the output.

    Unlike ``nn.Linear`` the weight is stored as (in_features, out_features),
    and the forward pass returns ``(weight, x @ weight + bias)`` so callers
    can inspect or regularize the classifier weights directly.
    """

    def __init__(self, in_features, out_features):
        super(FeatLinear, self).__init__()
        self.in_features = in_features
        self.out_features = out_features
        # Note the (in, out) layout: forward multiplies x.mm(weight)
        # without transposing.
        self.weight = Parameter(torch.Tensor(in_features, out_features))
        self.bias = Parameter(torch.Tensor(out_features))
        nn.init.xavier_uniform_(self.weight)
        nn.init.uniform_(self.bias)

    def forward(self, input):
        out = input.mm(self.weight) + self.bias
        return self.weight, out
class BasicBlock(nn.Module):
    """Standard two-conv residual block (ResNet-18/34 style).

    Output channels = planes * expansion; the shortcut becomes a 1x1-conv +
    BN projection whenever the spatial size or channel count changes.
    """
    expansion = 1

    def __init__(self, in_planes, planes, stride=1):
        super(BasicBlock, self).__init__()
        # The first 3x3 conv carries the stride (downsampling, if any).
        self.conv1 = nn.Conv2d(
            in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3,
                               stride=1, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)

        # Identity shortcut by default; projection when shapes differ.
        self.shortcut = nn.Sequential()
        if stride != 1 or in_planes != self.expansion*planes:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_planes, self.expansion*planes,
                          kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(self.expansion*planes)
            )

    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))
        out = self.bn2(self.conv2(out))
        # Residual addition happens before the final activation.
        out += self.shortcut(x)
        out = F.relu(out)
        return out
class Bottleneck(nn.Module):
    """Three-conv bottleneck residual block (ResNet-50/101/152 style).

    1x1 reduce -> 3x3 (strided) -> 1x1 expand; output channels are
    planes * expansion (4x), with a projection shortcut when shapes change.
    """
    expansion = 4

    def __init__(self, in_planes, planes, stride=1):
        super(Bottleneck, self).__init__()
        self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        # The middle 3x3 conv carries the stride (downsampling, if any).
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3,
                               stride=stride, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        self.conv3 = nn.Conv2d(planes, self.expansion *
                               planes, kernel_size=1, bias=False)
        self.bn3 = nn.BatchNorm2d(self.expansion*planes)

        # Identity shortcut by default; projection when shapes differ.
        self.shortcut = nn.Sequential()
        if stride != 1 or in_planes != self.expansion*planes:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_planes, self.expansion*planes,
                          kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(self.expansion*planes)
            )

    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))
        out = F.relu(self.bn2(self.conv2(out)))
        out = self.bn3(self.conv3(out))
        # Residual addition happens before the final activation.
        out += self.shortcut(x)
        out = F.relu(out)
        return out
class PWResNet(nn.Module):
    """ResNet backbone for 32x32 inputs (CIFAR-style) whose classifier is a
    FeatLinear head, so forward() returns (classifier_weight, logits)."""

    def __init__(self, block, num_blocks, num_classes=10):
        super(PWResNet, self).__init__()
        self.in_planes = 64

        self.conv1 = nn.Conv2d(3, 64, kernel_size=3,
                               stride=1, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(64)
        self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1)
        self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2)
        self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2)
        self.layer4 = self._make_layer(block, 512, num_blocks[3], stride=2)
        self.linear = FeatLinear(512*block.expansion, num_classes)

    def _make_layer(self, block, planes, num_blocks, stride):
        """Stack *num_blocks* blocks; only the first one may downsample."""
        strides = [stride] + [1]*(num_blocks-1)
        layers = []
        for stride in strides:
            layers.append(block(self.in_planes, planes, stride))
            self.in_planes = planes * block.expansion
        return nn.Sequential(*layers)

    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))
        out = self.layer1(out)
        out = self.layer2(out)
        out = self.layer3(out)
        out = self.layer4(out)
        # A 32x32 input is 4x4 by this point, so this is global pooling.
        out = F.avg_pool2d(out, 4)
        feature = out.view(out.size(0), -1)
        # FeatLinear returns (weight matrix, logits).
        w, out = self.linear(feature)
        return w, out
return w, out
def PWResNet18():
    """PWResNet with BasicBlock depths [2, 2, 2, 2] (ResNet-18 layout)."""
    return PWResNet(BasicBlock, [2, 2, 2, 2])
def PWResNet34():
    """PWResNet with BasicBlock depths [3, 4, 6, 3] (ResNet-34 layout)."""
    return PWResNet(BasicBlock, [3, 4, 6, 3])
def PWResNet50():
    """PWResNet with Bottleneck depths [3, 4, 6, 3] (ResNet-50 layout)."""
    return PWResNet(Bottleneck, [3, 4, 6, 3])
def PWResNet101():
    """PWResNet with Bottleneck depths [3, 4, 23, 3] (ResNet-101 layout)."""
    return PWResNet(Bottleneck, [3, 4, 23, 3])
def PWResNet152():
    """PWResNet with Bottleneck depths [3, 8, 36, 3] (ResNet-152 layout)."""
    return PWResNet(Bottleneck, [3, 8, 36, 3])
if __name__=='__main__':
    # Smoke test on a single CIFAR-sized input.
    net = PWResNet34()
    # NOTE(review): forward() returns (classifier_weight, logits), so the
    # name 'feature' here actually holds the FeatLinear weight matrix.
    feature, y = net(torch.randn(1, 3, 32, 32))
    print(feature.size())
    print(y.size())
| [
"SpeagleYao@sjtu.edu.cn"
] | SpeagleYao@sjtu.edu.cn |
da8575710a6aa43ee73d564d78fc3d5d2c62f28a | 284e3b6e4302e3542edaef473bfd50322244bedf | /boj/1065.py | 1d9ae82eba744dd37fb58f165d9cf2a75fec3bbc | [] | no_license | storykim/problem-solving | 790fe0e877c7378fa62ebf5dca66c38997a06c34 | ed24772c85cee531634593b32f93de91ee558484 | refs/heads/master | 2021-01-07T09:53:19.540372 | 2020-03-07T12:43:18 | 2020-03-07T12:43:18 | 241,655,951 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 304 | py | def is_han(num):
if num < 100: return True
diff = num % 10 - (num // 10) % 10
while num > 9:
if num % 10 - (num // 10) % 10 != diff:
return False
num //= 10
return True
# Read the upper bound N and count the hansu in [1, N].
count = 0
for i in range(1, int(input()) + 1):
    if is_han(i): count += 1
print(count)
| [
"donghwa.s.kim@gmail.com"
] | donghwa.s.kim@gmail.com |
852f27dfb45b3afa594f0f341751b29dca07106c | 61c4a57c1aa18b5a1b12e09bf6c370807b81eaac | /liah8_TGB/settings.py | b6cd6d0f9cca094c4da46d17c83c78db08d65ab5 | [] | no_license | sih4sing5hong5/huan1-ik8_gian2-kiu3 | b8ae048591232e7d4eacb528807f9373fb954f71 | 98163068b4a410af5544ba860ef6ea0204610e36 | refs/heads/master | 2020-12-11T08:12:20.231402 | 2018-07-02T07:25:23 | 2018-07-02T07:26:48 | 18,503,539 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 558 | py | # Scrapy settings for liah8_TGB project
#
# For simplicity, this file contains only the most important settings by
# default. All the other settings are documented here:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
#
BOT_NAME = 'liah8_TGB'
SPIDER_MODULES = ['liah8_TGB.spiders']
NEWSPIDER_MODULE = 'liah8_TGB.spiders'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
# USER_AGENT = 'liah8_TGB (+http://www.yourdomain.com)'
# Politeness throttling: pause ~0.5s between requests (randomized by Scrapy
# to 0.25-0.75s) and keep at most two concurrent requests per remote IP.
DOWNLOAD_DELAY = 0.5
RANDOMIZE_DOWNLOAD_DELAY = True
CONCURRENT_REQUESTS_PER_IP = 2
| [
"Ihc@gmail.com"
] | Ihc@gmail.com |
02a98ade50103283feb338edcc0173523845ad64 | 7e03749969def182e47e1974c9de3b4cbc3ca4ff | /interna/crowdfund/migrations/0009_project_funded.py | d8b876250f32ac5eb2ace47eae0016effc73a605 | [] | no_license | coredump-ch/interna | a04dab1b1c92cbbaec666186f2f09c2bbb989b10 | 63ee3d7bd173368373e3d7c8e20eb962803c8514 | refs/heads/master | 2021-11-25T08:13:01.780169 | 2021-11-21T20:49:44 | 2021-11-21T20:50:13 | 13,701,826 | 5 | 3 | null | 2021-11-15T22:33:08 | 2013-10-19T13:53:03 | Python | UTF-8 | Python | false | false | 523 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-09-10 20:05
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the nullable, non-editable ``funded`` timestamp to Project."""
    dependencies = [
        ('crowdfund', '0008_auto_20170906_2346'),
    ]
    operations = [
        migrations.AddField(
            model_name='project',
            name='funded',
            field=models.DateTimeField(blank=True, editable=False, help_text='When was this project funded?', null=True),
        ),
    ]
| [
"mail@dbrgn.ch"
] | mail@dbrgn.ch |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.