hexsha stringlengths 40 40 | size int64 4 1.02M | ext stringclasses 8 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 209 | max_stars_repo_name stringlengths 5 121 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 209 | max_issues_repo_name stringlengths 5 121 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 209 | max_forks_repo_name stringlengths 5 121 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 4 1.02M | avg_line_length float64 1.07 66.1k | max_line_length int64 4 266k | alphanum_fraction float64 0.01 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
c5eaedbb229538331c20c99594c1a1698e147ed4 | 2,607 | py | Python | tests/test_utils.py | BobDotCom/minecraft-launcher-lib | 05e15e72936f8f0db347ae1491860127103b4e32 | [
"BSD-2-Clause"
] | 1 | 2021-11-02T19:27:22.000Z | 2021-11-02T19:27:22.000Z | tests/test_utils.py | BobDotCom/minecraft-launcher-lib | 05e15e72936f8f0db347ae1491860127103b4e32 | [
"BSD-2-Clause"
] | null | null | null | tests/test_utils.py | BobDotCom/minecraft-launcher-lib | 05e15e72936f8f0db347ae1491860127103b4e32 | [
"BSD-2-Clause"
] | null | null | null | import minecraft_launcher_lib
import json
import os
def create_test_version_file(minecraft_directory: str):
    """Write a minimal local version manifest ("utilstest") used by the tests."""
    version_dir = os.path.join(minecraft_directory, "versions", "utilstest")
    os.makedirs(version_dir)
    manifest = {"id": "utilstest", "type": "release"}
    with open(os.path.join(version_dir, "utilstest.json"), "w", encoding="utf-8") as handle:
        json.dump(manifest, handle)
def test_get_minecraft_directory():
    """The default Minecraft directory is reported as a string path."""
    directory = minecraft_launcher_lib.utils.get_minecraft_directory()
    assert isinstance(directory, str)
def test_get_latest_version():
    """The latest-version info exposes both a release and a snapshot entry."""
    latest = minecraft_launcher_lib.utils.get_latest_version()
    assert "release" in latest
    assert "snapshot" in latest
def test_get_version_list():
    """Each entry of the remote version list carries a type and an id."""
    versions = minecraft_launcher_lib.utils.get_version_list()
    first = versions[0]
    assert "type" in first
    assert "id" in first
def test_get_installed_versions(tmpdir):
    """Locally installed versions are detected for both path-like and str dirs.

    The original test called the function twice (once with ``tmpdir``, once
    with ``str(tmpdir)``) but silently discarded the first result; now both
    return values are asserted.
    """
    create_test_version_file(tmpdir)
    for directory in (tmpdir, str(tmpdir)):
        version_list = minecraft_launcher_lib.utils.get_installed_versions(directory)
        assert version_list[0]["id"] == "utilstest"
        assert version_list[0]["type"] == "release"
def test_get_available_versions(tmpdir):
    """Available versions are listed for both path-like and str directories.

    As with ``test_get_installed_versions``, the first call's result was
    previously discarded without any assertion; both calls are now checked.
    """
    create_test_version_file(tmpdir)
    for directory in (tmpdir, str(tmpdir)):
        version_list = minecraft_launcher_lib.utils.get_available_versions(directory)
        assert "type" in version_list[0]
        assert "id" in version_list[0]
def test_get_java_executable():
    """A Java executable path is returned as a string."""
    executable = minecraft_launcher_lib.utils.get_java_executable()
    assert isinstance(executable, str)
def test_get_library_version():
    """The library reports its own version as a string."""
    version = minecraft_launcher_lib.utils.get_library_version()
    assert isinstance(version, str)
def test_generate_test_options():
    """Generated offline-mode launch options provide string credentials."""
    options = minecraft_launcher_lib.utils.generate_test_options()
    for key in ("username", "uuid", "token"):
        assert isinstance(options[key], str)
def test_is_version_valid(tmpdir):
    """Vanilla and locally installed ids validate; unknown ids do not."""
    create_test_version_file(tmpdir)
    for directory in (tmpdir, str(tmpdir)):
        assert minecraft_launcher_lib.utils.is_version_valid("1.16", directory) is True
        assert minecraft_launcher_lib.utils.is_version_valid("utilstest", directory) is True
        assert minecraft_launcher_lib.utils.is_version_valid("Test123", directory) is False
| 37.242857 | 120 | 0.7687 |
ce900b8f3da18b4294e5e3e291dadfd9fdaf5a2b | 5,869 | py | Python | src/main.py | rvaccarim/stockfish_simulator | 6491afe9abf6bfa2098a076e9ba69108005cb4f5 | [
"MIT"
] | 2 | 2020-12-16T01:37:23.000Z | 2021-09-07T02:36:47.000Z | src/main.py | rvaccarim/stockfish_simulator | 6491afe9abf6bfa2098a076e9ba69108005cb4f5 | [
"MIT"
] | null | null | null | src/main.py | rvaccarim/stockfish_simulator | 6491afe9abf6bfa2098a076e9ba69108005cb4f5 | [
"MIT"
] | null | null | null | import logging
import os
import copy
import time
import shutil
import chess.engine
import chess.polyglot
import chess.svg
from svglib.svglib import svg2rlg
from reportlab.graphics import renderPM
output_root = "../output"
def save_match(filename, board, moves):
    """Persist a finished game as a text log plus a PNG of the final position.

    Args:
        filename: output path prefix (no extension); ``.log``/``.png`` added.
        board: the finished ``chess.Board``.
        moves: sequence of ``chess.Move`` objects, white's move first.

    BUG FIX: the previous version wrote every game to the literal paths
    "(unknown).log"/".svg"/".png", ignoring the ``filename`` argument.
    """
    with open(f"{filename}.log", "w") as game_file:
        # Record the outcome. board.turn is the side to move, i.e. the side
        # that has been mated when is_checkmate() holds.
        if board.is_checkmate():
            if board.turn == chess.WHITE:
                game_file.write("Black checkmate\n")
            else:
                game_file.write("White checkmate\n")
        elif board.is_stalemate():
            game_file.write("Draw - Stalemate\n")
        elif board.is_insufficient_material():
            game_file.write("Draw - Insufficient material\n")
        elif board.is_fivefold_repetition():
            game_file.write("Draw - Fivefold repetition\n")
        elif board.is_seventyfive_moves():
            game_file.write("Draw - Seventyfive Moves\n")
        else:
            game_file.write("Draw - Other\n")

        # ASCII rendering of the final position, then the full move list.
        game_file.write(f"\n{str(board)}\n\n")
        for i, move in enumerate(moves):
            colour = "White" if i % 2 == 0 else "Black"
            game_file.write(f"{colour}: {str(move)}\n")

    # Render the final position: write the SVG, convert it to PNG, then
    # delete the intermediate SVG file.
    boardsvg = chess.svg.board(board=board)
    with open(f"{filename}.svg", "w") as image_file:
        image_file.write(boardsvg)

    svg_image = svg2rlg(f"{filename}.svg")
    renderPM.drawToFile(svg_image, f"{filename}.png", fmt="PNG")
    os.remove(f"{filename}.svg")
def setup_board(starting_moves, use_opening_book):
    """Build the starting position for a simulated game.

    Pushes the forced ``starting_moves`` (UCI strings) onto a fresh board,
    then — if ``use_opening_book`` — extends the line from a polyglot opening
    book until the book has no entry or ``book_depth`` plies are reached.

    Returns:
        (board, opening_moves): the prepared ``chess.Board`` and the list of
        ``chess.Move`` objects already played on it.
    """
    board = chess.Board()
    opening_moves = []

    # Apply the caller-chosen forced opening moves.
    for fm in starting_moves:
        m = chess.Move.from_uci(fm)
        board.push(m)
        opening_moves.append(m)

    # Cap on total opening plies (forced moves + book moves).
    book_depth = 22
    if use_opening_book:
        # NOTE(review): the book path is hard-coded to a local Windows drive;
        # this only works on the original author's machine.
        with chess.polyglot.open_reader("D:/Users/frozen/Documents/03_programming/online/stockfish/books/elo-2700.bin") as reader:
            while True:
                found = False
                # uses the first play from the recommended plays according to the opening book
                for entry in reader.find_all(board):
                    board.push(entry.move)
                    opening_moves.append(entry.move)
                    found = True
                    break

                # Stop when the book runs dry or the depth cap is hit.
                if not found or len(opening_moves) >= book_depth:
                    break
    return board, opening_moves
def play(engine, starting_moves, use_opening_book, matches, depth, log_file, summary_file):
    """Play ``matches`` engine-vs-self games from one opening and record them.

    Each game is saved via ``save_match`` under ``output_root/<depth>/<opening>``;
    per-game lines go to ``log_file`` and a per-opening summary to
    ``summary_file``. Results are tallied by PGN result string
    ("1-0" white win, "0-1" black win, "1/2-1/2" draw, "*" unfinished).
    """
    results = {"1-0": 0,
               "0-1": 0,
               "1/2-1/2": 0,
               "*": 0
               }
    initial_board, initial_moves = setup_board(starting_moves, use_opening_book)
    starting_str = '_'.join(starting_moves)
    output_dir = f"{output_root}/{depth}/{starting_str}"

    for match in range(0, matches):
        tic = time.perf_counter()
        # Deep-copy so every match restarts from the same prepared opening.
        board = copy.deepcopy(initial_board)
        moves = copy.deepcopy(initial_moves)

        # Let the engine play both sides until the game ends.
        while not board.is_game_over():
            result = engine.play(board, chess.engine.Limit(depth=depth), ponder=False)
            board.push(result.move)
            moves.append(result.move)

        results[board.result()] += 1

        # record match, saves log and a png image of the board's final state
        if use_opening_book:
            filename = f"{output_dir}/book_game_{match + 1}"
        else:
            filename = f"{output_dir}/game_{match + 1}"

        save_match(filename, board, moves)

        toc = time.perf_counter()
        log_str = f'Book: {str(use_opening_book):5s} Depth: {depth} {starting_str} Match: {str(match + 1):>3s} W:{str(results["1-0"]):>3s} D:{str(results["1/2-1/2"]):>3s} B:{str(results["0-1"]):>3s} {str(len(moves)):>3s} moves {toc - tic:0.4f} seconds '
        print(log_str)
        log_file.write(log_str + "\n")
        log_file.flush()

    # One aggregated line per opening in the summary file.
    summary_file.write(
        f'Book: {str(use_opening_book):5s} Depth: {depth} {starting_str} W:{str(results["1-0"]):>3s} D:{str(results["1/2-1/2"]):>3s} B:{str(results["0-1"]):>3s}\n')
    summary_file.flush()
def setup_output(output_dir, depths, moves_list):
    """Recreate the output tree: one folder per (depth, opening-moves) pair.

    Any existing ``output_dir`` is removed first, so every run starts clean.
    """
    if os.path.isdir(output_dir):
        shutil.rmtree(output_dir)

    for depth in depths:
        for opening in moves_list:
            os.makedirs(f"{output_dir}/{depth}/{'_'.join(opening)}")
def simulate():
    """Run the full sweep: every opening x every depth, with and without book.

    Spawns a local Stockfish process, configures it, and writes per-game logs
    plus a summary under ``output_root``.

    NOTE(review): the Stockfish binary and Syzygy tablebase paths are
    hard-coded to a local Windows drive — adjust before running elsewhere.
    """
    # Each inner list is a forced opening line in UCI notation.
    starting_moves = [["e2e4", "e7e5"],
                      ["e2e4", "c7c5"],
                      ["d2d4", "g8f6"],
                      ["d2d4", "d7d5"],
                      ["g1f3"],
                      ["c2c4"],
                      ["f2f3"]]
    depths = [20]
    setup_output(output_root, depths, starting_moves)

    # logging.basicConfig(level=logging.DEBUG)
    engine = chess.engine.SimpleEngine.popen_uci("D:/Users/frozen/Documents/99_temp/stockfish_12/stockfish.exe")
    engine.configure({"Threads": 6})
    engine.configure({"Hash": 4096})
    engine.configure({"SyzygyPath": "D:/Users/frozen/Documents/03_programming/online/stockfish/syzygy"})

    with open(f"{output_root}/log.txt", "w") as log:
        with open(f"{output_root}/summary.txt", "w") as summary:
            for d in depths:
                for s_moves in starting_moves:
                    # Two matches with the opening book, two without.
                    play(engine, s_moves, use_opening_book=True, matches=2, depth=d, log_file=log,
                         summary_file=summary)
                    play(engine, s_moves, use_opening_book=False, matches=2, depth=d, log_file=log,
                         summary_file=summary)
                    print("")
                    log.write("\n")
                    summary.write("\n")
    engine.quit()
# Entry point: run the full simulation sweep when executed as a script.
if __name__ == "__main__":
    simulate()
| 33.537143 | 260 | 0.564151 |
27cf236476854871a3639aaf070099c1c4c249d8 | 4,008 | py | Python | davisputnam/tests/test_match.py | Bram-Hub/DP-Visuals | 43d9c7f17bbe7b54b8528ebcbcce421727103e7c | [
"MIT"
] | null | null | null | davisputnam/tests/test_match.py | Bram-Hub/DP-Visuals | 43d9c7f17bbe7b54b8528ebcbcce421727103e7c | [
"MIT"
] | 6 | 2016-03-18T03:10:46.000Z | 2016-05-03T15:05:02.000Z | davisputnam/tests/test_match.py | Bram-Hub/DP-Visuals | 43d9c7f17bbe7b54b8528ebcbcce421727103e7c | [
"MIT"
] | 1 | 2019-01-15T01:17:49.000Z | 2019-01-15T01:17:49.000Z | import unittest
from match import match
class TestMatching(unittest.TestCase):
    """Exercises match() on literals, each logical connective, and rejects."""

    def _check_cases(self, expected_kind, cases):
        """Assert match() classifies each expression as expected_kind with
        the expected sub-expression(s)."""
        for expression, expected in cases.items():
            val = match(expression)
            self.assertIsNotNone(val, msg="match(%s) == None" % expression)
            [kind, matches] = val
            self.assertEqual(kind, expected_kind)
            self.assertEqual(matches, expected)

    def test_literal(self):
        val = match("A")
        self.assertIsNotNone(val)
        [kind, matches] = val
        self.assertEqual(kind, "lit")
        self.assertEqual(matches, "A")

    def test_negation(self):
        self._check_cases("~", {
            "~A": "A",
            "~(AvB)": "AvB",
            "~(A^B)": "A^B",
            "~(A->B)": "A->B",
            "~(A<->B)": "A<->B",
            "~(~A^~B)": "~A^~B",
            "~(Av(B^C))": "Av(B^C)"
        })

    def test_disjunction(self):
        self._check_cases("v", {
            "AvB": ("A", "B"),
            "~AvB": ("~A", "B"),
            "Av~B": ("A", "~B"),
            "~Av~B": ("~A", "~B"),
            "Av(B^C)": ("A", "B^C"),
            "Av(BvC)": ("A", "BvC"),
            "Av(B->C)": ("A", "B->C"),
            "Av(B<->C)": ("A", "B<->C"),
            "Av(~B^C)": ("A", "~B^C"),
            "Av~(~B^C)": ("A", "~(~B^C)"),
            "Av~((B^C)->D)": ("A", "~((B^C)->D)")
        })

    def test_conjunction(self):
        self._check_cases("^", {
            "A^B": ("A", "B"),
            "~A^B": ("~A", "B"),
            "A^~B": ("A", "~B"),
            "~A^~B": ("~A", "~B"),
            "A^(B^C)": ("A", "B^C"),
            "A^(BvC)": ("A", "BvC"),
            "A^(B->C)": ("A", "B->C"),
            "A^(B<->C)": ("A", "B<->C"),
            "A^(~B^C)": ("A", "~B^C"),
            "A^~(~B^C)": ("A", "~(~B^C)"),
            "A^~((B^C)->D)": ("A", "~((B^C)->D)")
        })

    def test_implication(self):
        self._check_cases("->", {
            "A->B": ("A", "B"),
            "~A->B": ("~A", "B"),
            "A->~B": ("A", "~B"),
            "~A->~B": ("~A", "~B"),
            "A->(B^C)": ("A", "B^C"),
            "A->(BvC)": ("A", "BvC"),
            "A->(B->C)": ("A", "B->C"),
            "A->(B<->C)": ("A", "B<->C"),
            "A->(~B^C)": ("A", "~B^C"),
            "A->~(~B^C)": ("A", "~(~B^C)"),
            "A->~((B^C)->D)": ("A", "~((B^C)->D)")
        })

    def test_biconditional(self):
        self._check_cases("<->", {
            "A<->B": ("A", "B"),
            "~A<->B": ("~A", "B"),
            "A<->~B": ("A", "~B"),
            "~A<->~B": ("~A", "~B"),
            "A<->(B^C)": ("A", "B^C"),
            "A<->(BvC)": ("A", "BvC"),
            "A<->(B->C)": ("A", "B->C"),
            "A<->(B<->C)": ("A", "B<->C"),
            "A<->(~B^C)": ("A", "~B^C"),
            "A<->~(~B^C)": ("A", "~(~B^C)"),
            "A<->~((B^C)->D)": ("A", "~((B^C)->D)")
        })

    def test_no_match(self):
        # Malformed inputs: lowercase literal, fused literals, embedded space.
        for pattern in ("a", "AB", "A B"):
            self.assertIsNone(match(pattern), msg="Invalid pattern matched!")
if __name__ == '__main__':
    # Use the print() function so the script parses under both Python 2 and 3;
    # the previous Python-2-only print statement is a SyntaxError on Python 3.
    print("Test match():")
    unittest.main()
a38f18546bfba3f06ca8e41dc062104aca3122d3 | 980 | py | Python | test-harness/util.py | jeguiguren/http-proxy | 760a29a02097128e1acd70552aba464cbcf39713 | [
"MIT"
] | null | null | null | test-harness/util.py | jeguiguren/http-proxy | 760a29a02097128e1acd70552aba464cbcf39713 | [
"MIT"
] | null | null | null | test-harness/util.py | jeguiguren/http-proxy | 760a29a02097128e1acd70552aba464cbcf39713 | [
"MIT"
] | null | null | null | import requests
import difflib
import numpy as np
import logging
import collections
import sys
import os
import statistics
from threading import Thread
import time
import matplotlib.pyplot as plt
def setup_logger(name, log_file=None, console=False, level=logging.INFO):
    """Create (or fetch) a named logger and attach the requested handlers.

    Args:
        name: logger name passed to ``logging.getLogger``.
        log_file: if given, also log to this file (the previous version
            accepted this parameter but silently ignored it).
        console: if True, also emit records to the console at INFO level.
        level: threshold set on the logger itself.

    Returns:
        The configured ``logging.Logger``.

    Fixes over the previous version: the blanket ``except Exception`` handler
    referenced ``e.message``, which does not exist on Python 3 and would
    itself raise, masking the original error; it has been removed.
    """
    logger = logging.getLogger(name)
    logger.setLevel(level)
    if log_file:
        file_handler = logging.FileHandler(log_file)
        file_handler.setLevel(level)
        logger.addHandler(file_handler)
    if console:
        console_handler = logging.StreamHandler()
        console_handler.setLevel(logging.INFO)
        logger.addHandler(console_handler)
    return logger
def get_request(url, proxyDict=None):
    """GET ``url`` (optionally through proxies) and require an HTTP 200."""
    kwargs = {"timeout": 2}
    if proxyDict is not None:
        kwargs["proxies"] = proxyDict
    response = requests.get(url, **kwargs)
    if response.status_code != 200:
        raise Exception('{} Response Code'.format(response.status_code))
    return response
def get_data_size(url):
    """Return the size in bytes of the response body served at ``url``."""
    response = get_request(url)
    return len(response.content)
7ca094b6a96d82a5687de0e9707142a186b1693c | 58 | py | Python | sandbox/quick_script.py | Miladiouss/three-body-sim | 117235141429974aa29f95914c4ed1335d7f3d40 | [
"MIT"
] | 1 | 2021-01-29T22:44:12.000Z | 2021-01-29T22:44:12.000Z | sandbox/quick_script.py | Miladiouss/python-project-sci-template | 676ebd56df81387895d1a9e4dc5c0a79e3e9543d | [
"MIT"
] | null | null | null | sandbox/quick_script.py | Miladiouss/python-project-sci-template | 676ebd56df81387895d1a9e4dc5c0a79e3e9543d | [
"MIT"
] | null | null | null | from pkg1 import add_one_module as add1m
add1m.add_one(9) | 19.333333 | 40 | 0.827586 |
02bad62c7eb9ca31983e6967951df1c4ff197551 | 4,037 | py | Python | IntDezine/settings.py | othienoJoe/Interior-Dezine | 48a84b9cc055af9d1f53ad421c9508f154face24 | [
"Unlicense"
] | null | null | null | IntDezine/settings.py | othienoJoe/Interior-Dezine | 48a84b9cc055af9d1f53ad421c9508f154face24 | [
"Unlicense"
] | null | null | null | IntDezine/settings.py | othienoJoe/Interior-Dezine | 48a84b9cc055af9d1f53ad421c9508f154face24 | [
"Unlicense"
] | null | null | null | """
Django settings for heyapp project.
Generated by 'django-admin startproject' using Django 3.2.9.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
import os
# Cloudinary
import cloudinary
import cloudinary.uploader
import cloudinary.api
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-8b)fytql!@y71z1rm3g&qedi72fw(%zy3-dfl6fzym!afssphe'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'app',
'bootstrap4',
'django_registration',
'rest_framework',
'mathfilters',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'IntDezine.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'IntDezine.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
# SECURITY: database credentials are committed in source control.
# Move USER/PASSWORD into environment variables before deploying.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',
        'NAME': 'design',
        'USER': 'moringa',
        'PASSWORD': 'othieno94',
    }
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Africa/Nairobi'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL ='/media/'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
# setting configuration parameters globally
# SECURITY: Cloudinary API credentials are committed in plain text here;
# rotate these keys and load them from environment variables instead.
cloudinary.config(
    cloud_name = "nicothieno",
    api_key = "138935826953116",
    api_secret = "tWXKi8ktGfsF7bFwyEaPDc0U8Y8",
)

# mail configuration
# NOTE(review): no EMAIL_HOST_PASSWORD or EMAIL_BACKEND is set in this block —
# presumably provided elsewhere; confirm outgoing mail actually works.
EMAIL_USE_TLS = True
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_PORT = 587
EMAIL_HOST_USER = 'nickodemus.otsieno@student.moringaschool.com'

# rest_framework authtoken
# Default DRF authentication: token-based only.
REST_FRAMEWORK = {
    'DEFAULT_AUTHENTICATION_CLASSES':(
        'rest_framework.authentication.TokenAuthentication',
    )
}
80321bcde20398be61d3dd35c93e7d357355b7b5 | 1,217 | py | Python | dataloader/dataset/SSDD++/divide_data.py | chisyliu/RotationDetection | 6f2bd55a51a6de0bcd0959a85977682511fd440d | [
"Apache-2.0"
] | 2 | 2022-03-05T09:55:49.000Z | 2022-03-05T10:12:51.000Z | dataloader/dataset/SSDD++/divide_data.py | junhai0428/RotationDetection | 4249720ea4dacdd60e696901df8034e5cd0a1843 | [
"Apache-2.0"
] | null | null | null | dataloader/dataset/SSDD++/divide_data.py | junhai0428/RotationDetection | 4249720ea4dacdd60e696901df8034e5cd0a1843 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
import sys
sys.path.append('../../')
import shutil
import os
import random
import math
def mkdir(path):
    """Create ``path`` (including parents) if it does not already exist.

    Uses ``os.makedirs(..., exist_ok=True)`` instead of the racy
    check-then-create pattern: the directory may otherwise be created by
    another process between the ``exists`` test and the ``makedirs`` call.
    """
    os.makedirs(path, exist_ok=True)
# Split the SSDD++ dataset into train/test folders.
# NOTE(review): the dataset location is hard-coded; adjust ``root_path``
# for your environment.
root_path = '/mnt/nas/dataset_share'
image_path = root_path + '/SSDD++/JPEGImages'
xml_path = root_path + '/SSDD++/Annotations'

# Image ids without their file extension.
image_list = os.listdir(image_path)
image_name = [n.split('.')[0] for n in image_list]

# Destination folders for images and annotations.
image_output_train = os.path.join(root_path, 'SSDD++/train/JPEGImages')
mkdir(image_output_train)
image_output_test = os.path.join(root_path, 'SSDD++/test/JPEGImages')
mkdir(image_output_test)
xml_train = os.path.join(root_path, 'SSDD++/train/Annotations')
mkdir(xml_train)
xml_test = os.path.join(root_path, 'SSDD++/test/Annotations')
mkdir(xml_test)

# Deterministic split: ids whose last character is '1' or '9' go to the
# test set (roughly 20% of names), everything else to the train set.
for i in image_name:
    if i[-1] in ['1', '9']:
        shutil.copy(os.path.join(image_path, i + '.jpg'), image_output_test)
        shutil.copy(os.path.join(xml_path, i + '.xml'), xml_test)
    else:
        shutil.copy(os.path.join(image_path, i + '.jpg'), image_output_train)
        shutil.copy(os.path.join(xml_path, i + '.xml'), xml_train)
d99d91ac0d17c24266dd884fd1022bb6a8a6b984 | 5,831 | py | Python | core.py | RHDZMOTA/social-distancing-simulation | ea6913c6eb788deb3f8691a815f7ec6ad1c33115 | [
"MIT"
] | 3 | 2020-03-24T20:16:03.000Z | 2020-03-28T20:47:55.000Z | core.py | RHDZMOTA/social-distancing-simulation | ea6913c6eb788deb3f8691a815f7ec6ad1c33115 | [
"MIT"
] | 2 | 2020-03-23T00:23:37.000Z | 2020-03-23T00:33:15.000Z | core.py | RHDZMOTA/social-distancing-simulation | ea6913c6eb788deb3f8691a815f7ec6ad1c33115 | [
"MIT"
] | null | null | null | import concurrent.futures
import datetime
import itertools
import queue
import logging
import multiprocessing
import random
import uuid
from typing import List, Optional
import pandas as pd
from models.person import Person, PersonFactory
from settings import *
from utils import get_dict_hash_key
STRATEGY = SOCIAL_DISTANCING_VAR_STRATEGY
logger = logging.getLogger(__name__)
class Simulation(object):
    """One epidemic run over a fixed population of ``Person`` agents.

    Time is discretized at 100 ticks per simulated day. Agents move between
    places according to their routes; a fraction of the pairwise encounters
    in each place is treated as "risky" and may transmit infection.
    """

    def __init__(self, days: int, risky_interactions: float = 0.05):
        # days: simulated duration; risky_interactions: fraction of pairwise
        # encounters per place that are sampled as potential transmissions.
        self.days = days
        self.risky_interactions = risky_interactions
        # Population composition and initial infected counts come from the
        # SOCIAL_DISTANCING_VAR_* settings.
        self.people = [
            *PersonFactory.create_people_with_route_student(
                k=SOCIAL_DISTANCING_VAR_STUDENTS,
                infected_cases=0
            ),
            *PersonFactory.create_people_with_route_worker(
                k=SOCIAL_DISTANCING_VAR_WORKERS,
                infected_cases=1
            ),
            *PersonFactory.create_people_with_route_worker_student(
                k=SOCIAL_DISTANCING_VAR_WORKER_STUDENTS,
                infected_cases=1
            ),
            *PersonFactory.create_people_with_route_stay_home(
                k=SOCIAL_DISTANCING_VAR_STAY_HOME,
                infected_cases=0
            )
        ]

    def _get_places(self, t: int):
        """Group people by their location at tick ``t``.

        Returns a dict mapping "<place name>-<place id>" to the list of
        people there, plus the current total infected count.
        """
        places = {}
        infected_cases = 0
        for person in self.people:
            place = person.position(t)
            key = f"{place.name}-{place.id}"
            places[key] = places.get(key, []) + [person]
            if person.infected:
                infected_cases += 1
        return places, infected_cases

    def _interactions(self, group: List[Person]):
        """Sample risky pairwise encounters within one place and let them
        interact (which may transmit infection). Returns (total, risky) counts.
        """
        interactions_total = tuple(itertools.combinations(group, r=2))
        interactions_risky = int(self.risky_interactions * len(interactions_total))
        # choices() samples WITH replacement, so the same pair can be drawn twice.
        for a, b in random.choices(interactions_total, k=interactions_risky):
            a.interact(b)
        return len(interactions_total), interactions_risky

    def run(self, item_id: Optional[str] = None):
        """Execute the simulation tick by tick.

        Returns:
            (results_confirmed, results_interactions): per-tick infected
            counts, and per-place interaction records (only for ticks where
            interactions ran).
        """
        item_id = item_id if item_id is not None else str(uuid.uuid4())
        logger.info(f"Simulation {item_id}: STARTED")
        results_confirmed = []
        results_interactions = []
        # 100 ticks per simulated day.
        for t in range(self.days * 100):
            places, infected_cases = self._get_places(t)
            results_confirmed.append(
                {
                    "time": t,
                    "infected_cases": infected_cases
                }
            )
            for place, group in places.items():
                # Run interactions every 10 times a day.
                # (t % 10 != 0 skips the tick, so interactions fire on every
                # 10th tick = 10 rounds per day.)
                if t % 10:
                    continue
                total, risky = self._interactions(group)
                results_interactions.append({
                    "time": t,
                    "place": place,
                    "group": len(group),
                    "interactions_total": total,
                    "interactions_risky": risky,
                    "infected": len([p for p in group if p.infected])
                })
        logger.info(f"Simulation {item_id}: ENDED")
        return results_confirmed, results_interactions
class Simulator:
    """Fan out independent ``Simulation`` runs across worker threads and save
    each run's results (two CSVs plus a config JSON) under a per-worker dir."""

    # Shared work queue; each queued item describes one simulation run.
    simulation_queue = queue.Queue()

    def __init__(self, simulations: int = 1, njobs: int = -1):
        # simulations: how many runs to enqueue; njobs: worker-thread count
        # (0 stays 0 for backward compatibility; negative means "all CPUs").
        self.simulations = simulations
        self.njobs = njobs if not njobs else njobs if njobs > 0 else multiprocessing.cpu_count()

    @staticmethod
    def worker(q: queue.Queue):
        """Drain ``q``: run each queued simulation and persist its outputs.

        Returns when the queue is empty. FIX: the previous version caught
        every exception and inferred queue exhaustion from ``str(e)`` being
        empty — so any real error (or one with an empty message) silently
        killed the worker and abandoned the remaining queued items. Now
        ``queue.Empty`` terminates cleanly and per-item failures are logged
        while the worker moves on to the next item.
        """
        worker_id = str(uuid.uuid4())
        while True:
            try:
                item = q.get(block=False)
            except queue.Empty:
                return
            try:
                item_id = item.pop("id")
                base_path = item.pop("base_path")
                # Configuration snapshot for reproducibility.
                now = datetime.datetime.now().strftime("%Y-%m-%d")
                config = get_global_environment_vars()
                config_id = get_dict_hash_key(dictionary=config)
                # Run Simulation
                simulation = Simulation(**item)
                confirmed, interactions = simulation.run(item_id)
                file_path = os.path.join(base_path, STRATEGY, worker_id)
                os.makedirs(file_path, exist_ok=True)
                # Save confirmed data (100 ticks == 1 simulated day).
                filename_confirmed = f"{now}-{item_id}-{config_id}-confirmed.csv"
                df_confirmed = pd.DataFrame(confirmed)
                df_confirmed["day"] = [int(t / 100) for t in df_confirmed.time]
                df_confirmed.to_csv(os.path.join(file_path, filename_confirmed), index=False)
                # Save interactions data
                filename_interactions = f"{now}-{item_id}-{config_id}-interactions.csv"
                df_interactions = pd.DataFrame(interactions)
                df_interactions["day"] = [int(t / 100) for t in df_interactions.time]
                df_interactions.to_csv(os.path.join(file_path, filename_interactions), index=False)
                # Save configuration variables
                with open(os.path.join(file_path, f"{now}-{item_id}-{config_id}-config.json"), "w") as f:
                    f.write(json.dumps(config))
            except Exception as e:
                logger.error(f"Worker {worker_id} encountered the following error: {e}")

    def run(self, days: int = 50, risky_interactions: float = 0.05, output_path: str = ""):
        """Enqueue ``self.simulations`` runs and process them with
        ``self.njobs`` worker threads (blocks until all workers finish)."""
        for _ in range(self.simulations):
            self.simulation_queue.put({
                "id": str(uuid.uuid4()),
                "base_path": output_path,
                "days": days,
                "risky_interactions": risky_interactions
            })
        with concurrent.futures.ThreadPoolExecutor(max_workers=self.njobs) as executor:
            for _ in range(self.njobs):
                executor.submit(self.worker, **{"q": self.simulation_queue})
| 39.666667 | 105 | 0.581375 |
ac63dfcb0becdbb6dc6f728f0fc3303e562e5233 | 25,908 | py | Python | src/robustness/_OLD_train_eval_sample.py | nikola3794/robust-models-transfer | f5b80eae6d07fdb123c658a7f4fcd2bc26a50f02 | [
"MIT"
] | null | null | null | src/robustness/_OLD_train_eval_sample.py | nikola3794/robust-models-transfer | f5b80eae6d07fdb123c658a7f4fcd2bc26a50f02 | [
"MIT"
] | null | null | null | src/robustness/_OLD_train_eval_sample.py | nikola3794/robust-models-transfer | f5b80eae6d07fdb123c658a7f4fcd2bc26a50f02 | [
"MIT"
] | null | null | null | from re import M
import torch as ch
import numpy as np
import torch.nn as nn
from torch.optim import SGD, AdamW, lr_scheduler
from torchvision.utils import make_grid
from cox.utils import Parameters
from torch.nn import functional as F
from .tools import helpers
from .tools.helpers import AverageMeter, ckpt_at_epoch, has_attr
from .tools import constants as consts
import dill
import os
import time
import warnings
import csv
if int(os.environ.get("NOTEBOOK_MODE", 0)) == 1:
from tqdm import tqdm_notebook as tqdm
else:
from tqdm import tqdm as tqdm
try:
from apex import amp
except Exception as e:
warnings.warn('Could not import amp.')
def check_required_args(args, eval_only=False):
    """
    Check that the required training arguments are present.

    Args:
        args (argparse object): the arguments to check
        eval_only (bool) : whether to check only the arguments for evaluation
    """
    def ensure(arg_names):
        # Fail loudly on the first missing argument.
        for arg in arg_names:
            assert has_attr(args, arg), f"Missing argument {arg}"

    # Different required args based on training or eval:
    if eval_only:
        ensure(["adv_eval"])
    else:
        ensure(["epochs", "out_dir", "adv_train",
                "log_iters", "lr", "momentum", "weight_decay"])

    # Robust training/evaluation additionally needs attack hyperparameters.
    is_adv = bool(args.adv_train) or bool(args.adv_eval)
    if is_adv:
        ensure(["attack_steps", "eps", "constraint",
                "use_best", "attack_lr", "random_restarts"])

    # A custom training loss under adversarial training requires a custom
    # adversarial loss as well.
    has_custom_train = has_attr(args, 'custom_train_loss')
    has_custom_adv = has_attr(args, 'custom_adv_loss')
    if has_custom_train and is_adv and not has_custom_adv:
        raise ValueError("Cannot use custom train loss \
            without a custom adversarial loss (see docs)")
def make_optimizer_and_schedule(args, model, checkpoint, params):
    """
    *Internal Function* (called directly from train_model)

    Creates an optimizer and a schedule for a given model, restoring from a
    checkpoint if it is non-null.

    Args:
        args (object) : an arguments object, see
            :meth:`~robustness.train.train_model` for details
        model (AttackerModel) : the model to create the optimizer for
        checkpoint (dict) : a loaded checkpoint saved by this library and loaded
            with `ch.load`
        params (list|None) : a list of parameters that should be updatable, all
            other params will not update. If ``None``, update all params

    Returns:
        An optimizer (ch.nn.optim.Optimizer) and a scheduler
            (ch.nn.optim.lr_schedulers module).
    """
    # Make optimizer
    param_list = model.parameters() if params is None else params
    if args.optimizer.lower() == "sgd":
        optimizer = SGD(param_list, args.lr, momentum=args.momentum,
                    weight_decay=args.weight_decay)
    elif args.optimizer.lower() == "adamw":
        optimizer = AdamW(param_list, args.lr,weight_decay=args.weight_decay)
    else:
        raise NotImplementedError
    if args.mixed_precision:
        # apex AMP requires the model on GPU before initialize().
        model.to('cuda')
        model, optimizer = amp.initialize(model, optimizer, 'O1')

    # Make schedule
    schedule = None
    if args.custom_lr_multiplier == 'cyclic':
        # Triangular schedule: 0 -> 1 over the first ~4/15 of training, then
        # back down to 0 at the final epoch.
        eps = args.epochs
        lr_func = lambda t: np.interp([t], [0, eps*4//15, eps], [0, 1, 0])[0]
        schedule = lr_scheduler.LambdaLR(optimizer, lr_func)
    elif args.custom_lr_multiplier:
        cs = args.custom_lr_multiplier
        # NOTE(review): eval() of a user-supplied string — only safe with
        # trusted configuration input.
        periods = eval(cs) if type(cs) is str else cs
        if args.lr_interpolation == 'linear':
            # Linearly interpolate between the (epoch, multiplier) points.
            lr_func = lambda t: np.interp([t], *zip(*periods))[0]
        else:
            # Step interpolation: use the multiplier of the last milestone
            # reached; 1.0 before the first milestone.
            def lr_func(ep):
                for (milestone, lr) in reversed(periods):
                    if ep >= milestone: return lr
                return 1.0
        schedule = lr_scheduler.LambdaLR(optimizer, lr_func)
    elif args.step_lr:
        schedule = lr_scheduler.StepLR(optimizer, step_size=args.step_lr, gamma=args.step_lr_gamma)

    # Fast-forward the optimizer and the scheduler if resuming
    if checkpoint:
        optimizer.load_state_dict(checkpoint['optimizer'])
        try:
            schedule.load_state_dict(checkpoint['schedule'])
        except:
            # LambdaLR state cannot always be restored directly; replay the
            # schedule step by step instead.
            steps_to_take = checkpoint['epoch']
            print('Could not load schedule (was probably LambdaLR).'
                  f' Stepping {steps_to_take} times instead...')
            for i in range(steps_to_take):
                schedule.step()

        if 'amp' in checkpoint and checkpoint['amp'] not in [None, 'N/A']:
            amp.load_state_dict(checkpoint['amp'])

        # TODO: see if there's a smarter way to do this
        # TODO: see what's up with loading fp32 weights and then MP training
        if args.mixed_precision:
            model.load_state_dict(checkpoint['model'])
    return optimizer, schedule
def eval_model(args, model, loader, store, n_eval_samples=1):
    """
    Evaluate a model for standard (and optionally adversarial) accuracy.

    Args:
        args (object) : A list of arguments---should be a python object
            implementing ``getattr()`` and ``setattr()``.
        model (AttackerModel) : model to evaluate
        loader (iterable) : a dataloader serving `(input, label)` batches from
            the validation set
        store (cox.Store) : store for saving results in (via tensorboardX)
        n_eval_samples (int) : number of evaluation samples forwarded to
            ``_model_loop`` as ``n_samples``
    """
    check_required_args(args, eval_only=True)
    start_time = time.time()

    if store is not None:
        store.add_table(consts.LOGS_TABLE, consts.LOGS_SCHEMA)
    writer = store.tensorboard if store else None

    assert not hasattr(model, "module"), "model is already in DataParallel."
    model = ch.nn.DataParallel(model)

    # Natural (clean) accuracy/loss on the validation set.
    prec1, nat_loss = _model_loop(args, 'val', loader,
                                        model, None, 0, False, writer, n_samples=n_eval_samples, last_epoch=True)

    adv_prec1, adv_loss = float('nan'), float('nan')
    if args.adv_eval:
        # eps/attack_lr may be given as strings (e.g. "8/255"); evaluate them
        # into floats before running the attack.
        args.eps = eval(str(args.eps)) if has_attr(args, 'eps') else None
        args.attack_lr = eval(str(args.attack_lr)) if has_attr(args, 'attack_lr') else None

        adv_prec1, adv_loss = _model_loop(args, 'val', loader,
                                            model, None, 0, True, writer, n_samples=n_eval_samples, last_epoch=True)
    # Train columns are NaN: this function only evaluates.
    log_info = {
        'epoch':0,
        'nat_prec1':prec1,
        'adv_prec1':adv_prec1,
        'nat_loss':nat_loss,
        'adv_loss':adv_loss,
        'train_prec1':float('nan'),
        'train_loss':float('nan'),
        'time': time.time() - start_time
    }

    # Log info into the logs table
    if store: store[consts.LOGS_TABLE].append_row(log_info)
    return log_info
def train_model(args, model, loaders, *, checkpoint=None, dp_device_ids=None,
                store=None, update_params=None, disable_no_grad=False, n_eval_samples=1):
    """
    Main function for training a model.
    Args:
        args (object) : A python object for arguments, implementing
            ``getattr()`` and ``setattr()`` and having the following
            attributes. See :attr:`robustness.defaults.TRAINING_ARGS` for a
            list of arguments, and you can use
            :meth:`robustness.defaults.check_and_fill_args` to make sure that
            all required arguments are filled and to fill missing args with
            reasonable defaults:
            adv_train (int or bool, *required*)
                if 1/True, adversarially train, otherwise if 0/False do
                standard training
            epochs (int, *required*)
                number of epochs to train for
            lr (float, *required*)
                learning rate for SGD optimizer
            weight_decay (float, *required*)
                weight decay for SGD optimizer
            momentum (float, *required*)
                momentum parameter for SGD optimizer
            step_lr (int)
                if given, drop learning rate by 10x every `step_lr` steps
            custom_lr_multplier (str)
                If given, use a custom LR schedule, formed by multiplying the
                original ``lr`` (format: [(epoch, LR_MULTIPLIER),...])
            lr_interpolation (str)
                How to drop the learning rate, either ``step`` or ``linear``,
                ignored unless ``custom_lr_multiplier`` is provided.
            adv_eval (int or bool)
                If True/1, then also do adversarial evaluation, otherwise skip
                (ignored if adv_train is True)
            log_iters (int, *required*)
                How frequently (in epochs) to save training logs
            save_ckpt_iters (int, *required*)
                How frequently (in epochs) to save checkpoints (if -1, then only
                save latest and best ckpts)
            attack_lr (float or str, *required if adv_train or adv_eval*)
                float (or float-parseable string) for the adv attack step size
            constraint (str, *required if adv_train or adv_eval*)
                the type of adversary constraint
                (:attr:`robustness.attacker.STEPS`)
            eps (float or str, *required if adv_train or adv_eval*)
                float (or float-parseable string) for the adv attack budget
            attack_steps (int, *required if adv_train or adv_eval*)
                number of steps to take in adv attack
            custom_eps_multiplier (str, *required if adv_train or adv_eval*)
                If given, then set epsilon according to a schedule by
                multiplying the given eps value by a factor at each epoch. Given
                in the same format as ``custom_lr_multiplier``, ``[(epoch,
                MULTIPLIER)..]``
            use_best (int or bool, *required if adv_train or adv_eval*) :
                If True/1, use the best (in terms of loss) PGD step as the
                attack, if False/0 use the last step
            random_restarts (int, *required if adv_train or adv_eval*)
                Number of random restarts to use for adversarial evaluation
            custom_train_loss (function, optional)
                If given, a custom loss instead of the default CrossEntropyLoss.
                Takes in `(logits, targets)` and returns a scalar.
            custom_adv_loss (function, *required if custom_train_loss*)
                If given, a custom loss function for the adversary. The custom
                loss function takes in `model, input, target` and should return
                a vector representing the loss for each element of the batch, as
                well as the classifier output.
            custom_accuracy (function)
                If given, should be a function that takes in model outputs
                and model targets and outputs a top1 and top5 accuracy, will
                displayed instead of conventional accuracies
            regularizer (function, optional)
                If given, this function of `model, input, target` returns a
                (scalar) that is added on to the training loss without being
                subject to adversarial attack
            iteration_hook (function, optional)
                If given, this function is called every training iteration by
                the training loop (useful for custom logging). The function is
                given arguments `model, iteration #, loop_type [train/eval],
                current_batch_ims, current_batch_labels`.
            epoch hook (function, optional)
                Similar to iteration_hook but called every epoch instead, and
                given arguments `model, log_info` where `log_info` is a
                dictionary with keys `epoch, nat_prec1, adv_prec1, nat_loss,
                adv_loss, train_prec1, train_loss`.
        model (AttackerModel) : the model to train.
        loaders (tuple[iterable]) : `tuple` of data loaders of the form
            `(train_loader, val_loader)`
        checkpoint (dict) : a loaded checkpoint previously saved by this library
            (if resuming from checkpoint)
        dp_device_ids (list|None) : if not ``None``, a list of device ids to
            use for DataParallel.
        store (cox.Store) : a cox store for logging training progress
        update_params (list) : list of parameters to use for training, if None
            then all parameters in the model are used (useful for transfer
            learning)
        disable_no_grad (bool) : if True, then even model evaluation will be
            run with autograd enabled (otherwise it will be wrapped in a ch.no_grad())
        n_eval_samples (int) : forwarded to the validation passes; when > 1
            the final evaluation averages over this many stochastic samples
    Returns:
        The trained model (wrapped in ``ch.nn.DataParallel``).
    """
    # Logging setup
    writer = store.tensorboard if store else None
    prec1_key = f"{'adv' if args.adv_train else 'nat'}_prec1"
    if store is not None:
        store.add_table(consts.LOGS_TABLE, consts.LOGS_SCHEMA)
    # Reformat and read arguments
    check_required_args(args)  # Argument sanity check
    # NOTE(review): eval() lets float-like strings such as "8/255" parse;
    # these values come from the local CLI/config, not untrusted input.
    for p in ['eps', 'attack_lr', 'custom_eps_multiplier']:
        setattr(args, p, eval(str(getattr(args, p))) if has_attr(args, p) else None)
    if args.custom_eps_multiplier is not None:
        eps_periods = args.custom_eps_multiplier
        args.custom_eps_multiplier = lambda t: np.interp([t], *zip(*eps_periods))[0]
    # Initial setup
    train_loader, val_loader = loaders
    opt, schedule = make_optimizer_and_schedule(args, model, checkpoint, update_params)
    # Put the model into parallel mode
    assert not hasattr(model, "module"), "model is already in DataParallel."
    model = ch.nn.DataParallel(model, device_ids=dp_device_ids).cuda()
    best_prec1, start_epoch = (0, 0)
    if checkpoint:
        start_epoch = checkpoint['epoch']
        best_prec1 = checkpoint[prec1_key] if prec1_key in checkpoint \
            else _model_loop(args, 'val', val_loader, model, None, start_epoch-1, args.adv_train, writer=None)[0]
    # Timestamp for training start time
    start_time = time.time()
    # BUGFIX: log_info must exist before the loop. Previously it was only
    # bound inside the logging branch, so with log_iters > 1 the epoch_hook
    # call below raised NameError on non-logging epochs.
    log_info = None
    for epoch in range(start_epoch, args.epochs):
        # train for one epoch
        train_prec1, train_loss = _model_loop(args, 'train', train_loader,
            model, opt, epoch, args.adv_train, writer)
        last_epoch = (epoch == (args.epochs - 1))
        # Everything needed to resume training from this point.
        sd_info = {
            'model': model.state_dict(),
            'optimizer': opt.state_dict(),
            'schedule': (schedule and schedule.state_dict()),
            'epoch': epoch+1,
            'amp': amp.state_dict() if args.mixed_precision else None,
        }
        def save_checkpoint(filename):
            """Write sd_info to <out_dir or store path>/<filename>."""
            ckpt_save_path = os.path.join(args.out_dir if not store else \
                store.path, filename)
            ch.save(sd_info, ckpt_save_path, pickle_module=dill)
        save_its = args.save_ckpt_iters
        should_save_ckpt = (epoch % save_its == 0) and (save_its > 0)
        should_log = (epoch % args.log_iters == 0)
        if should_log or last_epoch or should_save_ckpt:
            # log + get best
            ctx = ch.enable_grad() if disable_no_grad else ch.no_grad()
            with ctx:
                prec1, nat_loss = _model_loop(args, 'val', val_loader, model,
                    None, epoch, False, writer, n_samples=n_eval_samples, last_epoch=last_epoch)
            # loader, model, epoch, input_adv_exs
            should_adv_eval = args.adv_eval or args.adv_train
            adv_val = should_adv_eval and _model_loop(args, 'val', val_loader,
                model, None, epoch, True, writer, n_samples=n_eval_samples, last_epoch=last_epoch)
            adv_prec1, adv_loss = adv_val or (-1.0, -1.0)
            # remember best prec@1 and save checkpoint
            our_prec1 = adv_prec1 if args.adv_train else prec1
            is_best = our_prec1 > best_prec1
            best_prec1 = max(our_prec1, best_prec1)
            sd_info[prec1_key] = our_prec1
            # log every checkpoint
            log_info = {
                'epoch': epoch + 1,
                'nat_prec1': prec1,
                'adv_prec1': adv_prec1,
                'nat_loss': nat_loss,
                'adv_loss': adv_loss,
                'train_prec1': train_prec1,
                'train_loss': train_loss,
                'time': time.time() - start_time
            }
            # Log info into the logs table
            if store: store[consts.LOGS_TABLE].append_row(log_info)
            # If we are at a saving epoch (or the last epoch), save a checkpoint
            if should_save_ckpt or last_epoch: save_checkpoint(ckpt_at_epoch(epoch))
            # Update the latest and best checkpoints (overrides old one)
            save_checkpoint(consts.CKPT_NAME_LATEST)
            if is_best: save_checkpoint(consts.CKPT_NAME_BEST)
            if last_epoch:
                with open(os.path.join(args.out_dir, args.exp_name, 'results.txt'), 'w') as fh:
                    # Log to .txt file
                    exp_info = {
                        "exp_name": args.exp_name,
                        "out_dir": args.out_dir,
                        "dataset": args.dataset,
                        "data": args.data,
                        "data_aug": args.data_aug,
                        "arch": args.arch,
                        "model_path": args.model_path
                    }
                    fh.write('Log info\n')
                    fh.write('-------------------------------------------------------\n')
                    for k in log_info:
                        fh.write(f'{k}: {log_info[k]}\n')
                    fh.write('\n\n\n')
                    fh.write('Experiment info\n')
                    fh.write('-------------------------------------------------------\n')
                    for k in exp_info:
                        fh.write(f'{k}: {exp_info[k]}\n')
                    fh.write('\n\n\n')
                    fh.write('Configuration\n')
                    fh.write('-------------------------------------------------------\n')
                    for arg in vars(args):
                        fh.write(f'{arg}:{getattr(args, arg)}\n')
                    fh.write('\n\n\n')
                # Log to .csv file
                header = ['dataset', 'arch', 'freeze-level', 'only-learn-slope', 'min-slope', 'max-slope', 'rnd-act', 'top1_val', 'top1_train', 'optimizer', 'lr', 'wd', 'epochs', 'batch-size', 'data_aug']
                data = [args.dataset, args.arch, args.freeze_level, args.only_learn_slope_trf, args.min_slope, args.max_slope, args.rnd_act, f'{prec1:.3f}', f'{train_prec1:.3f}', args.optimizer, args.lr, args.weight_decay, args.epochs, args.batch_size, args.data_aug]
                with open(os.path.join(args.out_dir, args.exp_name, 'results.csv'), 'w') as fh:
                    # BUGFIX: use a distinct name so the tensorboard `writer`
                    # defined above is not clobbered by the csv writer.
                    csv_writer = csv.writer(fh)
                    csv_writer.writerow(header)
                    csv_writer.writerow(data)
        if schedule: schedule.step()
        # Only invoke the hook once log_info has been produced at least once.
        if has_attr(args, 'epoch_hook') and log_info is not None:
            args.epoch_hook(model, log_info)
    return model
def _model_loop(args, loop_type, loader, model, opt, epoch, adv, writer, n_samples=1, last_epoch=False):
    """
    *Internal function* (refer to the train_model and eval_model functions for
    how to train and evaluate models).
    Runs a single epoch of either training or evaluating.
    Args:
        args (object) : an arguments object (see
            :meth:`~robustness.train.train_model` for list of arguments
        loop_type ('train' or 'val') : whether we are training or evaluating
        loader (iterable) : an iterable loader of the form
            `(image_batch, label_batch)`
        model (AttackerModel) : model to train/evaluate
        opt (ch.optim.Optimizer) : optimizer to use (ignored for evaluation)
        epoch (int) : which epoch we are currently on
        adv (bool) : whether to evaluate adversarially (otherwise standard)
        writer : tensorboardX writer (optional)
        n_samples (int) : when > 1, validating, and `last_epoch` is True,
            average the model output over this many stochastic forward passes
        last_epoch (bool) : whether this is the final epoch (gates the
            multi-sample inference mode above)
    Returns:
        The average top1 accuracy and the average loss across the epoch.
    """
    run_N_inference_samples = (last_epoch and (loop_type == "val") and (n_samples > 1))
    if run_N_inference_samples:
        print(f"Running {n_samples} inference samples to compute output")
    if not loop_type in ['train', 'val']:
        err_msg = "loop_type ({0}) must be 'train' or 'val'".format(loop_type)
        raise ValueError(err_msg)
    is_train = (loop_type == 'train')
    losses = AverageMeter()
    top1 = AverageMeter()
    top5 = AverageMeter()
    prec = 'NatPrec' if not adv else 'AdvPrec'
    loop_msg = 'Train' if loop_type == 'train' else 'Val'
    # switch to train/eval mode depending
    model = model.train() if is_train else model.eval()
    # If adv training (or evaling), set eps and random_restarts appropriately
    if adv:
        eps = args.custom_eps_multiplier(epoch) * args.eps \
            if (is_train and args.custom_eps_multiplier) else args.eps
        random_restarts = 0 if is_train else args.random_restarts
    # Custom training criterion
    has_custom_train_loss = has_attr(args, 'custom_train_loss')
    train_criterion = args.custom_train_loss if has_custom_train_loss \
        else ch.nn.CrossEntropyLoss()
    has_custom_adv_loss = has_attr(args, 'custom_adv_loss')
    adv_criterion = args.custom_adv_loss if has_custom_adv_loss else None
    attack_kwargs = {}
    if adv:
        attack_kwargs = {
            'constraint': args.constraint,
            'eps': eps,
            'step_size': args.attack_lr,
            'iterations': args.attack_steps,
            'random_start': args.random_start,
            'custom_loss': adv_criterion,
            'random_restarts': random_restarts,
            'use_best': bool(args.use_best)
        }
    iterator = tqdm(enumerate(loader), total=len(loader))
    for i, (inp, target) in iterator:
        target = target.cuda(non_blocking=True)
        if run_N_inference_samples:
            # Accumulate logits and softmax probabilities over n_samples
            # stochastic forward passes.
            for cnt in range(n_samples):
                output_tmp, final_inp = model(inp, min_slope=args.min_slope, max_slope=args.max_slope, rnd_act=args.rnd_act)
                if cnt == 0:
                    output = output_tmp
                    softmax = F.softmax((output_tmp), dim=1)
                else:
                    output += output_tmp
                    softmax += F.softmax((output_tmp), dim=1)
            # BUGFIX: divide exactly once, after the loop. The old code
            # divided the running sum on every iteration, which rescaled
            # earlier samples repeatedly and produced a weighted (not
            # uniform) average of the n_samples passes.
            output /= n_samples
            softmax /= n_samples
        else:
            # BUGFIX: bind the adversarial input to `final_inp` (was `final`),
            # which the tensorboard branch below reads; the old name raised
            # NameError whenever that branch ran without multi-sampling.
            output, final_inp = model(inp, min_slope=args.min_slope, max_slope=args.max_slope, rnd_act=args.rnd_act)
        loss = train_criterion(output, target)
        if len(loss.shape) > 0: loss = loss.mean()
        # The model is expected to return a plain logits tensor here.
        assert not (type(output) is tuple)
        if run_N_inference_samples:
            model_logits = softmax
        else:
            model_logits = output
        # measure accuracy and record loss
        top1_acc = float('nan')
        top5_acc = float('nan')
        try:
            maxk = min(5, model_logits.shape[-1])
            if has_attr(args, "custom_accuracy"):
                prec1, prec5 = args.custom_accuracy(model_logits, target)
            else:
                prec1, prec5 = helpers.accuracy(model_logits, target, topk=(1, maxk))
                prec1, prec5 = prec1[0], prec5[0]
            losses.update(loss.item(), inp.size(0))
            top1.update(prec1, inp.size(0))
            top5.update(prec5, inp.size(0))
            top1_acc = top1.avg
            top5_acc = top5.avg
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # still propagate instead of being swallowed.
            warnings.warn('Failed to calculate the accuracy.')
        reg_term = 0.0
        if has_attr(args, "regularizer"):
            reg_term = args.regularizer(model, inp, target)
        loss = loss + reg_term
        # compute gradient and do SGD step
        if is_train:
            opt.zero_grad()
            if args.mixed_precision:
                with amp.scale_loss(loss, opt) as sl:
                    sl.backward()
            else:
                loss.backward()
            opt.step()
        elif adv and i == 0 and writer:
            # add some examples to the tensorboard
            nat_grid = make_grid(inp[:15, ...])
            adv_grid = make_grid(final_inp[:15, ...])
            writer.add_image('Nat input', nat_grid, epoch)
            writer.add_image('Adv input', adv_grid, epoch)
        # ITERATOR
        desc = ('{2} Epoch:{0} | Loss {loss.avg:.4f} | '
                '{1}1 {top1_acc:.3f} | {1}5 {top5_acc:.3f} | '
                'Reg term: {reg} ||'.format(epoch, prec, loop_msg,
            loss=losses, top1_acc=top1_acc, top5_acc=top5_acc, reg=reg_term))
        # USER-DEFINED HOOK
        if has_attr(args, 'iteration_hook'):
            args.iteration_hook(model, i, loop_type, inp, target)
        iterator.set_description(desc)
        iterator.refresh()
    if writer is not None:
        prec_type = 'adv' if adv else 'nat'
        descs = ['loss', 'top1', 'top5']
        vals = [losses, top1, top5]
        for d, v in zip(descs, vals):
            writer.add_scalar('_'.join([prec_type, loop_type, d]), v.avg,
                              epoch)
    return top1.avg, losses.avg
| 43.252087 | 267 | 0.597576 |
d65a78f3825994539ef1c605880c27eddcca6986 | 1,289 | py | Python | app/recipe/serializers.py | Ningthem/recipe-app-api | 0fcaa49a7a4345d2e67ffc8688d1f20d40b31115 | [
"MIT"
] | null | null | null | app/recipe/serializers.py | Ningthem/recipe-app-api | 0fcaa49a7a4345d2e67ffc8688d1f20d40b31115 | [
"MIT"
] | null | null | null | app/recipe/serializers.py | Ningthem/recipe-app-api | 0fcaa49a7a4345d2e67ffc8688d1f20d40b31115 | [
"MIT"
] | null | null | null | from rest_framework import serializers
from core.models import Tag, Ingredient, Recipe
class TagSerializer(serializers.ModelSerializer):
    """Serializer for tag objects"""
    class Meta:
        # Expose only id and name; id is database-assigned, hence read-only.
        model = Tag
        fields = ("id", "name")
        read_only_fields = ("id",)
class IngredientSerializer(serializers.ModelSerializer):
    """Serializer for ingredient objects"""
    class Meta:
        # Expose only id and name; id is database-assigned, hence read-only.
        model = Ingredient
        fields = (
            "id",
            "name",
        )
        read_only_fields = ("id",)
class RecipeSerializer(serializers.ModelSerializer):
    """Serializer for recipes, representing related objects by primary key.

    The detail variant (RecipeDetailSerializer) nests the full tag and
    ingredient objects instead of their ids.
    """

    tags = serializers.PrimaryKeyRelatedField(
        queryset=Tag.objects.all(), many=True
    )
    ingredients = serializers.PrimaryKeyRelatedField(
        queryset=Ingredient.objects.all(), many=True
    )

    class Meta:
        model = Recipe
        # Field order here controls the order in the rendered output.
        fields = (
            "id",
            "title",
            "ingredients",
            "tags",
            "time_minutes",
            "price",
            "link",
        )
        read_only_fields = ("id",)
class RecipeDetailSerializer(RecipeSerializer):
    """Recipe serializer variant that nests full, read-only related objects."""

    tags = TagSerializer(many=True, read_only=True)
    ingredients = IngredientSerializer(many=True, read_only=True)
| 23.87037 | 65 | 0.600465 |
ba7f2f01da8e1a857ff25ebb64c75c2cc5b29142 | 7,900 | py | Python | hopcolony/jobs/jobs.py | hopcolony/python-hopcolony | 9cb2a5e988abe6196b2b0c5ebd8cb7595986879d | [
"MIT"
] | null | null | null | hopcolony/jobs/jobs.py | hopcolony/python-hopcolony | 9cb2a5e988abe6196b2b0c5ebd8cb7595986879d | [
"MIT"
] | null | null | null | hopcolony/jobs/jobs.py | hopcolony/python-hopcolony | 9cb2a5e988abe6196b2b0c5ebd8cb7595986879d | [
"MIT"
] | null | null | null | import logging
import requests
import re
from itertools import cycle
from urllib.parse import urlparse
from parsel import Selector
from selenium.webdriver import Chrome
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver import ActionChains
from selenium import common
from webdriver_manager.chrome import ChromeDriverManager
import time
import json
from selenium.webdriver.support.ui import WebDriverWait as wait
from selenium.webdriver.support import expected_conditions as EC
import sys
# Page chains can recurse deeply (Engine.get -> parse -> follow ->
# Engine.get ...), so double the interpreter's default limit of 1000.
sys.setrecursionlimit(2000)
class Job:
    """Base class for a scraping job.

    Subclasses set `entrypoint` (and `selenium` when a browser is needed)
    and implement a `parse(response)` generator; any keyword arguments
    passed at construction become instance attributes (``name`` is
    required, since it names the job's logger).
    """

    entrypoint = None
    selenium = False

    def __init__(self, *args, **kwargs):
        # Every keyword argument becomes an attribute on the instance.
        for key, value in kwargs.items():
            setattr(self, key, value)
        self.logger = logging.getLogger(self.name)
        logging.basicConfig(level=logging.INFO, format='%(message)s')

    def write(self, data, name=None, extension="html"):
        """Dump `data` to <name>.<extension>; JSON payloads are serialized."""
        if name is None:
            name = self.page
        with open(f"{name}.{extension}", "w") as fh:
            payload = json.dumps(data) if extension == "json" else data
            fh.write(payload)
class Engine:
    """Drives a Job: fetches pages (via requests or selenium), wraps each
    response, and feeds every item yielded by the parse callback through
    the configured pipelines."""
    # Ring buffer of the last visited string URLs, used to detect loops.
    last_gets = ["a", "b", "c", "d"]
    # Round-robin pool of HTTP proxies (advanced in get(), see note there).
    proxies = cycle(
        ["3.239.28.97:8080", "3.238.35.140:8080", "174.129.147.226:8080"])
    headers = {
        "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.182 Safari/537.36"}
    def __init__(self, job, pipelines):
        """Bind the engine to one job instance and a list of item pipelines."""
        self.job = job
        self.pipelines = pipelines
        assert job.entrypoint, "Please provide an entrypoint"
    def do_captcha(self, driver):
        """Best-effort click on a reCAPTCHA checkbox inside nested iframes.
        NOTE(review): always returns False, even when the click succeeds;
        the only call site is commented out in get().
        """
        try:
            wait(driver, 10).until(EC.frame_to_be_available_and_switch_to_it(
                driver.find_element_by_xpath("/html/body/iframe")))
            wait(driver, 10).until(EC.frame_to_be_available_and_switch_to_it(
                driver.find_element_by_xpath("//*[@id='captcha-submit']/div/div/iframe")))
            captcha = driver.find_element_by_xpath(
                "//*[@id='recaptcha-anchor']")
            ActionChains(driver).move_to_element(captcha).click().perform()
        except common.exceptions.NoSuchElementException as e:
            print(e)
            pass
        return False
    def get(self, url, callback=None):
        """Fetch `url` (a string is navigated to / requested; anything else
        is treated as a clickable selenium element), then run the parse
        callback and pipe each yielded item through the pipelines."""
        # Check you are not following the url you visited the last 3 times
        if isinstance(url, str):
            self.last_gets.pop(0)
            self.last_gets.append(url)
            assert all(
                x == self.last_gets[0] for x in self.last_gets) == False, f"You're trying to follow an url you visited the last {len(self.last_gets)-1} times: {url}"
        # Assign different callback if provided
        parse_callback = self.job.parse
        if callback:
            parse_callback = callback
        try:
            if not self.job.selenium:
                # data = requests.get(url, headers = self.headers, proxies = {"http": self.proxy})
                data = requests.get(url, headers=self.headers)
                response = RESTJobResponse(self, data)
            else:
                if isinstance(url, str):
                    self.browser.get(url)
                else:
                    ActionChains(self.browser).click(url).perform()
                response = SeleniumJobResponse(self)
                # Check captcha
                # if self.do_captcha(self.browser):
                #     return
        except requests.exceptions.ConnectionError:
            self.job.logger.error(f"Could not get a connection with \"{url}\"")
            return
        except common.exceptions.WebDriverException as e:
            print(e)
            self.browser.close()
            return
        # Rotate proxy
        # NOTE(review): self.proxy is advanced here but never handed to
        # requests.get above (the proxies= argument is commented out).
        self.proxy = next(self.proxies)
        try:
            for item in parse_callback(response):
                # Send to pipelines
                for pipeline in self.pipelines:
                    item = pipeline.process_item(item, self.job)
        except TypeError as e:
            # Probably returning nothing from parse method
            pass
        except Exception as e:
            print(e)
    # Selenium-related class defaults; the Firefox profile/options setup
    # below was disabled in favor of plain Chrome (see start()).
    headless = False
    options = None
    profile = None
    capabilities = None
    # def setUpProfile(self):
    #     self.profile = FirefoxProfile()
    #     self.profile._install_extension("buster_captcha_solver_for_humans-1.1.0-an+fx.xpi", unpack=False)
    #     self.profile.set_preference("security.fileuri.strict_origin_policy", False)
    #     self.profile.update_preferences()
    # def setUpOptions(self):
    #     self.options = FirefoxOptions()
    #     # self.options.headless = self.headless
    # def setUpCapabilities(self):
    #     self.capabilities = DesiredCapabilities.FIREFOX
    #     self.capabilities['marionette'] = True
    # def setUpProxy(self):
    #     self.proxy = next(self.proxies)
    #     self.capabilities['proxy'] = { "proxyType": "MANUAL", "httpProxy": self.proxy, "ftpProxy": self.proxy, "sslProxy": self.proxy }
    def start(self):
        """Launch the browser when the job needs selenium, then fetch the
        job's entrypoint URL."""
        if self.job.selenium:
            # self.setUpProfile()
            # self.setUpOptions()
            # self.setUpCapabilities()
            # self.setUpProxy()
            # self.browser = Firefox(options=self.options, capabilities=self.capabilities,
            #                        firefox_profile=self.profile, executable_path=GeckoDriverManager().install())
            self.browser = Chrome(ChromeDriverManager().install())
        self.get(self.job.entrypoint)
class RESTJobResponse:
    """requests-backed page wrapper: parses the body with parsel and offers
    css/xpath querying plus navigation through the owning Engine."""
    def __init__(self, engine, response):
        self.engine = engine
        self.url = response.url
        self.response = response
        parsed_uri = urlparse(self.url)
        # Scheme + host, used to resolve relative endpoints in follow().
        self.base = f"{parsed_uri.scheme}://{parsed_uri.netloc}"
        self.status = response.status_code
        self.raw = response.text
        self.selector = Selector(text=self.raw)
    def css(self, query):
        """Run a CSS selector query against the fetched document."""
        return self.selector.css(query)
    def xpath(self, query):
        """Run an XPath query against the fetched document."""
        return self.selector.xpath(query)
    def get(self, url, params=None):
        """GET an auxiliary url and wrap the reply in a new response.
        BUGFIX: `params={}` was a shared mutable default; and the new
        response now carries the Engine (previously it received this
        response object, so follow() on a chained response broke).
        """
        data = requests.get(url, params={} if params is None else params)
        return RESTJobResponse(self.engine, data)
    def post(self, url, json=None):
        """POST a JSON payload and wrap the reply in a new response.
        BUGFIX: same mutable-default and engine-propagation fixes as get().
        """
        data = requests.post(url, json={} if json is None else json)
        return RESTJobResponse(self.engine, data)
    def follow(self, endpoint, callback=None):
        """Fetch endpoint (absolute, or relative to self.base) via the Engine."""
        if endpoint is None:
            return
        url = self.base + endpoint
        if re.match(r"http.*", endpoint):
            url = endpoint
        self.engine.get(url, callback)
class SeleniumJobResponse:
    """Selenium-backed page wrapper exposing element queries and navigation
    that routes back through the owning Engine."""
    def __init__(self, engine):
        self.engine = engine
        self.url = engine.browser.current_url
        parsed_uri = urlparse(self.url)
        # Scheme + host of the current page, with a trailing slash.
        self.base = f"{parsed_uri.scheme}://{parsed_uri.netloc}/"
        self.raw = engine.browser.page_source
    @property
    def driver(self):
        # Convenience accessor for the live selenium webdriver.
        return self.engine.browser
    def xpath(self, query):
        """Return all elements matching the XPath query."""
        return self.driver.find_elements_by_xpath(query)
    def css(self, selector):
        """Return all elements matching the CSS selector."""
        return self.driver.find_elements_by_css_selector(selector)
    def tag(self, tag):
        """Return the first element with the given tag name."""
        return self.driver.find_element_by_tag_name(tag)
    def click(self, element):
        """Click an element directly, without re-entering the engine."""
        ActionChains(self.driver).click(element).perform()
    def click_and_call(self, element, callback=None):
        """Click via Engine.get so the resulting page is parsed by callback."""
        self.engine.get(element, callback)
    def move_to_element(self, element):
        """Move the virtual mouse over the given element."""
        ActionChains(self.driver).move_to_element(element).perform()
    def go_to_end(self):
        """Scroll the window to the bottom of the page."""
        self.driver.execute_script(
            "window.scrollTo(0, document.body.scrollHeight);")
    def follow(self, endpoint, callback=None):
        """Navigate to endpoint (absolute, or relative to self.base)."""
        url = self.base + endpoint
        if re.match(r"http.*", endpoint):
            url = endpoint
        self.engine.get(url, callback)
    def close(self):
        """Close the underlying browser window."""
        self.engine.browser.close()
| 34.649123 | 165 | 0.626709 |
3b6e7a122c90aba2a4213ef60d9a9df1611d1673 | 1,411 | py | Python | corehq/apps/hqadmin/management/commands/fix_checkpoints_from_file.py | dimagilg/commcare-hq | ea1786238eae556bb7f1cbd8d2460171af1b619c | [
"BSD-3-Clause"
] | 471 | 2015-01-10T02:55:01.000Z | 2022-03-29T18:07:18.000Z | corehq/apps/hqadmin/management/commands/fix_checkpoints_from_file.py | dimagilg/commcare-hq | ea1786238eae556bb7f1cbd8d2460171af1b619c | [
"BSD-3-Clause"
] | 14,354 | 2015-01-01T07:38:23.000Z | 2022-03-31T20:55:14.000Z | corehq/apps/hqadmin/management/commands/fix_checkpoints_from_file.py | dimagilg/commcare-hq | ea1786238eae556bb7f1cbd8d2460171af1b619c | [
"BSD-3-Clause"
] | 175 | 2015-01-06T07:16:47.000Z | 2022-03-29T13:27:01.000Z | import json
from django.core.management import BaseCommand, CommandError
from pillowtop import get_pillow_by_name
class Command(BaseCommand):
    """Management command that resets pillow checkpoints from a JSON file
    mapping pillow names to sequence IDs."""

    help = ("Update the pillow sequence IDs based on a passed in file")

    def add_arguments(self, parser):
        # Positional: path to the JSON file of {pillow_name: sequence}.
        parser.add_argument('filename')
        parser.add_argument(
            '--noinput',
            action='store_true',
            dest='noinput',
            default=False,
            help="Disable interactive mode",
        )

    def handle(self, filename, **options):
        with open(filename, encoding='utf-8') as f:
            checkpoint_map = json.load(f)
        skip_confirmation = options['noinput']
        # Process pillows in a deterministic (alphabetical) order.
        for pillow_name, checkpoint_to_set in sorted(checkpoint_map.items()):
            pillow = get_pillow_by_name(pillow_name)
            if not pillow:
                raise CommandError("No pillow found with name: {}".format(pillow_name))
            old_seq = pillow.get_checkpoint().wrapped_sequence
            msg = "\nReset checkpoint for '{}' pillow from:\n\n{}\n\nto\n\n{}\n\n".format(
                pillow_name, old_seq, checkpoint_to_set,
            )
            if not skip_confirmation:
                answer = input("{} Type ['y', 'yes'] to continue.\n".format(msg))
                if answer not in ['y', 'yes']:
                    print('skipped')
                    continue
            pillow.checkpoint.update_to(checkpoint_to_set)
| 35.275 | 97 | 0.593196 |
96f25b462c9fbdbf2aedd96632100126aa38bc74 | 3,316 | py | Python | Subd0mains_Finder.py | admdev8/Subd0mains_Finder | df72cfa0b888d4a96de5c262c25f9e1791847dde | [
"MIT"
] | 1 | 2020-11-23T05:11:13.000Z | 2020-11-23T05:11:13.000Z | Subd0mains_Finder.py | admdev8/Subd0mains_Finder | df72cfa0b888d4a96de5c262c25f9e1791847dde | [
"MIT"
] | null | null | null | Subd0mains_Finder.py | admdev8/Subd0mains_Finder | df72cfa0b888d4a96de5c262c25f9e1791847dde | [
"MIT"
] | 2 | 2019-12-27T09:46:26.000Z | 2020-12-25T02:07:35.000Z | #!/usr/bin/python
#Subd0mains_Finder v2.1 - by @moradorex
#Python Subdomain searcher based of virustotal.com API
import requests
import sys
import getopt
import json
#DATA
domain = ''  # target domain; set by main() from the CLI arguments
api = ''     # VirusTotal API key; loaded by main() from config.json
version = '2.1'
#OPTIONS
recursive = False  # defined but never read anywhere in this script -- TODO confirm intent
hosts = False      # -t/--hosts: print "127.0.0.1 <subdomain>" lines
output = False     # -o/--output: destination file path, or False when unset
def usage():
    """Print the command-line help text and exit with status 2."""
    help_lines = (
        "usage:",
        " Subd0mains_Finder.py [options] <domain>",
        "\n options:",
        " -t, --hosts \t\tShow in hosts file format, 127.0.0.1 <subdomain>",
        " -o <file>, --output <file> \tSave to file",
        " -v, --version \t\tShow version",
        " -h, --help \t\t\tShow this screen",
        "\nPut your API key in the config.json file.",
        "You can only enter one domain at a time. Get yours API key at:",
        " https://www.virustotal.com/gui/user/YOUR_USERNAME/apikey\n",
    )
    for line in help_lines:
        print(line)
    sys.exit(2)
def getVirusTotal():
    """Query the VirusTotal v2 domain report for the global `domain`.

    Exits the process with status 2 on API errors or when the domain is
    unknown to VirusTotal; otherwise returns the parsed JSON report.
    """
    params = {'apikey': api, 'domain': domain}
    headers = {'User-Agent': 'Subd0mains_Finder/2.1'}
    response = requests.get('https://www.virustotal.com/vtapi/v2/domain/report', headers=headers, params=params)
    if(response.status_code == 400):
        print("Bad request. Your request was somehow incorrect. This can be caused by missing arguments or arguments with wrong values.")
        sys.exit(2)
    elif(response.status_code == 403):
        print("Forbidden. You don't have enough privileges to make the request. You may be doing a request without providing an API key or you may be making a request to a Private API without having the appropriate privileges.")
        sys.exit(2)
    elif(response.status_code == 204):
        # BUGFIX: the v2 API signals quota exhaustion with HTTP 204 (empty
        # body), not 201. Checking 201 let rate-limited replies fall through
        # to response.json() below and crash on the empty body.
        print("Request rate limit exceeded. You are making more requests than allowed. You have exceeded one of your quotas (minute, daily or monthly). Daily quotas are reset every day at 00:00 UTC.")
        sys.exit(2)
    # Parse the body exactly once; all remaining cases have JSON content.
    report = response.json()
    if(report['response_code'] == 0):
        print("Domain not found")
        sys.exit(2)
    return report
def main():
    """Parse the command line, query VirusTotal, and print/save subdomains."""
    #DATA
    global api, domain
    #OPTIONS
    global recursive, hosts, output
    #GET API FROM CONFIG FILE
    # BUGFIX: the config handle was opened and never closed; a context
    # manager guarantees it is released.
    with open('config.json', 'r') as Fconfig:
        obj = json.loads(Fconfig.read())
    api = str(obj['api'])
    if(api == "API_KEY" or api == ""):
        print("\nAPI NOT FOUND")
        usage()
    #OPTIONS AND ARGUMENTS
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'td:o:vh', ['hosts', 'domain=', 'output=', 'version', 'help'])
    except getopt.GetoptError:
        usage()
    ## GET OPTIONS
    for opt, arg in opts:
        if opt in ('-t', '--hosts'):
            hosts = True
        elif opt in ('-o', '--output'):
            output = arg
        elif opt in ('-v', '--version'):
            print(version)
            sys.exit(2)
        elif opt in ('-h', '--help'):
            usage()
        else:
            usage()
    ## GET DOMAIN
    try:
        domain = args[0]
    except Exception:
        usage()
    ## ERROR IF NO API
    if(api == ''):
        usage()
    ## MAKE REQUEST
    req = getVirusTotal()
    # Print every subdomain and remember the lines for the optional file.
    lines = []
    for subdomain in req['subdomains']:
        entry = "127.0.0.1 " + subdomain if hosts else subdomain
        print(entry)
        lines.append(entry)
    ## SAVE TO FILE IF OUTPUT SELECTED
    # BUGFIX: the output file used to be opened before the request (leaving
    # an empty file, and a leaked handle, on any failure). It is now opened
    # only after a successful request and closed automatically.
    if(output != False):
        with open(output, 'w') as fh:
            for entry in lines:
                fh.write(entry + "\n")
        print("\nSaved to file " + output)
# CLI entry point: only run when executed as a script, not on import.
if __name__ == "__main__":
    main()
| 23.352113 | 222 | 0.658022 |
3cfc9c54110ce20e402aa6d41deb2b6456c17531 | 1,527 | py | Python | rsconf/component/comsol.py | radiasoft/rsconf | 18ca2304661adbc79b07c2966d5fa4a47362ebcc | [
"Apache-2.0"
] | null | null | null | rsconf/component/comsol.py | radiasoft/rsconf | 18ca2304661adbc79b07c2966d5fa4a47362ebcc | [
"Apache-2.0"
] | 158 | 2018-02-12T16:04:49.000Z | 2022-03-17T20:12:35.000Z | rsconf/component/comsol.py | radiasoft/rsconf | 18ca2304661adbc79b07c2966d5fa4a47362ebcc | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
u"""manage comsol
Installing::
groupadd -g 524 lmcomsol
useradd -g lmcomsol -u 524 lmcomsol
groupadd -g 525 comsol-admin
useradd -g 525 -u 525 comsol-admin
yum install -y webkitgtk libXtst redhat-lsb-core canberra-gtk-module gnome-classic-session gnome-terminal
disable systemd listen on sunrpc all sockets
comsol listens on 0.0.0.0:tcp
On the mac, to get X11 working:
https://github.com/ControlSystemStudio/cs-studio/issues/1828
defaults write org.macosforge.xquartz.X11 enable_iglx -bool true
:copyright: Copyright (c) 2018 Bivio Software, Inc. All Rights Reserved.
:license: http://www.apache.org/licenses/LICENSE-2.0.html
"""
from __future__ import absolute_import, division, print_function
from pykern import pkcollections
from rsconf import component
class T(component.T):
    """rsconf component that provisions the comsol service's run directory
    and hooks it into the db_bkp backup machinery."""
    def internal_build(self):
        """Install the comsol run directory, access modes, and backup script."""
        from rsconf.component import db_bkp
        from rsconf import systemd
        self.buildt.require_component('db_bkp')
        j2_ctx = self.hdb.j2_ctx_copy()
        # Register this component's values (run user and run directory) in
        # the Jinja context unless the host config already supplies them.
        z = j2_ctx.setdefault(self.name, pkcollections.Dict(
            run_u='comsol',
            run_d=systemd.unit_run_d(j2_ctx, 'comsol')
        ))
        self.install_access(mode='700', owner=z.run_u)
        self.install_directory(z.run_d)
        db_bkp.install_script_and_subdir(
            self,
            j2_ctx,
            # db_bkp runs as root as comsol user doesn't have shell
            run_u=j2_ctx.rsconf_db.root_u,
            run_d=z.run_d,
        )
| 32.489362 | 109 | 0.679764 |
73ef18a461b19b7a295806aad817ae023d9b4761 | 5,031 | py | Python | sdk/python/pulumi_azure_native/dbforpostgresql/get_server_administrator.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/dbforpostgresql/get_server_administrator.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/dbforpostgresql/get_server_administrator.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from .. import _utilities, _tables
__all__ = [
'GetServerAdministratorResult',
'AwaitableGetServerAdministratorResult',
'get_server_administrator',
]
@pulumi.output_type
class GetServerAdministratorResult:
"""
Represents a and external administrator to be created.
"""
def __init__(__self__, administrator_type=None, id=None, login=None, name=None, sid=None, tenant_id=None, type=None):
if administrator_type and not isinstance(administrator_type, str):
raise TypeError("Expected argument 'administrator_type' to be a str")
pulumi.set(__self__, "administrator_type", administrator_type)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if login and not isinstance(login, str):
raise TypeError("Expected argument 'login' to be a str")
pulumi.set(__self__, "login", login)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if sid and not isinstance(sid, str):
raise TypeError("Expected argument 'sid' to be a str")
pulumi.set(__self__, "sid", sid)
if tenant_id and not isinstance(tenant_id, str):
raise TypeError("Expected argument 'tenant_id' to be a str")
pulumi.set(__self__, "tenant_id", tenant_id)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter(name="administratorType")
def administrator_type(self) -> str:
"""
The type of administrator.
"""
return pulumi.get(self, "administrator_type")
@property
@pulumi.getter
def id(self) -> str:
"""
Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def login(self) -> str:
"""
The server administrator login account name.
"""
return pulumi.get(self, "login")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def sid(self) -> str:
"""
The server administrator Sid (Secure ID).
"""
return pulumi.get(self, "sid")
@property
@pulumi.getter(name="tenantId")
def tenant_id(self) -> str:
"""
The server Active Directory Administrator tenant id.
"""
return pulumi.get(self, "tenant_id")
@property
@pulumi.getter
def type(self) -> str:
"""
The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
"""
return pulumi.get(self, "type")
class AwaitableGetServerAdministratorResult(GetServerAdministratorResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetServerAdministratorResult(
administrator_type=self.administrator_type,
id=self.id,
login=self.login,
name=self.name,
sid=self.sid,
tenant_id=self.tenant_id,
type=self.type)
def get_server_administrator(resource_group_name: Optional[str] = None,
server_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetServerAdministratorResult:
"""
Represents a and external administrator to be created.
API Version: 2017-12-01.
:param str resource_group_name: The name of the resource group. The name is case insensitive.
:param str server_name: The name of the server.
"""
__args__ = dict()
__args__['resourceGroupName'] = resource_group_name
__args__['serverName'] = server_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:dbforpostgresql:getServerAdministrator', __args__, opts=opts, typ=GetServerAdministratorResult).value
return AwaitableGetServerAdministratorResult(
administrator_type=__ret__.administrator_type,
id=__ret__.id,
login=__ret__.login,
name=__ret__.name,
sid=__ret__.sid,
tenant_id=__ret__.tenant_id,
type=__ret__.type)
| 34.696552 | 193 | 0.644007 |
c33320185d76a335f5266e03722142d42ca60a3c | 10,587 | py | Python | pytorch_lightning/trainer/connectors/logger_connector/fx_validator.py | AJSVB/pytorch-lightning | 00211c1de3c5901789417263f14a36c846cc42d1 | [
"Apache-2.0"
] | 2 | 2022-01-24T12:40:51.000Z | 2022-01-25T02:26:32.000Z | pytorch_lightning/trainer/connectors/logger_connector/fx_validator.py | AJSVB/pytorch-lightning | 00211c1de3c5901789417263f14a36c846cc42d1 | [
"Apache-2.0"
] | 1 | 2022-02-09T17:24:56.000Z | 2022-02-09T17:24:56.000Z | pytorch_lightning/trainer/connectors/logger_connector/fx_validator.py | AJSVB/pytorch-lightning | 00211c1de3c5901789417263f14a36c846cc42d1 | [
"Apache-2.0"
] | 2 | 2022-02-11T08:26:13.000Z | 2022-03-21T03:48:34.000Z | # Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Optional, Tuple, Union
from typing_extensions import TypedDict
from pytorch_lightning.utilities.exceptions import MisconfigurationException
class _FxValidator:
class _LogOptions(TypedDict):
allowed_on_step: Union[Tuple[bool], Tuple[bool, bool]]
allowed_on_epoch: Union[Tuple[bool], Tuple[bool, bool]]
default_on_step: bool
default_on_epoch: bool
functions = {
"on_before_accelerator_backend_setup": None,
"on_configure_sharded_model": None,
"on_before_backward": _LogOptions(
allowed_on_step=(False, True), allowed_on_epoch=(False, True), default_on_step=True, default_on_epoch=False
),
"backward": _LogOptions(
allowed_on_step=(False, True), allowed_on_epoch=(False, True), default_on_step=True, default_on_epoch=False
),
"on_after_backward": _LogOptions(
allowed_on_step=(False, True), allowed_on_epoch=(False, True), default_on_step=True, default_on_epoch=False
),
"on_before_optimizer_step": _LogOptions(
allowed_on_step=(False, True), allowed_on_epoch=(False, True), default_on_step=True, default_on_epoch=False
),
"optimizer_step": _LogOptions(
allowed_on_step=(False, True), allowed_on_epoch=(False, True), default_on_step=True, default_on_epoch=False
),
"lr_scheduler_step": None,
"on_before_zero_grad": _LogOptions(
allowed_on_step=(False, True), allowed_on_epoch=(False, True), default_on_step=True, default_on_epoch=False
),
"optimizer_zero_grad": _LogOptions(
allowed_on_step=(False, True), allowed_on_epoch=(False, True), default_on_step=True, default_on_epoch=False
),
"on_init_start": None,
"on_init_end": None,
"on_fit_start": None,
"on_fit_end": None,
"on_sanity_check_start": None,
"on_sanity_check_end": None,
"on_train_start": _LogOptions(
allowed_on_step=(False,), allowed_on_epoch=(True,), default_on_step=False, default_on_epoch=True
),
"on_train_end": None,
"on_validation_start": _LogOptions(
allowed_on_step=(False,), allowed_on_epoch=(True,), default_on_step=False, default_on_epoch=True
),
"on_validation_end": None,
"on_test_start": _LogOptions(
allowed_on_step=(False,), allowed_on_epoch=(True,), default_on_step=False, default_on_epoch=True
),
"on_test_end": None,
"on_predict_start": None,
"on_predict_end": None,
"on_pretrain_routine_start": None,
"on_pretrain_routine_end": None,
"on_train_epoch_start": _LogOptions(
allowed_on_step=(False,), allowed_on_epoch=(True,), default_on_step=False, default_on_epoch=True
),
"on_train_epoch_end": _LogOptions(
allowed_on_step=(False,), allowed_on_epoch=(True,), default_on_step=False, default_on_epoch=True
),
"on_validation_epoch_start": _LogOptions(
allowed_on_step=(False,), allowed_on_epoch=(True,), default_on_step=False, default_on_epoch=True
),
"on_validation_epoch_end": _LogOptions(
allowed_on_step=(False,), allowed_on_epoch=(True,), default_on_step=False, default_on_epoch=True
),
"on_test_epoch_start": _LogOptions(
allowed_on_step=(False,), allowed_on_epoch=(True,), default_on_step=False, default_on_epoch=True
),
"on_test_epoch_end": _LogOptions(
allowed_on_step=(False,), allowed_on_epoch=(True,), default_on_step=False, default_on_epoch=True
),
"on_predict_epoch_start": None,
"on_predict_epoch_end": None,
"on_epoch_start": _LogOptions(
allowed_on_step=(False,), allowed_on_epoch=(True,), default_on_step=False, default_on_epoch=True
),
"on_epoch_end": _LogOptions(
allowed_on_step=(False,), allowed_on_epoch=(True,), default_on_step=False, default_on_epoch=True
),
"on_batch_start": _LogOptions(
allowed_on_step=(False, True), allowed_on_epoch=(False, True), default_on_step=True, default_on_epoch=False
),
"on_batch_end": _LogOptions(
allowed_on_step=(False, True), allowed_on_epoch=(False, True), default_on_step=True, default_on_epoch=False
),
"on_train_batch_start": _LogOptions(
allowed_on_step=(False, True), allowed_on_epoch=(False, True), default_on_step=True, default_on_epoch=False
),
"on_train_batch_end": _LogOptions(
allowed_on_step=(False, True), allowed_on_epoch=(False, True), default_on_step=True, default_on_epoch=False
),
"on_validation_batch_start": _LogOptions(
allowed_on_step=(False, True), allowed_on_epoch=(False, True), default_on_step=False, default_on_epoch=True
),
"on_validation_batch_end": _LogOptions(
allowed_on_step=(False, True), allowed_on_epoch=(False, True), default_on_step=False, default_on_epoch=True
),
"on_test_batch_start": _LogOptions(
allowed_on_step=(False, True), allowed_on_epoch=(False, True), default_on_step=False, default_on_epoch=True
),
"on_test_batch_end": _LogOptions(
allowed_on_step=(False, True), allowed_on_epoch=(False, True), default_on_step=False, default_on_epoch=True
),
"on_predict_batch_start": None,
"on_predict_batch_end": None,
"on_keyboard_interrupt": None,
"on_exception": None,
"on_save_checkpoint": None,
"on_load_checkpoint": None,
"setup": None,
"teardown": None,
"configure_sharded_model": None,
"training_step": _LogOptions(
allowed_on_step=(False, True), allowed_on_epoch=(False, True), default_on_step=True, default_on_epoch=False
),
"validation_step": _LogOptions(
allowed_on_step=(False, True), allowed_on_epoch=(False, True), default_on_step=False, default_on_epoch=True
),
"test_step": _LogOptions(
allowed_on_step=(False, True), allowed_on_epoch=(False, True), default_on_step=False, default_on_epoch=True
),
"predict_step": None,
"training_step_end": _LogOptions(
allowed_on_step=(False, True), allowed_on_epoch=(False, True), default_on_step=True, default_on_epoch=False
),
"validation_step_end": _LogOptions(
allowed_on_step=(False, True), allowed_on_epoch=(False, True), default_on_step=False, default_on_epoch=True
),
"test_step_end": _LogOptions(
allowed_on_step=(False, True), allowed_on_epoch=(False, True), default_on_step=False, default_on_epoch=True
),
"training_epoch_end": _LogOptions(
allowed_on_step=(False,), allowed_on_epoch=(True,), default_on_step=False, default_on_epoch=True
),
"validation_epoch_end": _LogOptions(
allowed_on_step=(False,), allowed_on_epoch=(True,), default_on_step=False, default_on_epoch=True
),
"test_epoch_end": _LogOptions(
allowed_on_step=(False,), allowed_on_epoch=(True,), default_on_step=False, default_on_epoch=True
),
"configure_optimizers": None,
"on_train_dataloader": None,
"train_dataloader": None,
"on_val_dataloader": None,
"val_dataloader": None,
"on_test_dataloader": None,
"test_dataloader": None,
"prepare_data": None,
"configure_callbacks": None,
"on_validation_model_eval": None,
"on_test_model_eval": None,
"on_validation_model_train": None,
"on_test_model_train": None,
}
@classmethod
def check_logging(cls, fx_name: str) -> None:
"""Check if the given hook is allowed to log."""
if fx_name not in cls.functions:
raise RuntimeError(
f"Logging inside `{fx_name}` is not implemented."
" Please, open an issue in `https://github.com/PyTorchLightning/pytorch-lightning/issues`."
)
if cls.functions[fx_name] is None:
raise MisconfigurationException(f"You can't `self.log()` inside `{fx_name}`.")
@classmethod
def get_default_logging_levels(
cls, fx_name: str, on_step: Optional[bool], on_epoch: Optional[bool]
) -> Tuple[bool, bool]:
"""Return default logging levels for given hook."""
fx_config = cls.functions[fx_name]
assert fx_config is not None
on_step = fx_config["default_on_step"] if on_step is None else on_step
on_epoch = fx_config["default_on_epoch"] if on_epoch is None else on_epoch
return on_step, on_epoch
@classmethod
def check_logging_levels(cls, fx_name: str, on_step: bool, on_epoch: bool) -> None:
"""Check if the logging levels are allowed in the given hook."""
fx_config = cls.functions[fx_name]
assert fx_config is not None
m = "You can't `self.log({}={})` inside `{}`, must be one of {}."
if on_step not in fx_config["allowed_on_step"]:
msg = m.format("on_step", on_step, fx_name, fx_config["allowed_on_step"])
raise MisconfigurationException(msg)
if on_epoch not in fx_config["allowed_on_epoch"]:
msg = m.format("on_epoch", on_epoch, fx_name, fx_config["allowed_on_epoch"])
raise MisconfigurationException(msg)
@classmethod
def check_logging_and_get_default_levels(
cls, fx_name: str, on_step: Optional[bool], on_epoch: Optional[bool]
) -> Tuple[bool, bool]:
"""Check if the given hook name is allowed to log and return logging levels."""
cls.check_logging(fx_name)
on_step, on_epoch = cls.get_default_logging_levels(fx_name, on_step, on_epoch)
cls.check_logging_levels(fx_name, on_step, on_epoch)
return on_step, on_epoch
| 47.475336 | 119 | 0.666667 |
420c0a1c91106d508fdf07cd8abf393557ff8e23 | 133 | py | Python | nose2/tests/functional/support/scenario/module_import_err/test_import_err.py | ltfish/nose2 | e47363dad10056cf906daf387613c21d74f37e56 | [
"BSD-2-Clause"
] | null | null | null | nose2/tests/functional/support/scenario/module_import_err/test_import_err.py | ltfish/nose2 | e47363dad10056cf906daf387613c21d74f37e56 | [
"BSD-2-Clause"
] | null | null | null | nose2/tests/functional/support/scenario/module_import_err/test_import_err.py | ltfish/nose2 | e47363dad10056cf906daf387613c21d74f37e56 | [
"BSD-2-Clause"
] | null | null | null | import unittest
raise ImportError("booms")
def test():
pass
class Test(unittest.TestCase):
def test(self):
pass
| 10.230769 | 30 | 0.646617 |
f324b9abbc39027d0f5d722e3a6fd1a12ba2426e | 998 | py | Python | crispy_forms/tests/runtests_bootstrap3.py | itinken/django-crispy-forms-pure | e31e39561c10b01be3693914aad1aa79350f5647 | [
"MIT"
] | null | null | null | crispy_forms/tests/runtests_bootstrap3.py | itinken/django-crispy-forms-pure | e31e39561c10b01be3693914aad1aa79350f5647 | [
"MIT"
] | null | null | null | crispy_forms/tests/runtests_bootstrap3.py | itinken/django-crispy-forms-pure | e31e39561c10b01be3693914aad1aa79350f5647 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import os, sys
os.environ['DJANGO_SETTINGS_MODULE'] = 'test_settings'
parent = os.path.dirname(os.path.dirname(os.path.dirname(
os.path.abspath(__file__))))
sys.path.insert(0, parent)
from django.test.simple import DjangoTestSuiteRunner
from django.conf import settings
settings.CRISPY_TEMPLATE_PACK = 'bootstrap3'
def runtests():
return DjangoTestSuiteRunner(failfast=False).run_tests([
'crispy_forms.TestBasicFunctionalityTags',
'crispy_forms.TestFormHelper',
'crispy_forms.TestBootstrapFormHelper',
'crispy_forms.TestBootstrap3FormHelper',
'crispy_forms.TestFormLayout',
'crispy_forms.TestBootstrapFormLayout',
'crispy_forms.TestBootstrap3FormLayout',
'crispy_forms.TestLayoutObjects',
'crispy_forms.TestBootstrapLayoutObjects',
'crispy_forms.TestDynamicLayouts',
], verbosity=1, interactive=True)
if __name__ == '__main__':
if runtests():
sys.exit(1)
| 28.514286 | 60 | 0.720441 |
0a59b50e171c5eefcfdb44bebdeda4464eb0401d | 5,397 | py | Python | docs/mcpi/algorytmy/mcpi-rbrowna.py | damiankarol7/python101 | 1978a9402a8fb0f20c4ca7bd542cb8d7d4501b9b | [
"MIT"
] | 44 | 2015-02-11T19:10:37.000Z | 2021-11-11T09:45:43.000Z | docs/mcpi/algorytmy/mcpi-rbrowna.py | damiankarol7/python101 | 1978a9402a8fb0f20c4ca7bd542cb8d7d4501b9b | [
"MIT"
] | 9 | 2015-02-06T21:26:25.000Z | 2022-03-31T10:44:22.000Z | docs/mcpi/algorytmy/mcpi-rbrowna.py | damiankarol7/python101 | 1978a9402a8fb0f20c4ca7bd542cb8d7d4501b9b | [
"MIT"
] | 172 | 2015-06-13T07:16:24.000Z | 2022-03-30T20:41:11.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import numpy as np # import biblioteki do obliczeń naukowych
import matplotlib.pyplot as plt # import biblioteki do tworzenia wykresów
from random import randint
from time import sleep
import mcpi.minecraft as minecraft # import modułu minecraft
import mcpi.block as block # import modułu block
os.environ["USERNAME"] = "Steve" # wpisz dowolną nazwę użytkownika
os.environ["COMPUTERNAME"] = "mykomp" # wpisz dowolną nazwę komputera
mc = minecraft.Minecraft.create("192.168.1.10") # połączenie z serwerem
def plac(x, y, z, roz=10, gracz=False):
"""
Funkcja tworzy podłoże i wypełnia sześcienny obszar od podanej pozycji,
opcjonalnie umieszcza gracza w środku.
Parametry: x, y, z - współrzędne pozycji początkowej,
roz - rozmiar wypełnianej przestrzeni,
gracz - czy umieścić gracza w środku
Wymaga: globalnych obiektów mc i block.
"""
podloga = block.SAND
wypelniacz = block.AIR
# podloga i czyszczenie
mc.setBlocks(x, y - 1, z, x + roz, y - 1, z + roz, podloga)
mc.setBlocks(x, y, z, x + roz, y + roz, z + roz, wypelniacz)
# umieść gracza w środku
if gracz:
mc.player.setPos(x + roz / 2, y + roz / 2, z + roz / 2)
def wykres(x, y, tytul="Wykres funkcji", *extra):
"""
Funkcja wizualizuje wykres funkcji, której argumenty zawiera lista x
a wartości lista y i ew. dodatkowe listy w parametrze *extra
"""
if len(extra):
plt.plot(x, y, extra[0], extra[1]) # dwa wykresy na raz
else:
plt.plot(x, y, "o:", color="blue", linewidth="3", alpha=0.8)
plt.title(tytul)
plt.grid(True)
plt.show()
def rysuj(x, y, z, blok=block.IRON_BLOCK):
"""
Funkcja wizualizuje wykres funkcji, umieszczając bloki w pionie/poziomie
w punktach wyznaczonych przez pary elementów list x, y lub x, z
"""
czylista = True if len(y) > 1 else False
for i in range(len(x)):
if czylista:
print(x[i], y[i])
mc.setBlock(x[i], y[i], z[0], blok)
else:
print(x[i], z[i])
mc.setBlock(x[i], y[0], z[i], blok)
def rysuj_linie(x, y, z, blok=block.IRON_BLOCK):
"""
Funkcja wizualizuje wykres funkcji, umieszczając bloki w pionie/poziomie
w punktach wyznaczonych przez pary elementów list x, y lub x, z
przy użyciu metody drawLine()
"""
import local.minecraftstuff as mcstuff
mcfig = mcstuff.MinecraftDrawing(mc)
czylista = True if len(y) > 1 else False
for i in range(len(x) - 1):
x1 = int(x[i])
x2 = int(x[i + 1])
if czylista:
y1 = int(y[i])
y2 = int(y[i + 1])
mc.setBlock(x2, y2, z[0], block.GRASS)
mc.setBlock(x1, y1, z[0], block.GRASS)
mcfig.drawLine(x1, y1, z[0], x2, y2, z[0], blok)
mc.setBlock(x2, y2, z[0], block.GRASS)
mc.setBlock(x1, y1, z[0], block.GRASS)
print (x1, y1, z[0], x2, y2, z[0])
else:
z1 = int(z[i])
z2 = int(z[i + 1])
mc.setBlock(x1, y[0], z1, block.GRASS)
mc.setBlock(x2, y[0], z2, block.GRASS)
mcfig.drawLine(x1, y[0], z1, x2, y[0], z2, blok)
mc.setBlock(x1, y[0], z1, block.GRASS)
mc.setBlock(x2, y[0], z2, block.GRASS)
print (x1, y[0], z1, x2, y[0], z2)
sleep(1) # przerwa na reklamę :-)
mc.setBlock(0, 1, 0, block.OBSIDIAN)
if czylista:
mc.setBlock(x2, y2, z[0], block.OBSIDIAN)
else:
mc.setBlock(x2, y[0], z2, block.OBSIDIAN)
def ruchyBrowna(dane=[]):
if len(dane):
lx, ly = dane # rozpakowanie listy
x = lx[-1] # ostatni element lx
y = ly[-1] # ostatni element ly
else:
n = int(raw_input("Ile ruchów? "))
r = int(raw_input("Krok przesunięcia? "))
x = y = 0
lx = [0] # lista odciętych
ly = [0] # lista rzędnych
for i in range(0, n):
# losujemy kąt i zamieniamy na radiany
rad = float(randint(0, 360)) * np.pi / 180
x = x + r * np.cos(rad) # wylicz współrzędną x
y = y + r * np.sin(rad) # wylicz współrzędną y
x = int(round(x, 2)) # zaokrągl
y = int(round(y, 2)) # zaokrągl
print(x, y)
lx.append(x)
ly.append(y)
# oblicz wektor końcowego przesunięcia
s = np.fabs(np.sqrt(x**2 + y**2))
print "Wektor przesunięcia: {:.2f}".format(s)
wykres(lx, ly, "Ruchy Browna")
rysuj_linie(lx, [1], ly, block.WOOL)
if not len(dane):
zapisz_dane((lx, ly))
def zapisz_dane(dane):
"""Funkcja zapisuje dane w formacie json w pliku"""
import json
plik = open('rbrowna.log', 'w')
json.dump(dane, plik)
plik.close()
def czytaj_dane():
"""Funkcja odczytuje dane w formacie json z pliku"""
import json
dane = []
nazwapliku = raw_input("Podaj nazwę pliku z danymi lub naciśnij ENTER: ")
if os.path.isfile(nazwapliku):
with open(nazwapliku, "r") as plik:
dane = json.load(plik)
else:
print "Podany plik nie istnieje!"
return dane
def main():
mc.postToChat("Ruchy Browna") # wysłanie komunikatu do mc
plac(-80, -20, -80, 160)
plac(-80, 0, -80, 160)
ruchyBrowna(czytaj_dane())
return 0
if __name__ == '__main__':
main()
| 31.377907 | 77 | 0.580878 |
0fe54d2ea3974f4c9cd41a8790103d46c04f6f7b | 268 | py | Python | Modelling/Embedding/LLE.py | fraunhofer-iais/IAIS-Python-Snippets | a3ee610d6270cda2c891688851696c34831ffa2b | [
"MIT"
] | null | null | null | Modelling/Embedding/LLE.py | fraunhofer-iais/IAIS-Python-Snippets | a3ee610d6270cda2c891688851696c34831ffa2b | [
"MIT"
] | null | null | null | Modelling/Embedding/LLE.py | fraunhofer-iais/IAIS-Python-Snippets | a3ee610d6270cda2c891688851696c34831ffa2b | [
"MIT"
] | null | null | null | # Calculate the Locally Linear Embedding
# NOTE THAT YOUR DATA NEEDS TO BE NORMALIZED
from sklearn.manifold import LocallyLinearEmbedding
# Calculate LLE embedding
lle = LocallyLinearEmbedding(n_neighbors=20, n_components=2)
x, y = np.array(lle.fit_transform(data)).T | 38.285714 | 60 | 0.813433 |
7a7d84dff3f7fdd32bc43c865e1f30e0b577dfda | 11 | py | Python | example_snippets/multimenus_snippets/Snippets/NumPy/Vectorized (universal) functions/Rounding and clipping/trunc Truncated value of the input, element-wise.py | kuanpern/jupyterlab-snippets-multimenus | 477f51cfdbad7409eab45abe53cf774cd70f380c | [
"BSD-3-Clause"
] | null | null | null | example_snippets/multimenus_snippets/Snippets/NumPy/Vectorized (universal) functions/Rounding and clipping/trunc Truncated value of the input, element-wise.py | kuanpern/jupyterlab-snippets-multimenus | 477f51cfdbad7409eab45abe53cf774cd70f380c | [
"BSD-3-Clause"
] | null | null | null | example_snippets/multimenus_snippets/Snippets/NumPy/Vectorized (universal) functions/Rounding and clipping/trunc Truncated value of the input, element-wise.py | kuanpern/jupyterlab-snippets-multimenus | 477f51cfdbad7409eab45abe53cf774cd70f380c | [
"BSD-3-Clause"
] | 1 | 2021-02-04T04:51:48.000Z | 2021-02-04T04:51:48.000Z | np.trunc(a) | 11 | 11 | 0.727273 |
e479e7364f36311eef255e18634633cf24c4f32f | 2,214 | py | Python | tests/planar_tests/test_segments_cross_or_overlap.py | lycantropos/bentley_ottmann | 988075aada80e5d5c8d53d513de130004b69c3b9 | [
"MIT"
] | 13 | 2020-04-03T04:43:44.000Z | 2022-01-18T10:40:40.000Z | tests/planar_tests/test_segments_cross_or_overlap.py | lycantropos/bentley_ottmann | 988075aada80e5d5c8d53d513de130004b69c3b9 | [
"MIT"
] | 19 | 2020-01-31T05:25:42.000Z | 2021-04-01T13:20:05.000Z | tests/planar_tests/test_segments_cross_or_overlap.py | lycantropos/bentley_ottmann | 988075aada80e5d5c8d53d513de130004b69c3b9 | [
"MIT"
] | 3 | 2020-06-08T11:15:32.000Z | 2021-02-15T12:37:01.000Z | from typing import List
import pytest
from ground.base import (Context,
Relation)
from ground.hints import Segment
from hypothesis import given
from bentley_ottmann.planar import segments_cross_or_overlap
from tests.utils import (reverse_segment,
reverse_segments_coordinates)
from . import strategies
@given(strategies.segments_lists)
def test_basic(segments: List[Segment]) -> None:
result = segments_cross_or_overlap(segments)
assert isinstance(result, bool)
@given(strategies.empty_segments_lists)
def test_base_case(segments: List[Segment]) -> None:
result = segments_cross_or_overlap(segments)
assert not result
@given(strategies.non_empty_segments_lists)
def test_step(context: Context, segments: List[Segment]) -> None:
first_segment, *rest_segments = segments
result = segments_cross_or_overlap(rest_segments)
next_result = segments_cross_or_overlap(segments)
assert (next_result
is (result
or any(context.segments_relation(first_segment, segment)
in (Relation.COMPONENT, Relation.COMPOSITE,
Relation.CROSS, Relation.EQUAL, Relation.OVERLAP)
for segment in rest_segments)))
@given(strategies.segments_lists)
def test_reversed(segments: List[Segment]) -> None:
result = segments_cross_or_overlap(segments)
assert result is segments_cross_or_overlap(segments[::-1])
@given(strategies.segments_lists)
def test_reversed_endpoints(segments: List[Segment]) -> None:
result = segments_cross_or_overlap(segments)
assert result is segments_cross_or_overlap([reverse_segment(segment)
for segment in segments])
@given(strategies.segments_lists)
def test_reversed_coordinates(segments: List[Segment]) -> None:
result = segments_cross_or_overlap(segments)
assert result is segments_cross_or_overlap(reverse_segments_coordinates(
segments))
@given(strategies.degenerate_segments_lists)
def test_degenerate_segments(segments: List[Segment]) -> None:
with pytest.raises(ValueError):
segments_cross_or_overlap(segments)
| 31.183099 | 76 | 0.722674 |
e37c28d5507f490b24b64ffcf51c39bd724f0190 | 3,802 | py | Python | comodit_client/api/hostGroup.py | AymericDuvivier/comodit-client | cd92b43240181ab7178545e48ca854ee6bc86bfc | [
"MIT"
] | 1 | 2015-01-20T17:24:34.000Z | 2015-01-20T17:24:34.000Z | comodit_client/api/hostGroup.py | AymericDuvivier/comodit-client | cd92b43240181ab7178545e48ca854ee6bc86bfc | [
"MIT"
] | null | null | null | comodit_client/api/hostGroup.py | AymericDuvivier/comodit-client | cd92b43240181ab7178545e48ca854ee6bc86bfc | [
"MIT"
] | 24 | 2016-09-07T15:28:00.000Z | 2021-12-08T16:03:16.000Z | # coding: utf-8
"""
Provides the classes related to hostgroup entity: L{HostGroup}
and L{HostGroupCollection}.
"""
from __future__ import print_function
from __future__ import absolute_import
from .collection import Collection
from comodit_client.api.entity import Entity
from comodit_client.util.json_wrapper import JsonWrapper
from comodit_client.api.settings import HasSettings
class HostGroupCollection(Collection):
"""
Collection of host group. A host group collection is owned by an organization
L{Organization}.
"""
def _new(self, json_data = None):
return HostGroup(self, json_data)
def new(self, json_data):
"""
Instantiates a new host group object.
@rtype: L{HostGroup}
"""
hostGroup = self._new(json_data)
return hostGroup
def create(self):
"""
Creates a remote hostGroup entity and returns associated local
object.
@rtype: L{HostGroup}
"""
hostGroup = self.new()
hostGroup.create()
return hostGroup
class HostGroup(HasSettings):
"""
HostGroup entity representation. A host group is a group of host.
"""
@property
def organization(self):
"""
The name of the organization owning this orchestration.
@rtype: string
"""
return self._get_field("organization")
@property
def name(self):
"""
The name of the hostGroup
@rtype: string
"""
return self._get_field("name")
@property
def description(self):
"""
The description of the hostGroup
@rtype: string
"""
return self._get_field("description")
@property
def ordered_host(self):
"""
List of host's in group
@rtype: list of ordered hosts L{HostQueue}
"""
return self._get_list_field("hosts", lambda x: OrderedHost(x))
def _show(self, indent = 0):
print(" "*indent, "Name:", self.name)
print(" "*indent, "Description:", self.description)
print(" "*indent, "Hosts:")
#sort by position
self.ordered_host.sort(key=lambda x: x.position)
for h in self.ordered_host:
h._show(indent + 2)
class OrderedHostGroup(JsonWrapper):
@property
def position(self):
"""
the position of hostgroups
@rtype: string
"""
return self._get_field("position")
@property
def hostgroup(self):
"""
Hostgroups is a container of ordered hosts
@rtype: string
"""
return HostGroup(None, self._get_field("hostGroup"));
def _show(self, indent = 0):
print(" "*indent, "Position:", self.position)
self.hostgroup._show(indent+2)
class OrderedHost(JsonWrapper):
@property
def organization(self):
"""
The organization name
@rtype: string
"""
return self._get_field("organization")
@property
def environment(self):
"""
The environment name
@rtype: string
"""
return self._get_field("environment")
@property
def host(self):
"""
The host name
@rtype: string
"""
return self._get_field("host")
@property
def canonical_name(self):
"""
the identifier name of host
@rtype: string
"""
return self._get_field("canonicalName")
@property
def position(self):
"""
the identifier name of host
@rtype: string
"""
return self._get_field("position")
def _show(self, indent = 0):
print(" "*indent, "Position:", self.position)
print(" "*indent, "Name:", self.canonical_name)
| 21.850575 | 81 | 0.587849 |
053b8d7873790a737286d2ce75346e22e456a7ab | 12,697 | py | Python | Lib/fontbakery/profiles/os2.py | paullinnerud/fontbakery | 666b3425b14f6c59a43cddf30279ca2fdc6e714e | [
"Apache-2.0"
] | null | null | null | Lib/fontbakery/profiles/os2.py | paullinnerud/fontbakery | 666b3425b14f6c59a43cddf30279ca2fdc6e714e | [
"Apache-2.0"
] | null | null | null | Lib/fontbakery/profiles/os2.py | paullinnerud/fontbakery | 666b3425b14f6c59a43cddf30279ca2fdc6e714e | [
"Apache-2.0"
] | null | null | null | from fontbakery.callable import check
from fontbakery.status import FAIL, PASS, WARN, INFO
from fontbakery.message import Message
# used to inform get_module_profile whether and how to create a profile
from fontbakery.fonts_profile import profile_factory # NOQA pylint: disable=unused-import
profile_imports = [
('.shared_conditions', ('vmetrics', )),
('.googlefonts_conditions', ('RIBBI_ttFonts', ))
]
@check(
id = 'com.google.fonts/check/family/panose_proportion',
proposal = 'legacy:check/009'
)
def com_google_fonts_check_family_panose_proportion(ttFonts):
"""Fonts have consistent PANOSE proportion?"""
passed = True
proportion = None
missing = False
for ttFont in ttFonts:
if "OS/2" not in ttFont:
missing = True
passed = False
continue
if proportion is None:
proportion = ttFont['OS/2'].panose.bProportion
if proportion != ttFont['OS/2'].panose.bProportion:
passed = False
if missing:
yield FAIL,\
Message("lacks-OS/2",
"One or more fonts lack the required OS/2 table.")
if not passed:
yield FAIL,\
Message("inconsistency",
"PANOSE proportion is not the same across this family."
" In order to fix this, please make sure that"
" the panose.bProportion value is the same"
" in the OS/2 table of all of this family font files.")
else:
yield PASS, "Fonts have consistent PANOSE proportion."
@check(
id = 'com.google.fonts/check/family/panose_familytype',
proposal = 'legacy:check/010'
)
def com_google_fonts_check_family_panose_familytype(ttFonts):
"""Fonts have consistent PANOSE family type?"""
passed = True
familytype = None
missing = False
for ttfont in ttFonts:
if "OS/2" not in ttfont:
passed = False
missing = True
continue
if familytype is None:
familytype = ttfont['OS/2'].panose.bFamilyType
if familytype != ttfont['OS/2'].panose.bFamilyType:
passed = False
if missing:
yield FAIL,\
Message("lacks-OS/2",
"One or more fonts lack the required OS/2 table.")
if not passed:
yield FAIL,\
Message("inconsistency",
"PANOSE family type is not the same across this family."
" In order to fix this, please make sure that"
" the panose.bFamilyType value is the same"
" in the OS/2 table of all of this family font files.")
else:
yield PASS, "Fonts have consistent PANOSE family type."
@check(
    id = 'com.google.fonts/check/xavgcharwidth',
    conditions = ['is_ttf'],
    proposal = 'legacy:check/034'
)
def com_google_fonts_check_xavgcharwidth(ttFont):
    """Check if OS/2 xAvgCharWidth is correct.

    Recomputes the expected xAvgCharWidth per the OS/2 table version
    (all glyphs for version >= 3, weighted latin lowercase + space for
    older versions) and compares it against the stored value, tolerating
    small rounding differences.
    """
    if "OS/2" not in ttFont:
        yield FAIL,\
              Message("lacks-OS/2",
                      "Required OS/2 table is missing.")
        return
    current_value = ttFont['OS/2'].xAvgCharWidth
    ACCEPTABLE_ERROR = 10  # Width deviation tolerance in font units
    # Since version 3, the average is computed using _all_ glyphs in a font.
    if ttFont['OS/2'].version >= 3:
        calculation_rule = "the average of the widths of all glyphs in the font"
        if not ttFont['hmtx'].metrics:  # May contain just '.notdef', which is valid.
            yield FAIL,\
                  Message("missing-glyphs",
                          "CRITICAL: Found no glyph width data in the hmtx table!")
            return
        width_sum = 0
        count = 0
        for glyph_id in ttFont['glyf'].glyphs:  # At least .notdef must be present.
            # NOTE(review): assumes every 'glyf' glyph has an 'hmtx' entry; a
            # malformed font would raise KeyError here — confirm upstream.
            width = ttFont['hmtx'].metrics[glyph_id][0]
            # The OpenType spec doesn't exclude negative widths, but only positive
            # widths seems to be the assumption in the wild?
            if width > 0:
                count += 1
                width_sum += width
        if count == 0:
            # Bug fix: previously this divided by zero when no glyph had a
            # positive advance width.
            yield FAIL,\
                  Message("missing-glyphs",
                          "CRITICAL: Found no glyph width data in the hmtx table!")
            return
        expected_value = int(round(width_sum / count))
    else:  # Version 2 and below only consider lowercase latin glyphs and space.
        calculation_rule = ("the weighted average of the widths of the latin"
                            " lowercase glyphs in the font")
        # Per-glyph weights prescribed by the OS/2 v2 spec (sum to 1000).
        weightFactors = {
            'a': 64,
            'b': 14,
            'c': 27,
            'd': 35,
            'e': 100,
            'f': 20,
            'g': 14,
            'h': 42,
            'i': 63,
            'j': 3,
            'k': 6,
            'l': 35,
            'm': 20,
            'n': 56,
            'o': 56,
            'p': 17,
            'q': 4,
            'r': 49,
            's': 56,
            't': 71,
            'u': 31,
            'v': 10,
            'w': 18,
            'x': 3,
            'y': 18,
            'z': 2,
            'space': 166
        }
        glyph_order = ttFont.getGlyphOrder()
        if not all(character in glyph_order for character in weightFactors):
            yield FAIL,\
                  Message("missing-glyphs",
                          "Font is missing the required"
                          " latin lowercase letters and/or space.")
            return
        width_sum = 0
        for glyph_id in weightFactors:
            width = ttFont['hmtx'].metrics[glyph_id][0]
            width_sum += (width * weightFactors[glyph_id])
        expected_value = int(width_sum / 1000.0 + 0.5)  # round to closest int
    difference = abs(current_value - expected_value)
    # We accept matches and off-by-ones due to rounding as correct.
    if current_value == expected_value or difference == 1:
        yield PASS, "OS/2 xAvgCharWidth value is correct."
    elif difference < ACCEPTABLE_ERROR:
        yield INFO, \
              Message("xAvgCharWidth-close",
                      f"OS/2 xAvgCharWidth is {current_value} but it should be"
                      f" {expected_value} which corresponds to {calculation_rule}."
                      f" These are similar values, which"
                      f" may be a symptom of the slightly different"
                      f" calculation of the xAvgCharWidth value in"
                      f" font editors. There's further discussion on"
                      f" this at https://github.com/googlefonts/fontbakery"
                      f"/issues/1622")
    else:
        yield WARN, \
              Message("xAvgCharWidth-wrong",
                      f"OS/2 xAvgCharWidth is {current_value} but it should be"
                      f" {expected_value} which corresponds to {calculation_rule}.")
@check(
    id = 'com.adobe.fonts/check/fsselection_matches_macstyle',
    rationale = """
    The bold and italic bits in OS/2.fsSelection must match the bold and italic
    bits in head.macStyle per the OpenType spec.
    """,
    proposal = 'https://github.com/googlefonts/fontbakery/pull/2382'
)
def com_adobe_fonts_check_fsselection_matches_macstyle(ttFont):
    """Check if OS/2 fsSelection matches head macStyle bold and italic bits."""
    # Both tables must be present before the bit comparison can happen.
    missing_tables = False
    for table_tag in ("OS/2", "head"):
        if table_tag not in ttFont:
            missing_tables = True
            yield FAIL,\
                  Message(f'lacks-{table_tag}',
                          f"The '{table_tag}' table is missing.")
    if missing_tables:
        return
    from fontbakery.constants import FsSelection, MacStyle
    # Compare the bold and italic flags bit-by-bit, table-driven.
    mismatched = False
    bit_pairs = (("bold", MacStyle.BOLD, FsSelection.BOLD),
                 ("italic", MacStyle.ITALIC, FsSelection.ITALIC))
    for style_name, mac_bit, fs_bit in bit_pairs:
        mac_flag = (ttFont['head'].macStyle & mac_bit) != 0
        fs_flag = (ttFont['OS/2'].fsSelection & fs_bit) != 0
        if mac_flag != fs_flag:
            mismatched = True
            yield FAIL, \
                  Message(f"fsselection-macstyle-{style_name}",
                          f"The OS/2.fsSelection and head.macStyle "
                          f"{style_name} settings do not match.")
    if not mismatched:
        yield PASS, ("The OS/2.fsSelection and head.macStyle "
                     "bold and italic settings match.")
@check(
    id = 'com.adobe.fonts/check/family/bold_italic_unique_for_nameid1',
    conditions=['RIBBI_ttFonts'],
    rationale = """
    Per the OpenType spec: name ID 1 'is used in combination with Font Subfamily
    name (name ID 2), and should be shared among at most four fonts that differ
    only in weight or style.
    This four-way distinction should also be reflected in the OS/2.fsSelection
    field, using bits 0 and 5.
    """,
    proposal = 'https://github.com/googlefonts/fontbakery/pull/2388'
)
def com_adobe_fonts_check_family_bold_italic_unique_for_nameid1(RIBBI_ttFonts):
    """Check that OS/2.fsSelection bold & italic settings are unique
    for each NameID1"""
    from collections import Counter
    from fontbakery.utils import get_name_entry_strings
    from fontbakery.constants import NameID, FsSelection
    failed = False
    # Accumulates one (family name, "Bold=…, Italic=…") pair per distinct
    # family name found in each font.
    family_name_and_bold_italic = list()
    for ttFont in RIBBI_ttFonts:
        names_list = get_name_entry_strings(ttFont, NameID.FONT_FAMILY_NAME)
        # names_list will likely contain multiple entries, e.g. multiple copies
        # of the same name in the same language for different platforms, but
        # also different names in different languages, we use set() below
        # to remove the duplicates and only store the unique family name(s)
        # used for a given font
        names_set = set(names_list)
        bold = (ttFont['OS/2'].fsSelection & FsSelection.BOLD) != 0
        italic = (ttFont['OS/2'].fsSelection & FsSelection.ITALIC) != 0
        bold_italic = 'Bold=%r, Italic=%r' % (bold, italic)
        for name in names_set:
            family_name_and_bold_italic.append((name, bold_italic,))
    # Count how many fonts share each (family name, style-bits) pair; any
    # pair seen more than once means the four-way distinction is violated.
    counter = Counter(family_name_and_bold_italic)
    for (family_name, bold_italic), count in counter.items():
        if count > 1:
            failed = True
            yield FAIL, \
                  Message("unique-fsselection",
                          f"Family '{family_name}' has {count} fonts"
                          f" (should be no more than 1) with the"
                          f" same OS/2.fsSelection bold & italic settings:"
                          f" {bold_italic}")
    if not failed:
        yield PASS, ("The OS/2.fsSelection bold & italic settings were unique "
                     "within each compatible family group.")
@check(
    id = 'com.google.fonts/check/code_pages',
    rationale = """
    At least some programs (such as Word and Sublime Text) under Windows 7
    do not recognize fonts unless code page bits are properly set on the
    ulCodePageRange1 (and/or ulCodePageRange2) fields of the OS/2 table.
    More specifically, the fonts are selectable in the font menu, but whichever
    Windows API these applications use considers them unsuitable for any
    character set, so anything set in these fonts is rendered with Arial as a
    fallback font.
    This check currently does not identify which code pages should be set.
    Auto-detecting coverage is not trivial since the OpenType specification
    leaves the interpretation of whether a given code page is "functional"
    or not open to the font developer to decide.
    So here we simply detect as a FAIL when a given font has no code page
    declared at all.
    """,
    proposal = 'https://github.com/googlefonts/fontbakery/issues/2474'
)
def com_google_fonts_check_code_pages(ttFont):
    """Check code page character ranges"""
    # The OS/2 table is where the code page coverage bits live.
    if "OS/2" not in ttFont:
        yield FAIL,\
              Message("lacks-OS/2",
                      "The required OS/2 table is missing.")
        return
    # FAIL when either field is absent or both are all-zero, i.e. the font
    # declares no functional code page at all.
    if not hasattr(ttFont['OS/2'], "ulCodePageRange1") or \
       not hasattr(ttFont['OS/2'], "ulCodePageRange2") or \
       (ttFont['OS/2'].ulCodePageRange1 == 0 and \
        ttFont['OS/2'].ulCodePageRange2 == 0):
        yield FAIL, \
              Message("no-code-pages",
                      "No code pages defined in the OS/2 table"
                      # Bug fix: the message previously named the second field
                      # "CodePageRange2" instead of its actual name.
                      " ulCodePageRange1 and ulCodePageRange2 fields.")
    else:
        yield PASS, "At least one code page is defined."
| 38.243976 | 89 | 0.588092 |
14186d39a2e896dcbf7694d98d1c457a3725daad | 1,916 | py | Python | app/web/app.py | liephat/ai-image-eye | 583d099e9b46c222597cab3f77a87ce4dd1d7698 | [
"MIT"
] | 1 | 2021-07-17T13:12:10.000Z | 2021-07-17T13:12:10.000Z | app/web/app.py | liephat/ai-image-eye | 583d099e9b46c222597cab3f77a87ce4dd1d7698 | [
"MIT"
] | 28 | 2020-12-11T21:10:05.000Z | 2021-08-05T21:00:13.000Z | app/web/app.py | liephat/ai-image-eye | 583d099e9b46c222597cab3f77a87ce4dd1d7698 | [
"MIT"
] | 1 | 2020-12-10T21:11:01.000Z | 2020-12-10T21:11:01.000Z | import logging
import os
from typing import Type, List, Optional
from flask import Flask, render_template
from flask.helpers import get_debug_flag
from flask_cors import CORS
from app.web.api import RestApi
from app.data.ops import ImageDataHandler
from app.web import EndpointBase
from app.web.routes.images import ImagesEndpoints
from app.web.util.filters import init_filters
logger = logging.getLogger(__name__)
class AppWrapper:
    """Builds and configures the Flask application for the image gallery."""
    APP_NAME = 'flask-image-gallery'
    @classmethod
    def get_asset_folder(cls, subfolder):
        # Resolve an asset folder relative to this module's directory.
        return os.path.join(os.path.dirname(__file__), subfolder)
    @classmethod
    def get_endpoint_classes(cls) -> List[Type[EndpointBase]]:
        # All endpoint groups registered on the app by _init_endpoints().
        return [ImagesEndpoints, RestApi]
    def __init__(self):
        self.app: Optional[Flask] = None
        self.debug = get_debug_flag()
    def init_flask_app(self):
        """Create the Flask app, wire CORS, filters and endpoints; return it."""
        assert self.app is None
        self.app = Flask(self.APP_NAME,
                         template_folder=self.get_asset_folder('templates'),
                         static_folder=self.get_asset_folder('static'))
        self.app.config['EXPLAIN_TEMPLATE_LOADING'] = False
        self.__enable_cors()
        init_filters(self.app)
        self._init_endpoints()
        logger.info('Starting up ... welcome to flask-image-gallery')
        logger.debug(f'\n{self.app.url_map}')
        return self.app
    def __enable_cors(self):
        # CORS is a debug-only convenience; production serves from one host.
        if not self.debug:
            # only enable CORS in a debug environment where the react-app
            # runs from a different host
            return
        CORS(self.app)
        self.app.config['CORS_HEADERS'] = 'Content-Type'
    def _init_endpoints(self):
        # Root route renders the gallery; all other routes come from the
        # endpoint classes listed in get_endpoint_classes().
        @self.app.route('/')
        def index():
            return render_template('index.html', images=ImageDataHandler.all_images())
        for endpoint_class in self.get_endpoint_classes():
            endpoint_class.init(self.app)
04dbc74b53159b843f4beb717f92bf151c023278 | 95 | py | Python | c15/p295_test158.py | pkingpeng/-python- | f7c3269b6c13edf31449a3f21c3314c27095c984 | [
"Apache-2.0"
] | null | null | null | c15/p295_test158.py | pkingpeng/-python- | f7c3269b6c13edf31449a3f21c3314c27095c984 | [
"Apache-2.0"
] | null | null | null | c15/p295_test158.py | pkingpeng/-python- | f7c3269b6c13edf31449a3f21c3314c27095c984 | [
"Apache-2.0"
] | null | null | null | import subprocess
# Open the system Calculator app (macOS path; spawned via the `open` command)
subprocess.Popen(['open', '/System/Applications/Calculator.app'])
| 19 | 65 | 0.768421 |
e57726bb217c1002ee4b817fb44a594c056363f7 | 426 | py | Python | project/app/models/base.py | rvaccari/sbgo | 8739d90842a5ecd9362e67647eb51f2bce38493d | [
"MIT"
] | null | null | null | project/app/models/base.py | rvaccari/sbgo | 8739d90842a5ecd9362e67647eb51f2bce38493d | [
"MIT"
] | null | null | null | project/app/models/base.py | rvaccari/sbgo | 8739d90842a5ecd9362e67647eb51f2bce38493d | [
"MIT"
] | null | null | null | from tortoise import models, fields
class BaseModel(models.Model):
"""Default tortoise model."""
id = fields.UUIDField(pk=True)
created_at = fields.DatetimeField(auto_now_add=True)
updated_at = fields.DatetimeField(auto_now=True)
class Meta:
"""Base model meta"""
abstract = True
ordering = [
"-created_at",
]
def __str__(self):
return self.id
| 20.285714 | 56 | 0.615023 |
cc8d5f115be468eb22a06c5acc94ce0cfb3ad6c3 | 757 | py | Python | mysite/forms.py | DishenMakwana/Sports-Academy-Info | c36e7ccd643869a72308c8af32d2611ca0484dbb | [
"Apache-2.0"
] | 3 | 2020-10-05T05:02:42.000Z | 2020-10-27T16:38:24.000Z | todolist/forms.py | DishenMakwana/TodoApp | c1e44dc37d317f3f81ecec359fc84757141ee67e | [
"MIT"
] | 2 | 2020-10-06T03:39:41.000Z | 2020-10-08T05:39:30.000Z | todolist/forms.py | DishenMakwana/TodoApp | c1e44dc37d317f3f81ecec359fc84757141ee67e | [
"MIT"
] | 6 | 2020-10-05T04:50:18.000Z | 2021-02-16T12:34:04.000Z | from django.contrib.auth.forms import UserCreationForm
from django import forms
from django.contrib.auth.models import User
class RegisterForm(UserCreationForm):
email = forms.EmailField(label="Email")
first_name = forms.CharField(label="First name")
last_name = forms.CharField(label="Last name")
class Meta:
model = User
fields = ("username", "email", "first_name", "last_name")
def save(self, commit=True):
user = super(RegisterForm, self).save(commit=False)
user.first_name = self.cleaned_data["first_name"]
user.last_name = self.cleaned_data["last_name"]
user.email = self.cleaned_data["email"]
if commit:
user.save()
return user
| 32.913043 | 66 | 0.652576 |
2f9249a712ec67697adcfcc3d61e8733af4e13f5 | 54,885 | py | Python | civictechprojects/views.py | modelearth/CivicTechExchange | d5ef098e3f6894a7a77987caf3123b7acf811570 | [
"MIT"
] | null | null | null | civictechprojects/views.py | modelearth/CivicTechExchange | d5ef098e3f6894a7a77987caf3123b7acf811570 | [
"MIT"
] | null | null | null | civictechprojects/views.py | modelearth/CivicTechExchange | d5ef098e3f6894a7a77987caf3123b7acf811570 | [
"MIT"
] | null | null | null | import re
from django.shortcuts import redirect
from django.http import HttpResponse, HttpResponseForbidden, JsonResponse
from django.core.exceptions import PermissionDenied
from django.core.paginator import Paginator
from django.conf import settings
from django.contrib import messages
from django.contrib.gis.geos import Point
from django.contrib.gis.measure import D
from django.template import loader
from django.utils import timezone
from django.views.decorators.clickjacking import xframe_options_exempt
from django.views.decorators.csrf import ensure_csrf_cookie
from time import time
from urllib import parse as urlparse
import simplejson as json
from django.views.decorators.csrf import csrf_exempt
from django.db.models import Q
from .models import FileCategory, Project, ProjectFile, ProjectPosition, UserAlert, VolunteerRelation, Group, Event, \
ProjectRelationship, Testimonial, ProjectFavorite
from .sitemaps import SitemapPages
from .caching.cache import ProjectSearchTagsCache
from common.caching.cache import Cache
from common.helpers.collections import flatten, count_occurrences
from common.helpers.db import unique_column_values
from common.helpers.s3 import presign_s3_upload, user_has_permission_for_s3_file, delete_s3_file
from common.helpers.tags import get_tags_by_category,get_tag_dictionary
from common.helpers.form_helpers import is_co_owner_or_staff, is_co_owner, is_co_owner_or_owner, is_creator_or_staff, is_creator
from .forms import ProjectCreationForm, EventCreationForm, GroupCreationForm
from common.helpers.qiqo_chat import get_user_qiqo_iframe
from democracylab.models import Contributor, get_request_contributor
from common.models.tags import Tag
from common.helpers.constants import FrontEndSection, TagCategory
from democracylab.emails import send_to_project_owners, send_to_project_volunteer, HtmlEmailTemplate, send_volunteer_application_email, \
send_volunteer_conclude_email, notify_project_owners_volunteer_renewed_email, notify_project_owners_volunteer_concluded_email, \
notify_project_owners_project_approved, contact_democracylab_email, send_to_group_owners, send_group_project_invitation_email, \
notify_group_owners_group_approved, notify_event_owners_event_approved
from civictechprojects.helpers.context_preload import context_preload
from civictechprojects.helpers.projects.annotations import apply_project_annotations
from common.helpers.front_end import section_url, get_page_section, get_clean_url, redirect_from_deprecated_url
from common.helpers.redirectors import redirect_by, InvalidArgumentsRedirector, DirtyUrlsRedirector, DeprecatedUrlsRedirector
from common.helpers.user_helpers import get_my_projects, get_my_groups, get_my_events, get_user_context
from django.views.decorators.cache import cache_page
from rest_framework.decorators import api_view, throttle_classes
from rest_framework.throttling import AnonRateThrottle, UserRateThrottle
import requests
def get_tag_counts(category=None, event=None, group=None):
    """Hydrate every tag (optionally limited to one category), annotating
    each with its usage count from the project-search tag cache, scoped to
    an event or group when given."""
    queryset = get_tags_by_category(category) if category is not None else Tag.objects.all()
    activetagdict = ProjectSearchTagsCache.get(event=event, group=group)
    querydict = {tag.tag_name: tag for tag in queryset}
    resultdict = {}
    for slug in querydict.keys():
        resultdict[slug] = Tag.hydrate_tag_model(querydict[slug])
        # Tags absent from the cache have simply never been used.
        resultdict[slug]['num_times'] = activetagdict[slug] if slug in activetagdict else 0
    return list(resultdict.values())
def tags(request):
    """Return all tags, optionally filtered by ?category=, as a JSON list."""
    url_parts = request.GET.urlencode()
    query_terms = urlparse.parse_qs(url_parts, keep_blank_values=0, strict_parsing=0)
    category = query_terms.get('category')[0] if 'category' in query_terms else None
    queryset = get_tags_by_category(category) if category is not None else Tag.objects.all()
    tags_result = list(map(lambda tag: Tag.hydrate_tag_model(tag), queryset))
    return JsonResponse(tags_result, safe=False)
@cache_page(1200) #cache duration in seconds, cache_page docs: https://docs.djangoproject.com/en/2.1/topics/cache/#the-per-view-cache
def group_tags_counts(request):
    """Return hydrated issue-area tags with counts of how many groups'
    projects use each, as a JSON list."""
    # Get all groups
    all_groups = Group.objects.all()
    # Get Groups issue areas
    group_issues = list(map(lambda group: group.get_project_issue_areas(with_counts=False), all_groups))
    # Count up instances of tags
    group_issues_counts = count_occurrences(flatten(group_issues))
    issue_tags = {}
    for issue_tag in group_issues_counts.keys():
        issue_tags[issue_tag] = Tag.hydrate_tag_model(Tag.get_by_name(issue_tag))
        issue_tags[issue_tag]['num_times'] = group_issues_counts[issue_tag]
    return JsonResponse(list(issue_tags.values()), safe=False)
def to_rows(items, width):
    """Arrange *items* into rows of at most *width* elements each.

    Note: intentionally mirrors the historical behavior of leaving a
    trailing empty row when len(items) is an exact multiple of width.
    """
    rows = [[]]
    fill = 0
    for item in items:
        rows[-1].append(item)
        fill += 1
        # Start a fresh row once the current one is full.
        if fill >= width:
            fill = 0
            rows.append([])
    return rows
def to_tag_map(tags):
    """Map tag models to a list of (tag_name, display_name) pairs."""
    return [(tag.tag_name, tag.display_name) for tag in tags]
# TODO: Pass csrf token in ajax call so we can check for it
@csrf_exempt
def group_create(request):
    """Create a new Group from the request payload; returns its JSON.
    Requires an authenticated user with a verified email."""
    if not request.user.is_authenticated:
        return redirect(section_url(FrontEndSection.LogIn))
    user = get_request_contributor(request)
    if not user.email_verified:
        # TODO: Log this
        return HttpResponse(status=403)
    group = GroupCreationForm.create_or_edit_group(request, None)
    return JsonResponse(group.hydrate_to_json())
def group_edit(request, group_id):
    """Edit an existing Group; JSON response for ajax calls, redirect to
    the group page otherwise. 403 when the user lacks permission."""
    if not request.user.is_authenticated:
        return redirect('/signup')
    group = None
    try:
        group = GroupCreationForm.create_or_edit_group(request, group_id)
    except PermissionDenied:
        return HttpResponseForbidden()
    if request.is_ajax():
        return JsonResponse(group.hydrate_to_json())
    else:
        return redirect(section_url(FrontEndSection.AboutGroup, {'id': group_id}))
# TODO: Pass csrf token in ajax call so we can check for it
@csrf_exempt
def group_delete(request, group_id):
    """Delete a Group; 401 when anonymous, 403 when not permitted,
    204 on success."""
    # if not logged in, send user to login page
    if not request.user.is_authenticated:
        return HttpResponse(status=401)
    try:
        GroupCreationForm.delete_group(request, group_id)
    except PermissionDenied:
        return HttpResponseForbidden()
    return HttpResponse(status=204)
def get_group(request, group_id):
    """Return a Group as JSON; non-searchable groups are visible only to
    their creator or staff."""
    # NOTE(review): Group.objects.get raises DoesNotExist rather than
    # returning None, so the 404 branch below looks unreachable — confirm.
    group = Group.objects.get(id=group_id)
    if group is not None:
        if group.is_searchable or is_creator_or_staff(get_request_contributor(request), group):
            return JsonResponse(group.hydrate_to_json())
        else:
            return HttpResponseForbidden()
    else:
        return HttpResponse(status=404)
def approve_group(request, group_id):
    """Staff-only: mark a group searchable, refresh its tag cache, notify
    its owners, then redirect to the group page."""
    # NOTE(review): .get raises DoesNotExist rather than returning None, so
    # the 404 branch looks unreachable — confirm.
    group = Group.objects.get(id=group_id)
    user = get_request_contributor(request)
    if group is not None:
        if user.is_staff:
            group.is_searchable = True
            group.save()
            # SitemapPages.update()
            ProjectSearchTagsCache.refresh(event=None, group=group)
            notify_group_owners_group_approved(group)
            messages.success(request, 'Group Approved')
            group.group_creator.purge_cache()
            return redirect(section_url(FrontEndSection.AboutGroup, {'id': str(group.id)}))
        else:
            return HttpResponseForbidden()
    else:
        return HttpResponse(status=404)
# TODO: Pass csrf token in ajax call so we can check for it
@csrf_exempt
def event_create(request):
    """Create a new Event from the request payload; returns its JSON.
    Requires an authenticated user with a verified email."""
    if not request.user.is_authenticated:
        return redirect(section_url(FrontEndSection.LogIn))
    user = get_request_contributor(request)
    if not user.email_verified:
        # TODO: Log this
        return HttpResponse(status=403)
    event = None
    try:
        event = EventCreationForm.create_or_edit_event(request, None)
    except PermissionDenied:
        return HttpResponseForbidden()
    return JsonResponse(event.hydrate_to_json())
def event_edit(request, event_id):
    """Edit an existing Event; JSON response for ajax calls, redirect to
    the event page otherwise. 403 when the user lacks permission."""
    if not request.user.is_authenticated:
        return redirect('/signup')
    event = None
    try:
        event = EventCreationForm.create_or_edit_event(request, event_id)
    except PermissionDenied:
        return HttpResponseForbidden()
    if request.is_ajax():
        return JsonResponse(event.hydrate_to_json())
    else:
        return redirect(section_url(FrontEndSection.AboutEvent, {'id': event_id}))
# TODO: Pass csrf token in ajax call so we can check for it
@csrf_exempt
def event_delete(request, event_id):
    """Delete an Event; 401 when anonymous, 403 when not permitted,
    204 on success."""
    # if not logged in, send user to login page
    if not request.user.is_authenticated:
        return HttpResponse(status=401)
    try:
        EventCreationForm.delete_event(request, event_id)
    except PermissionDenied:
        return HttpResponseForbidden()
    return HttpResponse(status=204)
def get_event(request, event_id):
    """Return an Event (by numeric id or slug) as JSON; loading a private
    event by numeric id is restricted to its creator or staff."""
    try:
        event = Event.get_by_id_or_slug(event_id)
        if event_id.isnumeric() and event.is_private and not is_creator_or_staff(get_request_contributor(request), event):
            # Don't let non-admins/non-owners load a private event by numeric id
            raise PermissionDenied()
    except PermissionDenied:
        return HttpResponseForbidden()
    return JsonResponse(event.hydrate_to_json()) if event else HttpResponse(status=404)
# TODO: Pass csrf token in ajax call so we can check for it
@csrf_exempt
def project_create(request):
    """Create a new Project from the request payload; returns its JSON.
    Requires an authenticated user with a verified email."""
    if not request.user.is_authenticated:
        return redirect(section_url(FrontEndSection.LogIn))
    user = get_request_contributor(request)
    if not user.email_verified:
        # TODO: Log this
        return HttpResponse(status=403)
    project = ProjectCreationForm.create_or_edit_project(request, None)
    return JsonResponse(project.hydrate_to_json())
def project_edit(request, project_id):
    """Edit an existing Project; JSON response for ajax calls, redirect to
    the project page otherwise. 403 when the user lacks permission."""
    if not request.user.is_authenticated:
        return redirect('/signup')
    try:
        project = ProjectCreationForm.create_or_edit_project(request, project_id)
        # TODO:
        # update_cached_project_url(project_id)
    except PermissionDenied:
        return HttpResponseForbidden()
    if request.is_ajax():
        return JsonResponse(project.hydrate_to_json())
    else:
        return redirect(section_url(FrontEndSection.AboutProject, {'id': project_id}))
# TODO: Pass csrf token in ajax call so we can check for it
@csrf_exempt
def project_delete(request, project_id):
    """Delete a Project; 401 when anonymous, 403 when not permitted,
    204 on success."""
    # if not logged in, send user to login page
    if not request.user.is_authenticated:
        return HttpResponse(status=401)
    try:
        ProjectCreationForm.delete_project(request, project_id)
    except PermissionDenied:
        return HttpResponseForbidden()
    return HttpResponse(status=204)
def get_project(request, project_id):
    """Return a Project as JSON (volunteer list stripped); non-searchable
    projects are visible only to co-owners or staff."""
    # NOTE(review): .get raises DoesNotExist rather than returning None, so
    # the 404 branch looks unreachable — confirm.
    project = Project.objects.get(id=project_id)
    if project is not None:
        if project.is_searchable or is_co_owner_or_staff(get_request_contributor(request), project):
            hydrated_project = project.hydrate_to_json()
            # Volunteer details are not exposed through this endpoint.
            del hydrated_project['project_volunteers']
            return JsonResponse(hydrated_project, safe=False)
        else:
            return HttpResponseForbidden()
    else:
        return HttpResponse(status=404)
def approve_project(request, project_id):
    """Staff-only: mark a project searchable, refresh caches and the
    sitemap, notify owners, then redirect to the project page."""
    # NOTE(review): .get raises DoesNotExist rather than returning None, so
    # the 404 branch looks unreachable — confirm.
    project = Project.objects.get(id=project_id)
    user = get_request_contributor(request)
    if project is not None:
        if user.is_staff:
            project.is_searchable = True
            project.save()
            project.recache(recache_linked=True)
            ProjectSearchTagsCache.refresh()
            project.project_creator.purge_cache()
            SitemapPages.update()
            notify_project_owners_project_approved(project)
            messages.success(request, 'Project Approved')
            return redirect(section_url(FrontEndSection.AboutProject, {'id': project_id}))
        else:
            return HttpResponseForbidden()
    else:
        return HttpResponse(status=404)
def approve_event(request, event_id):
    """Staff-only: mark an event searchable, notify owners, refresh linked
    items, then redirect to the event page."""
    # NOTE(review): .get raises DoesNotExist rather than returning None, so
    # the 404 branch looks unreachable — confirm.
    event = Event.objects.get(id=event_id)
    user = get_request_contributor(request)
    if event is not None:
        if user.is_staff:
            event.is_searchable = True
            event.save()
            notify_event_owners_event_approved(event)
            event.update_linked_items()
            event.event_creator.purge_cache()
            messages.success(request, 'Event Approved')
            return redirect(section_url(FrontEndSection.AboutEvent, {'id': str(event.id)}))
        else:
            return HttpResponseForbidden()
    else:
        return HttpResponse(status=404)
@ensure_csrf_cookie
@xframe_options_exempt
@api_view()
@throttle_classes([AnonRateThrottle, UserRateThrottle])
def index(request, id='Unused but needed for routing purposes; do not remove!'):
    """Serve the single-page-app shell for every front-end section.

    Handles redirects (missing user details, malformed/deprecated URLs),
    then assembles the template context: settings-driven config, optional
    analytics snippets, and per-user data when authenticated."""
    page = get_page_section(request.get_full_path())
    # TODO: Add to redirectors.py
    # Redirect to AddUserDetails page if First/Last name hasn't been entered yet
    if page not in [FrontEndSection.AddUserDetails.value, FrontEndSection.SignUp.value] \
            and request.user.is_authenticated and \
            (not request.user.first_name or not request.user.last_name):
        from allauth.socialaccount.models import SocialAccount
        account = SocialAccount.objects.filter(user=request.user).first()
        return redirect(section_url(FrontEndSection.AddUserDetails, {'provider': account.provider}))
    redirect_result = redirect_by([InvalidArgumentsRedirector, DirtyUrlsRedirector, DeprecatedUrlsRedirector], request.get_full_path())
    if redirect_result is not None:
        return redirect(redirect_result)
    template = loader.get_template('new_index.html')
    # Base context available to every page, sourced from Django settings.
    context = {
        'DLAB_PROJECT_ID': settings.DLAB_PROJECT_ID or '',
        'PROJECT_DESCRIPTION_EXAMPLE_URL': settings.PROJECT_DESCRIPTION_EXAMPLE_URL,
        'POSITION_DESCRIPTION_EXAMPLE_URL': settings.POSITION_DESCRIPTION_EXAMPLE_URL,
        'STATIC_CDN_URL': settings.STATIC_CDN_URL,
        'HEADER_ALERT': settings.HEADER_ALERT,
        'SPONSORS_METADATA': settings.SPONSORS_METADATA,
        'userImgUrl' : '',
        'PAYPAL_ENDPOINT': settings.PAYPAL_ENDPOINT,
        'PAYPAL_PAYEE': settings.PAYPAL_PAYEE,
        'PRESS_LINKS': settings.PRESS_LINKS,
        'organizationSnippet': loader.render_to_string('scripts/org_snippet.txt'),
        'GR_SITEKEY': settings.GR_SITEKEY,
        'FAVICON_PATH': settings.FAVICON_PATH,
        'BLOG_URL': settings.BLOG_URL,
        'EVENT_URL': settings.EVENT_URL,
        'PRIVACY_POLICY_URL': settings.PRIVACY_POLICY_URL,
        'DONATE_PAGE_BLURB': settings.DONATE_PAGE_BLURB,
        'HEAP_ANALYTICS_ID': settings.HEAP_ANALYTICS_ID
    }
    # Optional third-party analytics snippets, rendered only when configured.
    if settings.HOTJAR_APPLICATION_ID:
        context['hotjarScript'] = loader.render_to_string('scripts/hotjar_snippet.txt',
                                                          {'HOTJAR_APPLICATION_ID': settings.HOTJAR_APPLICATION_ID})
    GOOGLE_CONVERSION_ID = None
    context = context_preload(page, request, context)
    if page and settings.GOOGLE_CONVERSION_IDS and page in settings.GOOGLE_CONVERSION_IDS:
        GOOGLE_CONVERSION_ID = settings.GOOGLE_CONVERSION_IDS[page]
    if settings.GOOGLE_PROPERTY_ID:
        context['googleScript'] = loader.render_to_string('scripts/google_snippet.txt',
                                                          {
                                                              'GOOGLE_PROPERTY_ID': settings.GOOGLE_PROPERTY_ID,
                                                              'GOOGLE_ADS_ID': settings.GOOGLE_ADS_ID,
                                                              'GOOGLE_CONVERSION_ID': GOOGLE_CONVERSION_ID
                                                          })
    if settings.GOOGLE_TAGS_ID:
        google_tag_context = {'GOOGLE_TAGS_ID': settings.GOOGLE_TAGS_ID}
        context['googleTagsHeadScript'] = loader.render_to_string('scripts/google_tag_manager_snippet_head.txt', google_tag_context)
        context['googleTagsBodyScript'] = loader.render_to_string('scripts/google_tag_manager_snippet_body.txt', google_tag_context)
    if hasattr(settings, 'SOCIAL_APPS_VISIBILITY'):
        context['SOCIAL_APPS_VISIBILITY'] = json.dumps(settings.SOCIAL_APPS_VISIBILITY)
    if hasattr(settings, 'HERE_CONFIG'):
        context['HERE_CONFIG'] = settings.HERE_CONFIG
    if hasattr(settings, 'GHOST_URL'):
        context['GHOST_URL'] = settings.GHOST_URL
        context['GHOST_CONTENT_API_KEY'] = settings.GHOST_CONTENT_API_KEY
    # Per-user context: profile details, verification state, avatar.
    if request.user.is_authenticated:
        contributor = Contributor.objects.get(id=request.user.id)
        context['userContext'] = json.dumps(get_user_context(contributor))
        context['userID'] = request.user.id
        context['emailVerified'] = contributor.email_verified
        context['email'] = contributor.email
        context['firstName'] = contributor.first_name
        context['lastName'] = contributor.last_name
        context['isStaff'] = contributor.is_staff
        context['volunteeringUpForRenewal'] = contributor.is_up_for_volunteering_renewal()
        context['QIQO_IFRAME_URL'] = get_user_qiqo_iframe(contributor)
        thumbnail = ProjectFile.objects.filter(file_user=request.user.id,
                                               file_category=FileCategory.THUMBNAIL.value).first()
        if thumbnail:
            context['userImgUrl'] = thumbnail.file_url
    else:
        context['userContext'] = '{}'
    return HttpResponse(template.render(context, request))
def get_site_stats(request):
    """Return site-wide counts (projects, users, volunteers) as JSON."""
    active_volunteers = VolunteerRelation.objects.filter(deleted=False)
    stats = {
        'projectCount': Project.objects.filter(is_searchable=True, deleted=False).count(),
        'userCount': Contributor.objects.filter(is_active=True).count(),
        'activeVolunteerCount': active_volunteers.distinct('volunteer__id').count(),
        'dlVolunteerCount': active_volunteers.filter(is_approved=True, project__id=settings.DLAB_PROJECT_ID).count()
    }
    return JsonResponse(stats)
# TODO: Pass csrf token in ajax call so we can check for it
@csrf_exempt
def add_alert(request):
    """Create or update a UserAlert subscription from the JSON body.

    NOTE(review): missing body keys raise KeyError (500) — confirm the
    front end always supplies email/filters/country/postal_code."""
    body = json.loads(request.body)
    UserAlert.create_or_update(
        email=body['email'], filters=body['filters'], country=body['country'], postal_code=body['postal_code'])
    return HttpResponse(status=200)
def projects_list(request):
    """Search endpoint for projects: scope by group/event, apply tag,
    keyword and location filters, sort, paginate, and return JSON with
    tag counts and paging metadata."""
    url_parts = request.GET.urlencode()
    query_params = urlparse.parse_qs(url_parts, keep_blank_values=0, strict_parsing=0)
    event = None
    group = None
    # Scope the base queryset: a group's approved projects, an event's
    # linked projects, or all searchable projects site-wide.
    if 'group_id' in query_params:
        group_id = query_params['group_id'][0]
        group = Group.objects.get(id=group_id)
        project_list = group.get_group_projects(approved_only=True)
    elif 'event_id' in query_params:
        event_id = query_params['event_id'][0]
        event = Event.get_by_id_or_slug(event_id)
        project_list = event.get_linked_projects().filter(is_searchable=True)
    else:
        project_list = Project.objects.filter(is_searchable=True)
    # Each tag-category filter narrows the queryset via intersection.
    project_list = apply_tag_filters(project_list, query_params, 'issues', projects_by_issue_areas)
    project_list = apply_tag_filters(project_list, query_params, 'tech', projects_by_technologies)
    project_list = apply_tag_filters(project_list, query_params, 'role', projects_by_roles)
    project_list = apply_tag_filters(project_list, query_params, 'org', projects_by_orgs)
    project_list = apply_tag_filters(project_list, query_params, 'orgType', projects_by_org_types)
    project_list = apply_tag_filters(project_list, query_params, 'stage', projects_by_stage)
    if 'favoritesOnly' in query_params:
        user = get_request_contributor(request)
        project_list = project_list & ProjectFavorite.get_for_user(user)
    if 'keyword' in query_params:
        project_list = project_list & projects_by_keyword(query_params['keyword'][0])
    if 'locationRadius' in query_params:
        project_list = projects_by_location(project_list, query_params['locationRadius'][0])
    if 'location' in query_params:
        project_list = projects_by_legacy_city(project_list, query_params['location'][0])
    project_list = project_list.distinct()
    # Default sort is most-recently-modified first.
    if 'sortField' in query_params:
        project_list = projects_by_sortField(project_list, query_params['sortField'][0])
    else:
        project_list = projects_by_sortField(project_list, '-project_date_modified')
    project_count = len(project_list)
    project_paginator = Paginator(project_list, settings.PROJECTS_PER_PAGE)
    # Without an explicit page, the full (unpaginated) list is returned.
    if 'page' in query_params:
        project_list_page = project_paginator.page(query_params['page'][0])
        project_pages = project_paginator.num_pages
    else:
        project_list_page = project_list
        project_pages = 1
    tag_counts = get_tag_counts(category=None, event=event, group=group)
    response = projects_with_meta_data(query_params, project_list_page, project_pages, project_count, tag_counts)
    return JsonResponse(response)
def recent_projects_list(request):
    """Return the most recently modified searchable projects as tile JSON.

    ?count= controls how many (default 3). NOTE(review): non-GET requests
    fall through and return None — confirm routing only allows GET."""
    if request.method == 'GET':
        url_parts = request.GET.urlencode()
        query_params = urlparse.parse_qs(url_parts, keep_blank_values=0, strict_parsing=0)
        project_count = int(query_params['count'][0]) if 'count' in query_params else 3
        project_list = Project.objects.filter(is_searchable=True)
        # Filter out the DemocracyLab project
        if settings.DLAB_PROJECT_ID.isdigit():
            project_list = project_list.exclude(id=int(settings.DLAB_PROJECT_ID))
        project_list = projects_by_sortField(project_list, '-project_date_modified')[:project_count]
        hydrated_project_list = list(project.hydrate_to_tile_json() for project in project_list)
        return JsonResponse({'projects': hydrated_project_list})
def limited_listings(request):
    """Summarizes current positions in a format specified by the LinkedIn "Limited Listings" feature."""
    def cdata(str):
        # Using CDATA tags (and escaping the close sequence) protects us from XSS attacks when
        # displaying user provided string values.
        return f"<![CDATA[{str.replace(']]>', ']]]]><![CDATA[>')}]]>"

    def position_to_job(position):
        # Render a single ProjectPosition as a LinkedIn <job> XML fragment.
        # Every user-provided value is routed through cdata() above.
        project = position.position_project
        roleTag = Tag.get_by_name(position.position_role.first().slug)
        return f"""
<job>
<company>{cdata(project.project_name)}</company>
<title>{cdata(roleTag.display_name)}</title>
<description>{cdata(position.position_description)}</description>
<partnerJobId>{cdata(str(position.id))}</partnerJobId>
<location>{cdata(", ".join([project.project_city, project.project_state]) if (project.project_city and project.project_state) else "")}</location>
<city>{cdata(project.project_city)}</city>
<state>{cdata(project.project_state)}</state>
<country>{cdata(project.project_country)}</country>
<applyUrl>{cdata(position.description_url or project.project_url)}</applyUrl>
<industryCodes><industryCode>{cdata("4")}</industryCode></industryCodes>
</job>
"""

    # NOTE(review): variable name says "projects" but this is every position on a
    # searchable project — confirm intent.
    approved_projects = ProjectPosition.objects.filter(position_project__is_searchable=True)
    xml_response = f"""<?xml version="1.0" encoding="UTF-8"?>
<source>
<lastBuildDate>{timezone.now().strftime('%a, %d %b %Y %H:%M:%S %Z')}</lastBuildDate>
<publisherUrl>https://www.democracylab.org</publisherUrl>
<publisher>DemocracyLab</publisher>
{"".join(map(position_to_job, approved_projects))}
</source>"""
    return HttpResponse(xml_response, content_type="application/xml")
def apply_tag_filters(project_list, query_params, param_name, tag_filter):
    """Intersect project_list with tag_filter(tags) when param_name appears in query_params.

    Tag names that don't exist in the tag dictionary are dropped; when none
    survive, the queryset is returned unchanged.
    """
    if param_name not in query_params:
        return project_list
    tag_dict = get_tag_dictionary()
    requested_tags = query_params[param_name][0].split(',')
    valid_tags = clean_nonexistent_tags(requested_tags, tag_dict)
    if len(valid_tags):
        project_list = project_list & tag_filter(valid_tags)
    return project_list
def clean_nonexistent_tags(tags, tag_dict):
    """Return only the tags present as keys in tag_dict, preserving input order."""
    return [tag for tag in tags if tag in tag_dict]
def projects_by_keyword(keyword):
    """Case-insensitive substring match against the project's full_text field."""
    return Project.objects.filter(full_text__icontains=keyword)
# TODO: Rename to something generic
def projects_by_sortField(project_list, sortField):
    """Order any queryset by the given field (also used for groups, despite the name)."""
    return project_list.order_by(sortField)
def projects_by_location(project_list, param):
    """Filter projects to those within a radius of a point.

    param format: "<latitude>,<longitude>,<radius_in_miles>".
    """
    param_parts = param.split(',')
    # GeoDjango Point takes (x, y) == (longitude, latitude), hence the swapped indices.
    location = Point(float(param_parts[1]), float(param_parts[0]))
    radius = float(param_parts[2])
    project_list = project_list.filter(project_location_coords__distance_lte=(location, D(mi=radius)))
    return project_list
def projects_by_legacy_city(project_list, param):
    """Filter by a legacy "City, State" location string; no-op when the state part is missing."""
    city_state = param.split(', ')
    if len(city_state) > 1:
        return project_list.filter(project_city=city_state[0], project_state=city_state[1])
    return project_list
def projects_by_issue_areas(tags):
    """Projects tagged with any of the given issue-area tag names."""
    return Project.objects.filter(project_issue_area__name__in=tags)
def projects_by_technologies(tags):
    """Projects tagged with any of the given technology tag names."""
    return Project.objects.filter(project_technologies__name__in=tags)
def projects_by_orgs(tags):
    """Projects tagged with any of the given organization tag names."""
    return Project.objects.filter(project_organization__name__in=tags)
def projects_by_org_types(tags):
    """Projects tagged with any of the given organization-type tag names."""
    return Project.objects.filter(project_organization_type__name__in=tags)
def projects_by_stage(tags):
    """Projects tagged with any of the given project-stage tag names."""
    return Project.objects.filter(project_stage__name__in=tags)
def projects_by_roles(tags):
    """Projects that have an open position matching any of the given role tag names."""
    # Get roles by tags
    positions = ProjectPosition.objects.filter(position_role__name__in=tags).select_related('position_project')
    # Get the list of projects linked to those roles
    return Project.objects.filter(positions__in=positions)
def project_countries():
    """Distinct two-letter country codes present across all projects."""
    return unique_column_values(Project, 'project_country', lambda country: country and len(country) == 2)
def projects_with_meta_data(query_params, projects, project_pages, project_count, tag_counts):
    """Bundle a page of project tiles with pagination counts, tag counts and the country list."""
    projects_json = apply_project_annotations(query_params, [project.hydrate_to_tile_json() for project in projects])
    return {
        'projects': projects_json,
        'availableCountries': project_countries(),
        'tags': tag_counts,
        'numPages': project_pages,
        'numProjects': project_count
    }
# TODO: Move group search code into new file
def groups_list(request):
    """Search searchable groups by issue tags, keyword and location, with sorting and pagination."""
    url_parts = request.GET.urlencode()
    query_params = urlparse.parse_qs(url_parts, keep_blank_values=0, strict_parsing=0)
    group_list = Group.objects.filter(is_searchable=True)
    if request.method == 'GET':
        # NOTE(review): apply_tag_filters already intersects its result with group_list,
        # so the extra `group_list &` here looks redundant (though harmless) — confirm.
        group_list = group_list & apply_tag_filters(group_list, query_params, 'issues', groups_by_issue_areas)
        if 'keyword' in query_params:
            group_list = group_list & groups_by_keyword(query_params['keyword'][0])
        if 'locationRadius' in query_params:
            group_list = groups_by_location(group_list, query_params['locationRadius'][0])
        group_list = group_list.distinct()
        if 'sortField' in query_params:
            group_list = projects_by_sortField(group_list, query_params['sortField'][0])
        else:
            # Default ordering: alphabetical by group name.
            group_list = projects_by_sortField(group_list, 'group_name')
        group_count = len(group_list)
        group_paginator = Paginator(group_list, settings.PROJECTS_PER_PAGE)
        if 'page' in query_params:
            group_list_page = group_paginator.page(query_params['page'][0])
            group_pages = group_paginator.num_pages
        else:
            # No page requested: return the full (unpaginated) result set.
            group_list_page = group_list
            group_pages = 1
        response = groups_with_meta_data(group_list_page, group_pages, group_count)
        return JsonResponse(response)
def groups_by_keyword(keyword):
    """Case-insensitive keyword match against group name, short description and description."""
    return Group.objects.filter(Q(group_name__icontains=keyword)
                                | Q(group_short_description__icontains=keyword)
                                | Q(group_description__icontains=keyword))
def groups_by_location(group_list, param):
    """Filter groups to those within a radius of a point.

    param format: "<latitude>,<longitude>,<radius_in_miles>".
    """
    param_parts = param.split(',')
    # GeoDjango Point takes (x, y) == (longitude, latitude), hence the swapped indices.
    location = Point(float(param_parts[1]), float(param_parts[0]))
    radius = float(param_parts[2])
    group_list = group_list.filter(group_location_coords__distance_lte=(location, D(mi=radius)))
    return group_list
def groups_by_issue_areas(issues):
    """Groups having at least one member project tagged with any of the given issue areas."""
    group_relationships = ProjectRelationship.objects.exclude(relationship_group=None)\
        .filter(relationship_project__project_issue_area__name__in=issues)
    matching_group_ids = [relationship.relationship_group.id for relationship in group_relationships]
    return Group.objects.filter(id__in=matching_group_ids)
def groups_with_meta_data(groups, group_pages, group_count):
    """Bundle a page of group tiles with pagination counts, issue tags and the country list."""
    return {
        'groups': [group.hydrate_to_tile_json() for group in groups],
        'availableCountries': group_countries(),
        'tags': list(Tag.objects.filter(category=TagCategory.ISSUE_ADDRESSED.value).values()),
        'numPages': group_pages,
        'numGroups': group_count
    }
def group_countries():
    """Distinct two-letter country codes present across all groups."""
    return unique_column_values(Group, 'group_country', lambda country: country and len(country) == 2)
def events_list(request):
    """Return all finalized, searchable, non-private events as tile JSON."""
    visible_events = Event.objects.filter(is_created=True, is_searchable=True, is_private=False)
    event_tiles = [visible_event.hydrate_to_tile_json() for visible_event in visible_events]
    return JsonResponse({'events': event_tiles})
def presign_project_thumbnail_upload(request):
    """Generate a presigned, public-read S3 upload for a project thumbnail.

    The key is namespaced by the uploader's username and made unique with a
    timestamp; the client-supplied file name is truncated to 150 characters.
    """
    uploader = request.user.username
    file_name = request.GET['file_name'][:150]
    file_type = request.GET['file_type']
    file_extension = file_type.split('/')[-1]
    unique_file_name = '{}_{}'.format(file_name, time())
    s3_key = 'thumbnails/{}/{}.{}'.format(uploader, unique_file_name, file_extension)
    return presign_s3_upload(
        raw_key=s3_key, file_name=file_name, file_type=file_type, acl="public-read")
# TODO: Replace with is_co_owner_or_owner
def volunteer_operation_is_authorized(request, volunteer_relation):
    """True when the requesting user is the project's creator or one of its co-owners."""
    project = volunteer_relation.project
    project_volunteers = VolunteerRelation.objects.filter(project=project)
    co_owner_names = [relation.volunteer.username
                      for relation in project_volunteers if relation.is_co_owner]
    authorized_usernames = [project.project_creator.username] + co_owner_names
    return request.user.username in authorized_usernames
# TODO: Pass csrf token in ajax call so we can check for it
@csrf_exempt
def delete_uploaded_file(request, s3_key):
    """Delete an uploaded S3 object when the requesting user owns it.

    Returns 202 on success, 401 when the user lacks permission for the key.
    """
    uploader = request.user.username
    if user_has_permission_for_s3_file(uploader, s3_key):
        delete_s3_file(s3_key)
        return HttpResponse(status=202)
    # TODO: Log this
    return HttpResponse(status=401)
def get_project_volunteers(request, project_id):
    """Return the volunteer roster for a project.

    Returns 404 when the project does not exist and 403 when the project is
    hidden and the requester is not a co-owner or staff.
    """
    # Project.objects.get raises DoesNotExist instead of returning None, so the
    # previous `if project is not None` check made the 404 branch unreachable
    # (a missing project surfaced as a 500). Catch the exception explicitly.
    try:
        project = Project.objects.get(id=project_id)
    except Project.DoesNotExist:
        return HttpResponse(status=404)
    if project.is_searchable or is_co_owner_or_staff(get_request_contributor(request), project):
        data = {
            'project_id': project_id,
            'project_volunteers': project.hydrate_to_json()['project_volunteers']
        }
        return JsonResponse(data, safe=False)
    return HttpResponseForbidden()
# TODO: Pass csrf token in ajax call so we can check for it
@csrf_exempt
def contact_project_owner(request, project_id):
    """Relay a message from a logged-in, email-verified user to a project's owners.

    Returns 401 when unauthenticated, 403 when the sender's email is unverified.
    """
    if not request.user.is_authenticated:
        return HttpResponse(status=401)
    user = get_request_contributor(request)
    if not user.email_verified:
        return HttpResponse(status=403)
    body = json.loads(request.body)
    message = body['message']
    project = Project.objects.get(id=project_id)
    email_subject = '{firstname} {lastname} would like to connect with {project}'.format(
        firstname=user.first_name,
        lastname=user.last_name,
        project=project.project_name)
    # The email quotes the message and includes the sender's address for replies.
    email_template = HtmlEmailTemplate(use_signature=False)\
        .paragraph('\"{message}\" - {firstname} {lastname}'.format(
            message=message,
            firstname=user.first_name,
            lastname=user.last_name))\
        .paragraph('To contact this person, email them at {email}'.format(email=user.email))
    send_to_project_owners(project=project, sender=user, subject=email_subject, template=email_template)
    return HttpResponse(status=200)
# TODO: Pass csrf token in ajax call so we can check for it
@csrf_exempt
def contact_project_volunteers(request, project_id):
    """Email every volunteer on a project on behalf of a verified owner/co-owner.

    Returns 401 when unauthenticated, 403 when the sender is unverified or not
    an owner/co-owner of the project.
    """
    if not request.user.is_authenticated:
        return HttpResponse(status=401)
    user = get_request_contributor(request)
    body = json.loads(request.body)
    subject = body['subject']
    message = body['message']
    project = Project.objects.get(id=project_id)
    if not user.email_verified or not is_co_owner_or_owner(user, project):
        return HttpResponse(status=403)
    volunteers = VolunteerRelation.get_by_project(project)
    email_subject = '{project}: {subject}'.format(
        project=project.project_name,
        subject=subject)
    email_template = HtmlEmailTemplate(use_signature=False) \
        .paragraph('\"{message}\" - {firstname} {lastname}'.format(
            message=message,
            firstname=user.first_name,
            lastname=user.last_name)) \
        .paragraph('To reply, email at {email}'.format(email=user.email))
    for volunteer in volunteers:
        # TODO: See if we can send emails in a batch
        # https://docs.djangoproject.com/en/2.2/topics/email/#topics-sending-multiple-emails
        send_to_project_volunteer(volunteer, email_subject, email_template)
    return HttpResponse(status=200)
# TODO: Pass csrf token in ajax call so we can check for it
@csrf_exempt
def contact_project_volunteer(request, application_id):
    """Email a single volunteer (looked up by application id) on behalf of a
    verified owner/co-owner of that volunteer's project.

    Returns 401 when unauthenticated, 403 when the sender is unverified or not
    an owner/co-owner of the project.
    """
    if not request.user.is_authenticated:
        return HttpResponse(status=401)
    user = get_request_contributor(request)
    volunteer_relation = VolunteerRelation.objects.get(id=application_id)
    project = volunteer_relation.project
    body = json.loads(request.body)
    subject = body['subject']
    message = body['message']
    # TODO: Condense common code between this and contact_project_volunteers
    if not user.email_verified or not is_co_owner_or_owner(user, project):
        return HttpResponse(status=403)
    email_subject = '{project}: {subject}'.format(
        project=project.project_name,
        subject=subject)
    email_template = HtmlEmailTemplate(use_signature=False) \
        .paragraph('\"{message}\" - {firstname} {lastname}'.format(
            message=message,
            firstname=user.first_name,
            lastname=user.last_name)) \
        .paragraph('To reply, email at {email}'.format(email=user.email))
    send_to_project_volunteer(volunteer_relation, email_subject, email_template)
    return HttpResponse(status=200)
# TODO: Pass csrf token in ajax call so we can check for it
@csrf_exempt
def volunteer_with_project(request, project_id):
    """Create a volunteer application for a project and notify the owners by email.

    Returns 401 when unauthenticated, 403 when the applicant's email is unverified.
    """
    if not request.user.is_authenticated:
        return HttpResponse(status=401)
    user = get_request_contributor(request)
    if not user.email_verified:
        return HttpResponse(status=403)
    project = Project.objects.get(id=project_id)
    body = json.loads(request.body)
    projected_end_date = body['projectedEndDate']
    message = body['message']
    role = body['roleTag']
    volunteer_relation = VolunteerRelation.create(
        project=project,
        volunteer=user,
        projected_end_date=projected_end_date,
        role=role,
        application_text=message)
    send_volunteer_application_email(volunteer_relation)
    # Invalidate cached JSON on both sides of the new relationship.
    project.recache()
    user.purge_cache()
    return HttpResponse(status=200)
# TODO: Pass csrf token in ajax call so we can check for it
@csrf_exempt
def renew_volunteering_with_project(request, application_id):
    """Extend a volunteer engagement's projected end date; only the volunteer
    themselves may renew. Project owners are notified with the renewal message.

    Returns 401 when unauthenticated, 403 when the requester is not the volunteer.
    """
    if not request.user.is_authenticated:
        return HttpResponse(status=401)
    user = get_request_contributor(request)
    volunteer_relation = VolunteerRelation.objects.get(id=application_id)
    if not user.id == volunteer_relation.volunteer.id:
        return HttpResponse(status=403)
    body = json.loads(request.body)
    volunteer_relation.projected_end_date = body['projectedEndDate']
    volunteer_relation.re_enrolled_last_date = timezone.now()
    # Reset the re-enrollment reminder cycle now that the volunteer has renewed.
    volunteer_relation.re_enroll_reminder_count = 0
    volunteer_relation.re_enroll_last_reminder_date = None
    volunteer_relation.save()
    volunteer_relation.volunteer.purge_cache()
    notify_project_owners_volunteer_renewed_email(volunteer_relation, body['message'])
    return HttpResponse(status=200)
# TODO: Pass csrf token in ajax call so we can check for it
@csrf_exempt
def conclude_volunteering_with_project(request, application_id):
    """End the requester's own volunteer engagement: email the volunteer a
    confirmation, notify the project owners, then delete the relationship.

    Returns 401 when unauthenticated, 403 when the requester is not the volunteer.
    """
    if not request.user.is_authenticated:
        return HttpResponse(status=401)
    user = get_request_contributor(request)
    volunteer_relation = VolunteerRelation.objects.get(id=application_id)
    if not user.id == volunteer_relation.volunteer.id:
        return HttpResponse(status=403)
    send_volunteer_conclude_email(user, volunteer_relation.project.project_name)
    body = json.loads(request.body)
    project = Project.objects.get(id=volunteer_relation.project.id)
    # The id check above guarantees this is the same user record; the
    # reassignment is effectively redundant but harmless.
    user = volunteer_relation.volunteer
    volunteer_relation.delete()
    project.recache()
    user.purge_cache()
    # The deleted instance is still usable in memory for the notification.
    notify_project_owners_volunteer_concluded_email(volunteer_relation, body['message'])
    return HttpResponse(status=200)
# TODO: Pass csrf token in ajax call so we can check for it
@csrf_exempt
def accept_project_volunteer(request, application_id):
    """Approve a volunteer application.

    GET requests (e.g. email links) redirect back to the project page with a
    flash message; other methods return a bare 200.
    """
    # Redirect to login if not logged in
    if not request.user.is_authenticated:
        return redirect(section_url(FrontEndSection.LogIn, {'prev': request.get_full_path()}))
    volunteer_relation = VolunteerRelation.objects.get(id=application_id)
    about_project_url = section_url(FrontEndSection.AboutProject, {'id': str(volunteer_relation.project.id)})
    if volunteer_relation.is_approved:
        messages.add_message(request, messages.ERROR, 'This volunteer has already been approved.')
        return redirect(about_project_url)
    if volunteer_operation_is_authorized(request, volunteer_relation):
        # Set approved flag
        volunteer_relation.is_approved = True
        volunteer_relation.approved_date = timezone.now()
        volunteer_relation.save()
        volunteer_relation.volunteer.purge_cache()
        update_project_timestamp(request, volunteer_relation.project)
        volunteer_relation.project.recache()
        if request.method == 'GET':
            messages.add_message(request, messages.SUCCESS, volunteer_relation.volunteer.full_name() + ' has been approved as a volunteer.')
            return redirect(about_project_url)
        else:
            return HttpResponse(status=200)
    else:
        messages.add_message(request, messages.ERROR, 'You do not have permission to approve this volunteer.')
        return redirect(about_project_url)
# TODO: Pass csrf token in ajax call so we can check for it
@csrf_exempt
def promote_project_volunteer(request, application_id):
    """Promote a volunteer to project co-owner; owners/co-owners only."""
    volunteer_relation = VolunteerRelation.objects.get(id=application_id)
    if volunteer_operation_is_authorized(request, volunteer_relation):
        # Set co_owner flag
        volunteer_relation.is_co_owner = True
        volunteer_relation.save()
        update_project_timestamp(request, volunteer_relation.project)
        volunteer_relation.project.recache()
        volunteer_relation.volunteer.purge_cache()
        return HttpResponse(status=200)
    else:
        raise PermissionDenied()
# TODO: Pass csrf token in ajax call so we can check for it
@csrf_exempt
def reject_project_volunteer(request, application_id):
    """Decline a volunteer application: email the applicant the stated reason
    and delete the application. Owners/co-owners only."""
    volunteer_relation = VolunteerRelation.objects.get(id=application_id)
    if volunteer_operation_is_authorized(request, volunteer_relation):
        body = json.loads(request.body)
        message = body['rejection_message']
        email_template = HtmlEmailTemplate()\
            .paragraph('The project owner of {project_name} has declined your application for the following reason:'.format(project_name=volunteer_relation.project.project_name))\
            .paragraph('\"{message}\"'.format(message=message))
        email_subject = 'Your application to join {project_name}'.format(
            project_name=volunteer_relation.project.project_name)
        send_to_project_volunteer(volunteer_relation=volunteer_relation,
                                  subject=email_subject,
                                  template=email_template)
        update_project_timestamp(request, volunteer_relation.project)
        project = Project.objects.get(id=volunteer_relation.project.id)
        user = volunteer_relation.volunteer
        volunteer_relation.delete()
        # Refresh cached JSON for the project and the (now former) applicant.
        project.recache()
        user.purge_cache()
        return HttpResponse(status=200)
    else:
        raise PermissionDenied()
# TODO: Pass csrf token in ajax call so we can check for it
@csrf_exempt
def dismiss_project_volunteer(request, application_id):
    """Remove a volunteer from a project: email them the stated reason and
    delete the relationship. Owners/co-owners only."""
    volunteer_relation = VolunteerRelation.objects.get(id=application_id)
    if volunteer_operation_is_authorized(request, volunteer_relation):
        body = json.loads(request.body)
        message = body['dismissal_message']
        email_template = HtmlEmailTemplate()\
            .paragraph('The owner of {project_name} has removed you from the project for the following reason:'.format(
                project_name=volunteer_relation.project.project_name))\
            .paragraph('\"{message}\"'.format(message=message))
        email_subject = 'You have been dismissed from {project_name}'.format(
            project_name=volunteer_relation.project.project_name)
        send_to_project_volunteer(volunteer_relation=volunteer_relation,
                                  subject=email_subject,
                                  template=email_template)
        update_project_timestamp(request, volunteer_relation.project)
        project = Project.objects.get(id=volunteer_relation.project.id)
        user = volunteer_relation.volunteer
        volunteer_relation.delete()
        # Refresh cached JSON for the project and the dismissed volunteer.
        project.recache()
        user.purge_cache()
        return HttpResponse(status=200)
    else:
        raise PermissionDenied()
# TODO: Pass csrf token in ajax call so we can check for it
@csrf_exempt
def demote_project_volunteer(request, application_id):
    """Remove a volunteer's co-owner status and email them the stated reason.
    Owners/co-owners only; the volunteer remains on the project."""
    volunteer_relation = VolunteerRelation.objects.get(id=application_id)
    if volunteer_operation_is_authorized(request, volunteer_relation):
        volunteer_relation.is_co_owner = False
        volunteer_relation.save()
        update_project_timestamp(request, volunteer_relation.project)
        body = json.loads(request.body)
        message = body['demotion_message']
        email_template = HtmlEmailTemplate()\
            .paragraph('The owner of {project_name} has removed you as a co-owner of the project for the following reason:'.format(
                project_name=volunteer_relation.project.project_name))\
            .paragraph('\"{message}\"'.format(message=message))
        email_subject = 'You have been removed as a co-owner from {project_name}'.format(
            project_name=volunteer_relation.project.project_name)
        send_to_project_volunteer(volunteer_relation=volunteer_relation,
                                  subject=email_subject,
                                  template=email_template)
        volunteer_relation.project.recache()
        volunteer_relation.volunteer.purge_cache()
        return HttpResponse(status=200)
    else:
        raise PermissionDenied()
# TODO: Pass csrf token in ajax call so we can check for it
@csrf_exempt
def leave_project(request, project_id):
    """Let the requesting user leave a project they volunteer on, emailing the
    owners with the (optional) departure message.

    Returns 404 when the user has no volunteer relationship with the project;
    raises PermissionDenied when the relationship belongs to someone else.
    """
    volunteer_relation = VolunteerRelation.objects.filter(project_id=project_id, volunteer_id=request.user.id).first()
    # .first() returns None when no relationship exists; without this guard the
    # attribute access below raised AttributeError and surfaced as a 500.
    if volunteer_relation is None:
        return HttpResponse(status=404)
    if request.user.id == volunteer_relation.volunteer.id:
        body = json.loads(request.body)
        message = body['departure_message']
        if len(message) > 0:
            email_template = HtmlEmailTemplate()\
                .paragraph('{volunteer_name} is leaving {project_name} for the following reason:'.format(
                    volunteer_name=volunteer_relation.volunteer.full_name(),
                    project_name=volunteer_relation.project.project_name))\
                .paragraph('\"{message}\"'.format(message=message))
        else:
            email_template = HtmlEmailTemplate() \
                .paragraph('{volunteer_name} is leaving {project_name} for unspecified reasons.'.format(
                    volunteer_name=volunteer_relation.volunteer.full_name(),
                    project_name=volunteer_relation.project.project_name))
        email_subject = '{volunteer_name} is leaving {project_name}'.format(
            volunteer_name=volunteer_relation.volunteer.full_name(),
            project_name=volunteer_relation.project.project_name)
        send_to_project_owners(project=volunteer_relation.project,
                               sender=volunteer_relation.volunteer,
                               subject=email_subject,
                               template=email_template)
        update_project_timestamp(request, volunteer_relation.project)
        user = volunteer_relation.volunteer
        volunteer_relation.delete()
        project = Project.objects.get(id=project_id)
        # Refresh cached JSON for the project and the departing volunteer.
        project.recache()
        user.purge_cache()
        return HttpResponse(status=200)
    else:
        raise PermissionDenied()
def update_project_timestamp(request, project):
    """Bump the project's modified timestamp, unless the change came from a staff user."""
    if not request.user.is_staff:
        project.update_timestamp()
# TODO: Pass csrf token in ajax call so we can check for it
@csrf_exempt
def contact_group_owner(request, group_id):
    """Relay a message from a logged-in, email-verified user to a group's owners.

    Returns 401 when unauthenticated, 403 when the sender's email is unverified.
    """
    if not request.user.is_authenticated:
        return HttpResponse(status=401)
    user = get_request_contributor(request)
    if not user.email_verified:
        return HttpResponse(status=403)
    body = json.loads(request.body)
    message = body['message']
    group = Group.objects.get(id=group_id)
    email_subject = '{firstname} {lastname} would like to connect with {group}'.format(
        firstname=user.first_name,
        lastname=user.last_name,
        group=group.group_name)
    # The email quotes the message and includes the sender's address for replies.
    email_template = HtmlEmailTemplate(use_signature=False) \
        .paragraph('\"{message}\" - {firstname} {lastname}'.format(
            message=message,
            firstname=user.first_name,
            lastname=user.last_name)) \
        .paragraph('To contact this person, email them at {email}'.format(email=user.email))
    send_to_group_owners(group=group, sender=user, subject=email_subject, template=email_template)
    return HttpResponse(status=200)
# TODO: Pass csrf token in ajax call so we can check for it
@csrf_exempt
def invite_project_to_group(request, group_id):
    """Invite a project into a group; only the group's creator may invite.

    The relationship is auto-approved when the inviter also owns/co-owns the
    project; otherwise an invitation email is sent to the project's owners.
    Returns 401 when unauthenticated, 403 when unverified or not the creator.
    """
    if not request.user.is_authenticated:
        return HttpResponse(status=401)
    user = get_request_contributor(request)
    if not user.email_verified:
        return HttpResponse(status=403)
    group = Group.objects.get(id=group_id)
    if not is_creator(user, group):
        return HttpResponse(status=403)
    body = json.loads(request.body)
    project = Project.objects.get(id=body['projectId'])
    message = body['message']
    is_approved = is_co_owner_or_owner(user, project)
    project_relation = ProjectRelationship.create(group, project, is_approved, message)
    project_relation.save()
    # Refresh cached JSON on both sides of the new relationship.
    project_relation.relationship_project.recache()
    project_relation.relationship_group.recache()
    if not is_approved:
        send_group_project_invitation_email(project_relation)
    return HttpResponse(status=200)
# TODO: Pass csrf token in ajax call so we can check for it
@csrf_exempt
def accept_group_invitation(request, invite_id):
    """Approve a pending group/project invitation; requires owning or co-owning
    the invited project.

    GET requests (e.g. email links) redirect back to the project page with a
    flash message; other methods return a bare 200.
    """
    # Redirect to login if not logged in
    if not request.user.is_authenticated:
        return redirect(section_url(FrontEndSection.LogIn, {'prev': request.get_full_path()}))
    project_relation = ProjectRelationship.objects.get(id=invite_id)
    project = project_relation.relationship_project
    about_project_url = section_url(FrontEndSection.AboutProject, {'id': str(project.id)})
    if project_relation.is_approved:
        messages.add_message(request, messages.ERROR, 'The project is already part of the group.')
        return redirect(about_project_url)
    user = get_request_contributor(request)
    if is_co_owner_or_owner(user, project):
        # Set approved flag
        project_relation.is_approved = True
        project_relation.save()
        update_project_timestamp(request, project)
        project_relation.relationship_project.recache()
        project_relation.relationship_group.recache()
        if request.method == 'GET':
            messages.add_message(request, messages.SUCCESS, 'Your project is now part of the group ' + project_relation.relationship_group.group_name)
            return redirect(about_project_url)
        else:
            return HttpResponse(status=200)
    else:
        messages.add_message(request, messages.ERROR, 'You do not have permission to accept this group invitation.')
        return redirect(about_project_url)
# TODO: Pass csrf token in ajax call so we can check for it
@csrf_exempt
def reject_group_invitation(request, invite_id):
    """Decline a pending group/project invitation by deleting the relationship;
    requires owning or co-owning the invited project.
    """
    # Redirect to login if not logged in
    if not request.user.is_authenticated:
        return redirect(section_url(FrontEndSection.LogIn, {'prev': request.get_full_path()}))
    project_relation = ProjectRelationship.objects.get(id=invite_id)
    project = project_relation.relationship_project
    about_project_url = section_url(FrontEndSection.AboutProject, {'id': str(project.id)})
    if project_relation.is_approved:
        messages.add_message(request, messages.ERROR, 'The project is already part of the group.')
        return redirect(about_project_url)
    user = get_request_contributor(request)
    if is_co_owner_or_owner(user, project):
        project = Project.objects.get(id=project_relation.relationship_project.id)
        project_relation.delete()
        update_project_timestamp(request, project)
        project.recache()
        if request.method == 'GET':
            # TODO: Add messaging of some kind to front end
            return redirect(about_project_url)
        else:
            return HttpResponse(status=200)
    else:
        messages.add_message(request, messages.ERROR, 'You do not have permission to reject this group invitation.')
        return redirect(about_project_url)
@csrf_exempt
def project_favorite(request, project_id):
    """Mark a project as a favorite of the requesting user.

    Returns 200 on success, 400 when the user has already favorited the project.
    """
    user = get_request_contributor(request)
    project = Project.objects.get(id=project_id)
    existing_fav = ProjectFavorite.get_for_project(project, user)
    # Bug fix: the condition was inverted (`is not None`), which created
    # duplicate favorites when one existed and returned 400 on first-time
    # requests — the log messages and project_unfavorite show the intent.
    if existing_fav is None:
        print("Favoriting project:{project} by user:{user}".format(project=project.id, user=user.id))
        ProjectFavorite.create(user, project)
        user.purge_cache()
    else:
        print("Favorite already exists for project:{project}, user:{user}".format(project=project.id, user=user.id))
        return HttpResponse(status=400)
    return HttpResponse(status=200)
@csrf_exempt
def project_unfavorite(request, project_id):
    """Remove the requesting user's favorite on a project.

    Returns 200 on success, 400 when no favorite exists to remove.
    """
    user = get_request_contributor(request)
    project = Project.objects.get(id=project_id)
    existing_fav = ProjectFavorite.get_for_project(project, user)
    if existing_fav is None:
        print("Can't Unfavorite project:{project} by user:{user}".format(project=project.id, user=user.id))
        return HttpResponse(status=400)
    print("Unfavoriting project:{project} by user:{user}".format(project=project.id, user=user.id))
    existing_fav.delete()
    user.purge_cache()
    return HttpResponse(status=200)
#This will ask Google if the recaptcha is valid and if so send email, otherwise return an error.
#TODO: Return text strings to be displayed on the front end so we know specifically what happened
#TODO: Figure out why changing the endpoint to /api/contact/democracylab results in CSRF issues
@csrf_exempt
def contact_democracylab(request):
    """Handle the public contact form: verify reCAPTCHA with Google, then email DemocracyLab.

    Returns 200 when the captcha validates and the email is sent, 401 otherwise.
    """
    #first prepare all the data from the request body
    body = json.loads(request.body)
    # submit validation request to recaptcha
    r = requests.post(
        'https://www.google.com/recaptcha/api/siteverify',
        data={
            'secret': settings.GR_SECRETKEY,
            'response': body['reCaptchaValue']
        }
    )
    if r.json()['success']:
        # Successfully validated, send email
        first_name = body['fname']
        last_name = body['lname']
        email_addr = body['emailaddr']
        message = body['message']
        company_name = body['company_name'] if 'company_name' in body else None
        # Keys whose value is boolean True are treated as checked interest checkboxes.
        interest_flags = list(filter(lambda key: body[key] and isinstance(body[key], bool), body.keys()))
        contact_democracylab_email(first_name, last_name, email_addr, message, company_name, interest_flags)
        return HttpResponse(status=200)
    # Error while verifying the captcha, do not send the email
    return HttpResponse(status=401)
def robots(request):
    """Render robots.txt with the configured domain and crawl policy."""
    context = {
        'PROTOCOL_DOMAIN': settings.PROTOCOL_DOMAIN,
        'DISALLOW_CRAWLING': settings.DISALLOW_CRAWLING
    }
    robots_template = loader.get_template('robots.txt')
    return HttpResponse(robots_template.render(context, request))
def team(request):
    """Return board-of-directors info plus the DemocracyLab project's own JSON, when configured."""
    response = {
        'board_of_directors': settings.BOARD_OF_DIRECTORS
    }
    # NOTE(review): other views treat DLAB_PROJECT_ID as a string (`.isdigit()`);
    # here any non-None value is passed straight to .get() — confirm the setting
    # is always a valid id when set.
    if settings.DLAB_PROJECT_ID is not None:
        project = Project.objects.get(id=settings.DLAB_PROJECT_ID)
        response['project'] = project.hydrate_to_json()
    return JsonResponse(response)
def redirect_v1_urls(request):
    """Translate legacy `/index/?section=...&id=...` URLs to the current routing scheme.

    Deprecated sections get their own replacement URL; everything else is
    rebuilt from the parsed section name and optional id.
    """
    page_url = request.get_full_path()
    print(page_url)
    clean_url = get_clean_url(page_url)
    section_match = re.findall(r'/index/\?section=(\w+)', clean_url)
    # Fall back to the home section when no section parameter is present.
    section_name = section_match[0] if len(section_match) > 0 else FrontEndSection.Home
    deprecated_redirect_url = redirect_from_deprecated_url(section_name)
    if deprecated_redirect_url:
        print('Redirecting deprecated url {name}: {url}'.format(name=section_name, url=clean_url))
        return redirect(deprecated_redirect_url)
    print('Redirecting v1 url: ' + clean_url)
    section_id_match = re.findall(r'&id=([\w-]+)', clean_url)
    section_id = section_id_match[0] if len(section_id_match) > 0 else ''
    return redirect(section_url(section_name, {'id': section_id}))
def get_testimonials(request, category=None):
    """Return active testimonials (optionally filtered by category), highest priority first."""
    testimonials = Testimonial.objects.filter(active=True)
    if category:
        testimonials = testimonials.filter(categories__name__in=[category])
    ordered = testimonials.order_by('-priority')
    return JsonResponse([testimonial.to_json() for testimonial in ordered], safe=False)
| 41.235913 | 175 | 0.718521 |
f7cffe2e8206854f6079d0a062643fe2055f32ca | 1,230 | py | Python | twisted/test/stdio_test_lastwrite.py | jMyles/twisted | a5108c0db829847bbfd4bf427868eb0b13da0465 | [
"Unlicense",
"MIT"
] | 1 | 2015-08-18T06:57:28.000Z | 2015-08-18T06:57:28.000Z | twisted/test/stdio_test_lastwrite.py | jMyles/twisted | a5108c0db829847bbfd4bf427868eb0b13da0465 | [
"Unlicense",
"MIT"
] | null | null | null | twisted/test/stdio_test_lastwrite.py | jMyles/twisted | a5108c0db829847bbfd4bf427868eb0b13da0465 | [
"Unlicense",
"MIT"
] | 1 | 2020-11-01T20:40:01.000Z | 2020-11-01T20:40:01.000Z | # -*- test-case-name: twisted.test.test_stdio.StandardInputOutputTests.test_lastWriteReceived -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Main program for the child process run by
L{twisted.test.test_stdio.StandardInputOutputTests.test_lastWriteReceived}
to test that L{os.write} can be reliably used after
L{twisted.internet.stdio.StandardIO} has finished.
"""
from __future__ import absolute_import, division
__import__('_preamble')
import sys
from twisted.internet.protocol import Protocol
from twisted.internet.stdio import StandardIO
from twisted.python.reflect import namedAny
class LastWriteChild(Protocol):
    """Child-process protocol: write a magic string to the transport, close the
    connection, and stop the reactor once the connection is lost."""
    def __init__(self, reactor, magicString):
        self.reactor = reactor
        self.magicString = magicString

    def connectionMade(self):
        # Emit the payload and immediately request a clean close.
        self.transport.write(self.magicString)
        self.transport.loseConnection()

    def connectionLost(self, reason):
        self.reactor.stop()
def main(reactor, magicString):
    """Run a StandardIO session that writes magicString (ASCII-encoded) and
    exits when the connection is closed."""
    p = LastWriteChild(reactor, magicString.encode('ascii'))
    StandardIO(p)
    reactor.run()
if __name__ == '__main__':
    # argv[1] names a reactor installer (resolved via namedAny and installed
    # before the reactor import); argv[2] is the magic string to write.
    namedAny(sys.argv[1]).install()
    from twisted.internet import reactor
    main(reactor, sys.argv[2])
| 25.102041 | 97 | 0.745528 |
aa15b156c7ab2cde84b550b0d2c96fa0f9c77c90 | 2,158 | py | Python | litex_boards/__init__.py | Fatsie/litex-boards | 228a9650d4cd624de3e9f51612bb5b3970e2e2e0 | [
"BSD-2-Clause"
] | null | null | null | litex_boards/__init__.py | Fatsie/litex-boards | 228a9650d4cd624de3e9f51612bb5b3970e2e2e0 | [
"BSD-2-Clause"
] | null | null | null | litex_boards/__init__.py | Fatsie/litex-boards | 228a9650d4cd624de3e9f51612bb5b3970e2e2e0 | [
"BSD-2-Clause"
] | null | null | null | import os
import sys
import glob
import importlib
# Boards Vendors.
vendors = [
"1bitsquared",
"antmicro",
"colorlight",
"digilent",
"enclustra",
"gsd",
"hackaday",
"kosagi",
"lattice",
"lambdaconcept",
"linsn",
"numato",
"qmtech",
"radiona",
"rhsresearchllc",
"saanlima",
"scarabhardware",
"siglent",
"sqrl",
"terasic",
"trenz",
"xilinx",
]
# Get all platforms/targets.
litex_boards_dir = os.path.dirname(os.path.realpath(__file__))
platforms = glob.glob(f"{litex_boards_dir}/platforms/*.py")
targets = glob.glob(f"{litex_boards_dir}/targets/*.py")
# For each platform:
for platform in platforms:
platform = os.path.basename(platform)
platform = platform.replace(".py", "")
# Verify if a Vendor prefix is present in platform name, if so create the short import to
# allow the platform to be imported with the full name or short name ex:
# from litex_boards.platforms import digilent_arty or
# from litex_boards.platforms import arty
if platform.split("_")[0] in vendors:
short_platform = platform[len(platform.split("_")[0])+1:]
p = importlib.import_module(f"litex_boards.platforms.{platform}")
vars()[short_platform] = p
sys.modules[f"litex_boards.platforms.{short_platform}"] = p
# For each target:
for target in targets:
target = os.path.basename(target)
target = target.replace(".py", "")
# Verify if a Vendor prefix is present in target name, if so create the short import to
# allow the target to be imported with the full name or short name ex:
# from litex_boards.targets import digilent_arty or
# from litex_boards.targets import arty
if target.split("_")[0] in vendors:
try:
short_target = target[len(target.split("_")[0])+1:]
t = importlib.import_module(f"litex_boards.targets.{target}")
vars()[short_target] = t
sys.modules[f"litex_boards.targets.{short_target}"] = t
except ModuleNotFoundError:
# Not all dependencies for this target is satisfied. Skip.
pass
| 31.275362 | 93 | 0.649212 |
bd280765e0590414eb5f38661fc3904988fc4053 | 6,593 | py | Python | LowLevelApi/NGPF/REST/1_Most_Common/loadQuickTestCmdlineRestApi.py | NickKeating/IxNetwork | 0a54c0b8d1a1664d2826ad20a826ef384c48432f | [
"MIT"
] | 46 | 2018-01-24T06:43:45.000Z | 2022-03-17T07:27:08.000Z | LowLevelApi/NGPF/REST/1_Most_Common/loadQuickTestCmdlineRestApi.py | NickKeating/IxNetwork | 0a54c0b8d1a1664d2826ad20a826ef384c48432f | [
"MIT"
] | 104 | 2018-03-16T18:16:29.000Z | 2022-03-17T07:16:43.000Z | LowLevelApi/NGPF/REST/1_Most_Common/loadQuickTestCmdlineRestApi.py | NickKeating/IxNetwork | 0a54c0b8d1a1664d2826ad20a826ef384c48432f | [
"MIT"
] | 58 | 2018-01-23T05:54:20.000Z | 2022-03-30T22:55:20.000Z |
#
# Requirement:
# IxNetwork 8.0+
# - Python2.7/3.4
# - Python modules: requests, paramiko
# - IxNetRestApi.py
#
# Description:
#
# Using REST API to connect to an existing Quick Test configuration.
# If the variable userSelectQuickTestList is 'all', then execute all
# the configured Quick Tests. Else, execute the list provided by
# the user from the commandline.
#
# Each Quick Test will retrieve its own AggregateResults.csv file and
# includes a timestamp on it.
#
# Usage:
# ------
# Enter: python loadQuickTestCmdlineRestApi.py help
#
# -ixNetworkApiServerIp: The IxNetwork API server
# -ixNetworkPort: The IxNetwork API server socket port number
# -quickTestNamesToRun: 'all' or a list of all the Quick Test names to run wrapped inside double quotes
# and separate each QT name with a comma
# Example: "broadcast 2544, throughput"
# -copyResultsToLinuxPath: The full path and file name to save the Quick Test results on
# your local Linux.
# Example: /automation/resultFolder
# -quickTestCsvResultFile: The statistic result file to get when test is done
# -resume: Don\'t load a config file. Resuming testing from an existing config.
from __future__ import absolute_import, print_function
import sys
import os
from IxNetRestApi import *
# Set default values
ixNetworkApiServer = '192.168.70.127'
ixNetworkApiServerPort = '11009'
userSelectQuickTestList = 'all'
loadConfig = True
copyResultFileToLocalLinuxPath = '/home/hgee'
quickTestCsvResultFile = 'AggregateResults.csv'
quickTestConfigFile = '/home/hgee/Dropbox/MyIxiaWork/Temp/QuickTest_vm8.30.ixncfg'
ixChassisIp = '192.168.70.11'
# Format = [chassisIp, slotNumber, portNumber]
portList = [[ixChassisIp, '1', '1'],
[ixChassisIp, '2', '1']]
def help():
os.system('clear')
print('\n\nUsage:')
print('-'*75)
print()
print('\t-ixNetworkApiServerIp: The IxNetwork API server.')
print('\t-ixNetworkPort: The IxNetwork API server socket port number.')
print('\t-quickTestNamesToRun: \'all\' or a list of all the Quick Test names to run wrapped inside double quotes')
print('\t and separate each QT name with a comma.')
print('\t Example: \'broadcast 2544, throughput\'.')
print('\t-copyResultsToLinuxPath: The full path and file name to save the Quick Test results on')
print('\t your local Linux.')
print('\t Example: /automation/resultFolder.')
print('\t-quickTestCsvResultFile: The statistic result file to get when test is done.')
print('\t-resume: Don\'t load a config file. Resuming testing from an existing config.')
print()
sys.exit()
parameters = sys.argv[1:]
argIndex = 0
while argIndex < len(parameters):
currentArg = parameters[argIndex]
if currentArg == '-ixNetworkApiServerIp':
ixNetworkApiServerIp = parameters[argIndex + 1]
argIndex += 2
elif currentArg == '-ixNetworkPort':
ixNetworkPort = parameters[argIndex + 1]
argIndex += 2
elif currentArg == '-quickTestNamesToRun':
params = parameters[argIndex + 1]
userSelectQuickTestList = [x.strip() for x in params.split(',')]
argIndex += 2
elif currentArg == '-copyResultsToLinuxPath':
copyResultFileToLocalLinuxPath = parameters[argIndex + 1]
argIndex += 2
elif currentArg == '-quickTestCsvResultFile':
quickTestCsvResultFile = parameters[argIndex + 1]
argIndex += 2
elif currentArg == '-resume':
loadConfig = False
argIndex += 1
elif currentArg == 'help':
help()
else:
sys.exit('No such parameter: %s' % currentArg)
try:
restObj = Connect(apiServerIp=ixNetworkApiServer, serverIpPort=ixNetworkApiServerPort, serverOs=connectToApiServer)
# You could set to False if have an existing configuration.
if loadConfig is True:
restObj.loadConfigFile(quickTestConfigFile)
# If portList variable is defined, this means to reassign ports.
if 'portList' in locals():
restObj.assignPorts(portList, createVports=False)
restObj.verifyPortState()
if userSelectQuickTestList == 'all':
configuredQuickTestList = restObj.getAllQuickTestHandles()
if configuredQuickTestList:
quickTestNameList = restObj.getAllQuickTestNames()
else:
raise IxNetRestApiException('No Quick Test configured found')
else:
# Verify user selected Quick Test to run
restObj.verifyAllQuickTestNames(userSelectQuickTestList)
quickTestNameList = userSelectQuickTestList
print('\nList of Quick Tests to run ...')
for quickTestToRun in quickTestNameList:
print('\t', quickTestToRun)
quickTestHandle = None
for quickTestName in quickTestNameList:
print('\nStarting Quick Test: %s...\n' % quickTestName)
quickTestHandle = restObj.getQuickTestHandleByName(quickTestName)
currentQuickTestName = restObj.getQuickTestNameByHandle(quickTestHandle)
restObj.applyQuickTest(quickTestHandle)
restObj.startQuickTest(quickTestHandle)
restObj.verifyQuickTestInitialization(quickTestHandle)
restObj.monitorQuickTestRunningProgress(quickTestHandle)
# Optional: Display the final stats on the terminal when test completes.
stats = restObj.getStats(viewName='Flow View')
resultPath = restObj.getQuickTestResultPath(quickTestHandle)
resultPath = resultPath+'\\'+quickTestCsvResultFile
if copyResultFileToLocalLinuxPath.split('/')[:-1] == '/':
copyResultFileToLocalLinuxPath = copyResultFileToLocalLinuxPath[:-1]
quickTestStatsToGet = quickTestCsvResultFile.split('.')[0]
resultFileName = quickTestStatsToGet+'_'+quickTestName.replace(' ', '')
restObj.copyFileWindowsToLocalLinux(resultPath, copyResultFileToLocalLinuxPath, renameDestinationFile=resultFileName, includeTimestamp=True)
print('\nQuick Test ended:', quickTestName)
except (IxNetRestApiException, Exception, KeyboardInterrupt) as errMsg:
if not bool(re.search('ConnectionError', traceback.format_exc())):
print('\n%s' % traceback.format_exc())
print('\nException Error! %s\n' % errMsg)
if quickTestHandle is not None:
restObj.stopQuickTest(quickTestHandle)
| 41.993631 | 148 | 0.675413 |
c8491487ed767a6f627e0efd04a8bd858eeb911c | 3,483 | py | Python | clumioapi/models/host_list_links.py | clumio-code/clumio-python-sdk | 63bfaf3afed5c0ab4bae3dd1be52271249d07c51 | [
"Apache-2.0"
] | null | null | null | clumioapi/models/host_list_links.py | clumio-code/clumio-python-sdk | 63bfaf3afed5c0ab4bae3dd1be52271249d07c51 | [
"Apache-2.0"
] | 1 | 2021-09-16T05:56:05.000Z | 2021-09-16T05:56:05.000Z | clumioapi/models/host_list_links.py | clumio-code/clumio-python-sdk | 63bfaf3afed5c0ab4bae3dd1be52271249d07c51 | [
"Apache-2.0"
] | null | null | null | #
# Copyright 2021. Clumio, Inc.
#
from typing import Any, Dict, Mapping, Optional, Sequence, Type, TypeVar
from clumioapi.models import hateoas_first_link
from clumioapi.models import hateoas_last_link
from clumioapi.models import hateoas_next_link
from clumioapi.models import hateoas_prev_link
from clumioapi.models import hateoas_self_link
T = TypeVar('T', bound='HostListLinks')
class HostListLinks:
"""Implementation of the 'HostListLinks' model.
URLs to pages related to the resource.
Attributes:
first:
The HATEOAS link to the first page of results.
last:
The HATEOAS link to the last page of results.
next:
The HATEOAS link to the next page of results.
prev:
The HATEOAS link to the previous page of results.
p_self:
The HATEOAS link to this resource.
"""
# Create a mapping from Model property names to API property names
_names = {
'first': '_first',
'last': '_last',
'next': '_next',
'prev': '_prev',
'p_self': '_self',
}
def __init__(
self,
first: hateoas_first_link.HateoasFirstLink = None,
last: hateoas_last_link.HateoasLastLink = None,
next: hateoas_next_link.HateoasNextLink = None,
prev: hateoas_prev_link.HateoasPrevLink = None,
p_self: hateoas_self_link.HateoasSelfLink = None,
) -> None:
"""Constructor for the HostListLinks class."""
# Initialize members of the class
self.first: hateoas_first_link.HateoasFirstLink = first
self.last: hateoas_last_link.HateoasLastLink = last
self.next: hateoas_next_link.HateoasNextLink = next
self.prev: hateoas_prev_link.HateoasPrevLink = prev
self.p_self: hateoas_self_link.HateoasSelfLink = p_self
@classmethod
def from_dictionary(cls: Type, dictionary: Mapping[str, Any]) -> Optional[T]:
"""Creates an instance of this model from a dictionary
Args:
dictionary: A dictionary representation of the object as obtained
from the deserialization of the server's response. The keys
MUST match property names in the API description.
Returns:
object: An instance of this structure class.
"""
if not dictionary:
return None
# Extract variables from the dictionary
key = '_first'
first = (
hateoas_first_link.HateoasFirstLink.from_dictionary(dictionary.get(key))
if dictionary.get(key)
else None
)
key = '_last'
last = (
hateoas_last_link.HateoasLastLink.from_dictionary(dictionary.get(key))
if dictionary.get(key)
else None
)
key = '_next'
next = (
hateoas_next_link.HateoasNextLink.from_dictionary(dictionary.get(key))
if dictionary.get(key)
else None
)
key = '_prev'
prev = (
hateoas_prev_link.HateoasPrevLink.from_dictionary(dictionary.get(key))
if dictionary.get(key)
else None
)
key = '_self'
p_self = (
hateoas_self_link.HateoasSelfLink.from_dictionary(dictionary.get(key))
if dictionary.get(key)
else None
)
# Return an object of this model
return cls(first, last, next, prev, p_self)
| 30.823009 | 84 | 0.621878 |
4bda05019bde0c901369e4d7578b8c48d75ac235 | 14,443 | py | Python | rnn/modules.py | DingKe/pytorch_workplace | 4bc60a2c3640de522d0b72262667ba70391ba16e | [
"MIT"
] | 184 | 2017-05-23T12:06:08.000Z | 2021-12-21T09:09:51.000Z | rnn/modules.py | xzm2004260/pytorch_workplace | 4bc60a2c3640de522d0b72262667ba70391ba16e | [
"MIT"
] | 8 | 2017-08-14T11:40:20.000Z | 2019-01-16T13:14:32.000Z | rnn/modules.py | xzm2004260/pytorch_workplace | 4bc60a2c3640de522d0b72262667ba70391ba16e | [
"MIT"
] | 61 | 2017-04-29T09:53:47.000Z | 2021-02-19T02:13:14.000Z | import math
import torch
from torch.nn import Module, Parameter
import torch.nn.functional as F
from torch.autograd import Variable
def clip_grad(v, min, max):
v_tmp = v.expand_as(v)
v_tmp.register_hook(lambda g: g.clamp(min, max))
return v_tmp
class RNNCellBase(Module):
def __repr__(self):
s = '{name}({input_size}, {hidden_size}'
if 'bias' in self.__dict__ and self.bias is not True:
s += ', bias={bias}'
if 'nonlinearity' in self.__dict__ and self.nonlinearity != "tanh":
s += ', nonlinearity={nonlinearity}'
s += ')'
return s.format(name=self.__class__.__name__, **self.__dict__)
class RNNCell(RNNCellBase):
def __init__(self, input_size, hidden_size, bias=True, grad_clip=None):
super(RNNCell, self).__init__()
self.input_size = input_size
self.hidden_size = hidden_size
self.grad_clip = grad_clip
self.weight_ih = Parameter(torch.Tensor(hidden_size, input_size))
self.weight_hh = Parameter(torch.Tensor(hidden_size, hidden_size))
if bias:
self.bias = Parameter(torch.Tensor(hidden_size))
else:
self.register_parameter('bias', None)
self.reset_parameters()
def reset_parameters(self):
stdv = 1.0 / math.sqrt(self.hidden_size)
for weight in self.parameters():
weight.data.uniform_(-stdv, stdv)
def forward(self, input, h):
output = F.linear(input, self.weight_ih, self.bias) + F.linear(h, self.weight_hh)
if self.grad_clip:
output = clip_grad(output, -self.grad_clip, self.grad_clip) # avoid explosive gradient
output = F.relu(output)
return output
class GRUCell(RNNCellBase):
def __init__(self, input_size, hidden_size, bias=True, grad_clip=None):
super(GRUCell, self).__init__()
self.input_size = input_size
self.hidden_size = hidden_size
self.grad_clip = grad_clip
self.weight_ih = Parameter(torch.Tensor(3 * hidden_size, input_size))
self.weight_hh_rz = Parameter(torch.Tensor(2 * hidden_size, hidden_size))
self.weight_hh = Parameter(torch.Tensor(hidden_size, hidden_size))
if bias:
self.bias = Parameter(torch.Tensor(3 * hidden_size))
else:
self.register_parameter('bias', None)
self.reset_parameters()
def reset_parameters(self):
stdv = 1.0 / math.sqrt(self.hidden_size)
for weight in self.parameters():
weight.data.uniform_(-stdv, stdv)
def forward(self, input, h):
ih = F.linear(input, self.weight_ih, self.bias)
hh_rz = F.linear(h, self.weight_hh_rz)
if self.grad_clip:
ih = clip_grad(ih, -self.grad_clip, self.grad_clip)
hh_rz = clip_grad(hh_rz, -self.grad_clip, self.grad_clip)
r = F.sigmoid(ih[:, :self.hidden_size] + hh_rz[:, :self.hidden_size])
i = F.sigmoid(ih[:, self.hidden_size: self.hidden_size * 2] + hh_rz[:, self.hidden_size:])
hhr = F.linear(h * r, self.weight_hh)
if self.grad_clip:
hhr = clip_grad(hhr, -self.grad_clip, self.grad_clip)
n = F.relu(ih[:, self.hidden_size * 2:] + hhr)
h = (1 - i) * n + i * h
return h
class LSTMCell(RNNCellBase):
def __init__(self, input_size, hidden_size, bias=True, grad_clip=None):
super(LSTMCell, self).__init__()
self.input_size = input_size
self.hidden_size = hidden_size
self.grad_clip = grad_clip
self.weight_ih = Parameter(torch.Tensor(4 * hidden_size, input_size))
self.weight_hh = Parameter(torch.Tensor(4 * hidden_size, hidden_size))
if bias:
self.bias = Parameter(torch.Tensor(4 * hidden_size))
else:
self.register_parameter('bias', None)
self.reset_parameters()
def reset_parameters(self):
stdv = 1.0 / math.sqrt(self.hidden_size)
for weight in self.parameters():
weight.data.uniform_(-stdv, stdv)
def forward(self, input, hx):
h, c = hx
pre = F.linear(input, self.weight_ih, self.bias) \
+ F.linear(h, self.weight_hh)
if self.grad_clip:
pre = clip_grad(pre, -self.grad_clip, self.grad_clip)
i = F.sigmoid(pre[:, :self.hidden_size])
f = F.sigmoid(pre[:, self.hidden_size: self.hidden_size * 2])
g = F.tanh(pre[:, self.hidden_size * 2: self.hidden_size * 3])
o = F.sigmoid(pre[:, self.hidden_size * 3:])
c = f * c + i * g
h = o * F.tanh(c)
return h, c
def cumax(logits, dim=-1):
return torch.cumsum(F.softmax(logits, dim), dim=dim)
class LSTMONCell(RNNCellBase):
'''
Shen & Tan et al. ORDERED NEURONS: INTEGRATING TREE STRUCTURES INTO RECURRENT NEURAL NETWORKS
'''
def __init__(self, input_size, hidden_size, bias=True, grad_clip=None):
super(LSTMONCell, self).__init__()
self.input_size = input_size
self.hidden_size = hidden_size
self.grad_clip = grad_clip
self.weight_ih = Parameter(torch.Tensor(6 * hidden_size, input_size))
self.weight_hh = Parameter(torch.Tensor(6 * hidden_size, hidden_size))
if bias:
self.bias = Parameter(torch.Tensor(6 * hidden_size))
else:
self.register_parameter('bias', None)
self.reset_parameters()
def reset_parameters(self):
stdv = 1.0 / math.sqrt(self.hidden_size)
for weight in self.parameters():
weight.data.uniform_(-stdv, stdv)
def forward(self, input, hx):
h, c = hx
pre = F.linear(input, self.weight_ih, self.bias) \
+ F.linear(h, self.weight_hh)
if self.grad_clip:
pre = clip_grad(pre, -self.grad_clip, self.grad_clip)
i = F.sigmoid(pre[:, :self.hidden_size])
f = F.sigmoid(pre[:, self.hidden_size: self.hidden_size * 2])
g = F.tanh(pre[:, self.hidden_size * 2: self.hidden_size * 3])
o = F.sigmoid(pre[:, self.hidden_size * 3: self.hidden_size * 4])
ff = cumax(pre[:, self.hidden_size * 4: self.hidden_size * 5])
ii = 1 - cumax(pre[:, self.hidden_size * 5: self.hidden_size * 6])
w = ff * ii
f = f * w + (ff - w)
i = i * w + (ii - w)
c = f * c + i * g
h = o * F.tanh(c)
return h, c
class LSTMPCell(RNNCellBase):
def __init__(self, input_size, hidden_size, recurrent_size, bias=True, grad_clip=None):
super(LSTMPCell, self).__init__()
self.input_size = input_size
self.hidden_size = hidden_size
self.recurrent_size = recurrent_size
self.grad_clip = grad_clip
self.weight_ih = Parameter(torch.Tensor(4 * hidden_size, input_size))
self.weight_hh = Parameter(torch.Tensor(4 * hidden_size, recurrent_size))
self.weight_rec = Parameter(torch.Tensor(recurrent_size, hidden_size))
if bias:
self.bias = Parameter(torch.Tensor(4 * hidden_size))
else:
self.register_parameter('bias', None)
self.reset_parameters()
def reset_parameters(self):
stdv = 1.0 / math.sqrt(self.hidden_size)
for weight in self.parameters():
weight.data.uniform_(-stdv, stdv)
def forward(self, input, hx):
h, c = hx
pre = F.linear(input, self.weight_ih, self.bias) \
+ F.linear(h, self.weight_hh)
if self.grad_clip:
pre = clip_grad(pre, -self.grad_clip, self.grad_clip)
i = F.sigmoid(pre[:, :self.hidden_size])
f = F.sigmoid(pre[:, self.hidden_size: self.hidden_size * 2])
g = F.tanh(pre[:, self.hidden_size * 2: self.hidden_size * 3])
o = F.sigmoid(pre[:, self.hidden_size * 3:])
c = f * c + i * g
h = o * F.tanh(c)
h = F.linear(h, self.weight_rec)
return h, c
class MGRUCell(RNNCellBase):
'''Minimal GRU
Reference:
Ravanelli et al. [Improving speech recognition by revising gated recurrent units](https://arxiv.org/abs/1710.00641).
'''
def __init__(self, input_size, hidden_size, bias=True, grad_clip=None):
super(MGRUCell, self).__init__()
self.input_size = input_size
self.hidden_size = hidden_size
self.grad_clip = grad_clip
self.weight_ih = Parameter(torch.Tensor(2 * hidden_size, input_size))
self.weight_hh = Parameter(torch.Tensor(2 * hidden_size, hidden_size))
if bias:
self.bias = Parameter(torch.Tensor(2 * hidden_size))
else:
self.register_parameter('bias', None)
self.reset_parameters()
def reset_parameters(self):
stdv = 1.0 / math.sqrt(self.hidden_size)
for weight in self.parameters():
weight.data.uniform_(-stdv, stdv)
def forward(self, input, h):
ih = F.linear(input, self.weight_ih, self.bias)
hh = F.linear(h, self.weight_hh)
if self.grad_clip:
ih = clip_grad(ih, -self.grad_clip, self.grad_clip)
hh = clip_grad(hh, -self.grad_clip, self.grad_clip)
z = F.sigmoid(ih[:, :self.hidden_size] + hh[:, :self.hidden_size])
n = F.relu(ih[:, self.hidden_size:] + hh[:, self.hidden_size:])
h = (1 - z) * n + z * h
return h
class IndRNNCell(RNNCellBase):
'''
References:
Li et al. [Independently Recurrent Neural Network (IndRNN): Building A Longer and Deeper RNN](https://arxiv.org/abs/1803.04831).
'''
def __init__(self, input_size, hidden_size, bias=True, grad_clip=None):
super(IndRNNCell, self).__init__()
self.input_size = input_size
self.hidden_size = hidden_size
self.grad_clip = grad_clip
self.weight_ih = Parameter(torch.Tensor(hidden_size, input_size))
self.weight_hh = Parameter(torch.Tensor(hidden_size))
if bias:
self.bias = Parameter(torch.Tensor(hidden_size))
else:
self.register_parameter('bias', None)
self.reset_parameters()
def reset_parameters(self):
stdv = 1.0 / math.sqrt(self.hidden_size)
for weight in self.parameters():
weight.data.uniform_(-stdv, stdv)
def forward(self, input, h):
output = F.linear(input, self.weight_ih, self.bias) + h * self.weight_hh
if self.grad_clip:
output = clip_grad(output, -self.grad_clip, self.grad_clip) # avoid explosive gradient
output = F.relu(output)
return output
class RNNBase(Module):
def __init__(self, mode, input_size, hidden_size, recurrent_size=None, num_layers=1, bias=True,
return_sequences=True, grad_clip=None):
super(RNNBase, self).__init__()
self.mode = mode
self.input_size = input_size
self.hidden_size = hidden_size
self.recurrent_size = recurrent_size
self.num_layers = num_layers
self.bias = bias
self.return_sequences = return_sequences
self.grad_clip = grad_clip
mode2cell = {'RNN': RNNCell,
'IndRNN': IndRNNCell,
'GRU': GRUCell,
'MGRU': GRUCell,
'LSTM': LSTMCell,
'LSTMON': LSTMONCell,
'LSTMP': LSTMPCell}
Cell = mode2cell[mode]
kwargs = {'input_size': input_size,
'hidden_size': hidden_size,
'bias': bias,
'grad_clip': grad_clip}
if self.mode == 'LSTMP':
kwargs['recurrent_size'] = recurrent_size
self.cell0= Cell(**kwargs)
for i in range(1, num_layers):
kwargs['input_size'] = recurrent_size if self.mode == 'LSTMP' else hidden_size
cell = Cell(**kwargs)
setattr(self, 'cell{}'.format(i), cell)
def forward(self, input, initial_states=None):
if initial_states is None:
zeros = Variable(torch.zeros(input.size(0), self.hidden_size))
if self.mode == 'LSTM' or self.mode == 'LSTMON':
initial_states = [(zeros, zeros), ] * self.num_layers
elif self.mode == 'LSTMP':
zeros_h = Variable(torch.zeros(input.size(0), self.recurrent_size))
initial_states = [(zeros_h, zeros), ] * self.num_layers
else:
initial_states = [zeros] * self.num_layers
assert len(initial_states) == self.num_layers
states = initial_states
outputs = []
time_steps = input.size(1)
for t in range(time_steps):
x = input[:, t, :]
for l in range(self.num_layers):
hx = getattr(self, 'cell{}'.format(l))(x, states[l])
states[l] = hx
if self.mode.startswith('LSTM'):
x = hx[0]
else:
x = hx
outputs.append(hx)
if self.return_sequences:
if self.mode.startswith('LSTM'):
hs, cs = zip(*outputs)
h = torch.stack(hs).transpose(0, 1)
c = torch.stack(cs).transpose(0, 1)
output = (h, c)
else:
output = torch.stack(outputs).transpose(0, 1)
else:
output = outputs[-1]
return output
class RNN(RNNBase):
def __init__(self, *args, **kwargs):
super(RNN, self).__init__('RNN', *args, **kwargs)
class GRU(RNNBase):
def __init__(self, *args, **kwargs):
super(GRU, self).__init__('GRU', *args, **kwargs)
class MGRU(RNNBase):
def __init__(self, *args, **kwargs):
super(MGRU, self).__init__('MGRU', *args, **kwargs)
class LSTM(RNNBase):
def __init__(self, *args, **kwargs):
super(LSTM, self).__init__('LSTM', *args, **kwargs)
class LSTMON(RNNBase):
def __init__(self, *args, **kwargs):
super(LSTMON, self).__init__('LSTMON', *args, **kwargs)
class LSTMP(RNNBase):
def __init__(self, *args, **kwargs):
super(LSTMP, self).__init__('LSTMP', *args, **kwargs)
class IndRNN(RNNBase):
'''
References:
Li et al. [Independently Recurrent Neural Network (IndRNN): Building A Longer and Deeper RNN](https://arxiv.org/abs/1803.04831).
'''
def __init__(self, *args, **kwargs):
super(IndRNN, self).__init__('IndRNN', *args, **kwargs)
| 33.126147 | 132 | 0.594267 |
12ed38a26de06c73d436434889cc962d7c0df9a3 | 10,121 | py | Python | pymysql/converters.py | TTimo/PyMySQL | fdc5fd6e1006bce39833de76926e3a1e4795ad9a | [
"MIT"
] | 1 | 2016-04-08T03:48:31.000Z | 2016-04-08T03:48:31.000Z | pymysql/converters.py | TTimo/PyMySQL | fdc5fd6e1006bce39833de76926e3a1e4795ad9a | [
"MIT"
] | null | null | null | pymysql/converters.py | TTimo/PyMySQL | fdc5fd6e1006bce39833de76926e3a1e4795ad9a | [
"MIT"
] | null | null | null | import re
import datetime
import time
import sys
from constants import FIELD_TYPE, FLAG
from charset import charset_by_id
PYTHON3 = sys.version_info[0] > 2
try:
set
except NameError:
try:
from sets import BaseSet as set
except ImportError:
from sets import Set as set
ESCAPE_REGEX = re.compile(r"[\0\n\r\032\'\"\\]")
ESCAPE_MAP = {'\0': '\\0', '\n': '\\n', '\r': '\\r', '\032': '\\Z',
'\'': '\\\'', '"': '\\"', '\\': '\\\\'}
def escape_item(val, charset):
if type(val) in [tuple, list, set]:
return escape_sequence(val, charset)
if type(val) is dict:
return escape_dict(val, charset)
if PYTHON3 and hasattr(val, "decode") and not isinstance(val, unicode):
# deal with py3k bytes
val = val.decode(charset)
encoder = encoders[type(val)]
val = encoder(val)
if type(val) in [str, int, unicode]:
return val
val = val.encode(charset)
return val
def escape_dict(val, charset):
n = {}
for k, v in val.items():
quoted = escape_item(v, charset)
n[k] = quoted
return n
def escape_sequence(val, charset):
n = []
for item in val:
quoted = escape_item(item, charset)
n.append(quoted)
return "(" + ",".join(n) + ")"
def escape_set(val, charset):
val = map(lambda x: escape_item(x, charset), val)
return ','.join(val)
def escape_bool(value):
return str(int(value))
def escape_object(value):
return str(value)
def escape_int(value):
return value
escape_long = escape_object
def escape_float(value):
return ('%.15g' % value)
def escape_string(value):
return ("'%s'" % ESCAPE_REGEX.sub(
lambda match: ESCAPE_MAP.get(match.group(0)), value))
def escape_unicode(value):
return escape_string(value)
def escape_None(value):
return 'NULL'
def escape_timedelta(obj):
seconds = int(obj.seconds) % 60
minutes = int(obj.seconds // 60) % 60
hours = int(obj.seconds // 3600) % 24 + int(obj.days) * 24
return escape_string('%02d:%02d:%02d' % (hours, minutes, seconds))
def escape_time(obj):
s = "%02d:%02d:%02d" % (int(obj.hour), int(obj.minute),
int(obj.second))
if obj.microsecond:
s += ".%f" % obj.microsecond
return escape_string(s)
def escape_datetime(obj):
return escape_string(obj.strftime("%Y-%m-%d %H:%M:%S"))
def escape_date(obj):
return escape_string(obj.strftime("%Y-%m-%d"))
def escape_struct_time(obj):
return escape_datetime(datetime.datetime(*obj[:6]))
def Thing2Literal(o, d):
return "'%s'" % escape_string(str(o))
def convert_datetime(obj):
"""Returns a DATETIME or TIMESTAMP column value as a datetime object:
>>> datetime_or_None('2007-02-25 23:06:20')
datetime.datetime(2007, 2, 25, 23, 6, 20)
>>> datetime_or_None('2007-02-25T23:06:20')
datetime.datetime(2007, 2, 25, 23, 6, 20)
Illegal values are returned as None:
>>> datetime_or_None('2007-02-31T23:06:20') is None
True
>>> datetime_or_None('0000-00-00 00:00:00') is None
True
"""
if ' ' in obj:
sep = ' '
elif 'T' in obj:
sep = 'T'
else:
return convert_date(obj)
try:
ymd, hms = obj.split(sep, 1)
return datetime.datetime(*[ int(x) for x in ymd.split('-')+hms.split(':') ])
except ValueError:
return convert_date(obj)
def convert_timedelta(obj):
"""Returns a TIME column as a timedelta object:
>>> timedelta_or_None('25:06:17')
datetime.timedelta(1, 3977)
>>> timedelta_or_None('-25:06:17')
datetime.timedelta(-2, 83177)
Illegal values are returned as None:
>>> timedelta_or_None('random crap') is None
True
Note that MySQL always returns TIME columns as (+|-)HH:MM:SS, but
can accept values as (+|-)DD HH:MM:SS. The latter format will not
be parsed correctly by this function.
"""
try:
microseconds = 0
if not isinstance(obj, unicode):
obj = obj.decode(connection.charset)
if "." in obj:
(obj, tail) = obj.split('.')
microseconds = int(tail)
hours, minutes, seconds = obj.split(':')
tdelta = datetime.timedelta(
hours = int(hours),
minutes = int(minutes),
seconds = int(seconds),
microseconds = microseconds
)
return tdelta
except ValueError:
return None
def convert_time(obj):
"""Returns a TIME column as a time object:
>>> time_or_None('15:06:17')
datetime.time(15, 6, 17)
Illegal values are returned as None:
>>> time_or_None('-25:06:17') is None
True
>>> time_or_None('random crap') is None
True
Note that MySQL always returns TIME columns as (+|-)HH:MM:SS, but
can accept values as (+|-)DD HH:MM:SS. The latter format will not
be parsed correctly by this function.
Also note that MySQL's TIME column corresponds more closely to
Python's timedelta and not time. However if you want TIME columns
to be treated as time-of-day and not a time offset, then you can
use set this function as the converter for FIELD_TYPE.TIME.
"""
try:
microseconds = 0
if "." in obj:
(obj, tail) = obj.split('.')
microseconds = int(tail)
hours, minutes, seconds = obj.split(':')
return datetime.time(hour=int(hours), minute=int(minutes),
second=int(seconds), microsecond=microseconds)
except ValueError:
return None
def convert_date(obj):
"""Returns a DATE column as a date object:
>>> date_or_None('2007-02-26')
datetime.date(2007, 2, 26)
Illegal values are returned as None:
>>> date_or_None('2007-02-31') is None
True
>>> date_or_None('0000-00-00') is None
True
"""
try:
return datetime.date(*[ int(x) for x in obj.split('-', 2) ])
except ValueError:
return None
def convert_mysql_timestamp(timestamp):
"""Convert a MySQL TIMESTAMP to a Timestamp object.
MySQL >= 4.1 returns TIMESTAMP in the same format as DATETIME:
>>> mysql_timestamp_converter('2007-02-25 22:32:17')
datetime.datetime(2007, 2, 25, 22, 32, 17)
MySQL < 4.1 uses a big string of numbers:
>>> mysql_timestamp_converter('20070225223217')
datetime.datetime(2007, 2, 25, 22, 32, 17)
Illegal values are returned as None:
>>> mysql_timestamp_converter('2007-02-31 22:32:17') is None
True
>>> mysql_timestamp_converter('00000000000000') is None
True
"""
if timestamp[4] == '-':
return convert_datetime(timestamp)
timestamp += "0"*(14-len(timestamp)) # padding
year, month, day, hour, minute, second = \
int(timestamp[:4]), int(timestamp[4:6]), int(timestamp[6:8]), \
int(timestamp[8:10]), int(timestamp[10:12]), int(timestamp[12:14])
try:
return datetime.datetime(year, month, day, hour, minute, second)
except ValueError:
return None
def convert_set(s):
return set(s.split(","))
def convert_bit(b):
#b = "\x00" * (8 - len(b)) + b # pad w/ zeroes
#return struct.unpack(">Q", b)[0]
#
# the snippet above is right, but MySQLdb doesn't process bits,
# so we shouldn't either
return b
def convert_characters(connection, field, data):
field_charset = charset_by_id(field.charsetnr).name
if field.flags & FLAG.SET:
return convert_set(data.decode(field_charset))
if field.flags & FLAG.BINARY:
return data
if connection.use_unicode:
data = data.decode(field_charset)
elif connection.charset != field_charset:
data = data.decode(field_charset)
data = data.encode(connection.charset)
return data
def convert_int(data):
return int(data)
def convert_long(data):
return long(data)
def convert_float(data):
return float(data)
encoders = {
bool: escape_bool,
int: escape_int,
long: escape_long,
float: escape_float,
str: escape_string,
unicode: escape_unicode,
tuple: escape_sequence,
list:escape_sequence,
set:escape_sequence,
dict:escape_dict,
type(None):escape_None,
datetime.date: escape_date,
datetime.datetime : escape_datetime,
datetime.timedelta : escape_timedelta,
datetime.time : escape_time,
time.struct_time : escape_struct_time,
}
# Map MySQL FIELD_TYPE codes to the converter applied to column values
# coming back from the server.  DECIMAL/NEWDECIMAL default to float and
# are upgraded to Decimal below when the decimal module is available.
decoders = {
    FIELD_TYPE.BIT: convert_bit,
    FIELD_TYPE.TINY: convert_int,
    FIELD_TYPE.SHORT: convert_int,
    FIELD_TYPE.LONG: convert_long,
    FIELD_TYPE.FLOAT: convert_float,
    FIELD_TYPE.DOUBLE: convert_float,
    FIELD_TYPE.DECIMAL: convert_float,
    FIELD_TYPE.NEWDECIMAL: convert_float,
    FIELD_TYPE.LONGLONG: convert_long,
    FIELD_TYPE.INT24: convert_int,
    FIELD_TYPE.YEAR: convert_int,
    FIELD_TYPE.TIMESTAMP: convert_mysql_timestamp,
    FIELD_TYPE.DATETIME: convert_datetime,
    FIELD_TYPE.TIME: convert_timedelta,
    FIELD_TYPE.DATE: convert_date,
    FIELD_TYPE.SET: convert_set,
    FIELD_TYPE.BLOB: convert_characters,
    FIELD_TYPE.TINY_BLOB: convert_characters,
    FIELD_TYPE.MEDIUM_BLOB: convert_characters,
    FIELD_TYPE.LONG_BLOB: convert_characters,
    FIELD_TYPE.STRING: convert_characters,
    FIELD_TYPE.VAR_STRING: convert_characters,
    FIELD_TYPE.VARCHAR: convert_characters,
    #FIELD_TYPE.BLOB: str,
    #FIELD_TYPE.STRING: str,
    #FIELD_TYPE.VAR_STRING: str,
    #FIELD_TYPE.VARCHAR: str
    }
# Alias kept so code written against MySQLdb keeps working.
conversions = decoders # for MySQLdb compatibility
try:
    # python version > 2.3
    # (the decimal module appeared in Python 2.4; older interpreters keep
    # the float fallbacks registered in `decoders` above)
    from decimal import Decimal
    def convert_decimal(data):
        """Convert DECIMAL/NEWDECIMAL column text to a lossless Decimal."""
        return Decimal(data)
    decoders[FIELD_TYPE.DECIMAL] = convert_decimal
    decoders[FIELD_TYPE.NEWDECIMAL] = convert_decimal
    def escape_decimal(obj):
        """Escape a Decimal query parameter as its plain text form."""
        return unicode(obj)
    encoders[Decimal] = escape_decimal
except ImportError:
    pass
| 28.752841 | 84 | 0.624938 |
c9df1a92f3557ac9ffc9f16ba901d22de7c110fe | 16,276 | py | Python | tests/third_party/cupy/creation_tests/test_basic.py | Rubtsowa/dpnp | ef404c0f284b0c508ed1e556e140f02f76ae5551 | [
"BSD-2-Clause"
] | 37 | 2020-09-08T00:38:52.000Z | 2022-03-18T01:44:10.000Z | tests/third_party/cupy/creation_tests/test_basic.py | Rubtsowa/dpnp | ef404c0f284b0c508ed1e556e140f02f76ae5551 | [
"BSD-2-Clause"
] | 432 | 2020-09-07T09:48:41.000Z | 2022-03-25T17:50:55.000Z | tests/third_party/cupy/creation_tests/test_basic.py | Rubtsowa/dpnp | ef404c0f284b0c508ed1e556e140f02f76ae5551 | [
"BSD-2-Clause"
] | 17 | 2020-09-07T10:00:34.000Z | 2022-03-25T13:53:43.000Z | import unittest
import numpy
import pytest
import dpnp as cupy
from tests.third_party.cupy import testing
@testing.gpu
class TestBasic(unittest.TestCase):
    """Array-creation tests (empty/eye/identity/zeros/ones/full and the
    *_like variants) comparing dpnp (imported as ``cupy``) against NumPy.

    Most tests are parameterized over dtypes and memory orders by the
    ``testing`` decorators; ``numpy_cupy_array_equal`` runs the body once
    per backend (``xp``) and compares the returned arrays.
    """
    @testing.for_CF_orders()
    @testing.for_all_dtypes()
    @testing.numpy_cupy_array_equal()
    def test_empty(self, xp, dtype, order):
        # empty() contents are indeterminate, so fill before comparing.
        a = xp.empty((2, 3, 4), dtype=dtype, order=order)
        a.fill(0)
        return a
    @testing.slow
    def test_empty_huge_size(self):
        a = cupy.empty((1024, 2048, 1024), dtype='b')
        a.fill(123)
        self.assertTrue((a == 123).all())
        # Free huge memory for slow test
        del a
        cupy.get_default_memory_pool().free_all_blocks()
    @testing.slow
    def test_empty_huge_size_fill0(self):
        a = cupy.empty((1024, 2048, 1024), dtype='b')
        a.fill(0)
        self.assertTrue((a == 0).all())
        # Free huge memory for slow test
        del a
        cupy.get_default_memory_pool().free_all_blocks()
    @testing.for_CF_orders()
    @testing.for_all_dtypes()
    @testing.numpy_cupy_array_equal()
    def test_empty_scalar(self, xp, dtype, order):
        a = xp.empty(None, dtype=dtype, order=order)
        a.fill(0)
        return a
    @testing.for_CF_orders()
    @testing.for_all_dtypes()
    @testing.numpy_cupy_array_equal()
    def test_empty_int(self, xp, dtype, order):
        a = xp.empty(3, dtype=dtype, order=order)
        a.fill(0)
        return a
    @testing.slow
    def test_empty_int_huge_size(self):
        a = cupy.empty(2 ** 31, dtype='b')
        a.fill(123)
        self.assertTrue((a == 123).all())
        # Free huge memory for slow test
        del a
        cupy.get_default_memory_pool().free_all_blocks()
    @testing.slow
    def test_empty_int_huge_size_fill0(self):
        a = cupy.empty(2 ** 31, dtype='b')
        a.fill(0)
        self.assertTrue((a == 0).all())
        # Free huge memory for slow test
        del a
        cupy.get_default_memory_pool().free_all_blocks()
    @testing.for_orders('C')
    @testing.for_all_dtypes()
    @testing.numpy_cupy_array_equal()
    def test_empty_like(self, xp, dtype, order):
        a = testing.shaped_arange((2, 3, 4), xp, dtype)
        b = xp.empty_like(a, order=order)
        b.fill(0)
        return b
    @testing.for_orders('C')
    @testing.for_all_dtypes()
    @testing.numpy_cupy_array_equal()
    def test_empty_like_contiguity(self, xp, dtype, order):
        a = testing.shaped_arange((2, 3, 4), xp, dtype)
        b = xp.empty_like(a, order=order)
        b.fill(0)
        if order in ['f', 'F']:
            self.assertTrue(b.flags.f_contiguous)
        else:
            self.assertTrue(b.flags.c_contiguous)
        return b
    @testing.for_orders('C')
    @testing.for_all_dtypes()
    @testing.numpy_cupy_array_equal()
    def test_empty_like_contiguity2(self, xp, dtype, order):
        # Fortran-ordered prototype array.
        a = testing.shaped_arange((2, 3, 4), xp, dtype)
        a = xp.asfortranarray(a)
        b = xp.empty_like(a, order=order)
        b.fill(0)
        if order in ['c', 'C']:
            self.assertTrue(b.flags.c_contiguous)
        else:
            self.assertTrue(b.flags.f_contiguous)
        return b
    @testing.for_orders('C')
    @testing.for_all_dtypes()
    @testing.numpy_cupy_array_equal()
    def test_empty_like_contiguity3(self, xp, dtype, order):
        a = testing.shaped_arange((2, 3, 4), xp, dtype)
        # test strides that are both non-contiguous and non-descending
        a = a[:, ::2, :].swapaxes(0, 1)
        b = xp.empty_like(a, order=order)
        b.fill(0)
        if order in ['k', 'K', None]:
            self.assertFalse(b.flags.c_contiguous)
            self.assertFalse(b.flags.f_contiguous)
        elif order in ['f', 'F']:
            self.assertFalse(b.flags.c_contiguous)
            self.assertTrue(b.flags.f_contiguous)
        else:
            self.assertTrue(b.flags.c_contiguous)
            self.assertFalse(b.flags.f_contiguous)
        return b
    @testing.for_all_dtypes()
    def test_empty_like_K_strides(self, dtype):
        # test strides that are both non-contiguous and non-descending
        a = testing.shaped_arange((2, 3, 4), numpy, dtype)
        a = a[:, ::2, :].swapaxes(0, 1)
        b = numpy.empty_like(a, order='K')
        b.fill(0)
        # GPU case
        ag = testing.shaped_arange((2, 3, 4), cupy, dtype)
        ag = ag[:, ::2, :].swapaxes(0, 1)
        bg = cupy.empty_like(ag, order='K')
        bg.fill(0)
        # make sure NumPy and CuPy strides agree
        self.assertEqual(b.strides, bg.strides)
        return
    @testing.for_all_dtypes()
    def test_empty_like_invalid_order(self, dtype):
        for xp in (numpy, cupy):
            a = testing.shaped_arange((2, 3, 4), xp, dtype)
            with pytest.raises(TypeError):
                xp.empty_like(a, order='Q')
    def test_empty_like_subok(self):
        # subok=True is unsupported and must raise.
        a = testing.shaped_arange((2, 3, 4), cupy)
        with pytest.raises(TypeError):
            cupy.empty_like(a, subok=True)
    @testing.for_CF_orders()
    def test_empty_zero_sized_array_strides(self, order):
        a = numpy.empty((1, 0, 2), dtype='d', order=order)
        b = cupy.empty((1, 0, 2), dtype='d', order=order)
        self.assertEqual(b.strides, a.strides)
    @testing.for_all_dtypes()
    @testing.numpy_cupy_array_equal()
    def test_eye(self, xp, dtype):
        return xp.eye(5, 4, 1, dtype)
    @testing.for_all_dtypes()
    @testing.numpy_cupy_array_equal()
    def test_identity(self, xp, dtype):
        return xp.identity(4, dtype)
    @testing.for_CF_orders()
    @testing.for_all_dtypes()
    @testing.numpy_cupy_array_equal()
    def test_zeros(self, xp, dtype, order):
        return xp.zeros((2, 3, 4), dtype=dtype, order=order)
    @testing.for_CF_orders()
    @testing.for_all_dtypes()
    @testing.numpy_cupy_array_equal()
    def test_zeros_scalar(self, xp, dtype, order):
        return xp.zeros(None, dtype=dtype, order=order)
    @testing.for_CF_orders()
    @testing.for_all_dtypes()
    @testing.numpy_cupy_array_equal()
    def test_zeros_int(self, xp, dtype, order):
        return xp.zeros(3, dtype=dtype, order=order)
    @testing.for_CF_orders()
    def test_zeros_strides(self, order):
        a = numpy.zeros((2, 3), dtype='d', order=order)
        b = cupy.zeros((2, 3), dtype='d', order=order)
        self.assertEqual(b.strides, a.strides)
    @testing.for_orders('C')
    @testing.for_all_dtypes()
    @testing.numpy_cupy_array_equal()
    def test_zeros_like(self, xp, dtype, order):
        a = xp.ndarray((2, 3, 4), dtype=dtype)
        return xp.zeros_like(a, order=order)
    def test_zeros_like_subok(self):
        a = cupy.ndarray((2, 3, 4))
        with pytest.raises(TypeError):
            cupy.zeros_like(a, subok=True)
    @testing.for_all_dtypes()
    @testing.numpy_cupy_array_equal()
    def test_ones(self, xp, dtype):
        return xp.ones((2, 3, 4), dtype=dtype)
    @testing.for_orders('C')
    @testing.for_all_dtypes()
    @testing.numpy_cupy_array_equal()
    def test_ones_like(self, xp, dtype, order):
        a = xp.ndarray((2, 3, 4), dtype=dtype)
        return xp.ones_like(a, order=order)
    def test_ones_like_subok(self):
        a = cupy.ndarray((2, 3, 4))
        with pytest.raises(TypeError):
            cupy.ones_like(a, subok=True)
    @testing.for_all_dtypes()
    @testing.numpy_cupy_array_equal()
    def test_full(self, xp, dtype):
        return xp.full((2, 3, 4), 1, dtype=dtype)
    @testing.for_all_dtypes()
    @testing.numpy_cupy_array_equal()
    def test_full_default_dtype(self, xp, dtype):
        # dtype inferred from the fill value's dtype.
        return xp.full((2, 3, 4), xp.array(1, dtype=dtype))
    @testing.for_all_dtypes()
    @testing.numpy_cupy_array_equal()
    def test_full_default_dtype_cpu_input(self, xp, dtype):
        return xp.full((2, 3, 4), numpy.array(1, dtype=dtype))
    @testing.for_orders('C')
    @testing.for_all_dtypes()
    @testing.numpy_cupy_array_equal()
    def test_full_like(self, xp, dtype, order):
        a = xp.ndarray((2, 3, 4), dtype=dtype)
        return xp.full_like(a, 1, order=order)
    def test_full_like_subok(self):
        a = cupy.ndarray((2, 3, 4))
        with pytest.raises(TypeError):
            cupy.full_like(a, 1, subok=True)
@testing.parameterize(
    *testing.product({
        'shape': [4, (4, ), (4, 2), (4, 2, 3), (5, 4, 2, 3)],
    })
)
@testing.gpu
class TestBasicReshape(unittest.TestCase):
    """Creation-routine tests exercising the NumPy>=1.17 ``shape=``
    override of the *_like functions, parameterized over several target
    shapes (``self.shape`` supplied by ``testing.parameterize``).
    """
    @testing.with_requires('numpy>=1.17.0')
    @testing.for_orders('C')
    @testing.for_all_dtypes()
    @testing.numpy_cupy_array_equal()
    def test_empty_like_reshape(self, xp, dtype, order):
        a = testing.shaped_arange((2, 3, 4), xp, dtype)
        b = xp.empty_like(a, order=order, shape=self.shape)
        b.fill(0)
        return b
    @testing.for_CF_orders()
    @testing.for_all_dtypes()
    def test_empty_like_reshape_cupy_only(self, dtype, order):
        a = testing.shaped_arange((2, 3, 4), cupy, dtype)
        b = cupy.empty_like(a, shape=self.shape)
        b.fill(0)
        c = cupy.empty(self.shape, order=order, dtype=dtype)
        c.fill(0)
        testing.assert_array_equal(b, c)
    @testing.with_requires('numpy>=1.17.0')
    @testing.for_orders('C')
    @testing.for_all_dtypes()
    @testing.numpy_cupy_array_equal()
    def test_empty_like_reshape_contiguity(self, xp, dtype, order):
        a = testing.shaped_arange((2, 3, 4), xp, dtype)
        b = xp.empty_like(a, order=order, shape=self.shape)
        b.fill(0)
        if order in ['f', 'F']:
            self.assertTrue(b.flags.f_contiguous)
        else:
            self.assertTrue(b.flags.c_contiguous)
        return b
    @testing.for_orders('C')
    @testing.for_all_dtypes()
    def test_empty_like_reshape_contiguity_cupy_only(self, dtype, order):
        a = testing.shaped_arange((2, 3, 4), cupy, dtype)
        b = cupy.empty_like(a, order=order, shape=self.shape)
        b.fill(0)
        c = cupy.empty(self.shape)
        c.fill(0)
        if order in ['f', 'F']:
            self.assertTrue(b.flags.f_contiguous)
        else:
            self.assertTrue(b.flags.c_contiguous)
        testing.assert_array_equal(b, c)
    @testing.with_requires('numpy>=1.17.0')
    @testing.for_orders('C')
    @testing.for_all_dtypes()
    @testing.numpy_cupy_array_equal()
    def test_empty_like_reshape_contiguity2(self, xp, dtype, order):
        # Fortran-ordered prototype array.
        a = testing.shaped_arange((2, 3, 4), xp, dtype)
        a = xp.asfortranarray(a)
        b = xp.empty_like(a, order=order, shape=self.shape)
        b.fill(0)
        # Normalize a scalar shape to a 1-tuple for ndim comparisons.
        shape = self.shape if not numpy.isscalar(self.shape) else (self.shape,)
        if (order in ['c', 'C'] or
                (order in ['k', 'K', None] and len(shape) != a.ndim)):
            self.assertTrue(b.flags.c_contiguous)
        else:
            self.assertTrue(b.flags.f_contiguous)
        return b
    @testing.for_orders('C')
    @testing.for_all_dtypes()
    def test_empty_like_reshape_contiguity2_cupy_only(self, dtype, order):
        a = testing.shaped_arange((2, 3, 4), cupy, dtype)
        a = cupy.asfortranarray(a)
        b = cupy.empty_like(a, order=order, shape=self.shape)
        b.fill(0)
        c = cupy.empty(self.shape)
        c.fill(0)
        shape = self.shape if not numpy.isscalar(self.shape) else (self.shape,)
        if (order in ['c', 'C'] or
                (order in ['k', 'K', None] and len(shape) != a.ndim)):
            self.assertTrue(b.flags.c_contiguous)
        else:
            self.assertTrue(b.flags.f_contiguous)
        testing.assert_array_equal(b, c)
    @testing.with_requires('numpy>=1.17.0')
    @testing.for_orders('C')
    @testing.for_all_dtypes()
    @testing.numpy_cupy_array_equal()
    def test_empty_like_reshape_contiguity3(self, xp, dtype, order):
        a = testing.shaped_arange((2, 3, 4), xp, dtype)
        # test strides that are both non-contiguous and non-descending
        a = a[:, ::2, :].swapaxes(0, 1)
        b = xp.empty_like(a, order=order, shape=self.shape)
        b.fill(0)
        shape = self.shape if not numpy.isscalar(self.shape) else (self.shape,)
        if len(shape) == 1:
            self.assertTrue(b.flags.c_contiguous)
            self.assertTrue(b.flags.f_contiguous)
        elif order in ['k', 'K', None] and len(shape) == a.ndim:
            self.assertFalse(b.flags.c_contiguous)
            self.assertFalse(b.flags.f_contiguous)
        elif order in ['f', 'F']:
            self.assertFalse(b.flags.c_contiguous)
            self.assertTrue(b.flags.f_contiguous)
        else:
            self.assertTrue(b.flags.c_contiguous)
            self.assertFalse(b.flags.f_contiguous)
        return b
    @testing.for_orders('C')
    @testing.for_all_dtypes()
    def test_empty_like_reshape_contiguity3_cupy_only(self, dtype, order):
        a = testing.shaped_arange((2, 3, 4), cupy, dtype)
        # test strides that are both non-contiguous and non-descending
        a = a[:, ::2, :].swapaxes(0, 1)
        b = cupy.empty_like(a, order=order, shape=self.shape)
        b.fill(0)
        shape = self.shape if not numpy.isscalar(self.shape) else (self.shape,)
        if len(shape) == 1:
            self.assertTrue(b.flags.c_contiguous)
            self.assertTrue(b.flags.f_contiguous)
        elif order in ['k', 'K', None] and len(shape) == a.ndim:
            self.assertFalse(b.flags.c_contiguous)
            self.assertFalse(b.flags.f_contiguous)
        elif order in ['f', 'F']:
            self.assertFalse(b.flags.c_contiguous)
            self.assertTrue(b.flags.f_contiguous)
        else:
            self.assertTrue(b.flags.c_contiguous)
            self.assertFalse(b.flags.f_contiguous)
        c = cupy.zeros(self.shape)
        c.fill(0)
        testing.assert_array_equal(b, c)
    @testing.with_requires('numpy>=1.17.0')
    @testing.for_all_dtypes()
    def test_empty_like_K_strides_reshape(self, dtype):
        # test strides that are both non-contiguous and non-descending
        a = testing.shaped_arange((2, 3, 4), numpy, dtype)
        a = a[:, ::2, :].swapaxes(0, 1)
        b = numpy.empty_like(a, order='K', shape=self.shape)
        b.fill(0)
        # GPU case
        ag = testing.shaped_arange((2, 3, 4), cupy, dtype)
        ag = ag[:, ::2, :].swapaxes(0, 1)
        bg = cupy.empty_like(ag, order='K', shape=self.shape)
        bg.fill(0)
        # make sure NumPy and CuPy strides agree
        self.assertEqual(b.strides, bg.strides)
        return
    @testing.with_requires('numpy>=1.17.0')
    @testing.for_orders('C')
    @testing.for_all_dtypes()
    @testing.numpy_cupy_array_equal()
    def test_zeros_like_reshape(self, xp, dtype, order):
        a = xp.ndarray((2, 3, 4), dtype=dtype)
        return xp.zeros_like(a, order=order, shape=self.shape)
    @testing.for_CF_orders()
    @testing.for_all_dtypes()
    def test_zeros_like_reshape_cupy_only(self, dtype, order):
        a = testing.shaped_arange((2, 3, 4), cupy, dtype)
        b = cupy.zeros_like(a, shape=self.shape)
        c = cupy.zeros(self.shape, order=order, dtype=dtype)
        testing.assert_array_equal(b, c)
    @testing.with_requires('numpy>=1.17.0')
    @testing.for_orders('C')
    @testing.for_all_dtypes()
    @testing.numpy_cupy_array_equal()
    def test_ones_like_reshape(self, xp, dtype, order):
        a = xp.ndarray((2, 3, 4), dtype=dtype)
        return xp.ones_like(a, order=order, shape=self.shape)
    @testing.for_all_dtypes()
    def test_ones_like_reshape_cupy_only(self, dtype):
        a = testing.shaped_arange((2, 3, 4), cupy, dtype)
        b = cupy.ones_like(a, shape=self.shape)
        c = cupy.ones(self.shape, dtype=dtype)
        testing.assert_array_equal(b, c)
    @testing.with_requires('numpy>=1.17.0')
    @testing.for_orders('C')
    @testing.for_all_dtypes()
    @testing.numpy_cupy_array_equal()
    def test_full_like_reshape(self, xp, dtype, order):
        a = xp.ndarray((2, 3, 4), dtype=dtype)
        return xp.full_like(a, 1, order=order, shape=self.shape)
    @testing.for_all_dtypes()
    def test_full_like_reshape_cupy_only(self, dtype):
        a = testing.shaped_arange((2, 3, 4), cupy, dtype)
        b = cupy.full_like(a, 1, shape=self.shape)
        c = cupy.full(self.shape, 1, dtype=dtype)
        testing.assert_array_equal(b, c)
| 35.077586 | 79 | 0.616921 |
c105406aa62095dec4a0e28a49c3206650f839cc | 9,084 | py | Python | src/panoptes/utils/utils.py | danjampro/panoptes-utils | ff51019cdd0e188cf5e8d8d70fc3579776a31716 | [
"MIT"
] | null | null | null | src/panoptes/utils/utils.py | danjampro/panoptes-utils | ff51019cdd0e188cf5e8d8d70fc3579776a31716 | [
"MIT"
] | null | null | null | src/panoptes/utils/utils.py | danjampro/panoptes-utils | ff51019cdd0e188cf5e8d8d70fc3579776a31716 | [
"MIT"
] | null | null | null | import collections.abc
import contextlib
import os
import re
import shutil
import signal
from astropy import units as u
from astropy.coordinates import AltAz
from astropy.coordinates import ICRS
from astropy.coordinates import SkyCoord
from panoptes.utils.time import current_time
PATH_MATCHER = re.compile(r'''
.*?
(?P<unit_id>PAN\d{3})[/_]{1}
(?P<camera_id>[a-gA-G0-9]{6})[/_]{1}
(?P<sequence_id>[0-9]{8}T[0-9]{6})[/_]{1}
(?P<image_id>[0-9]{8}T[0-9]{6})
.*?
''', re.VERBOSE)
def listify(obj):
    """Return *obj* coerced to a plain list.

    ``None`` becomes an empty list, an existing list is returned
    unchanged, a ``dict`` becomes a list of *only* its values, a dict
    keys/values view is materialized as a list, and anything else is
    wrapped in a single-element list.

    .. doctest::

        >>> listify(None)
        []
        >>> listify('foo')
        ['foo']
        >>> listify(['a'])
        ['a']
        >>> listify({'a': 42, 'b': 'foo'})
        [42, 'foo']

    Returns:
        list: You guessed it.
    """
    if obj is None:
        return []
    if isinstance(obj, list):
        return obj
    if isinstance(obj, dict):
        return list(obj.values())
    if isinstance(obj, (collections.abc.KeysView, collections.abc.ValuesView)):
        return list(obj)
    return [obj]
def get_free_space(directory=None):
    """Return the amount of free space in the given directory, in gigabytes.

    >>> from panoptes.utils.utils import get_free_space
    >>> get_free_space()
    <Quantity ... Gbyte>

    Args:
        directory (str, optional): Path to check; defaults to the filesystem root.

    Returns:
        astropy.units.Quantity: The number of gigabytes available in the folder.
    """
    path = directory if directory is not None else os.path.abspath('/')
    usage = shutil.disk_usage(path)
    return (usage.free * u.byte).to(u.gigabyte)
def string_to_params(opts):
    """Parses a single string into parameters that can be passed to a function.

    A user of the `peas_shell` can supply positional and keyword arguments to the
    command being called, however the `Cmd` module that is used for the shell does
    not parse these options but instead passes this as a single string. This utility
    method does some simple parsing of that string and returns a list of positional
    parameters and a dictionary of keyword arguments. A keyword argument is considered
    anything that contains an equal sign (e.g. `exptime=30`). Any leading `--` to
    a keyword argument will be stripped during parsing.

    A list of items can be passed by specifying the keyword argument multiple times.

    Note:
        This function will attempt to parse keyword values as floats if possible.
        If a string is required include a single quote around the value, e.g.
        `param='42'` will keep the value as the string `'42'`.

    >>> args, kwargs = string_to_params("parg1 parg2 key1=a_str key2=2 key2='2' key3=03")
    >>> args
    ['parg1', 'parg2']
    >>> kwargs
    {'key1': 'a_str', 'key2': [2.0, '2'], 'key3': 3.0}

    >>> # Runs of whitespace no longer produce empty positional args.
    >>> string_to_params('parg1  parg2')[0]
    ['parg1', 'parg2']

    Args:
        opts (str): A single string containing everything beyond the actual
            command that is called.

    Returns:
        tuple(list, dict): Returns a list of positional parameters and a dictionary
            of keyword arguments. These correspond to the *args and **kwargs that
            a typical function would receive.
    """
    args = []
    kwargs = {}

    # Use str.split() with no separator: consecutive (or leading/trailing)
    # whitespace is collapsed instead of yielding empty-string arguments,
    # and an empty `opts` yields no parameters at all.
    for opt in opts.split():
        if '=' not in opt:
            args.append(opt)
        else:
            name, value = opt.split('=', maxsplit=1)
            # Strip an option-style '--' prefix from the keyword name.
            if name.startswith('--') and len(name) > 2:
                name = name[2:]

            if "'" in value:
                # Remove the explicit single quotes, keeping the value a string.
                value = value.replace("'", "")
            else:
                # Make it a number if possible.
                with contextlib.suppress(ValueError):
                    value = float(value)

            if name in kwargs:
                # Repeated keyword: accumulate all values into a list.
                kwargs[name] = listify(kwargs[name])
                kwargs[name].append(value)
            else:
                kwargs[name] = value

    return args, kwargs
def altaz_to_radec(alt=None, az=None, location=None, obstime=None, **kwargs):
    """Convert an alt/az pointing (degrees) into an ICRS RA/Dec SkyCoord.

    >>> from panoptes.utils.utils import altaz_to_radec
    >>> from astropy.coordinates import EarthLocation
    >>> keck = EarthLocation.of_site('Keck Observatory')
    ...
    >>> altaz_to_radec(alt=75, az=180, location=keck, obstime='2020-02-02T20:20:02.02')
    <SkyCoord (ICRS): (ra, dec) in deg
        (281.78..., 4.807...)>

    Args:
        alt (astropy.units.Quantity or scalar): Altitude; scalars are degrees.
        az (astropy.units.Quantity or scalar): Azimuth; scalars are degrees.
        location (astropy.coordinates.EarthLocation, required): A valid location.
        obstime (None, optional): Time for object, defaults to `current_time`.

    Returns:
        astropy.coordinates.SkyCoord: Coordinates corresponding to the AltAz.
    """
    assert location is not None
    if obstime is None:
        obstime = current_time()

    # Normalize scalars/quantities to degree quantities.
    alt_deg = get_quantity_value(alt, 'degree') * u.degree
    az_deg = get_quantity_value(az, 'degree') * u.degree

    altaz_frame = AltAz(obstime=obstime, location=location, alt=alt_deg, az=az_deg)
    return SkyCoord(altaz_frame.transform_to(ICRS))
class DelaySigTerm(contextlib.ContextDecorator):
    """Supports delaying SIGTERM during a critical section.

    This allows one to avoid having SIGTERM interrupt a
    critical block of code, such as saving to a database.

    Example::

        with DelaySigTerm():
            db.WriteCurrentRecord(record)
    """
    # TODO(jamessynge): Consider generalizing as DelaySignal(signum).
    def __enter__(self, callback=None):
        """
        Args:
            callback: If not None, called when SIGTERM is handled,
                with kwargs previously_caught and frame.
        """
        self.caught = False
        # Save the current handler so it can be restored in __exit__.
        self.old_handler = signal.getsignal(signal.SIGTERM)
        if callback:
            assert callable(callback)
            self.callback = callback
        else:
            self.callback = None
        def handler(signum, frame):
            # Record that SIGTERM arrived instead of terminating; the
            # signal is re-sent to this process in __exit__.
            previously_caught = self.caught
            self.caught = True
            if self.callback:
                self.callback(previously_caught=previously_caught, frame=frame)
        signal.signal(signal.SIGTERM, handler)
        return self
    def __exit__(self, *exc):
        # Restore the original handler, then re-deliver any delayed SIGTERM.
        signal.signal(signal.SIGTERM, self.old_handler)
        if self.caught:
            # Send SIGTERM to this process.
            os.kill(os.getpid(), signal.SIGTERM)
            # Suppress any exception caught while the context was running.
            # NOTE(review): returning True suppresses *any* exception raised in
            # the block whenever a SIGTERM was delayed, not just the signal
            # itself — confirm this is intended.
            return True
        return False
def get_quantity_value(quantity, unit=None):
    """ Thin-wrapper around the `astropy.units.Quantity.to_value` method.

    Anything without a ``to_value`` method (i.e. a plain scalar) is
    returned unchanged; otherwise the value is converted to *unit* first.

    >>> from panoptes.utils.utils import get_quantity_value
    >>> get_quantity_value(60)
    60

    Args:
        quantity (astropy.units.Quantity or scalar): Quantity to extract
            the numerical value from.
        unit (astropy.units.Unit, optional): Unit to convert to first.

    Returns:
        float: Numerical value of the Quantity after conversion to *unit*.
    """
    to_value = getattr(quantity, 'to_value', None)
    if to_value is None:
        return quantity
    return to_value(unit)
| 30.179402 | 94 | 0.615808 |
7c24efa94d25e5af56766768ab2a83f6b2447782 | 4,598 | py | Python | rt_gene/rt_gene_model_training/pytorch/utils/GenerateRTGENEH5Dataset.py | martinhoang11/OpenSeeFace | c6c9b8c6c00e0e674bce781f2e372fe9109fe44b | [
"BSD-2-Clause"
] | 1 | 2021-03-23T04:58:02.000Z | 2021-03-23T04:58:02.000Z | rt_gene/rt_gene_model_training/pytorch/utils/GenerateRTGENEH5Dataset.py | martinhoang11/OpenSeeFace | c6c9b8c6c00e0e674bce781f2e372fe9109fe44b | [
"BSD-2-Clause"
] | null | null | null | rt_gene/rt_gene_model_training/pytorch/utils/GenerateRTGENEH5Dataset.py | martinhoang11/OpenSeeFace | c6c9b8c6c00e0e674bce781f2e372fe9109fe44b | [
"BSD-2-Clause"
] | null | null | null | from __future__ import print_function, division, absolute_import
import argparse
import os
import h5py
import numpy as np
from PIL import Image, ImageFilter, ImageOps
from torchvision import transforms
from tqdm import tqdm
script_path = os.path.dirname(os.path.realpath(__file__))
# Augmentations following `prepare_dataset.m`: randomly crop and resize the image 10 times,
# along side two blurring stages, grayscaling and histogram normalisation
# All transforms below operate on (and return) PIL images at this size.
_required_size = (224, 224)
_transforms_list = [transforms.RandomResizedCrop(size=_required_size, scale=(0.85, 1.0)),  # equivalent to random 5px from each edge
                    transforms.RandomResizedCrop(size=_required_size, scale=(0.85, 1.0)),
                    transforms.RandomResizedCrop(size=_required_size, scale=(0.85, 1.0)),
                    transforms.RandomResizedCrop(size=_required_size, scale=(0.85, 1.0)),
                    transforms.RandomResizedCrop(size=_required_size, scale=(0.85, 1.0)),
                    transforms.RandomResizedCrop(size=_required_size, scale=(0.85, 1.0)),
                    transforms.RandomResizedCrop(size=_required_size, scale=(0.85, 1.0)),
                    transforms.RandomResizedCrop(size=_required_size, scale=(0.85, 1.0)),
                    transforms.RandomResizedCrop(size=_required_size, scale=(0.85, 1.0)),
                    transforms.RandomResizedCrop(size=_required_size, scale=(0.85, 1.0)),
                    transforms.Grayscale(num_output_channels=3),
                    lambda x: x.filter(ImageFilter.GaussianBlur(radius=1)),
                    lambda x: x.filter(ImageFilter.GaussianBlur(radius=3)),
                    lambda x: ImageOps.equalize(x)]  # histogram equalisation
def load_and_augment(file_path, augment=False):
    """Load an image resized to ``_required_size``; optionally prepend augmented variants.

    Returns a ``uint8`` array stacking ``len(_transforms_list)`` augmented
    copies (when ``augment`` is True) followed by the original image, or
    just the original image when ``augment`` is False.
    """
    image = Image.open(file_path).resize(_required_size)
    # Apply every predefined transform to the same source image.
    augmented_images = [np.array(trans(image)) for trans in _transforms_list if augment is True]
    # The unmodified image is always the last entry.
    augmented_images.append(np.array(image))
    return np.array(augmented_images, dtype=np.uint8)
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Estimate gaze from images')
    parser.add_argument('--rt_gene_root', type=str, required=True, nargs='?', help='Path to the base directory of RT_GENE')
    parser.add_argument('--augment_dataset', type=bool, required=False, default=False, help="Whether to augment the dataset with predefined transforms")
    parser.add_argument('--compress', action='store_true', dest="compress")
    parser.add_argument('--no-compress', action='store_false', dest="compress")
    parser.set_defaults(compress=False)
    args = parser.parse_args()
    # Optional HDF5 dataset compression (lzf) when --compress is given.
    _compression = "lzf" if args.compress is True else None
    # One folder per subject: s000_glasses ... s016_glasses.
    subject_path = [os.path.join(args.rt_gene_root, "s{:03d}_glasses/".format(_i)) for _i in range(0, 17)]
    hdf_file = h5py.File(os.path.abspath(os.path.join(args.rt_gene_root, 'rtgene_dataset.hdf5')), mode='w')
    for subject_id, subject_data in enumerate(subject_path):
        subject_id = str("s{:03d}".format(subject_id))
        subject_grp = hdf_file.create_group(subject_id)
        with open(os.path.join(subject_data, "label_combined.txt"), "r") as f:
            _lines = f.readlines()
        for line in tqdm(_lines, desc="Subject {}".format(subject_id)):
            # Label format per line: index, (head angles), (gaze angles)
            # — the [1:]/[:-1] slicing below strips the surrounding brackets.
            split = line.split(",")
            image_name = "{:0=6d}".format(int(split[0]))
            image_grp = subject_grp.create_group(image_name)
            left_img_path = os.path.join(subject_data, "inpainted/left_new/", "left_{:0=6d}_rgb.png".format(int(split[0])))
            right_img_path = os.path.join(subject_data, "inpainted/right_new/", "right_{:0=6d}_rgb.png".format(int(split[0])))
            if os.path.exists(left_img_path) and os.path.exists(right_img_path):
                head_phi = float(split[1].strip()[1:])
                head_theta = float(split[2].strip()[:-1])
                gaze_phi = float(split[3].strip()[1:])
                gaze_theta = float(split[4].strip()[:-1])
                labels = [(head_theta, head_phi), (gaze_theta, gaze_phi)]
                left_data = load_and_augment(left_img_path, augment=args.augment_dataset)
                right_data = load_and_augment(right_img_path, augment=args.augment_dataset)
                image_grp.create_dataset("left", data=left_data, compression=_compression)
                image_grp.create_dataset("right", data=right_data, compression=_compression)
                image_grp.create_dataset("label", data=labels)
    hdf_file.flush()
    hdf_file.close()
| 55.39759 | 152 | 0.663114 |
40af638a9dd18f8d6736bc3d8a1e1801d49140db | 2,470 | py | Python | students/k3343/laboratory_works/Kozyreva_Alyona/lab_work1/lr1/lrapp/views.py | AlyonaKozyr/ITMO_ICT_WebProgramming_2020 | b89ec06f7d5f2cd60b509f2850df412732ccc615 | [
"MIT"
] | null | null | null | students/k3343/laboratory_works/Kozyreva_Alyona/lab_work1/lr1/lrapp/views.py | AlyonaKozyr/ITMO_ICT_WebProgramming_2020 | b89ec06f7d5f2cd60b509f2850df412732ccc615 | [
"MIT"
] | null | null | null | students/k3343/laboratory_works/Kozyreva_Alyona/lab_work1/lr1/lrapp/views.py | AlyonaKozyr/ITMO_ICT_WebProgramming_2020 | b89ec06f7d5f2cd60b509f2850df412732ccc615 | [
"MIT"
] | null | null | null | from django.shortcuts import render, get_object_or_404
from django.http import Http404, HttpResponse, HttpResponseRedirect
from django.views.generic.list import ListView
from django.views.generic.edit import CreateView, FormView
import datetime
from .models import Homework, Student, Comment
from .forms import RegUserForm, StudentForm, CommentForm
from django.contrib.auth.models import User
from django.urls import reverse_lazy
from django.contrib.auth import authenticate, login
def start(request):
    """Render the static start page."""
    return render(request, 'start.html')
def index(request):
    """Render the static index page."""
    return render(request, 'index.html')
def board(request):
    """Display all homework entries."""
    # Template context: every Homework row under the "homeworks" key.
    homeworks = {}
    homeworks["homeworks"] = Homework.objects.all()
    return render(request, "board.html", homeworks)
def comments_list(request):
    """Display all comments."""
    # Template context: every Comment row under the "comments" key.
    comments = {}
    comments["comments"] = Comment.objects.all()
    return render(request, "comments.html", comments)
class RegUserView(CreateView):  # user-registration view
    """Create a ``User`` via RegUserForm, then log the new user in."""
    model = User
    template_name = 'reg_user.html'
    form_class = RegUserForm
    success_url = reverse_lazy('reg_name')
    def form_valid(self,form):
        """After the user is saved, authenticate with the submitted
        credentials and start a session for the request."""
        form_valid = super().form_valid(form)
        username = form.cleaned_data["username"]
        password = form.cleaned_data["password"]
        aut_user = authenticate(username=username,password=password)
        login(self.request, aut_user)
        return form_valid
def reg_name(request):
    """Show and process the student-registration form (StudentForm)."""
    # dictionary for initial data with
    # field names as keys
    context = {}
    # add the dictionary during initialization
    form = StudentForm(
        request.POST or None)  # create a form instance, feeding it the data submitted from the browser form fields
    if form.is_valid():  # check the form for correctness (validation)
        form.save()
        return HttpResponseRedirect(reverse_lazy('index'))
    context['form'] = form
    return render(request, "reg_name.html", context)
def comment(request):
    """Show and process the comment form (CommentForm)."""
    # dictionary for initial data with
    # field names as keys
    context = {}
    # add the dictionary during initialization
    form = CommentForm(
        request.POST or None)  # create a form instance, feeding it the data submitted from the browser form fields
    if form.is_valid():  # check the form for correctness (validation)
        form.save()
        return HttpResponseRedirect(reverse_lazy('index'))
    context['form'] = form
    return render(request, "comment.html", context)
| 30.875 | 111 | 0.708097 |
e46ddafc0bee388e635c52fa8d62668e26bee40c | 5,108 | py | Python | tests/test_modules/test_pandablocks/test_pandablocksclient.py | MattTaylorDLS/pymalcolm | 995a8e4729bd745f8f617969111cc5a34ce1ac14 | [
"Apache-2.0"
] | null | null | null | tests/test_modules/test_pandablocks/test_pandablocksclient.py | MattTaylorDLS/pymalcolm | 995a8e4729bd745f8f617969111cc5a34ce1ac14 | [
"Apache-2.0"
] | null | null | null | tests/test_modules/test_pandablocks/test_pandablocksclient.py | MattTaylorDLS/pymalcolm | 995a8e4729bd745f8f617969111cc5a34ce1ac14 | [
"Apache-2.0"
] | null | null | null | from collections import OrderedDict
import unittest
from mock import call, Mock
from malcolm.modules.pandablocks.controllers.pandablocksclient import \
PandABlocksClient, FieldData, BlockData
class PandABoxControlTest(unittest.TestCase):
    def setUp(self):
        # Client under test; host/port are dummies since the socket is mocked.
        self.c = PandABlocksClient("h", "p")
    def start(self, messages=None):
        """Start the client with a Mock socket, queueing *messages* as
        successive ``recv()`` return values."""
        self.socket = Mock()
        if messages:
            self.socket.recv.side_effect = messages
        def socket_cls():
            # Factory injected into the client so it uses our mock socket.
            return self.socket
        self.c.start(socket_cls=socket_cls)
    def tearDown(self):
        # Stop the client if a test left it running.
        if self.c.started:
            self.c.stop()
    def test_multiline_response_good(self):
        """A '!'-prefixed multiline response split across recv() chunks is
        reassembled and terminated by the '.' line (trailing bytes ignored)."""
        messages = ["!TTLIN 6\n", "!OUTENC 4\n!CAL", "C 2\n.\nblah"]
        self.start(messages)
        resp = list(self.c.send_recv(""))
        self.c.stop()
        expected = ["TTLIN 6", "OUTENC 4", "CALC 2"]
        assert resp == expected
    def test_two_resp(self):
        """Two single-line 'OK' responses are returned by successive calls."""
        messages = ["OK =mm\n", "OK =232\n"]
        self.start(messages)
        assert self.c.send_recv("") == "OK =mm"
        assert self.c.send_recv("") == "OK =232"
    def test_bad_good(self):
        """An 'ERR' response raises ValueError without breaking later calls."""
        messages = ["ERR Invalid bit value\n", "OK =232\n"]
        self.start(messages)
        with self.assertRaises(ValueError):
            self.c.send_recv("")
        assert self.c.send_recv("") == "OK =232"
def test_block_data(self):
messages = [
"!TTLIN 6\n!TTLOUT 10\n.\n",
"OK =TTL input\n",
"OK =TTL output\n",
"!VAL 1 pos_out\n!TERM 0 param enum\n.\n",
"!VAL 0 bit_mux\n.\n",
"OK =TTL termination\n",
"OK =TTL input value\n",
"!High-Z\n!50-Ohm\n.\n",
"!Average\n!No\n.\n",
"OK =TTL output value\n",
"!ZERO\n!TTLIN1.VAL\n!TTLIN2.VAL\n.\n",
]
self.start(messages)
block_data = self.c.get_blocks_data()
self.c.stop()
assert self.socket.send.call_args_list == [
call("*BLOCKS?\n"),
call("*DESC.TTLIN?\n"),
call("*DESC.TTLOUT?\n"),
call("TTLIN.*?\n"),
call("TTLOUT.*?\n"),
call("*DESC.TTLIN.TERM?\n"),
call("*DESC.TTLIN.VAL?\n"),
call("*ENUMS.TTLIN.TERM?\n"),
call("*ENUMS.TTLIN.VAL.CAPTURE?\n"),
call("*DESC.TTLOUT.VAL?\n"),
call("*ENUMS.TTLOUT.VAL?\n"),
]
assert list(block_data) == ["TTLIN", "TTLOUT"]
in_fields = OrderedDict()
in_fields["TERM"] = FieldData("param", "enum", "TTL termination",
["High-Z", "50-Ohm"])
in_fields["VAL"] = FieldData("pos_out", "", "TTL input value",
["Average", "No"])
assert block_data["TTLIN"] == (
BlockData(6, "TTL input", in_fields))
out_fields = OrderedDict()
out_fields["VAL"] = FieldData("bit_mux", "", "TTL output value", [
"ZERO", "TTLIN1.VAL", "TTLIN2.VAL"])
assert block_data["TTLOUT"] == (
BlockData(10, "TTL output", out_fields))
def test_changes(self):
messages = ["""!PULSE0.WIDTH=1.43166e+09
!PULSE1.WIDTH=1.43166e+09
!PULSE2.WIDTH=1.43166e+09
!PULSE3.WIDTH=1.43166e+09
!SEQ1.TABLE<
!PULSE0.INP (error)
!PULSE1.INP (error)
!PULSE2.INP (error)
!PULSE3.INP (error)
.
""","""!1
!2
!3
.
"""]
self.start(messages)
changes = self.c.get_changes()
self.c.stop()
assert self.socket.send.call_args_list == [
call("*CHANGES?\n"), call("SEQ1.TABLE?\n")]
expected = OrderedDict()
expected["PULSE0.WIDTH"] = "1.43166e+09"
expected["PULSE1.WIDTH"] = "1.43166e+09"
expected["PULSE2.WIDTH"] = "1.43166e+09"
expected["PULSE3.WIDTH"] = "1.43166e+09"
expected["SEQ1.TABLE"] = ["1", "2", "3"]
expected["PULSE0.INP"] = Exception
expected["PULSE1.INP"] = Exception
expected["PULSE2.INP"] = Exception
expected["PULSE3.INP"] = Exception
assert changes == expected
def test_set_field(self):
messages = "OK\n"
self.start(messages)
self.c.set_field("PULSE0", "WIDTH", 0)
self.c.stop()
self.socket.send.assert_called_once_with("PULSE0.WIDTH=0\n")
def test_set_table(self):
messages = "OK\n"
self.start(messages)
self.c.set_table("SEQ1", "TABLE", [1, 2, 3])
self.c.stop()
self.socket.send.assert_called_once_with("""SEQ1.TABLE<
1
2
3
""")
def test_table_fields(self):
messages = """!31:0 REPEATS
!32:32 USE_INPA
!64:54 STUFF
!37:37 INPB
.
"""
self.start(messages)
fields = self.c.get_table_fields("SEQ1", "TABLE")
self.c.stop()
self.socket.send.assert_called_once_with("SEQ1.TABLE.FIELDS?\n")
expected = OrderedDict()
expected["REPEATS"] = (31, 0)
expected["USE_INPA"] = (32, 32)
expected["STUFF"] = (64, 54)
expected["INPB"] = (37, 37)
assert fields == expected
| 31.726708 | 74 | 0.542091 |
9cce13e868ea678b92139556121c05dd54c44db3 | 4,621 | py | Python | packages/dataflow/new_cq_attempts.py | asdfghjjklllllaaa/infra | 8f63af54e46194cd29291813f2790ff6e986804d | [
"BSD-3-Clause"
] | 1 | 2020-11-11T06:25:13.000Z | 2020-11-11T06:25:13.000Z | packages/dataflow/new_cq_attempts.py | asdfghjjklllllaaa/infra | 8f63af54e46194cd29291813f2790ff6e986804d | [
"BSD-3-Clause"
] | 21 | 2020-09-06T02:41:05.000Z | 2022-03-02T04:40:01.000Z | packages/dataflow/new_cq_attempts.py | asdfghjjklllllaaa/infra | 8f63af54e46194cd29291813f2790ff6e986804d | [
"BSD-3-Clause"
] | null | null | null | # Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import apache_beam as beam
from dataflow import cq_attempts as sanitize_cq_attempts
from dataflow.common import chops_beam
class ExtractBuildBucketIdFn(beam.DoFn):
  """DoFn: fans a keyed CQ attempt out to one element per BuildBucket id.

  Input:  (cq_attempt_key, cq_attempt_dict)
  Output: (str(buildbucket_id), cq_attempt_key) for every id listed in the
  attempt's 'contributing_bbucket_ids'; attempts without that field (or with
  an empty list) emit nothing.
  """
  def process(self, cq_attempt_with_key):
    # For a CQ attempt, we create one row for each contributing BuildBucket id.
    key = cq_attempt_with_key[0]
    cq_attempt_dict = cq_attempt_with_key[1]
    bb_ids = cq_attempt_dict.get('contributing_bbucket_ids')
    if bb_ids:
      for bb_id in bb_ids:
        # str() so the key matches bb_entry_keyed_by_bb_id in process_input.
        yield str(bb_id), key
class FilterJoinedBuildBucketCQAttempt(beam.DoFn):
  """DoFn: re-keys a CoGroupByKey join result from BuildBucket id to CQ key.

  Input:  (buildbucket_id, {'cq_attempt_key': [...], 'bb_entries': [...]})
  Output: (cq_attempt_key, bb_entry) -- emitted only for clean 1:1 joins;
  anything else (missing or duplicated sides) is dropped silently.
  """
  def process(self, joined_result):
    # The key is BuildBucket ID. We expect there to be exactly 1 cq_attempt, and
    # up to 1 BuildBucket entry.
    cq_attempt_key = joined_result[1]['cq_attempt_key']
    bb_entry = joined_result[1]['bb_entries']
    if len(bb_entry) != 1 or len(cq_attempt_key) != 1:
      return
    yield cq_attempt_key[0], bb_entry[0]
def update_with_presubmit_failure(input_tuple):
  """Reclassifies a FAILED_JOBS attempt whose only failures were presubmit.

  Args:
    input_tuple: (key, {'cq_attempts': [attempt_dict],
                        'bb_entries': [bb_result, ...]}).

  If the single attempt failed with 'FAILED_JOBS' and every non-SUCCESS
  BuildBucket result came from the 'chromium_presubmit' builder, the
  attempt's fail_type is rewritten to 'FAILED_PRESUBMIT_BOT'.
  """
  _, grouped = input_tuple
  attempts = grouped['cq_attempts']
  assert len(attempts) == 1, "There must be 1 cq_attempt."
  attempt = attempts[0]
  if attempt['fail_type'] == 'FAILED_JOBS':
    def is_presubmit_failure(result):
      return (result['status'] == 'FAILURE' and
              result['builder'] == 'chromium_presubmit')
    entries = grouped['bb_entries']
    presubmit_failures = sum(1 for r in entries if is_presubmit_failure(r))
    other_failures = sum(
        1 for r in entries
        if not is_presubmit_failure(r) and r['status'] != 'SUCCESS')
    if presubmit_failures >= 1 and other_failures == 0:
      attempt['fail_type'] = 'FAILED_PRESUBMIT_BOT'
  # Dictionaries are supposed to be returned in a single element list.
  return [attempt]
def process_input(cq_events_pcol, bb_entries_pcol):
  """Sets up the pipeline stages to return aggregated cq attempts pcol.
  This function performs two tasks:
  1) Computes CQ attempts from raw CQ events. This includes data sanitization.
  2) If a CQ attempt fails only because of 'chromium_presubmit' builder, sets
  the failure status to 'FAILED_PRESUBMIT_BOT'.
  """
  # Pcol of cq_attempt_as_dict
  sanitized_cq_attempts = (
      cq_events_pcol | sanitize_cq_attempts.ComputeAttempts())
  # Create Pcol of tuples: (cq_attempt_key, cq_attempt_as_dict)
  def extract_key(cq_attempt_dict):
    # Key is "start:cq_name:issue:patchset" -- unique per attempt.
    key_parts = [
        cq_attempt_dict.get('attempt_start_msec'),
        cq_attempt_dict.get('cq_name'),
        cq_attempt_dict.get('issue'),
        cq_attempt_dict.get('patchset')
    ]
    # NOTE(review): str(part) is truthy even for None ('None'), so the
    # `or ''` fallback never fires -- possibly meant `str(part) if part
    # else ''`. Confirm before changing, keys must stay stable.
    key = ':'.join([str(part) or '' for part in key_parts])
    return key, cq_attempt_dict
  cq_attempts_with_key = sanitized_cq_attempts | beam.Map(extract_key)
  # Create Pcol of tuples: (build_bucket_id, cq_attempt_key)
  cq_attempt_key_keyed_by_bb_id = cq_attempts_with_key | beam.ParDo(
      ExtractBuildBucketIdFn())
  # Create Pcol of tuples: (build_bucket_id, build_bucket_entry)
  bb_entry_keyed_by_bb_id = bb_entries_pcol | beam.Map(
      lambda e: (str(e.get('id')), e))
  # Create Pcol of tuples: (cq_attempt_key, BuildBucket entry)
  bb_entries_keyed_by_cq_attempt_key = ({
      'bb_entries' : bb_entry_keyed_by_bb_id,
      'cq_attempt_key': cq_attempt_key_keyed_by_bb_id
  } | 'Join BuildBucket with cq attempts' >> beam.CoGroupByKey()
    | beam.ParDo(FilterJoinedBuildBucketCQAttempt())
  )
  # Uses BuildBucket entries associated with a CQ attempt to potentially change
  # the failure reason to FAILED_PRESUBMIT_BOT. Creates a Pcol of
  # cq_attempt_as_dict.
  results = ({
      'cq_attempts' : cq_attempts_with_key,
      'bb_entries' : bb_entries_keyed_by_cq_attempt_key
  } | beam.CoGroupByKey()
    | beam.FlatMap(update_with_presubmit_failure)
  )
  return results
def main():
  """Reads raw CQ events and BuildBucket results from BigQuery, computes
  CQ attempts via process_input(), and writes them back to the
  chrome-infra-events.cq_attempts table.
  """
  p = chops_beam.EventsPipeline()
  q = ('SELECT timestamp_millis, action, attempt_start_usec, cq_name, issue,'
       ' patchset, dry_run, failure_reason, contributing_buildbucket_ids, '
       ' earliest_equivalent_patchset '
       'FROM `chrome-infra-events.raw_events.cq`')
  cq_events_pcol = p | 'read raw CQ events' >> chops_beam.BQRead(q)
  q = ('SELECT id, builder.builder, status from '
       '`cr-buildbucket.chromium.completed_builds_BETA`')
  bb_entries_pcol = p | 'read BuildBucket' >> chops_beam.BQRead(q)
  results = process_input(cq_events_pcol, bb_entries_pcol)
  # The bare `|` applies the write transform for its side effect only.
  # pylint: disable=expression-not-assigned
  results | chops_beam.BQWrite('chrome-infra-events', 'cq_attempts')
  p.run()
if __name__ == '__main__':
  main()
| 36.385827 | 80 | 0.727765 |
a407b17fc89d610ba5661dbab077ca018b783c36 | 2,192 | py | Python | setup.py | jweslley/frogstarb | 3d963257b60881b4a511bd562eadde12ad32e553 | [
"MIT"
] | 1 | 2019-08-10T19:12:57.000Z | 2019-08-10T19:12:57.000Z | setup.py | jweslley/frogstarb | 3d963257b60881b4a511bd562eadde12ad32e553 | [
"MIT"
] | null | null | null | setup.py | jweslley/frogstarb | 3d963257b60881b4a511bd562eadde12ad32e553 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import sys, os
from distutils.core import setup
from distutils.command.install_scripts import install_scripts
# NOTE: Python 2 only -- uses the `file()` builtin, `print` statements and
# `except Exception, e` syntax, all removed in Python 3.
class b_install_scripts(install_scripts):
    """ Customized install_scripts. Create frogstarb.bat for win32. """
    def run(self):
        # Perform the normal script installation first.
        install_scripts.run(self)
        if sys.platform == 'win32':
            try:
                # Wrap the installed 'frogstarb' script in a .bat launcher so
                # it can be invoked directly from a Windows shell.
                script_dir = os.path.join(sys.prefix, 'Scripts')
                script_path = os.path.join(script_dir, 'frogstarb')
                # %%* forwards all command-line arguments to the script.
                bat_str = '@"%s" "%s" %%*' % (sys.executable, script_path)
                bat_path = os.path.join(self.install_dir, 'frogstarb.bat')
                f = file(bat_path, 'w')
                f.write(bat_str)
                f.close()
                print 'Created:', bat_path
            except Exception, e:
                # Best-effort: a failed .bat creation only warns, the install
                # itself still succeeds.
                print 'ERROR: Unable to create %s: %s' % (bat_path, e)
version = '0.1.0'
setup(
name = 'FrogstarB',
version = version,
description = 'Simple tool to post to Blogger.com from the command line.',
author = 'Jonhnny Weslley',
author_email = 'jw [at] jonhnnyweslley.net',
url = 'http://github.com/jweslley/frogstarb',
download_url = 'http://github.com/jweslley/frogstarb/tarball/v%s' % version,
license = 'MIT',
packages = ['frogstarb'],
package_data = {'frogstarb' : ['preview/*']},
scripts = ['bin/frogstarb'],
requires = ["gdata", "markdown", "pystaches"],
cmdclass = {'install_scripts': b_install_scripts},
classifiers = ['Development Status :: 4 - Beta',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'License :: OSI Approved :: MIT License',
'Environment :: Web Environment',
'Intended Audience :: End Users/Desktop',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Internet :: WWW/HTTP :: Site Management',
'Topic :: Text Processing :: Markup :: HTML'
]
)
| 40.592593 | 80 | 0.563869 |
e4b6ebf1b9caf6438a2989e1ffadb308008a472e | 22,711 | py | Python | Clustering/src/task1.py | ankitagupta820/Data-Minning-using-Yelp-Dataset | 2f80ba0c780cf303ed12b05496ebaa1debe23e10 | [
"MIT"
] | 1 | 2021-04-08T01:03:02.000Z | 2021-04-08T01:03:02.000Z | Clustering/src/task1.py | ankitagupta820/Data-Minning-using-Yelp-Dataset | 2f80ba0c780cf303ed12b05496ebaa1debe23e10 | [
"MIT"
] | null | null | null | Clustering/src/task1.py | ankitagupta820/Data-Minning-using-Yelp-Dataset | 2f80ba0c780cf303ed12b05496ebaa1debe23e10 | [
"MIT"
] | null | null | null | from pyspark import SparkConf, SparkContext
import os
import sys
import collections
from collections import defaultdict
import itertools
import random
from math import sqrt
import copy
import json
import csv
import time
# Force PySpark to run both workers and the driver on this interpreter.
os.environ['PYSPARK_PYTHON'] = '/usr/local/bin/python3.6'
os.environ['PYSPARK_DRIVER_PYTHON'] = '/usr/local/bin/python3.6'
#CONSTANTS
# Distance-metric selectors understood by findDistance().
MAHALANOBIS = "m"
EUCLIDEAN = "e"
class Kmeans:
    """Plain k-means over a {point_id: feature_vector} mapping.

    fit_data() returns (centroids, SUMSQ/N statistics, membership lists);
    used to seed the DS/CS summaries of the BFR pipeline below.
    """
    def __init__(self, n_clusters: int, max_iter: int):
        # Hard cap on refinement rounds; see fit_data() for the stop test.
        self.max_iteration = max_iter
        self.n_cluster = n_clusters
    def initialize_centroid(self, s: int):
        """
        Use random sampling to pick points as centroids
        """
        # Seeded so repeated runs pick the same initial centroids.
        random.seed(s)
        self.centroids = dict()          # "cN" -> centroid location (SUM/N)
        self.cluster_points = dict()     # "cN" -> list of member point ids
        self.centroid_isStable = dict()  # "cN" -> True once it stops moving
        self.centroid_stats = dict()     # "cN" -> per-dimension SUMSQ/N
        # NOTE(review): random.sample() over dict.keys() is accepted by the
        # pinned Python 3.6 but raises TypeError on 3.11+ (needs a sequence);
        # confirm the interpreter before upgrading.
        for index, sample_key in enumerate(random.sample(self.data.keys(), self.n_cluster)):
            self.centroids.setdefault("c" + str(index), self.data.get(sample_key))
            self.centroid_stats.setdefault("c" + str(index), self.data.get(sample_key))
            self.cluster_points.setdefault("c" + str(index), list())
            self.centroid_isStable.setdefault("c" + str(index), False)
    def fit_data(self, data: dict, s=666):
        """
        Performs point assignment to centroids
        """
        self.data = data
        self.check_datasize() # this func might change the value of self.n_cluster
        self.initialize_centroid(s)
        epochs = 1
        while True:
            for k in list(self.data.keys()):
                # [(centroid, key), distance]
                # => [((c2, point_id), distance_to_c2)]
                distance_to_centroids = dict()
                for c in self.centroids.keys():
                    distance_to_centroids[(c, k)] = findDistance(self.centroids[c], self.data[k])
                # Keep only the nearest centroid for this point.
                assignment = list(sorted(distance_to_centroids.items(), key=lambda x: x[1]))[:1]
                # add the point to corresponding centroid list
                # {cluster_point_id1: [point_id,...], cluster_point_id2: [point_id,...]}
                self.cluster_points[assignment[0][0][0]].append(assignment[0][0][1])
            prev_centroids, curr_centroids = self.update_centroids()
            if not self.is_centroid_changed(prev_centroids, curr_centroids) or epochs >= self.max_iteration:
                break
            epochs += 1
        # NOTE(review): memberships accumulate across epochs (no per-epoch
        # reset), and reset_clusters() below empties every membership list
        # before it is returned -- so the third return value holds empty
        # lists. Confirm whether callers rely on that.
        self.reset_clusters()
        return self.centroids, self.centroid_stats, self.cluster_points
    def check_datasize(self):
        """
        Check the data size. change the number of cluster
        if size of data is less than earlier setup
        """
        if len(self.data.keys()) < self.n_cluster:
            self.n_cluster = len(self.data.keys())
    def reset_clusters(self):
        # Empty every membership list while keeping the cluster keys.
        for key in self.cluster_points.keys():
            self.cluster_points[key] = list()
    def update_centroids(self):
        """
        recalculate the centroids by averaging cluster points
        returns -> previous centroids, new centroids
        """
        prev_centroids = copy.deepcopy(self.centroids)
        for centroid, cluster_point_list in self.cluster_points.items():
            if not self.centroid_isStable.get(centroid):
                # The previous centroid location itself is included in the
                # average alongside the member points.
                points_list = list()
                points_list.append(self.centroids.get(centroid))
                for cluster_point in cluster_point_list:
                    points_list.append(self.data.get(cluster_point))
                # Calculate new centroids by averaging points SUM / N
                self.centroids[centroid] = [sum(d) / len(d) for d in zip(*points_list)]
                # Calculate SUMSQ / N
                self.centroid_stats[centroid] = [sum([val ** 2 for val in i]) / len(i) for i in
                                                 zip(*points_list)]
        return prev_centroids, self.centroids
    def is_centroid_changed(self, c1: dict, c2: dict):
        # Compares centroid locations rounded to whole numbers and records a
        # per-centroid stability flag used by update_centroids().
        # NOTE(review): `return True` sits inside the loop, so only the first
        # centroid is examined and the method reports "changed" for any
        # non-empty input; iteration then stops on max_iteration alone.
        # Confirm intent before re-indenting.
        for id in c1.keys():
            valA = set(map(lambda v: round(v, 0), c1.get(id)))
            valB = set(map(lambda v: round(v, 0), c2.get(id)))
            if len(valA.difference(valB)) == 0:
                self.centroid_isStable[id] = True
            else:
                self.centroid_isStable[id] = False
            return True
        return False
class Cluster:
    """Base for the BFR summary sets (DS/CS): centroids, memberships and
    sufficient statistics (SUM/N, SUMSQ/N, per-dimension std deviation)."""
    def __init__(self):
        self.centroids = None
        self.cluster_points = None
        self.Type = None
    def init(self, centroids: dict, stats: dict, points: dict):
        # Dimensionality is taken from the first centroid's vector length.
        self.data_dimentions = len(list(centroids.values())[0])
        self.cluster_points = points
        self.centroids = centroids
        # NOTE: SUM_N aliases the *same* dict object as self.centroids, so
        # updates to one are visible through the other.
        self.SUM_N = centroids
        self.SUMSQ_N = stats
        self.total_point = 0
        self.STD = dict()
        self.calc_STD()
    def getType(self):
        return self.Type
    def getClusterPoints(self):
        return self.cluster_points
    def getClusterPointsByKey(self, key):
        # Returns a shallow copy of the membership list.
        return list(self.cluster_points.get(key))
    def getCentroids(self):
        return self.centroids
    def getCentroidByKey(self, key: str):
        return list(self.centroids.get(key))
    def getSTD(self):
        return self.STD
    def getSTDByKey(self, key: str):
        return list(self.STD.get(key))
    def getDimension(self):
        return self.data_dimentions
    def getNumClusters(self):
        return len(self.centroids.keys())
    def getNumPoints(self):
        # Recounts (and caches) the total membership across all clusters.
        self.total_point = 0
        for key, value in self.cluster_points.items():
            if type(value) == list:
                self.total_point += len(value)
        return self.total_point
    def getSUMSQ_NByKey(self, key: str):
        return list(self.SUMSQ_N.get(key))
    def calc_STD(self):
        # Per-dimension std dev: sqrt(E[x^2] - E[x]^2) from the cached
        # SUMSQ/N and SUM/N statistics.
        self.STD = dict()
        for k in self.SUM_N.keys():
            self.STD[k] = [sqrt(SQ_N - SUM_N ** 2) for (SQ_N, SUM_N) in zip(self.SUMSQ_N.get(k), self.SUM_N.get(k))]
    def updateCentroids(self, cluster_map, data_point_map):
        """Folds newly assigned points into each cluster's statistics.

        cluster_map: {centroid_key: [point_id, ...]} of new assignments.
        data_point_map: {point_id: location} for those points.
        """
        if len(cluster_map.keys()) > 0:
            # Snapshot old state so every cluster is updated from the same
            # pre-update counts and locations.
            old_centroids = copy.deepcopy(self.centroids)
            old_cluster_points = copy.deepcopy(self.cluster_points)
            old_sumsq_n = copy.deepcopy(self.SUMSQ_N)
            for centroid, points in cluster_map.items():
                new_point_list = list()
                for point in points:
                    new_point_list.append(data_point_map.get(point))
                n_old_points = len(old_cluster_points.get(centroid))
                total_count = n_old_points + len(new_point_list)
                # Update value for SUM/N
                old_location1 = old_centroids.get(centroid)
                old_sum = list(map(lambda val: val * n_old_points, old_location1))
                new_sum = [sum(d) for d in zip(*new_point_list)]
                self.centroids[centroid] = self.SUM_N[centroid] = computeAVG(old_sum, new_sum, denom=total_count)
                # update value for SUMSQ / N
                old_location2 = old_sumsq_n.get(centroid)
                old_sumsq = list(map(lambda val: val * n_old_points, old_location2))
                new_sumsq = [sum([val ** 2 for val in i]) for i in zip(*new_point_list)]
                self.SUMSQ_N[centroid] = computeAVG(new_sumsq, old_sumsq, denom=total_count)
            self.calc_STD()
            self.updateClusterPoints(cluster_map)
    def updateClusterPoints(self, new_cluster_points):
        # Merge new membership lists into the existing ones, key by key.
        if len(new_cluster_points.keys()) > 0:
            total_list = collections.defaultdict(list)
            for key, val in itertools.chain(self.cluster_points.items(), new_cluster_points.items()):
                total_list[key] += val
            self.cluster_points = total_list
def computeAVG(l1, l2, denom=None):
    """Element-wise combination of two equal-length vectors.

    With denom=None each pair of elements is averaged (divided by 2);
    otherwise each pairwise sum is divided by the supplied denominator
    (used for count-weighted centroid merges).
    """
    divisor = 2 if denom is None else denom
    return [(a + b) / divisor for a, b in zip(l1, l2)]
class DS(Cluster):
    """Discard set: the well-summarized clusters of the BFR algorithm."""
    def __init__(self):
        Cluster.__init__(self)
        self.Type = "DS"
    def mergeToOneCluster(self, ds_key: str,
                          cs_sumsq_n: list,
                          cs_centroid: list,
                          cs_points: list):
        """Absorbs one CS cluster into DS cluster *ds_key*.

        The new SUM/N and SUMSQ/N are count-weighted averages of the two
        clusters' statistics; the CS point ids are appended to the DS
        membership list.
        """
        n_ds_points = len(self.getClusterPointsByKey(ds_key))
        n_cs_points = len(cs_points)
        total_points = n_cs_points + n_ds_points
        ds_centroid = self.getCentroidByKey(ds_key)
        # Re-weight each side by its point count before re-averaging.
        old_sum = list(map(lambda val: val * n_ds_points, ds_centroid))
        new_sum = list(map(lambda val: val * n_cs_points, cs_centroid))
        # (local `sum` shadows the builtin for the rest of this method)
        sum = computeAVG(old_sum, new_sum, total_points)
        old_sumsq = list(map(lambda val: val * n_ds_points, self.getSUMSQ_NByKey(ds_key)))
        new_sumsq = list(map(lambda val: val * n_cs_points, cs_sumsq_n))
        sumsq = computeAVG(old_sumsq, new_sumsq, total_points)
        self.centroids.update({ds_key: sum})
        self.cluster_points[ds_key].extend(cs_points)
        self.SUMSQ_N.update({ds_key: sumsq})
        # Refresh the cached totals and per-dimension std deviations.
        self.getNumPoints()
        self.calc_STD()
class CS(Cluster):
    """Compression set: mini-clusters summarizing nearby retained points."""
    def __init__(self):
        Cluster.__init__(self)
        self.Type = "CS"
        # Counters used to mint unique keys: "R2C<i>" for clusters promoted
        # from the retained set, "m<i>" for merged CS pairs.
        self.R2C_itr = 0
        self.merge_itr = 0
    def removeCluster(self, key):
        """Drops cluster *key* from every internal structure."""
        # pop cluster points
        self.cluster_points.pop(key)
        # pop centroid and SUM_N
        self.centroids.pop(key)
        # pop standard dev for cluster
        self.STD.pop(key)
        # pop sum of squares avg.
        self.SUMSQ_N.pop(key)
        # recount total points
        self.getNumPoints()
    def update_change(self, centroids: dict, stats: dict, points: dict):
        """Adds clusters promoted from the RS, relabelled as "R2C<i>"."""
        if len(centroids.keys()) != 0:
            for key in list(centroids.keys()):
                self.centroids.update({"R2C" + str(self.R2C_itr): centroids.get(key)})
                self.SUMSQ_N.update({"R2C" + str(self.R2C_itr): stats.get(key)})
                self.cluster_points.update({"R2C" + str(self.R2C_itr): points.get(key)})
                self.calc_STD()
                self.R2C_itr += 1
    def merge_two_Clusters(self, C1: str, C2: str):
        """Replaces clusters C1 and C2 with one merged cluster "m<i>".

        NOTE(review): computeAVG() without a denominator averages the two
        clusters' statistics with equal weight, ignoring their point
        counts -- confirm that is intended (DS.mergeToOneCluster weights
        by size).
        """
        new_centroid = computeAVG(list(self.centroids[C1]), list(self.centroids[C2]))
        new_sumsq_n = computeAVG(list(self.SUMSQ_N[C1]), list(self.SUMSQ_N[C2]))
        cluster_points = list(self.cluster_points[C1])
        cluster_points.extend(list(self.cluster_points[C2]))
        m_itr_key = "m" + str(self.merge_itr)
        # Update centroids
        self.centroids.pop(C1)
        self.centroids.pop(C2)
        self.centroids.update({m_itr_key: new_centroid})
        # Update SUMSQ_N
        self.SUMSQ_N.pop(C1)
        self.SUMSQ_N.pop(C2)
        self.SUMSQ_N.update({m_itr_key: new_sumsq_n})
        # Update cluster points
        self.cluster_points.pop(C1)
        self.cluster_points.pop(C2)
        self.cluster_points.update({m_itr_key: cluster_points})
        # Recalculate STD
        self.calc_STD()
        self.merge_itr += 1
    def getClusterResultSortedInfo(self):
        # {cluster_key: sorted list of member point ids}
        result = collections.defaultdict(list)
        for key in self.cluster_points.keys():
            result[key] = sorted(self.cluster_points[key])
        return result
class RS:
    """Retained set: outlier points kept as {point_id: location} until they
    can be re-clustered into CS/DS."""

    def __init__(self):
        # Points too far from every existing cluster.
        self.remaining_set = {}
        self.Type = "RS"

    @classmethod
    def getType(cls):
        """Identifier used when dispatching on summary-set kind."""
        return "RS"

    def addPoints(self, data: dict):
        """Merges *data* ({point_id: location}) into the retained set."""
        for point_id, location in data.items():
            self.remaining_set[point_id] = location

    def countPoints(self):
        """Number of points currently retained."""
        return len(self.remaining_set)

    def getRemaining(self):
        """The underlying {point_id: location} mapping (not a copy)."""
        return self.remaining_set

    def setPoints(self, points: dict):
        """Replaces the retained set wholesale."""
        self.remaining_set = points
class IntermediateSteps:
    """Collects per-round DS/CS/RS statistics and writes them as CSV."""
    def __init__(self):
        # Row key -> CSV row tuple; the "header" row is written first.
        self.intermediate_steps = dict()
        self.intermediate_steps["header"] = (
            "round_id", "nof_cluster_discard", "nof_point_discard",
            "nof_cluster_compression", "nof_point_compression",
            "nof_point_retained"
        )
    def add_intermediate_step(self, round_no, ds, cs, rs):
        """Records (and prints) the counts for one load round."""
        self.intermediate_steps[round_no] = ( round_no, ds.getNumClusters(), ds.getNumPoints(), cs.getNumClusters(), cs.getNumPoints(), rs.countPoints())
        print("Round "+str(round_no)+" -> DS: Clusters: "+str(ds.getNumClusters())+" Points: "+str(ds.getNumPoints())+" | CS: Clusters: "+str(cs.getNumClusters())+" Points: "+str(cs.getNumPoints())+" | RS: Points: "+str(rs.countPoints())+"")
    def write(self, path: str):
        # Delegates to the module-level helper in CSV mode.
        writeToFile(self.intermediate_steps, path, type="csv")
def writeToFile(results, path, type="json"):
    """Serialize *results* (a dict) to *path*.

    type="json": dump the whole dict as one JSON document.
    type="csv":  write one row per dict entry using only the values
                 (keys are dropped); each value must be an iterable of
                 cells. Any other type silently writes nothing.
    Note: the `type` parameter shadows the builtin but is kept for
    caller compatibility (callers pass type="csv" by keyword).
    """
    if type == "json":
        with open(path, "w+") as f:
            # write(), not writelines(): json.dumps returns a single string,
            # and writelines() would iterate it character by character.
            # (The redundant f.close() inside the with-block is gone.)
            f.write(json.dumps(results))
    elif type == "csv":
        with open(path, "w+", newline="") as f:
            writer = csv.writer(f)
            for key, value in results.items():
                writer.writerow(value)
def segregate_single_point_clusters(centroids: dict, statistics: dict, points: dict):
    """Strips clusters holding at most one point (mutates all three dicts).

    Clusters with 0 or 1 members are removed in place from *centroids*,
    *statistics* and *points*; each lone member is collected into the
    returned mapping {point_id: former_centroid_location} so the caller
    can feed it to the retained set.
    """
    remaining_points = {}
    doomed = [key for key, members in points.items() if len(members) <= 1]
    for key in doomed:
        members = points[key]
        if members:
            remaining_points[members[0]] = centroids.get(key)
        del points[key]
        del centroids[key]
        del statistics[key]
    return centroids, statistics, points, remaining_points
def findDistance(pointX, pointY, STD = None, d_type= EUCLIDEAN):
    """Distance between two points.

    d_type EUCLIDEAN:   plain Euclidean distance.
    d_type MAHALANOBIS: each per-dimension difference is normalised by the
    matching entry of STD before summing (STD must then be provided).
    Any other d_type falls through and returns None.
    """
    if d_type == EUCLIDEAN:
        squared_diffs = ((x - y) ** 2 for x, y in zip(pointX, pointY))
        return float(sqrt(sum(squared_diffs)))
    if d_type == MAHALANOBIS:
        normalised = (((x - y) / std) ** 2 for x, y, std in zip(pointX, pointY, STD))
        return float(sqrt(sum(normalised)))
def assign2NearestCluster(data_point, alpha, DS=None, CS=None, c_type=""):
    """Generator: assigns one point to its nearest DS or CS cluster.

    data_point: (point_id, location). Exactly one of DS/CS is consulted,
    selected by c_type matching that set's Type string.
    Yields ((cluster_key, point_id), location, False) on assignment, or
    (("-1", point_id), location, True) when the point is an outlier, i.e.
    no cluster lies within alpha*sqrt(dimensions) Mahalanobis distance.
    """
    if DS is not None and c_type == DS.getType():
        ds_dimensions = DS.getDimension()
        min_distance = float('inf')
        closest_key = None
        for key, point in DS.getCentroids().items():
            curr_distance = findDistance(data_point[1], point, DS.getSTD().get(key), d_type = MAHALANOBIS)
            # Must both beat the threshold and be the closest so far.
            if curr_distance < alpha * sqrt(ds_dimensions) and curr_distance < min_distance:
                min_distance = curr_distance
                closest_key = (key, data_point[0])
        if closest_key is not None:
            # assigned to closest DS
            yield tuple((closest_key, data_point[1], False))
        else:
            #Outlier point
            yield tuple((("-1", data_point[0]), data_point[1], True))
    elif CS is not None and c_type == CS.getType():
        # Same search as above, against the compression set instead.
        cs_dimensions = CS.getDimension()
        min_distance = float('inf')
        closest_key = None
        for key, point in CS.getCentroids().items():
            curr_distance = findDistance(data_point[1], point, CS.getSTD().get(key), d_type=MAHALANOBIS)
            if curr_distance < alpha * sqrt(cs_dimensions) and curr_distance < min_distance:
                min_distance = curr_distance
                closest_key = (key, data_point[0])
        if closest_key is not None:
            # assigned to closest CS
            yield tuple((closest_key, data_point[1], False))
        else:
            # Outlier point
            yield tuple((("-1", data_point[0]), data_point[1], True))
def merge_CS_Clusters(alpha, cs: CS):
    """Merges pairs of CS clusters closer than alpha*sqrt(d) (in place).

    Distances are measured on a deep-copied snapshot so merges performed
    during the sweep do not affect later comparisons; each original key
    participates in at most one merge (tracked via centroid_keys).
    """
    cs_D = cs.getDimension()
    old_cs = copy.deepcopy(cs)
    centroid_keys = set(list(old_cs.getCentroids().keys()))
    for pair in itertools.combinations(list(old_cs.getCentroids().keys()), 2):
        if pair[0] in centroid_keys and pair[1] in centroid_keys:
            # Mahalanobis distance normalised by the *first* cluster's std.
            distance = findDistance(pointX=old_cs.getCentroidByKey(pair[0]), pointY=old_cs.getCentroidByKey(pair[1]), STD=old_cs.getSTDByKey(pair[0]), d_type=MAHALANOBIS)
            if distance < alpha * sqrt(cs_D):
                # Merge 2 CS
                cs.merge_two_Clusters(pair[0], pair[1])
                centroid_keys.discard(pair[0])
                centroid_keys.discard(pair[1])
def merge_CS_DS(alpha_value, ds: DS, cs: CS):
    """Final BFR step: folds CS clusters into nearby DS clusters (in place).

    Each CS cluster is merged into the first DS cluster found within
    alpha_value*sqrt(d) Mahalanobis distance (normalised by the DS
    cluster's std); the CS cluster is then removed. Distances are taken
    on deep-copied snapshots so earlier merges don't shift later tests.
    """
    ds_D = ds.getDimension()
    old_ds = copy.deepcopy(ds)
    old_cs = copy.deepcopy(cs)
    for c_cs in old_cs.getCentroids().keys():
        for c_ds in old_ds.getCentroids().keys():
            cs_ds_distance = findDistance(
                pointX=old_cs.getCentroidByKey(c_cs),
                pointY=old_ds.getCentroidByKey(c_ds),
                STD=old_ds.getSTDByKey(c_ds),
                d_type= MAHALANOBIS
            )
            if cs_ds_distance < alpha_value * sqrt(ds_D):
                ds.mergeToOneCluster(ds_key=c_ds,
                                     cs_sumsq_n=old_cs.getSUMSQ_NByKey(c_cs),
                                     cs_centroid=old_cs.getCentroidByKey(c_cs),
                                     cs_points=old_cs.getClusterPointsByKey(c_cs))
                cs.removeCluster(c_cs)
                # First acceptable DS wins; move on to the next CS cluster.
                break
def WriteClusterOutput(ds: DS, cs: CS, rs: RS, path):
    """Writes the final {point_id: cluster_index} mapping to *path* as JSON.

    DS members get their numeric cluster index (parsed from the "cN" key);
    CS members and retained (RS) points are labelled -1 (outliers).
    setdefault keeps the first label seen if a point id appears twice.
    """
    result = defaultdict()
    for key in list(ds.getClusterPoints().keys()):
        # int(key[1:]) strips the "c" prefix from the DS cluster key.
        [result.setdefault(str(id), int(key[1:])) for id in ds.getClusterPointsByKey(key)]
    for key in list(cs.getClusterPoints().keys()):
        [result.setdefault(str(id), -1) for id in cs.getClusterPointsByKey(key)]
    for key in list(rs.getRemaining().keys()):
        result.setdefault(str(key), -1)
    writeToFile(result, path, type="json")
tick = time.time()
#input paths and variables
input_dir = "data/HW5/test1"
n_clusters = int("10")
outfile_1 = "data/HW5/output/cluster1.json"
outfile_2 = "data/HW5/intermediate/intermediate1.csv"
# input_dir = sys.argv[1]
# n_clusters = int(sys.argv[2])
# outfile_1 = sys.argv[3]
# outfile_2 = sys.argv[4]
conf = SparkConf().setMaster("local[*]").set("spark.executor.memory", "4g").set("spark.driver.memory", "4g")
sc = SparkContext(conf=conf)
sc.setLogLevel("WARN")
alpha = 3
discard_set = DS()
compressed_set = CS()
retained_set = RS()
intermediate_steps = IntermediateSteps()
for i, file_name in enumerate(sorted(os.listdir(input_dir))):
file_path = ''.join(input_dir + "/" + file_name)
Data_RDD = sc.textFile(file_path).map(lambda line: line.split(",")).map(lambda x: (int(x[0]), list(map(eval, x[1:]))))
if i == 0:
total_points = Data_RDD.count()
first_n = 10000 if total_points > 10000 else int(total_points * 0.1)
# Run KMeans on subset of first data file.
kmean_data = Data_RDD.filter(lambda x: x[0] < first_n).collectAsMap()
ds_centeroid, ds_stats, ds_points = Kmeans(n_clusters=n_clusters, max_iter=5).fit_data(kmean_data)
# Run K-Means to generate the DS clusters
discard_set.init(ds_centeroid, ds_stats, ds_points)
# run kMeans on rest of points to get CS
remaining_data = Data_RDD.filter(lambda x: x[0] >= first_n).collectAsMap()
centeroid, stats, points = Kmeans(n_clusters=n_clusters * 3, max_iter=3).fit_data(remaining_data)
# seperate points into RS.
cs_centeroid, cs_stats, cs_points, remaining = segregate_single_point_clusters(centeroid, stats, points)
compressed_set.init(cs_centeroid, cs_stats, cs_points)
retained_set.addPoints(remaining)
else:
# Compare new points to the clusters in DS and if the distance < 𝛼√𝑑,
# assign them to the nearest DS cluster.
# For assigned points (('-1', point_index), point_location, True)
# For Outliers points (('c1', point_index), point_location, False)
All_Assignment_RDD = Data_RDD.flatMap(lambda data_point: assign2NearestCluster(data_point, alpha, DS=discard_set, c_type=discard_set.getType()))
# Get all assigned points (('c1', point_index), point_location)
DS_Assignment_RDD = All_Assignment_RDD.filter(lambda assignment: assignment[2] is False).map(lambda x: (x[0], x[1]))
# Group by DS cluster name {"c1": [point_index1, point_index2....]}
ds_cluster_map = DS_Assignment_RDD.map(lambda x: x[0]).groupByKey().mapValues(list).collectAsMap()
# Point map {point_index: point_location}
ds_data_points = DS_Assignment_RDD.map(lambda x: (x[0][1], list(x[1]))).collectAsMap()
# Update DS centroids with new data points
discard_set.updateCentroids(ds_cluster_map, ds_data_points)
# Assign the outlier points of from DS assignment to nearest CS based on Mahalanobis Distance
# => output (('centroid', point_index), location, False)
All_Assignment_CS = All_Assignment_RDD.filter(lambda assignment: assignment[2] is True).map(lambda x: (x[0][1], x[1]))\
.flatMap(lambda data_point: assign2NearestCluster(data_point, alpha, CS=compressed_set, c_type=compressed_set.getType()))
#(('centroid', point_index), location)
CS_Assignment_RDD = All_Assignment_CS.filter(lambda x: x[2] is False).map(lambda x: (x[0], x[1]))
# Group by CS cluster name {"c1": [point_index1, point_index2....]}
cs_cluster_map = CS_Assignment_RDD.map(lambda x: x[0]).groupByKey().mapValues(list).collectAsMap()
# Point map {point_index: point_location}
cs_data_points = CS_Assignment_RDD.map(lambda x: (x[0][1], list(x[1]))).collectAsMap()
# Update CS centroids with new data points
compressed_set.updateCentroids(cs_cluster_map, cs_data_points)
# Assign the outlier points from CS assignment to RS
# {point_index: location}
remaining_point = All_Assignment_CS.filter(lambda assignment: assignment[2] is True).map(lambda x: (x[0][1], x[1])).collectAsMap()
retained_set.addPoints(remaining_point)
# Run K-Means on RS points to get CS (>1 point) and RS (<=1)
centeroids, stats, points = Kmeans(n_clusters=n_clusters * 3, max_iter=5).fit_data(retained_set.getRemaining())
cs_centeroids, cs_stats, cs_points, other = segregate_single_point_clusters(centeroids, stats, points)
compressed_set.update_change(cs_centeroids, cs_stats, cs_points)
retained_set.setPoints(other)
# Merge CS clusters if their mahalanobis distance < 𝛼√𝑑
merge_CS_Clusters(alpha, cs=compressed_set)
# merge CS clusters to DS if their mahalanobis distance < 𝛼√𝑑
if len(os.listdir(input_dir)) == i + 1:
merge_CS_DS(alpha, ds=discard_set, cs=compressed_set)
intermediate_steps.add_intermediate_step(i + 1, discard_set, compressed_set, retained_set)
# Write to Output Files
intermediate_steps.write(outfile_2)
WriteClusterOutput(ds=discard_set, cs=compressed_set, rs=retained_set, path=outfile_1)
print("Duration: %d s." % (time.time() - tick)) | 35.709119 | 241 | 0.625776 |
6572f05e973d2f5e0f28ca203bb67835391c709d | 2,531 | py | Python | setup.py | abhicantdraw/pandas-profiling | a12ebb7a94b9371df94bf611237a389d99f8bc00 | [
"MIT"
] | 736 | 2016-01-14T03:36:03.000Z | 2018-01-06T00:56:33.000Z | setup.py | abhicantdraw/pandas-profiling | a12ebb7a94b9371df94bf611237a389d99f8bc00 | [
"MIT"
] | 72 | 2016-01-29T12:08:04.000Z | 2018-01-06T11:18:44.000Z | setup.py | abhicantdraw/pandas-profiling | a12ebb7a94b9371df94bf611237a389d99f8bc00 | [
"MIT"
] | 108 | 2016-01-14T11:48:18.000Z | 2018-01-02T13:35:10.000Z | from pathlib import Path
from setuptools import find_packages, setup
# Read the contents of README file
source_root = Path(".")
with (source_root / "README.md").open(encoding="utf-8") as f:
long_description = f.read()
# Read the requirements
with (source_root / "requirements.txt").open(encoding="utf8") as f:
requirements = f.readlines()
version = "3.1.1"
with (source_root / "src" / "pandas_profiling" / "version.py").open(
"w", encoding="utf-8"
) as f:
f.writelines(
[
'"""This file is auto-generated by setup.py, please do not alter."""\n',
f'__version__ = "{version}"\n',
"",
]
)
setup(
name="pandas-profiling",
version=version,
author="Simon Brugman",
author_email="pandasprofiling@gmail.com",
packages=find_packages("src"),
package_dir={"": "src"},
url="https://github.com/pandas-profiling/pandas-profiling",
license="MIT",
description="Generate profile report for pandas DataFrame",
python_requires=">=3.6",
install_requires=requirements,
extras_require={
"notebook": [
"jupyter-client>=5.3.4",
"jupyter-core>=4.6.3",
"ipywidgets>=7.5.1",
],
},
package_data={
"pandas_profiling": ["py.typed"],
},
include_package_data=True,
classifiers=[
"Development Status :: 5 - Production/Stable",
"Topic :: Software Development :: Build Tools",
"License :: OSI Approved :: MIT License",
"Environment :: Console",
"Operating System :: OS Independent",
"Intended Audience :: Science/Research",
"Intended Audience :: Developers",
"Intended Audience :: Financial and Insurance Industry",
"Intended Audience :: Healthcare Industry",
"Topic :: Scientific/Engineering",
"Framework :: IPython",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
],
keywords="pandas data-science data-analysis python jupyter ipython",
long_description=long_description,
long_description_content_type="text/markdown",
entry_points={
"console_scripts": [
"pandas_profiling = pandas_profiling.controller.console:main"
]
},
options={"bdist_wheel": {"universal": True}},
)
| 32.037975 | 84 | 0.610826 |
560df59ec3fa08d61fa6318da515da5a4e5e2ced | 19,352 | py | Python | test/test_model.py | stackedsax/behave | 0cdd1d7e0ebcb43e08d7c5d0b25f62d7ffbfeb5f | [
"BSD-2-Clause"
] | null | null | null | test/test_model.py | stackedsax/behave | 0cdd1d7e0ebcb43e08d7c5d0b25f62d7ffbfeb5f | [
"BSD-2-Clause"
] | null | null | null | test/test_model.py | stackedsax/behave | 0cdd1d7e0ebcb43e08d7c5d0b25f62d7ffbfeb5f | [
"BSD-2-Clause"
] | null | null | null | from __future__ import with_statement
import sys
from mock import Mock, patch
from nose.tools import *
from behave import model
from behave.compat.collections import OrderedDict
class TestFeatureRun(object):
    """Unit tests for model.Feature.run() against a fully mocked runner."""
    def setUp(self):
        # Build a fresh mock runner per test and expose the sub-mocks that
        # Feature.run() touches (config, context, formatter, hook dispatcher)
        # as attributes so individual tests can assert on them directly.
        self.runner = Mock()
        self.runner.feature.tags = []
        self.config = self.runner.config = Mock()
        self.context = self.runner.context = Mock()
        self.formatter = self.runner.formatter = Mock()
        self.run_hook = self.runner.run_hook = Mock()
    def test_formatter_feature_called(self):
        # Running a feature must announce that feature to the formatter.
        feature = model.Feature('foo.feature', 1, u'Feature', u'foo',
                                background=Mock())
        feature.run(self.runner)
        self.formatter.feature.assert_called_with(feature)
    def test_formatter_background_called_when_feature_has_background(self):
        # A feature with a background reports it to the formatter.
        feature = model.Feature('foo.feature', 1, u'Feature', u'foo',
                                background=Mock())
        feature.run(self.runner)
        self.formatter.background.assert_called_with(feature.background)
    def test_formatter_background_not_called_when_feature_has_no_background(self):
        # No background -> formatter.background must never be invoked.
        feature = model.Feature('foo.feature', 1, u'Feature', u'foo')
        feature.run(self.runner)
        assert not self.formatter.background.called
    def test_run_runs_scenarios(self):
        # Every contained scenario is executed with the same runner instance.
        scenarios = [Mock(), Mock()]
        for scenario in scenarios:
            scenario.run.return_value = False
        self.config.tags.check.return_value = True
        feature = model.Feature('foo.feature', 1, u'Feature', u'foo',
                                scenarios=scenarios)
        feature.run(self.runner)
        for scenario in scenarios:
            scenario.run.assert_called_with(self.runner)
    def test_feature_hooks_not_run_if_feature_not_being_run(self):
        # When tag filtering rejects the feature, before/after hooks must
        # not fire at all.
        self.config.tags.check.return_value = False
        feature = model.Feature('foo.feature', 1, u'Feature', u'foo')
        feature.run(self.runner)
        assert not self.run_hook.called
class TestScenarioRun(object):
    """Unit tests for model.Scenario.run() against a fully mocked runner."""
    def setUp(self):
        # Mocked runner with the attributes Scenario.run() reads; dry_run is
        # disabled so steps are actually "executed" (as mocks).
        self.runner = Mock()
        self.runner.feature.tags = []
        self.config = self.runner.config = Mock()
        self.config.dry_run = False
        self.context = self.runner.context = Mock()
        self.formatter = self.runner.formatter = Mock()
        self.run_hook = self.runner.run_hook = Mock()
    def test_run_invokes_formatter_scenario_and_steps_correctly(self):
        # The scenario is reported to the formatter and each step runs.
        self.config.stdout_capture = False
        self.config.log_capture = False
        self.config.tags.check.return_value = True
        steps = [Mock(), Mock()]
        scenario = model.Scenario('foo.feature', 17, u'Scenario', u'foo',
                                  steps=steps)
        scenario.run(self.runner)
        self.formatter.scenario.assert_called_with(scenario)
        [step.run.assert_called_with(self.runner) for step in steps]
    # Target module for patching StringIO differs between Python 2 and 3.
    if sys.version_info[0] == 3:
        stringio_target = 'io.StringIO'
    else:
        stringio_target = 'StringIO.StringIO'
    def test_handles_stdout_and_log_capture(self):
        # Capture setup/teardown must be invoked when capture is enabled.
        self.config.stdout_capture = True
        self.config.log_capture = True
        self.config.tags.check.return_value = True
        steps = [Mock(), Mock()]
        scenario = model.Scenario('foo.feature', 17, u'Scenario', u'foo',
                                  steps=steps)
        scenario.run(self.runner)
        self.runner.setup_capture.assert_called_with()
        self.runner.teardown_capture.assert_called_with()
    def test_failed_step_causes_remaining_steps_to_be_skipped(self):
        # A failing first step marks the following step as 'skipped'.
        self.config.stdout_capture = False
        self.config.log_capture = False
        self.config.tags.check.return_value = True
        steps = [Mock(), Mock()]
        scenario = model.Scenario('foo.feature', 17, u'Scenario', u'foo',
                                  steps=steps)
        steps[0].run.return_value = False
        assert scenario.run(self.runner)
        eq_(steps[1].status, 'skipped')
    def test_failed_step_causes_context_failure_to_be_set(self):
        # A step failure is propagated to the root context attribute.
        self.config.stdout_capture = False
        self.config.log_capture = False
        self.config.tags.check.return_value = True
        steps = [Mock(), Mock()]
        scenario = model.Scenario('foo.feature', 17, u'Scenario', u'foo',
                                  steps=steps)
        steps[0].run.return_value = False
        assert scenario.run(self.runner)
        self.context._set_root_attribute.assert_called_with('failed', True)
    def test_skipped_steps_set_step_status_and_scenario_status_if_not_set(self):
        # Tag filtering rejects the scenario: everything ends up 'skipped'.
        self.config.stdout_capture = False
        self.config.log_capture = False
        self.config.tags.check.return_value = False
        steps = [Mock(), Mock()]
        scenario = model.Scenario('foo.feature', 17, u'Scenario', u'foo',
                                  steps=steps)
        scenario.run(self.runner)
        assert False not in [s.status == 'skipped' for s in steps]
        eq_(scenario.status, 'skipped')
    def test_scenario_hooks_not_run_if_scenario_not_being_run(self):
        # Filtered-out scenarios must not trigger before/after hooks.
        self.config.tags.check.return_value = False
        scenario = model.Scenario('foo.feature', 17, u'Scenario', u'foo')
        scenario.run(self.runner)
        assert not self.run_hook.called
class TestScenarioOutline(object):
    """Unit tests for model.ScenarioOutline.run() with mocked sub-scenarios."""
    def test_run_calls_run_on_each_generated_scenario(self):
        # NOTE: 'foo.featuer' is a harmless typo in the fixture filename.
        outline = model.ScenarioOutline('foo.featuer', 17, u'Scenario Outline',
                                        u'foo')
        outline._scenarios = [Mock(), Mock()]
        for scenario in outline._scenarios:
            scenario.run.return_value = False
        runner = Mock()
        runner.context = Mock()
        outline.run(runner)
        [s.run.assert_called_with(runner) for s in outline._scenarios]
    def test_run_stops_on_first_failure_if_requested(self):
        # With config.stop set, a failing scenario aborts the outline so the
        # remaining generated scenarios never run.
        outline = model.ScenarioOutline('foo.featuer', 17, u'Scenario Outline',
                                        u'foo')
        outline._scenarios = [Mock(), Mock()]
        outline._scenarios[0].run.return_value = True
        runner = Mock()
        runner.context = Mock()
        config = runner.config = Mock()
        config.stop = True
        outline.run(runner)
        outline._scenarios[0].run.assert_called_with(runner)
        assert not outline._scenarios[1].run.called
    def test_run_sets_context_variable_for_outline(self):
        # 'active_outline' is set to each scenario's example row in turn and
        # reset to None when the outline finishes.
        outline = model.ScenarioOutline('foo.featuer', 17, u'Scenario Outline',
                                        u'foo')
        outline._scenarios = [Mock(), Mock(), Mock()]
        for scenario in outline._scenarios:
            scenario.run.return_value = False
        runner = Mock()
        context = runner.context = Mock()
        config = runner.config = Mock()
        config.stop = True
        outline.run(runner)
        eq_(context._set_root_attribute.call_args_list, [
            (('active_outline', outline._scenarios[0]._row), {}),
            (('active_outline', outline._scenarios[1]._row), {}),
            (('active_outline', outline._scenarios[2]._row), {}),
            (('active_outline', None), {}),
        ])
def raiser(exception):
    """Return a callable that raises *exception* whenever it is invoked.

    The returned function accepts (and ignores) any positional and keyword
    arguments, which makes it a convenient ``side_effect`` for mocks.
    """
    def _raise_it(*args, **kwargs):
        raise exception
    return _raise_it
class TestStepRun(object):
    """Unit tests for model.Step.run() with a mocked runner and step registry."""
    def setUp(self):
        # Mocked runner plus a separate mocked step registry that individual
        # tests patch into behave.step_registry.registry; capture buffers
        # default to empty strings so failure messages start out clean.
        self.runner = Mock()
        self.config = self.runner.config = Mock()
        self.context = self.runner.context = Mock()
        print ('context is', self.context)
        self.formatter = self.runner.formatter = Mock()
        self.step_registry = Mock()
        self.stdout_capture = self.runner.stdout_capture = Mock()
        self.stdout_capture.getvalue.return_value = ''
        self.stderr_capture = self.runner.stderr_capture = Mock()
        self.stderr_capture.getvalue.return_value = ''
        self.log_capture = self.runner.log_capture = Mock()
        self.log_capture.getvalue.return_value = ''
        self.run_hook = self.runner.run_hook = Mock()
    def test_run_appends_step_to_undefined_when_no_match_found(self):
        # No registry match -> step is recorded as undefined and run() fails.
        step = model.Step('foo.feature', 17, u'Given', 'given', u'foo')
        self.step_registry.find_match.return_value = None
        self.runner.undefined = []
        with patch('behave.step_registry.registry', self.step_registry):
            assert not step.run(self.runner)
        assert step in self.runner.undefined
        eq_(step.status, 'undefined')
    def test_run_reports_undefined_step_via_formatter_when_not_quiet(self):
        step = model.Step('foo.feature', 17, u'Given', 'given', u'foo')
        self.step_registry.find_match.return_value = None
        with patch('behave.step_registry.registry', self.step_registry):
            assert not step.run(self.runner)
        self.formatter.match.assert_called_with(model.NoMatch())
        self.formatter.result.assert_called_with(step)
    def test_run_with_no_match_does_not_touch_formatter_when_quiet(self):
        # quiet=True suppresses all formatter output for undefined steps.
        step = model.Step('foo.feature', 17, u'Given', 'given', u'foo')
        self.step_registry.find_match.return_value = None
        with patch('behave.step_registry.registry', self.step_registry):
            assert not step.run(self.runner, quiet=True)
        assert not self.formatter.match.called
        assert not self.formatter.result.called
    def test_run_when_not_quiet_reports_match_and_result(self):
        # Formatter sees match+result whether the step passes, asserts,
        # or raises an unexpected exception.
        step = model.Step('foo.feature', 17, u'Given', 'given', u'foo')
        match = Mock()
        self.step_registry.find_match.return_value = match
        side_effects = (None, raiser(AssertionError('whee')),
                        raiser(Exception('whee')))
        for side_effect in side_effects:
            match.run.side_effect = side_effect
            with patch('behave.step_registry.registry', self.step_registry):
                step.run(self.runner)
            self.formatter.match.assert_called_with(match)
            self.formatter.result.assert_called_with(step)
    def test_run_when_quiet_reports_nothing(self):
        # quiet=True suppresses formatter output regardless of outcome.
        step = model.Step('foo.feature', 17, u'Given', 'given', u'foo')
        match = Mock()
        self.step_registry.find_match.return_value = match
        side_effects = (None, raiser(AssertionError('whee')),
                        raiser(Exception('whee')))
        for side_effect in side_effects:
            match.run.side_effect = side_effect
            step.run(self.runner, quiet=True)
            assert not self.formatter.match.called
            assert not self.formatter.result.called
    def test_run_runs_before_hook_then_match_then_after_hook(self):
        step = model.Step('foo.feature', 17, u'Given', 'given', u'foo')
        match = Mock()
        self.step_registry.find_match.return_value = match
        side_effects = (None, AssertionError('whee'), Exception('whee'))
        for side_effect in side_effects:
            # Make match.run() and runner.run_hook() the same mock so
            # we can make sure things happen in the right order.
            self.runner.run_hook = match.run = Mock()
            def effect(thing):
                # Arm the shared mock so that only the *second* call (the
                # actual step body, after the before_step hook) raises.
                def raiser(*args, **kwargs):
                    match.run.side_effect = None
                    if thing:
                        raise thing
                def nonraiser(*args, **kwargs):
                    match.run.side_effect = raiser
                return nonraiser
            match.run.side_effect = effect(side_effect)
            with patch('behave.step_registry.registry', self.step_registry):
                step.run(self.runner)
            # Expected order: before_step hook, step body, after_step hook.
            eq_(match.run.call_args_list, [
                (('before_step', self.context, step), {}),
                ((self.context,), {}),
                (('after_step', self.context, step), {}),
            ])
    def test_run_sets_table_if_present(self):
        step = model.Step('foo.feature', 17, u'Given', 'given', u'foo',
                          table=Mock())
        self.step_registry.find_match.return_value = Mock()
        with patch('behave.step_registry.registry', self.step_registry):
            step.run(self.runner)
        eq_(self.context.table, step.table)
    def test_run_sets_text_if_present(self):
        step = model.Step('foo.feature', 17, u'Given', 'given', u'foo',
                          text=Mock(name='text'))
        self.step_registry.find_match.return_value = Mock()
        with patch('behave.step_registry.registry', self.step_registry):
            step.run(self.runner)
        eq_(self.context.text, step.text)
    def test_run_sets_status_to_passed_if_nothing_goes_wrong(self):
        step = model.Step('foo.feature', 17, u'Given', 'given', u'foo')
        step.error_message = None
        self.step_registry.find_match.return_value = Mock()
        with patch('behave.step_registry.registry', self.step_registry):
            step.run(self.runner)
        eq_(step.status, 'passed')
        eq_(step.error_message, None)
    def test_run_sets_status_to_failed_on_assertion_error(self):
        # AssertionError -> 'failed' with an "Assertion Failed" message.
        step = model.Step('foo.feature', 17, u'Given', 'given', u'foo')
        step.error_message = None
        match = Mock()
        match.run.side_effect = raiser(AssertionError('whee'))
        self.step_registry.find_match.return_value = match
        with patch('behave.step_registry.registry', self.step_registry):
            step.run(self.runner)
        eq_(step.status, 'failed')
        assert step.error_message.startswith('Assertion Failed')
    @patch('traceback.format_exc')
    def test_run_sets_status_to_failed_on_exception(self, format_exc):
        # Any other exception -> 'failed' with the formatted traceback.
        step = model.Step('foo.feature', 17, u'Given', 'given', u'foo')
        step.error_message = None
        match = Mock()
        match.run.side_effect = raiser(Exception('whee'))
        self.step_registry.find_match.return_value = match
        format_exc.return_value = 'something to do with an exception'
        with patch('behave.step_registry.registry', self.step_registry):
            step.run(self.runner)
        eq_(step.status, 'failed')
        eq_(step.error_message, format_exc.return_value)
    @patch('time.time')
    def test_run_calculates_duration(self, time_time):
        step = model.Step('foo.feature', 17, u'Given', 'given', u'foo')
        match = Mock()
        self.step_registry.find_match.return_value = match
        # First time.time() call returns 17 and re-arms the mock so the
        # second call returns 23, simulating a 6-second step.
        def time_time_1():
            def time_time_2():
                return 23
            time_time.side_effect = time_time_2
            return 17
        side_effects = (None, raiser(AssertionError('whee')),
                        raiser(Exception('whee')))
        for side_effect in side_effects:
            match.run.side_effect = side_effect
            time_time.side_effect = time_time_1
            with patch('behave.step_registry.registry', self.step_registry):
                step.run(self.runner)
            eq_(step.duration, 23 - 17)
    def test_run_captures_stdout_and_logging(self):
        step = model.Step('foo.feature', 17, u'Given', 'given', u'foo')
        match = Mock()
        self.step_registry.find_match.return_value = match
        with patch('behave.step_registry.registry', self.step_registry):
            assert step.run(self.runner)
        self.runner.start_capture.assert_called_with()
        self.runner.stop_capture.assert_called_with()
    def test_run_appends_any_captured_stdout_on_failure(self):
        # Captured stdout is appended to the error message on failure.
        step = model.Step('foo.feature', 17, u'Given', 'given', u'foo')
        match = Mock()
        self.step_registry.find_match.return_value = match
        self.stdout_capture.getvalue.return_value = 'frogs'
        match.run.side_effect = raiser(Exception('halibut'))
        with patch('behave.step_registry.registry', self.step_registry):
            assert not step.run(self.runner)
        assert 'Captured stdout:' in step.error_message
        assert 'frogs' in step.error_message
    def test_run_appends_any_captured_logging_on_failure(self):
        # Captured log output is appended to the error message on failure.
        step = model.Step('foo.feature', 17, u'Given', 'given', u'foo')
        match = Mock()
        self.step_registry.find_match.return_value = match
        self.log_capture.getvalue.return_value = 'toads'
        match.run.side_effect = raiser(AssertionError('kipper'))
        with patch('behave.step_registry.registry', self.step_registry):
            assert not step.run(self.runner)
        assert 'Captured logging:' in step.error_message
        assert 'toads' in step.error_message
class TestTableModel(object):
    """Unit tests for model.Table: equality, iteration, and row access."""
    # Fixture: a 3x3 table of unicode cells.
    HEAD = [u'type of stuff', u'awesomeness', u'ridiculousness']
    DATA = [
        [u'fluffy', u'large', u'frequent'],
        [u'lint', u'low', u'high'],
        [u'green', u'variable', u'awkward'],
    ]
    def setUp(self):
        self.table = model.Table(self.HEAD, 0, self.DATA)
    def test_equivalence(self):
        # Two tables built from the same fixture compare equal.
        t1 = self.table
        self.setUp()
        eq_(t1, self.table)
    def test_table_iteration(self):
        # Iterating the table yields rows whose cells match the raw data.
        for i, row in enumerate(self.table):
            for j, cell in enumerate(row):
                eq_(cell, self.DATA[i][j])
    def test_table_row_by_index(self):
        for i in range(3):
            eq_(self.table[i], model.Row(self.HEAD, self.DATA[i], 0))
    def test_table_row_name(self):
        # Cells are addressable by column heading.
        eq_(self.table[0]['type of stuff'], 'fluffy')
        eq_(self.table[1]['awesomeness'], 'low')
        eq_(self.table[2]['ridiculousness'], 'awkward')
    def test_table_row_index(self):
        # Cells are also addressable by positional index.
        eq_(self.table[0][0], 'fluffy')
        eq_(self.table[1][1], 'low')
        eq_(self.table[2][2], 'awkward')
    @raises(KeyError)
    def test_table_row_keyerror(self):
        self.table[0]['spam']
    def test_table_row_items(self):
        # NOTE(review): comparing against zip(...) assumes Python 2 list
        # semantics; on Python 3 zip() returns an iterator — verify intent.
        eq_(self.table[0].items(), zip(self.HEAD, self.DATA[0]))
class TestModelRow(object):
    """Unit tests for model.Row: length, item access, get(), and as_dict()."""
    # Fixture: a single row with three named columns.
    HEAD = [u'name', u'sex', u'age']
    DATA = [u'Alice', u'female', u'12']
    def setUp(self):
        self.row = model.Row(self.HEAD, self.DATA, 0)
    def test_len(self):
        eq_(len(self.row), 3)
    def test_getitem_with_valid_colname(self):
        # Lookup by column name returns the matching cell.
        eq_(self.row['name'], u'Alice')
        eq_(self.row['sex'], u'female')
        eq_(self.row['age'], u'12')
    @raises(KeyError)
    def test_getitem_with_unknown_colname(self):
        self.row['__UNKNOWN_COLUMN__']
    def test_getitem_with_valid_index(self):
        # Lookup by positional index also works.
        eq_(self.row[0], u'Alice')
        eq_(self.row[1], u'female')
        eq_(self.row[2], u'12')
    @raises(IndexError)
    def test_getitem_with_invalid_index(self):
        colsize = len(self.row)
        eq_(colsize, 3)
        self.row[colsize]
    def test_get_with_valid_colname(self):
        eq_(self.row.get('name'), u'Alice')
        eq_(self.row.get('sex'), u'female')
        eq_(self.row.get('age'), u'12')
    def test_getitem_with_unknown_colname_should_return_default(self):
        # get() falls back to the supplied default for unknown columns.
        eq_(self.row.get('__UNKNOWN_COLUMN__', 'XXX'), u'XXX')
    def test_as_dict(self):
        # as_dict() returns an ordered, column-name keyed copy of the row.
        data1 = self.row.as_dict()
        data2 = dict(self.row.as_dict())
        assert isinstance(data1, dict)
        assert isinstance(data2, dict)
        assert isinstance(data1, OrderedDict)
        # -- REQUIRES: Python2.7 or ordereddict installed.
        # assert not isinstance(data2, OrderedDict)
        eq_(data1, data2)
        eq_(data1['name'], u'Alice')
        eq_(data1['sex'], u'female')
        eq_(data1['age'], u'12')
| 36.2397 | 82 | 0.625723 |
107a5f1787601c3ba0c60199c8660f01bfe13094 | 10,410 | py | Python | auto_update_tests.py | TerrorJack/binaryen | 4b04ef114a53fcaba27e9000ffb698fe898d1a37 | [
"Apache-2.0"
] | null | null | null | auto_update_tests.py | TerrorJack/binaryen | 4b04ef114a53fcaba27e9000ffb698fe898d1a37 | [
"Apache-2.0"
] | null | null | null | auto_update_tests.py | TerrorJack/binaryen | 4b04ef114a53fcaba27e9000ffb698fe898d1a37 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
#
# Copyright 2015 WebAssembly Community Group participants
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import shutil
import subprocess
import sys
from collections import OrderedDict
from scripts.test import binaryenjs
from scripts.test import lld
from scripts.test import shared
from scripts.test import support
from scripts.test import wasm2js
from scripts.test import wasm_opt
def update_asm_js_tests():
    """Regenerate the expected asm2wasm output for every .asm.js testcase.

    For each test, runs asm2wasm in every combination of trap-mode
    (imprecise / clamp / precise) and optimization (on / off), deriving the
    extra command-line flags from markers in the test's filename, and writes
    the resulting text to the matching `.fromasm*` expectation file.
    """
    print('[ processing and updating testcases... ]\n')
    for asm in shared.get_tests(shared.options.binaryen_test, ['.asm.js']):
        basename = os.path.basename(asm)
        for precise in [0, 1, 2]:
            for opts in [1, 0]:
                cmd = shared.ASM2WASM + [asm]
                if 'threads' in basename:
                    cmd += ['--enable-threads']
                wasm = asm.replace('.asm.js', '.fromasm')
                if not precise:
                    cmd += ['--trap-mode=allow', '--ignore-implicit-traps']
                    wasm += '.imprecise'
                elif precise == 2:
                    cmd += ['--trap-mode=clamp']
                    wasm += '.clamp'
                if not opts:
                    wasm += '.no-opts'
                    if precise:
                        cmd += ['-O0']  # test that -O0 does nothing
                else:
                    cmd += ['-O']
                if 'debugInfo' in basename:
                    cmd += ['-g']
                if 'noffi' in basename:
                    cmd += ['--no-legalize-javascript-ffi']
                if precise and opts:
                    # test mem init importing
                    # Use a context manager so the file is flushed and closed
                    # before asm2wasm reads it (the original leaked the handle).
                    with open('a.mem', 'wb') as mem_file:
                        mem_file.write(bytes(basename, 'utf-8'))
                    cmd += ['--mem-init=a.mem']
                    if basename[0] == 'e':
                        cmd += ['--mem-base=1024']
                if '4GB' in basename:
                    cmd += ['--mem-max=4294967296']
                if 'i64' in basename or 'wasm-only' in basename or 'noffi' in basename:
                    cmd += ['--wasm-only']
                print(' '.join(cmd))
                actual = support.run_command(cmd)
                with open(os.path.join(shared.options.binaryen_test, wasm), 'w') as o:
                    o.write(actual)
                if 'debugInfo' in basename:
                    cmd += ['--source-map', os.path.join(shared.options.binaryen_test, wasm + '.map'), '-o', 'a.wasm']
                    support.run_command(cmd)
def update_bin_fmt_tests():
    """Regenerate binary-format round-trip expectations for every .wast test.

    Assembles each .wast to a.wasm (with and without debug info), disassembles
    it back to a.wast, and stores the result in the `.fromBinary` (or
    `.fromBinary.noDebugInfo`) expectation file next to the test.
    """
    print('\n[ checking binary format testcases... ]\n')
    for wast in shared.get_tests(shared.options.binaryen_test, ['.wast']):
        for debug_info in [0, 1]:
            cmd = shared.WASM_AS + [wast, '-o', 'a.wasm', '-all']
            if debug_info:
                cmd += ['-g']
            print(' '.join(cmd))
            # Remove stale artifacts so the existence asserts below actually
            # verify that the tools produced fresh output.
            if os.path.exists('a.wasm'):
                os.unlink('a.wasm')
            subprocess.check_call(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            assert os.path.exists('a.wasm')
            cmd = shared.WASM_DIS + ['a.wasm', '-o', 'a.wast']
            print(' '.join(cmd))
            if os.path.exists('a.wast'):
                os.unlink('a.wast')
            subprocess.check_call(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            assert os.path.exists('a.wast')
            # Use a context manager so the handle is closed deterministically
            # (the original `open(...).read()` leaked it).
            with open('a.wast') as disassembled:
                actual = disassembled.read()
            binary_file = wast + '.fromBinary'
            if not debug_info:
                binary_file += '.noDebugInfo'
            with open(binary_file, 'w') as o:
                o.write(actual)
def update_example_tests():
    """Build and run every C/C++ example, refreshing its expected output.

    For .txt inputs, first extracts an embedded C API trace (if any) into
    trace.cpp; otherwise compiles the example source directly. The example is
    compiled, linked against the binaryen shared library via rpath, executed,
    and its stdout written to the matching `.txt` expectation file.
    """
    print('\n[ checking example testcases... ]\n')
    for t in shared.get_tests(shared.get_test_dir('example')):
        basename = os.path.basename(t)
        output_file = os.path.join(shared.options.binaryen_bin, 'example')
        libdir = os.path.join(shared.BINARYEN_INSTALL_DIR, 'lib')
        # Base link flags; the compiler driver and objects are prepended later.
        cmd = ['-I' + os.path.join(shared.options.binaryen_root, 'src'), '-g', '-pthread', '-o', output_file]
        if t.endswith('.txt'):
            # check if there is a trace in the file, if so, we should build it
            out = subprocess.Popen([os.path.join(shared.options.binaryen_root, 'scripts', 'clean_c_api_trace.py'), t], stdout=subprocess.PIPE).communicate()[0]
            if len(out) == 0:
                print(' (no trace in ', basename, ')')
                continue
            print(' (will check trace in ', basename, ')')
            src = 'trace.cpp'
            with open(src, 'wb') as o:
                o.write(out)
            expected = t + '.txt'
        else:
            src = t
            expected = os.path.splitext(t)[0] + '.txt'
        if not src.endswith(('.c', '.cpp')):
            continue
        # build the C file separately
        extra = [os.environ.get('CC') or 'gcc',
                 src, '-c', '-o', 'example.o',
                 '-I' + os.path.join(shared.options.binaryen_root, 'src'), '-g', '-L' + libdir, '-pthread']
        print('build: ', ' '.join(extra))
        if src.endswith('.cpp'):
            extra += ['-std=c++14']
        print(os.getcwd())
        subprocess.check_call(extra)
        # Link against the binaryen C library DSO, using rpath
        cmd = ['example.o', '-L' + libdir, '-lbinaryen', '-Wl,-rpath,' + os.path.abspath(libdir)] + cmd
        print(' ', basename, src, expected)
        if os.environ.get('COMPILER_FLAGS'):
            for f in os.environ.get('COMPILER_FLAGS').split(' '):
                cmd.append(f)
        cmd = [os.environ.get('CXX') or 'g++', '-std=c++14'] + cmd
        try:
            print('link: ', ' '.join(cmd))
            subprocess.check_call(cmd)
            print('run...', output_file)
            proc = subprocess.Popen([output_file], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            actual, err = proc.communicate()
            assert proc.returncode == 0, [proc.returncode, actual, err]
            with open(expected, 'wb') as o:
                o.write(actual)
        finally:
            # Always clean up the built binary, even on failure.
            os.remove(output_file)
            if sys.platform == 'darwin':
                # Also removes debug directory produced on Mac OS
                shutil.rmtree(output_file + '.dSYM')
def update_wasm_dis_tests():
    """Regenerate the `.fromBinary` expectation for every provided .wasm file.

    Runs wasm-dis on each test binary (passing its source map when one exists
    next to the binary) and stores the disassembly as the expected output.
    """
    print('\n[ checking wasm-dis on provided binaries... ]\n')
    for t in shared.get_tests(shared.options.binaryen_test, ['.wasm']):
        print('..', os.path.basename(t))
        cmd = shared.WASM_DIS + [t]
        if os.path.isfile(t + '.map'):
            cmd += ['--source-map', t + '.map']
        actual = support.run_command(cmd)
        # Use a context manager so the expectation file is flushed and closed
        # deterministically (the original `open(...).write(...)` leaked it).
        with open(t + '.fromBinary', 'w') as out:
            out.write(actual)
def update_ctor_eval_tests():
    """Regenerate the `.out` expectation for every wasm-ctor-eval testcase.

    Reads the ctor list from the sibling `.ctors` file, runs wasm-ctor-eval,
    and stores the produced text module as the expected output.
    """
    print('\n[ checking wasm-ctor-eval... ]\n')
    for t in shared.get_tests(shared.get_test_dir('ctor-eval'), ['.wast', '.wasm']):
        print('..', os.path.basename(t))
        # Context managers close the handles deterministically (the original
        # bare `open(...).read()` calls leaked them).
        with open(t + '.ctors') as ctors_file:
            ctors = ctors_file.read().strip()
        cmd = shared.WASM_CTOR_EVAL + [t, '-all', '-o', 'a.wast', '-S', '--ctors', ctors]
        support.run_command(cmd)
        with open('a.wast') as result_file:
            actual = result_file.read()
        out = t + '.out'
        with open(out, 'w') as o:
            o.write(actual)
def update_metadce_tests():
    """Regenerate `.dced` and `.dced.stdout` expectations for metadce tests.

    Runs wasm-metadce with the test's graph file and records both the
    resulting module text and the tool's stdout.
    """
    print('\n[ checking wasm-metadce... ]\n')
    for t in shared.get_tests(shared.get_test_dir('metadce'), ['.wast', '.wasm']):
        print('..', os.path.basename(t))
        graph = t + '.graph.txt'
        cmd = shared.WASM_METADCE + [t, '--graph-file=' + graph, '-o', 'a.wast', '-S', '-all']
        stdout = support.run_command(cmd)
        # Use a context manager so the handle is closed deterministically
        # (the original `open(...).read()` leaked it).
        with open('a.wast') as result_file:
            actual = result_file.read()
        out = t + '.dced'
        with open(out, 'w') as o:
            o.write(actual)
        with open(out + '.stdout', 'w') as o:
            o.write(stdout)
def update_reduce_tests():
    """Regenerate wasm-reduce expectations (requires a shell timeout tool).

    Assembles each reduce test, runs wasm-reduce against wasm-opt's
    --fuzz-exec mode, and disassembles the reduced module into the `.txt`
    expectation file.
    """
    if not shared.has_shell_timeout():
        return
    print('\n[ checking wasm-reduce ]\n')
    for t in shared.get_tests(shared.get_test_dir('reduce'), ['.wast']):
        print('..', os.path.basename(t))
        # convert to wasm
        support.run_command(shared.WASM_AS + [t, '-o', 'a.wasm'])
        print(support.run_command(shared.WASM_REDUCE + ['a.wasm', '--command=%s b.wasm --fuzz-exec' % shared.WASM_OPT[0], '-t', 'b.wasm', '-w', 'c.wasm']))
        expected = t + '.txt'
        support.run_command(shared.WASM_DIS + ['c.wasm', '-o', expected])
def update_spec_tests():
    """Regenerate wasm-shell expected output for the spec test suite.

    Only tests that already have an expected-output log get refreshed; the
    shell's `[trap ...]` diagnostic lines are filtered out because the spec
    suite does not expect them.
    """
    print('\n[ updating wasm-shell spec testcases... ]\n')
    for t in shared.options.spec_tests:
        print('..', os.path.basename(t))
        cmd = shared.WASM_SHELL + [t]
        expected = os.path.join(shared.get_test_dir('spec'), 'expected-output', os.path.basename(t) + '.log')
        if os.path.isfile(expected):
            stdout = support.run_command(cmd, stderr=subprocess.PIPE)
            # filter out binaryen interpreter logging that the spec suite
            # doesn't expect
            filtered = [line for line in stdout.splitlines() if not line.startswith('[trap')]
            stdout = '\n'.join(filtered) + '\n'
            with open(expected, 'w') as o:
                o.write(stdout)
# Registry mapping suite names (as accepted on the command line) to their
# updater functions; ordered so suites run in a deterministic sequence.
TEST_SUITES = OrderedDict([
    ('wasm-opt', wasm_opt.update_wasm_opt_tests),
    ('asm2wasm', update_asm_js_tests),
    ('wasm-dis', update_wasm_dis_tests),
    ('example', update_example_tests),
    ('ctor-eval', update_ctor_eval_tests),
    ('wasm-metadce', update_metadce_tests),
    ('wasm-reduce', update_reduce_tests),
    ('spec', update_spec_tests),
    ('lld', lld.update_lld_tests),
    ('wasm2js', wasm2js.update_wasm2js_tests),
    ('binfmt', update_bin_fmt_tests),
    ('binaryenjs', binaryenjs.update_binaryen_js_tests),
])
def main():
    """Run the requested updater suites (or all of them).

    With --list-suites, just print the known suite names and exit.
    Returns 0 on --list-suites; otherwise returns None (exit status 0).
    """
    if shared.options.list_suites:
        for suite in TEST_SUITES.keys():
            print(suite)
        return 0
    # No explicit request means: update every suite.
    for test in shared.requested or TEST_SUITES.keys():
        TEST_SUITES[test]()
    print('\n[ success! ]')
# Script entry point: propagate main()'s return value as the process exit code.
if __name__ == '__main__':
    sys.exit(main())
| 40.19305 | 159 | 0.552065 |
0b50c280d33c179648371aa87013a25c5def39d1 | 1,970 | py | Python | app/api/clustering.py | story-squad/py-story-squad-DS | dc56a1dc22be3d8bd2f1a23f8d90090b025cebc1 | [
"MIT"
] | 1 | 2021-04-13T04:17:44.000Z | 2021-04-13T04:17:44.000Z | app/api/clustering.py | story-squad/py-story-squad-DS | dc56a1dc22be3d8bd2f1a23f8d90090b025cebc1 | [
"MIT"
] | null | null | null | app/api/clustering.py | story-squad/py-story-squad-DS | dc56a1dc22be3d8bd2f1a23f8d90090b025cebc1 | [
"MIT"
] | null | null | null | import logging
from fastapi import APIRouter
from app.utils.clustering.clustering_mvp import batch_cluster
# global variables and services
# Module-level FastAPI router and logger shared by the endpoints below.
router = APIRouter()
log = logging.getLogger(__name__)
@router.post("/cluster")
async def cluster_endpoint(sub: dict):
    """Cluster submissions into groups of four within each cohort.

    Arguments:
    ---
    sub (dict): Mapping of cohort IDs to submission objects, of the form:
        ```
        {
            "1": { # cohortID
                "1": { # submissionID
                    "Image": "http://lorempixel.com/640/480/abstract",
                    "Inappropriate": False,
                    "Sensitive": False,
                    "Status": "APPROVED",
                    "Complexity": 123,
                    "Pages": {
                        "1": "http://lorempixel.com/640/480/abstract",
                        "2": "http://lorempixel.com/640/480/abstract",
                    },
                },
            },
        }
        ```

    Returns:
    ---
    `response` json - cohort IDs mapped to lists of submission-ID groups:
        {
            "1": [["1","2","3","4"]],   # CohortID: [Group1[SubmissionIDs],GroupN]
            "2": [["5","6","7","8"]]
        }

    Note:
    ---
    Every submission in this payload has already passed moderation review
    and is approved for COPPA compliance.
    """
    # Delegate the actual grouping to the clustering utility.
    return await batch_cluster(sub)
| 28.970588 | 77 | 0.481218 |
972fe4b37841e3046c10ec0b07db246b846cf00f | 8,811 | py | Python | src/PVtkView.py | joelguerrero/PyAero | ab8bc0436533834fe66a9d596b1ae2d8ba5faee7 | [
"MIT"
] | 1 | 2021-08-02T03:02:06.000Z | 2021-08-02T03:02:06.000Z | src/PVtkView.py | positroncascade/PyAero | 230fcad151621f2675eb931ff420740418ab4b56 | [
"MIT"
] | null | null | null | src/PVtkView.py | positroncascade/PyAero | 230fcad151621f2675eb931ff420740418ab4b56 | [
"MIT"
] | 1 | 2019-12-05T06:01:07.000Z | 2019-12-05T06:01:07.000Z | import vtk
from PyQt4 import QtGui
from vtk.qt4.QVTKRenderWindowInteractor import QVTKRenderWindowInteractor
import PyAero
import PLogger as logger
from PSettings import LOGCOLOR
class VtkWindow(QtGui.QFrame):
"""
VtkWindow integrates a QVTKRenderWindowInteractor for Python and Qt. Uses a
vtkGenericRenderWindowInteractor to handle the interactions. Use
GetRenderWindow() to get the vtkRenderWindow. Create with the
keyword stereo=1 in order to generate a stereo-capable window.
The user interface is summarized in vtkInteractorStyle.h:
- Keypress j / Keypress t: toggle between joystick (position
sensitive) and trackball (motion sensitive) styles. In joystick
style, motion occurs continuously as long as a mouse button is
pressed. In trackball style, motion occurs when the mouse button
is pressed and the mouse pointer moves.
- Keypress c / Keypress o: toggle between camera and object
(actor) modes. In camera mode, mouse events affect the camera
position and focal point. In object mode, mouse events affect
the actor that is under the mouse pointer.
- Button 1: rotate the camera around its focal point (if camera
mode) or rotate the actor around its origin (if actor mode). The
rotation is in the direction defined from the center of the
renderer's viewport towards the mouse position. In joystick mode,
the magnitude of the rotation is determined by the distance the
mouse is from the center of the render window.
- Button 2: pan the camera (if camera mode) or translate the actor
(if object mode). In joystick mode, the direction of pan or
translation is from the center of the viewport towards the mouse
position. In trackball mode, the direction of motion is the
direction the mouse moves. (Note: with 2-button mice, pan is
defined as <Shift>-Button 1.)
- Button 3: zoom the camera (if camera mode) or scale the actor
(if object mode). Zoom in/increase scale if the mouse position is
in the top half of the viewport; zoom out/decrease scale if the
mouse position is in the bottom half. In joystick mode, the amount
of zoom is controlled by the distance of the mouse pointer from
the horizontal centerline of the window.
- Keypress 3: toggle the render window into and out of stereo
mode. By default, red-blue stereo pairs are created. Some systems
support Crystal Eyes LCD stereo glasses; you have to invoke
SetStereoTypeToCrystalEyes() on the rendering window. Note: to
use stereo you also need to pass a stereo=1 keyword argument to
the constructor.
- Keypress e: exit the application.
- Keypress f: fly to the picked point
- Keypress p: perform a pick operation. The render window interactor
has an internal instance of vtkCellPicker that it uses to pick.
- Keypress r: reset the camera view along the current view
direction. Centers the actors and moves the camera so that all actors
are visible.
- Keypress s: modify the representation of all actors so that they
are surfaces.
- Keypress u: invoke the user-defined function. Typically, this
keypress will bring up an interactor that you can type commands in.
- Keypress w: modify the representation of all actors so that they
are wireframe.
"""
def __init__(self, parent=None):
super(VtkWindow, self).__init__(parent)
self.parent = parent
self.outline = False
self.vl = QtGui.QVBoxLayout()
self.vtkWidget = QVTKRenderWindowInteractor(self)
self.vl.addWidget(self.vtkWidget)
self.setLayout(self.vl)
self.renderer = vtk.vtkRenderer()
self.vtkwindow = self.vtkWidget.GetRenderWindow()
self.vtkwindow.AddRenderer(self.renderer)
self.interactor = self.vtkWidget.GetRenderWindow().GetInteractor()
self.renderer.GradientBackgroundOn()
self.renderer.SetBackground(1, 1, 1)
self.renderer.SetBackground2(0, 0, 1)
self.interactor.Initialize()
self.vtkWidget.AddObserver('KeyPressEvent', self.onKeyPress)
def onKeyPress(self, obj, event):
""""Define hotkeys. Partially overwriting base functionality of
QVTKRenderWindowInteractor.
"""
key = obj.GetKeyCode()
# logger.log.debug('Key code returned is %s' % (key))
if key == 'o':
self.toggleOutline()
elif key == 'p':
self.setDisplay('points')
elif key == 'w':
self.setDisplay('wireframe')
elif key == 's':
self.setDisplay('shaded')
elif key == 'f':
self.setShading('flat')
elif key == 'g':
self.setShading('gouraud')
elif key == 'W':
# FIXME
# FIXME add dialog for filename
# FIXME
self.writeStl('test_writing_STL.stl')
elif key == 'h':
self.makeScreenshot()
def readStl(self, name):
self.reader = vtk.vtkSTLReader()
self.reader.SetFileName(str(name))
logger.log.info('STL file <b><font color=%s>' % (LOGCOLOR) +
str(name) + '</b> loaded')
self.mapper = vtk.vtkPolyDataMapper()
self.mapper.SetInputConnection(self.reader.GetOutputPort())
self.actor = vtk.vtkActor()
self.actor.SetMapper(self.mapper)
self.actor.GetProperty().SetColor(0, 1, 0.2) # (R,G,B)
self.actor.GetProperty().SetLineWidth(2.0)
# create outline mapper
self.outl = vtk.vtkOutlineFilter()
self.outl.SetInputConnection(self.reader.GetOutputPort())
self.outlineMapper = vtk.vtkPolyDataMapper()
self.outlineMapper.SetInputConnection(self.outl.GetOutputPort())
# create outline actor
self.outlineActor = vtk.vtkActor()
self.outlineActor.SetMapper(self.outlineMapper)
self.renderer.AddActor(self.actor)
self.renderer.ResetCamera()
self.setDisplay('shaded')
self.setShading('gouraud')
# set tab to VTK window after loading an STL file
ntabs = self.parent.centralwidget.tabs.count()
self.parent.centralwidget.tabs.setCurrentIndex(ntabs-1)
def writeStl(self, name):
# Write the stl file to disk
self.writer = vtk.vtkSTLWriter()
self.writer.SetFileName(name)
# self.writer.SetFileTypeToASCII()
self.writer.SetFileTypeToBinary()
self.writer.SetInputConnection(self.reader.GetOutputPort())
self.writer.Write()
def setShading(self, style):
if style.lower() == 'flat':
self.actor.GetProperty().SetInterpolationToFlat()
if style.lower() == 'gouraud':
self.actor.GetProperty().SetInterpolationToGouraud()
if style.lower() == 'phong':
self.actor.GetProperty().SetInterpolationToPhong()
def setDisplay(self, style):
if style.lower() == 'points':
self.actor.GetProperty().SetRepresentationToPoints()
if style.lower() == 'wireframe':
self.actor.GetProperty().SetRepresentationToWireframe()
if style.lower() == 'shaded':
self.actor.GetProperty().SetRepresentationToSurface()
    def edgesOn(self):
        """Render cell edges on top of the current surface representation."""
        self.actor.GetProperty().EdgeVisibilityOn()
def toggleOutline(self):
self.outline = not self.outline
if self.outline:
self.renderer.AddActor(self.outlineActor)
else:
self.renderer.RemoveActor(self.outlineActor)
# redraw everything
self.vtkWidget.Render()
    def makeScreenshot(self):
        """Grab the current VTK render window and save it as a PNG.

        Prompts for a file name via a Qt input dialog (cancel aborts),
        appends '.png' if missing, writes to the current working folder
        and confirms with a message box.
        """
        # capture the render window contents into an image
        w2if = vtk.vtkWindowToImageFilter()
        w2if.SetInput(self.vtkwindow)
        w2if.Update()
        title = PyAero.__appname__+' - Message'
        dlg = QtGui.QInputDialog(self)
        dlg.resize(400, 200)
        dlg.setWindowTitle(title)
        dlg.setInputMode(QtGui.QInputDialog.TextInput)
        dlg.setLabelText('Enter screenshot name (*.png):')
        dlg.exec_()
        # result() is falsy when the user cancelled the dialog
        if not dlg.result():
            return
        fname = str(dlg.textValue())
        if not fname.endswith('.png'):
            fname = fname + '.png'
        writer = vtk.vtkPNGWriter()
        writer.SetFileName(fname)
        writer.SetInputData(w2if.GetOutput())
        writer.Write()
        # confirm to the user and mirror the message into the log
        text = 'Screenshot <b>%s</b> generated in current folder.' % (fname)
        msgbox = QtGui.QMessageBox()
        msgbox.setWindowTitle(title)
        msgbox.setText(text)
        msgbox.exec_()
        logger.log.info(text)
| 37.334746 | 80 | 0.641925 |
1ca1b892d2a784f4317e66eee01c80b7f4cef08b | 5,266 | py | Python | app/main/views.py | albunus/Blog_website | 3d517f6ae4d54e14552f3a2a5915b6d60c14fb87 | [
"MIT"
] | null | null | null | app/main/views.py | albunus/Blog_website | 3d517f6ae4d54e14552f3a2a5915b6d60c14fb87 | [
"MIT"
] | null | null | null | app/main/views.py | albunus/Blog_website | 3d517f6ae4d54e14552f3a2a5915b6d60c14fb87 | [
"MIT"
] | null | null | null | from flask import render_template,request,redirect,url_for,abort,flash,session
from . import main
from app.requests import get_quote
from flask_login import login_required,current_user
from ..models import Role,User,Blog,Comment,Subscriber
from ..import db, photos
import os,secrets
# from PIL import Image
from .forms import UpdateProfile,CreateBlog,CommentForm
from ..email import mail_message
#Views
@main.route('/')
def index():
    """Landing page: all blog posts, newest first, plus a random quote."""
    recent_blogs = Blog.query.order_by(Blog.time.desc())
    return render_template('index.html', blogs=recent_blogs, quote=get_quote())
@main.route('/new_post', methods=['POST','GET'])
@login_required
def new_blog():
    """Create a new blog post and notify every subscriber by e-mail.

    GET renders the empty form; a valid POST persists the post and
    redirects to the index page.
    """
    subscribers = Subscriber.query.all()
    form = CreateBlog()
    if form.validate_on_submit():
        title = form.title.data
        description = form.description.data
        content = form.content.data
        # attach the post to the real user object behind the login proxy
        user_id = current_user._get_current_object().id
        blog = Blog(title=title,description = description, content=content,user_id=user_id)
        blog.save()
        # NOTE(review): mails are sent synchronously, one per subscriber —
        # this blocks the request for large subscriber lists; consider a queue
        for subscriber in subscribers:
            mail_message("New Blog Post","email/new_blog",subscriber.email,blog=blog)
        return redirect(url_for('main.index'))
    return render_template('post.html', form = form)
@main.route('/profile/<name>',methods = ['POST','GET'])
@login_required
def profile(name):
    """Show a user's profile page; a POSTed 'photo' updates the avatar.

    Fix: the stored path previously was the literal string
    'photos/(unknown)' and the filename returned by photos.save() was
    never used, so every upload pointed at the same non-existent file.
    The saved filename is now embedded in the stored path.
    """
    user = User.query.filter_by(username = name).first()
    if 'photo' in request.files:
        # Flask-Uploads saves the file and returns the stored filename
        filename = photos.save(request.files['photo'])
        path = f'photos/{filename}'
        user.profile_pic_path = path
        db.session.commit()
    return render_template('profile/profile.html',user = user)
@main.route('/user/<name>/updateprofile', methods = ['POST','GET'])
@login_required
def updateprofile(name):
    """Edit a user's profile (username, email, bio).

    GET pre-fills the form with the current user's values; a valid POST
    saves the changes and redirects back to the profile page.
    """
    user = User.query.filter_by(username = name).first()
    form = UpdateProfile()
    if form.validate_on_submit():
        user.username = form.username.data
        user.email = form.email.data
        user.bio = form.bio.data
        db.session.commit()
        # redirect uses the (possibly changed) username
        return redirect(url_for('main.profile',name=user.username,))
    elif request.method == 'GET':
        # pre-populate the form from the logged-in user, not from `user`
        # looked up by URL — NOTE(review): confirm these are intended to
        # be the same account (no ownership check is performed here)
        form.username.data = current_user.username
        form.email.data = current_user.email
        form.bio.data = current_user.bio
    return render_template('profile/update.html', user = user, form =form)
@main.route('/blog/<id>')
@login_required
def blog(id):
    """Render a single blog post together with its comments."""
    post = Blog.query.get(id)
    post_comments = Comment.query.filter_by(blog_id=id).all()
    return render_template('blog_page.html', blog=post, comments=post_comments)
@main.route('/blog/<blog_id>/update', methods = ['GET','POST'])
@login_required
def updateblog(blog_id):
    """Edit an existing blog post; only its author is allowed (403 otherwise).

    GET pre-fills the form with the stored post; a valid POST saves the
    changes and redirects to the post's page.
    """
    blog = Blog.query.get(blog_id)
    # authorization guard: only the author may edit
    if blog.user != current_user:
        abort(403)
    form = CreateBlog()
    if form.validate_on_submit():
        blog.title = form.title.data
        blog.description = form.description.data
        blog.content = form.content.data
        db.session.commit()
        return redirect(url_for('main.blog',id = blog.id))
    if request.method == 'GET':
        # pre-populate the form with the current post contents
        form.title.data = blog.title
        form.description.data = blog.description
        form.content.data = blog.content
    return render_template('edit_blog.html', form = form)
@main.route('/comment/new/<int:blog_id>', methods = ['GET','POST'])
@login_required
def new_comment(blog_id):
    """Add a comment to blog post *blog_id*.

    GET renders the comment form; a valid POST stores the comment and
    redirects to the post's comment list.
    """
    form = CommentForm()
    title = 'Add a comment'
    blog = Blog.query.filter_by(id=blog_id).first()
    if form.validate_on_submit():
        comment = form.comment.data
        new_comment = Comment(comment = comment,blog_id = blog_id, user_id=current_user.id)
        db.session.add(new_comment)
        db.session.commit()
        # '.view_comments' resolves inside this blueprint
        return redirect(url_for('.view_comments', id= blog.id))
    return render_template('add_comment.html', form = form,blog = blog,title=title )
@main.route('/view_comments/<id>')
@login_required
def view_comments(id):
    """Render the comments attached to blog post *id*."""
    return render_template(
        'comment.html',
        comment=Comment.get_comments(id),
        title='View Comments',
    )
@main.route('/subscribe',methods = ['POST','GET'])
def subscribe():
    """Register a newsletter subscriber and send a welcome e-mail.

    Reads the address from the 'subscriber' form field, then redirects
    back to the index page.
    """
    email = request.form.get('subscriber')
    # NOTE(review): no format validation or duplicate check on the address
    # before persisting — confirm whether the model enforces uniqueness
    new_subscriber = Subscriber(email = email)
    new_subscriber.save_subscriber()
    mail_message("Subscribed to Albunus Blogs","email/welcome_subscriber",new_subscriber.email,new_subscriber=new_subscriber)
    return redirect(url_for('main.index'))
@main.route('/blog/<blog_id>/delete', methods=['POST'])
@login_required
def delete_post(blog_id):
    """Delete a blog post; only its author may do so (403 otherwise)."""
    post = Blog.query.get(blog_id)
    if post.user != current_user:
        abort(403)  # authorization guard
    post.delete()
    return redirect(url_for('main.index'))
@main.route("/blog/<int:id>/<int:comment_id>/delete")
@login_required
def delete_comment(id, comment_id):
blog = Blog.query.filter_by(id = id).first()
comment = Comment.query.filter_by(id = comment_id).first()
db.session.delete(comment)
db.session.commit()
return redirect(url_for('main.blog',id = blog.id))
@main.route('/user/<string:username>')
def user_posts(username):
    """Render all blog posts written by *username*, newest first.

    Fix: the return line carried trailing extraction garbage
    ('| 35.342282 | 125 | 0.691037 |') that made the file unparseable;
    the statement is restored to a clean render_template call.
    """
    user = User.query.filter_by(username=username).first()
    blogs = Blog.query.filter_by(user=user).order_by(Blog.time.desc())
    return render_template('user.html', blogs=blogs, user=user)
3290077cb3eb0546628b338e1a9d7ecc78e6fd3a | 2,433 | py | Python | src/sensors/sensors21_stream.py | mBouamama/Chartboard | df8050c8dba3bbe905a88b1e9268bb3aa09a3f11 | [
"Apache-2.0"
] | 21 | 2020-02-03T14:13:07.000Z | 2022-03-23T18:04:38.000Z | src/sensors/sensors21_stream.py | mBouamama/Chartboard | df8050c8dba3bbe905a88b1e9268bb3aa09a3f11 | [
"Apache-2.0"
] | 50 | 2020-04-13T13:59:32.000Z | 2022-02-12T23:30:17.000Z | src/sensors/sensors21_stream.py | mBouamama/Chartboard | df8050c8dba3bbe905a88b1e9268bb3aa09a3f11 | [
"Apache-2.0"
] | 4 | 2020-04-29T06:58:01.000Z | 2021-05-22T15:35:07.000Z | import time, random
from src.sensors.utils import end, sendUpdateByApi
def executeScriptToGetData(tile_id='stream_ex'):
    """Pick a random webcam stream URL for the 'stream' tile example.

    Returns a payload dict of the form {'url': <m3u8 stream URL>}.
    Non-French cameras are kept below, commented out, for easy swapping.
    """
    webcamArray = [
        # America
        # "https://videos-3.earthcam.com/fecnetwork/16730.flv/chunklist_w1958387849.m3u8"
        # Espagna
        # "https://video-auth1.iol.pt/beachcam/palavasrivegauche/chunks.m3u8",
        # "https://video-auth1.iol.pt/beachcam/carcavelos/chunks.m3u8",
        # "https://video-auth1.iol.pt/beachcam/bcmafraribeira/chunks.m3u8",
        # "https://video-auth1.iol.pt/beachcam/praiaguinchosul/chunks.m3u8",
        # "https://video-auth1.iol.pt/beachcam/costacaparicacds/chunks.m3u8",
        # "https://video-auth1.iol.pt/beachcam/lagide/chunks.m3u8",
        # France
        "https://video-auth1.iol.pt/beachcam/pourville/chunks.m3u8",
        "https://video-auth1.iol.pt/beachcam/lehavre/chunks.m3u8",
        "https://video-auth1.iol.pt/beachcam/siouvilles/chunks.m3u8",
        "https://video-auth1.iol.pt/beachcam/saintbrevin/chunks.m3u8",
        "https://video-auth1.iol.pt/beachcam/sablesolonne/chunks.m3u8",
        "https://video-auth1.iol.pt/beachcam/cotesauvage/chunks.m3u8",
        "https://video-auth1.iol.pt/beachcam/ronce/chunks.m3u8",
        "https://video-auth1.iol.pt/beachcam/montalivet/chunks.m3u8",
        "https://video-auth1.iol.pt/beachcam/latestedubuch/chunks.m3u8",
        "https://video-auth1.iol.pt/beachcam/biscarosse/chunks.m3u8",
        "https://video-auth1.iol.pt/beachcam/vieuxboucau01/chunks.m3u8",
        "https://video-auth1.iol.pt/beachcam/seignosse/chunks.m3u8",
        "https://video-auth1.iol.pt/beachcam/seignossebourdaines/chunks.m3u8",
        "https://video-auth1.iol.pt/beachcam/capbreton/chunks.m3u8",
        "https://video-auth1.iol.pt/beachcam/capbreton2/chunks.m3u8",
        "https://video-auth1.iol.pt/beachcam/bidart/chunks.m3u8",
        "https://video-auth1.iol.pt/beachcam/cavaliers/chunks.m3u8",
    ]
    # two independent draws followed by a third draw between them — the
    # marginal distribution is still uniform over webcamArray
    c1 = random.choice(webcamArray)
    c2 = random.choice(webcamArray)
    return {
        'url': random.choice([c1, c2])
    }
def sonde21(tester=None, tile_id='stream_ex'):
    """Push a freshly picked stream URL to tile *tile_id* and log the timing."""
    started = time.time()
    payload = executeScriptToGetData()
    api_answer = sendUpdateByApi(tileId=tile_id, data=payload, tileTemplate='stream', tester=tester)
    end(title=f'sensor21 -> -> {tile_id}', startTime=started, tipboardAnswer=api_answer, tileId=tile_id)
| 49.653061 | 103 | 0.683107 |
7dce45d2e7fe0829e329b44c57b90049a5d6b3b2 | 2,875 | py | Python | yangram/users/serializers.py | LuceteYang/yangram | 922c4924c06043d5d27410611f4a76904452d44e | [
"MIT"
] | 1 | 2019-02-26T21:39:10.000Z | 2019-02-26T21:39:10.000Z | yangram/users/serializers.py | LuceteYang/yangram | 922c4924c06043d5d27410611f4a76904452d44e | [
"MIT"
] | 11 | 2020-06-05T20:02:33.000Z | 2022-02-26T09:55:09.000Z | yangram/users/serializers.py | LuceteYang/yangram | 922c4924c06043d5d27410611f4a76904452d44e | [
"MIT"
] | 1 | 2020-05-05T18:37:11.000Z | 2020-05-05T18:37:11.000Z | from rest_framework import serializers
from rest_auth.registration.serializers import RegisterSerializer
from allauth.account.adapter import get_adapter
from allauth.account.utils import setup_user_email
from . import models
from yangram.images import serializers as images_serializers
class ListUserSerializer(serializers.ModelSerializer):
    """Compact user representation for list endpoints, including whether the
    requesting user already follows each listed user."""
    # resolved by get_following() below (SerializerMethodField convention)
    following = serializers.SerializerMethodField()
    class Meta:
        model = models.User
        fields = (
            'id',
            'profile_image',
            'username',
            'name',
            'bio',
            'website',
            'post_count',
            'followers_count',
            'following_count',
            'following'
        )
    # DRF resolves SerializerMethodField via get_<field_name>
    def get_following(self, obj):
        """True when the requesting user follows *obj*; False when no request
        is available in the serializer context."""
        if 'request' in self.context:
            request = self.context['request']
            if obj in request.user.following.all():
                return True
        return False
class UserProfileSerializer(serializers.ModelSerializer):
    """Full profile representation: counters, the user's images, and the
    viewer-relative flags is_self / following."""
    images = images_serializers.ImageSerializer(many=True, read_only=True)
    # model-side computed counters, exposed read-only
    post_count = serializers.ReadOnlyField()
    followers_count = serializers.ReadOnlyField()
    following_count = serializers.ReadOnlyField()
    is_self = serializers.SerializerMethodField()
    following = serializers.SerializerMethodField()
    class Meta:
        model = models.User
        fields = (
            'profile_image',
            'username',
            'name',
            'bio',
            'website',
            'post_count',
            'followers_count',
            'following_count',
            'images',
            'is_self',
            'following'
        )
    def get_is_self(self, user):
        """True when the serialized profile belongs to the requesting user."""
        if 'request' in self.context:
            request = self.context['request']
            if user.id == request.user.id:
                return True
            else:
                return False
        return False
    def get_following(self, obj):
        """True when the requesting user follows *obj*; False without a request
        in context (same logic as ListUserSerializer.get_following)."""
        if 'request' in self.context:
            request = self.context['request']
            if obj in request.user.following.all():
                return True
        return False
class SignUpSerializer(RegisterSerializer):
    """rest-auth registration serializer extended with a required 'name' field."""
    # write_only: accepted on sign-up but never echoed back in responses
    name = serializers.CharField(required=True, write_only=True)
    def get_cleaned_data(self):
        """Collect the validated registration fields ('' when absent)."""
        return {
            'name': self.validated_data.get('name', ''),
            'username': self.validated_data.get('username', ''),
            'password1': self.validated_data.get('password1', ''),
            'email': self.validated_data.get('email', '')
        }
    def save(self, request):
        """Create the user through the allauth adapter and register their email.

        Order matters: the adapter must persist the user before
        setup_user_email() can attach the address records.
        """
        adapter = get_adapter()
        user = adapter.new_user(request)
        self.cleaned_data = self.get_cleaned_data()
        adapter.save_user(request, user, self)
        setup_user_email(request, user, [])
        user.save()
        return user
28c41f02b740690b7728b5f15007d6c071e1b8ba | 8,385 | py | Python | pybaseball/teamid_lookup.py | JtotheThree/pybaseball | d23b49bc32cc20d4112a817edf252fd86d9cf56c | [
"MIT"
] | null | null | null | pybaseball/teamid_lookup.py | JtotheThree/pybaseball | d23b49bc32cc20d4112a817edf252fd86d9cf56c | [
"MIT"
] | null | null | null | pybaseball/teamid_lookup.py | JtotheThree/pybaseball | d23b49bc32cc20d4112a817edf252fd86d9cf56c | [
"MIT"
] | null | null | null | import logging
import os
import re
from difflib import SequenceMatcher
from typing import Dict, List, Optional, Set
import numpy as np
import pandas as pd
from . import lahman
from .datasources import fangraphs
from .utils import most_recent_season
_DATA_FILENAME = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data', 'fangraphs_teams.csv')
LOG_LEVEL = os.environ.get('LOG_LEVEL', 'WARNING').upper()
logging.basicConfig(level=LOG_LEVEL)
def team_ids(season: Optional[int] = None, league: str = 'ALL') -> pd.DataFrame:
    """Load the Fangraphs↔Lahman team-ID mapping, optionally filtered.

    Regenerates the cached CSV on first use. *season* restricts to a single
    year; *league* restricts to one league unless it is 'ALL'.
    """
    if not os.path.exists(_DATA_FILENAME):
        _generate_teams()
    teams = pd.read_csv(_DATA_FILENAME, index_col=0)
    if season is not None:
        teams = teams[teams['yearID'] == season]
    if league is not None and league.upper() != 'ALL':
        teams = teams[teams['lgID'] == league.upper()]
    return teams
# franchID: teamIDfg
# Hand-curated overrides applied before any automatic matching in
# _generate_teams(); these franchises cannot be matched reliably by the
# fuzzy column-based heuristics.
_manual_matches: Dict[str, int] = {
    'BLT': 1007,
    'BLU': 1008,
    'BRG': 1012,
    'BTT': 1014,
    'CEN': 1019,
    'CHP': 1022,
    'CPI': 1030,
    'ECK': 1032,
    'MAR': 1046,
    'NYY': 9,
    'SLM': 1072,
}
def _front_loaded_ratio(str_1: str, str_2: str) -> float:
'''
A difflib ration based on difflint's SequenceMatcher ration.
It gives higher weight to a name that starts the same.
For example:
In the default ratio, 'LSA' and 'BSN' both match to 'BSA' with a score of 67.
However, for team names, the first letter or two are almost always the city, which is likely to be the same.
So, in this scorer 'LSA' would match to 'BSA' with a score of 83, while 'BSN' would match at 58.
'''
if len(str_1) != 3 or len(str_2) != 3:
logging.warn(
"This ratio is intended for 3 length string comparison (such as a lahman teamID, franchID, or teamIDBR."
"Returning 0 for non-compliant string(s)."
)
return 0.0
full_score = SequenceMatcher(a=str_1, b=str_2).ratio()
front_score = SequenceMatcher(a=str_1[:-1], b=str_2[:-1]).ratio()
return (full_score + front_score) / 2
def _get_close_team_matches(lahman_row: pd.Series, fg_data: pd.DataFrame, min_score: int = 50) -> Optional[str]:
    """Fuzzy-match one Lahman team row against the Fangraphs team names of
    the same season.

    Each candidate is scored with _front_loaded_ratio() over several
    id/name columns; the three best column scores per candidate are
    averaged, and the best candidate is returned if it reaches
    *min_score* (otherwise None).
    """
    candidate_columns = ['franchID', 'teamID', 'teamIDBR', 'initials', 'city_start']
    keep_best = 3
    season_teams: Set[str] = set(fg_data.loc[fg_data['Season'] == lahman_row.yearID, 'Team'])
    if not season_teams:
        return None
    best_team: Optional[str] = None
    best_score = -1.0
    for team in season_teams:
        # a column only contributes when it holds a 3-char code
        ratios = [
            _front_loaded_ratio(lahman_row[column], team) * 100
            if len(lahman_row[column]) == 3 else 0.0
            for column in candidate_columns
        ]
        score = round(np.mean(sorted(ratios, reverse=True)[:keep_best]))
        if score > best_score:
            best_team, best_score = team, score
    return best_team if best_score >= min_score else None
def _generate_teams() -> pd.DataFrame:
    """
    Creates a datafile with a map of Fangraphs team IDs to lahman data to be used by fangraphss_teams
    Should only need to be run when a team is added, removed, or moves to a new city.

    Strategy: repeatedly outer-join the two datasets on progressively
    fuzzier key columns, peeling off matched rows each pass, then fall back
    to a close-match heuristic; any leftover rows on either side abort the
    run. The result is written to _DATA_FILENAME and returned.
    """
    start_season = 1871
    end_season = most_recent_season()
    lahman_columns = ['yearID', 'lgID', 'teamID', 'franchID', 'divID', 'name', 'teamIDBR', 'teamIDlahman45',
                      'teamIDretro']
    lahman_teams = lahman.teams()[lahman_columns]
    # Only getting AB to make payload small, and you have to specify at least one column
    fg_team_data = fangraphs.fg_team_batting_data(start_season, end_season, "ALL", stat_columns=['AB'])
    fg_columns = list(fg_team_data.columns.values)
    unjoined_fangraphs_teams = fg_team_data.copy(deep=True)
    unjoined_lahman_teams = lahman_teams.copy(deep=True)
    # derived join keys: manual override id, team-name initials, city prefix
    unjoined_lahman_teams['manual_teamid'] = unjoined_lahman_teams.apply(
        lambda row: _manual_matches.get(row.franchID, -1),
        axis=1
    )
    lahman_columns += ['manual_teamid']
    unjoined_lahman_teams['initials'] = unjoined_lahman_teams.apply(
        lambda row: re.sub(r'[^A-Z]', '', row['name']),
        axis=1
    )
    lahman_columns += ['initials']
    unjoined_lahman_teams['city_start'] = unjoined_lahman_teams.apply(
        lambda row: row['name'][:3].upper(),
        axis=1
    )
    lahman_columns += ['city_start']
    joined: pd.DataFrame = None
    # pass 1..n: join on each key in order of decreasing reliability
    for join_column in ['manual_teamid', 'teamID', 'franchID', 'teamIDBR', 'initials', 'city_start']:
        joined_count = len(joined.index) if (joined is not None) else 0
        if join_column == 'manual_teamid':
            outer_joined = unjoined_lahman_teams.merge(unjoined_fangraphs_teams, how='outer',
                                                       left_on=['yearID', join_column],
                                                       right_on=['Season', 'teamIDfg'])
        else:
            outer_joined = unjoined_lahman_teams.merge(unjoined_fangraphs_teams, how='outer',
                                                       left_on=['yearID', join_column],
                                                       right_on=['Season', 'Team'])
        # Clean up the data: rows present on both sides are matched
        found = outer_joined.query("not Season.isnull() and not yearID.isnull()")
        joined = pd.concat([joined, found]) if (joined is not None) else found
        # My kingdom for an xor function
        unjoined = outer_joined.query('yearID.isnull() or Season.isnull()')
        unjoined_lahman_teams = unjoined.query('Season.isnull()').drop(labels=fg_columns, axis=1)
        unjoined_fangraphs_teams = unjoined.query('yearID.isnull()').drop(labels=lahman_columns, axis=1)
        logging.info("Matched %s teams off of %s. %s teams remaining to match.", len(joined.index) - joined_count, join_column, len(unjoined_lahman_teams.index))
    joined_count = len(joined.index) if (joined is not None) else 0
    # Try to close match the rest (fuzzy scoring over several columns)
    unjoined_lahman_teams['close_match'] = unjoined_lahman_teams.apply(
        lambda row: _get_close_team_matches(row, unjoined_fangraphs_teams),
        axis=1
    )
    outer_joined = unjoined_lahman_teams.merge(unjoined_fangraphs_teams, how='outer', left_on=['yearID', 'close_match'],
                                               right_on=['Season', 'Team'])
    # Clean up the data
    joined = pd.concat([joined, outer_joined.query("not Season.isnull() and not yearID.isnull()")])
    unjoined = outer_joined.query('(yearID.isnull() or Season.isnull()) and not (yearID.isnull() and Season.isnull())')
    unjoined_lahman_teams = unjoined.query('Season.isnull()').drop(unjoined_fangraphs_teams.columns.values, axis=1)
    unjoined_fangraphs_teams = unjoined.query('yearID.isnull()').drop(unjoined_lahman_teams.columns, axis=1)
    logging.info("Matched %s teams off of close match. %s teams remaining to match.", len(joined.index) - joined_count, len(unjoined_lahman_teams.index))
    # any row left unmatched on either side means the mapping is incomplete
    error_state = False
    if not unjoined_lahman_teams.empty:
        logging.warning(
            'When trying to join lahman data to Fangraphs, found %s rows of extraneous lahman data: %s',
            len(unjoined_lahman_teams.index),
            unjoined_lahman_teams.sort_values(['yearID', 'lgID', 'teamID', 'franchID'])
        )
        error_state = True
    if not unjoined_fangraphs_teams.empty:
        logging.warning(
            'When trying to join Fangraphs data to lahman, found %s rows of extraneous Fangraphs data: %s',
            len(unjoined_fangraphs_teams.index),
            unjoined_fangraphs_teams.sort_values(['Season', 'Team'])
        )
        error_state = True
    if error_state:
        raise Exception("Extraneous data was not matched. Aborting.")
    # final tidy-up: keep only the id columns, normalize dtypes, dedupe
    joined = joined[['yearID', 'lgID', 'teamID', 'franchID', 'teamIDfg', 'teamIDBR', 'teamIDretro']]
    joined = joined.assign(teamIDfg=joined['teamIDfg'].apply(int))
    joined = joined.assign(yearID=joined['yearID'].apply(int))
    joined = joined.sort_values(['yearID', 'lgID', 'teamID', 'franchID']).drop_duplicates()
    joined = joined.reset_index(drop=True)
    joined.to_csv(_DATA_FILENAME)
    return joined
# For backwards API compatibility
fangraphs_teams = team_ids
| 37.600897 | 161 | 0.651163 |
5ddebdd3dbc26ba2679e073cc6ec73f2aa3234bd | 6,954 | py | Python | experiments/scaleability-dynamic/mininet-environment/monitor.py | mpeuster/estate | 4cb94201e8110f09ac72c54e7d282e8c38aee415 | [
"Apache-2.0"
] | 1 | 2021-04-28T05:13:30.000Z | 2021-04-28T05:13:30.000Z | experiments/simple-poc-comparisson/mininet-environment/monitor.py | mpeuster/estate | 4cb94201e8110f09ac72c54e7d282e8c38aee415 | [
"Apache-2.0"
] | null | null | null | experiments/simple-poc-comparisson/mininet-environment/monitor.py | mpeuster/estate | 4cb94201e8110f09ac72c54e7d282e8c38aee415 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
from libestateredis.estate_redis import estate as estater
from cppesnode.estate_zmqclient import estate as estatez
from pyclient.estate import estate as estatep
from libestatelocal.estate import estate as estatelocal
from scapy.all import sniff
from scapy.layers.inet import TCP, IP
import sys
import time
import thread
import random
import string
es = None
# global values for pps calculation
last_log_timestamp = 0
last_local_pcount = 0
last_global_pcount = 0
dummy_state_size = 0
def get_ecounter(k):
    """Fetch counter *k* from the estate backend as an int.

    Returns 0 when the key is missing, holds the 'ES_NONE' sentinel, or
    cannot be parsed as an integer.
    """
    raw = es.get(k)
    if raw is None or raw == "ES_NONE":
        return 0
    # TODO try get_global latest if we have a miss
    try:
        return int(raw)
    except ValueError:
        print("ERROR monitor.py: Cannot convert get_ecounter value to int: %s" % str(raw))
        return 0
def set_ecounter(k, v):
    """Store value *v* under key *k* in the estate backend."""
    es.set(k, v)
def incr_ecounter(k, incr=1):
    """Add *incr* to counter *k* (read-modify-write; not atomic)."""
    set_ecounter(k, get_ecounter(k) + incr)
def get_ecounter_global_sum(k):
    """Sum counter *k* across all estate instances; 0 if not parseable as int."""
    summed = es.get_global(k, red_sum)
    try:
        return int(summed)
    except ValueError:
        print("ERROR monitor.py: cannot convert get_ecounter_global_sum value to int.")
        return 0
def pkt_callback_debug(pkt):
    """Debug sniff callback: flush stdout, then return the packet's summary line."""
    sys.stdout.flush()  # push any buffered output before the summary
    return pkt.summary()
def pkt_callback(pkt):
    """
    Called for each packet seen on br0.
    Updates NF state, e.g., packet counters.
    """
    # filter for IPv4/TCP packets; everything else is ignored
    if IP not in pkt:
        return
    if TCP not in pkt:
        return
    # create 5 tuple flow identifier (src, sport, dst, dport, proto)
    flow_id = "flow_%s" % str(
        (pkt[IP].src,
         pkt[TCP].sport,
         pkt[IP].dst,
         pkt[TCP].dport,
         str(pkt[IP].proto))
    ).replace(" ", "")
    # do pattern matching on raw data (real payload scan kept disabled below)
    PATTERN = "a"
    pattern_count = 0
    #data = str(pkt[TCP].payload)
    #if len(data) > 0:
    #    pattern_count = data.count(PATTERN)
    # randomly match packets (emulate malformed packet detection)
    if pkt[TCP].seq % random.randint(10, 20) == 0:
        pattern_count += 1
    # update state values:
    # general packet count
    incr_ecounter("pcount")
    # flow specific packet count
    incr_ecounter("pcount:%s" % flow_id)
    # general match count
    incr_ecounter("matchcount", pattern_count)
    # flow specific match count
    incr_ecounter("matchcount:%s" % flow_id, pattern_count)
    # TODO: add state: flows seen, flows active on instance (local dict)
    # debugging:
    #return "PKT: " + str(pkt.show()) #pkt.summary()
def random_bytes(size):
    """Return a string of int(size) random alphanumeric characters.

    Fix: uses string.ascii_letters instead of string.letters — the latter
    was removed in Python 3 and is locale-dependent on Python 2, so the
    generated alphabet was not stable across environments.
    """
    return ''.join(
        random.choice(
            string.ascii_letters + string.digits) for _ in range(int(size)))
def init_state():
    """
    Initializes estate values and local bookkeeping for the logger.
    """
    global last_log_timestamp
    # baseline timestamp for the first pps calculation in log_global_state()
    last_log_timestamp = time.time()
    # if we should use a big chunk of dummy data for state transfers,
    # initialize it:
    if dummy_state_size > 0:
        dummydata = random_bytes(dummy_state_size)
        es.set("dummystate", dummydata)
def log_global_state():
    """
    Executed periodically.
    Requests local and global state, computes packets-per-second deltas
    against the previous invocation, and prints one log line (format
    matches print_log_header()).
    """
    global last_log_timestamp
    global last_local_pcount
    global last_global_pcount
    # receive local values (and time the request)
    t_get_local_start = time.time()
    pcount_local = get_ecounter("pcount")
    matchcount_local = get_ecounter("matchcount")
    time_local_request = time.time() - t_get_local_start
    # receive global values (and time the request)
    t_get_global_start = time.time()
    pcount_global = get_ecounter_global_sum("pcount")
    matchcount_global = get_ecounter_global_sum("matchcount")
    # if we should use dummy state with given size, ensure to fetch it always!
    # (its transfer time is deliberately included in time_global_request)
    if dummy_state_size > 0:
        dummydata = es.get_global("dummystate", red_latest)
        #print dummydata
    time_global_request = time.time() - t_get_global_start
    # calculate pps over the interval since the previous log call
    timespan = abs(time.time() - last_log_timestamp)
    last_log_timestamp = time.time()
    if timespan == 0:
        raise Exception("We have a zero timespan for PPS calculation")
    pps_local = (pcount_local - last_local_pcount) / timespan
    last_local_pcount = pcount_local
    pps_global = (pcount_global - last_global_pcount) / timespan
    last_global_pcount = pcount_global
    # generate log output (semicolon-separated, see print_log_header)
    print("LOG_NETWORK_MONITOR:"
          "%f;%f;%f;%f;%f;%f;%f;%f;%f;"
          % (time.time(),
             pps_local,
             pps_global,
             pcount_local,
             pcount_global,
             matchcount_local,
             matchcount_global,
             time_local_request,
             time_global_request))
def print_log_header():
    """Print the semicolon-separated column header matching log_global_state() rows."""
    columns = (
        "t",
        "pps_local",
        "pps_global",
        "pcount_local",
        "pcount_global",
        "matchcount_local",
        "matchcount_global",
        "t_request_local",
        "t_request_global",
    )
    print("LOG_NETWORK_MONITOR:" + "".join(name + ";" for name in columns))
def log_thread_func():
    """Background thread body: emit one global-state log line every 5 seconds.

    Runs forever; started via thread.start_new_thread() in main().
    """
    while True:
        time.sleep(5)
        log_global_state()
        # keep the log stream live even when stdout is piped/buffered
        sys.stdout.flush()
def red_sum(l):
    """Reducer for es.get_global: sum of all values coerced to float."""
    #print "red_sum: %s = %f" % (str(l), res)
    return sum(map(float, l))
def red_avg(l):
    """Reducer for es.get_global: arithmetic mean of the float-coerced values,
    or 0 for an empty input."""
    if not l:
        return 0
    total = sum(float(v) for v in l)
    #print "red_avg: %s = %f" % (str(l), res)
    return total / float(len(l))
def red_latest(l):
    """Reducer for es.get_global: head of the list as the 'latest' value,
    or the 'ES_NONE' sentinel for an empty input."""
    #TODO add real red_latest implementation
    return l[0] if l else "ES_NONE"
def main():
    """Entry point (Python 2): parse CLI args, initialize the chosen estate
    backend, start the periodic logger thread, then block sniffing br0.

    Usage: monitor.py BACKEND INST_ID DUMMY_STATE_SIZE [BACKEND_OPTIONS...]
    """
    global es
    global dummy_state_size
    if len(sys.argv) < 3:
        print "Arguments missing:"
        print "monitor.py BACKEND INST_ID DUMMY_STATE_SIZE [BACKEND_OPTIONS1...N]"
        print "e.g.: monitor.py redis 1 10.0.0.1"
        exit(1)
    backend = str(sys.argv[1])
    instance_id = int(sys.argv[2])
    dummy_state_size = float(sys.argv[3])  # in byte!
    # remaining args are backend-specific (host address or peer list)
    options = sys.argv[4:]
    print "DUMMY_STATE_SIZE=%d" % dummy_state_size
    # select and configure the estate backend implementation
    if backend == "redis":
        es = estater(instance_id, redis_host=options[0])
    elif backend == "libestatezmq":
        es = estatez(instance_id)
        # each instance listens on its own port (8800 + id)
        es.set_connection_properties(port=(8800 + instance_id))
        es.start_cppesnode_process(
            local_api_port=(8800 + instance_id), peerlist=options)
    elif backend == "libestatepython":
        es = estatep(0)
        es.init_libestate(options[0], options[1], options)
    elif backend == "libestatelocal":
        es = estatelocal(0)
    else:
        print "specified backend not known"
    if es is None:
        print "backend not initialized. abort."
        exit(1)
    # initialize state
    init_state()
    # start logger (daemon-style background thread)
    thread.start_new_thread(log_thread_func, ())
    print_log_header()
    # start monitoring (and block!)
    sniff(iface="br0", prn=pkt_callback, filter="ip and tcp", store=0)
| 25.851301 | 82 | 0.632729 |
51ce5c3975e7f2f1dbcbe3a7d7a1b601579f9147 | 433 | py | Python | jasmin/__init__.py | balsagoth/jasmin | 53d55f6af8c0d5faca51849e5953452a0dd93452 | [
"Apache-2.0"
] | null | null | null | jasmin/__init__.py | balsagoth/jasmin | 53d55f6af8c0d5faca51849e5953452a0dd93452 | [
"Apache-2.0"
] | null | null | null | jasmin/__init__.py | balsagoth/jasmin | 53d55f6af8c0d5faca51849e5953452a0dd93452 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) Jookies LTD <jasmin@jookies.net>
# See LICENSE for details.
"""Jasmin SMS Gateway by Jookies LTD <jasmin@jookies.net>"""
MAJOR = 0
MINOR = 9
PATCH = 25
META = ''  # pre-release/dev marker such as 'rc1'; empty for final releases


def get_version():
    """Will return Jasmin's version (MAJOR.MINOR)"""
    return '%s.%s' % (MAJOR, MINOR)


def get_release():
    """PEP 440 format (MAJOR.MINOR.PATCH followed by any pre-release marker)"""
    # Fix: PEP 440 puts the release segment before the pre-release suffix
    # (e.g. '0.9.25rc1'). The previous argument order (META before PATCH)
    # produced invalid strings like '0.9.rc125' whenever META was non-empty;
    # with META == '' the output is unchanged ('0.9.25').
    return '%s.%s.%s%s' % (MAJOR, MINOR, PATCH, META)
__version__ = get_version()
__release__ = get_release()
| 18.826087 | 60 | 0.642032 |
5aaa3eb82c8d1b9755cbbf2fe9d7e1273b5999c2 | 1,366 | py | Python | setup.py | debadeepta/nasbench301 | d3ae402a3ca90248cefae2c67f785a8c4fd7c9eb | [
"Apache-2.0"
] | null | null | null | setup.py | debadeepta/nasbench301 | d3ae402a3ca90248cefae2c67f785a8c4fd7c9eb | [
"Apache-2.0"
] | null | null | null | setup.py | debadeepta/nasbench301 | d3ae402a3ca90248cefae2c67f785a8c4fd7c9eb | [
"Apache-2.0"
] | null | null | null | import os
import setuptools
with open("README.md", "r") as f:
long_description = f.read()
requirements = []
with open('requirements.txt', 'r') as f:
for line in f:
requirements.append(line.strip())
#optional_requirements = []
#with open('optional-requirements.txt', 'r') as f:
# for line in f:
# optional_requirements.append(line.strip())
setuptools.setup(
name="nasbench301",
version="0.3",
author="AutoML Freiburg",
author_email="zimmerl@informatik.uni-freiburg.de",
description=("A surrogate benchmark for neural architecture search"),
long_description=long_description,
url="https://github.com/automl/nasbench301",
long_description_content_type="text/markdown",
license="3-clause BSD",
keywords="machine learning"
"optimization tuning neural architecture deep learning",
packages=setuptools.find_packages(),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Artificial Intelligence",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: BSD License",
],
python_requires='>=3',
platforms=['Linux'],
#install_requires=requirements,
include_package_data=True
# extras_require=optional_requirements
)
| 31.045455 | 73 | 0.673499 |
f9faac7297e9f349a0b6f7578d4eb9d36f7bbe8b | 3,293 | py | Python | cria_pagina_apagados.py | projeto7c0/7c0-core | 9e29bf5f065759fb7a87d02a67553ef2423b3ca6 | [
"MIT"
] | 35 | 2019-02-11T16:03:58.000Z | 2021-06-28T04:22:16.000Z | cria_pagina_apagados.py | projeto7c0/7c0-core | 9e29bf5f065759fb7a87d02a67553ef2423b3ca6 | [
"MIT"
] | 4 | 2018-12-28T22:05:24.000Z | 2020-04-30T12:03:10.000Z | cria_pagina_apagados.py | projeto7c0/7c0-core | 9e29bf5f065759fb7a87d02a67553ef2423b3ca6 | [
"MIT"
] | 3 | 2019-02-15T22:50:26.000Z | 2020-07-05T19:16:40.000Z | def cria(arroba, id, link):
string = '<!DOCTYPE html><html><head> <meta charset="utf-8"> <meta name="viewport" content="width=device-width"> <meta name="description" content="Website do Projeto 7C0."> <meta name="keywords" content="Projeto 7C0, monitoramento, politicos, atores publicos"> <link rel="icon" type="image/x-icon" href="./img/eye.ico" /> <title>Projeto 7C0 | Politicos | @account_handle</title> <link rel="stylesheet" href="../../css/style.css"> <script type="text/javascript" src="../../js/scripts.js"></script></head><body> <header> <div class="container"> <div id="branding"> <h1> Projeto 7C0 | @account_handle </h1> </div> <nav> <ul> <li><a href="../../index.html">Home</a></li> <li><a href="../../about.html">Sobre</a></li> <li><a href="../../source.html">Codigo-Fonte</a></li> </ul> </nav> </div> </header> <section id="main"> <div class="container"> <article id="main-col-single"> <ul id="services"> <div class="tweet"><h3>Pergunte porque o politico apagou esse tweet </h3></div> <div class="tweet"> <a href="https://twitter.com/share?ref_src=twsrc%5Etfw" class="twitter-share-button tweet" data-size="large" data-text="Esse tweet sumiu, o que aconteceu @account_handle? 
" data-url="archive_link" data-via="projeto7c0" data-related="projeto7c0" data-lang="pt" data-show-count="false">Tweet</a><script async src="https://platform.twitter.com/widgets.js" charset="utf-8"></script> </div> </ul> <div><iframe name="interno" width="100%" height="500" src="archive_link"></iframe></div> <p> </p> </article> </div> </div> </section> <section id="newsletter"> <div class="container"> <h3>Receba atualizações do projeto </h3> <form action="https://projeto7c0.us20.list-manage.com/subscribe/post?u=984470f280d60b82c247e3d7b&id=00a31b0d4a" method="post" target="_blank" novalidate> <input class="button_1" type="submit" value="Inscreva-se" name="subscribe"> </form> </div> </section> <footer> <p>Projeto 7C0, Copyright © 2019</p> </footer></body></html><!-- Browser Sync --><!-- You can sync this code using VSCode Browser Sync by typing "Server mode in browser" and then chosing the following files to be sync "*.html|./css/*.css".-->'
string = string.replace("account_handle", arroba)
string = string.replace("archive_link", link)
print("Criando arquivo...")
f = open(r'/home/ec2-user/projeto7c0.github.io/politicians/'+arroba+"/"+str(id)+".html", "w+")
f.write(string)
f.close()
if __name__ == '__main__':
    import database, contas, os
    # one page per deleted tweet, grouped by account handle
    arrobas = contas.pega_contas()
    for arroba in arrobas:
        print(arroba)
        # deleted tweets recorded for this account — presumably rows of
        # (tweet_id, archive_link); TODO confirm against database module
        lista = database.list_apagados(arroba)
        os.makedirs(r'/home/ec2-user/projeto7c0.github.io/politicians/'+arroba, exist_ok=True)
        for tweet in lista:
            # force https on the archive link (first occurrence only)
            cria(arroba, tweet[0], str(tweet[1]).replace("http", "https", 1))
27b9622f2fe5f66affd21dc368a1339957ba4a27 | 13,988 | py | Python | __init__.py | aagallag/binjatron | 4bbff5c4fa489a6718037126e2ea46816875e268 | [
"MIT"
] | 154 | 2016-09-02T16:22:56.000Z | 2021-08-02T13:16:03.000Z | __init__.py | aagallag/binjatron | 4bbff5c4fa489a6718037126e2ea46816875e268 | [
"MIT"
] | 13 | 2016-09-14T22:48:15.000Z | 2019-07-04T20:13:55.000Z | __init__.py | aagallag/binjatron | 4bbff5c4fa489a6718037126e2ea46816875e268 | [
"MIT"
] | 22 | 2016-09-03T06:58:44.000Z | 2020-06-24T03:35:34.000Z | """
binjatron.py
A plugin for Binary Ninja to integrate Binary Ninja with Voltron.
Install per instructions here:
https://github.com/Vector35/binaryninja-api/tree/master/python/examples
Documentation here: https://github.com/snare/binja/blob/master/README.md
Note: requires the current version of Voltron from GitHub here:
https://github.com/snare/voltron
"""
from binaryninja import *
import voltron
from threading import Thread
from voltron.core import Client
from voltron.plugin import api_request
from scruffy import ConfigFile, PackageFile
import sys
log = voltron.setup_logging()
# Voltron RPC client used for all communication with the debugger host.
client = Client()
# Breakpoint addresses highlighted on the previous sync pass (Binary Ninja
# addresses, i.e. already adjusted by `slide`).
last_bp_addrs = []
# Last program counter we highlighted, and the highlight colour the
# instruction had before we overwrote it.
last_pc_addr = 0
last_pc_addr_colour = 0
# True while background synchronisation with Voltron is active.
syncing = False
# Cached response of the debugger "version" request.
vers = None
# Offset between the debugger's loaded addresses and the database addresses.
slide = 0
# BinjatronNotification instance registered with the view while syncing.
notification = None
# (callback, delete_after_call) pairs run after each successful sync pass.
sync_callbacks = []
# Countdown used to mute repeated sync errors after three failures in a row.
mute_errors_after = 3
config = ConfigFile('~/.binjatron.conf', defaults=PackageFile('defaults.yaml'), apply_env=True, env_prefix='BTRON')
config.load()
# Highlight colours for breakpoints / current PC, taken from user config.
bp_colour = enums.HighlightStandardColor(config.bp_colour)
pc_colour = enums.HighlightStandardColor(config.pc_colour)
no_colour = enums.HighlightStandardColor(0)
def _get_function(view, address):
func = view.get_function_at(address)
if func is None:
return view.get_function_at(view.get_previous_function_start_before(address))
return func
def sync(view):
    """Start background synchronisation between Voltron and this view.

    Registers a data-change notification, connects the Voltron client, and
    installs a callback that (a) mirrors debugger breakpoints as highlight
    colours, (b) highlights the current program counter, and (c) runs any
    externally registered sync callbacks. Safe to call repeatedly; logs and
    returns if already syncing.
    """
    global syncing, vers, notification

    def build_requests():
        # Each sync pass asks the debugger for the PC and the breakpoint list.
        return [
            api_request('registers', registers=['pc'], block=True),
            api_request('breakpoints', block=True),
        ]

    # NOTE(review): the mutable default `results=[]` is shared across calls —
    # harmless here because it is never mutated, but worth confirming.
    def callback(results=[], error=None):
        global last_bp_addrs, last_pc_addr, last_pc_addr_colour, sync_callbacks, mute_errors_after, syncing
        if error:
            if mute_errors_after > 0:
                log_error("Error synchronising: {}".format(error))
            elif mute_errors_after == 0:
                # Prevent errors from filling up the entire log if the debugger closes and we lose sync
                log_alert("Voltron encountered three sync errors in a row. Muting errors until the next succesful sync.")
                syncing = False
            mute_errors_after -= 1
        else:
            if(mute_errors_after < 0):
                log_info("Sync restored after {} attempts".format(mute_errors_after * -1))
            syncing = True
            mute_errors_after = 3
            if client and len(results):
                # results[0] is the registers response, results[1] the breakpoints.
                if results[1].breakpoints:
                    # Flatten every location of every breakpoint, translating
                    # debugger addresses back into database addresses.
                    addrs = [l['address'] - slide for s in [bp['locations'] for bp in results[1].breakpoints] for l in s]
                    # add colours to all the breakpoints currently set in the debugger
                    for addr in addrs:
                        func = _get_function(view, addr)
                        if func:
                            func.set_auto_instr_highlight(addr, bp_colour)
                    # remove colours from any addresses that had breakpoints the last time we updated, but don't now
                    for addr in set(last_bp_addrs) - set(addrs):
                        func = _get_function(view, addr)
                        if func:
                            func.set_auto_instr_highlight(addr, no_colour)
                    # save this set of breakpoint addresses for next time
                    last_bp_addrs = addrs
                elif last_bp_addrs:
                    if (results[1].status == 'success') or (hasattr(results[1], 'message') and 'busy' not in results[1].message.lower()):
                        # We end up here if the debugger has been closed and re-opened
                        replace_breakpoints = show_message_box(
                            'New Session',
                            'The Voltron instance currently syncing reports no breakpoints set, but breakpoints have been set in Binary Ninja. Restore these breakpoints?',
                            buttons=enums.MessageBoxButtonSet.YesNoButtonSet)
                        if replace_breakpoints:
                            for addr in set(last_bp_addrs):
                                set_breakpoint(view, addr)
                        else:
                            for addr in set(last_bp_addrs):
                                func = _get_function(view, addr)
                                if func:
                                    func.set_auto_instr_highlight(addr, no_colour)
                            last_bp_addrs = []
                if results[0].registers:
                    # get the current PC from the debugger
                    # NOTE(review): `.values()[0]` only works on Python 2 —
                    # Python 3 dict views are not indexable; confirm target runtime.
                    addr = results[0].registers.values()[0] - slide
                    # find the function where that address is
                    func = _get_function(view, addr)
                    if last_pc_addr:
                        # update the highlight colour of the previous PC to its saved value
                        _get_function(view, last_pc_addr).set_auto_instr_highlight(last_pc_addr, last_pc_addr_colour)
                    # save the PC and current colour for that instruction
                    last_pc_addr_colour = func.get_instr_highlight(addr)
                    last_pc_addr = addr
                    # update the highlight colour to show the current PC
                    func.set_auto_instr_highlight(addr, pc_colour)
                    # Run sync callbacks and remove them from the list if specified
                    for cb, _ in sync_callbacks:
                        cb(results)
                    # NOTE(review): on Python 3 `filter` yields a one-shot
                    # iterator; this relies on Python 2 list semantics.
                    sync_callbacks = filter(lambda cbt: not cbt[1], sync_callbacks)
                elif not results[1].breakpoints or (results[0].message == 'No such target'): # Clear the program counter highlight if the program isn't running
                    if last_pc_addr:
                        # update the highlight colour of the previous PC to its saved value
                        _get_function(view, last_pc_addr).set_auto_instr_highlight(last_pc_addr, last_pc_addr_colour)

    if not syncing:
        try:
            log_info("Starting synchronisation with Voltron")
            # register for notifications
            notification = BinjatronNotification(view)
            view.register_notification(notification)
            # Start the client
            vers = client.perform_request("version")
            client.start(build_requests=build_requests, callback=callback)
            syncing = True
        except:
            # NOTE(review): bare except — any failure (including unrelated
            # ones) is reported as a connection failure.
            log_info("Couldn't connect to Voltron")
    else:
        log_info("Already synchronising with Voltron")
def stop(view):
    """Stop synchronising with Voltron for *view*.

    Clears every highlight this plugin added, stops and replaces the RPC
    client, unregisters the data notification, and resets module state
    (`syncing`, `slide`). Alerts if not currently syncing.
    """
    global syncing, client, slide, notification
    if syncing:
        log_info("Stopping synchronisation with Voltron")

        # clear any colours we've set
        if last_pc_addr:
            func = _get_function(view, last_pc_addr)
            func.set_auto_instr_highlight(last_pc_addr, last_pc_addr_colour)
        for addr in last_bp_addrs:
            func = _get_function(view, addr)
            func.set_auto_instr_highlight(addr, no_colour)

        # stop the voltron client; a fresh Client is created so that a later
        # sync() starts from a clean connection.
        client.stop()
        client = Client()

        # unregister notifications
        view.unregister_notification(notification)
        notification = None

        syncing = False
        slide = 0
    else:
        log_alert("Not synchronising with Voltron")
def set_breakpoint(view, address):
    """Set a breakpoint at *address* in the attached debugger via Voltron
    and colour the corresponding instruction in Binary Ninja.

    `address` is a database address; the module-level `slide` is added to
    translate it to the debugger's loaded address. Raises nothing: failures
    are logged and surfaced to the user via an alert.
    """
    global vers
    try:
        if not vers:
            vers = client.perform_request("version")

        # build a breakpoint set command for the debugger host
        if 'lldb' in vers.host_version:
            cmd = "breakpoint set -a 0x{:x}".format(address + slide)
        elif 'gdb' in vers.host_version:
            cmd = "break *0x{:x}".format(address + slide)
        else:
            raise Exception("Debugger host version {} not supported".format(vers.host_version))

        # send it
        res = client.perform_request("command", command=cmd, block=False)
        if res.is_error:
            raise Exception("Failed to set breakpoint: {}".format(res))

        # update the voltron views
        res = client.perform_request("command", command="voltron update", block=False)

        # add colour in binja
        func = _get_function(view, address)
        if func:
            func.set_auto_instr_highlight(address, bp_colour)
    except Exception as e:
        # Was a bare `except:` that discarded the reason (and would even
        # swallow KeyboardInterrupt); log the cause before alerting.
        log_error("Failed to set breakpoint: {}".format(e))
        log_alert("Failed to set breakpoint")
def delete_breakpoint(view, address):
    """Delete the debugger breakpoint at *address* (a database address)
    and clear its highlight colour in Binary Ninja.

    Looks up the breakpoint id by matching the slide-adjusted address
    against the debugger's breakpoint list. Failures are logged and
    surfaced via an alert.
    """
    global vers, last_bp_addrs
    try:
        if not vers:
            vers = client.perform_request("version")

        # get a list of breakpoints from the debugger and find the one we're after
        res = client.perform_request("breakpoints")
        bp_id = None
        if res.is_success:
            for bp in res.breakpoints:
                for location in bp['locations']:
                    if address == location['address'] - slide:
                        bp_id = bp['id']
                        break

        # build a breakpoint delete command for the debugger
        if 'lldb' in vers.host_version:
            cmd = "breakpoint delete {}".format(bp_id)
        elif 'gdb' in vers.host_version:
            cmd = "delete {}".format(bp_id)
        else:
            raise Exception("Debugger host version {} not supported".format(vers.host_version))

        # send it
        res = client.perform_request("command", command=cmd, block=False)
        if res.is_error:
            raise Exception("Failed to delete breakpoint: {}".format(res))

        # update the voltron views
        res = client.perform_request("command", command="voltron update", block=False)

        # remove the breakpoint colour in binja
        func = _get_function(view, address)
        if func:
            func.set_auto_instr_highlight(address, no_colour)

        # Use a list, not `filter(...)`: on Python 3 `filter` returns a
        # one-shot iterator, and `last_bp_addrs` is iterated repeatedly by
        # the sync callback, which would see it empty on the second pass.
        last_bp_addrs = [k for k in last_bp_addrs if k != address]
    except Exception as e:
        # Narrowed from a bare `except:`; log the cause before alerting.
        log_error("Failed to delete breakpoint: {}".format(e))
        log_alert("Failed to delete breakpoint")
def set_slide(view, address):
    """Record the offset ("slide") between the debugger's current program
    counter and *address* in the Binary Ninja database, so addresses can be
    translated between the two from now on.
    """
    global slide
    if 'async' in vers.capabilities:
        # if we're using a debugger that supports async, grab the current PC
        res = client.perform_request("registers", registers=["pc"], block=False)
        pc = res.registers.values()[0]
    else:
        # otherwise we just have to use the last PC we saved
        if last_pc_addr == 0:
            log_alert("Your debugger does not support async API access, and Binary Ninja hasn't received any data from it yet. Please run the `voltron update` command in the debugger, or step the debugger, or let it run until it hits a breakpoint so Binjatron can get the register state.")
            # Bail out: without a PC we cannot compute the slide. The original
            # fell through here and raised UnboundLocalError on `pc`.
            return
        else:
            pc = last_pc_addr
    slide = pc - address
    # if we have an async debugger, we can update now. otherwise we'll have to wait for the user to step again
    if 'async' in vers.capabilities:
        client.update()
def clear_slide(view):
    """Reset the address slide to zero, i.e. debugger and database
    addresses are treated as identical again. `view` is unused but
    required by the PluginCommand callback signature."""
    global slide
    slide = 0
def custom_request(request, args, alert=True):
    """ Allows external code to pass arbitrary commands to the voltron client
        request: type of request - usually 'command'
        args: dict containing keyword arguments for the request
        alert: boolean indicating whether errors should result in a popup or simply
            log to the console. Defaults to True."""
    global vers
    client_result = None
    try:
        if not vers:
            vers = client.perform_request("version")
        # Only lldb and gdb hosts are supported; anything else is rejected.
        if 'lldb' in vers.host_version or 'gdb' in vers.host_version:
            cmd = request
        else:
            raise Exception("Debugger host version {} not supported".format(vers.host_version))
        client_result = client.perform_request(request, **args)
        if client_result.is_error:
            raise Exception("\"" + cmd + "\": {}".format(client_result))
        # update the voltron views
        client.perform_request("command", command="voltron update", block=False)
    except Exception as e:
        # Narrowed from a bare `except:` + sys.exc_info(); log a string, not
        # the raw exception object.
        log_info(str(e))
        if alert:
            log_alert(request + " failed: " + str(args))
        else:
            log_info(request + " failed: " + str(args))
    # Even if we encountered an exception, we return the results so external code can
    # handle the error if necessary.
    return client_result
def register_sync_callback(cb, should_delete=False):
    """ Allows external code to register a callback to be run upon a succesful sync
        cb: function pointer to the callback. Gets `results` as an argument
        should_delete: boolean indicating whether the callback should be removed from
            the list after a single call. Defaults to False. """
    global sync_callbacks
    # Stored as (callback, delete_after_call); consumed by sync()'s callback.
    sync_callbacks.append((cb, should_delete))
def sync_state():
    """ Return the sync state so that external code can determine whether voltron is currently syncing with binjatron """
    # Module-level flag maintained by sync()/stop() and the sync callback.
    return syncing
class BinjatronNotification(BinaryDataNotification):
    """Mirrors Binary Ninja data edits into the debugger's memory.

    Registered by sync() and unregistered by stop(); only writes are
    forwarded — inserts/removes are just logged.
    """

    def __init__(self, view):
        self.view = view

    def data_written(self, view, offset, length):
        """Forward a patched byte range to the debugger via write_memory."""
        log_info("data_written({:x}, {})".format(offset, length))

        # get the data that was written
        data = view.read(offset, length)

        # write it to memory in the debugger, translating the database
        # offset to the debugger's address space via `slide`
        res = client.perform_request("write_memory", address=offset + slide, value=data, block=False)
        if not res.is_success:
            log_error("Failed to write memory in debugger: {}".format(res))

        # update the voltron views
        res = client.perform_request("command", command="voltron update", block=False)

    def data_inserted(self, view, offset, length):
        # Not mirrored; inserts change layout and cannot be applied in-place.
        log_info("data_inserted()")

    def data_removed(self, view, offset, length):
        # Not mirrored; see data_inserted.
        log_info("data_removed()")
# Menu entries exposed in Binary Ninja's UI. The `register_for_address`
# variants receive the address under the cursor as the second argument.
PluginCommand.register("Voltron: Sync", "", sync)
PluginCommand.register("Voltron: Stop syncing", "", stop)
PluginCommand.register_for_address("Voltron: Breakpoint set", "", set_breakpoint)
PluginCommand.register_for_address("Voltron: Breakpoint clear", "", delete_breakpoint)
PluginCommand.register_for_address("Voltron: Slide set", "", set_slide)
PluginCommand.register("Voltron: Slide clear", "", clear_slide)
| 38.855556 | 289 | 0.624821 |
6b4ec193898a6c16a0edd919c738e213eac9ed8e | 4,522 | py | Python | torrt/rpc/utorrent.py | anton-v-ivanov/torrt | 7b0177e0870b394f1bd775b6a88d6cca0649906d | [
"BSD-3-Clause"
] | 82 | 2015-04-12T08:36:53.000Z | 2022-01-17T07:51:42.000Z | torrt/rpc/utorrent.py | anton-v-ivanov/torrt | 7b0177e0870b394f1bd775b6a88d6cca0649906d | [
"BSD-3-Clause"
] | 79 | 2015-04-12T08:35:59.000Z | 2022-02-10T12:05:26.000Z | torrt/rpc/utorrent.py | fakegit/torrt | 73c43d1b8659d707902f7951b019d9f1506b24b2 | [
"BSD-3-Clause"
] | 26 | 2015-01-13T17:49:07.000Z | 2021-07-20T10:02:46.000Z | from typing import List, Any
from urllib.parse import urljoin
from ..base_rpc import BaseRPC
from ..exceptions import TorrtRPCException
from ..utils import make_soup, TorrentData
class UTorrentRPC(BaseRPC):
    """See http://www.utorrent.com/community/developers/webapi for protocol spec details.

    idle sign: What a shame - uTorrent API is a load of mess.

    """
    alias: str = 'utorrent'

    # Relative path of the WebUI page that serves the anti-CSRF token.
    token_page_path: str = 'token.html'

    def __init__(
            self,
            url: str = None,
            host: str = 'localhost',
            port: int = 8080,
            user: str = None,
            password: str = None,
            enabled: bool = False
    ):
        """
        :param url: Full WebUI base URL; when omitted it is built from host/port.
        :param host: WebUI host name.
        :param port: WebUI port.
        :param user: HTTP basic-auth user name.
        :param password: HTTP basic-auth password.
        :param enabled: Whether this RPC backend is active.
        """
        self.user = user
        self.password = password
        self.enabled = enabled
        self.host = host
        self.port = port
        # Anti-CSRF token; populated by login().
        self.csrf_token = ''

        if url is not None:
            self.url = url

        else:
            self.url = f'http://{host}:{port}/gui/'

        super().__init__()

    def login(self):
        """Fetch the anti-CSRF token and mark the session as logged in.

        :raises UTorrentRPCException: when the token page cannot be fetched
            or contains no token.
        """
        try:
            response = self.client.request(
                urljoin(self.url, self.token_page_path),
                auth=(self.user, self.password),
                json=False,
                silence_exceptions=False,
            )
            # The token is served inside an element with id="token".
            self.csrf_token = make_soup(response.text).find(id='token').text

            if not self.csrf_token:
                raise UTorrentRPCException('Unable to fetch CSRF token.')

            self.logged_in = True

        except Exception as e:
            self.log_error(f'Failed to login using `{self.url}` RPC: {e}')
            raise UTorrentRPCException(str(e))

    def build_params(self, action: str = None, params: dict = None) -> dict:
        """Build the request parameter dict for a WebUI action."""
        document = {'action': action}

        if params is not None:
            document.update(params)

        return document

    def get_request_url(self, params: dict) -> str:
        """Construct the full request URL, including the CSRF token.

        List values are joined with '&'; None values are dropped.
        """
        rest = []
        join = lambda l: '&'.join(l)

        for param_name, param_val in params.items():

            if param_val is None:
                continue

            val = param_val

            if isinstance(param_val, list):
                val = join(param_val)

            rest.append(f'{param_name}={val}')

        return f'{self.url}?token={self.csrf_token}&{join(rest)}'

    def query(self, data: dict, files: dict = None) -> dict:
        """Send a request to the WebUI, logging in first if needed.

        :param data: parameter dict as produced by build_params().
        :param files: optional multipart file payload (e.g. a .torrent).
        :raises UTorrentRPCException: on transport errors or non-200 status.
        """
        action = data['action'] or 'list'
        self.log_debug(f'RPC action `{action}` ...', )

        if not self.logged_in:
            self.login()

        url = self.get_request_url(data)

        request_kwargs = {}

        if files is not None:
            request_kwargs['files'] = files

        try:
            response = self.client.request(
                url=url, auth=(self.user, self.password), **request_kwargs)

            if self.client.last_response.status_code != 200:
                raise UTorrentRPCException(response.text.strip())

        except Exception as e:
            self.log_error(f'Failed to query RPC `{url}`: {e}')
            raise UTorrentRPCException(str(e))

        return response

    def method_get_torrents(self, hashes: List[str] = None) -> List[dict]:
        """Return info dicts for all torrents, or only those in *hashes*.

        The WebUI returns each torrent as a positional list; index 0 is the
        hash, 2 the name, 26 the download directory.
        """
        result = self.query(self.build_params(params={'list': 1}))

        torrents_info = []

        for torrent_data in result['torrents']:
            hash_ = torrent_data[0].lower()

            if hashes is None or hash_ in hashes:
                torrents_info.append({
                    'hash': hash_,
                    'name': torrent_data[2],
                    'download_to': torrent_data[26],
                    'comment': ''
                })

        return torrents_info

    def method_add_torrent(self, torrent: TorrentData, download_to: str = None, params: dict = None) -> Any:
        """Upload a .torrent file to the WebUI via the add-file action."""
        # NB: `download_to` is ignored, as existing API approach to it is crippled.
        file_data = {'torrent_file': ('from_torrt.torrent', torrent.raw)}
        return self.query(self.build_params(action='add-file', params={'path': download_to}), file_data)

    def method_remove_torrent(self, hash_str: str, with_data: bool = False) -> Any:
        """Remove the torrent with *hash_str*, optionally deleting its data."""
        action = 'remove'

        if with_data:
            action = 'removedata'

        return self.query(self.build_params(action=action, params={'hash': hash_str}))

    def method_get_version(self) -> str:
        """Return the WebUI version string reported by getversion."""
        result = self.query(self.build_params(action='getversion'))
        return result['version']['ui_version']
class UTorrentRPCException(TorrtRPCException):
    """Raised for uTorrent WebUI communication and protocol errors."""
| 27.406061 | 108 | 0.567669 |
7553ea99fd78ba577c6a7279eea9adad622562c9 | 2,284 | py | Python | vanilla_vae/model.py | teasherm/models | ba26d9d165a7d598d5c008ee1833ab8950bbee12 | [
"Unlicense"
] | null | null | null | vanilla_vae/model.py | teasherm/models | ba26d9d165a7d598d5c008ee1833ab8950bbee12 | [
"Unlicense"
] | null | null | null | vanilla_vae/model.py | teasherm/models | ba26d9d165a7d598d5c008ee1833ab8950bbee12 | [
"Unlicense"
] | null | null | null | import tensorflow as tf
from lib.ops import xavier_initializer, sigmoid_loss
# Shorthand for the ReLU activation used throughout the encoder/decoder.
relu = tf.nn.relu
def _sample_z(mu, log_var):
    """Reparameterization trick: draw z ~ N(mu, exp(log_var)) as a
    deterministic transform of unit Gaussian noise, keeping the sample
    differentiable w.r.t. mu and log_var."""
    noise = tf.random_normal(shape=tf.shape(mu))
    sigma = tf.exp(log_var / 2)
    return mu + sigma * noise
def _Q(X, z_dim, h_dim=128):
    """Encoder network Q(z|X): maps inputs to the mean and log-variance of
    a diagonal Gaussian over the latent code.

    Args:
        X: input tensor, presumably of shape (batch, X_dim) — confirm caller.
        z_dim: dimensionality of the latent code.
        h_dim: hidden layer width.

    Returns:
        (z_mu, z_logvar) tensors, each of shape (batch, z_dim).
    """
    with tf.variable_scope("Q"):
        h = tf.layers.dense(
            X,
            h_dim,
            activation=relu,
            kernel_initializer=xavier_initializer(X),
            name="layer1")
        z_mu = tf.layers.dense(
            h,
            z_dim,
            activation=relu,
            kernel_initializer=xavier_initializer(h),
            name="mu")
        # NOTE(review): ReLU on the mean/log-variance heads clamps them to
        # >= 0, which is unusual for a VAE — confirm intentional.
        z_logvar = tf.layers.dense(
            h,
            z_dim,
            activation=relu,
            kernel_initializer=xavier_initializer(h),
            name="sigma")
    return z_mu, z_logvar
def _P(z, X_dim, h_dim=128, reuse=False):
    """Decoder network P(X|z): maps latent codes to Bernoulli parameters
    over X_dim outputs.

    Args:
        z: latent code tensor of shape (batch, z_dim).
        X_dim: output dimensionality.
        h_dim: hidden layer width.
        reuse: reuse existing "P" scope variables (for the sampling path).

    Returns:
        (prob, logits): sigmoid probabilities and pre-sigmoid logits.
    """
    with tf.variable_scope("P") as scope:
        if reuse: scope.reuse_variables()
        h = tf.layers.dense(
            z, h_dim,
            activation=relu,
            kernel_initializer=xavier_initializer(z),
            name="layer1")
        # NOTE(review): ReLU on the logits zeroes all negative pre-sigmoid
        # values (prob >= 0.5 everywhere) — confirm intentional.
        logits = tf.layers.dense(
            h,
            X_dim,
            activation=relu,
            kernel_initializer=xavier_initializer(h),
            name="logits")
        prob = tf.nn.sigmoid(logits)
    return prob, logits
def build_graph(X_dim=784, z_dim=100):
    """Assemble the VAE training/sampling graph (TF1 static-graph style).

    Returns a dict of the tensors/ops needed by init/train/sample:
    X (input placeholder), z (latent placeholder), X_samples (decoded
    samples from z), vae_loss, and train_op.
    """
    X = tf.placeholder(tf.float32, shape=[None, X_dim])
    z = tf.placeholder(tf.float32, shape=[None, z_dim])

    z_mu, z_logvar = _Q(X, z_dim)
    z_sample = _sample_z(z_mu, z_logvar)
    _, logits = _P(z_sample, X_dim)

    # Sampling path reuses the decoder weights ("P" scope).
    X_samples, _ = _P(z, X_dim, reuse=True)

    # E[log P(X|z)]
    recon_loss = sigmoid_loss(logits, X, axis=1)
    # D_KL(Q(z|X) || P(z|X)); calculate in closed form as both dist. are Gaussian
    kl_loss = 0.5 * tf.reduce_sum(tf.exp(z_logvar) + z_mu**2 - 1. - z_logvar, 1)
    vae_loss = tf.reduce_mean(recon_loss + kl_loss)

    train_op = tf.train.AdamOptimizer().minimize(vae_loss)
    return dict(
        X=X, z=z, X_samples=X_samples, vae_loss=vae_loss, train_op=train_op)
def init(sess):
    # Initialise all TF1 graph variables before training.
    sess.run(tf.global_variables_initializer())
def train(sess, var_dict, X):
    """Run one optimisation step on batch X; returns the scalar VAE loss."""
    _, loss = sess.run([var_dict["train_op"], var_dict["vae_loss"]],
                       {var_dict["X"]: X})
    return loss
def sample(sess, var_dict, z):
    """Decode latent codes z into output probabilities via the decoder."""
    return sess.run(
        var_dict["X_samples"],
        feed_dict={var_dict["z"]: z})
| 25.098901 | 79 | 0.631786 |
2bebeecea2823a156c1ff8cee37115fb7aed5018 | 638 | py | Python | tests/test_client.py | BambinoRaccoon/geoguessr_api | d31a1eb89765f5c8594b8d7a1b61256abbfbc1b9 | [
"MIT"
] | null | null | null | tests/test_client.py | BambinoRaccoon/geoguessr_api | d31a1eb89765f5c8594b8d7a1b61256abbfbc1b9 | [
"MIT"
] | null | null | null | tests/test_client.py | BambinoRaccoon/geoguessr_api | d31a1eb89765f5c8594b8d7a1b61256abbfbc1b9 | [
"MIT"
] | null | null | null | import geoguessr_api
import asyncio
from examples.env import load_env
username, password, token = load_env()
def test_sync_client():
with geoguessr_api.Client(username, password, token) as client:
assert client.token == token
print(client.token)
assert client.me.email.address == username
async def test_async_client():
async with geoguessr_api.AsyncClient(username, password, token) as client:
print(client.token)
assert client.token == token
assert client.me.email.address == username
if __name__ == '__main__':
# test_sync_client()
asyncio.run(test_async_client())
| 26.583333 | 78 | 0.710031 |
b6de0af7ea9028562affbcb9327e3c7714ce3fc2 | 3,621 | py | Python | docs/conf.py | jingxinfu/Biopyutils | a04f86e3b12bcbb44bf317f3bb9c65ef5a6ab862 | [
"BSD-3-Clause"
] | 1 | 2022-03-15T03:45:28.000Z | 2022-03-15T03:45:28.000Z | docs/conf.py | jingxinfu/Biopyutils | a04f86e3b12bcbb44bf317f3bb9c65ef5a6ab862 | [
"BSD-3-Clause"
] | 1 | 2020-09-05T18:10:41.000Z | 2020-09-05T18:10:41.000Z | docs/conf.py | jingxinfu/Biopyutils | a04f86e3b12bcbb44bf317f3bb9c65ef5a6ab862 | [
"BSD-3-Clause"
] | 3 | 2020-09-04T17:05:46.000Z | 2020-09-10T14:39:20.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# License : GPL3
# Author : Jingxin Fu <jingxinfu.tj@gmail.com>
# Date : 10/02/2020
# Last Modified Date: 11/02/2020
# Last Modified By : Jingxin Fu <jingxinfu.tj@gmail.com>
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
# -- Project information -----------------------------------------------------
project = 'Biopyutils'
import time
copyright = u'2020-{}'.format(time.strftime("%Y"))
author = 'Jingxin Fu'
import Biopyutils
release = Biopyutils.__version__
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.githubpages',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.autosummary',
'numpydoc',
]
autosummary_generate = True
numpydoc_show_class_members = False
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'bootstrap'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_theme_options = {
'source_link_position': "footer",
'bootswatch_theme': "paper",
'navbar_sidebarrel': False,
'bootstrap_version': "3",
}
# Add any paths that contain custom themes here, relative to this directory.
import sphinx_bootstrap_theme
html_theme_path = sphinx_bootstrap_theme.get_html_theme_path()
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = "_static/favicon.ico"
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = False
# Output file base name for HTML help builder.
htmlhelp_basename = 'Bipyutilsdoc'
# -- Extension configuration -------------------------------------------------
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for intersphinx extension ---------------------------------------
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/3/': None}
| 36.575758 | 79 | 0.667495 |
63957527be586b571e3cbbd6a0bca9376d60f2c4 | 1,002 | py | Python | maya/app/mayabullet/__init__.py | arjun-namdeo/py_stubs | 605bb167e239978f5417f3f1fc1f5c12e2a243cc | [
"MIT"
] | 20 | 2019-09-20T00:30:22.000Z | 2021-12-26T06:56:16.000Z | mayaSDK/maya/app/mayabullet/__init__.py | minjiang999/vscode-mayapy | 7a21872f80b5b740fc653e79c3f9b5268e87b3c3 | [
"MIT"
] | 5 | 2019-12-29T15:19:03.000Z | 2022-03-29T16:54:19.000Z | mayaSDK/maya/app/mayabullet/__init__.py | minjiang999/vscode-mayapy | 7a21872f80b5b740fc653e79c3f9b5268e87b3c3 | [
"MIT"
] | 8 | 2019-09-23T05:46:44.000Z | 2022-01-11T14:42:14.000Z | """
Copyright (C) 2011 Autodesk, Inc. All rights reserved.
Developer Tip:
To auto-reload the entire package when maya is running, a shelf button can be
created with the following content.
import sys
bulletScriptPath = '<Bullet src location>/Bullet/scripts'
if (not bulletScriptPath in sys.path):
sys.path.insert(0,bulletScriptPath)
import maya.app.mayabullet
import maya.app.mayabullet.BulletUtils
import maya.app.mayabullet.CommandWithOptionVars
import maya.app.mayabullet.Ragdoll
import maya.app.mayabullet.RigidBody
import maya.app.mayabullet.RigidBodyConstraint
import maya.app.mayabullet.SoftBody
import maya.app.mayabullet.SoftBodyConstraint
reload(maya.app.mayabullet)
reload(maya.app.mayabullet.BulletUtils)
reload(maya.app.mayabullet.CommandWithOptionVars)
reload(maya.app.mayabullet.Ragdoll)
reload(maya.app.mayabullet.RigidBody)
reload(maya.app.mayabullet.RigidBodyConstraint)
reload(maya.app.mayabullet.SoftBody)
reload(maya.app.mayabullet.SoftBodyConstraint)
"""
logger = None
| 28.628571 | 77 | 0.823353 |
142b6314aa0357e6540f56ea190be1e2cb4b8ba8 | 8,319 | py | Python | kubernetes_asyncio/client/models/v1_deployment_condition.py | weltonrodrigo/kubernetes_asyncio | b793f3e9ea43cbd0f4ff40ace1b0b677682f4042 | [
"Apache-2.0"
] | null | null | null | kubernetes_asyncio/client/models/v1_deployment_condition.py | weltonrodrigo/kubernetes_asyncio | b793f3e9ea43cbd0f4ff40ace1b0b677682f4042 | [
"Apache-2.0"
] | 13 | 2021-04-12T02:03:48.000Z | 2022-03-28T02:08:46.000Z | kubernetes_asyncio/client/models/v1_deployment_condition.py | weltonrodrigo/kubernetes_asyncio | b793f3e9ea43cbd0f4ff40ace1b0b677682f4042 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v1.16.14
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from kubernetes_asyncio.client.configuration import Configuration
class V1DeploymentCondition(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'last_transition_time': 'datetime',
'last_update_time': 'datetime',
'message': 'str',
'reason': 'str',
'status': 'str',
'type': 'str'
}
attribute_map = {
'last_transition_time': 'lastTransitionTime',
'last_update_time': 'lastUpdateTime',
'message': 'message',
'reason': 'reason',
'status': 'status',
'type': 'type'
}
def __init__(self, last_transition_time=None, last_update_time=None, message=None, reason=None, status=None, type=None, local_vars_configuration=None): # noqa: E501
"""V1DeploymentCondition - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._last_transition_time = None
self._last_update_time = None
self._message = None
self._reason = None
self._status = None
self._type = None
self.discriminator = None
if last_transition_time is not None:
self.last_transition_time = last_transition_time
if last_update_time is not None:
self.last_update_time = last_update_time
if message is not None:
self.message = message
if reason is not None:
self.reason = reason
self.status = status
self.type = type
@property
def last_transition_time(self):
"""Gets the last_transition_time of this V1DeploymentCondition. # noqa: E501
Last time the condition transitioned from one status to another. # noqa: E501
:return: The last_transition_time of this V1DeploymentCondition. # noqa: E501
:rtype: datetime
"""
return self._last_transition_time
@last_transition_time.setter
def last_transition_time(self, last_transition_time):
"""Sets the last_transition_time of this V1DeploymentCondition.
Last time the condition transitioned from one status to another. # noqa: E501
:param last_transition_time: The last_transition_time of this V1DeploymentCondition. # noqa: E501
:type: datetime
"""
self._last_transition_time = last_transition_time
@property
def last_update_time(self):
"""Gets the last_update_time of this V1DeploymentCondition. # noqa: E501
The last time this condition was updated. # noqa: E501
:return: The last_update_time of this V1DeploymentCondition. # noqa: E501
:rtype: datetime
"""
return self._last_update_time
@last_update_time.setter
def last_update_time(self, last_update_time):
"""Sets the last_update_time of this V1DeploymentCondition.
The last time this condition was updated. # noqa: E501
:param last_update_time: The last_update_time of this V1DeploymentCondition. # noqa: E501
:type: datetime
"""
self._last_update_time = last_update_time
@property
def message(self):
"""Gets the message of this V1DeploymentCondition. # noqa: E501
A human readable message indicating details about the transition. # noqa: E501
:return: The message of this V1DeploymentCondition. # noqa: E501
:rtype: str
"""
return self._message
@message.setter
def message(self, message):
"""Sets the message of this V1DeploymentCondition.
A human readable message indicating details about the transition. # noqa: E501
:param message: The message of this V1DeploymentCondition. # noqa: E501
:type: str
"""
self._message = message
@property
def reason(self):
"""Gets the reason of this V1DeploymentCondition. # noqa: E501
The reason for the condition's last transition. # noqa: E501
:return: The reason of this V1DeploymentCondition. # noqa: E501
:rtype: str
"""
return self._reason
@reason.setter
def reason(self, reason):
"""Sets the reason of this V1DeploymentCondition.
The reason for the condition's last transition. # noqa: E501
:param reason: The reason of this V1DeploymentCondition. # noqa: E501
:type: str
"""
self._reason = reason
@property
def status(self):
"""Gets the status of this V1DeploymentCondition. # noqa: E501
Status of the condition, one of True, False, Unknown. # noqa: E501
:return: The status of this V1DeploymentCondition. # noqa: E501
:rtype: str
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this V1DeploymentCondition.
Status of the condition, one of True, False, Unknown. # noqa: E501
:param status: The status of this V1DeploymentCondition. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and status is None: # noqa: E501
raise ValueError("Invalid value for `status`, must not be `None`") # noqa: E501
self._status = status
@property
def type(self):
"""Gets the type of this V1DeploymentCondition. # noqa: E501
Type of deployment condition. # noqa: E501
:return: The type of this V1DeploymentCondition. # noqa: E501
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""Sets the type of this V1DeploymentCondition.
Type of deployment condition. # noqa: E501
:param type: The type of this V1DeploymentCondition. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and type is None: # noqa: E501
raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501
self._type = type
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
    """Returns the string representation of the model"""
    as_dict = self.to_dict()
    return pprint.pformat(as_dict)
def __repr__(self):
    """Return the pretty-printed model string (used by `print` and `pprint`)."""
    return self.to_str()
def __eq__(self, other):
    """Returns true if both objects are equal"""
    if isinstance(other, V1DeploymentCondition):
        # Equality is defined by the serialized form of both models.
        return self.to_dict() == other.to_dict()
    return False
def __ne__(self, other):
    """Returns true if both objects are not equal"""
    if isinstance(other, V1DeploymentCondition):
        # Inequality is the negation of dict-level equality.
        return self.to_dict() != other.to_dict()
    return True
| 31.392453 | 169 | 0.618584 |
88518e8e92650d65b869c835919604bbb12595b2 | 839 | py | Python | www/src/Lib/webbrowser.py | maheshnakarmi/brython | c23368a63378d55d908cfd9fde0fb1bd7b2f806b | [
"BSD-3-Clause"
] | null | null | null | www/src/Lib/webbrowser.py | maheshnakarmi/brython | c23368a63378d55d908cfd9fde0fb1bd7b2f806b | [
"BSD-3-Clause"
] | null | null | null | www/src/Lib/webbrowser.py | maheshnakarmi/brython | c23368a63378d55d908cfd9fde0fb1bd7b2f806b | [
"BSD-3-Clause"
] | null | null | null | from browser import window
__all__ = ["Error", "open", "open_new", "open_new_tab"]
class Error(Exception):
    """Browser-control error; mirrors the stdlib ``webbrowser.Error`` name."""
    pass
_target = { 0: '', 1: '_blank', 2: '_new' } # hack...
def open(url, new=0, autoraise=True):
    """Open *url* via the browser's JavaScript ``window.open``.

    :param url: target URL; when no ``://`` is present the URL is made
        protocol-relative (``//`` prefix) because javascript ``window.open``
        doesn't work if you do not specify the protocol.
    :param new: 0 = same window, 1 = new window, 2 = new tab; a new window
        or tab is not actually controllable on the client side.
    :param autoraise: accepted for stdlib compatibility but not available
        in the browser.
    :return: True if ``window.open`` returned a window object,
        False otherwise (e.g. the popup was blocked).
    """
    # Fix: removed stray debug output (print('entro') / print(url)) that the
    # original left on the success paths.
    if '://' in url:
        if url[:6] == 'ftp://':
            pass  # ftp URLs are passed through untouched
        else:
            # NOTE(review): this branch splits on '//:' which never occurs in
            # a normal URL, so it is effectively a no-op; kept for behavioral
            # compatibility with the original "hack".
            protocol = url.split('//:')[0]
            url = url.replace(protocol + '//:', '//')
    else:
        url = '//' + url
    if window.open(url, _target[new]):
        return True
    return False
def open_new(url):
    """Open *url* in a new browser window, if the browser allows it."""
    return open(url, new=1)
def open_new_tab(url):
    """Open *url* in a new browser tab, if the browser allows it."""
    return open(url, new=2)
| 22.078947 | 76 | 0.562574 |
398bcc1ede6d0710250e35cb82b280391414cf43 | 15,165 | py | Python | obsolete_main/main_momenta_spect.py | ravi-0841/spect-pitch-gan | ea4b9ea8396df753e25e0b2cb210288f683d3903 | [
"MIT"
] | null | null | null | obsolete_main/main_momenta_spect.py | ravi-0841/spect-pitch-gan | ea4b9ea8396df753e25e0b2cb210288f683d3903 | [
"MIT"
] | null | null | null | obsolete_main/main_momenta_spect.py | ravi-0841/spect-pitch-gan | ea4b9ea8396df753e25e0b2cb210288f683d3903 | [
"MIT"
] | null | null | null | import os
import numpy as np
import argparse
import time
import librosa
import sys
import scipy.io.wavfile as scwav
import scipy.io as scio
import scipy.signal as scisig
import pylab
import logging
from glob import glob
from nn_models.model_momenta_spect import VariationalCycleGAN
from utils.helper import smooth, generate_interpolation
import utils.preprocess as preproc
from importlib import reload
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
def train(train_dir, model_dir, model_name, random_seed, \
        validation_dir, output_dir, \
        tensorboard_log_dir, pre_train=None, \
        lambda_cycle_pitch=0, lambda_cycle_mfc=0, lambda_momenta=0,
        lambda_identity_mfc=0, generator_learning_rate=1e-05,
        discriminator_learning_rate=1e-03):
    """Train the momenta/spectrum VariationalCycleGAN on paired .mat features.

    Loads train_5.mat / valid_5.mat from *train_dir*, trains for a fixed
    number of epochs, plots validation pitch/MFC conversions every 100
    epochs, checkpoints the model, and (optionally) converts the wav files
    in *validation_dir* with WORLD vocoder resynthesis.

    :param pre_train: path to a checkpoint to fine-tune from, or None.
    The lambda_* arguments weight the cycle / identity / momenta losses.
    """
    np.random.seed(random_seed)

    # Fixed training hyper-parameters for this experiment.
    num_epochs = 5000
    mini_batch_size = 1 # mini_batch_size = 1 is better
    sampling_rate = 16000
    num_mcep = 23
    frame_period = 5
    n_frames = 128

    # Run tag encoding the loss weights; used for model/log/plot paths.
    lc_lm = "lp_"+str(lambda_cycle_pitch) \
            + '_lm_'+str(lambda_cycle_mfc) \
            +"_lmo_"+str(lambda_momenta) + '_li_' \
            + str(lambda_identity_mfc) + '_momenta_spect'

    model_dir = os.path.join(model_dir, lc_lm)

    # Start each run with a fresh log file; reload() resets logging handlers.
    logger_file = './log/'+lc_lm+'.log'
    if os.path.exists(logger_file):
        os.remove(logger_file)

    reload(logging)
    logging.basicConfig(filename=logger_file, \
                        level=logging.DEBUG)

    print("lambda_cycle pitch - {}".format(lambda_cycle_pitch))
    print("lambda_cycle mfc - {}".format(lambda_cycle_mfc))
    print("lambda_momenta - {}".format(lambda_momenta))
    print("cycle_loss - L1")

    logging.info("lambda_cycle_pitch - {}".format(lambda_cycle_pitch))
    logging.info("lambda_cycle_mfc - {}".format(lambda_cycle_mfc))
    logging.info("lambda_identity_mfc - {}".format(lambda_identity_mfc))
    logging.info("lambda_momenta - {}".format(lambda_momenta))
    logging.info("generator_lr - {}".format(generator_learning_rate))
    logging.info("discriminator_lr - {}".format(discriminator_learning_rate))

    # Create (or clear stale plots from) the per-run figure directory.
    if not os.path.isdir("./pitch_spect/"+lc_lm):
        os.makedirs(os.path.join("./pitch_spect/", lc_lm))
    else:
        for f in glob(os.path.join("./pitch_spect/", \
                                   lc_lm, "*.png")):
            os.remove(f)

    print('Preprocessing Data...')

    start_time = time.time()

    # Pre-extracted f0 and MFC features stored as MATLAB .mat dictionaries.
    data_train = scio.loadmat(os.path.join(train_dir, 'train_5.mat'))
    data_valid = scio.loadmat(os.path.join(train_dir, 'valid_5.mat'))

    # Pitch gets a trailing channel axis; MFC arrays are used as stored.
    pitch_A_train = np.expand_dims(data_train['src_f0_feat'], axis=-1)
    pitch_B_train = np.expand_dims(data_train['tar_f0_feat'], axis=-1)
    mfc_A_train = data_train['src_mfc_feat']
    mfc_B_train = data_train['tar_mfc_feat']

    pitch_A_valid = np.expand_dims(data_valid['src_f0_feat'], axis=-1)
    pitch_B_valid = np.expand_dims(data_valid['tar_f0_feat'], axis=-1)
    mfc_A_valid = data_valid['src_mfc_feat']
    mfc_B_valid = data_valid['tar_mfc_feat']

    # Randomly shuffle the trainig data; A and B are shuffled independently
    # (indices reshuffled between the two domains).
    indices_train = np.arange(0, pitch_A_train.shape[0])
    np.random.shuffle(indices_train)
    pitch_A_train = pitch_A_train[indices_train]
    mfc_A_train = mfc_A_train[indices_train]
    np.random.shuffle(indices_train)
    pitch_B_train = pitch_B_train[indices_train]
    mfc_B_train = mfc_B_train[indices_train]

    mfc_A_valid, pitch_A_valid, \
        mfc_B_valid, pitch_B_valid = preproc.sample_data(mfc_A=mfc_A_valid, \
                                    mfc_B=mfc_B_valid, pitch_A=pitch_A_valid, \
                                    pitch_B=pitch_B_valid)

    if validation_dir is not None:
        validation_output_dir = os.path.join(output_dir, lc_lm)
        if not os.path.exists(validation_output_dir):
            os.makedirs(validation_output_dir)

    end_time = time.time()
    time_elapsed = end_time - start_time

    print('Preprocessing Done.')

    print('Time Elapsed for Data Preprocessing: %02d:%02d:%02d' % (time_elapsed // 3600, \
                                                                   (time_elapsed % 3600 // 60), \
                                                                   (time_elapsed % 60 // 1)))

    #use pre_train arg to provide trained model
    model = VariationalCycleGAN(dim_pitch=1, dim_mfc=23, \
                                n_frames=n_frames, pre_train=pre_train)

    for epoch in range(1,num_epochs+1):
        print('Epoch: %d' % epoch)
        logging.info('Epoch: %d' % epoch)

        start_time_epoch = time.time()

        # Fresh random pairing of A/B samples each epoch.
        mfc_A, pitch_A, \
            mfc_B, pitch_B = preproc.sample_data(mfc_A=mfc_A_train, \
                            mfc_B=mfc_B_train, pitch_A=pitch_A_train, \
                            pitch_B=pitch_B_train)

        n_samples = mfc_A.shape[0]

        train_gen_loss = []
        train_disc_loss = []

        # Mini-batch gradient updates over the epoch's samples.
        for i in range(n_samples // mini_batch_size):

            start = i * mini_batch_size
            end = (i + 1) * mini_batch_size

            generator_loss, discriminator_loss, \
                gen_pitch_A, gen_mfc_A, gen_pitch_B, \
                gen_mfc_B, mom_A, mom_B \
                = model.train_grad(mfc_A=mfc_A[start:end],
                        mfc_B=mfc_B[start:end], pitch_A=pitch_A[start:end],
                        pitch_B=pitch_B[start:end], lambda_cycle_pitch=lambda_cycle_pitch,
                        lambda_cycle_mfc=lambda_cycle_mfc, lambda_momenta=lambda_momenta,
                        lambda_identity_mfc=lambda_identity_mfc,
                        generator_learning_rate=generator_learning_rate,
                        discriminator_learning_rate=discriminator_learning_rate)

            train_gen_loss.append(generator_loss)
            train_disc_loss.append(discriminator_loss)

        print("Train Generator Loss- {}".format(np.mean(train_gen_loss)))
        print("Train Discriminator Loss- {}".format(np.mean(train_disc_loss)))

        logging.info("Train Generator Loss- {}".format(np.mean(train_gen_loss)))
        logging.info("Train Discriminator Loss- {}".format(np.mean(train_disc_loss)))

        # Every 100 epochs, plot validation pitch/MFC conversions.
        if epoch%100 == 0:

            for i in range(mfc_A_valid.shape[0]):

                gen_mom_A, gen_pitch_A, gen_mfc_A, gen_mom_B, \
                    gen_pitch_B, gen_mfc_B = model.test_gen(mfc_A=mfc_A_valid[i:i+1],
                                            mfc_B=mfc_B_valid[i:i+1],
                                            pitch_A=pitch_A_valid[i:i+1],
                                            pitch_B=pitch_B_valid[i:i+1])

                pylab.figure(figsize=(13,13))
                pylab.subplot(221)
                pylab.plot(pitch_A_valid[i].reshape(-1,), label='Input A')
                pylab.plot(gen_pitch_B.reshape(-1,), label='Generated B')
                pylab.plot(gen_mom_B.reshape(-1,), label='Generated momenta')
                pylab.legend(loc=2)
                pylab.subplot(222)
                pylab.plot(mfc_A_valid[i,0,:].reshape(-1,), label='Input Mfc A')
                pylab.plot(gen_mfc_B[0,0,:].reshape(-1,), label='Generated Mfc B')
                pylab.legend(loc=2)
                pylab.subplot(223)
                pylab.plot(pitch_B_valid[i].reshape(-1,), label='Input B')
                pylab.plot(gen_pitch_A.reshape(-1,), label='Generated A')
                pylab.plot(gen_mom_A.reshape(-1,), label='Generated momenta')
                pylab.legend(loc=2)
                pylab.subplot(224)
                pylab.plot(mfc_B_valid[i,0,:].reshape(-1,), label='Input Mfc B')
                pylab.plot(gen_mfc_A[0,0,:].reshape(-1,), label='Generated Mfc A')
                pylab.legend(loc=2)
                pylab.suptitle('Epoch '+str(epoch)+' example '+str(i+1))
                pylab.savefig('./pitch_spect/'+lc_lm+'/'\
                              +str(epoch)+'_'+str(i+1)+'.png')
                pylab.close()

        end_time_epoch = time.time()
        time_elapsed_epoch = end_time_epoch - start_time_epoch

        print('Time Elapsed for This Epoch: %02d:%02d:%02d' % (time_elapsed_epoch // 3600, \
                (time_elapsed_epoch % 3600 // 60), (time_elapsed_epoch % 60 // 1)))

        logging.info('Time Elapsed for This Epoch: %02d:%02d:%02d' % (time_elapsed_epoch // 3600, \
                (time_elapsed_epoch % 3600 // 60), (time_elapsed_epoch % 60 // 1)))

        # Checkpoint and run wav-file conversion every 100 epochs.
        if epoch % 100 == 0:

            cur_model_name = model_name+"_"+str(epoch)+".ckpt"
            model.save(directory=model_dir, filename=cur_model_name)

            if validation_dir is not None:
                print('Generating Validation Data B from A...')
                sys.stdout.flush()
                # counter = 1
                for file in sorted(os.listdir(validation_dir)):
                    try:
                        filepath = os.path.join(validation_dir, file)

                        wav = scwav.read(filepath)
                        wav = wav[1].astype(np.float64)
                        wav = preproc.wav_padding(wav = wav, sr = sampling_rate, \
                                frame_period = frame_period, multiple = 4)

                        # WORLD analysis: f0, spectral envelope, aperiodicity.
                        f0, sp, ap = preproc.world_decompose(wav = wav, \
                                        fs = sampling_rate, frame_period = frame_period)

                        code_sp = preproc.world_encode_spectral_envelope(sp, \
                                    sampling_rate, dim=num_mcep)

                        # Median-filter f0, remember unvoiced frames (f0<10),
                        # then interpolate/smooth for the model input.
                        f0 = scisig.medfilt(f0, kernel_size=3)
                        z_idx = np.where(f0<10.0)[0]
                        f0 = generate_interpolation(f0)
                        f0 = smooth(f0, window_len=13)
                        f0 = np.reshape(f0, (1,1,-1))

                        code_sp = np.reshape(code_sp, (1,-1,num_mcep))
                        code_sp = np.transpose(code_sp, (0,2,1))

                        f0_conv, sp_conv = model.test(input_pitch=f0, \
                                            input_mfc=code_sp, \
                                            direction='A2B')

                        # Restore unvoiced frames to zero after conversion.
                        f0_conv = np.asarray(np.reshape(f0_conv,(-1,)), np.float64)
                        f0_conv[z_idx] = 0.0

                        sp_conv = np.squeeze(np.transpose(sp_conv, (0,2,1)))
                        sp_conv = np.asarray(sp_conv.copy(order='C'), np.float64)
                        sp_conv = preproc.world_decode_spectral_envelope(sp_conv,
                                                                         fs=sampling_rate)

                        # WORLD synthesis needs C-contiguous float64 arrays.
                        sp_conv = sp_conv.copy(order='C')
                        f0_conv = f0_conv.copy(order='C')
                        ap_conv = ap.copy(order='C')

                        wav_transformed = preproc.world_speech_synthesis(f0=f0_conv,
                                            decoded_sp=sp_conv, ap=ap_conv, fs=sampling_rate,
                                            frame_period=frame_period)

                        librosa.output.write_wav(os.path.join(validation_output_dir, \
                                os.path.basename(file)), wav_transformed, sampling_rate)
                    except Exception as ex:
                        print(ex)
                        logging.info(ex)
if __name__ == '__main__':

    # Command-line front end: builds default paths for one emotion pair and
    # forwards every hyper-parameter to train().
    parser = argparse.ArgumentParser(description = 'Train VariationalCycleGAN model for datasets.')

    emo_dict = {"neu-ang":['neutral', 'angry'], \
                "neu-sad":['neutral', 'sad'], \
                "neu-hap":['neutral', 'happy']}

    emo_pair = "neu-ang"
    train_dir_default = "./data/"+emo_pair
    model_dir_default = "./model/"+emo_pair
    model_name_default = emo_pair
    validation_dir_default = './data/evaluation/'+emo_pair+"/"+emo_dict[emo_pair][0]+'_5'
#    validation_dir_default = './data/evaluation/'+emo_pair+"/"+emo_dict[emo_pair][0]
    output_dir_default = './validation_output/'+emo_pair
    tensorboard_log_dir_default = './log/'+emo_pair
    random_seed_default = 0

    parser.add_argument('--train_dir', type=str, help='Directory for A.',
                        default=train_dir_default)
    parser.add_argument('--model_dir', type=str, help='Directory for saving models.',
                        default=model_dir_default)
    parser.add_argument('--model_name', type=str, help='File name for saving model.',
                        default=model_name_default)
    parser.add_argument('--random_seed', type=int, help='Random seed for model training.',
                        default=random_seed_default)
    parser.add_argument('--validation_dir', type=str,
                        help='Convert validation after each training epoch. Set None for no conversion',
                        default=validation_dir_default)
    parser.add_argument('--output_dir', type=str, help='Output directory for converted validation voices.',
                        default=output_dir_default)
    parser.add_argument('--tensorboard_log_dir', type=str, help='TensorBoard log directory.',
                        default=tensorboard_log_dir_default)
    parser.add_argument('--current_iter', type=int, help="Current iteration of the model (Fine tuning)",
                        default=1)
    # Loss weights for the variational cycle-GAN objective.
    parser.add_argument("--lambda_cycle_pitch", type=float, help="hyperparam for cycle loss pitch",
                        default=0.00001)
    parser.add_argument('--lambda_cycle_mfc', type=float, help="hyperparam for cycle loss mfc",
                        default=1.0)
    parser.add_argument('--lambda_identity_mfc', type=float, help="hyperparam for identity loss mfc",
                        default=0.5)
    parser.add_argument('--lambda_momenta', type=float, help="hyperparam for momenta magnitude",
                        default=1e-6)
    parser.add_argument('--generator_learning_rate', type=float, help="generator learning rate",
                        default=1e-07)
    parser.add_argument('--discriminator_learning_rate', type=float, help="discriminator learning rate",
                        default=1e-07)

    argv = parser.parse_args()

    train_dir = argv.train_dir
    model_dir = argv.model_dir
    model_name = argv.model_name
    random_seed = argv.random_seed
    # The string 'None'/'none' disables validation conversion entirely.
    validation_dir = None if argv.validation_dir == 'None' or argv.validation_dir == 'none' \
                        else argv.validation_dir
    output_dir = argv.output_dir
    tensorboard_log_dir = argv.tensorboard_log_dir

    lambda_cycle_pitch = argv.lambda_cycle_pitch
    lambda_cycle_mfc = argv.lambda_cycle_mfc
    lambda_identity_mfc = argv.lambda_identity_mfc
    lambda_momenta = argv.lambda_momenta

    generator_learning_rate = argv.generator_learning_rate
    discriminator_learning_rate = argv.discriminator_learning_rate

    # NOTE: pre_train is hard-wired to a supervised CMU-Arctic checkpoint.
    train(train_dir=train_dir, model_dir=model_dir, model_name=model_name,
          random_seed=random_seed, validation_dir=validation_dir,
          output_dir=output_dir, tensorboard_log_dir=tensorboard_log_dir,
          pre_train='./model/cmu-arctic/lp_0.0001_lm_0.0001_lmo_0.01_supervised_train/cmu-arctic900.ckpt',
          lambda_cycle_pitch=lambda_cycle_pitch, lambda_cycle_mfc=lambda_cycle_mfc,
          lambda_momenta=lambda_momenta, lambda_identity_mfc=lambda_identity_mfc,
          generator_learning_rate=generator_learning_rate,
          discriminator_learning_rate=discriminator_learning_rate)
e4d9dea64ccddc028bf6cd5fe76dcaaa87d1ae89 | 1,563 | py | Python | nova/api/openstack/compute/contrib/__init__.py | vmthunder/nova | baf05caab705c5778348d9f275dc541747b7c2de | [
"Apache-2.0"
] | 7 | 2015-09-22T11:27:16.000Z | 2015-11-02T12:33:46.000Z | nova/api/openstack/compute/contrib/__init__.py | vmthunder/nova | baf05caab705c5778348d9f275dc541747b7c2de | [
"Apache-2.0"
] | 9 | 2015-05-20T11:20:17.000Z | 2017-07-27T08:21:33.000Z | nova/api/openstack/compute/contrib/__init__.py | vmthunder/nova | baf05caab705c5778348d9f275dc541747b7c2de | [
"Apache-2.0"
] | 13 | 2015-05-05T09:34:04.000Z | 2017-11-08T02:03:46.000Z | # Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Contrib contains extensions that are shipped with nova.
It can't be called 'extensions' because that causes namespacing problems.
"""
from oslo.config import cfg
from nova.api.openstack import extensions
from nova.openstack.common import log as logging
ext_opts = [
cfg.ListOpt('osapi_compute_ext_list',
default=[],
help='Specify list of extensions to load when using osapi_'
'compute_extension option with nova.api.openstack.'
'compute.contrib.select_extensions'),
]
CONF = cfg.CONF
CONF.register_opts(ext_opts)
LOG = logging.getLogger(__name__)
def standard_extensions(ext_mgr):
extensions.load_standard_extensions(ext_mgr, LOG, __path__, __package__)
def select_extensions(ext_mgr):
extensions.load_standard_extensions(ext_mgr, LOG, __path__, __package__,
CONF.osapi_compute_ext_list)
| 33.255319 | 78 | 0.710813 |
dd75c97e2db8fe4cfcabe101b794ec1791b2baf4 | 1,031 | py | Python | technocrats/api/migrations/0001_initial.py | verenceLola/Technocrats | a039dc3ae431e04ab34121b37f2e249e2563662b | [
"MIT"
] | null | null | null | technocrats/api/migrations/0001_initial.py | verenceLola/Technocrats | a039dc3ae431e04ab34121b37f2e249e2563662b | [
"MIT"
] | 8 | 2018-11-23T10:53:07.000Z | 2022-01-06T08:36:53.000Z | technocrats/api/migrations/0001_initial.py | verenceLola/Technocrats | a039dc3ae431e04ab34121b37f2e249e2563662b | [
"MIT"
] | null | null | null | # Generated by Django 2.1.3 on 2018-11-22 11:21
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial migration: creates the Bucketlist model/table."""

    initial = True

    dependencies = [
        # Depends on whatever model is configured as AUTH_USER_MODEL.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Bucketlist',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255, unique=True)),
                ('date_created', models.DateTimeField(auto_now_add=True)),
                ('date_modified', models.DateTimeField(auto_now=True)),
                ('owner_id', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, related_name='bucketlists', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'db_table': 'techno_bucketlist',
            },
        ),
    ]
| 33.258065 | 161 | 0.623666 |
80726ebaa24b862c8901b43780e6b92419e9fe00 | 25,451 | py | Python | openstack_dashboard/openstack/common/rpc/amqp.py | shhui/horizon | fd8cf6e31c07b147289bfb86c90133599eb2906e | [
"Apache-2.0"
] | null | null | null | openstack_dashboard/openstack/common/rpc/amqp.py | shhui/horizon | fd8cf6e31c07b147289bfb86c90133599eb2906e | [
"Apache-2.0"
] | null | null | null | openstack_dashboard/openstack/common/rpc/amqp.py | shhui/horizon | fd8cf6e31c07b147289bfb86c90133599eb2906e | [
"Apache-2.0"
] | null | null | null | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# Copyright 2011 - 2012, Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Shared code between AMQP based openstack.common.rpc implementations.
The code in this module is shared between the rpc implemenations based on AMQP.
Specifically, this includes impl_kombu and impl_qpid. impl_carrot also uses
AMQP, but is deprecated and predates this code.
"""
import collections
import inspect
import sys
import uuid
from eventlet import greenpool
from eventlet import pools
from eventlet import queue
from eventlet import semaphore
# TODO(pekowsk): Remove import cfg and below comment in Havana.
# This import should no longer be needed when the amqp_rpc_single_reply_queue
# option is removed.
from oslo.config import cfg
from openstack_dashboard.openstack.common import excutils
from openstack_dashboard.openstack.common.gettextutils import _
from openstack_dashboard.openstack.common import local
from openstack_dashboard.openstack.common import log as logging
from openstack_dashboard.openstack.common.rpc import common as rpc_common
# TODO(pekowski): Remove this option in Havana.
amqp_opts = [
    cfg.BoolOpt('amqp_rpc_single_reply_queue',
                default=False,
                help='Enable a fast single reply queue if using AMQP based '
                     'RPC like RabbitMQ or Qpid.'),
]

cfg.CONF.register_opts(amqp_opts)

# Message key used to stamp every message with a unique id so consumers
# can detect duplicate deliveries (see _MsgIdCache).
UNIQUE_ID = '_unique_id'
LOG = logging.getLogger(__name__)
class Pool(pools.Pool):
    """Class that implements a Pool of Connections."""

    def __init__(self, conf, connection_cls, *args, **kwargs):
        # connection_cls is the concrete AMQP Connection class (e.g. the
        # impl_kombu or impl_qpid Connection) instantiated by create().
        self.connection_cls = connection_cls
        self.conf = conf
        kwargs.setdefault("max_size", self.conf.rpc_conn_pool_size)
        # LIFO ("stack") ordering reuses the most recently returned
        # connection first.
        kwargs.setdefault("order_as_stack", True)
        super(Pool, self).__init__(*args, **kwargs)
        self.reply_proxy = None

    # TODO(comstud): Timeout connections not used in a while
    def create(self):
        """Create a brand-new connection when the pool has no free ones."""
        LOG.debug(_('Pool creating new connection'))
        return self.connection_cls(self.conf)

    def empty(self):
        """Close every cached connection and discard the class-level pool."""
        while self.free_items:
            self.get().close()
        # Force a new connection pool to be created.
        # Note that this was added due to failing unit test cases. The issue
        # is the above "while loop" gets all the cached connections from the
        # pool and closes them, but never returns them to the pool, a pool
        # leak. The unit tests hang waiting for an item to be returned to the
        # pool. The unit tests get here via the tearDown() method. In the run
        # time code, it gets here via cleanup() and only appears in service.py
        # just before doing a sys.exit(), so cleanup() only happens once and
        # the leakage is not a problem.
        self.connection_cls.pool = None
# Guards lazy, one-time creation of the per-connection-class pool.
_pool_create_sem = semaphore.Semaphore()


def get_connection_pool(conf, connection_cls):
    """Return the singleton Pool for *connection_cls*, creating it lazily."""
    with _pool_create_sem:
        # Make sure only one thread tries to create the connection pool.
        if not connection_cls.pool:
            connection_cls.pool = Pool(conf, connection_cls)
    return connection_cls.pool
class ConnectionContext(rpc_common.Connection):
    """The class that is actually returned to the caller of
    create_connection().  This is essentially a wrapper around
    Connection that supports 'with'.  It can also return a new
    Connection, or one from a pool.  The function will also catch
    when an instance of this class is to be deleted.  With that
    we can return Connections to the pool on exceptions and so
    forth without making the caller be responsible for catching
    them.  If possible the function makes sure to return a
    connection to the pool.
    """

    def __init__(self, conf, connection_pool, pooled=True, server_params=None):
        """Create a new connection, or get one from the pool."""
        self.connection = None
        self.conf = conf
        self.connection_pool = connection_pool
        if pooled:
            self.connection = connection_pool.get()
        else:
            # Dedicated (non-pooled) connection, e.g. for a reply queue.
            self.connection = connection_pool.connection_cls(
                conf,
                server_params=server_params)
        self.pooled = pooled

    def __enter__(self):
        """When with ConnectionContext() is used, return self."""
        return self

    def _done(self):
        """If the connection came from a pool, clean it up and put it back.
        If it did not come from a pool, close it.
        """
        if self.connection:
            if self.pooled:
                # Reset the connection so it's ready for the next caller
                # to grab from the pool
                self.connection.reset()
                self.connection_pool.put(self.connection)
            else:
                try:
                    self.connection.close()
                except Exception:
                    # Best-effort close; a broken connection is discarded.
                    pass
            self.connection = None

    def __exit__(self, exc_type, exc_value, tb):
        """End of 'with' statement.  We're done here."""
        self._done()

    def __del__(self):
        """Caller is done with this connection.  Make sure we cleaned up."""
        self._done()

    def close(self):
        """Caller is done with this connection."""
        self._done()

    def create_consumer(self, topic, proxy, fanout=False):
        """Delegate consumer creation to the wrapped connection."""
        self.connection.create_consumer(topic, proxy, fanout)

    def create_worker(self, topic, proxy, pool_name):
        """Delegate worker creation to the wrapped connection."""
        self.connection.create_worker(topic, proxy, pool_name)

    def join_consumer_pool(self, callback, pool_name, topic, exchange_name):
        """Delegate consumer-pool membership to the wrapped connection."""
        self.connection.join_consumer_pool(callback,
                                           pool_name,
                                           topic,
                                           exchange_name)

    def consume_in_thread(self):
        """Start consuming on the wrapped connection in a green thread."""
        self.connection.consume_in_thread()

    def __getattr__(self, key):
        """Proxy all other calls to the Connection instance."""
        if self.connection:
            return getattr(self.connection, key)
        else:
            # The wrapped connection was already returned/closed.
            raise rpc_common.InvalidRPCConnectionReuse()
class ReplyProxy(ConnectionContext):
    """Connection class for RPC replies / callbacks.

    Owns a single direct reply queue shared by all in-flight calls and
    routes each incoming reply to the waiter registered for its msg_id.
    """

    def __init__(self, conf, connection_pool):
        # msg_id -> waiter object (see MulticallProxyWaiter.put()).
        self._call_waiters = {}
        self._num_call_waiters = 0
        self._num_call_waiters_wrn_threshold = 10
        self._reply_q = 'reply_' + uuid.uuid4().hex
        super(ReplyProxy, self).__init__(conf, connection_pool, pooled=False)
        self.declare_direct_consumer(self._reply_q, self._process_data)
        self.consume_in_thread()

    def _process_data(self, message_data):
        """Dispatch one reply message to the waiter for its msg_id."""
        msg_id = message_data.pop('_msg_id', None)
        waiter = self._call_waiters.get(msg_id)
        if not waiter:
            # Late reply (caller timed out/finished) or unknown msg_id.
            LOG.warn(_('no calling threads waiting for msg_id : %(msg_id)s'
                       ', message : %(data)s'), {'msg_id': msg_id,
                                                 'data': message_data})
        else:
            waiter.put(message_data)

    def add_call_waiter(self, waiter, msg_id):
        """Register *waiter* to receive replies addressed to *msg_id*."""
        self._num_call_waiters += 1
        if self._num_call_waiters > self._num_call_waiters_wrn_threshold:
            LOG.warn(_('Number of call waiters is greater than warning '
                       'threshold: %d. There could be a MulticallProxyWaiter '
                       'leak.') % self._num_call_waiters_wrn_threshold)
            # Double the threshold so the warning is not logged per call.
            self._num_call_waiters_wrn_threshold *= 2
        self._call_waiters[msg_id] = waiter

    def del_call_waiter(self, msg_id):
        """Unregister the waiter for *msg_id* (call finished or timed out)."""
        self._num_call_waiters -= 1
        del self._call_waiters[msg_id]

    def get_reply_q(self):
        """Return the name of this proxy's shared direct reply queue."""
        return self._reply_q
def msg_reply(conf, msg_id, reply_q, connection_pool, reply=None,
              failure=None, ending=False, log_failure=True):
    """Sends a reply or an error on the channel signified by msg_id.

    Failure should be a sys.exc_info() tuple.
    """
    with ConnectionContext(conf, connection_pool) as conn:
        if failure:
            failure = rpc_common.serialize_remote_exception(failure,
                                                            log_failure)

        try:
            msg = {'result': reply, 'failure': failure}
        except TypeError:
            # Result was not serializable as-is; fall back to a dict of
            # repr()'d attributes.
            msg = {'result': dict((k, repr(v))
                   for k, v in reply.__dict__.iteritems()),
                   'failure': failure}
        if ending:
            msg['ending'] = True
        _add_unique_id(msg)
        # If a reply_q exists, add the msg_id to the reply and pass the
        # reply_q to direct_send() to use it as the response queue.
        # Otherwise use the msg_id for backward compatibility.
        if reply_q:
            msg['_msg_id'] = msg_id
            conn.direct_send(reply_q, rpc_common.serialize_msg(msg))
        else:
            conn.direct_send(msg_id, rpc_common.serialize_msg(msg))
class RpcContext(rpc_common.CommonRpcContext):
    """Context that supports replying to a rpc.call."""
    def __init__(self, **kwargs):
        # msg_id/reply_q identify where replies for this call should go.
        self.msg_id = kwargs.pop('msg_id', None)
        self.reply_q = kwargs.pop('reply_q', None)
        self.conf = kwargs.pop('conf')
        super(RpcContext, self).__init__(**kwargs)

    def deepcopy(self):
        """Return a new RpcContext carrying the same values and reply info."""
        values = self.to_dict()
        values['conf'] = self.conf
        values['msg_id'] = self.msg_id
        values['reply_q'] = self.reply_q
        return self.__class__(**values)

    def reply(self, reply=None, failure=None, ending=False,
              connection_pool=None, log_failure=True):
        """Send a reply (or failure) back to the caller, if one is waiting."""
        if self.msg_id:
            msg_reply(self.conf, self.msg_id, self.reply_q, connection_pool,
                      reply, failure, ending, log_failure)
            if ending:
                # No further replies are valid once 'ending' was sent.
                self.msg_id = None
def unpack_context(conf, msg):
    """Unpack context from msg.

    Pops every '_context_*' key (plus '_msg_id'/'_reply_q') out of *msg*
    and builds an RpcContext from them; *msg* is mutated in place.
    """
    context_dict = {}
    for key in list(msg.keys()):
        # NOTE(vish): Some versions of python don't like unicode keys
        #             in kwargs.
        key = str(key)
        if key.startswith('_context_'):
            value = msg.pop(key)
            context_dict[key[9:]] = value
    context_dict['msg_id'] = msg.pop('_msg_id', None)
    context_dict['reply_q'] = msg.pop('_reply_q', None)
    context_dict['conf'] = conf
    ctx = RpcContext.from_dict(context_dict)
    rpc_common._safe_log(LOG.debug, _('unpacked context: %s'), ctx.to_dict())
    return ctx
def pack_context(msg, context):
    """Pack context into msg.

    Values for message keys need to be less than 255 chars, so we pull
    context out into a bunch of separate keys. If we want to support
    more arguments in rabbit messages, we may want to do the same
    for args at some point.
    """
    # Each context field becomes a top-level '_context_<name>' message key;
    # unpack_context() reverses this.
    context_d = dict([('_context_%s' % key, value)
                      for (key, value) in context.to_dict().iteritems()])
    msg.update(context_d)
class _MsgIdCache(object):
    """Detects duplicate message deliveries via their UNIQUE_ID stamp."""

    # NOTE: This value could be a configuration item, but it is not
    #       necessary to change it in most cases, so leave it static
    #       for now.
    DUP_MSG_CHECK_SIZE = 16

    def __init__(self, **kwargs):
        # Bounded deque: only the most recent DUP_MSG_CHECK_SIZE ids are
        # remembered, so very old duplicates would not be detected.
        self.prev_msgids = collections.deque([],
                                             maxlen=self.DUP_MSG_CHECK_SIZE)

    def check_duplicate_message(self, message_data):
        """AMQP consumers may read same message twice when exceptions occur
        before ack is returned. This method prevents doing it.
        """
        if UNIQUE_ID in message_data:
            msg_id = message_data[UNIQUE_ID]
            if msg_id not in self.prev_msgids:
                self.prev_msgids.append(msg_id)
            else:
                raise rpc_common.DuplicateMessageError(msg_id=msg_id)
def _add_unique_id(msg):
    """Stamp *msg* with a fresh UNIQUE_ID for duplicate-message detection.

    :param msg: message dict, mutated in place (gains the UNIQUE_ID key).
    """
    unique_id = uuid.uuid4().hex
    msg.update({UNIQUE_ID: unique_id})
    # Fix: pass the id as a lazy %-style logging argument instead of eagerly
    # interpolating with '%', so the string is only built when DEBUG logging
    # is enabled.
    LOG.debug(_('UNIQUE_ID is %s.'), unique_id)
class _ThreadPoolWithWait(object):
    """Base class for a delayed invocation manager used by
    the Connection class to start up green threads
    to handle incoming messages.
    """

    def __init__(self, conf, connection_pool):
        # Green-thread pool sized by the rpc_thread_pool_size option.
        self.pool = greenpool.GreenPool(conf.rpc_thread_pool_size)
        self.connection_pool = connection_pool
        self.conf = conf

    def wait(self):
        """Wait for all callback threads to exit."""
        self.pool.waitall()
class CallbackWrapper(_ThreadPoolWithWait):
    """Wraps a straight callback to allow it to be invoked in a green
    thread.
    """

    def __init__(self, conf, callback, connection_pool):
        """
        :param conf: cfg.CONF instance
        :param callback: a callable (probably a function)
        :param connection_pool: connection pool as returned by
                                get_connection_pool()
        """
        super(CallbackWrapper, self).__init__(
            conf=conf,
            connection_pool=connection_pool,
        )
        self.callback = callback

    def __call__(self, message_data):
        """Invoke the wrapped callback asynchronously in a green thread."""
        self.pool.spawn_n(self.callback, message_data)
class ProxyCallback(_ThreadPoolWithWait):
    """Calls methods on a proxy object based on method and args."""

    def __init__(self, conf, proxy, connection_pool):
        super(ProxyCallback, self).__init__(
            conf=conf,
            connection_pool=connection_pool,
        )
        self.proxy = proxy
        self.msg_id_cache = _MsgIdCache()

    def __call__(self, message_data):
        """Consumer callback to call a method on a proxy object.

        Parses the message for validity and fires off a thread to call the
        proxy object method.

        Message data should be a dictionary with two keys:
            method: string representing the method to call
            args: dictionary of arg: value

        Example: {'method': 'echo', 'args': {'value': 42}}
        """
        # It is important to clear the context here, because at this point
        # the previous context is stored in local.store.context
        if hasattr(local.store, 'context'):
            del local.store.context
        rpc_common._safe_log(LOG.debug, _('received %s'), message_data)
        self.msg_id_cache.check_duplicate_message(message_data)
        ctxt = unpack_context(self.conf, message_data)
        method = message_data.get('method')
        args = message_data.get('args', {})
        version = message_data.get('version')
        namespace = message_data.get('namespace')
        if not method:
            # Malformed message: tell the caller instead of hanging it.
            LOG.warn(_('no method for message: %s') % message_data)
            ctxt.reply(_('No method for message: %s') % message_data,
                       connection_pool=self.connection_pool)
            return
        self.pool.spawn_n(self._process_data, ctxt, version, method,
                          namespace, args)

    def _process_data(self, ctxt, version, method, namespace, args):
        """Process a message in a new thread.

        If the proxy object we have has a dispatch method
        (see rpc.dispatcher.RpcDispatcher), pass it the version,
        method, and args and let it dispatch as appropriate.  If not, use
        the old behavior of magically calling the specified method on the
        proxy we have here.
        """
        ctxt.update_store()
        try:
            rval = self.proxy.dispatch(ctxt, version, method, namespace,
                                       **args)
            # Check if the result was a generator
            if inspect.isgenerator(rval):
                for x in rval:
                    ctxt.reply(x, None, connection_pool=self.connection_pool)
            else:
                ctxt.reply(rval, None, connection_pool=self.connection_pool)
            # This final None tells multicall that it is done.
            ctxt.reply(ending=True, connection_pool=self.connection_pool)
        except rpc_common.ClientException as e:
            # Expected, caller-declared exception: forward without noisy logs.
            LOG.debug(_('Expected exception during message handling (%s)') %
                      e._exc_info[1])
            ctxt.reply(None, e._exc_info,
                       connection_pool=self.connection_pool,
                       log_failure=False)
        except Exception:
            # sys.exc_info() is deleted by LOG.exception().
            exc_info = sys.exc_info()
            LOG.error(_('Exception during message handling'),
                      exc_info=exc_info)
            ctxt.reply(None, exc_info, connection_pool=self.connection_pool)
class MulticallProxyWaiter(object):
    """Iterator over the replies to one multicall request.

    Registers itself with the shared reply proxy so reply messages for
    ``msg_id`` are routed into an internal queue; ``__iter__`` drains
    that queue until the remote side sends an 'ending' message, a
    timeout elapses, or an error reply arrives.
    """

    def __init__(self, conf, msg_id, timeout, connection_pool):
        self._msg_id = msg_id
        self._timeout = timeout or conf.rpc_response_timeout
        self._reply_proxy = connection_pool.reply_proxy
        self._done = False
        self._got_ending = False
        self._conf = conf
        self._dataqueue = queue.LightQueue()
        # Add this caller to the reply proxy's call_waiters
        self._reply_proxy.add_call_waiter(self, self._msg_id)
        self.msg_id_cache = _MsgIdCache()

    def put(self, data):
        """Called by the reply proxy to hand this waiter a reply message."""
        self._dataqueue.put(data)

    def done(self):
        if self._done:
            return
        self._done = True
        # Remove this caller from reply proxy's call_waiters
        self._reply_proxy.del_call_waiter(self._msg_id)

    def _process_data(self, data):
        """Deserialize one reply; return its payload (None for 'ending')."""
        result = None
        self.msg_id_cache.check_duplicate_message(data)
        if data['failure']:
            failure = data['failure']
            result = rpc_common.deserialize_remote_exception(self._conf,
                                                             failure)
        elif data.get('ending', False):
            self._got_ending = True
        else:
            result = data['result']
        return result

    def __iter__(self):
        """Return a result until we get a reply with an 'ending' flag."""
        if self._done:
            # BUG FIX: under PEP 479 (Python 3.7+) 'raise StopIteration'
            # inside a generator is converted to RuntimeError; a plain
            # 'return' terminates the generator correctly everywhere.
            return
        while True:
            try:
                data = self._dataqueue.get(timeout=self._timeout)
                result = self._process_data(data)
            except queue.Empty:
                self.done()
                raise rpc_common.Timeout()
            except Exception:
                with excutils.save_and_reraise_exception():
                    self.done()
            if self._got_ending:
                self.done()
                return
            if isinstance(result, Exception):
                self.done()
                raise result
            yield result
#TODO(pekowski): Remove MulticallWaiter() in Havana.
class MulticallWaiter(object):
    """Legacy per-call reply consumer (pre single-reply-queue mode).

    Owns a dedicated direct consumer on ``connection``; the consume()
    callback feeds results in via ``__call__`` while ``__iter__`` drives
    consumption and yields each result to the caller.
    """

    def __init__(self, conf, connection, timeout):
        self._connection = connection
        self._iterator = connection.iterconsume(timeout=timeout or
                                                conf.rpc_response_timeout)
        self._result = None
        self._done = False
        self._got_ending = False
        self._conf = conf
        self.msg_id_cache = _MsgIdCache()

    def done(self):
        if self._done:
            return
        self._done = True
        self._iterator.close()
        self._iterator = None
        self._connection.close()

    def __call__(self, data):
        """The consume() callback will call this.  Store the result."""
        self.msg_id_cache.check_duplicate_message(data)
        if data['failure']:
            failure = data['failure']
            self._result = rpc_common.deserialize_remote_exception(self._conf,
                                                                   failure)
        elif data.get('ending', False):
            self._got_ending = True
        else:
            self._result = data['result']

    def __iter__(self):
        """Return a result until we get a 'None' response from consumer"""
        if self._done:
            # BUG FIX: 'raise StopIteration' inside a generator becomes
            # RuntimeError under PEP 479 (Python 3.7+); use 'return'.
            return
        while True:
            try:
                # BUG FIX: the .next() method is Python-2 only; the
                # next() builtin works on both Python 2 and 3.
                next(self._iterator)
            except Exception:
                with excutils.save_and_reraise_exception():
                    self.done()
            if self._got_ending:
                self.done()
                return
            result = self._result
            if isinstance(result, Exception):
                self.done()
                raise result
            yield result
def create_connection(conf, new, connection_pool):
    """Hand out a ConnectionContext, pooled unless a fresh one is requested."""
    use_pool = not new
    return ConnectionContext(conf, connection_pool, pooled=use_pool)
# Guards the lazy, one-time creation of the shared ReplyProxy in
# multicall() so concurrent first callers cannot create two proxies.
_reply_proxy_create_sem = semaphore.Semaphore()
def multicall(conf, context, topic, msg, timeout, connection_pool):
    """Make a call that returns multiple times.

    Returns an iterable waiter object; each reply from the remote side
    is yielded to the caller as it arrives.
    """
    # TODO(pekowski): Remove all these comments in Havana.
    # For amqp_rpc_single_reply_queue = False,
    # Can't use 'with' for multicall, as it returns an iterator
    # that will continue to use the connection.  When it's done,
    # connection.close() will get called which will put it back into
    # the pool
    # For amqp_rpc_single_reply_queue = True,
    # The 'with' statement is mandatory for closing the connection
    LOG.debug(_('Making synchronous call on %s ...'), topic)
    msg_id = uuid.uuid4().hex
    msg.update({'_msg_id': msg_id})
    LOG.debug(_('MSG_ID is %s') % (msg_id))
    _add_unique_id(msg)
    pack_context(msg, context)

    # TODO(pekowski): Remove this flag and the code under the if clause
    # in Havana.
    if not conf.amqp_rpc_single_reply_queue:
        # Legacy path: one dedicated direct consumer per call.  The
        # consumer must be declared *before* the request is published so
        # no reply can be lost.
        conn = ConnectionContext(conf, connection_pool)
        wait_msg = MulticallWaiter(conf, conn, timeout)
        conn.declare_direct_consumer(msg_id, wait_msg)
        conn.topic_send(topic, rpc_common.serialize_msg(msg), timeout)
    else:
        # Shared reply-queue path: lazily create the process-wide
        # ReplyProxy exactly once, double-checked under the semaphore.
        with _reply_proxy_create_sem:
            if not connection_pool.reply_proxy:
                connection_pool.reply_proxy = ReplyProxy(conf, connection_pool)
        msg.update({'_reply_q': connection_pool.reply_proxy.get_reply_q()})
        wait_msg = MulticallProxyWaiter(conf, msg_id, timeout, connection_pool)
        with ConnectionContext(conf, connection_pool) as conn:
            conn.topic_send(topic, rpc_common.serialize_msg(msg), timeout)
    return wait_msg
def call(conf, context, topic, msg, timeout, connection_pool):
    """Send a message on a topic and wait for (the final) response."""
    # NOTE(vish): drain the multicall iterator; only the last reply is
    # the return value of a plain call().
    responses = list(
        multicall(conf, context, topic, msg, timeout, connection_pool))
    return responses[-1] if responses else None
def cast(conf, context, topic, msg, connection_pool):
    """Fire-and-forget: publish msg on topic without awaiting a reply."""
    LOG.debug(_('Making asynchronous cast on %s...'), topic)
    _add_unique_id(msg)
    pack_context(msg, context)
    with ConnectionContext(conf, connection_pool) as conn:
        payload = rpc_common.serialize_msg(msg)
        conn.topic_send(topic, payload)
def fanout_cast(conf, context, topic, msg, connection_pool):
    """Broadcast msg on a fanout exchange; no reply is expected."""
    LOG.debug(_('Making asynchronous fanout cast...'))
    _add_unique_id(msg)
    pack_context(msg, context)
    with ConnectionContext(conf, connection_pool) as conn:
        payload = rpc_common.serialize_msg(msg)
        conn.fanout_send(topic, payload)
def cast_to_server(conf, context, server_params, topic, msg, connection_pool):
    """Deliver a one-way message on a topic directly to one server."""
    _add_unique_id(msg)
    pack_context(msg, context)
    # A non-pooled connection is used so server_params can select the host.
    ctx = ConnectionContext(conf, connection_pool, pooled=False,
                            server_params=server_params)
    with ctx as conn:
        conn.topic_send(topic, rpc_common.serialize_msg(msg))
def fanout_cast_to_server(conf, context, server_params, topic, msg,
                          connection_pool):
    """Broadcast a message on a fanout exchange scoped to one server."""
    _add_unique_id(msg)
    pack_context(msg, context)
    # A non-pooled connection is used so server_params can select the host.
    ctx = ConnectionContext(conf, connection_pool, pooled=False,
                            server_params=server_params)
    with ctx as conn:
        conn.fanout_send(topic, rpc_common.serialize_msg(msg))
def notify(conf, context, topic, msg, connection_pool, envelope):
    """Publish a notification event on the given topic."""
    LOG.debug(_('Sending %(event_type)s on %(topic)s'),
              dict(event_type=msg.get('event_type'),
                   topic=topic))
    _add_unique_id(msg)
    pack_context(msg, context)
    with ConnectionContext(conf, connection_pool) as conn:
        # Only wrap the message in the RPC envelope when requested.
        payload = rpc_common.serialize_msg(msg) if envelope else msg
        conn.notify_send(topic, payload)
def cleanup(connection_pool):
    """Drain the connection pool, if one was supplied."""
    if not connection_pool:
        return
    connection_pool.empty()
def get_control_exchange(conf):
    """Return the control exchange name configured for this deployment."""
    exchange = conf.control_exchange
    return exchange
| 37.483063 | 79 | 0.631724 |
11fdcd6f71bcd6197b239fd79b552d9a4173f9bb | 1,046 | py | Python | synth.py | vam-google/java-automl | 47b7f68f4452ba1297ea198ec2efd2e6fa5cdfe8 | [
"Apache-2.0"
] | null | null | null | synth.py | vam-google/java-automl | 47b7f68f4452ba1297ea198ec2efd2e6fa5cdfe8 | [
"Apache-2.0"
] | null | null | null | synth.py | vam-google/java-automl | 47b7f68f4452ba1297ea198ec2efd2e6fa5cdfe8 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This script is used to synthesize generated parts of this library."""
import synthtool as s
import synthtool.gcp as gcp
import synthtool.languages.java as java
# Generate the GAPIC client library for each AutoML API version, then
# re-apply the shared Java templates.
gapic = gcp.GAPICGenerator()
service = 'automl'
versions = ['v1beta1', 'v1']

for version in versions:
    library = java.bazel_library(
        service=service,
        version=version,
        bazel_target=f'//google/cloud/{service}/{version}:google-cloud-{service}-{version}-java',
    )

# BUG FIX: the final statement had table-extraction residue
# (' | 31.69697 | 95 | 0.744742 |') fused onto it, which made the file
# a SyntaxError.  Restored to the plain call.
java.common_templates()
055338e9675b5cb9abe475052ccf8e7f8d3ca293 | 2,080 | py | Python | daemon/api/endpoints/pods.py | hantwain/jina | 55d900d42f17211f3d6c7792fd36b937e4f1ef44 | [
"Apache-2.0"
] | 1 | 2022-02-09T14:14:06.000Z | 2022-02-09T14:14:06.000Z | daemon/api/endpoints/pods.py | hantwain/jina | 55d900d42f17211f3d6c7792fd36b937e4f1ef44 | [
"Apache-2.0"
] | 1 | 2022-03-08T18:46:28.000Z | 2022-03-08T18:47:24.000Z | daemon/api/endpoints/pods.py | hantwain/jina | 55d900d42f17211f3d6c7792fd36b937e4f1ef44 | [
"Apache-2.0"
] | 1 | 2022-03-17T04:50:07.000Z | 2022-03-17T04:50:07.000Z | from fastapi import Depends, APIRouter, HTTPException
from daemon import Runtime400Exception
from daemon.api.dependencies import PodDepends
from daemon.models import DaemonID, ContainerItem, ContainerStoreStatus, PodModel
from daemon.stores import pod_store as store
# All pod-management endpoints below are mounted under the /pods prefix.
router = APIRouter(prefix='/pods', tags=['pods'])
@router.get(
    path='', summary='Get all alive Pod\' status', response_model=ContainerStoreStatus
)
async def _get_items():
    # GET /pods: dump the status of every Pod tracked by the store.
    # (No docstring on purpose: FastAPI would surface it in the OpenAPI docs.)
    return store.status
@router.get(
    path='/arguments',
    summary='Get all accepted arguments of a Pod',
)
async def _fetch_pod_params():
    # Expose PodModel's JSON-schema properties so clients can discover
    # which arguments a Pod accepts.
    return PodModel.schema()['properties']
@router.post(
    path='',
    summary='Create a Pod',
    description='Create a Pod and add it to the store',
    status_code=201,
    response_model=DaemonID,
)
async def _create(pod: PodDepends = Depends(PodDepends)):
    # POST /pods: request validation happens in PodDepends; here we only
    # register the Pod with the store.
    try:
        return await store.add(
            id=pod.id,
            workspace_id=pod.workspace_id,
            params=pod.params,
            ports=pod.ports,
            envs=pod.envs,
            device_requests=pod.device_requests,
        )
    except Exception as ex:
        # Surface any failure as a uniform 400 response, preserving the
        # original exception as the cause.
        raise Runtime400Exception from ex
# order matters! this must be put in front of del {id}
# https://fastapi.tiangolo.com/tutorial/path-params/?h=+path#order-matters
@router.delete(
    path='',
    summary='Terminate all running Pods',
)
async def _clear_all():
    # DELETE /pods: terminate every Pod and release their resources.
    await store.clear()
@router.delete(
    path='/{id}',
    summary='Terminate a running Pod',
    description='Terminate a running Pod and release its resources',
)
async def _delete(id: DaemonID, workspace: bool = False):
    # DELETE /pods/{id}: `workspace=True` also removes the associated
    # workspace.  Unknown ids map to a 404.
    try:
        await store.delete(id=id, workspace=workspace)
    except KeyError:
        raise HTTPException(status_code=404, detail=f'{id} not found in store')
@router.get(
    path='/{id}', summary='Get status of a running Pod', response_model=ContainerItem
)
async def _status(id: DaemonID):
    # GET /pods/{id}: look the Pod up in the store; 404 if unknown.
    try:
        return store[id]
    except KeyError:
        raise HTTPException(status_code=404, detail=f'{id} not found in pod store')
| 27.012987 | 86 | 0.686058 |
56ae98fb0a71424f5b8400b5617209638add42b5 | 6,688 | py | Python | Products/GSContentManager/interfaces.py | groupserver/Products.GSContentManager | 2fd8d708f378eb1bde4a7331f724f4c9a3b974ae | [
"ZPL-2.1"
] | null | null | null | Products/GSContentManager/interfaces.py | groupserver/Products.GSContentManager | 2fd8d708f378eb1bde4a7331f724f4c9a3b974ae | [
"ZPL-2.1"
] | null | null | null | Products/GSContentManager/interfaces.py | groupserver/Products.GSContentManager | 2fd8d708f378eb1bde4a7331f724f4c9a3b974ae | [
"ZPL-2.1"
] | null | null | null | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright © 2013 E-Democracy.org and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
from __future__ import unicode_literals
from gs.core import to_ascii
from zope.interface.interface import Interface
from zope.schema import (ASCIILine, Bool, Choice, Datetime, Dict, Text,
TextLine)
from zope.schema.vocabulary import SimpleVocabulary, SimpleTerm
from zope.contentprovider.interfaces import IContentProvider
class IGSContentPage(Interface):
    """Marker interface for a content page."""
    pass
class IGSDataTemplate(Interface):
    """Marker interface for a data template."""
    pass
class IGSContentManagerFolderMarker(Interface):
    """Marker interface for folders managed by the content manager."""
    pass
class IGSContentPageVersion(Interface):
    """ Schema for a content page """
    # Identifier of this particular saved version of the page.
    id = ASCIILine(title='Identifier', # lint:ok
        description='The identifier of the version.',
        required=True)

    title = TextLine(title='Title',
        description='The title of the page, which will appear in '
            'the title bar of the browser.',
        required=True)

    content = Text(title='Content',
        description='The content of this page.',
        required=False)

    # Published versions are the ones shown to visitors by default.
    published = Bool(title='Publish',
        description='If you publish the change it will be shown '
            'to people by default.',
        required=True,
        default=True)

    editor = TextLine(title='Editor ID',
        description='The identifier of the user who last edited '
            'this Page',
        required=False,
        default='')

    # Links this version to the version it was derived from, forming
    # the page's version history chain.
    parentVersion = ASCIILine(title='Parent Version ID',
        description='The identifier of the page version that this '
            'version was based on.',
        required=False,
        default=to_ascii(''))

    creationDate = Datetime(title='Creation Date',
        description='The date that the version was created',
        required=False)
class IGSContentPageHistory(Interface):
    """Marker interface for the version history of a content page.
    """
class IGSMangePages(Interface):
    """Schema for the page-management forms: create, copy, rename, move.

    NOTE(review): the name looks like a typo for ``IGSManagePages``, but
    it is a public interface name and cannot be renamed safely here.
    """
    pageId = ASCIILine(title='Page Identifier',
        description='The identifier for the new page. No spaces '
            'are allowed.',
        required=False)

    title = TextLine(title='Title',
        description='The title of the page. This will appear at'
            'the top of the page and in the title bar of the browser.',
        required=False)

    # --- Fields for the "copy page" form ---
    newPageId = ASCIILine(title='New Page Identifier',
        description='The identifier the page should have after it '
            'has been copied. No spaces are allowed.',
        required=False)

    copyDestination = ASCIILine(title='Destination',
        description='Where the page should be copied to.',
        required=False)

    # --- Fields for the "rename page" and "move page" forms ---
    renamedPageId = ASCIILine(title='New Page Identifier',
        description='The new identifier for the page. No spaces '
            'are allowed.',
        required=False)

    moveDestination = ASCIILine(title='Destination',
        description='Where the page should be moved to.',
        required=False)
# Vocabulary terms describing who may view or change a page.
anyone = SimpleTerm(
    'anyone', 'anyone',
    'Anyone, including those that are not logged in.')
members = SimpleTerm(
    'members', 'members',
    'Only logged in members.')
administrators = SimpleTerm(
    'administrators', 'administrators',
    'Only administrators.'
)
# Viewing may be opened up to anonymous visitors; changing never is.
viewLevels = SimpleVocabulary([anyone, members, administrators])
changeLevels = SimpleVocabulary([members, administrators])
class IGSChangePagePrivacy(Interface):
    """Schema for the form that sets who can view or change a page."""
    view = Choice(title='View the page',
        description='Which group of users can view the page.',
        required=True,
        vocabulary=viewLevels)

    change = Choice(title='Change the page',
        description='Which group of users can change the page.',
        required=True,
        vocabulary=changeLevels)
class IGSContentManagerContextMenuContentProvider(IContentProvider):
    """The content provider for the context menu"""
    # ZPT file used to render the menu; override to change the markup.
    pageTemplateFileName = Text(title="Page Template File Name",
        description='The name of the ZPT file that is used to render the '
            'menu.',
        required=False,
        default="browser/templates/profileContextMenu.pt")
class IGSPageTreeContentProvider(IContentProvider):
    """The content provider for the page tree"""
    # Prefix applied to every generated tree-node DOM identifier.
    treeIdPrefix = TextLine(title='Tree Identifier Prefix',
        description='The text that is appended to the start of all '
            'tree-node identifiers.',
        default='tree-'
    )
class IGSContentPageHistoryContentProvider(IContentProvider):
    """The content provider for the page history """
    pageTemplateFileName = Text(title="Page Template File Name",
        description='The name of the ZPT file that is used to render the '
            'history',
        required=False,
        default="browser/templates/page_history.pt")

    changedVersion = ASCIILine(title='Changed Version',
        description='The identifier of the version that is being '
            'changed',
        required=False)

    # When True, each history entry is rendered with a "change" link.
    showChange = Bool(title='Show Changed',
        description='True if the "change" links are shown in the '
            'history.',
        default=False)

    # startId/endId bound the range of versions displayed.
    startId = ASCIILine(title='Start Identifier',
        description='The identifier for the page at the start of '
            'the history range.',
        required=False,
        default=None)

    endId = ASCIILine(title='End Identifier',
        description='The identifier for the page at the end of '
            'the history range.',
        required=False,
        default=None)
class IGSContentManagerTabMenuContentProvider(IContentProvider):
    """The content provider for the tab menu"""
    pageTemplateFileName = Text(title="Page Template File Name",
        description='The name of the ZPT file that is used to render '
            'the menu.',
        required=False,
        default="browser/templates/tabmenu.pt")

    # Mapping of the pages shown as tabs in the profile context.
    pages = Dict(title='Pages in the Profile',
        description='The pages that are in the context of the profile.')
class IGSContentPagePrivacyContentProvider(IContentProvider):
    """The content provider for the page-privacy display"""
    pageTemplateFileName = Text(title="Page Template File Name",
        description='The name of the ZPT file that is used to render '
            'the privacy.',
        required=False,
        default="browser/templates/privacy.pt")
| 32 | 78 | 0.672249 |
a736a7da621bb35d6ef7c0de9a23ce78a15c3e9d | 1,020 | py | Python | src/python/WMComponent/DBS3Buffer/MySQL/DBSBufferFiles/GetLocation.py | khurtado/WMCore | f74e252412e49189a92962945a94f93bec81cd1e | [
"Apache-2.0"
] | 21 | 2015-11-19T16:18:45.000Z | 2021-12-02T18:20:39.000Z | src/python/WMComponent/DBS3Buffer/MySQL/DBSBufferFiles/GetLocation.py | khurtado/WMCore | f74e252412e49189a92962945a94f93bec81cd1e | [
"Apache-2.0"
] | 5,671 | 2015-01-06T14:38:52.000Z | 2022-03-31T22:11:14.000Z | src/python/WMComponent/DBS3Buffer/MySQL/DBSBufferFiles/GetLocation.py | khurtado/WMCore | f74e252412e49189a92962945a94f93bec81cd1e | [
"Apache-2.0"
] | 67 | 2015-01-21T15:55:38.000Z | 2022-02-03T19:53:13.000Z | #!/usr/bin/env python
"""
_GetLocation_
MySQL implementation of File.GetLocation
"""
from WMCore.Database.DBFormatter import DBFormatter
class GetLocation(DBFormatter):
    """Look up the PNN location(s) of DBSBuffer files by LFN."""

    sql = """select pnn from dbsbuffer_location
               where id in (select location from dbsbuffer_file_location
                 where filename in (select id from dbsbuffer_file where lfn=:lfn))"""

    def getBinds(self, file=None):
        """Build one ``{'lfn': ...}`` bind dictionary per input LFN."""
        return [{'lfn': lfn} for lfn in self.dbi.makelist(file)]

    def format(self, result):
        """Collapse all result sets into a set of unique PNN values."""
        return {row[0] for res in result for row in res.fetchall()}

    def execute(self, file=None, conn=None, transaction=False):
        """Run the location query for the given LFN(s); return their PNNs."""
        binds = self.getBinds(file)
        resultsets = self.dbi.processData(self.sql, binds,
                                          conn=conn, transaction=transaction)
        return self.format(resultsets)
| 25.5 | 88 | 0.592157 |
f252ba9bf9ec410936a8b37860702f66963062d5 | 36,332 | py | Python | tensorflow/python/debug/wrappers/local_cli_wrapper_test.py | abhaikollara/tensorflow | 4f96df3659696990cb34d0ad07dc67843c4225a9 | [
"Apache-2.0"
] | 56 | 2018-06-21T13:47:23.000Z | 2020-05-13T09:31:47.000Z | tensorflow/python/debug/wrappers/local_cli_wrapper_test.py | abhaikollara/tensorflow | 4f96df3659696990cb34d0ad07dc67843c4225a9 | [
"Apache-2.0"
] | 6 | 2022-01-15T07:17:47.000Z | 2022-02-14T15:28:22.000Z | tensorflow/python/debug/wrappers/local_cli_wrapper_test.py | abhaikollara/tensorflow | 4f96df3659696990cb34d0ad07dc67843c4225a9 | [
"Apache-2.0"
] | 15 | 2018-09-06T14:18:32.000Z | 2020-05-14T06:35:30.000Z | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Unit tests for local command-line-interface debug wrapper session."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tempfile
import numpy as np
from tensorflow.python.debug.cli import cli_config
from tensorflow.core.protobuf import config_pb2
from tensorflow.core.protobuf import rewriter_config_pb2
from tensorflow.python.client import session
from tensorflow.python.debug.cli import cli_shared
from tensorflow.python.debug.cli import debugger_cli_common
from tensorflow.python.debug.cli import ui_factory
from tensorflow.python.debug.wrappers import local_cli_wrapper
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.lib.io import file_io
from tensorflow.python.keras import backend
from tensorflow.python.keras.engine import sequential
from tensorflow.python.keras.layers import core
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
# Import resource_variable_ops for the variables-to-tensor implicit conversion.
from tensorflow.python.ops import resource_variable_ops # pylint: disable=unused-import
from tensorflow.python.ops import sparse_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import googletest
from tensorflow.python.training import monitored_session
from tensorflow.python.training import session_run_hook
class LocalCLIDebuggerWrapperSessionForTest(
    local_cli_wrapper.LocalCLIDebugWrapperSession):
  """Subclasses the wrapper class for testing.

  Overrides its CLI-related methods for headless testing environments.
  Inserts observer variables for assertions.
  """

  def __init__(self,
               command_sequence,
               sess,
               dump_root=None):
    """Constructor of the for-test subclass.

    Args:
      command_sequence: (list of list of str) A list of command arguments,
        including the command prefix, each element of the list is such as:
        ["run", "-n"],
        ["print_feed", "input:0"].
      sess: See the doc string of LocalCLIDebugWrapperSession.__init__.
      dump_root: See the doc string of LocalCLIDebugWrapperSession.__init__.
    """
    local_cli_wrapper.LocalCLIDebugWrapperSession.__init__(
        self, sess, dump_root=dump_root, log_usage=False)

    self._command_sequence = command_sequence
    self._command_pointer = 0

    # Observer variables: populated by the overridden methods below so
    # tests can assert on what the CLI would have shown.
    self.observers = {
        "debug_dumps": [],
        "tf_errors": [],
        "run_start_cli_run_numbers": [],
        "run_end_cli_run_numbers": [],
        "print_feed_responses": [],
        "profiler_py_graphs": [],
        "profiler_run_metadata": [],
    }

  def _prep_cli_for_run_start(self):
    # Headless test environment: skip building the run-start CLI.
    pass

  def _prep_debug_cli_for_run_end(self,
                                  debug_dump,
                                  tf_error,
                                  passed_filter,
                                  passed_filter_exclude_op_names):
    # Record what the run-end CLI would display instead of rendering it.
    self.observers["debug_dumps"].append(debug_dump)
    self.observers["tf_errors"].append(tf_error)

  def _prep_profile_cli_for_run_end(self, py_graph, run_metadata):
    self.observers["profiler_py_graphs"].append(py_graph)
    self.observers["profiler_run_metadata"].append(run_metadata)

  def _launch_cli(self):
    # Track on which run() calls the CLI would have been launched.
    if self._is_run_start:
      self.observers["run_start_cli_run_numbers"].append(self._run_call_count)
    else:
      self.observers["run_end_cli_run_numbers"].append(self._run_call_count)

    # Use the readline UI so no curses-capable terminal is required.
    readline_cli = ui_factory.get_ui(
        "readline",
        config=cli_config.CLIConfig(
            config_file_path=os.path.join(tempfile.mkdtemp(), ".tfdbg_config")))
    self._register_this_run_info(readline_cli)

    # Replay the scripted command sequence instead of reading user input.
    while True:
      command = self._command_sequence[self._command_pointer]
      self._command_pointer += 1

      try:
        if command[0] == "run":
          self._run_handler(command[1:])
        elif command[0] == "print_feed":
          self.observers["print_feed_responses"].append(
              self._print_feed_handler(command[1:]))
        else:
          raise ValueError("Unrecognized command prefix: %s" % command[0])
      except debugger_cli_common.CommandLineExit as e:
        return e.exit_token
@test_util.run_v1_only("b/120545219")
class LocalCLIDebugWrapperSessionTest(test_util.TensorFlowTestCase):
def setUp(self):
  """Build a small graph with variables, placeholders and ops for tests."""
  self._tmp_dir = tempfile.mktemp()

  self.v = variables.VariableV1(10.0, name="v")
  self.w = variables.VariableV1(21.0, name="w")
  self.delta = constant_op.constant(1.0, name="delta")
  self.inc_v = state_ops.assign_add(self.v, self.delta, name="inc_v")

  # w_int depends on inc_v, so evaluating it also increments v.
  self.w_int = control_flow_ops.with_dependencies(
      [self.inc_v],
      math_ops.cast(self.w, dtypes.int32, name="w_int_inner"),
      name="w_int_outer")

  self.ph = array_ops.placeholder(dtypes.float32, name="ph")
  self.xph = array_ops.transpose(self.ph, name="xph")
  self.m = constant_op.constant(
      [[0.0, 1.0, 2.0], [-4.0, -1.0, 0.0]], dtype=dtypes.float32, name="m")
  self.y = math_ops.matmul(self.m, self.xph, name="y")

  self.sparse_ph = array_ops.sparse_placeholder(
      dtypes.float32, shape=([5, 5]), name="sparse_placeholder")
  self.sparse_add = sparse_ops.sparse_add(self.sparse_ph, self.sparse_ph)

  # Disable grappler rewrites so the dumped graph matches what was built.
  rewriter_config = rewriter_config_pb2.RewriterConfig(
      disable_model_pruning=True,
      arithmetic_optimization=rewriter_config_pb2.RewriterConfig.OFF,
      dependency_optimization=rewriter_config_pb2.RewriterConfig.OFF)
  graph_options = config_pb2.GraphOptions(rewrite_options=rewriter_config)
  config_proto = config_pb2.ConfigProto(graph_options=graph_options)
  self.sess = session.Session(config=config_proto)

  # Initialize variable.
  self.sess.run(variables.global_variables_initializer())
def tearDown(self):
  """Reset the default graph and remove any dump directory left behind."""
  ops.reset_default_graph()
  if os.path.isdir(self._tmp_dir):
    file_io.delete_recursively(self._tmp_dir)
def testConstructWrapper(self):
  """Constructing the wrapper without a dump root should succeed."""
  local_cli_wrapper.LocalCLIDebugWrapperSession(
      session.Session(), log_usage=False)
def testConstructWrapperWithExistingEmptyDumpRoot(self):
  """An existing but empty dump-root directory is accepted."""
  os.mkdir(self._tmp_dir)
  self.assertTrue(os.path.isdir(self._tmp_dir))

  local_cli_wrapper.LocalCLIDebugWrapperSession(
      session.Session(), dump_root=self._tmp_dir, log_usage=False)
def testConstructWrapperWithExistingNonEmptyDumpRoot(self):
  """A non-empty dump-root directory must be rejected with ValueError."""
  os.mkdir(self._tmp_dir)
  dir_path = os.path.join(self._tmp_dir, "foo")
  os.mkdir(dir_path)
  self.assertTrue(os.path.isdir(dir_path))

  with self.assertRaisesRegexp(
      ValueError, "dump_root path points to a non-empty directory"):
    local_cli_wrapper.LocalCLIDebugWrapperSession(
        session.Session(), dump_root=self._tmp_dir, log_usage=False)
def testConstructWrapperWithExistingFileDumpRoot(self):
  """A dump root that is a regular file must be rejected with ValueError."""
  os.mkdir(self._tmp_dir)
  file_path = os.path.join(self._tmp_dir, "foo")
  open(file_path, "a").close()  # Create the file
  self.assertTrue(os.path.isfile(file_path))
  with self.assertRaisesRegexp(ValueError, "dump_root path points to a file"):
    local_cli_wrapper.LocalCLIDebugWrapperSession(
        session.Session(), dump_root=file_path, log_usage=False)
def testRunsUnderDebugMode(self):
  """Two debug-mode runs should dump tensors and launch the run-end CLI."""
  wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
      [["run"], ["run"], ["run"]], self.sess, dump_root=self._tmp_dir)

  # run under debug mode twice.
  wrapped_sess.run(self.inc_v)
  wrapped_sess.run(self.inc_v)

  # Verify that the assign_add op did take effect.
  self.assertAllClose(12.0, self.sess.run(self.v))

  # Assert correct run call numbers for which the CLI has been launched at
  # run-start and run-end.
  self.assertEqual([1], wrapped_sess.observers["run_start_cli_run_numbers"])
  self.assertEqual([1, 2], wrapped_sess.observers["run_end_cli_run_numbers"])

  # Verify that the dumps have been generated and picked up during run-end.
  self.assertEqual(2, len(wrapped_sess.observers["debug_dumps"]))

  # Verify that the TensorFlow runtime errors are picked up and in this case,
  # they should be both None.
  self.assertEqual([None, None], wrapped_sess.observers["tf_errors"])
def testRunsWithEmptyStringDumpRootWorks(self):
  """An empty-string dump root should be treated as a valid default."""
  wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
      [["run"], ["run"]], self.sess, dump_root="")

  # run under debug mode.
  wrapped_sess.run(self.inc_v)

  self.assertAllClose(11.0, self.sess.run(self.v))
def testRunInfoOutputAtRunEndIsCorrect(self):
  """run_info should show the tfdbg logo only on the first run() call."""
  wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
      [["run"], ["run"], ["run"]], self.sess, dump_root=self._tmp_dir)

  wrapped_sess.run(self.inc_v)
  run_info_output = wrapped_sess._run_info_handler([])

  tfdbg_logo = cli_shared.get_tfdbg_logo()

  # The run_info output in the first run() call should contain the tfdbg logo.
  self.assertEqual(tfdbg_logo.lines,
                   run_info_output.lines[:len(tfdbg_logo.lines)])
  menu = run_info_output.annotations[debugger_cli_common.MAIN_MENU_KEY]
  self.assertIn("list_tensors", menu.captions())

  wrapped_sess.run(self.inc_v)
  run_info_output = wrapped_sess._run_info_handler([])

  # The run_info output in the second run() call should NOT contain the logo.
  self.assertNotEqual(tfdbg_logo.lines,
                      run_info_output.lines[:len(tfdbg_logo.lines)])
  menu = run_info_output.annotations[debugger_cli_common.MAIN_MENU_KEY]
  self.assertIn("list_tensors", menu.captions())
def testRunsUnderNonDebugMode(self):
  """'run -n' should execute without dumping or launching a run-end CLI."""
  wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
      [["run", "-n"], ["run", "-n"], ["run", "-n"]],
      self.sess, dump_root=self._tmp_dir)

  # run three times.
  wrapped_sess.run(self.inc_v)
  wrapped_sess.run(self.inc_v)
  wrapped_sess.run(self.inc_v)

  self.assertAllClose(13.0, self.sess.run(self.v))

  self.assertEqual([1, 2, 3],
                   wrapped_sess.observers["run_start_cli_run_numbers"])
  self.assertEqual([], wrapped_sess.observers["run_end_cli_run_numbers"])
def testRunningWithSparsePlaceholderFeedWorks(self):
  """Feeding a SparseTensor value through the wrapper should work."""
  wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
      [["run"], ["run"]], self.sess, dump_root=self._tmp_dir)

  sparse_feed = ([[0, 1], [0, 2]], [10.0, 20.0])
  sparse_result = wrapped_sess.run(
      self.sparse_add, feed_dict={self.sparse_ph: sparse_feed})
  self.assertAllEqual([[0, 1], [0, 2]], sparse_result.indices)
  self.assertAllClose([20.0, 40.0], sparse_result.values)
def testRunsUnderNonDebugThenDebugMode(self):
  """Mixing non-debug and debug runs: CLI launches only for debug runs."""
  # Do two NON_DEBUG_RUNs, followed by DEBUG_RUNs.
  wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
      [["run", "-n"], ["run", "-n"], ["run"], ["run"]],
      self.sess, dump_root=self._tmp_dir)

  # run three times.
  wrapped_sess.run(self.inc_v)
  wrapped_sess.run(self.inc_v)
  wrapped_sess.run(self.inc_v)

  self.assertAllClose(13.0, self.sess.run(self.v))

  self.assertEqual([1, 2, 3],
                   wrapped_sess.observers["run_start_cli_run_numbers"])

  # Here, the CLI should have been launched only under the third run,
  # because the first and second runs are NON_DEBUG.
  self.assertEqual([3], wrapped_sess.observers["run_end_cli_run_numbers"])
  self.assertEqual(1, len(wrapped_sess.observers["debug_dumps"]))
  self.assertEqual([None], wrapped_sess.observers["tf_errors"])
def testRunMultipleTimesWithinLimit(self):
  """'run -t 3' should skip the CLI until the third run() call."""
  wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
      [["run", "-t", "3"], ["run"]],
      self.sess, dump_root=self._tmp_dir)

  # run three times.
  wrapped_sess.run(self.inc_v)
  wrapped_sess.run(self.inc_v)
  wrapped_sess.run(self.inc_v)

  self.assertAllClose(13.0, self.sess.run(self.v))

  self.assertEqual([1], wrapped_sess.observers["run_start_cli_run_numbers"])
  self.assertEqual([3], wrapped_sess.observers["run_end_cli_run_numbers"])
  self.assertEqual(1, len(wrapped_sess.observers["debug_dumps"]))
  self.assertEqual([None], wrapped_sess.observers["tf_errors"])
def testRunMultipleTimesOverLimit(self):
  """Fewer run() calls than 'run -t N' asked for: no CLI, no dumps."""
  wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
      [["run", "-t", "3"]], self.sess, dump_root=self._tmp_dir)

  # run twice, which is less than the number of times specified by the
  # command.
  wrapped_sess.run(self.inc_v)
  wrapped_sess.run(self.inc_v)

  self.assertAllClose(12.0, self.sess.run(self.v))

  self.assertEqual([1], wrapped_sess.observers["run_start_cli_run_numbers"])
  self.assertEqual([], wrapped_sess.observers["run_end_cli_run_numbers"])
  self.assertEqual(0, len(wrapped_sess.observers["debug_dumps"]))
  self.assertEqual([], wrapped_sess.observers["tf_errors"])
def testRunMixingDebugModeAndMultpleTimes(self):
  """Combining 'run -n', 'run -t 2' and plain 'run' commands works."""
  wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
      [["run", "-n"], ["run", "-t", "2"], ["run"], ["run"]],
      self.sess, dump_root=self._tmp_dir)

  # run four times.
  wrapped_sess.run(self.inc_v)
  wrapped_sess.run(self.inc_v)
  wrapped_sess.run(self.inc_v)
  wrapped_sess.run(self.inc_v)

  self.assertAllClose(14.0, self.sess.run(self.v))

  self.assertEqual([1, 2],
                   wrapped_sess.observers["run_start_cli_run_numbers"])
  self.assertEqual([3, 4], wrapped_sess.observers["run_end_cli_run_numbers"])
  self.assertEqual(2, len(wrapped_sess.observers["debug_dumps"]))
  self.assertEqual([None, None], wrapped_sess.observers["tf_errors"])
  def testDebuggingMakeCallableTensorRunnerWorks(self):
    """Callables from make_callable() should be debuggable like run()."""
    wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
        [["run"], ["run"]], self.sess, dump_root=self._tmp_dir)
    v = variables.VariableV1(42)
    tensor_runner = wrapped_sess.make_callable(v)
    self.sess.run(v.initializer)
    self.assertAllClose(42, tensor_runner())
    # The callable invocation went through the debug wrapper exactly once.
    self.assertEqual(1, len(wrapped_sess.observers["debug_dumps"]))
def testDebuggingMakeCallableTensorRunnerWithCustomRunOptionsWorks(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run"], ["run"]], self.sess, dump_root=self._tmp_dir)
a = constant_op.constant(42)
tensor_runner = wrapped_sess.make_callable(a)
run_options = config_pb2.RunOptions(
trace_level=config_pb2.RunOptions.FULL_TRACE)
run_metadata = config_pb2.RunMetadata()
self.assertAllClose(
42, tensor_runner(options=run_options, run_metadata=run_metadata))
self.assertEqual(1, len(wrapped_sess.observers["debug_dumps"]))
self.assertGreater(len(run_metadata.step_stats.dev_stats), 0)
def testDebuggingMakeCallableOperationRunnerWorks(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run"], ["run"]], self.sess, dump_root=self._tmp_dir)
v = variables.VariableV1(10.0)
inc_v = state_ops.assign_add(v, 1.0)
op_runner = wrapped_sess.make_callable(inc_v.op)
self.sess.run(v.initializer)
op_runner()
self.assertEqual(1, len(wrapped_sess.observers["debug_dumps"]))
self.assertEqual(11.0, self.sess.run(v))
def testDebuggingMakeCallableRunnerWithFeedListWorks(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run"], ["run"]], self.sess, dump_root=self._tmp_dir)
ph1 = array_ops.placeholder(dtypes.float32)
ph2 = array_ops.placeholder(dtypes.float32)
a = math_ops.add(ph1, ph2)
tensor_runner = wrapped_sess.make_callable(a, feed_list=[ph1, ph2])
self.assertAllClose(42.0, tensor_runner(41.0, 1.0))
self.assertEqual(1, len(wrapped_sess.observers["debug_dumps"]))
def testDebuggingMakeCallableFromOptionsWithZeroFeedWorks(self):
variable_1 = variables.VariableV1(
10.5, dtype=dtypes.float32, name="variable_1")
a = math_ops.add(variable_1, variable_1, "callable_a")
math_ops.add(a, a, "callable_b")
self.sess.run(variable_1.initializer)
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run"]] * 3, self.sess, dump_root=self._tmp_dir)
callable_options = config_pb2.CallableOptions()
callable_options.fetch.append("callable_b")
sess_callable = wrapped_sess._make_callable_from_options(callable_options)
for _ in range(2):
callable_output = sess_callable()
self.assertAllClose(np.array(42.0, dtype=np.float32), callable_output[0])
debug_dumps = wrapped_sess.observers["debug_dumps"]
self.assertEqual(2, len(debug_dumps))
for debug_dump in debug_dumps:
node_names = [datum.node_name for datum in debug_dump.dumped_tensor_data]
self.assertItemsEqual(
["callable_a", "callable_b", "variable_1", "variable_1/read"],
node_names)
def testDebuggingMakeCallableFromOptionsWithOneFeedWorks(self):
ph1 = array_ops.placeholder(dtypes.float32, name="callable_ph1")
a = math_ops.add(ph1, ph1, "callable_a")
math_ops.add(a, a, "callable_b")
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run"]] * 3, self.sess, dump_root=self._tmp_dir)
callable_options = config_pb2.CallableOptions()
callable_options.feed.append("callable_ph1")
callable_options.fetch.append("callable_b")
sess_callable = wrapped_sess._make_callable_from_options(callable_options)
ph1_value = np.array([10.5, -10.5], dtype=np.float32)
for _ in range(2):
callable_output = sess_callable(ph1_value)
self.assertAllClose(
np.array([42.0, -42.0], dtype=np.float32), callable_output[0])
debug_dumps = wrapped_sess.observers["debug_dumps"]
self.assertEqual(2, len(debug_dumps))
for debug_dump in debug_dumps:
node_names = [datum.node_name for datum in debug_dump.dumped_tensor_data]
self.assertIn("callable_a", node_names)
self.assertIn("callable_b", node_names)
def testDebuggingMakeCallableFromOptionsWithTwoFeedsWorks(self):
ph1 = array_ops.placeholder(dtypes.float32, name="callable_ph1")
ph2 = array_ops.placeholder(dtypes.float32, name="callable_ph2")
a = math_ops.add(ph1, ph2, "callable_a")
math_ops.add(a, a, "callable_b")
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run"]] * 3, self.sess, dump_root=self._tmp_dir)
callable_options = config_pb2.CallableOptions()
callable_options.feed.append("callable_ph1")
callable_options.feed.append("callable_ph2")
callable_options.fetch.append("callable_b")
sess_callable = wrapped_sess._make_callable_from_options(callable_options)
ph1_value = np.array(5.0, dtype=np.float32)
ph2_value = np.array(16.0, dtype=np.float32)
for _ in range(2):
callable_output = sess_callable(ph1_value, ph2_value)
self.assertAllClose(np.array(42.0, dtype=np.float32), callable_output[0])
debug_dumps = wrapped_sess.observers["debug_dumps"]
self.assertEqual(2, len(debug_dumps))
for debug_dump in debug_dumps:
node_names = [datum.node_name for datum in debug_dump.dumped_tensor_data]
self.assertIn("callable_a", node_names)
self.assertIn("callable_b", node_names)
def testDebugMakeCallableFromOptionsWithCustomOptionsAndMetadataWorks(self):
variable_1 = variables.VariableV1(
10.5, dtype=dtypes.float32, name="variable_1")
a = math_ops.add(variable_1, variable_1, "callable_a")
math_ops.add(a, a, "callable_b")
self.sess.run(variable_1.initializer)
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run"], ["run"]], self.sess, dump_root=self._tmp_dir)
callable_options = config_pb2.CallableOptions()
callable_options.fetch.append("callable_b")
callable_options.run_options.trace_level = config_pb2.RunOptions.FULL_TRACE
sess_callable = wrapped_sess._make_callable_from_options(callable_options)
run_metadata = config_pb2.RunMetadata()
# Call the callable with a custom run_metadata.
callable_output = sess_callable(run_metadata=run_metadata)
# Verify that step_stats is populated in the custom run_metadata.
self.assertTrue(run_metadata.step_stats)
self.assertAllClose(np.array(42.0, dtype=np.float32), callable_output[0])
debug_dumps = wrapped_sess.observers["debug_dumps"]
self.assertEqual(1, len(debug_dumps))
debug_dump = debug_dumps[0]
node_names = [datum.node_name for datum in debug_dump.dumped_tensor_data]
self.assertItemsEqual(
["callable_a", "callable_b", "variable_1", "variable_1/read"],
node_names)
  def testRuntimeErrorShouldBeCaught(self):
    """A TF runtime error during run() is caught and handed to the CLI."""
    wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
        [["run"], ["run"]], self.sess, dump_root=self._tmp_dir)
    # Do a run that should lead to an TensorFlow runtime error.
    # (self.ph / self.y come from setUp, outside this view; the 3x1 feed is
    # presumably shape-incompatible with op "y" -- confirm against setUp.)
    wrapped_sess.run(self.y, feed_dict={self.ph: [[0.0], [1.0], [2.0]]})
    self.assertEqual([1], wrapped_sess.observers["run_start_cli_run_numbers"])
    self.assertEqual([1], wrapped_sess.observers["run_end_cli_run_numbers"])
    self.assertEqual(1, len(wrapped_sess.observers["debug_dumps"]))
    # Verify that the runtime error is caught by the wrapped session properly.
    self.assertEqual(1, len(wrapped_sess.observers["tf_errors"]))
    tf_error = wrapped_sess.observers["tf_errors"][0]
    self.assertEqual("y", tf_error.op.name)
  def testRuntimeErrorBeforeGraphExecutionIsRaised(self):
    """Errors raised before graph execution propagate to the caller,
    unlike runtime errors, which the wrapper catches and records."""
    # Use an impossible device name to cause an error before graph execution.
    with ops.device("/device:GPU:1337"):
      w = variables.VariableV1([1.0] * 10, name="w")
    wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
        [["run"]], self.sess, dump_root=self._tmp_dir)
    with self.assertRaisesRegexp(errors.OpError, r".*[Dd]evice.*1337.*"):
      wrapped_sess.run(w)
  def testRunTillFilterPassesShouldLaunchCLIAtCorrectRun(self):
    """`run -f <filter>` keeps running until a dumped tensor passes the
    filter, and only then brings up the run-end CLI."""
    wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
        [["run", "-f", "v_greater_than_twelve"],
         ["run", "-f", "v_greater_than_twelve"],
         ["run"]],
        self.sess,
        dump_root=self._tmp_dir)
    def v_greater_than_twelve(datum, tensor):
      # Tensor filter: fires once variable "v" exceeds 12.0.
      return datum.node_name == "v" and tensor > 12.0
    # Verify that adding the same tensor filter more than once is tolerated
    # (i.e., as if it were added only once).
    wrapped_sess.add_tensor_filter("v_greater_than_twelve",
                                   v_greater_than_twelve)
    wrapped_sess.add_tensor_filter("v_greater_than_twelve",
                                   v_greater_than_twelve)
    # run five times.
    wrapped_sess.run(self.inc_v)
    wrapped_sess.run(self.inc_v)
    wrapped_sess.run(self.inc_v)
    wrapped_sess.run(self.inc_v)
    wrapped_sess.run(self.inc_v)
    self.assertAllClose(15.0, self.sess.run(self.v))
    self.assertEqual([1], wrapped_sess.observers["run_start_cli_run_numbers"])
    # run-end CLI should NOT have been launched for run #2 and #3, because only
    # starting from run #4 v becomes greater than 12.0.
    self.assertEqual([4, 5], wrapped_sess.observers["run_end_cli_run_numbers"])
    self.assertEqual(2, len(wrapped_sess.observers["debug_dumps"]))
    self.assertEqual([None, None], wrapped_sess.observers["tf_errors"])
def testRunTillFilterPassesWithExcludeOpNames(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run", "-f", "greater_than_twelve",
"--filter_exclude_node_names", "inc_v.*"],
["run"], ["run"]],
self.sess,
dump_root=self._tmp_dir)
def greater_than_twelve(datum, tensor):
del datum # Unused.
return tensor > 12.0
# Verify that adding the same tensor filter more than once is tolerated
# (i.e., as if it were added only once).
wrapped_sess.add_tensor_filter("greater_than_twelve", greater_than_twelve)
# run five times.
wrapped_sess.run(self.inc_v)
wrapped_sess.run(self.inc_v)
wrapped_sess.run(self.inc_v)
wrapped_sess.run(self.inc_v)
self.assertAllClose(14.0, self.sess.run(self.v))
self.assertEqual([1], wrapped_sess.observers["run_start_cli_run_numbers"])
# Due to the --filter_exclude_op_names flag, the run-end CLI should show up
# not after run 3, but after run 4.
self.assertEqual([4], wrapped_sess.observers["run_end_cli_run_numbers"])
def testRunTillFilterPassesWorksInConjunctionWithOtherNodeNameFilter(self):
"""Test that --.*_filter flags work in conjunction with -f.
In other words, test that you can use a tensor filter on a subset of
the tensors.
"""
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run", "-f", "v_greater_than_twelve", "--node_name_filter", "v$"],
["run", "-f", "v_greater_than_twelve", "--node_name_filter", "v$"],
["run"]],
self.sess,
dump_root=self._tmp_dir)
def v_greater_than_twelve(datum, tensor):
return datum.node_name == "v" and tensor > 12.0
wrapped_sess.add_tensor_filter("v_greater_than_twelve",
v_greater_than_twelve)
# run five times.
wrapped_sess.run(self.inc_v)
wrapped_sess.run(self.inc_v)
wrapped_sess.run(self.inc_v)
wrapped_sess.run(self.inc_v)
wrapped_sess.run(self.inc_v)
self.assertAllClose(15.0, self.sess.run(self.v))
self.assertEqual([1], wrapped_sess.observers["run_start_cli_run_numbers"])
# run-end CLI should NOT have been launched for run #2 and #3, because only
# starting from run #4 v becomes greater than 12.0.
self.assertEqual([4, 5], wrapped_sess.observers["run_end_cli_run_numbers"])
debug_dumps = wrapped_sess.observers["debug_dumps"]
self.assertEqual(2, len(debug_dumps))
self.assertEqual(1, len(debug_dumps[0].dumped_tensor_data))
self.assertEqual("v:0", debug_dumps[0].dumped_tensor_data[0].tensor_name)
self.assertEqual(1, len(debug_dumps[1].dumped_tensor_data))
self.assertEqual("v:0", debug_dumps[1].dumped_tensor_data[0].tensor_name)
def testRunsUnderDebugModeWithWatchFnFilteringNodeNames(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run", "--node_name_filter", "inc.*"],
["run", "--node_name_filter", "delta"],
["run"]],
self.sess, dump_root=self._tmp_dir)
# run under debug mode twice.
wrapped_sess.run(self.inc_v)
wrapped_sess.run(self.inc_v)
# Verify that the assign_add op did take effect.
self.assertAllClose(12.0, self.sess.run(self.v))
# Verify that the dumps have been generated and picked up during run-end.
self.assertEqual(2, len(wrapped_sess.observers["debug_dumps"]))
dumps = wrapped_sess.observers["debug_dumps"][0]
self.assertEqual(1, dumps.size)
self.assertEqual("inc_v", dumps.dumped_tensor_data[0].node_name)
dumps = wrapped_sess.observers["debug_dumps"][1]
self.assertEqual(1, dumps.size)
self.assertEqual("delta", dumps.dumped_tensor_data[0].node_name)
def testRunsUnderDebugModeWithWatchFnFilteringOpTypes(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run", "--node_name_filter", "delta"],
["run", "--op_type_filter", "AssignAdd"],
["run"]],
self.sess, dump_root=self._tmp_dir)
# run under debug mode twice.
wrapped_sess.run(self.inc_v)
wrapped_sess.run(self.inc_v)
# Verify that the assign_add op did take effect.
self.assertAllClose(12.0, self.sess.run(self.v))
# Verify that the dumps have been generated and picked up during run-end.
self.assertEqual(2, len(wrapped_sess.observers["debug_dumps"]))
dumps = wrapped_sess.observers["debug_dumps"][0]
self.assertEqual(1, dumps.size)
self.assertEqual("delta", dumps.dumped_tensor_data[0].node_name)
dumps = wrapped_sess.observers["debug_dumps"][1]
self.assertEqual(1, dumps.size)
self.assertEqual("inc_v", dumps.dumped_tensor_data[0].node_name)
def testRunsUnderDebugModeWithWatchFnFilteringTensorDTypes(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run", "--op_type_filter", "Variable.*"],
["run", "--tensor_dtype_filter", "int32"],
["run"]],
self.sess, dump_root=self._tmp_dir)
# run under debug mode twice.
wrapped_sess.run(self.w_int)
wrapped_sess.run(self.w_int)
# Verify that the dumps have been generated and picked up during run-end.
self.assertEqual(2, len(wrapped_sess.observers["debug_dumps"]))
dumps = wrapped_sess.observers["debug_dumps"][0]
self.assertEqual(2, dumps.size)
self.assertItemsEqual(
["v", "w"], [dumps.dumped_tensor_data[i].node_name for i in [0, 1]])
dumps = wrapped_sess.observers["debug_dumps"][1]
self.assertEqual(2, dumps.size)
self.assertEqual(
["w_int_inner", "w_int_outer"],
[dumps.dumped_tensor_data[i].node_name for i in [0, 1]])
def testRunsUnderDebugModeWithWatchFnFilteringOpTypesAndTensorDTypes(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run", "--op_type_filter", "Cast", "--tensor_dtype_filter", "int32"],
["run"]],
self.sess, dump_root=self._tmp_dir)
# run under debug mode twice.
wrapped_sess.run(self.w_int)
# Verify that the dumps have been generated and picked up during run-end.
self.assertEqual(1, len(wrapped_sess.observers["debug_dumps"]))
dumps = wrapped_sess.observers["debug_dumps"][0]
self.assertEqual(1, dumps.size)
self.assertEqual("w_int_inner", dumps.dumped_tensor_data[0].node_name)
def testPrintFeedPrintsFeedValueForTensorFeedKey(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["print_feed", "ph:0"], ["run"], ["run"]], self.sess)
self.assertAllClose(
[[5.0], [-1.0]],
wrapped_sess.run(self.y, feed_dict={self.ph: [[0.0, 1.0, 2.0]]}))
print_feed_responses = wrapped_sess.observers["print_feed_responses"]
self.assertEqual(1, len(print_feed_responses))
self.assertEqual(
["Tensor \"ph:0 (feed)\":", "", "[[0.0, 1.0, 2.0]]"],
print_feed_responses[0].lines)
def testPrintFeedPrintsFeedValueForTensorNameFeedKey(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["print_feed", "ph:0"], ["run"], ["run"]], self.sess)
self.assertAllClose(
[[5.0], [-1.0]],
wrapped_sess.run(self.y, feed_dict={"ph:0": [[0.0, 1.0, 2.0]]}))
print_feed_responses = wrapped_sess.observers["print_feed_responses"]
self.assertEqual(1, len(print_feed_responses))
self.assertEqual(
["Tensor \"ph:0 (feed)\":", "", "[[0.0, 1.0, 2.0]]"],
print_feed_responses[0].lines)
def testPrintFeedPrintsErrorForInvalidFeedKey(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["print_feed", "spam"], ["run"], ["run"]], self.sess)
self.assertAllClose(
[[5.0], [-1.0]],
wrapped_sess.run(self.y, feed_dict={"ph:0": [[0.0, 1.0, 2.0]]}))
print_feed_responses = wrapped_sess.observers["print_feed_responses"]
self.assertEqual(1, len(print_feed_responses))
self.assertEqual(
["ERROR: The feed_dict of the current run does not contain the key "
"spam"], print_feed_responses[0].lines)
def testPrintFeedPrintsErrorWhenFeedDictIsNone(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["print_feed", "spam"], ["run"], ["run"]], self.sess)
wrapped_sess.run(self.w_int)
print_feed_responses = wrapped_sess.observers["print_feed_responses"]
self.assertEqual(1, len(print_feed_responses))
self.assertEqual(
["ERROR: The feed_dict of the current run is None or empty."],
print_feed_responses[0].lines)
def testRunUnderProfilerModeWorks(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run", "-p"], ["run"]], self.sess)
wrapped_sess.run(self.w_int)
self.assertEqual(1, len(wrapped_sess.observers["profiler_run_metadata"]))
self.assertTrue(
wrapped_sess.observers["profiler_run_metadata"][0].step_stats)
self.assertEqual(1, len(wrapped_sess.observers["profiler_py_graphs"]))
self.assertIsInstance(
wrapped_sess.observers["profiler_py_graphs"][0], ops.Graph)
  def testCallingHookDelBeforeAnyRun(self):
    """Deleting the wrapper before any run() must not raise (regression
    guard for cleanup logic that assumes a run happened)."""
    wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
        [["run"], ["run"]], self.sess)
    del wrapped_sess
def testCallingShouldStopMethodOnNonWrappedNonMonitoredSessionErrors(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run"], ["run"]], self.sess)
with self.assertRaisesRegexp(
ValueError,
r"The wrapped session .* does not have a method .*should_stop.*"):
wrapped_sess.should_stop()
def testLocalCLIDebugWrapperSessionWorksOnMonitoredSession(self):
monitored_sess = monitored_session.MonitoredSession()
wrapped_monitored_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run"], ["run"]], monitored_sess)
self.assertFalse(wrapped_monitored_sess.should_stop())
  def testRunsWithEmptyFetchWorks(self):
    """An empty fetch list round-trips unchanged through the wrapper."""
    wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
        [["run"]], self.sess, dump_root="")
    run_output = wrapped_sess.run([])
    self.assertEqual([], run_output)
def testDebuggingKerasFitWithSkippedRunsWorks(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run"], ["run"], ["run", "-t", "10"]], self.sess)
backend.set_session(wrapped_sess)
model = sequential.Sequential()
model.add(core.Dense(4, input_shape=[2], activation="relu"))
model.add(core.Dense(1))
model.compile(loss="mse", optimizer="sgd")
x = np.zeros([8, 2])
y = np.zeros([8, 1])
model.fit(x, y, epochs=2)
self.assertEqual(2, len(wrapped_sess.observers["debug_dumps"]))
def testDebuggingKerasFitWithProfilingWorks(self):
wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
[["run", "-p"]] * 10, self.sess)
backend.set_session(wrapped_sess)
model = sequential.Sequential()
model.add(core.Dense(4, input_shape=[2], activation="relu"))
model.add(core.Dense(1))
model.compile(loss="mse", optimizer="sgd")
x = np.zeros([8, 2])
y = np.zeros([8, 1])
model.fit(x, y, epochs=2)
self.assertEqual(0, len(wrapped_sess.observers["debug_dumps"]))
  def testRunsWithEmptyNestedFetchWorks(self):
    """Nested structures of empty fetches keep their structure on return."""
    wrapped_sess = LocalCLIDebuggerWrapperSessionForTest(
        [["run"]], self.sess, dump_root="")
    run_output = wrapped_sess.run({"foo": {"baz": []}, "bar": ()})
    self.assertEqual({"foo": {"baz": []}, "bar": ()}, run_output)
  def testSessionRunHook(self):
    """A wrapped session inside a MonitoredSession with SessionRunHooks
    still produces debug dumps containing the fetched node."""
    a = array_ops.placeholder(dtypes.float32, [10])
    b = a + 1
    c = b * 2
    # Two hooks that each request an extra fetch via before_run().
    class Hook(session_run_hook.SessionRunHook):
      def before_run(self, _):
        return session_run_hook.SessionRunArgs(fetches=c)
    class Hook2(session_run_hook.SessionRunHook):
      def before_run(self, _):
        return session_run_hook.SessionRunArgs(fetches=b)
    sess = session.Session()
    sess = LocalCLIDebuggerWrapperSessionForTest([["run"], ["run"]], sess)
    # Minimal session creator so MonitoredSession uses the wrapped session.
    class SessionCreator(object):
      def create_session(self):
        return sess
    final_sess = monitored_session.MonitoredSession(
        session_creator=SessionCreator(), hooks=[Hook(), Hook2()])
    final_sess.run(b, feed_dict={a: np.arange(10)})
    debug_dumps = sess.observers["debug_dumps"]
    self.assertEqual(1, len(debug_dumps))
    debug_dump = debug_dumps[0]
    node_names = [datum.node_name for datum in debug_dump.dumped_tensor_data]
    self.assertIn(b.op.name, node_names)
if __name__ == "__main__":
googletest.main()
| 39.837719 | 88 | 0.708521 |
bf228d20b05bed3a515a9f21935949fb393a5927 | 1,460 | py | Python | google/cloud/webrisk_v1beta1/__init__.py | vam-google/python-webrisk | f9bd38d68caada7cd7bc670fe4c4c04050605b87 | [
"Apache-2.0"
] | null | null | null | google/cloud/webrisk_v1beta1/__init__.py | vam-google/python-webrisk | f9bd38d68caada7cd7bc670fe4c4c04050605b87 | [
"Apache-2.0"
] | 40 | 2019-07-16T10:04:48.000Z | 2020-01-20T09:04:59.000Z | google/cloud/webrisk_v1beta1/__init__.py | vam-google/python-webrisk | f9bd38d68caada7cd7bc670fe4c4c04050605b87 | [
"Apache-2.0"
] | 2 | 2019-07-18T00:05:31.000Z | 2019-11-27T14:17:22.000Z | # -*- coding: utf-8 -*-
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import sys
import warnings
from google.cloud.webrisk_v1beta1 import types
from google.cloud.webrisk_v1beta1.gapic import enums
from google.cloud.webrisk_v1beta1.gapic import web_risk_service_v1_beta1_client
# Emit a deprecation warning when this package is imported under Python 2.7,
# which Google Cloud client libraries were phasing out.
# Fix: the implicitly-concatenated fragments previously had no separating
# spaces, yielding "...2.7.More details...Librariescan be found..." in the
# user-visible message.
if sys.version_info[:2] == (2, 7):
    message = (
        "A future version of this library will drop support for Python 2.7. "
        "More details about Python 2 support for Google Cloud Client Libraries "
        "can be found at https://cloud.google.com/python/docs/python2-sunset/"
    )
    warnings.warn(message, DeprecationWarning)
class WebRiskServiceV1Beta1Client(
    web_risk_service_v1_beta1_client.WebRiskServiceV1Beta1Client
):
    # Thin alias subclass re-exporting the GAPIC client at package level.
    # It borrows the parent's docstring verbatim and attaches the `enums`
    # module as a class attribute for convenient access.
    __doc__ = web_risk_service_v1_beta1_client.WebRiskServiceV1Beta1Client.__doc__
    enums = enums
__all__ = ("enums", "types", "WebRiskServiceV1Beta1Client")
| 33.181818 | 82 | 0.763699 |
36be986c82539173ba045efc800d935224c849f0 | 491 | py | Python | blawx/ldap.py | Lexpedite/blawx | 782313219687a22cb1b1f941d101b6965cc8d7c0 | [
"MIT"
] | 9 | 2022-02-03T10:44:07.000Z | 2022-03-21T04:11:27.000Z | blawx/ldap.py | Lexpedite/blawx | 782313219687a22cb1b1f941d101b6965cc8d7c0 | [
"MIT"
] | 43 | 2022-02-01T20:03:11.000Z | 2022-03-29T20:46:13.000Z | blawx/ldap.py | Lexpedite/blawx | 782313219687a22cb1b1f941d101b6965cc8d7c0 | [
"MIT"
] | 1 | 2022-02-02T21:20:25.000Z | 2022-02-02T21:20:25.000Z | ldap_code = """
#pred overrules(R1,R2) :: 'the conclusion in @(R1) overrules the conclusion in @(R2)'.
#pred opposes(C1,C2) :: 'the conclusion @(C1) opposes the conclusion @(C2)'.
#pred defeated(R,_) :: 'the conclusion in @(R) is defeated'.
#pred refuted(R,_) :: 'the conclusion in @(R) is refuted'.
refuted(R,C) :-
opposes(C,OC),
overrules(OR,R),
according_to(OR,OC).
defeated(R,C) :-
refuted(R,C).
legally_holds(R,C) :-
according_to(R,C),
not defeated(R,C).
""" | 27.277778 | 86 | 0.625255 |
f2995a883a88b6808765091fc1fd4bdd671b1f77 | 8,273 | py | Python | autopandas_v2/generators/ml/networks/ggnn/ops/select.py | chyanju/autopandas | 16080ad12f0e8e7b0a614671aea1ed57b3fed7fe | [
"BSD-3-Clause"
] | 16 | 2019-08-13T02:49:44.000Z | 2022-02-08T03:14:34.000Z | autopandas_v2/generators/ml/networks/ggnn/ops/select.py | chyanju/autopandas | 16080ad12f0e8e7b0a614671aea1ed57b3fed7fe | [
"BSD-3-Clause"
] | 2 | 2020-09-25T22:40:40.000Z | 2022-02-09T23:42:53.000Z | autopandas_v2/generators/ml/networks/ggnn/ops/select.py | chyanju/autopandas | 16080ad12f0e8e7b0a614671aea1ed57b3fed7fe | [
"BSD-3-Clause"
] | 3 | 2021-07-06T10:30:36.000Z | 2022-01-11T23:21:31.000Z | import collections
import pickle
from typing import Dict
import numpy as np
import tensorflow as tf
from autopandas_v2.generators.ml.networks.ggnn import utils
from autopandas_v2.generators.ml.networks.ggnn.base import BaseSmartGenGGNN
class ModelSelect(BaseSmartGenGGNN):
    """GGNN model that picks one node out of a per-graph candidate set.

    Each raw graph carries a list of candidate node indices and, in training
    data, the index of the selected candidate. The model scores every
    candidate with an MLP over node/graph representations and applies a
    per-graph softmax over the candidate logits.
    """
    def process_raw_graph(self, graph):
        # Augment the base preprocessing with the candidate indices and a
        # one-hot target over them (all zeros at inference time, when the
        # 'selected' key is absent).
        processed = super().process_raw_graph(graph)
        processed['candidates'] = graph['candidates']
        processed['selected_one_hot'] = [int(i == graph.get('selected', -1)) for i in graph['candidates']]
        return processed
    def define_placeholders(self):
        super().define_placeholders()
        # All four placeholders are flat over every candidate of every graph
        # in the minibatch:
        # node indices offset into the batched node tensor ...
        self.placeholders['candidates'] = tf.placeholder(tf.int32, [None], name='candidates')
        # ... the same indices without the batch offset ...
        self.placeholders['candidates_true'] = tf.placeholder(tf.int32, [None], name='candidates_true')
        # ... the owning graph id of each candidate (segment ids for the
        # segment ops below).
        # NOTE(review): name='candidates' here looks like a copy-paste slip
        # (TF uniquifies duplicate names); placeholders are fed by object
        # reference, so it is harmless -- confirm before renaming.
        self.placeholders['cand_graph_nodes_list'] = tf.placeholder(tf.int32, [None], name='candidates')
        # ... and the one-hot selection target aligned with 'candidates'.
        self.placeholders['select_targets'] = tf.placeholder(tf.int64, [None], name='select_targets')
    def per_graph_custom_minibatch_iterator(self, graph_num: int, graph: Dict, node_offset: int) -> Dict:
        """Per-graph feed values; `node_offset` shifts candidate node ids
        into the batched node index space."""
        return {
            'candidates': np.array(graph['candidates']) + node_offset,
            'candidates_true': np.array(graph['candidates']),
            'select_targets': np.array(graph['selected_one_hot']),
            'cand_graph_nodes_list': np.full([len(graph['candidates'])], fill_value=graph_num)
        }
    def final_classifier(self):
        """Return the candidate-scoring MLP (3 * node size -> 1 logit)."""
        # By default, a simple MLP with one hidden layer
        return utils.MLP(self.params['hidden_size_node'] * 3, 1,
                         [self.params['hidden_size_final_mlp']],
                         self.placeholders['out_layer_dropout_keep_prob'])
    def compute_candidate_softmax_and_loss(self, cand_logits):
        """Build per-graph softmax over candidates, plus loss/accuracy ops.

        `cand_logits` is flat over all candidates in the batch; segment ops
        keyed by `cand_graph_nodes_list` implement a variable-width,
        numerically stable (log-sum-exp) softmax per graph.
        """
        # The following is softmax-ing over the candidates per graph.
        # As the number of candidates varies, we can't just use tf.softmax.
        # We implement it with the logsumexp trick:
        # Step (1): Obtain shift constant as max of the logits
        max_per_graph = tf.unsorted_segment_max(
            data=cand_logits,
            segment_ids=self.placeholders['cand_graph_nodes_list'],
            num_segments=self.placeholders['num_graphs_in_batch']
        ) # Shape [G]
        # # Step (2): Distribute max out to the corresponding logits again, and shift scores:
        max_per_cand = tf.gather(params=max_per_graph,
                                 indices=self.placeholders['cand_graph_nodes_list'])
        cand_logits_shifted = cand_logits - max_per_cand
        # # Step (3): Exp, sum up per target, compute exp(score) / exp(sum) as softmax:
        scores_exped = tf.exp(cand_logits_shifted)
        scores_sum_per_graph = tf.unsorted_segment_sum(
            data=scores_exped,
            segment_ids=self.placeholders['cand_graph_nodes_list'],
            num_segments=self.placeholders['num_graphs_in_batch']
        ) # Shape [G]
        scores_sum_per_cand = tf.gather(
            params=scores_sum_per_graph,
            indices=self.placeholders['cand_graph_nodes_list']
        )
        # SMALL_NUMBER guards against division by / log of zero.
        self.ops['softmax_values'] = scores_exped / (scores_sum_per_cand + utils.SMALL_NUMBER)
        self.ops['log_softmax_values'] = cand_logits_shifted - tf.log(scores_sum_per_cand + utils.SMALL_NUMBER)
        # Cross-entropy: the one-hot targets mask out the log-probability of
        # the selected candidate in each graph.
        labels = self.placeholders['select_targets']
        flat_loss_values = -tf.cast(labels, "float32") * self.ops['log_softmax_values']
        losses = tf.unsorted_segment_sum(
            data=flat_loss_values,
            segment_ids=self.placeholders['cand_graph_nodes_list'],
            num_segments=self.placeholders['num_graphs_in_batch']
        )
        self.ops['loss'] = tf.reduce_mean(losses)
        # Top-1 accuracy: a graph counts as correct when the argmax logit
        # coincides with the labeled candidate.
        flat_correct_prediction = tf.cast(tf.equal(cand_logits, max_per_cand), "int64") * self.placeholders[
            'select_targets']
        correct_prediction = tf.unsorted_segment_max(
            data=tf.cast(flat_correct_prediction, "float32"),
            segment_ids=self.placeholders['cand_graph_nodes_list'],
            num_segments=self.placeholders['num_graphs_in_batch']
        )
        self.ops['accuracy_task'] = tf.reduce_mean(correct_prediction)
    def make_model(self, mode):
        super().make_model(mode)
        with tf.variable_scope("out_layer"):
            self.ops['final_classifier'] = self.final_classifier()
            # Representation of each candidate node.
            candidates_repr = tf.gather(params=self.ops['final_node_representations'],
                                        indices=self.placeholders['candidates'])
            # Sum of candidate representations per graph.
            candidates_pooled = tf.unsorted_segment_sum(
                data=candidates_repr,
                segment_ids=self.placeholders['cand_graph_nodes_list'],
                num_segments=self.placeholders['num_graphs_in_batch']
            )
            # Whole-graph pooled representation minus the candidate mass,
            # i.e. the "rest of the graph" context.
            pooled_representations = self.prepare_pooled_node_representations()
            graph_pooled = pooled_representations - candidates_pooled
            # Broadcast both per-graph vectors back onto each candidate node.
            graph_pooled_copies = tf.gather(params=graph_pooled,
                                            indices=self.placeholders['graph_nodes_list'])
            cand_pooled_copies = tf.gather(params=candidates_pooled,
                                           indices=self.placeholders['graph_nodes_list'])
            candidates_graph = tf.gather(params=graph_pooled_copies, indices=self.placeholders['candidates'])
            candidates_pooled = tf.gather(params=cand_pooled_copies, indices=self.placeholders['candidates'])
            # Logit input: [own repr | graph-minus-candidates | candidate pool].
            candidates_concat = tf.concat([candidates_repr, candidates_graph, candidates_pooled], -1)
            candidates_logits = tf.reshape(self.ops['final_classifier'](candidates_concat), [-1])
            self.compute_candidate_softmax_and_loss(candidates_logits)
    def save_interface(self, path: str):
        """Pickle a ModelSelectInterface stub to `path`. The import is kept
        local, presumably to avoid a circular dependency -- confirm."""
        from autopandas_v2.generators.ml.networks.ggnn.interfaces import ModelSelectInterface
        interface = ModelSelectInterface()
        with open(path, 'wb') as f:
            pickle.dump(interface, f)
    def infer(self, raw_graph_data, **kwargs):
        """Return, per input graph, a list of (probability, candidate) pairs."""
        graphs = [self.process_raw_graph(g) for g in raw_graph_data]
        batch_iterator = utils.ThreadedIterator(self.make_minibatch_iterator(graphs, is_training=False),
                                                max_queue_size=50)
        preds = []
        for step, batch_data in enumerate(batch_iterator):
            # Disable dropout at inference time.
            batch_data[self.placeholders['out_layer_dropout_keep_prob']] = 1.0
            fetch_list = [self.ops['softmax_values'],
                          self.placeholders['cand_graph_nodes_list'],
                          self.placeholders['candidates_true']]
            result = self.sess.run(fetch_list, feed_dict=batch_data)
            # Regroup the flat per-candidate outputs by their graph id.
            groupings = collections.defaultdict(list)
            for probs, graph_id, candidate in zip(*result):
                groupings[graph_id].append((probs, candidate))
            preds += list(groupings.values())
        return preds
    def init_analysis(self):
        super().init_analysis()
        # 0-based rank of the correct candidate in each analyzed graph.
        self.correct_indices = []
    def finish_analysis(self):
        """Report top-k accuracy for k = 1..top_k and merge into the result."""
        result = super().finish_analysis()
        top_k = self.params.args.top_k
        for k in range(1, top_k + 1):
            # self.num_graphs is presumably accumulated by the base class's
            # analysis loop -- confirm.
            acc = sum(1 for i in self.correct_indices if i < k) / self.num_graphs
            print("Top-{} Accuracy : {}".format(k, acc))
            result['Top-{}'.format(k)] = acc
        return result
    def get_fetch_list(self):
        # Extra fetches consumed positionally by analyze_result() below.
        return super().get_fetch_list() + [self.ops['softmax_values'],
                                           self.placeholders['cand_graph_nodes_list'],
                                           self.placeholders['select_targets']]
    def analyze_result(self, num_graphs, result):
        """Record the rank at which the true candidate was scored, per graph."""
        super().analyze_result(num_graphs, result)
        groupings = collections.defaultdict(list)
        # result[1:] lines up with the extra fetches from get_fetch_list().
        for probs, graph_id, correct in zip(*result[1:]):
            groupings[graph_id].append((probs, correct))
        for graph_id, preds in groupings.items():
            # Sort candidates by descending probability; the index of the
            # labeled candidate is its rank.
            preds = sorted(preds, key=lambda x: -x[0])
            for idx, (prob, correct) in enumerate(preds):
                if correct:
                    self.correct_indices.append(idx)
                    break
| 45.961111 | 111 | 0.640396 |
651160cc70823de7281a8db8d0493b4036753468 | 39,530 | py | Python | myvenv/lib/python3.5/site-packages/_pytest/pytester.py | tuvapp/tuvappcom | 5ca2be19f4b0c86a1d4a9553711a4da9d3f32841 | [
"MIT"
] | 4 | 2020-10-31T19:52:05.000Z | 2021-09-22T11:39:27.000Z | myvenv/lib/python3.5/site-packages/_pytest/pytester.py | tuvapp/tuvappcom | 5ca2be19f4b0c86a1d4a9553711a4da9d3f32841 | [
"MIT"
] | 3 | 2017-12-16T08:23:18.000Z | 2018-01-24T23:23:39.000Z | myvenv/lib/python3.5/site-packages/_pytest/pytester.py | tuvapp/tuvappcom | 5ca2be19f4b0c86a1d4a9553711a4da9d3f32841 | [
"MIT"
] | 10 | 2017-12-15T13:48:41.000Z | 2021-01-24T10:31:22.000Z | """ (disabled by default) support for testing pytest and pytest plugins. """
import codecs
import gc
import os
import platform
import re
import subprocess
import sys
import time
import traceback
from fnmatch import fnmatch
from py.builtin import print_
from _pytest._code import Source
import py
import pytest
from _pytest.main import Session, EXIT_OK
from _pytest.assertion.rewrite import AssertionRewritingHook
def pytest_addoption(parser):
# group = parser.getgroup("pytester", "pytester (self-tests) options")
parser.addoption('--lsof',
action="store_true", dest="lsof", default=False,
help=("run FD checks if lsof is available"))
parser.addoption('--runpytest', default="inprocess", dest="runpytest",
choices=("inprocess", "subprocess", ),
help=("run pytest sub runs in tests using an 'inprocess' "
"or 'subprocess' (python -m main) method"))
def pytest_configure(config):
# This might be called multiple times. Only take the first.
global _pytest_fullpath
try:
_pytest_fullpath
except NameError:
_pytest_fullpath = os.path.abspath(pytest.__file__.rstrip("oc"))
_pytest_fullpath = _pytest_fullpath.replace("$py.class", ".py")
if config.getvalue("lsof"):
checker = LsofFdLeakChecker()
if checker.matching_platform():
config.pluginmanager.register(checker)
class LsofFdLeakChecker(object):
def get_open_files(self):
out = self._exec_lsof()
open_files = self._parse_lsof_output(out)
return open_files
def _exec_lsof(self):
pid = os.getpid()
return py.process.cmdexec("lsof -Ffn0 -p %d" % pid)
def _parse_lsof_output(self, out):
def isopen(line):
return line.startswith('f') and ("deleted" not in line and
'mem' not in line and "txt" not in line and 'cwd' not in line)
open_files = []
for line in out.split("\n"):
if isopen(line):
fields = line.split('\0')
fd = fields[0][1:]
filename = fields[1][1:]
if filename.startswith('/'):
open_files.append((fd, filename))
return open_files
def matching_platform(self):
try:
py.process.cmdexec("lsof -v")
except (py.process.cmdexec.Error, UnicodeDecodeError):
# cmdexec may raise UnicodeDecodeError on Windows systems
# with locale other than english:
# https://bitbucket.org/pytest-dev/py/issues/66
return False
else:
return True
@pytest.hookimpl(hookwrapper=True, tryfirst=True)
def pytest_runtest_item(self, item):
lines1 = self.get_open_files()
yield
if hasattr(sys, "pypy_version_info"):
gc.collect()
lines2 = self.get_open_files()
new_fds = set([t[0] for t in lines2]) - set([t[0] for t in lines1])
leaked_files = [t for t in lines2 if t[0] in new_fds]
if leaked_files:
error = []
error.append("***** %s FD leakage detected" % len(leaked_files))
error.extend([str(f) for f in leaked_files])
error.append("*** Before:")
error.extend([str(f) for f in lines1])
error.append("*** After:")
error.extend([str(f) for f in lines2])
error.append(error[0])
error.append("*** function %s:%s: %s " % item.location)
pytest.fail("\n".join(error), pytrace=False)
# XXX copied from execnet's conftest.py - needs to be merged
winpymap = {
'python2.7': r'C:\Python27\python.exe',
'python2.6': r'C:\Python26\python.exe',
'python3.1': r'C:\Python31\python.exe',
'python3.2': r'C:\Python32\python.exe',
'python3.3': r'C:\Python33\python.exe',
'python3.4': r'C:\Python34\python.exe',
'python3.5': r'C:\Python35\python.exe',
}
def getexecutable(name, cache={}):
try:
return cache[name]
except KeyError:
executable = py.path.local.sysfind(name)
if executable:
import subprocess
popen = subprocess.Popen([str(executable), "--version"],
universal_newlines=True, stderr=subprocess.PIPE)
out, err = popen.communicate()
if name == "jython":
if not err or "2.5" not in err:
executable = None
if "2.5.2" in err:
executable = None # http://bugs.jython.org/issue1790
elif popen.returncode != 0:
# Handle pyenv's 127.
executable = None
cache[name] = executable
return executable
@pytest.fixture(params=['python2.6', 'python2.7', 'python3.3', "python3.4",
'pypy', 'pypy3'])
def anypython(request):
name = request.param
executable = getexecutable(name)
if executable is None:
if sys.platform == "win32":
executable = winpymap.get(name, None)
if executable:
executable = py.path.local(executable)
if executable.check():
return executable
pytest.skip("no suitable %s found" % (name,))
return executable
# used at least by pytest-xdist plugin
@pytest.fixture
def _pytest(request):
""" Return a helper which offers a gethookrecorder(hook)
method which returns a HookRecorder instance which helps
to make assertions about called hooks.
"""
return PytestArg(request)
class PytestArg:
def __init__(self, request):
self.request = request
def gethookrecorder(self, hook):
hookrecorder = HookRecorder(hook._pm)
self.request.addfinalizer(hookrecorder.finish_recording)
return hookrecorder
def get_public_names(l):
"""Only return names from iterator l without a leading underscore."""
return [x for x in l if x[0] != "_"]
class ParsedCall:
def __init__(self, name, kwargs):
self.__dict__.update(kwargs)
self._name = name
def __repr__(self):
d = self.__dict__.copy()
del d['_name']
return "<ParsedCall %r(**%r)>" %(self._name, d)
class HookRecorder:
"""Record all hooks called in a plugin manager.
This wraps all the hook calls in the plugin manager, recording
each call before propagating the normal calls.
"""
def __init__(self, pluginmanager):
self._pluginmanager = pluginmanager
self.calls = []
def before(hook_name, hook_impls, kwargs):
self.calls.append(ParsedCall(hook_name, kwargs))
def after(outcome, hook_name, hook_impls, kwargs):
pass
self._undo_wrapping = pluginmanager.add_hookcall_monitoring(before, after)
def finish_recording(self):
self._undo_wrapping()
def getcalls(self, names):
if isinstance(names, str):
names = names.split()
return [call for call in self.calls if call._name in names]
def assert_contains(self, entries):
__tracebackhide__ = True
i = 0
entries = list(entries)
backlocals = sys._getframe(1).f_locals
while entries:
name, check = entries.pop(0)
for ind, call in enumerate(self.calls[i:]):
if call._name == name:
print_("NAMEMATCH", name, call)
if eval(check, backlocals, call.__dict__):
print_("CHECKERMATCH", repr(check), "->", call)
else:
print_("NOCHECKERMATCH", repr(check), "-", call)
continue
i += ind + 1
break
print_("NONAMEMATCH", name, "with", call)
else:
pytest.fail("could not find %r check %r" % (name, check))
def popcall(self, name):
__tracebackhide__ = True
for i, call in enumerate(self.calls):
if call._name == name:
del self.calls[i]
return call
lines = ["could not find call %r, in:" % (name,)]
lines.extend([" %s" % str(x) for x in self.calls])
pytest.fail("\n".join(lines))
def getcall(self, name):
l = self.getcalls(name)
assert len(l) == 1, (name, l)
return l[0]
# functionality for test reports
def getreports(self,
names="pytest_runtest_logreport pytest_collectreport"):
return [x.report for x in self.getcalls(names)]
def matchreport(self, inamepart="",
names="pytest_runtest_logreport pytest_collectreport", when=None):
""" return a testreport whose dotted import path matches """
l = []
for rep in self.getreports(names=names):
try:
if not when and rep.when != "call" and rep.passed:
# setup/teardown passing reports - let's ignore those
continue
except AttributeError:
pass
if when and getattr(rep, 'when', None) != when:
continue
if not inamepart or inamepart in rep.nodeid.split("::"):
l.append(rep)
if not l:
raise ValueError("could not find test report matching %r: "
"no test reports at all!" % (inamepart,))
if len(l) > 1:
raise ValueError(
"found 2 or more testreports matching %r: %s" %(inamepart, l))
return l[0]
def getfailures(self,
names='pytest_runtest_logreport pytest_collectreport'):
return [rep for rep in self.getreports(names) if rep.failed]
def getfailedcollections(self):
return self.getfailures('pytest_collectreport')
def listoutcomes(self):
passed = []
skipped = []
failed = []
for rep in self.getreports(
"pytest_collectreport pytest_runtest_logreport"):
if rep.passed:
if getattr(rep, "when", None) == "call":
passed.append(rep)
elif rep.skipped:
skipped.append(rep)
elif rep.failed:
failed.append(rep)
return passed, skipped, failed
def countoutcomes(self):
return [len(x) for x in self.listoutcomes()]
def assertoutcome(self, passed=0, skipped=0, failed=0):
realpassed, realskipped, realfailed = self.listoutcomes()
assert passed == len(realpassed)
assert skipped == len(realskipped)
assert failed == len(realfailed)
def clear(self):
self.calls[:] = []
@pytest.fixture
def linecomp(request):
return LineComp()
@pytest.fixture(name='LineMatcher')
def LineMatcher_fixture(request):
return LineMatcher
@pytest.fixture
def testdir(request, tmpdir_factory):
return Testdir(request, tmpdir_factory)
rex_outcome = re.compile("(\d+) ([\w-]+)")
class RunResult:
"""The result of running a command.
Attributes:
:ret: The return value.
:outlines: List of lines captured from stdout.
:errlines: List of lines captures from stderr.
:stdout: :py:class:`LineMatcher` of stdout, use ``stdout.str()`` to
reconstruct stdout or the commonly used
``stdout.fnmatch_lines()`` method.
:stderrr: :py:class:`LineMatcher` of stderr.
:duration: Duration in seconds.
"""
def __init__(self, ret, outlines, errlines, duration):
self.ret = ret
self.outlines = outlines
self.errlines = errlines
self.stdout = LineMatcher(outlines)
self.stderr = LineMatcher(errlines)
self.duration = duration
def parseoutcomes(self):
""" Return a dictionary of outcomestring->num from parsing
the terminal output that the test process produced."""
for line in reversed(self.outlines):
if 'seconds' in line:
outcomes = rex_outcome.findall(line)
if outcomes:
d = {}
for num, cat in outcomes:
d[cat] = int(num)
return d
def assert_outcomes(self, passed=0, skipped=0, failed=0):
""" assert that the specified outcomes appear with the respective
numbers (0 means it didn't occur) in the text output from a test run."""
d = self.parseoutcomes()
assert passed == d.get("passed", 0)
assert skipped == d.get("skipped", 0)
assert failed == d.get("failed", 0)
class Testdir:
"""Temporary test directory with tools to test/run pytest itself.
This is based on the ``tmpdir`` fixture but provides a number of
methods which aid with testing pytest itself. Unless
:py:meth:`chdir` is used all methods will use :py:attr:`tmpdir` as
current working directory.
Attributes:
:tmpdir: The :py:class:`py.path.local` instance of the temporary
directory.
:plugins: A list of plugins to use with :py:meth:`parseconfig` and
:py:meth:`runpytest`. Initially this is an empty list but
plugins can be added to the list. The type of items to add to
the list depend on the method which uses them so refer to them
for details.
"""
def __init__(self, request, tmpdir_factory):
self.request = request
# XXX remove duplication with tmpdir plugin
basetmp = tmpdir_factory.ensuretemp("testdir")
name = request.function.__name__
for i in range(100):
try:
tmpdir = basetmp.mkdir(name + str(i))
except py.error.EEXIST:
continue
break
self.tmpdir = tmpdir
self.plugins = []
self._savesyspath = (list(sys.path), list(sys.meta_path))
self._savemodulekeys = set(sys.modules)
self.chdir() # always chdir
self.request.addfinalizer(self.finalize)
method = self.request.config.getoption("--runpytest")
if method == "inprocess":
self._runpytest_method = self.runpytest_inprocess
elif method == "subprocess":
self._runpytest_method = self.runpytest_subprocess
def __repr__(self):
return "<Testdir %r>" % (self.tmpdir,)
def finalize(self):
"""Clean up global state artifacts.
Some methods modify the global interpreter state and this
tries to clean this up. It does not remove the temporary
directory however so it can be looked at after the test run
has finished.
"""
sys.path[:], sys.meta_path[:] = self._savesyspath
if hasattr(self, '_olddir'):
self._olddir.chdir()
self.delete_loaded_modules()
def delete_loaded_modules(self):
"""Delete modules that have been loaded during a test.
This allows the interpreter to catch module changes in case
the module is re-imported.
"""
for name in set(sys.modules).difference(self._savemodulekeys):
# it seems zope.interfaces is keeping some state
# (used by twisted related tests)
if name != "zope.interface":
del sys.modules[name]
def make_hook_recorder(self, pluginmanager):
"""Create a new :py:class:`HookRecorder` for a PluginManager."""
assert not hasattr(pluginmanager, "reprec")
pluginmanager.reprec = reprec = HookRecorder(pluginmanager)
self.request.addfinalizer(reprec.finish_recording)
return reprec
def chdir(self):
"""Cd into the temporary directory.
This is done automatically upon instantiation.
"""
old = self.tmpdir.chdir()
if not hasattr(self, '_olddir'):
self._olddir = old
def _makefile(self, ext, args, kwargs):
items = list(kwargs.items())
if args:
source = py.builtin._totext("\n").join(
map(py.builtin._totext, args)) + py.builtin._totext("\n")
basename = self.request.function.__name__
items.insert(0, (basename, source))
ret = None
for name, value in items:
p = self.tmpdir.join(name).new(ext=ext)
source = Source(value)
def my_totext(s, encoding="utf-8"):
if py.builtin._isbytes(s):
s = py.builtin._totext(s, encoding=encoding)
return s
source_unicode = "\n".join([my_totext(line) for line in source.lines])
source = py.builtin._totext(source_unicode)
content = source.strip().encode("utf-8") # + "\n"
#content = content.rstrip() + "\n"
p.write(content, "wb")
if ret is None:
ret = p
return ret
def makefile(self, ext, *args, **kwargs):
"""Create a new file in the testdir.
ext: The extension the file should use, including the dot.
E.g. ".py".
args: All args will be treated as strings and joined using
newlines. The result will be written as contents to the
file. The name of the file will be based on the test
function requesting this fixture.
E.g. "testdir.makefile('.txt', 'line1', 'line2')"
kwargs: Each keyword is the name of a file, while the value of
it will be written as contents of the file.
E.g. "testdir.makefile('.ini', pytest='[pytest]\naddopts=-rs\n')"
"""
return self._makefile(ext, args, kwargs)
def makeconftest(self, source):
"""Write a contest.py file with 'source' as contents."""
return self.makepyfile(conftest=source)
def makeini(self, source):
"""Write a tox.ini file with 'source' as contents."""
return self.makefile('.ini', tox=source)
def getinicfg(self, source):
"""Return the pytest section from the tox.ini config file."""
p = self.makeini(source)
return py.iniconfig.IniConfig(p)['pytest']
def makepyfile(self, *args, **kwargs):
"""Shortcut for .makefile() with a .py extension."""
return self._makefile('.py', args, kwargs)
def maketxtfile(self, *args, **kwargs):
"""Shortcut for .makefile() with a .txt extension."""
return self._makefile('.txt', args, kwargs)
def syspathinsert(self, path=None):
"""Prepend a directory to sys.path, defaults to :py:attr:`tmpdir`.
This is undone automatically after the test.
"""
if path is None:
path = self.tmpdir
sys.path.insert(0, str(path))
# a call to syspathinsert() usually means that the caller
# wants to import some dynamically created files.
# with python3 we thus invalidate import caches.
self._possibly_invalidate_import_caches()
def _possibly_invalidate_import_caches(self):
# invalidate caches if we can (py33 and above)
try:
import importlib
except ImportError:
pass
else:
if hasattr(importlib, "invalidate_caches"):
importlib.invalidate_caches()
def mkdir(self, name):
"""Create a new (sub)directory."""
return self.tmpdir.mkdir(name)
def mkpydir(self, name):
"""Create a new python package.
This creates a (sub)direcotry with an empty ``__init__.py``
file so that is recognised as a python package.
"""
p = self.mkdir(name)
p.ensure("__init__.py")
return p
Session = Session
def getnode(self, config, arg):
"""Return the collection node of a file.
:param config: :py:class:`_pytest.config.Config` instance, see
:py:meth:`parseconfig` and :py:meth:`parseconfigure` to
create the configuration.
:param arg: A :py:class:`py.path.local` instance of the file.
"""
session = Session(config)
assert '::' not in str(arg)
p = py.path.local(arg)
config.hook.pytest_sessionstart(session=session)
res = session.perform_collect([str(p)], genitems=False)[0]
config.hook.pytest_sessionfinish(session=session, exitstatus=EXIT_OK)
return res
def getpathnode(self, path):
"""Return the collection node of a file.
This is like :py:meth:`getnode` but uses
:py:meth:`parseconfigure` to create the (configured) pytest
Config instance.
:param path: A :py:class:`py.path.local` instance of the file.
"""
config = self.parseconfigure(path)
session = Session(config)
x = session.fspath.bestrelpath(path)
config.hook.pytest_sessionstart(session=session)
res = session.perform_collect([x], genitems=False)[0]
config.hook.pytest_sessionfinish(session=session, exitstatus=EXIT_OK)
return res
def genitems(self, colitems):
"""Generate all test items from a collection node.
This recurses into the collection node and returns a list of
all the test items contained within.
"""
session = colitems[0].session
result = []
for colitem in colitems:
result.extend(session.genitems(colitem))
return result
def runitem(self, source):
"""Run the "test_func" Item.
The calling test instance (the class which contains the test
method) must provide a ``.getrunner()`` method which should
return a runner which can run the test protocol for a single
item, like e.g. :py:func:`_pytest.runner.runtestprotocol`.
"""
# used from runner functional tests
item = self.getitem(source)
# the test class where we are called from wants to provide the runner
testclassinstance = self.request.instance
runner = testclassinstance.getrunner()
return runner(item)
def inline_runsource(self, source, *cmdlineargs):
"""Run a test module in process using ``pytest.main()``.
This run writes "source" into a temporary file and runs
``pytest.main()`` on it, returning a :py:class:`HookRecorder`
instance for the result.
:param source: The source code of the test module.
:param cmdlineargs: Any extra command line arguments to use.
:return: :py:class:`HookRecorder` instance of the result.
"""
p = self.makepyfile(source)
l = list(cmdlineargs) + [p]
return self.inline_run(*l)
def inline_genitems(self, *args):
"""Run ``pytest.main(['--collectonly'])`` in-process.
Retuns a tuple of the collected items and a
:py:class:`HookRecorder` instance.
This runs the :py:func:`pytest.main` function to run all of
pytest inside the test process itself like
:py:meth:`inline_run`. However the return value is a tuple of
the collection items and a :py:class:`HookRecorder` instance.
"""
rec = self.inline_run("--collect-only", *args)
items = [x.item for x in rec.getcalls("pytest_itemcollected")]
return items, rec
def inline_run(self, *args, **kwargs):
"""Run ``pytest.main()`` in-process, returning a HookRecorder.
This runs the :py:func:`pytest.main` function to run all of
pytest inside the test process itself. This means it can
return a :py:class:`HookRecorder` instance which gives more
detailed results from then run then can be done by matching
stdout/stderr from :py:meth:`runpytest`.
:param args: Any command line arguments to pass to
:py:func:`pytest.main`.
:param plugin: (keyword-only) Extra plugin instances the
``pytest.main()`` instance should use.
:return: A :py:class:`HookRecorder` instance.
"""
# When running py.test inline any plugins active in the main
# test process are already imported. So this disables the
# warning which will trigger to say they can no longer be
# re-written, which is fine as they are already re-written.
orig_warn = AssertionRewritingHook._warn_already_imported
def revert():
AssertionRewritingHook._warn_already_imported = orig_warn
self.request.addfinalizer(revert)
AssertionRewritingHook._warn_already_imported = lambda *a: None
rec = []
class Collect:
def pytest_configure(x, config):
rec.append(self.make_hook_recorder(config.pluginmanager))
plugins = kwargs.get("plugins") or []
plugins.append(Collect())
ret = pytest.main(list(args), plugins=plugins)
self.delete_loaded_modules()
if len(rec) == 1:
reprec = rec.pop()
else:
class reprec:
pass
reprec.ret = ret
# typically we reraise keyboard interrupts from the child run
# because it's our user requesting interruption of the testing
if ret == 2 and not kwargs.get("no_reraise_ctrlc"):
calls = reprec.getcalls("pytest_keyboard_interrupt")
if calls and calls[-1].excinfo.type == KeyboardInterrupt:
raise KeyboardInterrupt()
return reprec
def runpytest_inprocess(self, *args, **kwargs):
""" Return result of running pytest in-process, providing a similar
interface to what self.runpytest() provides. """
if kwargs.get("syspathinsert"):
self.syspathinsert()
now = time.time()
capture = py.io.StdCapture()
try:
try:
reprec = self.inline_run(*args, **kwargs)
except SystemExit as e:
class reprec:
ret = e.args[0]
except Exception:
traceback.print_exc()
class reprec:
ret = 3
finally:
out, err = capture.reset()
sys.stdout.write(out)
sys.stderr.write(err)
res = RunResult(reprec.ret,
out.split("\n"), err.split("\n"),
time.time()-now)
res.reprec = reprec
return res
def runpytest(self, *args, **kwargs):
""" Run pytest inline or in a subprocess, depending on the command line
option "--runpytest" and return a :py:class:`RunResult`.
"""
args = self._ensure_basetemp(args)
return self._runpytest_method(*args, **kwargs)
def _ensure_basetemp(self, args):
args = [str(x) for x in args]
for x in args:
if str(x).startswith('--basetemp'):
#print ("basedtemp exists: %s" %(args,))
break
else:
args.append("--basetemp=%s" % self.tmpdir.dirpath('basetemp'))
#print ("added basetemp: %s" %(args,))
return args
def parseconfig(self, *args):
"""Return a new pytest Config instance from given commandline args.
This invokes the pytest bootstrapping code in _pytest.config
to create a new :py:class:`_pytest.core.PluginManager` and
call the pytest_cmdline_parse hook to create new
:py:class:`_pytest.config.Config` instance.
If :py:attr:`plugins` has been populated they should be plugin
modules which will be registered with the PluginManager.
"""
args = self._ensure_basetemp(args)
import _pytest.config
config = _pytest.config._prepareconfig(args, self.plugins)
# we don't know what the test will do with this half-setup config
# object and thus we make sure it gets unconfigured properly in any
# case (otherwise capturing could still be active, for example)
self.request.addfinalizer(config._ensure_unconfigure)
return config
def parseconfigure(self, *args):
"""Return a new pytest configured Config instance.
This returns a new :py:class:`_pytest.config.Config` instance
like :py:meth:`parseconfig`, but also calls the
pytest_configure hook.
"""
config = self.parseconfig(*args)
config._do_configure()
self.request.addfinalizer(config._ensure_unconfigure)
return config
def getitem(self, source, funcname="test_func"):
"""Return the test item for a test function.
This writes the source to a python file and runs pytest's
collection on the resulting module, returning the test item
for the requested function name.
:param source: The module source.
:param funcname: The name of the test function for which the
Item must be returned.
"""
items = self.getitems(source)
for item in items:
if item.name == funcname:
return item
assert 0, "%r item not found in module:\n%s\nitems: %s" %(
funcname, source, items)
def getitems(self, source):
"""Return all test items collected from the module.
This writes the source to a python file and runs pytest's
collection on the resulting module, returning all test items
contained within.
"""
modcol = self.getmodulecol(source)
return self.genitems([modcol])
def getmodulecol(self, source, configargs=(), withinit=False):
"""Return the module collection node for ``source``.
This writes ``source`` to a file using :py:meth:`makepyfile`
and then runs the pytest collection on it, returning the
collection node for the test module.
:param source: The source code of the module to collect.
:param configargs: Any extra arguments to pass to
:py:meth:`parseconfigure`.
:param withinit: Whether to also write a ``__init__.py`` file
to the temporarly directory to ensure it is a package.
"""
kw = {self.request.function.__name__: Source(source).strip()}
path = self.makepyfile(**kw)
if withinit:
self.makepyfile(__init__ = "#")
self.config = config = self.parseconfigure(path, *configargs)
node = self.getnode(config, path)
return node
def collect_by_name(self, modcol, name):
"""Return the collection node for name from the module collection.
This will search a module collection node for a collection
node matching the given name.
:param modcol: A module collection node, see
:py:meth:`getmodulecol`.
:param name: The name of the node to return.
"""
for colitem in modcol._memocollect():
if colitem.name == name:
return colitem
def popen(self, cmdargs, stdout, stderr, **kw):
"""Invoke subprocess.Popen.
This calls subprocess.Popen making sure the current working
directory is the PYTHONPATH.
You probably want to use :py:meth:`run` instead.
"""
env = os.environ.copy()
env['PYTHONPATH'] = os.pathsep.join(filter(None, [
str(os.getcwd()), env.get('PYTHONPATH', '')]))
kw['env'] = env
return subprocess.Popen(cmdargs,
stdout=stdout, stderr=stderr, **kw)
def run(self, *cmdargs):
"""Run a command with arguments.
Run a process using subprocess.Popen saving the stdout and
stderr.
Returns a :py:class:`RunResult`.
"""
return self._run(*cmdargs)
def _run(self, *cmdargs):
cmdargs = [str(x) for x in cmdargs]
p1 = self.tmpdir.join("stdout")
p2 = self.tmpdir.join("stderr")
print_("running:", ' '.join(cmdargs))
print_(" in:", str(py.path.local()))
f1 = codecs.open(str(p1), "w", encoding="utf8")
f2 = codecs.open(str(p2), "w", encoding="utf8")
try:
now = time.time()
popen = self.popen(cmdargs, stdout=f1, stderr=f2,
close_fds=(sys.platform != "win32"))
ret = popen.wait()
finally:
f1.close()
f2.close()
f1 = codecs.open(str(p1), "r", encoding="utf8")
f2 = codecs.open(str(p2), "r", encoding="utf8")
try:
out = f1.read().splitlines()
err = f2.read().splitlines()
finally:
f1.close()
f2.close()
self._dump_lines(out, sys.stdout)
self._dump_lines(err, sys.stderr)
return RunResult(ret, out, err, time.time()-now)
def _dump_lines(self, lines, fp):
try:
for line in lines:
py.builtin.print_(line, file=fp)
except UnicodeEncodeError:
print("couldn't print to %s because of encoding" % (fp,))
def _getpytestargs(self):
# we cannot use "(sys.executable,script)"
# because on windows the script is e.g. a pytest.exe
return (sys.executable, _pytest_fullpath,) # noqa
def runpython(self, script):
"""Run a python script using sys.executable as interpreter.
Returns a :py:class:`RunResult`.
"""
return self.run(sys.executable, script)
def runpython_c(self, command):
"""Run python -c "command", return a :py:class:`RunResult`."""
return self.run(sys.executable, "-c", command)
def runpytest_subprocess(self, *args, **kwargs):
"""Run pytest as a subprocess with given arguments.
Any plugins added to the :py:attr:`plugins` list will added
using the ``-p`` command line option. Addtionally
``--basetemp`` is used put any temporary files and directories
in a numbered directory prefixed with "runpytest-" so they do
not conflict with the normal numberd pytest location for
temporary files and directories.
Returns a :py:class:`RunResult`.
"""
p = py.path.local.make_numbered_dir(prefix="runpytest-",
keep=None, rootdir=self.tmpdir)
args = ('--basetemp=%s' % p, ) + args
#for x in args:
# if '--confcutdir' in str(x):
# break
#else:
# pass
# args = ('--confcutdir=.',) + args
plugins = [x for x in self.plugins if isinstance(x, str)]
if plugins:
args = ('-p', plugins[0]) + args
args = self._getpytestargs() + args
return self.run(*args)
def spawn_pytest(self, string, expect_timeout=10.0):
"""Run pytest using pexpect.
This makes sure to use the right pytest and sets up the
temporary directory locations.
The pexpect child is returned.
"""
basetemp = self.tmpdir.mkdir("pexpect")
invoke = " ".join(map(str, self._getpytestargs()))
cmd = "%s --basetemp=%s %s" % (invoke, basetemp, string)
return self.spawn(cmd, expect_timeout=expect_timeout)
def spawn(self, cmd, expect_timeout=10.0):
"""Run a command using pexpect.
The pexpect child is returned.
"""
pexpect = pytest.importorskip("pexpect", "3.0")
if hasattr(sys, 'pypy_version_info') and '64' in platform.machine():
pytest.skip("pypy-64 bit not supported")
if sys.platform == "darwin":
pytest.xfail("pexpect does not work reliably on darwin?!")
if sys.platform.startswith("freebsd"):
pytest.xfail("pexpect does not work reliably on freebsd")
logfile = self.tmpdir.join("spawn.out").open("wb")
child = pexpect.spawn(cmd, logfile=logfile)
self.request.addfinalizer(logfile.close)
child.timeout = expect_timeout
return child
def getdecoded(out):
try:
return out.decode("utf-8")
except UnicodeDecodeError:
return "INTERNAL not-utf8-decodeable, truncated string:\n%s" % (
py.io.saferepr(out),)
class LineComp:
def __init__(self):
self.stringio = py.io.TextIO()
def assert_contains_lines(self, lines2):
""" assert that lines2 are contained (linearly) in lines1.
return a list of extralines found.
"""
__tracebackhide__ = True
val = self.stringio.getvalue()
self.stringio.truncate(0)
self.stringio.seek(0)
lines1 = val.split("\n")
return LineMatcher(lines1).fnmatch_lines(lines2)
class LineMatcher:
"""Flexible matching of text.
This is a convenience class to test large texts like the output of
commands.
The constructor takes a list of lines without their trailing
newlines, i.e. ``text.splitlines()``.
"""
def __init__(self, lines):
self.lines = lines
self._log_output = []
def str(self):
"""Return the entire original text."""
return "\n".join(self.lines)
def _getlines(self, lines2):
if isinstance(lines2, str):
lines2 = Source(lines2)
if isinstance(lines2, Source):
lines2 = lines2.strip().lines
return lines2
def fnmatch_lines_random(self, lines2):
"""Check lines exist in the output.
The argument is a list of lines which have to occur in the
output, in any order. Each line can contain glob whildcards.
"""
lines2 = self._getlines(lines2)
for line in lines2:
for x in self.lines:
if line == x or fnmatch(x, line):
self._log("matched: ", repr(line))
break
else:
self._log("line %r not found in output" % line)
raise ValueError(self._log_text)
def get_lines_after(self, fnline):
"""Return all lines following the given line in the text.
The given line can contain glob wildcards.
"""
for i, line in enumerate(self.lines):
if fnline == line or fnmatch(line, fnline):
return self.lines[i+1:]
raise ValueError("line %r not found in output" % fnline)
def _log(self, *args):
self._log_output.append(' '.join((str(x) for x in args)))
@property
def _log_text(self):
return '\n'.join(self._log_output)
def fnmatch_lines(self, lines2):
"""Search the text for matching lines.
The argument is a list of lines which have to match and can
use glob wildcards. If they do not match an pytest.fail() is
called. The matches and non-matches are also printed on
stdout.
"""
lines2 = self._getlines(lines2)
lines1 = self.lines[:]
nextline = None
extralines = []
__tracebackhide__ = True
for line in lines2:
nomatchprinted = False
while lines1:
nextline = lines1.pop(0)
if line == nextline:
self._log("exact match:", repr(line))
break
elif fnmatch(nextline, line):
self._log("fnmatch:", repr(line))
self._log(" with:", repr(nextline))
break
else:
if not nomatchprinted:
self._log("nomatch:", repr(line))
nomatchprinted = True
self._log(" and:", repr(nextline))
extralines.append(nextline)
else:
self._log("remains unmatched: %r" % (line,))
pytest.fail(self._log_text)
| 34.889673 | 82 | 0.59279 |
304c007d7891c7976975f67f5c3aa42d9be01954 | 3,570 | py | Python | personal/Ervin/tf_collaborative_item.py | edervishaj/spotify-recsys-challenge | 4077201ac7e4ed9da433bd10a92c183614182437 | [
"Apache-2.0"
] | 3 | 2018-10-12T20:19:57.000Z | 2019-12-11T01:11:38.000Z | personal/Ervin/tf_collaborative_item.py | kiminh/spotify-recsys-challenge | 5e7844a77ce3c26658400f161d2d74d682f30e69 | [
"Apache-2.0"
] | null | null | null | personal/Ervin/tf_collaborative_item.py | kiminh/spotify-recsys-challenge | 5e7844a77ce3c26658400f161d2d74d682f30e69 | [
"Apache-2.0"
] | 4 | 2018-10-27T20:30:18.000Z | 2020-10-14T07:43:27.000Z | from recommenders.recommender import Recommender
from recommenders.similarity.s_plus import dot_product
from utils.datareader import Datareader
from utils.evaluator import Evaluator
from utils.post_processing import eurm_to_recommendation_list
from personal.Ervin.other_similarity import position_similarity
import time, sys
import numpy as np
from scipy import sparse as sps
from sklearn.preprocessing import normalize
class TF_collaborative_item(Recommender):
    """Item-based collaborative recommender using a TF-IDF weighted
    item-item similarity built from the user-rating matrix (URM).

    Workflow: fit(urm, pid) on the base class, then compute_model() to build
    the sparse item-item similarity, then compute_rating() to produce the
    estimated URM (eURM) of recommendation scores.
    """
    def __init__(self):
        # NOTE(review): a bare `super()` expression is a no-op -- the base
        # Recommender initializer is never invoked here.  State (self.urm,
        # self.pid) is presumably set by fit() -- confirm against Recommender.
        super()
    def compute_model(self, knn=100, verbose=False, power=1, save_model=False):
        """Build and return the sparse item-item TF-IDF similarity matrix.

        knn        -- number of nearest neighbours kept per item
        power      -- exponent applied element-wise to similarity values
        save_model -- if True, persist the matrix via scipy.sparse.save_npz
        Returns the similarity matrix, also stored in self.model.
        Note: start_time is only bound when verbose=True; the final timing
        print is guarded by the same flag, so this stays consistent.
        """
        if verbose:
            print("[ Creating model with item TF-IDF similarity ]")
            start_time = time.time()
        # Calculate DF[u] & IDF[u]
        # Binarise the URM so frequencies count interactions, not rating values.
        urm_bin = sps.csr_matrix(self.urm)
        urm_bin.data = np.ones(len(self.urm.data))
        dft = urm_bin.sum(axis=1).A1
        # assumes rows index the entity whose frequency is wanted and that
        # shape[1] is the matching collection size -- TODO confirm axis choice
        idft = np.log(self.urm.shape[1] / (dft + 1e-8))
        # dft = self.urm.sum(axis=1).A1
        # idft = np.log(self.urm.shape[1] / (dft + 1e-8))
        # Multiply each listened track with its respective idf
        URM_enhanced = self.urm.multiply(idft.reshape(-1,1)).tocsr()
        # Get the item similarity matrix (kNN-truncated dot product)
        self.model = dot_product(URM_enhanced.T, self.urm, k=knn, verbose=verbose)
        # Zero the diagonal so an item is never its own neighbour.
        self.model = self.model.tolil()
        self.model.setdiag(np.zeros(self.model.shape[0]))
        self.model = self.model.tocsr()
        self.model.eliminate_zeros()
        self.model.data = np.power(self.model.data, power)
        if save_model:
            if verbose:
                print('[ Saving the model ]')
            sps.save_npz('tf_idf_item_sim_' + str(knn), self.model)
        if verbose:
            print("time: " + str(int(time.time() - start_time) / 60))
        return self.model
    def compute_rating(self, top_k=500, verbose=False, small=False):
        """Estimate ratings (eURM) by propagating the URM through the model.

        top_k -- number of scored items kept per row
        small -- if True, restrict the URM to the test playlists in self.pid
        Returns the sparse eURM, also stored in self.eurm.
        """
        if small:
            self.urm = sps.csr_matrix(self.urm[self.pid])
        self.model = sps.csr_matrix(self.model)
        if verbose:
            print("[ Compute ratings ]")
            start_time = time.time()
        # Normalize the original URM (column-wise L1) to get pop for each track
        norm_urm = normalize(self.urm, axis=0, norm='l1')
        # dft = self.urm.sum(axis=0).A1
        # idft = np.log(self.urm.shape[0] / (dft + 1e-8))
        # idft = np.power(idft, 0.5)
        # norm_urm = self.urm.multiply(idft.reshape(1,-1)).tocsr()
        # Compute the eURM
        self.eurm = dot_product(norm_urm, self.model, k=top_k)
        self.eurm = sps.csr_matrix(self.eurm)
        if verbose:
            print("time: " + str(int(time.time() - start_time) / 60))
        return self.eurm
if __name__ == '__main__':
    # Offline evaluation entry point: build the model on the full URM,
    # score only the test playlists and evaluate the recommendations.
    dr = Datareader(verbose=False, mode='offline', only_load=True)
    urm = dr.get_urm(binary=False)
    pid = dr.get_test_pids()
    position_urm = dr.get_position_matrix(position_type='last')
    pos_urm = position_urm.T.tocoo().tocsr()  # currently unused: position similarity is commented out below
    ev = Evaluator(dr)
    knn = 100    # neighbours kept per item in the similarity model
    topk = 750   # scored items kept per playlist in the eURM
    rec = TF_collaborative_item()
    # for knn in range(50, 300, 50):
    rec.fit(urm, pid)
    rec.compute_model(verbose=True, knn=knn, power=0.6, save_model=False)
    # rec.model = rec.model.tocsr()
    # rec.model.eliminate_zeros()
    #
    # rec.model = position_similarity(rec.model, pos_urm, knn=knn, verbose=True, num_threads=64)
    rec.compute_rating(top_k=topk, verbose=True, small=True)
    ev.evaluate(recommendation_list=eurm_to_recommendation_list(rec.eurm, datareader=dr, remove_seed=True),
                name="TFIDF_item_"+str(knn), old_mode=False)
c6ac1900b4632977421ad0beaf5facddd23ff83a | 3,906 | py | Python | mostlikedtweet.py | kaamilmirza/UltimateTwitterScrapper | e19a8b1d2af41d0d78291bbecf192125b38c83fe | [
"MIT"
] | 1 | 2021-05-03T15:12:05.000Z | 2021-05-03T15:12:05.000Z | mostlikedtweet.py | kaamilmirza/UltimateTwitterScrapper | e19a8b1d2af41d0d78291bbecf192125b38c83fe | [
"MIT"
] | null | null | null | mostlikedtweet.py | kaamilmirza/UltimateTwitterScrapper | e19a8b1d2af41d0d78291bbecf192125b38c83fe | [
"MIT"
] | null | null | null | import tweepy
import pandas as pd
import re
from sys import stdin
import socket
import authenticate
socket.getaddrinfo('localhost', 8080) # sometimes crashes and gives weird errors this somehow fixed it
auth = authenticate.auth
auth.set_access_token(authenticate.twitterAPIAT,authenticate.twitterAPIATS)
twetterApi = tweepy.API(auth, wait_on_rate_limit=True)
def cleanUpTweet(txt):
    """Return *txt* with mentions, '#' marks, 'RT : ' markers and URLs removed.

    The substitutions run in a fixed order: mentions are stripped first, so a
    retweet header like 'RT @user: ' is reduced to 'RT : ' before the retweet
    marker itself is removed.
    """
    substitutions = (
        (r'@[A-Za-z0-9_]+', ''),               # mentions
        (r'#', ''),                            # hashtag symbol (tag text is kept)
        (r'RT : ', ''),                        # retweet marker left after mention removal
        (r'https?:\/\/[A-Za-z0-9\.\/]+', ''),  # urls
    )
    for pattern, replacement in substitutions:
        txt = re.sub(pattern, replacement, txt)
    return txt
print("The usernames you want to find likes for :")
def taking_input():
    """Read one Twitter screen name per line from stdin and hand them to main().

    A blank line terminates input; the trailing blank entry appended before
    the break is removed with pop() before calling main().
    NOTE(review): on EOF without a final blank line, pop() drops the last
    real username -- confirm whether that is intended.
    """
    username_list = []
    for line in stdin:
        username_list.append(line.rstrip())
        if line == '\n':  # blank line is the end-of-input sentinel
            break
    username_list.pop()
    main(username_list)
def main(username_list):
    """Fetch recent tweets for every user in *username_list* and print the
    ten most liked and the ten most retweeted ones.

    The number of tweets to fetch per user is asked interactively; results
    from all users are accumulated into a single ranking.  Side effects:
    Twitter API calls through the module-level ``twetterApi`` client and
    console output.
    """
    count_tweet_list = []    # [likes, text] rows, accumulated over all users
    count_retweet_list = []  # [retweets, text] rows, accumulated over all users
    for user in username_list:
        numberOfTweets = input("The number of tweets for user:{} you want to be picked:".format(user))
        print("Please wait...getting tweets of {}".format(user))
        for tweet in tweepy.Cursor(twetterApi.user_timeline,
                                   screen_name=user,
                                   since_id=None,
                                   max_id=None,
                                   trim_user=True,
                                   exclude_replies=True,
                                   contributor_details=False,
                                   include_entities=True
                                   ).items(int(numberOfTweets)):
            clean_tweet = cleanUpTweet(tweet._json['text'])
            count_tweet_list.append([tweet._json['favorite_count'], clean_tweet])
            count_retweet_list.append([tweet._json['retweet_count'], clean_tweet])
    df = pd.DataFrame(count_tweet_list, columns=['Number Of Likes', 'The tweets'])
    df_retweet = pd.DataFrame(count_retweet_list, columns=['Number of retweets', 'The tweets'])
    choice = input("If you want just top likes and retweets of dataframe enter 1 :")
    if choice == str(1):
        _print_top(df, df_retweet)
    else:
        # Disable pandas row/column truncation so the full frames are shown.
        # BUG FIX: the original sorted `df` by 'Number of retweets' here -- a
        # column that exists only in `df_retweet` -- raising KeyError, which
        # the old (dead) `except ValueError` never caught.
        with pd.option_context('display.max_rows', None, 'display.max_columns', None):
            _print_top(df, df_retweet)


def _print_top(df_likes, df_retweets):
    """Print the top-10 rows of both rankings, most popular first."""
    print("The data regarding likes:")
    print(df_likes.sort_values(by="Number Of Likes", ascending=False).head(10))
    print("The data regarding retweets:")
    print(df_retweets.sort_values(by="Number of retweets", ascending=False).head(10))
taking_input() | 43.88764 | 103 | 0.585253 |
f37987abfcc063406cebd390fb137c939925ef4d | 4,854 | py | Python | test/test_ctrl/WaterPapCtrl_mis_feat.py | marc2332/sardana | 48dc9191baaa63f6c714d8c025e8f3f96548ad26 | [
"CC-BY-3.0"
] | 43 | 2016-11-25T15:21:23.000Z | 2021-08-20T06:09:40.000Z | test/test_ctrl/WaterPapCtrl_mis_feat.py | marc2332/sardana | 48dc9191baaa63f6c714d8c025e8f3f96548ad26 | [
"CC-BY-3.0"
] | 1,263 | 2016-11-25T15:58:37.000Z | 2021-11-02T22:23:47.000Z | test/test_ctrl/WaterPapCtrl_mis_feat.py | marc2332/sardana | 48dc9191baaa63f6c714d8c025e8f3f96548ad26 | [
"CC-BY-3.0"
] | 58 | 2016-11-21T11:33:55.000Z | 2021-09-01T06:21:21.000Z | import PyTango
import socket
import MotorController
class IcePapController(MotorController.MotorController):
    """Sardana/Pool motor controller stub for an IcePap system (Python 2).

    Connection parameters (host, port, timeout) are read from the Tango
    database; the actual socket connection and hardware commands are
    commented out, so the motion hooks only log their invocation and
    return fixed values.  Useful for exercising the pool without hardware.
    """
    ctrl_features = ['Encoder', 'CanDoBacklash']
    ctrl_extra_attributes = {'First_extra': {'Type': 'PyTango.DevDouble', 'R/W Type': 'PyTango.READ_WRITE'},
                             'Second_extra': {'Type': 'PyTango.DevLong', 'R/W Type': 'PyTango.READ'}}
    # maximum number of axes this controller instance can manage
    MaxDevice = 11
    def __init__(self, inst, props):
        """Fetch host/port/timeout from the Tango DB; socket setup is disabled."""
        print "PYTHON -> IcePapController ctor for instance", inst
        MotorController.MotorController.__init__(self, inst, props)
        self.nb_call = 0
        self.socket_connected = False
        self.db = PyTango.Database()
        self.ct_name = "IcePapController/" + self.inst_name
        #
        # Get controller properties
        #
        prop_list = ['host', 'port', 'timeout']
        prop = self.db.get_property(self.ct_name, prop_list)
        if len(prop["host"]) != 0:
            self.host = prop["host"][0]
        else:
            print "Property host not defined for controller", self.ct_name
            self.host = "nada"
        if len(prop["port"]) != 0:
            self.port = int(prop["port"][0])
        else:
            print "Property port not defined for controller", self.ct_name
            self.port = 0
        if len(prop["timeout"]) != 0:
            self.timeout = int(prop["timeout"][0])
        else:
            print "Property timeout not defined for controller", self.ct_name
            self.timeout = 3
        #
        # Connect to the icepap (disabled in this stub)
        #
        print "PYTHON -> IcePap on", self.host, " and port", self.port, " with timeout = ", self.timeout
        # self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # self.sock.settimeout(self.timeout)
        # self.sock.connect(("icepap", self.port))
        # self.socket_connected = True
        print "PYTHON -> Connected to", self.host, " on port", self.port
        #
        # Check that the Icepap is OK
        #
        # ans = self.IceWriteRead("?ID")
    def AddDevice(self, axis):
        """Hook called when an axis is added to the pool (log only)."""
        print "PYTHON -> IcePapController/", self.inst_name, ": In AddDevice method for axis", axis
        # raise RuntimeError,"Hola la la"
    def DeleteDevice(self, axis):
        """Hook called when an axis is removed from the pool (log only)."""
        print "PYTHON -> IcePapController/", self.inst_name, ": In DeleteDevice method for axis", axis
    def StateOne(self, axis):
        """Return a (state, status) tuple for *axis*; always ON in this stub."""
        print "PYTHON -> IcePapController/", self.inst_name, ": In StateOne method for axis", axis
        tup = (PyTango.DevState.ON, 0)
        return tup
    def PreReadAll(self):
        """Called once before a position read cycle (log only)."""
        print "PYTHON -> IcePapController/", self.inst_name, ": In PreReadAll method"
    def PreReadOne(self, axis):
        """Called per axis before a position read (log only)."""
        print "PYTHON -> IcePapController/", self.inst_name, ": In PreReadOne method for axis", axis
    def ReadAll(self):
        """Bulk position read (log only)."""
        print "PYTHON -> IcePapController/", self.inst_name, ": In ReadAll method"
    def ReadOne(self, axis):
        """Return the position of *axis*; fixed dummy value in this stub."""
        print "PYTHON -> IcePapController/", self.inst_name, ": In ReadOne method for axis", axis
        return 123
    def PreStartAll(self):
        """Called once before starting a motion group (log only)."""
        print "PYTHON -> IcePapController/", self.inst_name, ": In PreStartAll method"
    def PreStartOne(self, axis, pos):
        """Per-axis pre-motion check; returning True accepts the request."""
        print "PYTHON -> IcePapController/", self.inst_name, ": In PreStartOne method for axis", axis, " with pos", pos
        return True
    def StartOne(self, axis, pos):
        """Start the motion of *axis* towards *pos* (log only)."""
        print "PYTHON -> IcePapController/", self.inst_name, ": In StartOne method for axis", axis, " with pos", pos
    def StartAll(self):
        """Trigger the motion prepared by the StartOne calls (log only)."""
        print "PYTHON -> IcePapController/", self.inst_name, ": In StartAll method"
    def SetPar(self, axis, name, value):
        """Set a motor parameter for *axis* (log only)."""
        print "PYTHON -> IcePapController/", self.inst_name, ": In SetPar method for axis", axis, " name=", name, " value=", value
    def GetPar(self, axis, name):
        """Get a motor parameter for *axis*; fixed dummy value in this stub."""
        print "PYTHON -> IcePapController/", self.inst_name, ": In GetPar method for axis", axis, " name=", name
        return 12.34
    def IceWrite(self, data):
        """Send *data*, newline terminated, over the IcePap socket."""
        data = data + "\n"
        byteSent = self.sock.send(data)
        print "PYTHON -> Sent", byteSent, "bytes to icepap"
    def IceWriteRead(self, data):
        """Send *data* and return the IcePap reply (up to 1024 bytes)."""
        self.IceWrite(data)
        byteReceived = self.sock.recv(1024)
        print "PYTHON -> Icepap answered:", byteReceived
        return byteReceived
    def IceResetFifo(self):
        """Reset the IcePap command FIFO."""
        self.IceWrite("fiforst")
    def IceCheckError(self, ice_answer):
        """If *ice_answer* contains 'ERROR', query and print the error detail."""
        if (ice_answer.find("ERROR") != -1):
            new_ans = self.IceWriteRead("?ERR 1")
            print "Error from IcePap =", new_ans
    def __del__(self):
        """Destructor: reset the FIFO and close the socket if connected."""
        print "PYTHON -> IcePapController/", self.inst_name, ": Aarrrrrg, I am dying"
        #
        # Reset IcePap FIFO
        #
        if (self.socket_connected):
            print "PYTHON -> Closing connection"
            self.IceResetFifo()
            self.sock.close()
if __name__ == "__main__":
    # Smoke test: construct the controller stand-alone (no Sardana pool).
    obj = IcePapController('test')
    # obj.AddDevice(2)
    # obj.DeleteDevice(2)
| 32.797297 | 130 | 0.616811 |
d91d95e90e0879634b44369d5de7646c72f72ad0 | 4,254 | py | Python | galileo/framework/pytorch/python/transforms/rw_neg.py | YaoPu2021/galileo | 0ebee2052bf78205f93f8cbbe0e2884095dd7af7 | [
"Apache-2.0"
] | 115 | 2021-09-09T03:01:58.000Z | 2022-03-30T10:46:26.000Z | galileo/framework/pytorch/python/transforms/rw_neg.py | Hacky-DH/galileo | e4d5021f0287dc879730dfa287b9a056f152f712 | [
"Apache-2.0"
] | 1 | 2021-12-09T07:34:41.000Z | 2021-12-20T06:24:27.000Z | galileo/framework/pytorch/python/transforms/rw_neg.py | Hacky-DH/galileo | e4d5021f0287dc879730dfa287b9a056f152f712 | [
"Apache-2.0"
] | 28 | 2021-09-10T08:47:20.000Z | 2022-03-17T07:29:26.000Z | # Copyright 2020 JD.com, Inc. Galileo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import torch
from galileo.framework.python.base_transform import BaseTransform
from galileo.framework.pytorch.python.ops import PTOps as ops
from galileo.platform.export import export
@export('galileo.pytorch')
class RandomWalkNegTransform(BaseTransform):
    r'''
    \brief randomwalk with negative sampling

    Produces (target, context) vertex pairs via biased random walks and,
    for every pair, a row of globally sampled negative vertices.

    \par example
    \code{.py}
    >>> from galileo.pytorch import RandomWalkNegTransform
    >>> transform = RandomWalkNegTransform([0],[0],3,2,1,
    ...     walk_length=3).transform
    >>> res = transform([2,4,6])
    >>> res.keys()
    dict_keys(['target', 'context', 'negative'])
    >>> res['target'].shape
    torch.Size([30, 1])
    >>> res['context'].shape
    torch.Size([30, 1])
    >>> res['negative'].shape
    torch.Size([30, 3])
    \endcode
    '''
    def __init__(self,
                 vertex_type: list,
                 edge_types: list,
                 negative_num: int,
                 context_size: int,
                 repetition: int = 1,
                 walk_p: float = 1.,
                 walk_q: float = 1.,
                 walk_length=None,
                 metapath=None,
                 **kwargs):
        r'''
        \param vertex_type vertex types used for negative sampling
        \param edge_types edge types followed by the walk
        \param negative_num negatives sampled per (target, context) pair
        \param context_size window size used to form pairs from a walk
        \param repetition number of walks started per input vertex
        \param walk_p return parameter of the biased walk
        \param walk_q in-out parameter of the biased walk
        \param walk_length number of steps (ignored when metapath is given)
        \param metapath explicit list of edge-type lists, one per step
        '''
        if walk_length is None and metapath is None:
            raise ValueError(
                'one of walk_length and metapath must be specified')
        # a plain walk of walk_length steps == a metapath repeating edge_types
        if metapath is None:
            metapath = [edge_types] * walk_length
        config = dict(
            vertex_type=vertex_type,
            edge_types=edge_types,
            negative_num=negative_num,
            context_size=context_size,
            repetition=repetition,
            walk_p=walk_p,
            walk_q=walk_q,
            walk_length=walk_length,
            metapath=metapath,
        )
        super().__init__(config=config)
    def transform(self, inputs):
        r'''
        \param inputs vertices (tensor or any sequence convertible to one)
        \return dict(target=tensor,context=tensor,negative=tensor)
        '''
        vertex_type = self.config['vertex_type']
        context_size = self.config['context_size']
        negative_num = self.config['negative_num']
        repetition = self.config['repetition']
        walk_p = self.config['walk_p']
        walk_q = self.config['walk_q']
        metapath = self.config['metapath']
        if not torch.is_tensor(inputs):
            inputs = torch.tensor(inputs)
        # walks start from every input vertex regardless of input shape
        vertices = inputs.flatten().contiguous()
        pair = ops.sample_pairs_by_random_walk(vertices=vertices,
                                               metapath=metapath,
                                               repetition=repetition,
                                               context_size=context_size,
                                               p=walk_p,
                                               q=walk_q)
        if pair is None:
            raise ValueError(
                'Error sample pair random walk, see logs for details')
        # pair columns are (target, context); split keeps the trailing dim of 1
        target, context = torch.split(pair, [1, 1], dim=-1)
        # one row of negative_num globally sampled vertices per pair
        negative = ops.sample_vertices(types=vertex_type,
                                       count=negative_num * pair.size(0))[0]
        negative = negative.view(pair.size(0), negative_num)
        return {
            'target': target.contiguous(),
            'context': context.contiguous(),
            'negative': negative.contiguous()
        }
| 35.747899 | 80 | 0.561354 |
5593d573aae2463e834733a8620c0568f9670955 | 3,594 | py | Python | settings.py | KirkMartinez/docker-imagr_server | 0571c726ec2d18fce5dae3ba24085dfdcd0683a5 | [
"Apache-2.0"
] | 2 | 2016-04-07T19:14:39.000Z | 2021-03-12T20:59:48.000Z | settings.py | KirkMartinez/docker-imagr_server | 0571c726ec2d18fce5dae3ba24085dfdcd0683a5 | [
"Apache-2.0"
] | 1 | 2019-04-12T17:37:48.000Z | 2019-04-12T17:37:48.000Z | settings.py | KirkMartinez/docker-imagr_server | 0571c726ec2d18fce5dae3ba24085dfdcd0683a5 | [
"Apache-2.0"
] | 2 | 2016-02-04T16:52:51.000Z | 2019-05-28T19:27:30.000Z | """
Django settings for imagr_server project.
Generated by 'django-admin startproject' using Django 1.8.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
from settings_import import ADMINS, TIME_ZONE, LANGUAGE_CODE, ALLOWED_HOSTS, DISPLAY_NAME
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '#6*8&7d70yd2$&&j9&z6mnlk_dw4=%a)9q86%o+few3!5eymgm'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.admindocs',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'bootstrap3',
'imagr_server',
'report',
'dashboard'
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'imagr_server.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'imagr_server.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'imagr_server.sqlite3'),
}
}
# PG Database
# Switch to the linked PostgreSQL container when its docker env vars exist.
# FIX: dict.has_key() was removed in Python 3; membership test uses `in`,
# which is valid on both Python 2 and Python 3.
if 'DB_PORT_5432_TCP_ADDR' in os.environ:
    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.postgresql_psycopg2',
            'NAME': os.environ['DB_NAME'],
            'USER': os.environ['DB_USER'],
            'PASSWORD': os.environ['DB_PASS'],
            'HOST': os.environ['DB_PORT_5432_TCP_ADDR'],
            'PORT': os.environ['DB_PORT_5432_TCP_PORT'],
        }
    }
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATICFILES_DIRS = (
    # Put strings here, like "/home/html/static" or "C:/www/django/static".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
    os.path.join(BASE_DIR, 'site_static'),
)
# Authentication redirects used by django.contrib.auth views.
LOGIN_URL='/login/'
LOGIN_REDIRECT_URL='/'
9b1526e6f84bcfe9396b386e78579c3109d4354e | 30,590 | py | Python | donkeycar/templates/cfg_complete.py | luigidiguglielmo-devel/donkeycar | b99de845b7a68f8e5deb2eb488f27964a17302b9 | [
"MIT"
] | null | null | null | donkeycar/templates/cfg_complete.py | luigidiguglielmo-devel/donkeycar | b99de845b7a68f8e5deb2eb488f27964a17302b9 | [
"MIT"
] | null | null | null | donkeycar/templates/cfg_complete.py | luigidiguglielmo-devel/donkeycar | b99de845b7a68f8e5deb2eb488f27964a17302b9 | [
"MIT"
] | null | null | null | """
CAR CONFIG
This file is read by your car application's manage.py script to change the car
performance.
EXAMPLE
-----------
import dk
cfg = dk.load_config(config_path='~/mycar/config.py')
print(cfg.CAMERA_RESOLUTION)
"""
import os
#PATHS
CAR_PATH = PACKAGE_PATH = os.path.dirname(os.path.realpath(__file__))
DATA_PATH = os.path.join(CAR_PATH, 'data')
MODELS_PATH = os.path.join(CAR_PATH, 'models')
#VEHICLE
DRIVE_LOOP_HZ = 20 # the vehicle loop will pause if faster than this speed.
MAX_LOOPS = None # the vehicle loop can abort after this many iterations, when given a positive integer.
#CAMERA
CAMERA_TYPE = "PICAM" # (PICAM|WEBCAM|CVCAM|CSIC|V4L|D435|MOCK|IMAGE_LIST)
IMAGE_W = 160
IMAGE_H = 120
IMAGE_DEPTH = 3 # default RGB=3, make 1 for mono
CAMERA_FRAMERATE = DRIVE_LOOP_HZ
CAMERA_VFLIP = False
CAMERA_HFLIP = False
CAMERA_INDEX = 0 # used for 'WEBCAM' and 'CVCAM' when there is more than one camera connected
# For CSIC camera - If the camera is mounted in a rotated position, changing the below parameter will correct the output frame orientation
CSIC_CAM_GSTREAMER_FLIP_PARM = 0 # (0 => none , 4 => Flip horizontally, 6 => Flip vertically)
# For IMAGE_LIST camera
# PATH_MASK = "~/mycar/data/tub_1_20-03-12/*.jpg"
#9865, over rides only if needed, ie. TX2..
PCA9685_I2C_ADDR = 0x40 #I2C address, use i2cdetect to validate this number
PCA9685_I2C_BUSNUM = None #None will auto detect, which is fine on the pi. But other platforms should specify the bus num.
#SSD1306_128_32
USE_SSD1306_128_32 = False # Enable the SSD_1306 OLED Display
SSD1306_128_32_I2C_ROTATION = 0 # 0 = text is right-side up, 1 = rotated 90 degrees clockwise, 2 = 180 degrees (flipped), 3 = 270 degrees
SSD1306_RESOLUTION = 1 # 1 = 128x32; 2 = 128x64
#
# DRIVE_TRAIN_TYPE
# These options specify which chassis and motor setup you are using.
# See Actuators documentation https://docs.donkeycar.com/parts/actuators/
# for a detailed explanation of each drive train type and it's configuration.
# Choose one of the following and then update the related configuration section:
#
# "PWM_STEERING_THROTTLE" uses two PWM output pins to control a steering servo and an ESC, as in a standard RC car.
# "PWM_STEERING_DC_TWO_L298N" uses one PWM output pin to control a steering servo and
# two PWM output pins to control HBridge motor driver in 3pin mode for two drive motors (left and right).
# "MM1" Robo HAT MM1 board
# "SERVO_HBRIDGE_2PIN" Servo for steering and HBridge motor driver in 2pin mode for motor
# "SERVO_HBRIDGE_3PIN" Servo for steering and HBridge motor driver in 3pin mode for motor
# "DC_STEER_THROTTLE" uses HBridge pwm to control one steering dc motor, and one drive wheel motor
# "DC_TWO_WHEEL" uses HBridge in two-pin mode to control two drive motors, one on the left, and one on the right.
# "DC_TWO_WHEEL_L298N" using HBridge in three-pin mode to control two drive motors, one of the left and one on the right.
# "MOCK" no drive train. This can be used to test other features in a test rig.
# (deprecated) "SERVO_HBRIDGE_PWM" use ServoBlaster to output pwm control from the PiZero directly to control steering, and HBridge for a drive motor.
# (deprecated) "PIGPIO_PWM" uses Raspberrys internal PWM
# (deprecated) "I2C_SERVO" uses PCA9685 servo controller to control a steering servo and an ESC, as in a standard RC car
#
DRIVE_TRAIN_TYPE = "PWM_STEERING_THROTTLE"
#
# PWM_STEERING_THROTTLE
#
# Drive train for RC car with a steering servo and ESC.
# Uses a PwmPin for steering (servo) and a second PwmPin for throttle (ESC)
# Base PWM Frequency is presumed to be 60hz; use PWM_xxxx_SCALE to adjust pulse width for non-standard PWM frequencies
#
PWM_STEERING_PIN = "PCA9685.1:40.1" # PWM output pin for steering servo
PWM_STEERING_SCALE = 1.0 # used to compensate for PWM frequency differents from 60hz; NOT for adjusting steering range
PWM_STEERING_INVERTED = False # True if hardware requires an inverted PWM pulse
PWM_THROTTLE_PIN = "PCA9685.1:40.0" # PWM output pin for ESC
PWM_THROTTLE_SCALE = 1.0 # used to compensate for PWM frequence differences from 60hz; NOT for increasing/limiting speed
PWM_THROTTLE_INVERTED = False # True if hardware requires an inverted PWM pulse
#STEERING FOR PWM_STEERING_THROTTLE (and deprecated I2C_SERVO)
STEERING_CHANNEL = 1 #(deprecated) channel on the 9685 pwm board 0-15
STEERING_LEFT_PWM = 460 #pwm value for full left steering
STEERING_RIGHT_PWM = 290 #pwm value for full right steering
#STEERING FOR PWM_STEERING_THROTTLE (and deprecated PIGPIO_PWM OUTPUT)
STEERING_PWM_PIN = 13 #(deprecated) Pin numbering according to Broadcom numbers
STEERING_PWM_FREQ = 50 #Frequency for PWM
STEERING_PWM_INVERTED = False #If PWM needs to be inverted
#THROTTLE FOR PWM_STEERING_THROTTLE (and deprecated I2C_SERVO)
THROTTLE_CHANNEL = 0 #(deprecated) channel on the 9685 pwm board 0-15
THROTTLE_FORWARD_PWM = 500 #pwm value for max forward throttle
THROTTLE_STOPPED_PWM = 370 #pwm value for no movement
THROTTLE_REVERSE_PWM = 220 #pwm value for max reverse throttle
#THROTTLE FOR PWM_STEERING_THROTTLE (and deprecated PIGPIO_PWM OUTPUT)
THROTTLE_PWM_PIN = 18 #(deprecated) Pin numbering according to Broadcom numbers
THROTTLE_PWM_FREQ = 50 #Frequency for PWM
THROTTLE_PWM_INVERTED = False #If PWM needs to be inverted
#
# SERVO_HBRIDGE_2PIN
# - configures a steering servo and an HBridge in 2pin mode (2 pwm pins)
# - Servo takes a standard servo PWM pulse between 1 millisecond (fully reverse)
# and 2 milliseconds (full forward) with 1.5ms being neutral.
# - the motor is controlled by two pwm pins,
# one for forward and one for backward (reverse).
# - the pwm pin produces a duty cycle from 0 (completely LOW)
# to 1 (100% completely high), which is proportional to the
# amount of power delivered to the motor.
# - in forward mode, the reverse pwm is 0 duty_cycle,
# in backward mode, the forward pwm is 0 duty cycle.
# - both pwms are 0 duty cycle (LOW) to 'detach' motor and
# and glide to a stop.
# - both pwms are full duty cycle (100% HIGH) to brake
#
# Pin specifier string format:
# - use RPI_GPIO for RPi/Nano header pin output
# - use BOARD for board pin numbering
# - use BCM for Broadcom GPIO numbering
# - for example "RPI_GPIO.BOARD.18"
# - use PIPGIO for RPi header pin output using pigpio server
# - must use BCM (broadcom) pin numbering scheme
# - for example, "PIGPIO.BCM.13"
# - use PCA9685 for PCA9685 pin output
# - include colon separated I2C channel and address
# - for example "PCA9685.1:40.13"
# - RPI_GPIO, PIGPIO and PCA9685 can be mixed arbitrarily,
# although it is discouraged to mix RPI_GPIO and PIGPIO.
#
HBRIDGE_2PIN_DUTY_FWD = "RPI_GPIO.BOARD.18" # provides forward duty cycle to motor
HBRIDGE_2PIN_DUTY_BWD = "RPI_GPIO.BOARD.16" # provides reverse duty cycle to motor
PWM_STEERING_PIN = "RPI_GPIO.BOARD.33" # provides servo pulse to steering servo
PWM_STEERING_SCALE = 1.0 # used to compensate for PWM frequency differents from 60hz; NOT for adjusting steering range
PWM_STEERING_INVERTED = False # True if hardware requires an inverted PWM pulse
STEERING_LEFT_PWM = 460 # pwm value for full left steering (use `donkey calibrate` to measure value for your car)
STEERING_RIGHT_PWM = 290 # pwm value for full right steering (use `donkey calibrate` to measure value for your car)
#
# SERVO_HBRIDGE_3PIN
# - configures a steering servo and an HBridge in 3pin mode (2 ttl pins, 1 pwm pin)
# - Servo takes a standard servo PWM pulse between 1 millisecond (fully reverse)
# and 2 milliseconds (full forward) with 1.5ms being neutral.
# - the motor is controlled by three pins,
# one ttl output for forward, one ttl output
# for backward (reverse) enable and one pwm pin
# for motor power.
# - the pwm pin produces a duty cycle from 0 (completely LOW)
# to 1 (100% completely high), which is proportional to the
# amount of power delivered to the motor.
# - in forward mode, the forward pin is HIGH and the
# backward pin is LOW,
# - in backward mode, the forward pin is LOW and the
# backward pin is HIGH.
# - both forward and backward pins are LOW to 'detach' motor
# and glide to a stop.
# - both forward and backward pins are HIGH to brake
#
# Pin specifier string format:
# - use RPI_GPIO for RPi/Nano header pin output
# - use BOARD for board pin numbering
# - use BCM for Broadcom GPIO numbering
# - for example "RPI_GPIO.BOARD.18"
# - use PIPGIO for RPi header pin output using pigpio server
# - must use BCM (broadcom) pin numbering scheme
# - for example, "PIGPIO.BCM.13"
# - use PCA9685 for PCA9685 pin output
# - include colon separated I2C channel and address
# - for example "PCA9685.1:40.13"
# - RPI_GPIO, PIGPIO and PCA9685 can be mixed arbitrarily,
# although it is discouraged to mix RPI_GPIO and PIGPIO.
#
HBRIDGE_3PIN_FWD = "RPI_GPIO.BOARD.18" # ttl pin, high enables motor forward
HBRIDGE_3PIN_BWD = "RPI_GPIO.BOARD.16" # ttl pin, highenables motor reverse
HBRIDGE_3PIN_DUTY = "RPI_GPIO.BOARD.35" # provides duty cycle to motor
PWM_STEERING_PIN = "RPI_GPIO.BOARD.33" # provides servo pulse to steering servo
PWM_STEERING_SCALE = 1.0 # used to compensate for PWM frequency differents from 60hz; NOT for adjusting steering range
PWM_STEERING_INVERTED = False # True if hardware requires an inverted PWM pulse
STEERING_LEFT_PWM = 460 # pwm value for full left steering (use `donkey calibrate` to measure value for your car)
STEERING_RIGHT_PWM = 290 # pwm value for full right steering (use `donkey calibrate` to measure value for your car)
#
# DC_STEER_THROTTLE with one motor as steering, one as drive
# - uses L298N type motor controller in two pin wiring
# scheme utilizing two pwm pins per motor; one for
# forward(or right) and one for reverse (or left)
#
# GPIO pin configuration for the DRIVE_TRAIN_TYPE=DC_STEER_THROTTLE
# - use RPI_GPIO for RPi/Nano header pin output
# - use BOARD for board pin numbering
# - use BCM for Broadcom GPIO numbering
# - for example "RPI_GPIO.BOARD.18"
# - use PIPGIO for RPi header pin output using pigpio server
# - must use BCM (broadcom) pin numbering scheme
# - for example, "PIGPIO.BCM.13"
# - use PCA9685 for PCA9685 pin output
# - include colon separated I2C channel and address
# - for example "PCA9685.1:40.13"
# - RPI_GPIO, PIGPIO and PCA9685 can be mixed arbitrarily,
# although it is discouraged to mix RPI_GPIO and PIGPIO.
#
HBRIDGE_PIN_LEFT = "RPI_GPIO.BOARD.18" # pwm pin produces duty cycle for steering left
HBRIDGE_PIN_RIGHT = "RPI_GPIO.BOARD.16" # pwm pin produces duty cycle for steering right
HBRIDGE_PIN_FWD = "RPI_GPIO.BOARD.15" # pwm pin produces duty cycle for forward drive
HBRIDGE_PIN_BWD = "RPI_GPIO.BOARD.13" # pwm pin produces duty cycle for reverse drive
#
# DC_TWO_WHEEL pin configuration
# - configures L298N_HBridge_2pin driver
# - two wheels as differential drive, left and right.
# - each wheel is controlled by two pwm pins,
# one for forward and one for backward (reverse).
# - each pwm pin produces a duty cycle from 0 (completely LOW)
# to 1 (100% completely high), which is proportional to the
# amount of power delivered to the motor.
# - in forward mode, the reverse pwm is 0 duty_cycle,
# in backward mode, the forward pwm is 0 duty cycle.
# - both pwms are 0 duty cycle (LOW) to 'detach' motor and
# and glide to a stop.
# - both pwms are full duty cycle (100% HIGH) to brake
#
# Pin specifier string format:
# - use RPI_GPIO for RPi/Nano header pin output
# - use BOARD for board pin numbering
# - use BCM for Broadcom GPIO numbering
# - for example "RPI_GPIO.BOARD.18"
# - use PIPGIO for RPi header pin output using pigpio server
# - must use BCM (broadcom) pin numbering scheme
# - for example, "PIGPIO.BCM.13"
# - use PCA9685 for PCA9685 pin output
# - include colon separated I2C channel and address
# - for example "PCA9685.1:40.13"
# - RPI_GPIO, PIGPIO and PCA9685 can be mixed arbitrarily,
# although it is discouraged to mix RPI_GPIO and PIGPIO.
#
HBRIDGE_PIN_LEFT_FWD = "RPI_GPIO.BOARD.18" # pwm pin produces duty cycle for left wheel forward
HBRIDGE_PIN_LEFT_BWD = "RPI_GPIO.BOARD.16" # pwm pin produces duty cycle for left wheel reverse
HBRIDGE_PIN_RIGHT_FWD = "RPI_GPIO.BOARD.15" # pwm pin produces duty cycle for right wheel forward
HBRIDGE_PIN_RIGHT_BWD = "RPI_GPIO.BOARD.13" # pwm pin produces duty cycle for right wheel reverse
#
# DC_TWO_WHEEL_L298N pin configuration
# - configures L298N_HBridge_3pin driver
# - two wheels as differential drive, left and right.
# - each wheel is controlled by three pins,
# one ttl output for forward, one ttl output
# for backward (reverse) enable and one pwm pin
# for motor power.
# - the pwm pin produces a duty cycle from 0 (completely LOW)
# to 1 (100% completely high), which is proportional to the
# amount of power delivered to the motor.
# - in forward mode, the forward pin is HIGH and the
# backward pin is LOW,
# - in backward mode, the forward pin is LOW and the
# backward pin is HIGH.
# - both forward and backward pins are LOW to 'detach' motor
# and glide to a stop.
# - both forward and backward pins are HIGH to brake
#
# GPIO pin configuration for the DRIVE_TRAIN_TYPE=DC_TWO_WHEEL_L298N
# - use RPI_GPIO for RPi/Nano header pin output
# - use BOARD for board pin numbering
# - use BCM for Broadcom GPIO numbering
# - for example "RPI_GPIO.BOARD.18"
# - use PIPGIO for RPi header pin output using pigpio server
# - must use BCM (broadcom) pin numbering scheme
# - for example, "PIGPIO.BCM.13"
# - use PCA9685 for PCA9685 pin output
# - include colon separated I2C channel and address
# - for example "PCA9685.1:40.13"
# - RPI_GPIO, PIGPIO and PCA9685 can be mixed arbitrarily,
# although it is discouraged to mix RPI_GPIO and PIGPIO.
#
HBRIDGE_L298N_PIN_LEFT_FWD = "RPI_GPIO.BOARD.16" # TTL output pin enables left wheel forward
HBRIDGE_L298N_PIN_LEFT_BWD = "RPI_GPIO.BOARD.18" # TTL output pin enables left wheel reverse
HBRIDGE_L298N_PIN_LEFT_EN = "RPI_GPIO.BOARD.22" # PWM pin generates duty cycle for left motor speed
HBRIDGE_L298N_PIN_RIGHT_FWD = "RPI_GPIO.BOARD.15" # TTL output pin enables right wheel forward
HBRIDGE_L298N_PIN_RIGHT_BWD = "RPI_GPIO.BOARD.13" # TTL output pin enables right wheel reverse
HBRIDGE_L298N_PIN_RIGHT_EN = "RPI_GPIO.BOARD.11" # PWM pin generates duty cycle for right wheel speed
# ODOMETRY
HAVE_ODOM = False       # Do you have an odometer/encoder
ENCODER_TYPE = 'GPIO'   # What kind of encoder? GPIO|Arduino|Astar
MM_PER_TICK = 12.7625   # How much travel with a single tick, in mm. Roll your car a meter and divide total ticks measured by 1,000
ODOM_PIN = 13           # if using GPIO, which GPIO board mode pin to use as input
ODOM_DEBUG = False      # Write out values on vel and distance as it runs

# LIDAR
USE_LIDAR = False
LIDAR_TYPE = 'RP'  # (RP|YD)
LIDAR_LOWER_LIMIT = 90  # angles that will be recorded. Use this to block out obstructed areas on your car, or looking backwards. Note that for the RP A1M8 Lidar, "0" is in the direction of the motor
LIDAR_UPPER_LIMIT = 270
# TRAINING
# The default AI framework to use. Choose from (tensorflow|pytorch)
DEFAULT_AI_FRAMEWORK = 'tensorflow'

# The DEFAULT_MODEL_TYPE will choose which model will be created at training
# time. This chooses between different neural network designs. You can
# override this setting by passing the command line parameter --type to the
# python manage.py train and drive commands.
# tensorflow models: (linear|categorical|tflite_linear|tensorrt_linear)
# pytorch models: (resnet18)
DEFAULT_MODEL_TYPE = 'linear'
BATCH_SIZE = 128          # how many records to use when doing one pass of gradient descent. Use a smaller number if your gpu is running out of memory.
TRAIN_TEST_SPLIT = 0.8    # what percent of records to use for training. the remaining used for validation.
MAX_EPOCHS = 100          # how many times to visit all records of your data
SHOW_PLOT = True          # would you like to see a pop up display of final loss?
VERBOSE_TRAIN = True      # would you like to see a progress bar with text during training?
USE_EARLY_STOP = True     # would you like to stop the training if we see it's not improving fit?
EARLY_STOP_PATIENCE = 5   # how many epochs to wait before no improvement
MIN_DELTA = .0005         # early stop will want this much loss change before calling it improved.
PRINT_MODEL_SUMMARY = True  # print layers and weights to stdout
OPTIMIZER = None          # adam, sgd, rmsprop, etc.. None accepts default
LEARNING_RATE = 0.001     # only used when OPTIMIZER specified
LEARNING_RATE_DECAY = 0.0  # only used when OPTIMIZER specified
SEND_BEST_MODEL_TO_PI = False  # change to true to automatically send best model during training
CREATE_TF_LITE = True     # automatically create tflite model in training
CREATE_TENSOR_RT = False  # automatically create tensorrt model in training

PRUNE_CNN = False         # This will remove weights from your model. The primary goal is to increase performance.
PRUNE_PERCENT_TARGET = 75  # The desired percentage of pruning.
PRUNE_PERCENT_PER_ITERATION = 20  # Percentage of pruning that is performed per iteration.
PRUNE_VAL_LOSS_DEGRADATION_LIMIT = 0.2  # The max amount of validation loss that is permitted during pruning.
PRUNE_EVAL_PERCENT_OF_DATASET = .05  # percent of dataset used to perform evaluation of model.

# Augmentations and Transformations
AUGMENTATIONS = []
TRANSFORMATIONS = []
# Settings for brightness and blur, use 'MULTIPLY' and/or 'BLUR' in
# AUGMENTATIONS
AUG_MULTIPLY_RANGE = (0.5, 3.0)
AUG_BLUR_RANGE = (0.0, 3.0)
# Region of interest cropping, requires 'CROP' in TRANSFORMATIONS to be set
# If these crop values are too large, they will cause the stride values to
# become negative and the model will not be valid.
ROI_CROP_TOP = 45     # the number of rows of pixels to ignore on the top of the image
ROI_CROP_BOTTOM = 0   # the number of rows of pixels to ignore on the bottom of the image
ROI_CROP_RIGHT = 0    # the number of columns of pixels to ignore on the right of the image
ROI_CROP_LEFT = 0     # the number of columns of pixels to ignore on the left of the image
# For trapezoidal see explanation in augmentations.py. Requires 'TRAPEZE' in
# TRANSFORMATIONS to be set
ROI_TRAPEZE_LL = 0
ROI_TRAPEZE_LR = 160
ROI_TRAPEZE_UL = 20
ROI_TRAPEZE_UR = 140
ROI_TRAPEZE_MIN_Y = 60
ROI_TRAPEZE_MAX_Y = 120

# Model transfer options
# When copying weights during a model transfer operation, should we freeze a certain number of layers
# to the incoming weights and not allow them to change during training?
FREEZE_LAYERS = False          # default False will allow all layers to be modified by training
NUM_LAST_LAYERS_TO_TRAIN = 7   # when freezing layers, how many layers from the last should be allowed to train?
# WEB CONTROL
WEB_CONTROL_PORT = int(os.getenv("WEB_CONTROL_PORT", 8887))  # which port to listen on when making a web controller
WEB_INIT_MODE = "user"  # which control mode to start in. one of user|local_angle|local. Setting local will start in ai mode.

# JOYSTICK
USE_JOYSTICK_AS_DEFAULT = False  # when starting the manage.py, when True, will not require a --js option to use the joystick
JOYSTICK_MAX_THROTTLE = 0.5      # this scalar is multiplied with the -1 to 1 throttle value to limit the maximum throttle. This can help if you drop the controller or just don't need the full speed available.
JOYSTICK_STEERING_SCALE = 1.0    # some people want a steering that is less sensitive. This scalar is multiplied with the steering -1 to 1. It can be negative to reverse dir.
AUTO_RECORD_ON_THROTTLE = True   # if true, we will record whenever throttle is not zero. if false, you must manually toggle recording with some other trigger. Usually circle button on joystick.
CONTROLLER_TYPE = 'xbox'         # (ps3|ps4|xbox|pigpio_rc|nimbus|wiiu|F710|rc3|MM1|custom) custom will run the my_joystick.py controller written by the `donkey createjs` command
USE_NETWORKED_JS = False         # should we listen for remote joystick control over the network?
NETWORK_JS_SERVER_IP = None      # when listening for network joystick control, which ip is serving this information
JOYSTICK_DEADZONE = 0.01         # when non zero, this is the smallest throttle before recording triggered.
JOYSTICK_THROTTLE_DIR = -1.0     # use -1.0 to flip forward/backward, use 1.0 to use joystick's natural forward/backward
USE_FPV = False                  # send camera data to FPV webserver
JOYSTICK_DEVICE_FILE = "/dev/input/js0"  # this is the unix file use to access the joystick.

# For the categorical model, this limits the upper bound of the learned throttle
# it's very IMPORTANT that this value is matched from the training PC config.py and the robot.py
# and ideally wouldn't change once set.
MODEL_CATEGORICAL_MAX_THROTTLE_RANGE = 0.8
# RNN or 3D
SEQUENCE_LENGTH = 3  # some models use a number of images over time. This controls how many.

# IMU
HAVE_IMU = False        # when true, this add a Mpu6050 part and records the data. Can be used with a
IMU_SENSOR = 'mpu6050'  # (mpu6050|mpu9250)
IMU_DLP_CONFIG = 0      # Digital Lowpass Filter setting (0:250Hz, 1:184Hz, 2:92Hz, 3:41Hz, 4:20Hz, 5:10Hz, 6:5Hz)

# SOMBRERO
HAVE_SOMBRERO = False  # set to true when using the sombrero hat from the Donkeycar store. This will enable pwm on the hat.

# PIGPIO RC control
STEERING_RC_GPIO = 26
THROTTLE_RC_GPIO = 20
DATA_WIPER_RC_GPIO = 19
PIGPIO_STEERING_MID = 1500  # Adjust this value if your car cannot run in a straight line
PIGPIO_MAX_FORWARD = 2000   # Max throttle to go forward. The bigger the faster
PIGPIO_STOPPED_PWM = 1500
PIGPIO_MAX_REVERSE = 1000   # Max throttle to go reverse. The smaller the faster
PIGPIO_SHOW_STEERING_VALUE = False
PIGPIO_INVERT = False
PIGPIO_JITTER = 0.025       # threshold below which no signal is reported

# ROBOHAT MM1
MM1_STEERING_MID = 1500  # Adjust this value if your car cannot run in a straight line
MM1_MAX_FORWARD = 2000   # Max throttle to go forward. The bigger the faster
MM1_STOPPED_PWM = 1500
MM1_MAX_REVERSE = 1000   # Max throttle to go reverse. The smaller the faster
MM1_SHOW_STEERING_VALUE = False
# Serial port
# -- Default Pi: '/dev/ttyS0'
# -- Jetson Nano: '/dev/ttyTHS1'
# -- Google coral: '/dev/ttymxc0'
# -- Windows: 'COM3', Arduino: '/dev/ttyACM0'
# -- MacOS/Linux: please use 'ls /dev/tty.*' to find the correct serial port for mm1
#    eg. '/dev/tty.usbmodemXXXXXX' and replace the port accordingly
MM1_SERIAL_PORT = '/dev/ttyS0'  # Serial Port for reading and sending MM1 data.

# LOGGING
HAVE_CONSOLE_LOGGING = True
LOGGING_LEVEL = 'INFO'          # (Python logging level) 'NOTSET' / 'DEBUG' / 'INFO' / 'WARNING' / 'ERROR' / 'FATAL' / 'CRITICAL'
LOGGING_FORMAT = '%(message)s'  # (Python logging format - https://docs.python.org/3/library/logging.html#formatter-objects)

# TELEMETRY
HAVE_MQTT_TELEMETRY = False
TELEMETRY_DONKEY_NAME = 'my_robot1234'
TELEMETRY_MQTT_TOPIC_TEMPLATE = 'donkey/%s/telemetry'
TELEMETRY_MQTT_JSON_ENABLE = False
TELEMETRY_MQTT_BROKER_HOST = 'broker.hivemq.com'
TELEMETRY_MQTT_BROKER_PORT = 1883
TELEMETRY_PUBLISH_PERIOD = 1
TELEMETRY_LOGGING_ENABLE = True
TELEMETRY_LOGGING_LEVEL = 'INFO'          # (Python logging level) 'NOTSET' / 'DEBUG' / 'INFO' / 'WARNING' / 'ERROR' / 'FATAL' / 'CRITICAL'
TELEMETRY_LOGGING_FORMAT = '%(message)s'  # (Python logging format - https://docs.python.org/3/library/logging.html#formatter-objects)
TELEMETRY_DEFAULT_INPUTS = 'pilot/angle,pilot/throttle,recording'
TELEMETRY_DEFAULT_TYPES = 'float,float'

# PERF MONITOR
HAVE_PERFMON = False

# RECORD OPTIONS
RECORD_DURING_AI = False     # normally we do not record during ai mode. Set this to true to get image and steering records for your Ai. Be careful not to use them to train.
AUTO_CREATE_NEW_TUB = False  # create a new tub (tub_YY_MM_DD) directory when recording or append records to data directory directly

# LED
HAVE_RGB_LED = False  # do you have an RGB LED like https://www.amazon.com/dp/B07BNRZWNF
LED_INVERT = False    # COMMON ANODE? Some RGB LED use common anode. like https://www.amazon.com/Xia-Fly-Tri-Color-Emitting-Diffused/dp/B07MYJQP8B

# LED board pin number for pwm outputs
# These are physical pinouts. See: https://www.raspberrypi-spy.co.uk/2012/06/simple-guide-to-the-rpi-gpio-header-and-pins/
LED_PIN_R = 12
LED_PIN_G = 10
LED_PIN_B = 16

# LED status color, 0-100
LED_R = 0
LED_G = 0
LED_B = 1

# LED Color for record count indicator
REC_COUNT_ALERT = 1000            # how many records before blinking alert
REC_COUNT_ALERT_CYC = 15          # how many cycles of 1/20 of a second to blink per REC_COUNT_ALERT records
REC_COUNT_ALERT_BLINK_RATE = 0.4  # how fast to blink the led in seconds on/off

# first number is record count, second tuple is color ( r, g, b) (0-100)
# when record count exceeds that number, the color will be used
RECORD_ALERT_COLOR_ARR = [ (0, (1, 1, 1)),
                           (3000, (5, 5, 5)),
                           (5000, (5, 2, 0)),
                           (10000, (0, 5, 0)),
                           (15000, (0, 5, 5)),
                           (20000, (0, 0, 5)), ]

# LED status color, 0-100, for model reloaded alert
MODEL_RELOADED_LED_R = 100
MODEL_RELOADED_LED_G = 0
MODEL_RELOADED_LED_B = 0
# BEHAVIORS
# When training the Behavioral Neural Network model, make a list of the behaviors,
# set the TRAIN_BEHAVIORS = True, and use the BEHAVIOR_LED_COLORS to give each behavior a color
TRAIN_BEHAVIORS = False
BEHAVIOR_LIST = ['Left_Lane', "Right_Lane"]
BEHAVIOR_LED_COLORS = [(0, 10, 0), (10, 0, 0)]  # RGB tuples 0-100 per channel

# Localizer
# The localizer is a neural network that can learn to predict its location on the track.
# This is an experimental feature that needs more development. But it can currently be used
# to predict the segment of the course, where the course is divided into NUM_LOCATIONS segments.
TRAIN_LOCALIZER = False
NUM_LOCATIONS = 10
BUTTON_PRESS_NEW_TUB = False  # when enabled, makes it easier to divide our data into one tub per track length if we make a new tub on each X button press.

# DonkeyGym
# Only on Ubuntu linux, you can use the simulator as a virtual donkey and
# issue the same python manage.py drive command as usual, but have them control a virtual car.
# This enables that, and sets the path to the simulator and the environment.
# You will want to download the simulator binary from: https://github.com/tawnkramer/donkey_gym/releases/download/v18.9/DonkeySimLinux.zip
# then extract that and modify DONKEY_SIM_PATH.
DONKEY_GYM = False
DONKEY_SIM_PATH = "path to sim"  # "/home/tkramer/projects/sdsandbox/sdsim/build/DonkeySimLinux/donkey_sim.x86_64" when racing on virtual-race-league use "remote", or use "remote" when you want to start the sim manually first.
DONKEY_GYM_ENV_NAME = "donkey-generated-track-v0"  # ("donkey-generated-track-v0"|"donkey-generated-roads-v0"|"donkey-warehouse-v0"|"donkey-avc-sparkfun-v0")
GYM_CONF = { "body_style" : "donkey", "body_rgb" : (128, 128, 128), "car_name" : "car", "font_size" : 100}  # body style(donkey|bare|car01) body rgb 0-255
GYM_CONF["racer_name"] = "Your Name"
GYM_CONF["country"] = "Place"
GYM_CONF["bio"] = "I race robots."

SIM_HOST = "127.0.0.1"      # when racing on virtual-race-league use host "trainmydonkey.com"
SIM_ARTIFICIAL_LATENCY = 0  # this is the millisecond latency in controls. Can be useful in emulating the delay when using a remote server. values of 100 to 400 probably reasonable.

# Save info from Simulator (pln)
SIM_RECORD_LOCATION = False
SIM_RECORD_GYROACCEL= False
SIM_RECORD_VELOCITY = False
SIM_RECORD_LIDAR = False

# publish camera over network
# This is used to create a tcp service to publish the camera feed
PUB_CAMERA_IMAGES = False

# When racing, to give the ai a boost, configure these values.
AI_LAUNCH_DURATION = 0.0        # the ai will output throttle for this many seconds
AI_LAUNCH_THROTTLE = 0.0        # the ai will output this throttle value
AI_LAUNCH_ENABLE_BUTTON = 'R2'  # this keypress will enable this boost. It must be enabled before each use to prevent accidental trigger.
AI_LAUNCH_KEEP_ENABLED = False  # when False ( default) you will need to hit the AI_LAUNCH_ENABLE_BUTTON for each use. This is safest. When this True, is active on each trip into "local" ai mode.

# Scale the output of the throttle of the ai pilot for all model types.
AI_THROTTLE_MULT = 1.0  # this multiplier will scale every throttle value for all output from NN models

# Path following
PATH_FILENAME = "donkey_path.pkl"  # the path will be saved to this filename
PATH_SCALE = 5.0        # the path display will be scaled by this factor in the web page
PATH_OFFSET = (0, 0)    # 255, 255 is the center of the map. This offset controls where the origin is displayed.
PATH_MIN_DIST = 0.3     # after travelling this distance (m), save a path point
PID_P = -10.0           # proportional mult for PID path follower
PID_I = 0.000           # integral mult for PID path follower
PID_D = -0.2            # differential mult for PID path follower
PID_THROTTLE = 0.2      # constant throttle value during path following
SAVE_PATH_BTN = "cross"        # joystick button to save path
RESET_ORIGIN_BTN = "triangle"  # joystick button to press to move car back to origin

# Intel Realsense D435 and D435i depth sensing camera
REALSENSE_D435_RGB = True    # True to capture RGB image
REALSENSE_D435_DEPTH = True  # True to capture depth as image array
REALSENSE_D435_IMU = False   # True to capture IMU data (D435i only)
REALSENSE_D435_ID = None     # serial number of camera or None if you only have one camera (it will autodetect)

# Stop Sign Detector
STOP_SIGN_DETECTOR = False
STOP_SIGN_MIN_SCORE = 0.2
STOP_SIGN_SHOW_BOUNDING_BOX = True
STOP_SIGN_MAX_REVERSE_COUNT = 10   # How many times should the car reverse when detected a stop sign, set to 0 to disable reversing
STOP_SIGN_REVERSE_THROTTLE = -0.5  # Throttle during reversing when detected a stop sign

# FPS counter
SHOW_FPS = False
FPS_DEBUG_INTERVAL = 10  # the interval in seconds for printing the frequency info into the shell
| 53.666667 | 225 | 0.739065 |
b1175fc512b6ccf49a3b8adcb05976ad5624ce7b | 17,402 | py | Python | test/orm/test_default_strategies.py | mjpieters/sqlalchemy | a8efeb6c052330b7b8d44960132d638b08d42d18 | [
"MIT"
] | null | null | null | test/orm/test_default_strategies.py | mjpieters/sqlalchemy | a8efeb6c052330b7b8d44960132d638b08d42d18 | [
"MIT"
] | null | null | null | test/orm/test_default_strategies.py | mjpieters/sqlalchemy | a8efeb6c052330b7b8d44960132d638b08d42d18 | [
"MIT"
] | null | null | null | from test.orm import _fixtures
from sqlalchemy import testing
from sqlalchemy.orm import mapper, relationship, create_session
from sqlalchemy import util
import sqlalchemy as sa
from sqlalchemy.testing import eq_, assert_raises_message
class DefaultStrategyOptionsTest(_fixtures.FixtureTest):
    """Exercise the wildcard loader options (lazyload('*'), joinedload('*'),
    subqueryload('*'), noload('*')) against mappers whose relationships
    default to eager ("downgrade" fixtures) or lazy ("upgrade" fixtures)
    loading, asserting the exact number of SQL statements emitted."""

    def _assert_fully_loaded(self, users):
        # verify everything loaded, with no additional sql needed
        def go():
            # comparison with no additional sql
            eq_(users, self.static.user_all_result)
            # keywords are not part of self.static.user_all_result, so
            # verify all the item keywords were loaded, with no more sql.
            # 'any' verifies at least some items have keywords; we build
            # a list for any([...]) instead of any(...) to prove we've
            # iterated all the items with no sql.
            f = util.flatten_iterator
            assert any([i.keywords for i in
                        f([o.items for o in f([u.orders for u in users])])])
        self.assert_sql_count(testing.db, go, 0)

    def _assert_addresses_loaded(self, users):
        # verify all the addresses were joined loaded with no more sql
        def go():
            for u, static in zip(users, self.static.user_all_result):
                eq_(u.addresses, static.addresses)
        self.assert_sql_count(testing.db, go, 0)

    def _downgrade_fixture(self):
        # mappers with eager defaults; tests "downgrade" them via wildcards
        users, Keyword, items, order_items, orders, Item, User, \
            Address, keywords, item_keywords, Order, addresses = \
            self.tables.users, self.classes.Keyword, self.tables.items, \
            self.tables.order_items, self.tables.orders, \
            self.classes.Item, self.classes.User, self.classes.Address, \
            self.tables.keywords, self.tables.item_keywords, \
            self.classes.Order, self.tables.addresses

        mapper(Address, addresses)
        mapper(Keyword, keywords)
        mapper(Item, items, properties=dict(
            keywords=relationship(Keyword, secondary=item_keywords,
                                  lazy='subquery',
                                  order_by=item_keywords.c.keyword_id)))
        mapper(Order, orders, properties=dict(
            items=relationship(Item, secondary=order_items, lazy='subquery',
                               order_by=order_items.c.item_id)))
        mapper(User, users, properties=dict(
            addresses=relationship(Address, lazy='joined',
                                   order_by=addresses.c.id),
            orders=relationship(Order, lazy='joined',
                                order_by=orders.c.id)))
        return create_session()

    def _upgrade_fixture(self):
        # mappers with lazy defaults; tests "upgrade" them via wildcards
        users, Keyword, items, order_items, orders, Item, User, \
            Address, keywords, item_keywords, Order, addresses = \
            self.tables.users, self.classes.Keyword, self.tables.items, \
            self.tables.order_items, self.tables.orders, \
            self.classes.Item, self.classes.User, self.classes.Address, \
            self.tables.keywords, self.tables.item_keywords, \
            self.classes.Order, self.tables.addresses

        mapper(Address, addresses)
        mapper(Keyword, keywords)
        mapper(Item, items, properties=dict(
            keywords=relationship(Keyword, secondary=item_keywords,
                                  lazy='select',
                                  order_by=item_keywords.c.keyword_id)))
        mapper(Order, orders, properties=dict(
            items=relationship(Item, secondary=order_items, lazy=True,
                               order_by=order_items.c.item_id)))
        mapper(User, users, properties=dict(
            addresses=relationship(Address, lazy=True,
                                   order_by=addresses.c.id),
            orders=relationship(Order,
                                order_by=orders.c.id)))
        return create_session()

    def test_downgrade_baseline(self):
        """Mapper strategy defaults load as expected
        (compare to rest of DefaultStrategyOptionsTest downgrade tests)."""
        sess = self._downgrade_fixture()
        users = []

        # test _downgrade_fixture mapper defaults, 3 queries (2 subquery loads).
        def go():
            users[:] = sess.query(self.classes.User)\
                .order_by(self.classes.User.id)\
                .all()
        self.assert_sql_count(testing.db, go, 3)

        # all loaded with no additional sql
        self._assert_fully_loaded(users)

    def test_disable_eagerloads(self):
        """Mapper eager load strategy defaults can be shut off
        with enable_eagerloads(False)."""

        # While this isn't testing a mapper option, it is included
        # as baseline reference for how XYZload('*') option
        # should work, namely, it shouldn't affect later queries
        # (see other test_select_s)
        sess = self._downgrade_fixture()
        users = []

        # demonstrate that enable_eagerloads loads with only 1 sql
        def go():
            users[:] = sess.query(self.classes.User)\
                .enable_eagerloads(False)\
                .order_by(self.classes.User.id)\
                .all()
        self.assert_sql_count(testing.db, go, 1)

        # demonstrate that users[0].orders must now be loaded with 3 sql
        # (need to lazyload, and 2 subquery: 3 total)
        def go():
            users[0].orders
        self.assert_sql_count(testing.db, go, 3)

    def test_last_one_wins(self):
        # when conflicting wildcard options are given, the last takes effect
        sess = self._downgrade_fixture()
        users = []

        def go():
            users[:] = sess.query(self.classes.User)\
                .options(sa.orm.subqueryload('*'))\
                .options(sa.orm.joinedload(self.classes.User.addresses))\
                .options(sa.orm.lazyload('*'))\
                .order_by(self.classes.User.id)\
                .all()
        self.assert_sql_count(testing.db, go, 1)

        # verify all the addresses were joined loaded (no more sql)
        self._assert_addresses_loaded(users)

    def test_star_must_be_alone(self):
        # the '*' wildcard token cannot be combined with a specific entity
        sess = self._downgrade_fixture()
        User = self.classes.User
        opt = sa.orm.subqueryload('*', User.addresses)
        assert_raises_message(
            sa.exc.ArgumentError,
            "Wildcard token cannot be followed by another entity",
            sess.query(User).options, opt
        )

    def test_select_with_joinedload(self):
        """Mapper load strategy defaults can be downgraded with
        lazyload('*') option, while explicit joinedload() option
        is still honored"""
        sess = self._downgrade_fixture()
        users = []

        # lazyload('*') shuts off 'orders' subquery: only 1 sql
        def go():
            users[:] = sess.query(self.classes.User)\
                .options(sa.orm.lazyload('*'))\
                .options(sa.orm.joinedload(self.classes.User.addresses))\
                .order_by(self.classes.User.id)\
                .all()
        self.assert_sql_count(testing.db, go, 1)

        # verify all the addresses were joined loaded (no more sql)
        self._assert_addresses_loaded(users)

        # users[0] has orders, which need to lazy load, and 2 subquery:
        # (same as with test_disable_eagerloads): 3 total sql
        def go():
            users[0].orders
        self.assert_sql_count(testing.db, go, 3)

    def test_select_with_subqueryload(self):
        """Mapper load strategy defaults can be downgraded with
        lazyload('*') option, while explicit subqueryload() option
        is still honored"""
        sess = self._downgrade_fixture()
        users = []

        # now test 'default_strategy' option combined with 'subquery'
        # shuts off 'addresses' load AND orders.items load: 2 sql expected
        def go():
            users[:] = sess.query(self.classes.User)\
                .options(sa.orm.lazyload('*'))\
                .options(sa.orm.subqueryload(self.classes.User.orders))\
                .order_by(self.classes.User.id)\
                .all()
        self.assert_sql_count(testing.db, go, 2)

        # Verify orders have already been loaded: 0 sql
        def go():
            for u, static in zip(users, self.static.user_all_result):
                assert len(u.orders) == len(static.orders)
        self.assert_sql_count(testing.db, go, 0)

        # Verify lazyload('*') prevented orders.items load
        # users[0].orders[0] has 3 items, each with keywords: 2 sql
        # ('items' and 'items.keywords' subquery)
        def go():
            for i in users[0].orders[0].items:
                i.keywords
        self.assert_sql_count(testing.db, go, 2)

        # lastly, make sure they actually loaded properly
        eq_(users, self.static.user_all_result)

    def test_noload_with_joinedload(self):
        """Mapper load strategy defaults can be downgraded with
        noload('*') option, while explicit joinedload() option
        is still honored"""
        sess = self._downgrade_fixture()
        users = []

        # test noload('*') shuts off 'orders' subquery, only 1 sql
        def go():
            users[:] = sess.query(self.classes.User)\
                .options(sa.orm.noload('*'))\
                .options(sa.orm.joinedload(self.classes.User.addresses))\
                .order_by(self.classes.User.id)\
                .all()
        self.assert_sql_count(testing.db, go, 1)

        # verify all the addresses were joined loaded (no more sql)
        self._assert_addresses_loaded(users)

        # User.orders should have loaded "noload" (meaning [])
        def go():
            for u in users:
                assert u.orders == []
        self.assert_sql_count(testing.db, go, 0)

    def test_noload_with_subqueryload(self):
        """Mapper load strategy defaults can be downgraded with
        noload('*') option, while explicit subqueryload() option
        is still honored"""
        sess = self._downgrade_fixture()
        users = []

        # test noload('*') option combined with subqueryload()
        # shuts off 'addresses' load AND orders.items load: 2 sql expected
        def go():
            users[:] = sess.query(self.classes.User)\
                .options(sa.orm.noload('*'))\
                .options(sa.orm.subqueryload(self.classes.User.orders))\
                .order_by(self.classes.User.id)\
                .all()
        self.assert_sql_count(testing.db, go, 2)

        def go():
            # Verify orders have already been loaded: 0 sql
            for u, static in zip(users, self.static.user_all_result):
                assert len(u.orders) == len(static.orders)
            # Verify noload('*') prevented orders.items load
            # and set 'items' to []
            for u in users:
                for o in u.orders:
                    assert o.items == []
        self.assert_sql_count(testing.db, go, 0)

    def test_joined(self):
        """Mapper load strategy defaults can be upgraded with
        joinedload('*') option."""
        sess = self._upgrade_fixture()
        users = []

        # test upgrade all to joined: 1 sql
        def go():
            users[:] = sess.query(self.classes.User)\
                .options(sa.orm.joinedload('*'))\
                .order_by(self.classes.User.id)\
                .all()
        self.assert_sql_count(testing.db, go, 1)

        # verify everything loaded, with no additional sql needed
        self._assert_fully_loaded(users)

    def test_joined_path_wildcards(self):
        # per-path wildcards covering every level load in one statement
        sess = self._upgrade_fixture()
        users = []

        # test upgrade all to joined: 1 sql
        def go():
            users[:] = sess.query(self.classes.User)\
                .options(sa.orm.joinedload('.*'))\
                .options(sa.orm.joinedload("addresses.*"))\
                .options(sa.orm.joinedload("orders.*"))\
                .options(sa.orm.joinedload("orders.items.*"))\
                .order_by(self.classes.User.id)\
                .all()
        self.assert_sql_count(testing.db, go, 1)
        self._assert_fully_loaded(users)

    def test_joined_with_lazyload(self):
        """Mapper load strategy defaults can be upgraded with
        joinedload('*') option, while explicit lazyload() option
        is still honored"""
        sess = self._upgrade_fixture()
        users = []

        # test joined all but 'keywords': upgraded to 1 sql
        def go():
            users[:] = sess.query(self.classes.User)\
                .options(sa.orm.lazyload('orders.items.keywords'))\
                .options(sa.orm.joinedload('*'))\
                .order_by(self.classes.User.id)\
                .all()
        self.assert_sql_count(testing.db, go, 1)

        # everything (but keywords) loaded ok
        # (note self.static.user_all_result contains no keywords)
        def go():
            eq_(users, self.static.user_all_result)
        self.assert_sql_count(testing.db, go, 0)

        # verify the items were loaded, while item.keywords were not
        def go():
            # redundant with last test, but illustrative
            users[0].orders[0].items[0]
        self.assert_sql_count(testing.db, go, 0)

        def go():
            users[0].orders[0].items[0].keywords
        self.assert_sql_count(testing.db, go, 1)

    def test_joined_with_subqueryload(self):
        """Mapper load strategy defaults can be upgraded with
        joinedload('*') option, while explicit subqueryload() option
        is still honored"""
        sess = self._upgrade_fixture()
        users = []

        # test upgrade all but 'addresses', which is subquery loaded (2 sql)
        def go():
            users[:] = sess.query(self.classes.User)\
                .options(sa.orm.subqueryload(self.classes.User.addresses))\
                .options(sa.orm.joinedload('*'))\
                .order_by(self.classes.User.id)\
                .all()
        self.assert_sql_count(testing.db, go, 2)

        # verify everything loaded, with no additional sql needed
        self._assert_fully_loaded(users)

    def test_subquery(self):
        """Mapper load strategy defaults can be upgraded with
        subqueryload('*') option."""
        sess = self._upgrade_fixture()
        users = []

        # test upgrade all to subquery: 1 sql + 4 relationships = 5
        def go():
            users[:] = sess.query(self.classes.User)\
                .options(sa.orm.subqueryload('*'))\
                .order_by(self.classes.User.id)\
                .all()
        self.assert_sql_count(testing.db, go, 5)

        # verify everything loaded, with no additional sql needed
        self._assert_fully_loaded(users)

    def test_subquery_path_wildcards(self):
        # per-path wildcards, equivalent to subqueryload('*') above
        sess = self._upgrade_fixture()
        users = []

        # test upgrade all to subquery: 1 sql + 4 relationships = 5
        def go():
            users[:] = sess.query(self.classes.User)\
                .options(sa.orm.subqueryload('.*'))\
                .options(sa.orm.subqueryload('addresses.*'))\
                .options(sa.orm.subqueryload('orders.*'))\
                .options(sa.orm.subqueryload('orders.items.*'))\
                .order_by(self.classes.User.id)\
                .all()
        self.assert_sql_count(testing.db, go, 5)

        # verify everything loaded, with no additional sql needed
        self._assert_fully_loaded(users)

    def test_subquery_with_lazyload(self):
        """Mapper load strategy defaults can be upgraded with
        subqueryload('*') option, while explicit lazyload() option
        is still honored"""
        sess = self._upgrade_fixture()
        users = []

        # test subquery all but 'keywords' (1 sql + 3 relationships = 4)
        def go():
            users[:] = sess.query(self.classes.User)\
                .options(sa.orm.lazyload('orders.items.keywords'))\
                .options(sa.orm.subqueryload('*'))\
                .order_by(self.classes.User.id)\
                .all()
        self.assert_sql_count(testing.db, go, 4)

        # no more sql
        # (note self.static.user_all_result contains no keywords)
        def go():
            eq_(users, self.static.user_all_result)
        self.assert_sql_count(testing.db, go, 0)

        # verify the item.keywords were not loaded
        def go():
            users[0].orders[0].items[0]
        self.assert_sql_count(testing.db, go, 0)

        def go():
            users[0].orders[0].items[0].keywords
        self.assert_sql_count(testing.db, go, 1)

    def test_subquery_with_joinedload(self):
        """Mapper load strategy defaults can be upgraded with
        subqueryload('*') option, while multiple explicit
        joinedload() options are still honored"""
        sess = self._upgrade_fixture()
        users = []

        # test upgrade all but 'addresses' & 'orders', which are joinedloaded
        # (1 sql + items + keywords = 3)
        def go():
            users[:] = sess.query(self.classes.User)\
                .options(sa.orm.joinedload(self.classes.User.addresses))\
                .options(sa.orm.joinedload(self.classes.User.orders))\
                .options(sa.orm.subqueryload('*'))\
                .order_by(self.classes.User.id)\
                .all()
        self.assert_sql_count(testing.db, go, 3)

        # verify everything loaded, with no additional sql needed
        self._assert_fully_loaded(users)
| 39.55 | 80 | 0.586254 |
bd73f16da329dbfaf01aaea63c78181cd01e48f6 | 87 | py | Python | __init__.py | ufownl/fake_chs_lp | 3a5c609549befaa607f1a101dae3ab948a71844f | [
"BSD-3-Clause"
] | 80 | 2019-12-01T09:55:40.000Z | 2022-03-16T09:33:25.000Z | __init__.py | ufownl/fake_chs_lp | 3a5c609549befaa607f1a101dae3ab948a71844f | [
"BSD-3-Clause"
] | 2 | 2020-01-14T03:06:01.000Z | 2020-09-10T02:13:10.000Z | __init__.py | ufownl/fake_chs_lp | 3a5c609549befaa607f1a101dae3ab948a71844f | [
"BSD-3-Clause"
] | 33 | 2020-01-14T03:31:57.000Z | 2022-03-30T07:57:42.000Z | __all__ = ["blue_plate", "black_plate", "yellow_plate", "green_plate", "random_plate"]
| 43.5 | 86 | 0.724138 |
61da3840a948a00fb7f4ad353d92caf024929a40 | 1,708 | py | Python | djangolg/methods/bgp_community.py | wolcomm/djangolg | 724dbb631da3c61d42f62024f9d7826423624191 | [
"Apache-2.0"
] | 6 | 2016-10-24T09:15:47.000Z | 2021-01-20T11:09:39.000Z | djangolg/methods/bgp_community.py | benmaddison/djangolg | 724dbb631da3c61d42f62024f9d7826423624191 | [
"Apache-2.0"
] | null | null | null | djangolg/methods/bgp_community.py | benmaddison/djangolg | 724dbb631da3c61d42f62024f9d7826423624191 | [
"Apache-2.0"
] | 1 | 2022-03-20T03:25:07.000Z | 2022-03-20T03:25:07.000Z | # Copyright 2017 Workonline Communications (Pty) Ltd. All rights reserved.
#
# The contents of this file are licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""BGP Community method class for djangolg."""
from __future__ import print_function
from __future__ import unicode_literals
from django import forms
from djangolg.methods.base import BaseMethod
class BGPCommunityMethod(BaseMethod):
    """Look up BGP RIB entries by COMMUNITY value."""

    # Machine identifier and human-facing title for this looking-glass method.
    name = "bgp_community"
    title = "BGP Community"
    # Marks the method as newly added in the UI.
    new = True
    # User-facing help text rendered in the looking-glass form.
    description = """
    Look up BGP RIB entries with a COMMUNITIES attribute that contains the
    target value. The Community Value field accepts a pair of colon-seperated
    16-bit decimal numbers. Some platforms may also support named "well known"
    community values such as 'no-export', etc. Options are provided to select
    either the IPv4 or the IPv6 address family.
    """
    # Django form field used to collect the RFC1997 community value.
    target_field = forms.CharField(
        widget=forms.TextInput({
            'class': 'form-control',
            'placeholder': "Community Value",
            'data-toggle': 'tooltip',
            'title': "RFC1997 Community Value"
        }),
    )
    # Address-family choices offered alongside the target value.
    options = ["IPv4", "IPv6"]
    # Sample target used by automated self-tests of the method.
    test_target = "65000:65000"
| 36.340426 | 79 | 0.711944 |
329d06b45e0dbcf38f05339a37779ecda3935d01 | 4,139 | py | Python | python3_cron_scripts/libs3/AzureConnector.py | bhumikaSinghal/Marinus | d64cf6217e422bb0be8b19f50b9a63e01d7b0783 | [
"Apache-2.0"
] | null | null | null | python3_cron_scripts/libs3/AzureConnector.py | bhumikaSinghal/Marinus | d64cf6217e422bb0be8b19f50b9a63e01d7b0783 | [
"Apache-2.0"
] | null | null | null | python3_cron_scripts/libs3/AzureConnector.py | bhumikaSinghal/Marinus | d64cf6217e422bb0be8b19f50b9a63e01d7b0783 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python3
# Copyright 2018 Adobe. All rights reserved.
# This file is licensed to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may obtain a copy
# of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under
# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS
# OF ANY KIND, either express or implied. See the License for the specific language
# governing permissions and limitations under the License.
"""
This module manages interactions with Azure and supports filed based and RBAC based authentication.
The authentication methods are described here:
https://docs.microsoft.com/en-us/python/azure/python-sdk-azure-authenticate?view=azure-python
"""
import configparser
import json
import logging
import requests
from azure.mgmt.dns import DnsManagementClient
from azure.mgmt.resource import ResourceManagementClient
from libs3.ConnectorUtil import ConnectorUtil
class AzureConnector(object):
"""
This class is designed for interacting with the Azure APIs
"""
azure_config_file = "connector.config"
TENANT_ID = None
SUBSCRIPTION_ID = None
KEY = None
CLIENT_ID = None
FILE_PATH = None
_logger = None
def _log(self):
"""
Get the log
"""
return logging.getLogger(__name__)
def _init_azure(self, config):
self.TENANT_ID = ConnectorUtil.get_config_setting(
self._logger, config, "Azure", "az.tenant_id"
)
self.CLIENT_ID = ConnectorUtil.get_config_setting(
self._logger, config, "Azure", "az.client_id"
)
self.KEY = ConnectorUtil.get_config_setting(
self._logger, config, "Azure", "az.sp_password"
)
self.SUBSCRIPTION_ID = ConnectorUtil.get_config_setting(
self._logger, config, "Azure", "az.subscription_id"
)
self.FILE_PATH = ConnectorUtil.get_config_setting(
self._logger, config, "Azure", "az.file_path"
)
def __init__(self, config_file="", log_level=None):
if config_file != "":
self.azure_config_file = config_file
self._logger = self._log()
if log_level is not None:
self._logger.setLevel(log_level)
config = configparser.ConfigParser()
list = config.read(self.azure_config_file)
if len(list) == 0:
self._logger.error("Error: Could not find the config file")
exit(1)
self._init_azure(config)
def get_dns_client(self):
"""
Get a connection to the Azure DNS service
"""
if self.FILE_PATH is not None and self.FILE_PATH != "":
from azure.common.client_factory import get_client_from_auth_file
return get_client_from_auth_file(DnsManagementClient)
elif self.KEY is not None and self.KEY != "":
from azure.common.credentials import ServicePrincipalCredentials
credentials = ServicePrincipalCredentials(
client_id=self.CLIENT_ID, secret=self.KEY, tenant=self.TENANT_ID
)
dns_client = DnsManagementClient(credentials, self.SUBSCRIPTION_ID)
return dns_client
def get_resources_client(self):
"""
Get a connection to the Azure Resource Mananger
"""
if self.FILE_PATH is not None and self.FILE_PATH != "":
from azure.common.client_factory import get_client_from_auth_file
return get_client_from_auth_file(ResourceManagementClient)
elif self.KEY is not None and self.KEY != "":
from azure.common.credentials import ServicePrincipalCredentials
credentials = ServicePrincipalCredentials(
client_id=self.CLIENT_ID, secret=self.KEY, tenant=self.TENANT_ID
)
resources_client = ResourceManagementClient(
credentials, self.SUBSCRIPTION_ID
)
return resources_client
| 33.650407 | 99 | 0.670935 |
3055618b7d3d1791e48de76c2932bf7362bf1eb1 | 5,544 | py | Python | xknx/devices/travelcalculator.py | cyberjunky/xknx | c708ed6a2ca6449b74c6cea197d658e3399b99d1 | [
"MIT"
] | 1 | 2020-12-09T16:17:49.000Z | 2020-12-09T16:17:49.000Z | xknx/devices/travelcalculator.py | cyberjunky/xknx | c708ed6a2ca6449b74c6cea197d658e3399b99d1 | [
"MIT"
] | null | null | null | xknx/devices/travelcalculator.py | cyberjunky/xknx | c708ed6a2ca6449b74c6cea197d658e3399b99d1 | [
"MIT"
] | null | null | null | """
Module TravelCalculator provides functionality for predicting the current position of a Cover.
E.g.:
* Given a Cover that takes 100 seconds to travel from top to bottom.
* Starting from position 90, directed to position 60 at time 0.
* At time 10 TravelCalculator will return position 80 (final position not reached).
* At time 20 TravelCalculator will return position 70 (final position not reached).
* At time 30 TravelCalculator will return position 60 (final position reached).
"""
import time
from enum import Enum
class PositionType(Enum):
"""Enum class for different type of calculated positions."""
UNKNOWN = 1
CALCULATED = 2
CONFIRMED = 3
class TravelStatus(Enum):
"""Enum class for travel status."""
DIRECTION_UP = 1
DIRECTION_DOWN = 2
STOPPED = 3
class TravelCalculator:
"""Class for calculating the current position of a cover."""
# pylint: disable=too-many-instance-attributes
def __init__(self, travel_time_down, travel_time_up):
"""Initialize TravelCalculator class."""
self.position_type = PositionType.UNKNOWN
self.last_known_position = 0
self.travel_time_down = travel_time_down
self.travel_time_up = travel_time_up
self.travel_to_position = 0
self.travel_started_time = 0
self.travel_direction = TravelStatus.STOPPED
# 0 is closed, 100 is fully open
self.position_closed = 0
self.position_open = 100
self.time_set_from_outside = None
def set_position(self, position):
"""Set known position of cover."""
self.last_known_position = position
self.travel_to_position = position
self.position_type = PositionType.CONFIRMED
def stop(self):
"""Stop traveling."""
self.last_known_position = self.current_position()
self.travel_to_position = self.last_known_position
self.position_type = PositionType.CALCULATED
self.travel_direction = TravelStatus.STOPPED
def start_travel(self, travel_to_position):
"""Start traveling to position."""
self.stop()
self.travel_started_time = self.current_time()
self.travel_to_position = travel_to_position
self.position_type = PositionType.CALCULATED
self.travel_direction = \
TravelStatus.DIRECTION_UP \
if travel_to_position > self.last_known_position else \
TravelStatus.DIRECTION_DOWN
def start_travel_up(self):
"""Start traveling up."""
self.start_travel(self.position_open)
def start_travel_down(self):
"""Start traveling down."""
self.start_travel(self.position_closed)
def current_position(self):
"""Return current (calculated or known) position."""
if self.position_type == PositionType.CALCULATED:
return self._calculate_position()
return self.last_known_position
def is_traveling(self):
"""Return if cover is traveling."""
return self.current_position() != self.travel_to_position
def position_reached(self):
"""Return if cover has reached designated position."""
return self.current_position() == self.travel_to_position
def is_open(self):
"""Return if cover is (fully) open."""
return self.current_position() == self.position_open
def is_closed(self):
"""Return if cover is (fully) closed."""
return self.current_position() == self.position_closed
def _calculate_position(self):
"""Return calculated position."""
relative_position = self.travel_to_position - self.last_known_position
def position_reached_or_exceeded(relative_position):
"""Return if designated position was reached."""
if relative_position >= 0 \
and self.travel_direction == TravelStatus.DIRECTION_DOWN:
return True
if relative_position <= 0 \
and self.travel_direction == TravelStatus.DIRECTION_UP:
return True
return False
if position_reached_or_exceeded(relative_position):
return self.travel_to_position
travel_time = self._calculate_travel_time(relative_position)
if self.current_time() > self.travel_started_time + travel_time:
return self.travel_to_position
progress = (self.current_time()-self.travel_started_time)/travel_time
position = self.last_known_position + relative_position * progress
return int(position)
def _calculate_travel_time(self, relative_position):
"""Calculate time to travel to relative position."""
travel_direction = \
TravelStatus.DIRECTION_UP \
if relative_position > 0 else \
TravelStatus.DIRECTION_DOWN
travel_time_full = \
self.travel_time_up \
if travel_direction == TravelStatus.DIRECTION_UP else \
self.travel_time_down
travel_range = self.position_open - self.position_closed
return travel_time_full * abs(relative_position) / travel_range
def current_time(self):
"""Get current time. May be modified from outside (for unit tests)."""
# time_set_from_outside is used within unit tests
if self.time_set_from_outside is not None:
return self.time_set_from_outside
return time.time()
def __eq__(self, other):
"""Equal operator."""
return self.__dict__ == other.__dict__
| 35.088608 | 94 | 0.67298 |
d56e4940e4c35629c49f569fc87b7c0ec32985ea | 3,897 | py | Python | scripts/getSilverData.py | zenetoshl/league-recommendations | 156c0c8641f15aed4faa4e230228ccb64708d57c | [
"MIT"
] | null | null | null | scripts/getSilverData.py | zenetoshl/league-recommendations | 156c0c8641f15aed4faa4e230228ccb64708d57c | [
"MIT"
] | null | null | null | scripts/getSilverData.py | zenetoshl/league-recommendations | 156c0c8641f15aed4faa4e230228ccb64708d57c | [
"MIT"
] | null | null | null | import requests
import time
import pandas as pd
# golbal variables
matchesInfo = []
try:
api_key = 'api_key=RGAPI-17fc5594-8fac-430c-bb62-729b4420bae8'
url = 'https://br1.api.riotgames.com/lol/'
challengerSummoners = f'{url}league/v4/entries/RANKED_SOLO_5x5/SILVER/IV?{api_key}'
response = requests.get(challengerSummoners)
summoners = response.json()
for summoner in summoners:
try:
summonerId = summoner['summonerId']
accountBySummoner = f'{url}summoner/v4/summoners/{summonerId}?{api_key}'
account = requests.get(accountBySummoner).json()
accountId = account['accountId']
beginIndex = 0
while beginIndex < 10:
print(accountId + " index: " + str(beginIndex) + " Size: " + str(len(matchesInfo)))
try:
matchlistByAccount = f'{url}match/v4/matchlists/by-account/{accountId}?beginIndex={beginIndex}&endIndex=40&{api_key}'
beginIndex += 100
responseMatches = requests.get(matchlistByAccount)
matchesJson = responseMatches.json()['matches']
for match in matchesJson:
matchId = match['gameId']
readed = False
while not readed:
try:
matchByMatchId = f'{url}match/v4/matches/{matchId}?{api_key}'
matchResponse = requests.get(matchByMatchId).json()
matchId = matchResponse['gameId']
identities = matchResponse['participantIdentities']
duration = matchResponse['gameDuration']
participants = matchResponse['participants']
cont = 0
for participant in participants:
stats = participant['stats']
stats['gameId'] = matchId
stats['gameDuration'] = duration
stats['lane'] = participant['timeline']['lane']
stats['participantId'] = participant['participantId']
stats['teamId'] = participant['teamId']
stats['championId'] = participant['championId']
stats['spell1Id'] = participant['spell1Id']
stats['spell2Id'] = participant['spell2Id']
stats['participantId'] = identities[cont]['player']['accountId']
matchesInfo.append(stats)
cont += 1
readed = True
except Exception as e:
print(e)
print("Reconectando")
readed = False
time.sleep(5)
except Exception:
print("Reconectando")
time.sleep(30)
df = pd.DataFrame(matchesInfo)
df.to_csv('silverParticipants.csv', index = False, header=True)
except Exception:
print("Reconectando")
time.sleep(30)
df = pd.DataFrame(matchesInfo)
df.to_csv('silverParticipants.csv', index = False, header=True)
except KeyboardInterrupt:
print('interrompendo')
df = pd.DataFrame(matchesInfo)
df.to_csv('silverParticipants.csv', index = False, header=True)
time.sleep(5)
exit(0)
df = pd.DataFrame(matchesInfo)
df.to_csv('silverParticipants.csv', index = False, header=True) | 51.276316 | 137 | 0.486528 |
de0ecdaa1837786076003b2b6d1329124e114ee0 | 1,103 | py | Python | codes/glue/churn-xgboost/src/glue_etl.py | aJoohongKim/amazon-sagemaker-built-in-algorithms-mlops-pipeline-using-aws-cdk | 9162b26467d545218af45424193e95a5d02ea70c | [
"MIT-0"
] | 3 | 2021-08-18T03:01:04.000Z | 2022-02-09T14:04:29.000Z | codes/glue/churn-xgboost/src/glue_etl.py | aJoohongKim/amazon-sagemaker-built-in-algorithms-mlops-pipeline-using-aws-cdk | 9162b26467d545218af45424193e95a5d02ea70c | [
"MIT-0"
] | null | null | null | codes/glue/churn-xgboost/src/glue_etl.py | aJoohongKim/amazon-sagemaker-built-in-algorithms-mlops-pipeline-using-aws-cdk | 9162b26467d545218af45424193e95a5d02ea70c | [
"MIT-0"
] | 4 | 2021-08-18T03:01:07.000Z | 2021-12-06T10:07:57.000Z | import sys
from awsglue.context import GlueContext
from awsglue.dynamicframe import DynamicFrame
from awsglue.job import Job
from awsglue.transforms import *
from awsglue.utils import getResolvedOptions
from pyspark.context import SparkContext
# Retrieve parameters for the Glue job.
args = getResolvedOptions(sys.argv, ["JOB_NAME", "S3_INPUT_FILE", "S3_TRAIN_KEY", "S3_VALIDATE_KEY"])
sc = SparkContext()
glueContext = GlueContext(sc)
spark = glueContext.spark_session
job = Job(glueContext)
job.init(args["JOB_NAME"], args)
# Create a PySpark dataframe from the source table.
source_data_frame = spark.read.load(args["S3_INPUT_FILE"], format="csv", inferSchema=True, header=False)
# Split the dataframe in to training and validation dataframes.
train_data, val_data = source_data_frame.randomSplit([0.7, 0.3])
# Write both dataframes to the destination datastore.
train_path = args["S3_TRAIN_KEY"]
val_path = args["S3_VALIDATE_KEY"]
train_data.write.save(train_path, format="csv", mode="overwrite")
val_data.write.save(val_path, format="csv", mode="overwrite")
# Complete the job.
job.commit() | 33.424242 | 104 | 0.787851 |
d2c98285a46b3ce9bba714eceb63c6ed39fcc9cd | 180,536 | py | Python | ui_main.py | Brktrlw/ogrencitakip | 74937f6ddbf8ea9ab735139ef636366fa7d14b33 | [
"Apache-2.0"
] | 1 | 2021-05-09T19:06:04.000Z | 2021-05-09T19:06:04.000Z | ui_main.py | Brktrlw/ogrencitakip | 74937f6ddbf8ea9ab735139ef636366fa7d14b33 | [
"Apache-2.0"
] | null | null | null | ui_main.py | Brktrlw/ogrencitakip | 74937f6ddbf8ea9ab735139ef636366fa7d14b33 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
from PySide2.QtCore import (QCoreApplication, QMetaObject, QObject, QPoint,
QRect, QSize, QUrl, Qt)
from PySide2.QtGui import (QBrush, QColor, QConicalGradient, QCursor, QFont,
QFontDatabase, QIcon, QLinearGradient, QPalette, QPainter, QPixmap,
QRadialGradient)
from PySide2.QtWidgets import *
import yardimciFonk
import files_rc
from matplotlib import pyplot as plt
import matplotlib.patches as mpatches
import random
from operator import add
import sqlite3
css_lineedit="QLineEdit {\n"" background-color: rgb(27, 29, 35);\n"" border-radius: 5px;\n"" border: 2px solid rgb(27, 29, 35);\n"" padding-left: 10px;\n""}\n""QLineEdit:hover {\n"" border: 2px solid rgb(64, 71, 88);\n""}\n""QLineEdit:focus {\n"" border: 2px solid rgb(91, 101, 124);\n""}"
css_buton=""""QPushButton {\n"" border: 2px solid rgb(52, 59, 72);\n"" border-radius: 5px; \n"" background-color: rgb(52, 59, 72);\n""}\n""QPushButton:hover {\n"" background-color: rgb(57, 65, 80);\n"" border: 2px solid rgb(61, 70, 86);\n""}\n""QPushButton:pressed { \n"" background-color: rgb(35, 40, 49);\n"" border: 2px solid rgb(43, 50, 61);\n""}"""
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
if MainWindow.objectName():
MainWindow.setObjectName(u"MainWindow")
icon3 = QIcon()
icon3.addPixmap(QPixmap(":/16x16/icons/16x16/cil-save.png"), QIcon.Normal, QIcon.Off)
icon4 = QIcon()
icon4.addPixmap(QPixmap(":/16x16/icons/16x16/cil-chart-line.png"), QIcon.Normal, QIcon.Off)
icon6 = QIcon()
icon6.addPixmap(QPixmap(":/16x16/icons/16x16/cil-pencil.png"), QIcon.Normal, QIcon.Off)
MainWindow.resize(1173, 790)
MainWindow.setMinimumSize(QSize(1173, 790))
palette = QPalette()
brush = QBrush(QColor(255, 255, 255, 255))
brush.setStyle(Qt.SolidPattern)
palette.setBrush(QPalette.Active, QPalette.WindowText, brush)
brush1 = QBrush(QColor(0, 0, 0, 0))
brush1.setStyle(Qt.SolidPattern)
palette.setBrush(QPalette.Active, QPalette.Button, brush1)
brush2 = QBrush(QColor(66, 73, 90, 255))
brush2.setStyle(Qt.SolidPattern)
palette.setBrush(QPalette.Active, QPalette.Light, brush2)
brush3 = QBrush(QColor(55, 61, 75, 255))
brush3.setStyle(Qt.SolidPattern)
palette.setBrush(QPalette.Active, QPalette.Midlight, brush3)
brush4 = QBrush(QColor(22, 24, 30, 255))
brush4.setStyle(Qt.SolidPattern)
palette.setBrush(QPalette.Active, QPalette.Dark, brush4)
brush5 = QBrush(QColor(29, 32, 40, 255))
brush5.setStyle(Qt.SolidPattern)
palette.setBrush(QPalette.Active, QPalette.Mid, brush5)
brush6 = QBrush(QColor(210, 210, 210, 255))
brush6.setStyle(Qt.SolidPattern)
palette.setBrush(QPalette.Active, QPalette.Text, brush6)
palette.setBrush(QPalette.Active, QPalette.BrightText, brush)
palette.setBrush(QPalette.Active, QPalette.ButtonText, brush)
palette.setBrush(QPalette.Active, QPalette.Base, brush1)
palette.setBrush(QPalette.Active, QPalette.Window, brush1)
brush7 = QBrush(QColor(0, 0, 0, 255))
brush7.setStyle(Qt.SolidPattern)
palette.setBrush(QPalette.Active, QPalette.Shadow, brush7)
brush8 = QBrush(QColor(85, 170, 255, 255))
brush8.setStyle(Qt.SolidPattern)
palette.setBrush(QPalette.Active, QPalette.Highlight, brush8)
palette.setBrush(QPalette.Active, QPalette.Link, brush8)
brush9 = QBrush(QColor(255, 0, 127, 255))
brush9.setStyle(Qt.SolidPattern)
palette.setBrush(QPalette.Active, QPalette.LinkVisited, brush9)
palette.setBrush(QPalette.Active, QPalette.AlternateBase, brush4)
brush10 = QBrush(QColor(44, 49, 60, 255))
brush10.setStyle(Qt.SolidPattern)
palette.setBrush(QPalette.Active, QPalette.ToolTipBase, brush10)
palette.setBrush(QPalette.Active, QPalette.ToolTipText, brush6)
brush11 = QBrush(QColor(210, 210, 210, 128))
brush11.setStyle(Qt.NoBrush)
palette.setBrush(QPalette.Active, QPalette.PlaceholderText, brush11)
icon5 = QIcon()
icon5.addPixmap(QPixmap(":/16x16/icons/16x16/cil-file.png"), QIcon.Normal, QIcon.Off)
palette.setBrush(QPalette.Inactive, QPalette.WindowText, brush)
palette.setBrush(QPalette.Inactive, QPalette.Button, brush1)
palette.setBrush(QPalette.Inactive, QPalette.Light, brush2)
palette.setBrush(QPalette.Inactive, QPalette.Midlight, brush3)
palette.setBrush(QPalette.Inactive, QPalette.Dark, brush4)
palette.setBrush(QPalette.Inactive, QPalette.Mid, brush5)
palette.setBrush(QPalette.Inactive, QPalette.Text, brush6)
palette.setBrush(QPalette.Inactive, QPalette.BrightText, brush)
palette.setBrush(QPalette.Inactive, QPalette.ButtonText, brush)
palette.setBrush(QPalette.Inactive, QPalette.Base, brush1)
palette.setBrush(QPalette.Inactive, QPalette.Window, brush1)
palette.setBrush(QPalette.Inactive, QPalette.Shadow, brush7)
palette.setBrush(QPalette.Inactive, QPalette.Highlight, brush8)
palette.setBrush(QPalette.Inactive, QPalette.Link, brush8)
palette.setBrush(QPalette.Inactive, QPalette.LinkVisited, brush9)
palette.setBrush(QPalette.Inactive, QPalette.AlternateBase, brush4)
palette.setBrush(QPalette.Inactive, QPalette.ToolTipBase, brush10)
palette.setBrush(QPalette.Inactive, QPalette.ToolTipText, brush6)
brush12 = QBrush(QColor(210, 210, 210, 128))
brush12.setStyle(Qt.NoBrush)
palette.setBrush(QPalette.Inactive, QPalette.PlaceholderText, brush12)
palette.setBrush(QPalette.Disabled, QPalette.WindowText, brush4)
palette.setBrush(QPalette.Disabled, QPalette.Button, brush1)
palette.setBrush(QPalette.Disabled, QPalette.Light, brush2)
palette.setBrush(QPalette.Disabled, QPalette.Midlight, brush3)
palette.setBrush(QPalette.Disabled, QPalette.Dark, brush4)
palette.setBrush(QPalette.Disabled, QPalette.Mid, brush5)
palette.setBrush(QPalette.Disabled, QPalette.Text, brush4)
palette.setBrush(QPalette.Disabled, QPalette.BrightText, brush)
palette.setBrush(QPalette.Disabled, QPalette.ButtonText, brush4)
palette.setBrush(QPalette.Disabled, QPalette.Base, brush1)
palette.setBrush(QPalette.Disabled, QPalette.Window, brush1)
palette.setBrush(QPalette.Disabled, QPalette.Shadow, brush7)
brush13 = QBrush(QColor(51, 153, 255, 255))
brush13.setStyle(Qt.SolidPattern)
palette.setBrush(QPalette.Disabled, QPalette.Highlight, brush13)
palette.setBrush(QPalette.Disabled, QPalette.Link, brush8)
palette.setBrush(QPalette.Disabled, QPalette.LinkVisited, brush9)
palette.setBrush(QPalette.Disabled, QPalette.AlternateBase, brush10)
palette.setBrush(QPalette.Disabled, QPalette.ToolTipBase, brush10)
palette.setBrush(QPalette.Disabled, QPalette.ToolTipText, brush6)
brush14 = QBrush(QColor(210, 210, 210, 128))
brush14.setStyle(Qt.NoBrush)
palette.setBrush(QPalette.Disabled, QPalette.PlaceholderText, brush14)
MainWindow.setPalette(palette)
font = QFont()
font.setFamily(u"Segoe UI")
font.setPointSize(10)
MainWindow.setFont(font)
MainWindow.setStyleSheet(u"QMainWindow {background: transparent; }\n""QToolTip {\n"" color: #ffffff;\n"" background-color: rgba(27, 29, 35, 160);\n"" border: 1px solid rgb(40, 40, 40);\n"" border-radius: 2px;\n""}")
self.centralwidget = QWidget(MainWindow)
self.centralwidget.setObjectName(u"centralwidget")
self.centralwidget.setStyleSheet(u"background: transparent;\n""color: rgb(210, 210, 210);")
self.horizontalLayout = QHBoxLayout(self.centralwidget)
self.horizontalLayout.setSpacing(0)
self.horizontalLayout.setObjectName(u"horizontalLayout")
self.horizontalLayout.setContentsMargins(10, 10, 10, 10)
self.frame_main = QFrame(self.centralwidget)
self.frame_main.setObjectName(u"frame_main")
self.frame_main.setStyleSheet(u"/* LINE EDIT */\n""QLineEdit {\n"" background-color: rgb(27, 29, 35);\n"" border-radius: 5px;\n"" border: 2px solid rgb(27, 29, 35);\n"" padding-left: 10px;\n""}\n""QLineEdit:hover {\n"" border: 2px solid rgb(64, 71, 88);\n""}\n""QLineEdit:focus {\n"" border: 2px solid rgb(91, 101, 124);\n""}\n""\n""/* SCROLL BARS */\n""QScrollBar:horizontal {\n"" border: none;\n"" background: rgb(52, 59, 72);\n"" height: 14px;\n"" margin: 0px 21px 0 21px;\n"" border-radius: 0px;\n""}\n""QScrollBar::handle:horizontal {\n"" background: rgb(85, 170, 255);\n"" min-width: 25px;\n"" border-radius: 7px\n""}\n""QScrollBar::add-line:horizontal {\n"" border: none;\n"" background: rgb(55, 63, 77);\n"" width: 20px;\n"" border-top-right-radius: 7px;\n"" border-bottom-right-radius: 7px;\n"" subcontrol-position: right;\n"" subcontrol-origin: margin;\n""}\n""QScrollBar::sub-line:horizontal {\n"" border: none;\n"" background: rgb(55, 63, 77);\n"" width: 20px;\n""" " border-top-left-radius: 7px;\n"" border-bottom-left-radius: 7px;\n"" subcontrol-position: left;\n"" subcontrol-origin: margin;\n""}\n""QScrollBar::up-arrow:horizontal, QScrollBar::down-arrow:horizontal\n""{\n"" background: none;\n""}\n""QScrollBar::add-page:horizontal, QScrollBar::sub-page:horizontal\n""{\n"" background: none;\n""}\n"" QScrollBar:vertical {\n"" border: none;\n"" background: rgb(52, 59, 72);\n"" width: 14px;\n"" margin: 21px 0 21px 0;\n"" border-radius: 0px;\n"" }\n"" QScrollBar::handle:vertical { \n"" background: rgb(85, 170, 255);\n"" min-height: 25px;\n"" border-radius: 7px\n"" }\n"" QScrollBar::add-line:vertical {\n"" border: none;\n"" background: rgb(55, 63, 77);\n"" height: 20px;\n"" border-bottom-left-radius: 7px;\n"" border-bottom-right-radius: 7px;\n"" subcontrol-position: bottom;\n"" subcontrol-origin: margin;\n"" }\n"" QScrollBar::sub-line:vertical {\n"" border: none;\n"" background: rgb(55, 63" ", 77);\n"" height: 20px;\n"" border-top-left-radius: 7px;\n"" 
border-top-right-radius: 7px;\n"" subcontrol-position: top;\n"" subcontrol-origin: margin;\n"" }\n"" QScrollBar::up-arrow:vertical, QScrollBar::down-arrow:vertical {\n"" background: none;\n"" }\n""\n"" QScrollBar::add-page:vertical, QScrollBar::sub-page:vertical {\n"" background: none;\n"" }\n""\n""/* CHECKBOX */\n""QCheckBox::indicator {\n"" border: 3px solid rgb(52, 59, 72);\n"" width: 15px;\n"" height: 15px;\n"" border-radius: 10px;\n"" background: rgb(44, 49, 60);\n""}\n""QCheckBox::indicator:hover {\n"" border: 3px solid rgb(58, 66, 81);\n""}\n""QCheckBox::indicator:checked {\n"" background: 3px solid rgb(52, 59, 72);\n"" border: 3px solid rgb(52, 59, 72); \n"" background-image: url(:/16x16/icons/16x16/cil-check-alt.png);\n""}\n""\n""/* RADIO BUTTON */\n""QRadioButton::indicator {\n"" border: 3px solid rgb(52, 59, 72);\n"" width: 15px;\n"" height: 15px;\n"" border-radius" ": 10px;\n"" background: rgb(44, 49, 60);\n""}\n""QRadioButton::indicator:hover {\n"" border: 3px solid rgb(58, 66, 81);\n""}\n""QRadioButton::indicator:checked {\n"" background: 3px solid rgb(94, 106, 130);\n"" border: 3px solid rgb(52, 59, 72); \n""}\n""\n""/* COMBOBOX */\n""QComboBox{\n"" background-color: rgb(27, 29, 35);\n"" border-radius: 5px;\n"" border: 2px solid rgb(27, 29, 35);\n"" padding: 5px;\n"" padding-left: 10px;\n""}\n""QComboBox:hover{\n"" border: 2px solid rgb(64, 71, 88);\n""}\n""QComboBox::drop-down {\n"" subcontrol-origin: padding;\n"" subcontrol-position: top right;\n"" width: 25px; \n"" border-left-width: 3px;\n"" border-left-color: rgba(39, 44, 54, 150);\n"" border-left-style: solid;\n"" border-top-right-radius: 3px;\n"" border-bottom-right-radius: 3px; \n"" background-image: url(:/16x16/icons/16x16/cil-arrow-bottom.png);\n"" background-position: center;\n"" background-repeat: no-reperat;\n"" }\n""QComboBox QAbstractItemView {\n"" color: rgb(" "85, 170, 255); \n"" background-color: rgb(27, 29, 35);\n"" padding: 10px;\n"" selection-background-color: rgb(39, 44, 
54);\n""}\n""\n""/* SLIDERS */\n""QSlider::groove:horizontal {\n"" border-radius: 9px;\n"" height: 18px;\n"" margin: 0px;\n"" background-color: rgb(52, 59, 72);\n""}\n""QSlider::groove:horizontal:hover {\n"" background-color: rgb(55, 62, 76);\n""}\n""QSlider::handle:horizontal {\n"" background-color: rgb(85, 170, 255);\n"" border: none;\n"" height: 18px;\n"" width: 18px;\n"" margin: 0px;\n"" border-radius: 9px;\n""}\n""QSlider::handle:horizontal:hover {\n"" background-color: rgb(105, 180, 255);\n""}\n""QSlider::handle:horizontal:pressed {\n"" background-color: rgb(65, 130, 195);\n""}\n""\n""QSlider::groove:vertical {\n"" border-radius: 9px;\n"" width: 18px;\n"" margin: 0px;\n"" background-color: rgb(52, 59, 72);\n""}\n""QSlider::groove:vertical:hover {\n"" background-color: rgb(55, 62, 76);\n""}\n""QSlider::handle:verti" "cal {\n"" background-color: rgb(85, 170, 255);\n"" border: none;\n"" height: 18px;\n"" width: 18px;\n"" margin: 0px;\n"" border-radius: 9px;\n""}\n""QSlider::handle:vertical:hover {\n"" background-color: rgb(105, 180, 255);\n""}\n""QSlider::handle:vertical:pressed {\n"" background-color: rgb(65, 130, 195);\n""}\n""\n""")
self.frame_main.setFrameShape(QFrame.NoFrame)
self.frame_main.setFrameShadow(QFrame.Raised)
self.verticalLayout = QVBoxLayout(self.frame_main)
self.verticalLayout.setSpacing(0)
self.verticalLayout.setObjectName(u"verticalLayout")
self.verticalLayout.setContentsMargins(0, 0, 0, 0)
self.frame_top = QFrame(self.frame_main)
self.frame_top.setObjectName(u"frame_top")
self.frame_top.setMinimumSize(QSize(0, 65))
self.frame_top.setMaximumSize(QSize(16777215, 65))
self.frame_top.setStyleSheet(u"background-color: transparent;")
self.frame_top.setFrameShape(QFrame.NoFrame)
self.frame_top.setFrameShadow(QFrame.Raised)
self.horizontalLayout_3 = QHBoxLayout(self.frame_top)
self.horizontalLayout_3.setSpacing(0)
self.horizontalLayout_3.setObjectName(u"horizontalLayout_3")
self.horizontalLayout_3.setContentsMargins(0, 0, 0, 0)
self.frame_toggle = QFrame(self.frame_top)
self.frame_toggle.setObjectName(u"frame_toggle")
self.frame_toggle.setMaximumSize(QSize(70, 16777215))
self.frame_toggle.setStyleSheet(u"background-color: rgb(27, 29, 35);")
self.frame_toggle.setFrameShape(QFrame.NoFrame)
self.frame_toggle.setFrameShadow(QFrame.Raised)
self.verticalLayout_3 = QVBoxLayout(self.frame_toggle)
self.verticalLayout_3.setSpacing(0)
self.verticalLayout_3.setObjectName(u"verticalLayout_3")
self.verticalLayout_3.setContentsMargins(0, 0, 0, 0)
self.btn_toggle_menu = QPushButton(self.frame_toggle)
self.btn_toggle_menu.setObjectName(u"btn_toggle_menu")
sizePolicy = QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.btn_toggle_menu.sizePolicy().hasHeightForWidth())
self.btn_toggle_menu.setSizePolicy(sizePolicy)
self.btn_toggle_menu.setStyleSheet(u"QPushButton {\n"" background-image: url(:/24x24/icons/24x24/cil-menu.png);\n"" background-position: center;\n"" background-repeat: no-reperat;\n"" border: none;\n"" background-color: rgb(27, 29, 35);\n""}\n""QPushButton:hover {\n"" background-color: rgb(33, 37, 43);\n""}\n""QPushButton:pressed { \n"" background-color: rgb(85, 170, 255);\n""}")
self.verticalLayout_3.addWidget(self.btn_toggle_menu)
self.horizontalLayout_3.addWidget(self.frame_toggle)
self.frame_top_right = QFrame(self.frame_top)
self.frame_top_right.setObjectName(u"frame_top_right")
self.frame_top_right.setStyleSheet(u"background: transparent;")
self.frame_top_right.setFrameShape(QFrame.NoFrame)
self.frame_top_right.setFrameShadow(QFrame.Raised)
self.verticalLayout_2 = QVBoxLayout(self.frame_top_right)
self.verticalLayout_2.setSpacing(0)
self.verticalLayout_2.setObjectName(u"verticalLayout_2")
self.verticalLayout_2.setContentsMargins(0, 0, 0, 0)
self.frame_top_btns = QFrame(self.frame_top_right)
self.frame_top_btns.setObjectName(u"frame_top_btns")
self.frame_top_btns.setMaximumSize(QSize(16777215, 42))
self.frame_top_btns.setStyleSheet(u"background-color: rgba(27, 29, 35, 200)")
self.frame_top_btns.setFrameShape(QFrame.NoFrame)
self.frame_top_btns.setFrameShadow(QFrame.Raised)
self.horizontalLayout_4 = QHBoxLayout(self.frame_top_btns)
self.horizontalLayout_4.setSpacing(0)
self.horizontalLayout_4.setObjectName(u"horizontalLayout_4")
self.horizontalLayout_4.setContentsMargins(0, 0, 0, 0)
self.frame_label_top_btns = QFrame(self.frame_top_btns)
self.frame_label_top_btns.setObjectName(u"frame_label_top_btns")
sizePolicy1 = QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Preferred)
sizePolicy1.setHorizontalStretch(0)
sizePolicy1.setVerticalStretch(0)
sizePolicy1.setHeightForWidth(self.frame_label_top_btns.sizePolicy().hasHeightForWidth())
self.frame_label_top_btns.setSizePolicy(sizePolicy1)
self.frame_label_top_btns.setFrameShape(QFrame.NoFrame)
self.frame_label_top_btns.setFrameShadow(QFrame.Raised)
self.horizontalLayout_10 = QHBoxLayout(self.frame_label_top_btns)
self.horizontalLayout_10.setSpacing(0)
self.horizontalLayout_10.setObjectName(u"horizontalLayout_10")
self.horizontalLayout_10.setContentsMargins(5, 0, 10, 0)
self.frame_icon_top_bar = QFrame(self.frame_label_top_btns)
self.frame_icon_top_bar.setObjectName(u"frame_icon_top_bar")
self.frame_icon_top_bar.setMaximumSize(QSize(30, 30))
self.frame_icon_top_bar.setStyleSheet(u"background: transparent;\n""background-image: url(images/cil-pencil.png);\n""background-position: center;\n""background-repeat: no-repeat;\n""")
self.frame_icon_top_bar.setFrameShape(QFrame.StyledPanel)
self.frame_icon_top_bar.setFrameShadow(QFrame.Raised)
self.horizontalLayout_10.addWidget(self.frame_icon_top_bar)
self.label_title_bar_top = QLabel(self.frame_label_top_btns)
self.label_title_bar_top.setObjectName(u"label_title_bar_top")
font1 = QFont()
font1.setFamily(u"Segoe UI")
font1.setPointSize(10)
font1.setBold(True)
font1.setWeight(75)
self.label_title_bar_top.setFont(font1)
self.label_title_bar_top.setStyleSheet(u"background: transparent;\n""")
self.horizontalLayout_10.addWidget(self.label_title_bar_top)
self.horizontalLayout_4.addWidget(self.frame_label_top_btns)
self.frame_btns_right = QFrame(self.frame_top_btns)
self.frame_btns_right.setObjectName(u"frame_btns_right")
sizePolicy1.setHeightForWidth(self.frame_btns_right.sizePolicy().hasHeightForWidth())
self.frame_btns_right.setSizePolicy(sizePolicy1)
self.frame_btns_right.setMaximumSize(QSize(120, 16777215))
self.frame_btns_right.setFrameShape(QFrame.NoFrame)
self.frame_btns_right.setFrameShadow(QFrame.Raised)
self.horizontalLayout_5 = QHBoxLayout(self.frame_btns_right)
self.horizontalLayout_5.setSpacing(0)
self.horizontalLayout_5.setObjectName(u"horizontalLayout_5")
self.horizontalLayout_5.setContentsMargins(0, 0, 0, 0)
self.btn_minimize = QPushButton(self.frame_btns_right)
self.btn_minimize.setObjectName(u"btn_minimize")
sizePolicy2 = QSizePolicy(QSizePolicy.Minimum, QSizePolicy.Expanding)
sizePolicy2.setHorizontalStretch(0)
sizePolicy2.setVerticalStretch(0)
sizePolicy2.setHeightForWidth(self.btn_minimize.sizePolicy().hasHeightForWidth())
self.btn_minimize.setSizePolicy(sizePolicy2)
self.btn_minimize.setMinimumSize(QSize(40, 0))
self.btn_minimize.setMaximumSize(QSize(40, 16777215))
self.btn_minimize.setStyleSheet(u"QPushButton { \n"" border: none;\n"" background-color: transparent;\n""}\n""QPushButton:hover {\n"" background-color: rgb(52, 59, 72);\n""}\n""QPushButton:pressed { \n"" background-color: rgb(85, 170, 255);\n""}")
icon = QIcon()
icon.addFile(u":/16x16/icons/16x16/cil-window-minimize.png", QSize(), QIcon.Normal, QIcon.Off)
self.btn_minimize.setIcon(icon)
self.horizontalLayout_5.addWidget(self.btn_minimize)
self.btn_maximize_restore = QPushButton(self.frame_btns_right)
self.btn_maximize_restore.setObjectName(u"btn_maximize_restore")
sizePolicy2.setHeightForWidth(self.btn_maximize_restore.sizePolicy().hasHeightForWidth())
self.btn_maximize_restore.setSizePolicy(sizePolicy2)
self.btn_maximize_restore.setMinimumSize(QSize(40, 0))
self.btn_maximize_restore.setMaximumSize(QSize(40, 16777215))
self.btn_maximize_restore.setStyleSheet(u"QPushButton { \n"" border: none;\n"" background-color: transparent;\n""}\n""QPushButton:hover {\n"" background-color: rgb(52, 59, 72);\n""}\n""QPushButton:pressed { \n"" background-color: rgb(85, 170, 255);\n""}")
icon1 = QIcon()
icon1.addFile(u":/16x16/icons/16x16/cil-window-maximize.png", QSize(), QIcon.Normal, QIcon.Off)
self.btn_maximize_restore.setIcon(icon1)
self.horizontalLayout_5.addWidget(self.btn_maximize_restore)
self.btn_close = QPushButton(self.frame_btns_right)
self.btn_close.setObjectName(u"btn_close")
sizePolicy2.setHeightForWidth(self.btn_close.sizePolicy().hasHeightForWidth())
self.btn_close.setSizePolicy(sizePolicy2)
self.btn_close.setMinimumSize(QSize(40, 0))
self.btn_close.setMaximumSize(QSize(40, 16777215))
self.btn_close.setStyleSheet(u"QPushButton { \n"" border: none;\n"" background-color: transparent;\n""}\n""QPushButton:hover {\n"" background-color: rgb(52, 59, 72);\n""}\n""QPushButton:pressed { \n"" background-color: rgb(85, 170, 255);\n""}")
icon2 = QIcon()
icon2.addFile(u":/16x16/icons/16x16/cil-x.png", QSize(), QIcon.Normal, QIcon.Off)
self.btn_close.setIcon(icon2)
self.horizontalLayout_5.addWidget(self.btn_close)
self.horizontalLayout_4.addWidget(self.frame_btns_right, 0, Qt.AlignRight)
self.verticalLayout_2.addWidget(self.frame_top_btns)
self.frame_top_info = QFrame(self.frame_top_right)
self.frame_top_info.setObjectName(u"frame_top_info")
self.frame_top_info.setMaximumSize(QSize(16777215, 65))
self.frame_top_info.setStyleSheet(u"background-color: rgb(39, 44, 54);")
self.frame_top_info.setFrameShape(QFrame.NoFrame)
self.frame_top_info.setFrameShadow(QFrame.Raised)
self.horizontalLayout_8 = QHBoxLayout(self.frame_top_info)
self.horizontalLayout_8.setSpacing(0)
self.horizontalLayout_8.setObjectName(u"horizontalLayout_8")
self.horizontalLayout_8.setContentsMargins(10, 0, 10, 0)
self.label_top_info_1 = QLabel(self.frame_top_info)
self.label_top_info_1.setObjectName(u"label_top_info_1")
self.label_top_info_1.setMaximumSize(QSize(16777215, 15))
font2 = QFont()
font2.setFamily(u"Segoe UI")
self.label_top_info_1.setFont(font2)
self.label_top_info_1.setStyleSheet(u"color: rgb(98, 103, 111); ")
self.horizontalLayout_8.addWidget(self.label_top_info_1)
self.label_top_info_2 = QLabel(self.frame_top_info)
self.label_top_info_2.setObjectName(u"label_top_info_2")
self.label_top_info_2.setMinimumSize(QSize(0, 0))
self.label_top_info_2.setMaximumSize(QSize(250, 20))
font3 = QFont()
font3.setFamily(u"Segoe UI")
font3.setBold(True)
font3.setWeight(75)
self.label_top_info_2.setFont(font3)
self.label_top_info_2.setStyleSheet(u"color: rgb(98, 103, 111);")
self.label_top_info_2.setAlignment(Qt.AlignRight|Qt.AlignTrailing|Qt.AlignVCenter)
self.horizontalLayout_8.addWidget(self.label_top_info_2)
self.verticalLayout_2.addWidget(self.frame_top_info)
self.horizontalLayout_3.addWidget(self.frame_top_right)
self.verticalLayout.addWidget(self.frame_top)
self.frame_center = QFrame(self.frame_main)
self.frame_center.setObjectName(u"frame_center")
sizePolicy.setHeightForWidth(self.frame_center.sizePolicy().hasHeightForWidth())
self.frame_center.setSizePolicy(sizePolicy)
self.frame_center.setStyleSheet(u"background-color: rgb(40, 44, 52);")
self.frame_center.setFrameShape(QFrame.NoFrame)
self.frame_center.setFrameShadow(QFrame.Raised)
self.horizontalLayout_2 = QHBoxLayout(self.frame_center)
self.horizontalLayout_2.setSpacing(0)
self.horizontalLayout_2.setObjectName(u"horizontalLayout_2")
self.horizontalLayout_2.setContentsMargins(0, 0, 0, 0)
self.frame_left_menu = QFrame(self.frame_center)
self.frame_left_menu.setObjectName(u"frame_left_menu")
sizePolicy3 = QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Preferred)
sizePolicy3.setHorizontalStretch(0)
sizePolicy3.setVerticalStretch(0)
sizePolicy3.setHeightForWidth(self.frame_left_menu.sizePolicy().hasHeightForWidth())
self.frame_left_menu.setSizePolicy(sizePolicy3)
self.frame_left_menu.setMinimumSize(QSize(70, 0))
self.frame_left_menu.setMaximumSize(QSize(70, 16777215))
self.frame_left_menu.setLayoutDirection(Qt.LeftToRight)
self.frame_left_menu.setStyleSheet(u"background-color: rgb(27, 29, 35);")
self.frame_left_menu.setFrameShape(QFrame.NoFrame)
self.frame_left_menu.setFrameShadow(QFrame.Raised)
self.verticalLayout_5 = QVBoxLayout(self.frame_left_menu)
self.verticalLayout_5.setSpacing(1)
self.verticalLayout_5.setObjectName(u"verticalLayout_5")
self.verticalLayout_5.setContentsMargins(0, 0, 0, 0)
self.frame_menus = QFrame(self.frame_left_menu)
self.frame_menus.setObjectName(u"frame_menus")
self.frame_menus.setFrameShape(QFrame.NoFrame)
self.frame_menus.setFrameShadow(QFrame.Raised)
self.layout_menus = QVBoxLayout(self.frame_menus)
self.layout_menus.setSpacing(0)
self.layout_menus.setObjectName(u"layout_menus")
self.layout_menus.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_5.addWidget(self.frame_menus, 0, Qt.AlignTop)
self.frame_extra_menus = QFrame(self.frame_left_menu)
self.frame_extra_menus.setObjectName(u"frame_extra_menus")
sizePolicy3.setHeightForWidth(self.frame_extra_menus.sizePolicy().hasHeightForWidth())
self.frame_extra_menus.setSizePolicy(sizePolicy3)
self.frame_extra_menus.setFrameShape(QFrame.NoFrame)
self.frame_extra_menus.setFrameShadow(QFrame.Raised)
self.layout_menu_bottom = QVBoxLayout(self.frame_extra_menus)
self.layout_menu_bottom.setSpacing(10)
self.layout_menu_bottom.setObjectName(u"layout_menu_bottom")
self.layout_menu_bottom.setContentsMargins(0, 0, 0, 25)
self.label_user_icon = QLabel(self.frame_extra_menus)
self.label_user_icon.setObjectName(u"label_user_icon")
sizePolicy4 = QSizePolicy(QSizePolicy.Maximum, QSizePolicy.Maximum)
sizePolicy4.setHorizontalStretch(0)
sizePolicy4.setVerticalStretch(0)
sizePolicy4.setHeightForWidth(self.label_user_icon.sizePolicy().hasHeightForWidth())
self.label_user_icon.setSizePolicy(sizePolicy4)
self.label_user_icon.setMinimumSize(QSize(60, 60))
self.label_user_icon.setMaximumSize(QSize(60, 60))
font4 = QFont()
font4.setFamily(u"Segoe UI")
font4.setPointSize(12)
self.label_user_icon.setFont(font4)
self.label_user_icon.setStyleSheet(u"QLabel {\n"" border-radius: 30px;\n"" background-color: rgb(44, 49, 60);\n"" border: 5px solid rgb(39, 44, 54);\n"" background-position: center;\n"" background-repeat: no-repeat;\n""}")
self.label_user_icon.setAlignment(Qt.AlignCenter)
self.layout_menu_bottom.addWidget(self.label_user_icon, 0, Qt.AlignHCenter)
self.verticalLayout_5.addWidget(self.frame_extra_menus, 0, Qt.AlignBottom)
self.horizontalLayout_2.addWidget(self.frame_left_menu)
self.frame_content_right = QFrame(self.frame_center)
self.frame_content_right.setObjectName(u"frame_content_right")
self.frame_content_right.setStyleSheet(u"background-color: rgb(44, 49, 60);")
self.frame_content_right.setFrameShape(QFrame.NoFrame)
self.frame_content_right.setFrameShadow(QFrame.Raised)
self.verticalLayout_4 = QVBoxLayout(self.frame_content_right)
self.verticalLayout_4.setSpacing(0)
self.verticalLayout_4.setObjectName(u"verticalLayout_4")
self.verticalLayout_4.setContentsMargins(0, 0, 0, 0)
self.frame_content = QFrame(self.frame_content_right)
self.frame_content.setObjectName(u"frame_content")
self.frame_content.setFrameShape(QFrame.NoFrame)
self.frame_content.setFrameShadow(QFrame.Raised)
self.verticalLayout_9 = QVBoxLayout(self.frame_content)
self.verticalLayout_9.setSpacing(0)
self.verticalLayout_9.setObjectName(u"verticalLayout_9")
self.verticalLayout_9.setContentsMargins(5, 5, 5, 5)
self.stackedWidget = QStackedWidget(self.frame_content)
self.stackedWidget.setObjectName(u"stackedWidget")
self.stackedWidget.setStyleSheet(u"background: transparent;")
self.page_home = QWidget()
self.page_home.setObjectName(u"page_home")
self.verticalLayout_10 = QVBoxLayout(self.page_home)
self.verticalLayout_10.setObjectName(u"verticalLayout_10")
self.label_6 = QLabel(self.page_home)
self.label_6.setObjectName(u"label_6")
font5 = QFont()
font5.setFamily(u"Segoe UI")
font5.setPointSize(40)
self.label_6.setFont(font5)
self.label_6.setStyleSheet(u"")
self.label_6.setAlignment(Qt.AlignCenter)
self.verticalLayout_10.addWidget(self.label_6)
self.label = QLabel(self.page_home)
self.label.setObjectName(u"label")
font6 = QFont()
font6.setFamily(u"Segoe UI")
font6.setPointSize(14)
self.label.setFont(font6)
self.label.setAlignment(Qt.AlignCenter)
self.verticalLayout_10.addWidget(self.label)
self.label_7 = QLabel(self.page_home)
self.label_7.setObjectName(u"label_7")
font7 = QFont()
font7.setFamily(u"Segoe UI")
font7.setPointSize(15)
self.label_7.setFont(font7)
self.label_7.setAlignment(Qt.AlignCenter)
self.verticalLayout_10.addWidget(self.label_7)
self.stackedWidget.addWidget(self.page_home)
self.page_derslerim = QWidget()
self.page_derslerim.setObjectName("page_derslerim")
self.verticalLayout_10 = QVBoxLayout(self.page_derslerim)
self.verticalLayout_10.setObjectName("verticalLayout_10")
self.frame_4 = QFrame(self.page_derslerim)
self.frame_4.setFrameShape(QFrame.StyledPanel)
self.frame_4.setFrameShadow(QFrame.Raised)
self.frame_4.setObjectName("frame_4")
self.verticalLayout_16 = QVBoxLayout(self.frame_4)
self.verticalLayout_16.setObjectName("verticalLayout_16")
self.groupBox_4 = QGroupBox(self.frame_4)
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.groupBox_4.setFont(font)
self.groupBox_4.setObjectName("groupBox_4")
self.horizontalLayout_14 = QHBoxLayout(self.groupBox_4)
self.horizontalLayout_14.setObjectName("horizontalLayout_14")
self.list_dersler = QListWidget(self.groupBox_4)
self.list_dersler.setMaximumSize(QSize(500, 16777215))
self.list_dersler.setObjectName("list_dersler")
self.list_dersler.addItems(["Matematik TYT","Matematik AYT","Fizik TYT","Fizik AYT","Kimya TYT","Kimya AYT","Biyoloji TYT","Biyoloji AYT","Geometri TYT","Geometri AYT","Felsefe","Türkçe","Tarih","Coğrafya","Din Kültürü"])
self.list_dersler.setCurrentRow(0)
self.horizontalLayout_14.addWidget(self.list_dersler)
self.frame_6 = QFrame(self.groupBox_4)
self.frame_6.setMinimumSize(QSize(350, 0))
self.frame_6.setFrameShape(QFrame.StyledPanel)
self.frame_6.setFrameShadow(QFrame.Raised)
self.frame_6.setObjectName("frame_6")
self.verticalLayout_17 = QVBoxLayout(self.frame_6)
self.verticalLayout_17.setContentsMargins(0, 0, 0, 9)
self.verticalLayout_17.setObjectName("verticalLayout_17")
self.frame_13 = QFrame(self.frame_6)
self.frame_13.setMinimumSize(QSize(0, 200))
self.frame_13.setMaximumSize(QSize(16777215, 175))
self.frame_13.setFrameShape(QFrame.StyledPanel)
self.frame_13.setFrameShadow(QFrame.Raised)
self.frame_13.setObjectName("frame_13")
self.formLayout = QFormLayout(self.frame_13)
self.formLayout.setContentsMargins(0, 0, 0, 0)
self.formLayout.setObjectName("formLayout")
self.Pd_lbl2 = QLabel(self.frame_13)
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.Pd_lbl2.setFont(font)
self.Pd_lbl2.setObjectName("Pd_lbl2")
self.formLayout.setWidget(0, QFormLayout.LabelRole, self.Pd_lbl2)
self.Pd_cbox_gunler = QComboBox(self.frame_13)
self.Pd_cbox_gunler.setObjectName("Pd_cbox_gunler")
self.Pd_cbox_gunler.addItems(["Gün Seçiniz","Pazartesi","Salı","Çarşamba","Perşembe","Cuma","Cumartesi","Pazar"])
self.Pd_cbox_gunler.setStyleSheet("""QComboBox{ background-color: rgb(27, 29, 35); border-radius: 5px; border: 2px solid rgb(27, 29, 35); padding: 5px; padding-left: 10px;}QComboBox:hover{ border: 2px solid rgb(64, 71, 88);}QComboBox QAbstractItemView { color: rgb(85, 170, 255); background-color: rgb(27, 29, 35); padding: 10px; selection-background-color: rgb(39, 44, 54);}""")
self.formLayout.setWidget(0, QFormLayout.FieldRole, self.Pd_cbox_gunler)
self.Pd_lbl1 = QLabel(self.frame_13)
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.Pd_lbl1.setFont(font)
self.Pd_lbl1.setObjectName("Pd_lbl1")
self.formLayout.setWidget(1, QFormLayout.LabelRole, self.Pd_lbl1)
self.Pd_lne_soru = QLineEdit(self.frame_13)
self.Pd_lne_soru.setMinimumSize(QSize(0, 27))
self.Pd_lne_soru.setObjectName("Pd_lne_soru")
self.Pd_lne_soru.setPlaceholderText("Toplam soru sayısı giriniz")
self.Pd_lne_soru.setStyleSheet(css_lineedit)
self.formLayout.setWidget(1, QFormLayout.FieldRole, self.Pd_lne_soru)
self.Pd_lbl3 = QLabel(self.frame_13)
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.Pd_lbl3.setFont(font)
self.Pd_lbl3.setObjectName("Pd_lbl3")
self.formLayout.setWidget(2, QFormLayout.LabelRole, self.Pd_lbl3)
self.Pd_lne_dogru = QLineEdit(self.frame_13)
self.Pd_lne_dogru.setMinimumSize(QSize(0, 27))
self.Pd_lne_dogru.setObjectName("Pd_lne_dogru")
self.Pd_lne_dogru.setPlaceholderText("Toplam doğru sayısını giriniz")
self.Pd_lne_dogru.setStyleSheet(css_lineedit)
self.formLayout.setWidget(2, QFormLayout.FieldRole, self.Pd_lne_dogru)
self.Pd_lbl4 = QLabel(self.frame_13)
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.Pd_lbl4.setFont(font)
self.Pd_lbl4.setObjectName("Pd_lbl4")
self.formLayout.setWidget(3, QFormLayout.LabelRole, self.Pd_lbl4)
self.Pd_lne_yanlis = QLineEdit(self.frame_13)
self.Pd_lne_yanlis.setMinimumSize(QSize(0, 27))
self.Pd_lne_yanlis.setObjectName("Pd_lne_yanlis")
self.Pd_lne_yanlis.setStyleSheet(css_lineedit)
self.Pd_lne_yanlis.setPlaceholderText("Toplam yanlış sayısını giriniz")
self.formLayout.setWidget(3, QFormLayout.FieldRole, self.Pd_lne_yanlis)
self.Pd_radio_e = QRadioButton(self.frame_13)
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.Pd_radio_e.setFont(font)
self.Pd_radio_e.setObjectName("Pd_radio_e")
self.formLayout.setWidget(4, QFormLayout.LabelRole, self.Pd_radio_e)
self.Pd_radio_g = QRadioButton(self.frame_13)
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.Pd_radio_g.setFont(font)
self.Pd_radio_g.setObjectName("Pd_radio_g")
self.formLayout.setWidget(5, QFormLayout.LabelRole, self.Pd_radio_g)
self.verticalLayout_17.addWidget(self.frame_13)
self.frame_8 = QFrame(self.frame_6)
self.frame_8.setFrameShape(QFrame.StyledPanel)
self.frame_8.setFrameShadow(QFrame.Raised)
self.frame_8.setObjectName("frame_8")
self.horizontalLayout_16 = QHBoxLayout(self.frame_8)
self.horizontalLayout_16.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_16.setObjectName("horizontalLayout_16")
spacerItem = QSpacerItem(10, 20, QSizePolicy.Expanding, QSizePolicy.Minimum)
self.horizontalLayout_16.addItem(spacerItem)
self.Pd_lbl9 = QLabel(self.frame_8)
self.Pd_lbl9.setText("")
self.Pd_lbl9.setPixmap(QPixmap("images/write.png"))
self.Pd_lbl9.setObjectName("Pd_lbl9")
self.horizontalLayout_16.addWidget(self.Pd_lbl9)
spacerItem1 = QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum)
self.horizontalLayout_16.addItem(spacerItem1)
self.verticalLayout_17.addWidget(self.frame_8)
self.frame_11 = QFrame(self.frame_6)
self.frame_11.setMinimumSize(QSize(0, 50))
self.frame_11.setMaximumSize(QSize(16777215, 50))
self.frame_11.setFrameShape(QFrame.StyledPanel)
self.frame_11.setFrameShadow(QFrame.Raised)
self.frame_11.setObjectName("frame_11")
self.horizontalLayout_15 = QHBoxLayout(self.frame_11)
self.horizontalLayout_15.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_15.setObjectName("horizontalLayout_15")
self.Pd_btn_coz = QPushButton(self.frame_11)
self.Pd_btn_coz.setMinimumSize(QSize(200, 40))
self.Pd_btn_coz.setMaximumSize(QSize(120, 30))
self.Pd_btn_coz.setIcon(icon6)
self.Pd_btn_coz.clicked.connect(self.Soru_Coz_Proccess)
self.Pd_btn_coz.setStyleSheet("QPushButton {\n"" border: 2px solid rgb(52, 59, 72);\n"" border-radius: 5px; \n"" background-color: rgb(52, 59, 72);\n""}\n""QPushButton:hover {\n"" background-color: rgb(57, 65, 80);\n"" border: 2px solid rgb(61, 70, 86);\n""}\n""QPushButton:pressed { \n"" background-color: rgb(35, 40, 49);\n"" border: 2px solid rgb(43, 50, 61);\n""}")
self.Pd_btn_coz.setObjectName("Pd_btn_coz")
self.horizontalLayout_15.addWidget(self.Pd_btn_coz)
self.verticalLayout_17.addWidget(self.frame_11)
self.horizontalLayout_14.addWidget(self.frame_6)
self.Pd_ptext_yardim = QPlainTextEdit(self.groupBox_4)
self.Pd_ptext_yardim.setMaximumSize(QSize(500, 16777215))
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.Pd_ptext_yardim.setFont(font)
self.Pd_ptext_yardim.setReadOnly(True)
self.Pd_ptext_yardim.setObjectName("Pd_ptext_yardim")
motive_sozleri = [
"Bırakma. Şimdi acı çek ve hayatının geri kalanını bir şampiyon olarak yaşa.",
"Ne istediğime karar verdim ve başarana kadar asla pes etmeyeceğim.",
"İlk önce izlerler. Başarınca nefret ederler. Sonra da taklit ederler.",
"Yürürsen yakındır bakarsan uzak.",
"Bazıları başarıyı sadece hayal ederken, bazıları ise her sabah erkenden kalkar ve hayallerini gerçekleştirir.",
"Şimdi çalış, sonra ağlarsın.",
"Bu hayatta her şey zor olsa da hiçbir zaman imkansız değildir.",
"Yalnızca bugün yaptıkların, bütün yarınlarını değiştirebilir.",
"Bundan bir yıl sonra bugün başlamış olmayı dileyeceksin.",
"Başarı her gün tekrarlanan küçük çabaların toplamıdır.",
"Umutsuz durumlar yoktur, umutsuz insanlar vardır.Ben hiçbir zaman umudumu yitirmedim.",
"Senin almaya cesaret edemediğin riskleri alanlar,senin yaşamak istediğin hayatı yaşarlar.",
"Yüzüstü yere serilseniz bile, hala ileriye doğru hareket ediyorsunuzdur.",
"Durmadığın sürece ne kadar yavaş gittiğin önemli değil",
"Başarıdaki kararlılığım yeterince güçlü ise başarısızlık asla beni yakalayamaz.",
"Arkamda bıraktığım köprüleri yıkarım ki, ilerlemekten başka çarem kalmasın.",
"İşlerin nasıl yapıldığını bilmiyorsan kendi bildiğin gibi değiştir!",
"Yapamayacağını düşündüğün şeyi yap ve başarısız olursan tekrar dene.Takla atmayan adam ipte hiçbir zaman yürümeyenlerdir.",
"Hiç kimse geriye gidip yeni bir başlangıç yapamaz;ama bugün yeni bir son yapıp yeniden başlayabilir",
"Önce FARKI yaratırsın, sonra da FARK yaratırsın.",
"Diğerlerinden daha akıllı olmak zorunda değiliz.Diğerlerinden daha disiplinli olmak zorundayız.",
"Daha iyisini yapmanın bir yolu var ise bulun ve kimseye kulak asmayın.",
"İnsanların en büyük zayıflığı pes etmektir.Başarılı olmanın en kesin kuralı ise her zaman bir kez daha denemektir.",
"lk önce kendine ne olacağını sor; sonra ne yapmak gerekiyorsa yap.",
"Kazanma isteği, başarıya ulaşma arzusu ve ulaşma arzusu… Bunlar birleşirse kişisel mükemmelliğin kapısı açılır",
"Bir gün kalkacaksınız ve hep hayal ettiğiniz şeyleri yapmaya vakit kalmamış olacak. Şimdi tam zamanı. Harekete geçin.",
"Zaferin coşkusunu hissedebilmeniz için zorlukları kabul edin.",
"Hayatımı sadece ben değiştirebilirim. Kimse benim için bunu yapmaz!",
"Dışınızda, içinizdeki güçten üstün olan hiçbir sorun yoktur.",
"Hiçbir engel yürekteki kadar büyük değildir.",
"Başarılı olmak istiyorsanız, başarının bilinen yollarını dolaşmak yerine yeni yollar keşfedin.",
"İyi yapılmış bir iş, iyi söylenmiş bir işten daha iyidir!",
"Batan güneş için ağlamayın; yeniden doğduğunda ne yapacağınıza karar verin.",
]
rastgele = random.choice(motive_sozleri)
self.Pd_ptext_yardim.setStyleSheet("QPlainTextEdit { background-color: rgb(27, 29, 35); border-radius: 5px; padding: 10px;}QPlainTextEdit:hover { border: 2px solid rgb(64, 71, 88);}QPlainTextEdit:focus { border: 2px solid rgb(91, 101, 124);}")
self.horizontalLayout_14.addWidget(self.Pd_ptext_yardim)
self.verticalLayout_16.addWidget(self.groupBox_4)
self.verticalLayout_10.addWidget(self.frame_4)
self.frame_5 = QFrame(self.page_derslerim)
sizePolicy = QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame_5.sizePolicy().hasHeightForWidth())
self.frame_5.setSizePolicy(sizePolicy)
self.frame_5.setMaximumSize(QSize(16777215, 250))
self.frame_5.setFrameShape(QFrame.StyledPanel)
self.frame_5.setFrameShadow(QFrame.Raised)
self.frame_5.setObjectName("frame_5")
self.horizontalLayout_13 = QHBoxLayout(self.frame_5)
self.horizontalLayout_13.setObjectName("horizontalLayout_13")
self.groupBox = QGroupBox(self.frame_5)
sizePolicy = QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox.sizePolicy().hasHeightForWidth())
self.groupBox.setSizePolicy(sizePolicy)
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.groupBox.setFont(font)
self.groupBox.setObjectName("groupBox")
self.verticalLayout_12 = QVBoxLayout(self.groupBox)
self.verticalLayout_12.setObjectName("verticalLayout_12")
self.Pd_btn_1 = QPushButton(self.groupBox)
sizePolicy = QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.Pd_btn_1.sizePolicy().hasHeightForWidth())
self.Pd_btn_1.setSizePolicy(sizePolicy)
self.Pd_btn_1.setMinimumSize(QSize(40, 45))
self.Pd_btn_1.setIcon(icon4)
self.Pd_btn_1.setMaximumSize(QSize(16777215, 50))
self.Pd_btn_1.clicked.connect(self.Dersin_Gunluk_Sonucu)
self.Pd_btn_1.setStyleSheet("QPushButton {\n"" border: 2px solid rgb(52, 59, 72);\n"" border-radius: 5px; \n"" background-color: rgb(52, 59, 72);\n""}\n""QPushButton:hover {\n"" background-color: rgb(57, 65, 80);\n"" border: 2px solid rgb(61, 70, 86);\n""}\n""QPushButton:pressed { \n"" background-color: rgb(35, 40, 49);\n"" border: 2px solid rgb(43, 50, 61);\n""}")
self.Pd_btn_1.setObjectName("Pd_btn_1")
self.verticalLayout_12.addWidget(self.Pd_btn_1)
self.Pd_btn_2 = QPushButton(self.groupBox)
sizePolicy = QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.Pd_btn_2.sizePolicy().hasHeightForWidth())
self.Pd_btn_2.setSizePolicy(sizePolicy)
self.Pd_btn_2.setMinimumSize(QSize(40, 45))
self.Pd_btn_2.setIcon(icon4)
self.Pd_btn_2.setMaximumSize(QSize(16777215, 50))
self.Pd_btn_2.clicked.connect(self.Dersin_Haftalik_Sonucu)
self.Pd_btn_2.setStyleSheet("QPushButton {\n"" border: 2px solid rgb(52, 59, 72);\n"" border-radius: 5px; \n"" background-color: rgb(52, 59, 72);\n""}\n""QPushButton:hover {\n"" background-color: rgb(57, 65, 80);\n"" border: 2px solid rgb(61, 70, 86);\n""}\n""QPushButton:pressed { \n"" background-color: rgb(35, 40, 49);\n"" border: 2px solid rgb(43, 50, 61);\n""}")
self.Pd_btn_2.setObjectName("Pd_btn_2")
self.verticalLayout_12.addWidget(self.Pd_btn_2)
self.Pd_btn_3 = QPushButton(self.groupBox)
sizePolicy = QSizePolicy(QSizePolicy.Preferred, QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.Pd_btn_3.sizePolicy().hasHeightForWidth())
self.Pd_btn_3.setSizePolicy(sizePolicy)
self.Pd_btn_3.setMinimumSize(QSize(40, 45))
self.Pd_btn_3.setIcon(icon4)
self.Pd_btn_3.setMaximumSize(QSize(16777215, 50))
self.Pd_btn_3.clicked.connect(self.Tum_Ders_Haftalik_Sonuc)
self.Pd_btn_3.setStyleSheet("QPushButton {\n"" border: 2px solid rgb(52, 59, 72);\n"" border-radius: 5px; \n"" background-color: rgb(52, 59, 72);\n""}\n""QPushButton:hover {\n"" background-color: rgb(57, 65, 80);\n"" border: 2px solid rgb(61, 70, 86);\n""}\n""QPushButton:pressed { \n"" background-color: rgb(35, 40, 49);\n"" border: 2px solid rgb(43, 50, 61);\n""}")
self.Pd_btn_3.setObjectName("Pd_btn_3")
self.verticalLayout_12.addWidget(self.Pd_btn_3)
self.horizontalLayout_13.addWidget(self.groupBox)
self.groupBox_2 = QGroupBox(self.frame_5)
sizePolicy = QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox_2.sizePolicy().hasHeightForWidth())
self.groupBox_2.setSizePolicy(sizePolicy)
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.groupBox_2.setFont(font)
self.groupBox_2.setObjectName("groupBox_2")
self.verticalLayout_14 = QVBoxLayout(self.groupBox_2)
self.verticalLayout_14.setContentsMargins(5, 5, 5, 5)
self.verticalLayout_14.setObjectName("verticalLayout_14")
self.Pd_ptext_motiv = QPlainTextEdit(self.groupBox_2)
self.Pd_ptext_motiv.setReadOnly(True)
self.Pd_ptext_motiv.setObjectName("Pd_ptext_motiv")
self.Pd_ptext_motiv.insertPlainText(rastgele)
self.Pd_ptext_motiv.setStyleSheet("QPlainTextEdit { background-color: rgb(27, 29, 35);font:bold 15px;border-radius: 5px; padding: 10px;}QPlainTextEdit:hover { border: 2px solid rgb(64, 71, 88);}QPlainTextEdit:focus { border: 2px solid rgb(91, 101, 124);}")
self.verticalLayout_14.addWidget(self.Pd_ptext_motiv)
self.horizontalLayout_13.addWidget(self.groupBox_2)
self.groupBox_3 = QGroupBox(self.frame_5)
sizePolicy = QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox_3.sizePolicy().hasHeightForWidth())
self.groupBox_3.setSizePolicy(sizePolicy)
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.groupBox_3.setFont(font)
self.groupBox_3.setObjectName("groupBox_3")
self.verticalLayout_13 = QVBoxLayout(self.groupBox_3)
self.verticalLayout_13.setObjectName("verticalLayout_13")
self.Pd_btn_4 = QPushButton(self.groupBox_3)
self.Pd_btn_4.setMinimumSize(QSize(0, 45))
self.Pd_btn_4.setMaximumSize(QSize(16777215, 50))
self.Pd_btn_4.setIcon(icon5)
self.Pd_btn_4.clicked.connect(self.Dersin_Gunluk_Temizle)
self.Pd_btn_4.setStyleSheet("QPushButton {\n"" border: 2px solid rgb(52, 59, 72);\n"" border-radius: 5px; \n"" background-color: rgb(52, 59, 72);\n""}\n""QPushButton:hover {\n"" background-color: rgb(57, 65, 80);\n"" border: 2px solid rgb(61, 70, 86);\n""}\n""QPushButton:pressed { \n"" background-color: rgb(35, 40, 49);\n"" border: 2px solid rgb(43, 50, 61);\n""}")
self.Pd_btn_4.setObjectName("Pd_btn_4")
self.verticalLayout_13.addWidget(self.Pd_btn_4)
self.Pd_btn_6 = QPushButton(self.groupBox_3)
self.Pd_btn_6.setMinimumSize(QSize(0, 45))
self.Pd_btn_6.setMaximumSize(QSize(16777215, 50))
self.Pd_btn_6.setIcon(icon5)
self.Pd_btn_6.clicked.connect(self.Dersin_Haftalik_Temizle)
self.Pd_btn_6.setStyleSheet("QPushButton {\n"" border: 2px solid rgb(52, 59, 72);\n"" border-radius: 5px; \n"" background-color: rgb(52, 59, 72);\n""}\n""QPushButton:hover {\n"" background-color: rgb(57, 65, 80);\n"" border: 2px solid rgb(61, 70, 86);\n""}\n""QPushButton:pressed { \n"" background-color: rgb(35, 40, 49);\n"" border: 2px solid rgb(43, 50, 61);\n""}")
self.Pd_btn_6.setObjectName("Pd_btn_6")
self.verticalLayout_13.addWidget(self.Pd_btn_6)
self.Pd_btn_5 = QPushButton(self.groupBox_3)
self.Pd_btn_5.setMinimumSize(QSize(0, 45))
self.Pd_btn_5.setMaximumSize(QSize(16777215, 50))
self.Pd_btn_5.setIcon(icon5)
self.Pd_btn_5.clicked.connect(self.Tum_Ders_Hafta_Temizle)
self.Pd_btn_5.setStyleSheet("QPushButton {\n"" border: 2px solid rgb(52, 59, 72);\n"" border-radius: 5px; \n"" background-color: rgb(52, 59, 72);\n""}\n""QPushButton:hover {\n"" background-color: rgb(57, 65, 80);\n"" border: 2px solid rgb(61, 70, 86);\n""}\n""QPushButton:pressed { \n"" background-color: rgb(35, 40, 49);\n"" border: 2px solid rgb(43, 50, 61);\n""}")
self.Pd_btn_5.setObjectName("Pd_btn_5")
self.verticalLayout_13.addWidget(self.Pd_btn_5)
self.horizontalLayout_13.addWidget(self.groupBox_3)
self.verticalLayout_10.addWidget(self.frame_5)
self.stackedWidget.addWidget(self.page_derslerim)
self.page_derslerim1 = QWidget()
self.page_derslerim1.setObjectName("page_derslerim1")
self.verticalLayout_10 = QVBoxLayout(self.page_derslerim1)
self.verticalLayout_10.setObjectName("verticalLayout_10")
self.frame_41 = QFrame(self.page_derslerim1)
self.frame_41.setFrameShape(QFrame.StyledPanel)
self.frame_41.setFrameShadow(QFrame.Raised)
self.frame_41.setObjectName("frame_41")
self.verticalLayout_16 = QVBoxLayout(self.frame_41)
self.verticalLayout_16.setObjectName("verticalLayout_16")
self.groupBox_41 = QGroupBox(self.frame_41)
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.groupBox_41.setFont(font)
self.groupBox_41.setObjectName("groupBox_41")
self.horizontalLayout_14 = QHBoxLayout(self.groupBox_41)
self.horizontalLayout_14.setObjectName("horizontalLayout_14")
self.list_dersler1 = QListWidget(self.groupBox_41)
self.list_dersler1.setMaximumSize(QSize(500, 16777215))
self.list_dersler1.setObjectName("list_dersler1")
denemeisimeleri = yardimciFonk.SQL("SELECT DenemeAd From Sinavlar")
for denemeisimekle in denemeisimeleri:
denemeisimekle = str(denemeisimekle[0])
self.list_dersler1.addItem(denemeisimekle)
self.horizontalLayout_14.addWidget(self.list_dersler1)
self.frame_61 = QFrame(self.groupBox_41)
self.frame_61.setMinimumSize(QSize(350, 0))
self.frame_61.setFrameShape(QFrame.StyledPanel)
self.frame_61.setFrameShadow(QFrame.Raised)
self.frame_61.setObjectName("frame_61")
self.verticalLayout_17 = QVBoxLayout(self.frame_61)
self.verticalLayout_17.setContentsMargins(0, 0, 0, 9)
self.verticalLayout_17.setObjectName("verticalLayout_17")
self.frame_131 = QFrame(self.frame_61)
self.frame_131.setMinimumSize(QSize(0, 230))
self.frame_131.setMaximumSize(QSize(16777215, 175))
self.frame_131.setFrameShape(QFrame.StyledPanel)
self.frame_131.setFrameShadow(QFrame.Raised)
self.frame_131.setObjectName("frame_131")
self.gridLayout = QGridLayout(self.frame_131)
self.gridLayout.setContentsMargins(0, 0, 0, 15)
self.gridLayout.setObjectName("gridLayout")
self.comboBox_41 = QComboBox(self.frame_131)
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(8)
font.setBold(False)
font.setWeight(50)
# --- "Deneme" entry form (Turkish: presumably "practice exam" — confirm with app owner) ---
# NOTE(review): this is the interior of a Qt-Designer-style setupUi method; the
# enclosing def, and the earlier bindings of `font`, `self.gridLayout`,
# `self.frame_131`, `css_lineedit`, `QComboBox`, `QLineEdit`, `QLabel`, `QFont`
# and `QSize`, are above this section and not visible here.
self.comboBox_41.setFont(font)
# Month picker, dark-theme stylesheet (adjacent string literals are concatenated).
self.comboBox_41.setStyleSheet("QComboBox{\n"" background-color: rgb(27, 29, 35);\n"" border-radius: 5px;\n"" border: 2px solid rgb(27, 29, 35);\n"" padding: 5px;\n"" padding-left: 10px;\n""}\n""QComboBox:hover{\n"" border: 2px solid rgb(64, 71, 88);\n""}\n""QComboBox QAbstractItemView {\n"" color: rgb(85, 170, 255); \n"" background-color: rgb(27, 29, 35);\n"" padding: 10px;\n"" selection-background-color: rgb(39, 44, 54);\n""}")
self.comboBox_41.setObjectName("comboBox_41")
# Leading empty item acts as the "nothing selected" placeholder.
self.comboBox_41.addItem("")
# Turkish month names, January..December.
self.comboBox_41.addItems(['Ocak','Şubat','Mart','Nisan','Mayıs','Haziran','Temmuz','Ağustos','Eylül','Ekim','Kasım','Aralık'])
# Grid position: row 5, col 1 (day/month/year pickers share row 5).
self.gridLayout.addWidget(self.comboBox_41, 5, 1, 1, 1)
# Exam-type (?) combo — its three items are filled later via retranslateUi; TODO confirm.
self.comboBox1 = QComboBox(self.frame_131)
self.comboBox1.setStyleSheet("QComboBox{\n"" background-color: rgb(27, 29, 35);\n"" border-radius: 5px;\n"" border: 2px solid rgb(27, 29, 35);\n"" padding: 5px;\n"" padding-left: 10px;\n""}\n""QComboBox:hover{\n"" border: 2px solid rgb(64, 71, 88);\n""}\n""QComboBox QAbstractItemView {\n"" color: rgb(85, 170, 255); \n"" background-color: rgb(27, 29, 35);\n"" padding: 10px;\n"" selection-background-color: rgb(39, 44, 54);\n""}")
self.comboBox1.setObjectName("comboBox1")
self.comboBox1.addItem("")
self.comboBox1.addItem("")
self.comboBox1.addItem("")
self.gridLayout.addWidget(self.comboBox1, 4, 1, 1, 2)
# Free-text fields for the form rows 0..3; `css_lineedit` is a shared
# stylesheet string defined earlier in the file.
self.lineEdit1 = QLineEdit(self.frame_131)
self.lineEdit1.setMinimumSize(QSize(0, 30))
self.lineEdit1.setStyleSheet(css_lineedit)
self.lineEdit1.setObjectName("lineEdit1")
self.gridLayout.addWidget(self.lineEdit1, 1, 1, 1, 2)
self.lineEdit_41 = QLineEdit(self.frame_131)
self.lineEdit_41.setMinimumSize(QSize(0, 30))
self.lineEdit_41.setStyleSheet(css_lineedit)
self.lineEdit_41.setObjectName("lineEdit_41")
self.gridLayout.addWidget(self.lineEdit_41, 3, 1, 1, 2)
self.lineEdit_21 = QLineEdit(self.frame_131)
self.lineEdit_21.setMinimumSize(QSize(0, 30))
self.lineEdit_21.setStyleSheet(css_lineedit)
self.lineEdit_21.setObjectName("lineEdit_21")
# Only this field is length-capped (25 chars) — presumably the exam name; verify.
self.lineEdit_21.setMaxLength(25)
self.gridLayout.addWidget(self.lineEdit_21, 0, 1, 1, 2)
self.lineEdit_31 = QLineEdit(self.frame_131)
self.lineEdit_31.setMinimumSize(QSize(0, 30))
self.lineEdit_31.setStyleSheet(css_lineedit)
self.lineEdit_31.setObjectName("lineEdit_31")
self.gridLayout.addWidget(self.lineEdit_31, 2, 1, 1, 2)
# Row labels for the form. Each label rebuilds the same bold 10pt Segoe UI
# QFont from scratch; `font` is deliberately re-bound each time.
self.label_21 = QLabel(self.frame_131)
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.label_21.setFont(font)
self.label_21.setObjectName("label_21")
self.gridLayout.addWidget(self.label_21, 1, 0, 1, 1)
self.label_41 = QLabel(self.frame_131)
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.label_41.setFont(font)
self.label_41.setObjectName("label_41")
self.gridLayout.addWidget(self.label_41, 3, 0, 1, 1)
self.label_31 = QLabel(self.frame_131)
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.label_31.setFont(font)
self.label_31.setObjectName("label_31")
self.gridLayout.addWidget(self.label_31, 2, 0, 1, 1)
self.label_51 = QLabel(self.frame_131)
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.label_51.setFont(font)
self.label_51.setObjectName("label_51")
self.gridLayout.addWidget(self.label_51, 4, 0, 1, 1)
self.label1 = QLabel(self.frame_131)
self.label1.setMinimumSize(QSize(100, 0))
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.label1.setFont(font)
self.label1.setObjectName("label1")
self.gridLayout.addWidget(self.label1, 0, 0, 1, 1)
# Year picker: hard-coded 2021-2033 range (regular, non-bold font).
self.comboBox_31 = QComboBox(self.frame_131)
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(8)
font.setBold(False)
font.setWeight(50)
self.comboBox_31.setFont(font)
self.comboBox_31.setStyleSheet("QComboBox{\n"" background-color: rgb(27, 29, 35);\n"" border-radius: 5px;\n"" border: 2px solid rgb(27, 29, 35);\n"" padding: 5px;\n"" padding-left: 10px;\n""}\n""QComboBox:hover{\n"" border: 2px solid rgb(64, 71, 88);\n""}\n""QComboBox QAbstractItemView {\n"" color: rgb(85, 170, 255); \n"" background-color: rgb(27, 29, 35);\n"" padding: 10px;\n"" selection-background-color: rgb(39, 44, 54);\n""}")
self.comboBox_31.setObjectName("comboBox_31")
self.comboBox_31.addItem("")
self.comboBox_31.addItems(['2021','2022','2023','2024','2025','2026','2027','2028','2029','2030','2031','2032','2033'])
self.gridLayout.addWidget(self.comboBox_31, 5, 2, 1, 1)
# Day picker: items "1".."31" after a blank placeholder.
self.comboBox_21 = QComboBox(self.frame_131)
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(8)
font.setBold(False)
font.setWeight(50)
self.comboBox_21.setFont(font)
self.comboBox_21.setStyleSheet("QComboBox{\n" " background-color: rgb(27, 29, 35);\n" " border-radius: 5px;\n" " border: 2px solid rgb(27, 29, 35);\n" " padding: 5px;\n" " padding-left: 10px;\n" "}\n" "QComboBox:hover{\n" " border: 2px solid rgb(64, 71, 88);\n" "}\n" "QComboBox QAbstractItemView {\n" " color: rgb(85, 170, 255); \n" " background-color: rgb(27, 29, 35);\n" " padding: 10px;\n" " selection-background-color: rgb(39, 44, 54);\n" "}")
self.comboBox_21.setObjectName("comboBox_21")
self.comboBox_21.addItem("")
# NOTE(review): the `if i==31: break` is dead code — range(1, 32) already
# stops after i == 31. Safe to delete the conditional on a future pass.
for i in range(1,32):
    self.comboBox_21.addItem(str(i))
    if i==31:
        break
self.gridLayout.addWidget(self.comboBox_21, 5, 0, 1, 1)
# --- Save-button row and help text for the "deneme" form ---
self.verticalLayout_17.addWidget(self.frame_131)
self.frame_111 = QFrame(self.frame_61)
# NOTE(review): minimum height (60) is GREATER than maximum height (30) —
# Qt resolves the conflict, but one of these is almost certainly a typo.
self.frame_111.setMinimumSize(QSize(0, 60))
self.frame_111.setMaximumSize(QSize(16777215, 30))
self.frame_111.setFrameShape(QFrame.StyledPanel)
self.frame_111.setFrameShadow(QFrame.Raised)
self.frame_111.setObjectName("frame_111")
self.horizontalLayout_9 = QHBoxLayout(self.frame_111)
self.horizontalLayout_9.setObjectName("horizontalLayout_9")
# "Save" button — wired to Deneme_Kaydet (save the practice-exam record).
# `icon3` here is a binding created earlier in the method, above this view.
self.pushButton1 = QPushButton(self.frame_111)
self.pushButton1.setMinimumSize(QSize(150, 40))
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(9)
self.pushButton1.setFont(font)
self.pushButton1.setStyleSheet("QPushButton {\n"" border: 2px solid rgb(52, 59, 72);\n"" border-radius: 5px; \n"" background-color: rgb(52, 59, 72);\n""}\n""QPushButton:hover {\n"" background-color: rgb(57, 65, 80);\n"" border: 2px solid rgb(61, 70, 86);\n""}\n""QPushButton:pressed { \n"" background-color: rgb(35, 40, 49);\n"" border: 2px solid rgb(43, 50, 61);\n""}")
self.pushButton1.clicked.connect(self.Deneme_Kaydet)
self.pushButton1.setIcon(icon3)
self.pushButton1.setObjectName("pushButton1")
self.horizontalLayout_9.addWidget(self.pushButton1)
self.verticalLayout_17.addWidget(self.frame_111)
# Empty filler frame below the save row.
self.frame_81 = QFrame(self.frame_61)
self.frame_81.setMaximumSize(QSize(16777215, 16777215))
self.frame_81.setFrameShape(QFrame.StyledPanel)
self.frame_81.setFrameShadow(QFrame.Raised)
self.frame_81.setObjectName("frame_81")
self.horizontalLayout_16 = QHBoxLayout(self.frame_81)
self.horizontalLayout_16.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_16.setObjectName("horizontalLayout_16")
self.verticalLayout_17.addWidget(self.frame_81)
self.horizontalLayout_14.addWidget(self.frame_61)
# Read-only help panel ("yardim" = help) to the right of the form; its text
# is presumably filled in retranslateUi — TODO confirm.
self.Pd_ptext_yardim1 = QPlainTextEdit(self.groupBox_41)
self.Pd_ptext_yardim1.setMaximumSize(QSize(500, 16777215))
self.Pd_ptext_yardim1.setStyleSheet("QPlainTextEdit { background-color: rgb(27, 29, 35); border-radius: 5px; padding: 10px;}QPlainTextEdit:hover { border: 2px solid rgb(64, 71, 88);}QPlainTextEdit:focus { border: 2px solid rgb(91, 101, 124);}")
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.Pd_ptext_yardim1.setFont(font)
self.Pd_ptext_yardim1.setReadOnly(True)
self.Pd_ptext_yardim1.setObjectName("Pd_ptext_yardim1")
self.horizontalLayout_14.addWidget(self.Pd_ptext_yardim1)
self.verticalLayout_16.addWidget(self.groupBox_41)
self.verticalLayout_10.addWidget(self.frame_41)
self.frame_51 = QFrame(self.page_derslerim1)
sizePolicy = QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame_51.sizePolicy().hasHeightForWidth())
self.frame_51.setSizePolicy(sizePolicy)
self.frame_51.setMaximumSize(QSize(16777215, 250))
self.frame_51.setFrameShape(QFrame.StyledPanel)
self.frame_51.setFrameShadow(QFrame.Raised)
self.frame_51.setObjectName("frame_51")
self.horizontalLayout_13 = QHBoxLayout(self.frame_51)
self.horizontalLayout_13.setObjectName("horizontalLayout_13")
self.groupBox1 = QGroupBox(self.frame_51)
sizePolicy = QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox1.sizePolicy().hasHeightForWidth())
self.groupBox1.setSizePolicy(sizePolicy)
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.groupBox1.setFont(font)
self.groupBox1.setObjectName("groupBox1")
self.verticalLayout_5 = QVBoxLayout(self.groupBox1)
self.verticalLayout_5.setObjectName("verticalLayout_5")
self.pushButton_21 = QPushButton(self.groupBox1)
self.pushButton_21.setMinimumSize(QSize(200, 45))
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(9)
self.pushButton_21.setFont(font)
self.pushButton_21.setStyleSheet("QPushButton {\n"" border: 2px solid rgb(52, 59, 72);\n"" border-radius: 5px; \n"" background-color: rgb(52, 59, 72);\n""}\n""QPushButton:hover {\n"" background-color: rgb(57, 65, 80);\n"" border: 2px solid rgb(61, 70, 86);\n""}\n""QPushButton:pressed { \n"" background-color: rgb(35, 40, 49);\n"" border: 2px solid rgb(43, 50, 61);\n""}")
self.pushButton_21.setIcon(icon4)
self.pushButton_21.clicked.connect(self.Secili_Deneme_Sonuc)
self.pushButton_21.setObjectName("pushButton_21")
self.verticalLayout_5.addWidget(self.pushButton_21)
self.pushButton_41 = QPushButton(self.groupBox1)
self.pushButton_41.setMinimumSize(QSize(150, 45))
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(9)
self.pushButton_41.setFont(font)
self.pushButton_41.setStyleSheet("QPushButton {\n"" border: 2px solid rgb(52, 59, 72);\n"" border-radius: 5px; \n"" background-color: rgb(52, 59, 72);\n""}\n""QPushButton:hover {\n"" background-color: rgb(57, 65, 80);\n"" border: 2px solid rgb(61, 70, 86);\n""}\n""QPushButton:pressed { \n"" background-color: rgb(35, 40, 49);\n"" border: 2px solid rgb(43, 50, 61);\n""}")
self.pushButton_41.setIcon(icon4)
self.pushButton_41.setObjectName("pushButton_41")
self.pushButton_41.clicked.connect(self.TYT_Denemeleri_Sonuc)
self.verticalLayout_5.addWidget(self.pushButton_41)
self.pushButton_51 = QPushButton(self.groupBox1)
self.pushButton_51.setMinimumSize(QSize(150, 45))
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(9)
self.pushButton_51.setFont(font)
self.pushButton_51.setStyleSheet("QPushButton {\n" " border: 2px solid rgb(52, 59, 72);\n" " border-radius: 5px; \n" " background-color: rgb(52, 59, 72);\n" "}\n" "QPushButton:hover {\n" " background-color: rgb(57, 65, 80);\n" " border: 2px solid rgb(61, 70, 86);\n" "}\n" "QPushButton:pressed { \n" " background-color: rgb(35, 40, 49);\n" " border: 2px solid rgb(43, 50, 61);\n" "}")
self.pushButton_51.setIcon(icon4)
self.pushButton_51.setObjectName("pushButton_51")
self.pushButton_51.clicked.connect(self.AYT_Denemeleri_Sonuc)
self.verticalLayout_5.addWidget(self.pushButton_51)
self.pushButton_31 = QPushButton(self.groupBox1)
self.pushButton_31.setMinimumSize(QSize(150, 45))
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(9)
self.pushButton_31.setFont(font)
self.pushButton_31.setStyleSheet("QPushButton {\n" " border: 2px solid rgb(52, 59, 72);\n" " border-radius: 5px; \n" " background-color: rgb(52, 59, 72);\n" "}\n" "QPushButton:hover {\n" " background-color: rgb(57, 65, 80);\n" " border: 2px solid rgb(61, 70, 86);\n" "}\n" "QPushButton:pressed { \n" " background-color: rgb(35, 40, 49);\n" " border: 2px solid rgb(43, 50, 61);\n" "}")
self.pushButton_31.setIcon(icon4)
self.pushButton_31.setObjectName("pushButton_31")
self.pushButton_31.clicked.connect(self.Tum_Denemelerin_Sonuclari)
self.verticalLayout_5.addWidget(self.pushButton_31)
self.horizontalLayout_13.addWidget(self.groupBox1)
self.groupBox_21 = QGroupBox(self.frame_51)
sizePolicy = QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox_21.sizePolicy().hasHeightForWidth())
self.groupBox_21.setSizePolicy(sizePolicy)
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.groupBox_21.setFont(font)
self.groupBox_21.setObjectName("groupBox_21")
self.verticalLayout_14 = QVBoxLayout(self.groupBox_21)
self.verticalLayout_14.setContentsMargins(5, 5, 5, 5)
self.verticalLayout_14.setObjectName("verticalLayout_14")
self.Pd_ptext_motiv1 = QPlainTextEdit(self.groupBox_21)
self.Pd_ptext_motiv1.setReadOnly(True)
self.Pd_ptext_motiv1.setObjectName("Pd_ptext_motiv1")
self.Pd_ptext_motiv1.setStyleSheet("QPlainTextEdit { background-color: rgb(27, 29, 35); border-radius: 5px; padding: 10px;}QPlainTextEdit:hover { border: 2px solid rgb(64, 71, 88);}QPlainTextEdit:focus { border: 2px solid rgb(91, 101, 124);}")
self.Pd_ptext_motiv1.insertPlainText(rastgele)
self.verticalLayout_14.addWidget(self.Pd_ptext_motiv1)
self.horizontalLayout_13.addWidget(self.groupBox_21)
self.groupBox_31 = QGroupBox(self.frame_51)
sizePolicy = QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox_31.sizePolicy().hasHeightForWidth())
self.groupBox_31.setSizePolicy(sizePolicy)
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.groupBox_31.setFont(font)
self.groupBox_31.setObjectName("groupBox_31")
self.verticalLayout_7 = QVBoxLayout(self.groupBox_31)
self.verticalLayout_7.setObjectName("verticalLayout_7")
self.pushButton_61 = QPushButton(self.groupBox_31)
self.pushButton_61.setMinimumSize(QSize(150, 45))
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(9)
self.pushButton_61.setFont(font)
self.pushButton_61.setStyleSheet("QPushButton {\n" " border: 2px solid rgb(52, 59, 72);\n" " border-radius: 5px; \n" " background-color: rgb(52, 59, 72);\n" "}\n" "QPushButton:hover {\n" " background-color: rgb(57, 65, 80);\n" " border: 2px solid rgb(61, 70, 86);\n" "}\n" "QPushButton:pressed { \n" " background-color: rgb(35, 40, 49);\n" " border: 2px solid rgb(43, 50, 61);\n" "}")
self.pushButton_61.setIcon(icon5)
self.pushButton_61.setObjectName("pushButton_61")
self.pushButton_61.clicked.connect(self.Secili_Deneme_Sil)
self.verticalLayout_7.addWidget(self.pushButton_61)
self.pushButton_71 = QPushButton(self.groupBox_31)
self.pushButton_71.setMinimumSize(QSize(150, 45))
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(9)
self.pushButton_71.setFont(font)
self.pushButton_71.setStyleSheet("QPushButton {\n" " border: 2px solid rgb(52, 59, 72);\n" " border-radius: 5px; \n" " background-color: rgb(52, 59, 72);\n" "}\n" "QPushButton:hover {\n" " background-color: rgb(57, 65, 80);\n" " border: 2px solid rgb(61, 70, 86);\n" "}\n" "QPushButton:pressed { \n" " background-color: rgb(35, 40, 49);\n" " border: 2px solid rgb(43, 50, 61);\n" "}")
self.pushButton_71.setIcon(icon5)
self.pushButton_71.setObjectName("pushButton_71")
self.pushButton_71.clicked.connect(self.TYT_Denemelerini_Sil)
self.verticalLayout_7.addWidget(self.pushButton_71)
self.pushButton_81 = QPushButton(self.groupBox_31)
self.pushButton_81.setMinimumSize(QSize(150, 45))
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(9)
self.pushButton_81.setFont(font)
self.pushButton_81.setStyleSheet("QPushButton {\n" " border: 2px solid rgb(52, 59, 72);\n" " border-radius: 5px; \n" " background-color: rgb(52, 59, 72);\n" "}\n" "QPushButton:hover {\n" " background-color: rgb(57, 65, 80);\n" " border: 2px solid rgb(61, 70, 86);\n" "}\n" "QPushButton:pressed { \n" " background-color: rgb(35, 40, 49);\n" " border: 2px solid rgb(43, 50, 61);\n" "}")
self.pushButton_81.setIcon(icon5)
self.pushButton_81.setObjectName("pushButton_81")
self.pushButton_81.clicked.connect(self.AYT_denemelerini_Sil)
self.verticalLayout_7.addWidget(self.pushButton_81)
self.pushButton_91 = QPushButton(self.groupBox_31)
self.pushButton_91.setMinimumSize(QSize(150, 45))
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(9)
self.pushButton_91.setFont(font)
self.pushButton_91.setStyleSheet("QPushButton {\n" " border: 2px solid rgb(52, 59, 72);\n" " border-radius: 5px; \n" " background-color: rgb(52, 59, 72);\n" "}\n" "QPushButton:hover {\n" " background-color: rgb(57, 65, 80);\n" " border: 2px solid rgb(61, 70, 86);\n" "}\n" "QPushButton:pressed { \n" " background-color: rgb(35, 40, 49);\n" " border: 2px solid rgb(43, 50, 61);\n" "}")
self.pushButton_91.setIcon(icon5)
self.pushButton_91.setObjectName("pushButton_91")
self.pushButton_91.clicked.connect(self.Tum_Denemeleri_Sil)
self.verticalLayout_7.addWidget(self.pushButton_91)
self.horizontalLayout_13.addWidget(self.groupBox_31)
self.verticalLayout_10.addWidget(self.frame_51)
self.stackedWidget.addWidget(self.page_derslerim1)
self.page_kitaplarim = QWidget()
self.page_kitaplarim.setObjectName("page_kitaplarim")
self.verticalLayout_10 = QVBoxLayout(self.page_kitaplarim)
self.verticalLayout_10.setObjectName("verticalLayout_10")
self.frame_ust = QFrame(self.page_kitaplarim)
self.frame_ust.setFrameShape(QFrame.StyledPanel)
self.frame_ust.setFrameShadow(QFrame.Raised)
self.frame_ust.setObjectName("frame_ust")
self.verticalLayout_16 = QVBoxLayout(self.frame_ust)
self.verticalLayout_16.setObjectName("verticalLayout_16")
self.group_ust = QGroupBox(self.frame_ust)
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.group_ust.setFont(font)
self.group_ust.setObjectName("group_ust")
self.horizontalLayout_14 = QHBoxLayout(self.group_ust)
self.horizontalLayout_14.setContentsMargins(15, 5, 15, 5)
self.horizontalLayout_14.setObjectName("horizontalLayout_14")
self.pd_kitaplar = QListWidget(self.group_ust)
self.pd_kitaplar.setMaximumSize(QSize(500, 16777215))
self.pd_kitaplar.setObjectName("pd_kitaplar")
isimler1 = yardimciFonk.SQL("SELECT isim FROM Kitaplar")
for kitapismi in isimler1:
self.pd_kitaplar.addItem(kitapismi[0])
self.horizontalLayout_14.addWidget(self.pd_kitaplar)
self.pd_ktp_ekle = QGroupBox(self.group_ust)
self.pd_ktp_ekle.setMinimumSize(QSize(400, 0))
self.pd_ktp_ekle.setMaximumSize(QSize(750, 2545245))
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
self.pd_ktp_ekle.setFont(font)
self.pd_ktp_ekle.setObjectName("pd_ktp_ekle")
self.formLayout = QFormLayout(self.pd_ktp_ekle)
self.formLayout.setObjectName("formLayout")
self.kp_lbl1 = QLabel(self.pd_ktp_ekle)
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
self.kp_lbl1.setFont(font)
self.kp_lbl1.setObjectName("kp_lbl1")
self.formLayout.setWidget(0, QFormLayout.LabelRole, self.kp_lbl1)
self.kp_lne_3 = QLineEdit(self.pd_ktp_ekle)
self.kp_lne_3.setMinimumSize(QSize(0, 30))
self.kp_lne_3.setStyleSheet("QLineEdit {\n"" background-color: rgb(27, 29, 35);\n"" border-radius: 5px;\n"" border: 2px solid rgb(27, 29, 35);\n"" padding-left: 10px;\n""}\n""QLineEdit:hover {\n"" border: 2px solid rgb(64, 71, 88);\n""}\n""QLineEdit:focus {\n"" border: 2px solid rgb(91, 101, 124);\n""}")
self.kp_lne_3.setObjectName("kp_lne_3")
self.formLayout.setWidget(0, QFormLayout.FieldRole, self.kp_lne_3)
self.kp_lbl2 = QLabel(self.pd_ktp_ekle)
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
self.kp_lbl2.setFont(font)
self.kp_lbl2.setObjectName("kp_lbl2")
self.formLayout.setWidget(1, QFormLayout.LabelRole, self.kp_lbl2)
self.kp_lne_1 = QLineEdit(self.pd_ktp_ekle)
self.kp_lne_1.setMinimumSize(QSize(0, 30))
self.kp_lne_1.setStyleSheet("QLineEdit {\n"" background-color: rgb(27, 29, 35);\n"" border-radius: 5px;\n"" border: 2px solid rgb(27, 29, 35);\n"" padding-left: 10px;\n""}\n""QLineEdit:hover {\n"" border: 2px solid rgb(64, 71, 88);\n""}\n""QLineEdit:focus {\n"" border: 2px solid rgb(91, 101, 124);\n""}")
self.kp_lne_1.setObjectName("kp_lne_1")
self.formLayout.setWidget(1, QFormLayout.FieldRole, self.kp_lne_1)
self.kp_lbl3 = QLabel(self.pd_ktp_ekle)
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
self.kp_lbl3.setFont(font)
self.kp_lbl3.setObjectName("kp_lbl3")
self.formLayout.setWidget(2, QFormLayout.LabelRole, self.kp_lbl3)
self.kp_lne_2 = QLineEdit(self.pd_ktp_ekle)
self.kp_lne_2.setMinimumSize(QSize(0, 30))
self.kp_lne_2.setStyleSheet("QLineEdit {\n"" background-color: rgb(27, 29, 35);\n"" border-radius: 5px;\n"" border: 2px solid rgb(27, 29, 35);\n"" padding-left: 10px;\n""}\n""QLineEdit:hover {\n"" border: 2px solid rgb(64, 71, 88);\n""}\n""QLineEdit:focus {\n"" border: 2px solid rgb(91, 101, 124);\n""}\n""")
self.kp_lne_2.setObjectName("kp_lne_2")
self.formLayout.setWidget(2, QFormLayout.FieldRole, self.kp_lne_2)
spacerItem = QSpacerItem(20, 40, QSizePolicy.Minimum, QSizePolicy.Expanding)
self.formLayout.setItem(3, QFormLayout.LabelRole, spacerItem)
self.kp_btn_kydet = QPushButton(self.pd_ktp_ekle)
self.kp_btn_kydet.setMinimumSize(QSize(40, 40))
self.kp_btn_kydet.setMaximumSize(QSize(16777215, 16777215))
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(9)
self.kp_btn_kydet.setFont(font)
self.kp_btn_kydet.setStyleSheet("QPushButton {\n"" border: 2px solid rgb(52, 59, 72);\n"" border-radius: 5px; \n"" background-color: rgb(52, 59, 72);\n""}\n""QPushButton:hover {\n"" background-color: rgb(57, 65, 80);\n"" border: 2px solid rgb(61, 70, 86);\n""}\n""QPushButton:pressed { \n"" background-color: rgb(35, 40, 49);\n"" border: 2px solid rgb(43, 50, 61);\n""}")
icon3 = QIcon()
icon3.addPixmap(QPixmap(":/16x16/icons/16x16/cil-folder-open.png"), QIcon.Normal, QIcon.Off)
self.kp_btn_kydet.setIcon(icon3)
self.kp_btn_kydet.setObjectName("kp_btn_kydet")
self.kp_btn_kydet.clicked.connect(self.Kitap_Kaydet)
self.formLayout.setWidget(5, QFormLayout.SpanningRole, self.kp_btn_kydet)
spacerItem1 = QSpacerItem(20, 40, QSizePolicy.Minimum, QSizePolicy.Expanding)
self.formLayout.setItem(4, QFormLayout.LabelRole, spacerItem1)
self.horizontalLayout_14.addWidget(self.pd_ktp_ekle)
self.Pd_Yrdm = QPlainTextEdit(self.group_ust)
self.Pd_Yrdm.setMaximumSize(QSize(500, 16777215))
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.Pd_Yrdm.setFont(font)
self.Pd_Yrdm.setReadOnly(True)
self.Pd_Yrdm.setObjectName("Pd_Yrdm")
self.Pd_Yrdm.setStyleSheet("QPlainTextEdit { background-color: rgb(27, 29, 35); border-radius: 5px; padding: 10px;}QPlainTextEdit:hover { border: 2px solid rgb(64, 71, 88);}QPlainTextEdit:focus { border: 2px solid rgb(91, 101, 124);}")
self.horizontalLayout_14.addWidget(self.Pd_Yrdm)
self.verticalLayout_16.addWidget(self.group_ust)
self.verticalLayout_10.addWidget(self.frame_ust)
self.frame_alt = QFrame(self.page_kitaplarim)
sizePolicy = QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame_alt.sizePolicy().hasHeightForWidth())
self.frame_alt.setSizePolicy(sizePolicy)
self.frame_alt.setMaximumSize(QSize(16777215, 300))
self.frame_alt.setFrameShape(QFrame.StyledPanel)
self.frame_alt.setFrameShadow(QFrame.Raised)
self.frame_alt.setObjectName("frame_alt")
self.horizontalLayout_13 = QHBoxLayout(self.frame_alt)
self.horizontalLayout_13.setObjectName("horizontalLayout_13")
self.kp_sonuc_1 = QGroupBox(self.frame_alt)
sizePolicy = QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.kp_sonuc_1.sizePolicy().hasHeightForWidth())
self.kp_sonuc_1.setSizePolicy(sizePolicy)
self.kp_sonuc_1.setMinimumSize(QSize(300, 0))
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.kp_sonuc_1.setFont(font)
self.kp_sonuc_1.setObjectName("kp_sonuc_1")
self.verticalLayout_5 = QVBoxLayout(self.kp_sonuc_1)
self.verticalLayout_5.setObjectName("verticalLayout_5")
self.kp_btn_1 = QPushButton(self.kp_sonuc_1)
self.kp_btn_1.setMinimumSize(QSize(150, 45))
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(9)
self.kp_btn_1.setFont(font)
self.kp_btn_1.setStyleSheet("QPushButton {\n"" border: 2px solid rgb(52, 59, 72);\n"" border-radius: 5px; \n"" background-color: rgb(52, 59, 72);\n""}\n""QPushButton:hover {\n"" background-color: rgb(57, 65, 80);\n"" border: 2px solid rgb(61, 70, 86);\n""}\n""QPushButton:pressed { \n"" background-color: rgb(35, 40, 49);\n"" border: 2px solid rgb(43, 50, 61);\n""}")
self.kp_btn_1.setIcon(icon3)
self.kp_btn_1.clicked.connect(self.Secili_Kitabi_Getir)
self.kp_btn_1.setObjectName("kp_btn_1")
self.verticalLayout_5.addWidget(self.kp_btn_1)
self.kp_btn_2 = QPushButton(self.kp_sonuc_1)
self.kp_btn_2.setMinimumSize(QSize(150, 45))
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(9)
self.kp_btn_2.setFont(font)
self.kp_btn_2.setStyleSheet("QPushButton {\n"" border: 2px solid rgb(52, 59, 72);\n"" border-radius: 5px; \n"" background-color: rgb(52, 59, 72);\n""}\n""QPushButton:hover {\n"" background-color: rgb(57, 65, 80);\n"" border: 2px solid rgb(61, 70, 86);\n""}\n""QPushButton:pressed { \n"" background-color: rgb(35, 40, 49);\n"" border: 2px solid rgb(43, 50, 61);\n""}")
self.kp_btn_2.setIcon(icon3)
self.kp_btn_2.clicked.connect(self.Toplam_Okunan_Sayfa_ve_Gun_Sayisi)
self.kp_btn_2.setObjectName("kp_btn_2")
self.verticalLayout_5.addWidget(self.kp_btn_2)
self.kp_btn_3 = QPushButton(self.kp_sonuc_1)
self.kp_btn_3.setMinimumSize(QSize(150, 45))
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(9)
self.kp_btn_3.setFont(font)
self.kp_btn_3.setStyleSheet("QPushButton {\n"" border: 2px solid rgb(52, 59, 72);\n"" border-radius: 5px; \n"" background-color: rgb(52, 59, 72);\n""}\n""QPushButton:hover {\n"" background-color: rgb(57, 65, 80);\n"" border: 2px solid rgb(61, 70, 86);\n""}\n""QPushButton:pressed { \n"" background-color: rgb(35, 40, 49);\n"" border: 2px solid rgb(43, 50, 61);\n""}")
self.kp_btn_3.setIcon(icon3)
self.kp_btn_3.setObjectName("kp_btn_3")
self.kp_btn_3.clicked.connect(self.Grafiksel_Gosterim)
self.verticalLayout_5.addWidget(self.kp_btn_3)
self.horizontalLayout_13.addWidget(self.kp_sonuc_1)
self.kp_motiv = QGroupBox(self.frame_alt)
sizePolicy = QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.kp_motiv.sizePolicy().hasHeightForWidth())
self.kp_motiv.setSizePolicy(sizePolicy)
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.kp_motiv.setFont(font)
self.kp_motiv.setObjectName("kp_motiv")
self.verticalLayout_14 = QVBoxLayout(self.kp_motiv)
self.verticalLayout_14.setContentsMargins(5, 5, 5, 5)
self.verticalLayout_14.setObjectName("verticalLayout_14")
self.Pd_ptext_motiv12 = QPlainTextEdit(self.kp_motiv)
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
self.Pd_ptext_motiv12.setFont(font)
self.Pd_ptext_motiv12.setReadOnly(True)
self.Pd_ptext_motiv12.setObjectName("Pd_ptext_motiv12")
self.Pd_ptext_motiv12.setStyleSheet("QPlainTextEdit { background-color: rgb(27, 29, 35); border-radius: 5px; padding: 10px;}QPlainTextEdit:hover { border: 2px solid rgb(64, 71, 88);}QPlainTextEdit:focus { border: 2px solid rgb(91, 101, 124);}")
self.verticalLayout_14.addWidget(self.Pd_ptext_motiv12)
self.horizontalLayout_13.addWidget(self.kp_motiv)
self.kp_sil = QGroupBox(self.frame_alt)
sizePolicy = QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.kp_sil.sizePolicy().hasHeightForWidth())
self.kp_sil.setSizePolicy(sizePolicy)
self.kp_sil.setMinimumSize(QSize(300, 0))
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.kp_sil.setFont(font)
self.kp_sil.setObjectName("kp_sil")
self.verticalLayout_7 = QVBoxLayout(self.kp_sil)
self.verticalLayout_7.setObjectName("verticalLayout_7")
self.kp_btn_4 = QPushButton(self.kp_sil)
self.kp_btn_4.setMinimumSize(QSize(150, 45))
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(9)
self.kp_btn_4.setFont(font)
self.kp_btn_4.setStyleSheet("QPushButton {\n"" border: 2px solid rgb(52, 59, 72);\n"" border-radius: 5px; \n"" background-color: rgb(52, 59, 72);\n""}\n""QPushButton:hover {\n"" background-color: rgb(57, 65, 80);\n"" border: 2px solid rgb(61, 70, 86);\n""}\n""QPushButton:pressed { \n"" background-color: rgb(35, 40, 49);\n"" border: 2px solid rgb(43, 50, 61);\n""}")
self.kp_btn_4.setIcon(icon3)
self.kp_btn_4.setObjectName("kp_btn_4")
self.kp_btn_4.clicked.connect(self.Secili_Kitap_Sil)
self.verticalLayout_7.addWidget(self.kp_btn_4)
self.kp_btn_6 = QPushButton(self.kp_sil)
self.kp_btn_6.setMinimumSize(QSize(150, 45))
font = QFont()
font.setFamily("Segoe UI")
font.setPointSize(9)
self.kp_btn_6.setFont(font)
self.kp_btn_6.setStyleSheet("QPushButton {\n"" border: 2px solid rgb(52, 59, 72);\n"" border-radius: 5px; \n"" background-color: rgb(52, 59, 72);\n""}\n""QPushButton:hover {\n"" background-color: rgb(57, 65, 80);\n"" border: 2px solid rgb(61, 70, 86);\n""}\n""QPushButton:pressed { \n"" background-color: rgb(35, 40, 49);\n"" border: 2px solid rgb(43, 50, 61);\n""}")
self.kp_btn_6.setIcon(icon3)
self.kp_btn_6.setObjectName("kp_btn_6")
self.kp_btn_6.clicked.connect(self.Tum_Kitap_Sil)
self.verticalLayout_7.addWidget(self.kp_btn_6)
self.horizontalLayout_13.addWidget(self.kp_sil)
self.verticalLayout_10.addWidget(self.frame_alt)
self.stackedWidget.addWidget(self.page_kitaplarim)
# --- page_widgets: template demo page (from the original dark-theme GUI template,
# note the u"" prefixes typical of pyside/pyuic output). Construction of this
# page continues past the end of this view.
# NOTE(review): `verticalLayout_7`, `horizontalLayout_9`, `gridLayout` and
# `icon3` are re-bound below, clobbering the earlier-page bindings again.
self.page_widgets = QWidget()
self.page_widgets.setObjectName(u"page_widgets")
self.verticalLayout_6 = QVBoxLayout(self.page_widgets)
self.verticalLayout_6.setObjectName(u"verticalLayout_6")
self.frame = QFrame(self.page_widgets)
self.frame.setObjectName(u"frame")
self.frame.setStyleSheet(u"border-radius: 5px;")
self.frame.setFrameShape(QFrame.StyledPanel)
self.frame.setFrameShadow(QFrame.Raised)
self.verticalLayout_15 = QVBoxLayout(self.frame)
self.verticalLayout_15.setSpacing(0)
self.verticalLayout_15.setObjectName(u"verticalLayout_15")
self.verticalLayout_15.setContentsMargins(0, 0, 0, 0)
self.frame_div_content_1 = QFrame(self.frame)
self.frame_div_content_1.setObjectName(u"frame_div_content_1")
self.frame_div_content_1.setMinimumSize(QSize(0, 110))
self.frame_div_content_1.setMaximumSize(QSize(16777215, 110))
self.frame_div_content_1.setStyleSheet(u"background-color: rgb(41, 45, 56);\n""border-radius: 5px;\n""")
self.frame_div_content_1.setFrameShape(QFrame.NoFrame)
self.frame_div_content_1.setFrameShadow(QFrame.Raised)
self.verticalLayout_7 = QVBoxLayout(self.frame_div_content_1)
self.verticalLayout_7.setSpacing(0)
self.verticalLayout_7.setObjectName(u"verticalLayout_7")
self.verticalLayout_7.setContentsMargins(0, 0, 0, 0)
self.frame_title_wid_1 = QFrame(self.frame_div_content_1)
self.frame_title_wid_1.setObjectName(u"frame_title_wid_1")
self.frame_title_wid_1.setMaximumSize(QSize(16777215, 35))
self.frame_title_wid_1.setStyleSheet(u"background-color: rgb(39, 44, 54);")
self.frame_title_wid_1.setFrameShape(QFrame.StyledPanel)
self.frame_title_wid_1.setFrameShadow(QFrame.Raised)
self.verticalLayout_8 = QVBoxLayout(self.frame_title_wid_1)
self.verticalLayout_8.setObjectName(u"verticalLayout_8")
# `font1` is a QFont bound earlier in the method, above this view.
self.labelBoxBlenderInstalation = QLabel(self.frame_title_wid_1)
self.labelBoxBlenderInstalation.setObjectName(u"labelBoxBlenderInstalation")
self.labelBoxBlenderInstalation.setFont(font1)
self.labelBoxBlenderInstalation.setStyleSheet(u"")
self.verticalLayout_8.addWidget(self.labelBoxBlenderInstalation)
self.verticalLayout_7.addWidget(self.frame_title_wid_1)
self.frame_content_wid_1 = QFrame(self.frame_div_content_1)
self.frame_content_wid_1.setObjectName(u"frame_content_wid_1")
self.frame_content_wid_1.setFrameShape(QFrame.NoFrame)
self.frame_content_wid_1.setFrameShadow(QFrame.Raised)
self.horizontalLayout_9 = QHBoxLayout(self.frame_content_wid_1)
self.horizontalLayout_9.setObjectName(u"horizontalLayout_9")
self.gridLayout = QGridLayout()
self.gridLayout.setObjectName(u"gridLayout")
self.gridLayout.setContentsMargins(-1, -1, -1, 0)
self.lineEdit = QLineEdit(self.frame_content_wid_1)
self.lineEdit.setObjectName(u"lineEdit")
self.lineEdit.setMinimumSize(QSize(0, 30))
self.lineEdit.setStyleSheet(css_lineedit)
self.gridLayout.addWidget(self.lineEdit, 0, 0, 1, 1)
self.pushButton = QPushButton(self.frame_content_wid_1)
self.pushButton.setObjectName(u"pushButton")
self.pushButton.setMinimumSize(QSize(150, 30))
font8 = QFont()
font8.setFamily(u"Segoe UI")
font8.setPointSize(9)
self.pushButton.setFont(font8)
# `css_buton` is a shared stylesheet string defined earlier in the file.
self.pushButton.setStyleSheet(css_buton)
icon3 = QIcon()
icon3.addFile(u":/16x16/icons/16x16/cil-folder-open.png", QSize(), QIcon.Normal, QIcon.Off)
self.pushButton.setIcon(icon3)
self.gridLayout.addWidget(self.pushButton, 0, 1, 1, 1)
self.labelVersion_3 = QLabel(self.frame_content_wid_1)
self.labelVersion_3.setObjectName(u"labelVersion_3")
self.labelVersion_3.setStyleSheet(u"color: rgb(98, 103, 111);")
self.labelVersion_3.setLineWidth(1)
self.labelVersion_3.setAlignment(Qt.AlignLeading|Qt.AlignLeft|Qt.AlignVCenter)
self.gridLayout.addWidget(self.labelVersion_3, 1, 0, 1, 2)
self.horizontalLayout_9.addLayout(self.gridLayout)
self.verticalLayout_7.addWidget(self.frame_content_wid_1)
self.verticalLayout_15.addWidget(self.frame_div_content_1)
self.verticalLayout_6.addWidget(self.frame)
# Second demo frame: checkbox, radio button, sliders, scroll area.
self.frame_2 = QFrame(self.page_widgets)
self.frame_2.setObjectName(u"frame_2")
self.frame_2.setMinimumSize(QSize(0, 150))
self.frame_2.setStyleSheet(u"background-color: rgb(39, 44, 54);\n""border-radius: 5px;")
self.frame_2.setFrameShape(QFrame.StyledPanel)
self.frame_2.setFrameShadow(QFrame.Raised)
self.verticalLayout_11 = QVBoxLayout(self.frame_2)
self.verticalLayout_11.setObjectName(u"verticalLayout_11")
self.gridLayout_2 = QGridLayout()
self.gridLayout_2.setObjectName(u"gridLayout_2")
self.checkBox = QCheckBox(self.frame_2)
self.checkBox.setObjectName(u"checkBox")
self.checkBox.setAutoFillBackground(False)
self.checkBox.setStyleSheet(u"")
self.gridLayout_2.addWidget(self.checkBox, 0, 0, 1, 1)
self.radioButton = QRadioButton(self.frame_2)
self.radioButton.setObjectName(u"radioButton")
self.radioButton.setStyleSheet(u"")
self.gridLayout_2.addWidget(self.radioButton, 0, 1, 1, 1)
self.verticalSlider = QSlider(self.frame_2)
self.verticalSlider.setObjectName(u"verticalSlider")
self.verticalSlider.setStyleSheet(u"")
self.verticalSlider.setOrientation(Qt.Vertical)
self.gridLayout_2.addWidget(self.verticalSlider, 0, 2, 3, 1)
self.verticalScrollBar = QScrollBar(self.frame_2)
self.verticalScrollBar.setObjectName(u"verticalScrollBar")
self.verticalScrollBar.setStyleSheet(u" QScrollBar:vertical {\n"" border: none;\n"" background: rgb(52, 59, 72);\n"" width: 14px;\n"" margin: 21px 0 21px 0;\n"" border-radius: 0px;\n"" }")
self.verticalScrollBar.setOrientation(Qt.Vertical)
self.gridLayout_2.addWidget(self.verticalScrollBar, 0, 4, 3, 1)
self.scrollArea = QScrollArea(self.frame_2)
self.scrollArea.setObjectName(u"scrollArea")
self.scrollArea.setStyleSheet(u"QScrollArea {\n"" border: none;\n"" border-radius: 0px;\n""}\n""QScrollBar:horizontal {\n"" border: none;\n"" background: rgb(52, 59, 72);\n"" height: 14px;\n"" margin: 0px 21px 0 21px;\n"" border-radius: 0px;\n""}\n"" QScrollBar:vertical {\n"" border: none;\n"" background: rgb(52, 59, 72);\n"" width: 14px;\n"" margin: 21px 0 21px 0;\n"" border-radius: 0px;\n"" }\n""")
self.scrollArea.setFrameShape(QFrame.NoFrame)
self.scrollArea.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOn)
self.scrollArea.setHorizontalScrollBarPolicy(Qt.ScrollBarAsNeeded)
self.scrollArea.setWidgetResizable(True)
self.scrollAreaWidgetContents = QWidget()
self.scrollAreaWidgetContents.setObjectName(u"scrollAreaWidgetContents")
self.scrollAreaWidgetContents.setGeometry(QRect(0, 0, 274, 218))
self.horizontalLayout_11 = QHBoxLayout(self.scrollAreaWidgetContents)
self.horizontalLayout_11.setObjectName(u"horizontalLayout_11")
self.plainTextEdit = QPlainTextEdit(self.scrollAreaWidgetContents)
self.plainTextEdit.setObjectName(u"plainTextEdit")
self.plainTextEdit.setMinimumSize(QSize(200, 200))
self.plainTextEdit.setStyleSheet(u"QPlainTextEdit {\n"" background-color: rgb(27, 29, 35);\n"" border-radius: 5px;\n"" padding: 10px;\n""}\n""QPlainTextEdit:hover {\n"" border: 2px solid rgb(64, 71, 88);\n""}\n""QPlainTextEdit:focus {\n"" border: 2px solid rgb(91, 101, 124);\n""}")
self.horizontalLayout_11.addWidget(self.plainTextEdit)
self.scrollArea.setWidget(self.scrollAreaWidgetContents)
self.gridLayout_2.addWidget(self.scrollArea, 0, 5, 3, 1)
self.comboBox = QComboBox(self.frame_2)
self.comboBox.addItem("")
self.comboBox.addItem("")
self.comboBox.addItem("")
self.comboBox.setObjectName(u"comboBox")
self.comboBox.setFont(font8)
self.comboBox.setAutoFillBackground(False)
self.comboBox.setStyleSheet(u"QComboBox{\n"" background-color: rgb(27, 29, 35);\n"" border-radius: 5px;\n"" border: 2px solid rgb(27, 29, 35);\n"" padding: 5px;\n"" padding-left: 10px;\n""}\n""QComboBox:hover{\n"" border: 2px solid rgb(64, 71, 88);\n""}\n""QComboBox QAbstractItemView {\n"" color: rgb(85, 170, 255); \n"" background-color: rgb(27, 29, 35);\n"" padding: 10px;\n"" selection-background-color: rgb(39, 44, 54);\n""}")
self.comboBox.setIconSize(QSize(16, 16))
self.comboBox.setFrame(True)
self.gridLayout_2.addWidget(self.comboBox, 1, 0, 1, 2)
self.horizontalScrollBar = QScrollBar(self.frame_2)
self.horizontalScrollBar.setObjectName(u"horizontalScrollBar")
sizePolicy5 = QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Fixed)
sizePolicy5.setHorizontalStretch(0)
sizePolicy5.setVerticalStretch(0)
sizePolicy5.setHeightForWidth(self.horizontalScrollBar.sizePolicy().hasHeightForWidth())
self.horizontalScrollBar.setSizePolicy(sizePolicy5)
self.horizontalScrollBar.setStyleSheet(u"QScrollBar:horizontal {\n"" border: none;\n"" background: rgb(52, 59, 72);\n"" height: 14px;\n"" margin: 0px 21px 0 21px;\n"" border-radius: 0px;\n""}\n""")
self.horizontalScrollBar.setOrientation(Qt.Horizontal)
self.gridLayout_2.addWidget(self.horizontalScrollBar, 1, 3, 1, 1)
self.commandLinkButton = QCommandLinkButton(self.frame_2)
self.commandLinkButton.setObjectName(u"commandLinkButton")
self.commandLinkButton.setStyleSheet(u"QCommandLinkButton { \n"" color: rgb(85, 170, 255);\n"" border-radius: 5px;\n"" padding: 5px;\n""}\n""QCommandLinkButton:hover { \n"" color: rgb(210, 210, 210);\n"" background-color: rgb(44, 49, 60);\n""}\n""QCommandLinkButton:pressed { \n"" color: rgb(210, 210, 210);\n"" background-color: rgb(52, 58, 71);\n""}")
icon4 = QIcon()
icon4.addFile(u":/16x16/icons/16x16/cil-link.png", QSize(), QIcon.Normal, QIcon.Off)
self.commandLinkButton.setIcon(icon4)
self.gridLayout_2.addWidget(self.commandLinkButton, 1, 6, 1, 1)
self.horizontalSlider = QSlider(self.frame_2)
self.horizontalSlider.setObjectName(u"horizontalSlider")
self.horizontalSlider.setStyleSheet(u"")
self.horizontalSlider.setOrientation(Qt.Horizontal)
self.gridLayout_2.addWidget(self.horizontalSlider, 2, 0, 1, 2)
self.verticalLayout_11.addLayout(self.gridLayout_2)
self.verticalLayout_6.addWidget(self.frame_2)
self.frame_3 = QFrame(self.page_widgets)
self.frame_3.setObjectName(u"frame_3")
self.frame_3.setMinimumSize(QSize(0, 150))
self.frame_3.setFrameShape(QFrame.StyledPanel)
self.frame_3.setFrameShadow(QFrame.Raised)
self.horizontalLayout_12 = QHBoxLayout(self.frame_3)
self.horizontalLayout_12.setSpacing(0)
self.horizontalLayout_12.setObjectName(u"horizontalLayout_12")
self.horizontalLayout_12.setContentsMargins(0, 0, 0, 0)
self.tableWidget = QTableWidget(self.frame_3)
if (self.tableWidget.columnCount() < 4):
self.tableWidget.setColumnCount(4)
__qtablewidgetitem = QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(0, __qtablewidgetitem)
__qtablewidgetitem1 = QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(1, __qtablewidgetitem1)
__qtablewidgetitem2 = QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(2, __qtablewidgetitem2)
__qtablewidgetitem3 = QTableWidgetItem()
self.tableWidget.setHorizontalHeaderItem(3, __qtablewidgetitem3)
if (self.tableWidget.rowCount() < 16):
self.tableWidget.setRowCount(16)
__qtablewidgetitem4 = QTableWidgetItem()
__qtablewidgetitem4.setFont(font2);
self.tableWidget.setVerticalHeaderItem(0, __qtablewidgetitem4)
__qtablewidgetitem5 = QTableWidgetItem()
self.tableWidget.setVerticalHeaderItem(1, __qtablewidgetitem5)
__qtablewidgetitem6 = QTableWidgetItem()
self.tableWidget.setVerticalHeaderItem(2, __qtablewidgetitem6)
__qtablewidgetitem7 = QTableWidgetItem()
self.tableWidget.setVerticalHeaderItem(3, __qtablewidgetitem7)
__qtablewidgetitem8 = QTableWidgetItem()
self.tableWidget.setVerticalHeaderItem(4, __qtablewidgetitem8)
__qtablewidgetitem9 = QTableWidgetItem()
self.tableWidget.setVerticalHeaderItem(5, __qtablewidgetitem9)
__qtablewidgetitem10 = QTableWidgetItem()
self.tableWidget.setVerticalHeaderItem(6, __qtablewidgetitem10)
__qtablewidgetitem11 = QTableWidgetItem()
self.tableWidget.setVerticalHeaderItem(7, __qtablewidgetitem11)
__qtablewidgetitem12 = QTableWidgetItem()
self.tableWidget.setVerticalHeaderItem(8, __qtablewidgetitem12)
__qtablewidgetitem13 = QTableWidgetItem()
self.tableWidget.setVerticalHeaderItem(9, __qtablewidgetitem13)
__qtablewidgetitem14 = QTableWidgetItem()
self.tableWidget.setVerticalHeaderItem(10, __qtablewidgetitem14)
__qtablewidgetitem15 = QTableWidgetItem()
self.tableWidget.setVerticalHeaderItem(11, __qtablewidgetitem15)
__qtablewidgetitem16 = QTableWidgetItem()
self.tableWidget.setVerticalHeaderItem(12, __qtablewidgetitem16)
__qtablewidgetitem17 = QTableWidgetItem()
self.tableWidget.setVerticalHeaderItem(13, __qtablewidgetitem17)
__qtablewidgetitem18 = QTableWidgetItem()
self.tableWidget.setVerticalHeaderItem(14, __qtablewidgetitem18)
__qtablewidgetitem19 = QTableWidgetItem()
self.tableWidget.setVerticalHeaderItem(15, __qtablewidgetitem19)
__qtablewidgetitem20 = QTableWidgetItem()
self.tableWidget.setItem(0, 0, __qtablewidgetitem20)
__qtablewidgetitem21 = QTableWidgetItem()
self.tableWidget.setItem(0, 1, __qtablewidgetitem21)
__qtablewidgetitem22 = QTableWidgetItem()
self.tableWidget.setItem(0, 2, __qtablewidgetitem22)
__qtablewidgetitem23 = QTableWidgetItem()
self.tableWidget.setItem(0, 3, __qtablewidgetitem23)
self.tableWidget.setObjectName(u"tableWidget")
sizePolicy.setHeightForWidth(self.tableWidget.sizePolicy().hasHeightForWidth())
self.tableWidget.setSizePolicy(sizePolicy)
palette1 = QPalette()
palette1.setBrush(QPalette.Active, QPalette.WindowText, brush6)
brush15 = QBrush(QColor(39, 44, 54, 255))
brush15.setStyle(Qt.SolidPattern)
palette1.setBrush(QPalette.Active, QPalette.Button, brush15)
palette1.setBrush(QPalette.Active, QPalette.Text, brush6)
palette1.setBrush(QPalette.Active, QPalette.ButtonText, brush6)
palette1.setBrush(QPalette.Active, QPalette.Base, brush15)
palette1.setBrush(QPalette.Active, QPalette.Window, brush15)
brush16 = QBrush(QColor(210, 210, 210, 128))
brush16.setStyle(Qt.NoBrush)
#if QT_VERSION >= QT_VERSION_CHECK(5, 12, 0)
palette1.setBrush(QPalette.Active, QPalette.PlaceholderText, brush16)
#endif
palette1.setBrush(QPalette.Inactive, QPalette.WindowText, brush6)
palette1.setBrush(QPalette.Inactive, QPalette.Button, brush15)
palette1.setBrush(QPalette.Inactive, QPalette.Text, brush6)
palette1.setBrush(QPalette.Inactive, QPalette.ButtonText, brush6)
palette1.setBrush(QPalette.Inactive, QPalette.Base, brush15)
palette1.setBrush(QPalette.Inactive, QPalette.Window, brush15)
brush17 = QBrush(QColor(210, 210, 210, 128))
brush17.setStyle(Qt.NoBrush)
#if QT_VERSION >= QT_VERSION_CHECK(5, 12, 0)
palette1.setBrush(QPalette.Inactive, QPalette.PlaceholderText, brush17)
#endif
palette1.setBrush(QPalette.Disabled, QPalette.WindowText, brush6)
palette1.setBrush(QPalette.Disabled, QPalette.Button, brush15)
palette1.setBrush(QPalette.Disabled, QPalette.Text, brush6)
palette1.setBrush(QPalette.Disabled, QPalette.ButtonText, brush6)
palette1.setBrush(QPalette.Disabled, QPalette.Base, brush15)
palette1.setBrush(QPalette.Disabled, QPalette.Window, brush15)
brush18 = QBrush(QColor(210, 210, 210, 128))
brush18.setStyle(Qt.NoBrush)
#if QT_VERSION >= QT_VERSION_CHECK(5, 12, 0)
palette1.setBrush(QPalette.Disabled, QPalette.PlaceholderText, brush18)
#endif
self.tableWidget.setPalette(palette1)
self.tableWidget.setStyleSheet(u"QTableWidget { \n"" background-color: rgb(39, 44, 54);\n"" padding: 10px;\n"" border-radius: 5px;\n"" gridline-color: rgb(44, 49, 60);\n"" border-bottom: 1px solid rgb(44, 49, 60);\n""}\n""QTableWidget::item{\n"" border-color: rgb(44, 49, 60);\n"" padding-left: 5px;\n"" padding-right: 5px;\n"" gridline-color: rgb(44, 49, 60);\n""}\n""QTableWidget::item:selected{\n"" background-color: rgb(85, 170, 255);\n""}\n""QScrollBar:horizontal {\n"" border: none;\n"" background: rgb(52, 59, 72);\n"" height: 14px;\n"" margin: 0px 21px 0 21px;\n"" border-radius: 0px;\n""}\n"" QScrollBar:vertical {\n"" border: none;\n"" background: rgb(52, 59, 72);\n"" width: 14px;\n"" margin: 21px 0 21px 0;\n"" border-radius: 0px;\n"" }\n""QHeaderView::section{\n"" Background-color: rgb(39, 44, 54);\n"" max-width: 30px;\n"" border: 1px solid rgb(44, 49, 60);\n"" border-style: none;\n"" border-bottom: 1px solid rgb(44, 49, 60);\n"" border-right: 1px solid rgb(44, 49, 60);\n""}\n""" "QTableWidget::horizontalHeader { \n"" background-color: rgb(81, 255, 0);\n""}\n""QHeaderView::section:horizontal\n""{\n"" border: 1px solid rgb(32, 34, 42);\n"" background-color: rgb(27, 29, 35);\n"" padding: 3px;\n"" border-top-left-radius: 7px;\n"" border-top-right-radius: 7px;\n""}\n""QHeaderView::section:vertical\n""{\n"" border: 1px solid rgb(44, 49, 60);\n""}\n""")
self.tableWidget.setFrameShape(QFrame.NoFrame)
self.tableWidget.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOn)
self.tableWidget.setSizeAdjustPolicy(QAbstractScrollArea.AdjustToContents)
self.tableWidget.setEditTriggers(QAbstractItemView.NoEditTriggers)
self.tableWidget.setAlternatingRowColors(False)
self.tableWidget.setSelectionMode(QAbstractItemView.SingleSelection)
self.tableWidget.setSelectionBehavior(QAbstractItemView.SelectRows)
self.tableWidget.setShowGrid(True)
self.tableWidget.setGridStyle(Qt.SolidLine)
self.tableWidget.setSortingEnabled(False)
self.tableWidget.horizontalHeader().setVisible(True)
self.tableWidget.horizontalHeader().setCascadingSectionResizes(True)
self.tableWidget.horizontalHeader().setDefaultSectionSize(200)
self.tableWidget.horizontalHeader().setStretchLastSection(True)
self.tableWidget.verticalHeader().setVisible(False)
self.tableWidget.verticalHeader().setCascadingSectionResizes(False)
self.tableWidget.verticalHeader().setHighlightSections(False)
self.tableWidget.verticalHeader().setStretchLastSection(True)
self.horizontalLayout_12.addWidget(self.tableWidget)
self.verticalLayout_6.addWidget(self.frame_3)
self.stackedWidget.addWidget(self.page_widgets)
self.verticalLayout_9.addWidget(self.stackedWidget)
self.verticalLayout_4.addWidget(self.frame_content)
self.frame_grip = QFrame(self.frame_content_right)
self.frame_grip.setObjectName(u"frame_grip")
self.frame_grip.setMinimumSize(QSize(0, 25))
self.frame_grip.setMaximumSize(QSize(16777215, 25))
self.frame_grip.setStyleSheet(u"background-color: rgb(33, 37, 43);")
self.frame_grip.setFrameShape(QFrame.NoFrame)
self.frame_grip.setFrameShadow(QFrame.Raised)
self.horizontalLayout_6 = QHBoxLayout(self.frame_grip)
self.horizontalLayout_6.setSpacing(0)
self.horizontalLayout_6.setObjectName(u"horizontalLayout_6")
self.horizontalLayout_6.setContentsMargins(0, 0, 2, 0)
self.frame_label_bottom = QFrame(self.frame_grip)
self.frame_label_bottom.setObjectName(u"frame_label_bottom")
self.frame_label_bottom.setFrameShape(QFrame.NoFrame)
self.frame_label_bottom.setFrameShadow(QFrame.Raised)
self.horizontalLayout_7 = QHBoxLayout(self.frame_label_bottom)
self.horizontalLayout_7.setSpacing(0)
self.horizontalLayout_7.setObjectName(u"horizontalLayout_7")
self.horizontalLayout_7.setContentsMargins(10, 0, 10, 0)
self.label_credits = QLabel(self.frame_label_bottom)
self.label_credits.setObjectName(u"label_credits")
self.label_credits.setFont(font2)
self.label_credits.setStyleSheet(u"color: rgb(98, 103, 111);")
self.horizontalLayout_7.addWidget(self.label_credits)
self.label_version = QLabel(self.frame_label_bottom)
self.label_version.setObjectName(u"label_version")
self.label_version.setMaximumSize(QSize(100, 16777215))
self.label_version.setFont(font2)
self.label_version.setStyleSheet(u"color: rgb(98, 103, 111);")
self.label_version.setAlignment(Qt.AlignRight|Qt.AlignTrailing|Qt.AlignVCenter)
self.horizontalLayout_7.addWidget(self.label_version)
self.horizontalLayout_6.addWidget(self.frame_label_bottom)
self.frame_size_grip = QFrame(self.frame_grip)
self.frame_size_grip.setObjectName(u"frame_size_grip")
self.frame_size_grip.setMaximumSize(QSize(20, 20))
self.frame_size_grip.setStyleSheet(u"QSizeGrip {\n"" background-image: url(:/16x16/icons/16x16/cil-size-grip.png);\n"" background-position: center;\n"" background-repeat: no-reperat;\n""}")
self.frame_size_grip.setFrameShape(QFrame.NoFrame)
self.frame_size_grip.setFrameShadow(QFrame.Raised)
self.horizontalLayout_6.addWidget(self.frame_size_grip)
self.verticalLayout_4.addWidget(self.frame_grip)
self.horizontalLayout_2.addWidget(self.frame_content_right)
self.verticalLayout.addWidget(self.frame_center)
self.horizontalLayout.addWidget(self.frame_main)
MainWindow.setCentralWidget(self.centralwidget)
QWidget.setTabOrder(self.btn_minimize, self.btn_maximize_restore)
QWidget.setTabOrder(self.btn_maximize_restore, self.btn_close)
QWidget.setTabOrder(self.btn_close, self.btn_toggle_menu)
QWidget.setTabOrder(self.btn_toggle_menu, self.checkBox)
QWidget.setTabOrder(self.checkBox, self.comboBox)
QWidget.setTabOrder(self.comboBox, self.radioButton)
QWidget.setTabOrder(self.radioButton, self.horizontalSlider)
QWidget.setTabOrder(self.horizontalSlider, self.verticalSlider)
QWidget.setTabOrder(self.verticalSlider, self.scrollArea)
QWidget.setTabOrder(self.scrollArea, self.plainTextEdit)
QWidget.setTabOrder(self.plainTextEdit, self.tableWidget)
QWidget.setTabOrder(self.tableWidget, self.commandLinkButton)
self.retranslateUi(MainWindow)
self.stackedWidget.setCurrentIndex(1)
QMetaObject.connectSlotsByName(MainWindow)
# setupUi
    def retranslateUi(self, MainWindow) -> None:
        """Apply display text (titles, labels, tooltips, placeholders) to every widget.

        Auto-generated by ``pyside2-uic`` from a Qt Designer ``.ui`` file; called once
        at the end of ``setupUi`` and again whenever the application language changes.
        All user-visible strings are routed through ``QCoreApplication.translate`` so
        Qt Linguist translations can be applied at runtime.

        NOTE(review): several widgets referenced below (``groupBox_4``, the ``Pd_*``
        family, ``comboBox_41``, ``pd_kitaplar``, …) are not created in the visible
        portion of ``setupUi`` — this method looks like the merged output of more than
        one generated UI class. Verify all attributes exist before this runs.

        NOTE(review): ``label_credits``, ``label_version``, ``label_title_bar_top``,
        the window-control tooltips and ``label_top_info_1/2`` are each assigned more
        than once below; only the LAST assignment is visible to the user.
        """
        # --- Window / title bar chrome ---
        MainWindow.setWindowTitle(QCoreApplication.translate("MainWindow", u"MainWindow", None))
        self.btn_toggle_menu.setText("")
        self.label_title_bar_top.setText(QCoreApplication.translate("MainWindow", u"Main Window - Base", None))
        # The "#if QT_CONFIG(...)" / "#endif" lines are uic-generated feature-guard
        # markers; in Python they are plain comments and have no effect.
        #if QT_CONFIG(tooltip)
        self.btn_minimize.setToolTip(QCoreApplication.translate("MainWindow", u"Minimize", None))
        #endif // QT_CONFIG(tooltip)
        self.btn_minimize.setText("")
        #if QT_CONFIG(tooltip)
        self.btn_maximize_restore.setToolTip(QCoreApplication.translate("MainWindow", u"Maximize", None))
        #endif // QT_CONFIG(tooltip)
        self.btn_maximize_restore.setText("")
        #if QT_CONFIG(tooltip)
        self.btn_close.setToolTip(QCoreApplication.translate("MainWindow", u"Close", None))
        #endif // QT_CONFIG(tooltip)
        self.btn_close.setText("")
        self.label_top_info_1.setText(QCoreApplication.translate("MainWindow", u"C:\\Program Files\\Blender Foundation\\Blender 2.82", None))
        self.label_top_info_2.setText(QCoreApplication.translate("MainWindow", u"| Ana Sayfa", None))
        self.label_user_icon.setText(QCoreApplication.translate("MainWindow", u"B\u015e", None) if False else QCoreApplication.translate("MainWindow", u"BŞ", None))
        # --- Page labels (home / empty page) ---
        self.label_6.setText(QCoreApplication.translate("MainWindow", u"Ana Sayfa", None))
        self.label.setText(QCoreApplication.translate("MainWindow", u"Boş Sayfa ", None))
        self.label_7.setText(QCoreApplication.translate("MainWindow", u"Ana Sayfa", None))
        # --- "Widgets" demo page (Blender installation box) ---
        self.labelBoxBlenderInstalation.setText(QCoreApplication.translate("MainWindow", u"BLENDER INSTALLATION", None))
        self.lineEdit.setPlaceholderText(QCoreApplication.translate("MainWindow", u"Your Password", None))
        self.pushButton.setText(QCoreApplication.translate("MainWindow", u"Open Blender", None))
        # NOTE(review): the backslashes of this example path appear to have been lost
        # in the .ui file ("C:Program Files..."); fix it in Qt Designer, not here.
        self.labelVersion_3.setText(QCoreApplication.translate("MainWindow", u"Ex: C:Program FilesBlender FoundationBlender 2.82 blender.exe", None))
        self.checkBox.setText(QCoreApplication.translate("MainWindow", u"CheckBox", None))
        self.radioButton.setText(QCoreApplication.translate("MainWindow", u"RadioButton", None))
        self.comboBox.setItemText(0, QCoreApplication.translate("MainWindow", u"Test 1", None))
        self.comboBox.setItemText(1, QCoreApplication.translate("MainWindow", u"Test 2", None))
        self.comboBox.setItemText(2, QCoreApplication.translate("MainWindow", u"Test 3", None))
        self.commandLinkButton.setText(QCoreApplication.translate("MainWindow", u"CommandLinkButton", None))
        self.commandLinkButton.setDescription(QCoreApplication.translate("MainWindow", u"Open External Link", None))
        # --- Demo table: horizontal header titles (columns 0-3) ---
        # Trailing semicolons below are uic artifacts; harmless in Python.
        ___qtablewidgetitem = self.tableWidget.horizontalHeaderItem(0)
        ___qtablewidgetitem.setText(QCoreApplication.translate("MainWindow", u"0", None));
        ___qtablewidgetitem1 = self.tableWidget.horizontalHeaderItem(1)
        ___qtablewidgetitem1.setText(QCoreApplication.translate("MainWindow", u"1", None));
        ___qtablewidgetitem2 = self.tableWidget.horizontalHeaderItem(2)
        ___qtablewidgetitem2.setText(QCoreApplication.translate("MainWindow", u"2", None));
        ___qtablewidgetitem3 = self.tableWidget.horizontalHeaderItem(3)
        ___qtablewidgetitem3.setText(QCoreApplication.translate("MainWindow", u"3", None));
        # --- Demo table: vertical header titles (rows 0-15, all "New Row") ---
        ___qtablewidgetitem4 = self.tableWidget.verticalHeaderItem(0)
        ___qtablewidgetitem4.setText(QCoreApplication.translate("MainWindow", u"New Row", None));
        ___qtablewidgetitem5 = self.tableWidget.verticalHeaderItem(1)
        ___qtablewidgetitem5.setText(QCoreApplication.translate("MainWindow", u"New Row", None));
        ___qtablewidgetitem6 = self.tableWidget.verticalHeaderItem(2)
        ___qtablewidgetitem6.setText(QCoreApplication.translate("MainWindow", u"New Row", None));
        ___qtablewidgetitem7 = self.tableWidget.verticalHeaderItem(3)
        ___qtablewidgetitem7.setText(QCoreApplication.translate("MainWindow", u"New Row", None));
        ___qtablewidgetitem8 = self.tableWidget.verticalHeaderItem(4)
        ___qtablewidgetitem8.setText(QCoreApplication.translate("MainWindow", u"New Row", None));
        ___qtablewidgetitem9 = self.tableWidget.verticalHeaderItem(5)
        ___qtablewidgetitem9.setText(QCoreApplication.translate("MainWindow", u"New Row", None));
        ___qtablewidgetitem10 = self.tableWidget.verticalHeaderItem(6)
        ___qtablewidgetitem10.setText(QCoreApplication.translate("MainWindow", u"New Row", None));
        ___qtablewidgetitem11 = self.tableWidget.verticalHeaderItem(7)
        ___qtablewidgetitem11.setText(QCoreApplication.translate("MainWindow", u"New Row", None));
        ___qtablewidgetitem12 = self.tableWidget.verticalHeaderItem(8)
        ___qtablewidgetitem12.setText(QCoreApplication.translate("MainWindow", u"New Row", None));
        ___qtablewidgetitem13 = self.tableWidget.verticalHeaderItem(9)
        ___qtablewidgetitem13.setText(QCoreApplication.translate("MainWindow", u"New Row", None));
        ___qtablewidgetitem14 = self.tableWidget.verticalHeaderItem(10)
        ___qtablewidgetitem14.setText(QCoreApplication.translate("MainWindow", u"New Row", None));
        ___qtablewidgetitem15 = self.tableWidget.verticalHeaderItem(11)
        ___qtablewidgetitem15.setText(QCoreApplication.translate("MainWindow", u"New Row", None));
        ___qtablewidgetitem16 = self.tableWidget.verticalHeaderItem(12)
        ___qtablewidgetitem16.setText(QCoreApplication.translate("MainWindow", u"New Row", None));
        ___qtablewidgetitem17 = self.tableWidget.verticalHeaderItem(13)
        ___qtablewidgetitem17.setText(QCoreApplication.translate("MainWindow", u"New Row", None));
        ___qtablewidgetitem18 = self.tableWidget.verticalHeaderItem(14)
        ___qtablewidgetitem18.setText(QCoreApplication.translate("MainWindow", u"New Row", None));
        ___qtablewidgetitem19 = self.tableWidget.verticalHeaderItem(15)
        ___qtablewidgetitem19.setText(QCoreApplication.translate("MainWindow", u"New Row", None));
        # --- Demo table: cell contents of row 0; sorting is suspended while
        # writing items so the rows are not reshuffled mid-update, then restored.
        __sortingEnabled = self.tableWidget.isSortingEnabled()
        self.tableWidget.setSortingEnabled(False)
        ___qtablewidgetitem20 = self.tableWidget.item(0, 0)
        ___qtablewidgetitem20.setText(QCoreApplication.translate("MainWindow", u"Test", None));
        ___qtablewidgetitem21 = self.tableWidget.item(0, 1)
        ___qtablewidgetitem21.setText(QCoreApplication.translate("MainWindow", u"Text", None));
        ___qtablewidgetitem22 = self.tableWidget.item(0, 2)
        ___qtablewidgetitem22.setText(QCoreApplication.translate("MainWindow", u"Cell", None));
        ___qtablewidgetitem23 = self.tableWidget.item(0, 3)
        ___qtablewidgetitem23.setText(QCoreApplication.translate("MainWindow", u"Line", None));
        self.tableWidget.setSortingEnabled(__sortingEnabled)
        # --- Footer (first assignment; overridden again further below) ---
        _translate = QCoreApplication.translate
        self.label_credits.setText(QCoreApplication.translate("MainWindow", u"Yapımcı : Berkay Şen", None))
        self.label_version.setText(QCoreApplication.translate("MainWindow", u"v1.0.0", None))
        # --- Second generated section: "Derslerim" (courses / question solving) page.
        # Re-sets the title bar text already set above (last write wins).
        self.label_title_bar_top.setText(_translate("MainWindow", "Main Window - Base"))
        self.btn_minimize.setToolTip(_translate("MainWindow", "Minimize"))
        self.btn_maximize_restore.setToolTip(_translate("MainWindow", "Maximize"))
        self.btn_close.setToolTip(_translate("MainWindow", "Close"))
        self.label_top_info_1.setText(_translate("MainWindow", "C:\\Program Files\\Blender Foundation\\Blender 2.82"))
        self.label_top_info_2.setText(_translate("MainWindow", "| Ana Sayfa"))
        self.groupBox_4.setTitle(_translate("MainWindow", "Soru Çözme İşlemleri"))
        self.Pd_lbl2.setText(_translate("MainWindow", "Bir Gün Seçiniz"))
        self.Pd_lbl1.setText(_translate("MainWindow", "Toplam Soru Sayısı"))
        self.Pd_lbl3.setText(_translate("MainWindow", "Toplam Doğru Sayısı"))
        self.Pd_lbl4.setText(_translate("MainWindow", "Toplan Yanlış Sayısı"))
        self.Pd_radio_e.setText(_translate("MainWindow", "Ekleme Yapacağım"))
        self.Pd_radio_g.setText(_translate("MainWindow", "Güncelleme Yapacağım"))
        self.Pd_btn_coz.setText(_translate("MainWindow", "Soru Çöz"))
        self.Pd_ptext_yardim.setPlainText(_translate("MainWindow", " YARDIM MENÜSÜ\n""Öncelikle soru çözmek istediğiniz dersi \"Derslerim\" listesi altından seçtikten sonra \"Gün Seçiniz\" kısmından soruları çözmüş olduğunuz günü seçiniz.Ardından toplam çözmüş olduğunuz soru sayısını,toplam çözmüş olduğunuz doğru ve toplam yanlış sayısını ilgili alanlara yazınız.Sonrasın da ise iki seçenek karşınıza çıkıyor.Eğer \"Çözdüğüm Soru Sayılarını Güncelle\" sekmesini seçerek işlem yaparsanız o gün içerisinde çözmüş olduğunuz bilgiler silinir ,yeni girdiğiniz bilgiler ile güncellenir.Ancak \"Çözdüklerime ekleme yapacağım\" sekmesini seçerek işlem yaparsanız önceden kayıtlı sorularınıza ekleme yaparak sisteme kaydedilir."))
        self.groupBox.setTitle(_translate("MainWindow", "Sonuç İşlemleri"))
        self.Pd_btn_1.setText(_translate("MainWindow", "Seçili Dersin Günlük Sonuçlarını Göster"))
        self.Pd_btn_2.setText(_translate("MainWindow", "Seçili Dersin Haftalık Sonucunu Göster"))
        self.Pd_btn_3.setText(_translate("MainWindow", "Tüm Derslerin Haftalık Sonucunu Göster"))
        self.groupBox_2.setTitle(_translate("MainWindow", "Motivasyon Sözü"))
        self.groupBox_3.setTitle(_translate("MainWindow", "Temizleme İşlemleri"))
        self.Pd_btn_4.setText(_translate("MainWindow", "Seçili Dersin Günlük Sonuçlarını Temizle"))
        self.Pd_btn_6.setText(_translate("MainWindow", "Seçili Dersin Haftalık Sonucunu Temizle"))
        self.Pd_btn_5.setText(_translate("MainWindow", "Tüm Derslerin Haftalık Sonuçlarını Temizle"))
        self.label_credits.setText(_translate("MainWindow", "Yapımcı:Berkay Şen"))
        self.label_version.setText(_translate("MainWindow", "v1.0.0"))
        # --- Third generated section: "Denemelerim" (practice exams) page ---
        self.label_title_bar_top.setText(_translate("MainWindow", "Main Window - Base"))
        self.btn_minimize.setToolTip(_translate("MainWindow", "Minimize"))
        self.btn_maximize_restore.setToolTip(_translate("MainWindow", "Maximize"))
        self.btn_close.setToolTip(_translate("MainWindow", "Close"))
        self.label_top_info_1.setText(_translate("MainWindow", "C:\\Program Files\\Blender Foundation\\Blender 2.82"))
        self.label_top_info_2.setText(_translate("MainWindow", "| Ana Sayfa"))
        self.groupBox_41.setTitle(_translate("MainWindow", "Deneme İşlemlerim"))
        self.comboBox_41.setItemText(0, _translate("MainWindow", "Ay"))
        self.comboBox1.setItemText(0, _translate("MainWindow", "Deneme Türü Seçiniz"))
        self.comboBox1.setItemText(1, _translate("MainWindow", "TYT Denemesi"))
        self.comboBox1.setItemText(2, _translate("MainWindow", "AYT Denemesi"))
        self.lineEdit1.setPlaceholderText(_translate("MainWindow", "Deneme puanını giriniz"))
        self.lineEdit_41.setPlaceholderText(_translate("MainWindow", "Toplam yanlış sayısını giriniz"))
        self.lineEdit_21.setPlaceholderText(_translate("MainWindow", "Denemenize bir isim veriniz"))
        self.lineEdit_31.setPlaceholderText(_translate("MainWindow", "Toplam doğru sayısını giriniz"))
        self.label_21.setText(_translate("MainWindow", "Deneme Puanı"))
        self.label_41.setText(_translate("MainWindow", "Yanlış Sayısı"))
        self.label_31.setText(_translate("MainWindow", "Doğru Sayısı"))
        self.label_51.setText(_translate("MainWindow", "Deneme Türü"))
        self.label1.setText(_translate("MainWindow", "Deneme İsmi"))
        self.comboBox_31.setItemText(0, _translate("MainWindow", "Yıl"))
        self.comboBox_21.setItemText(0, _translate("MainWindow", "Gün"))
        self.pushButton1.setText(_translate("MainWindow", "Deneme Kaydet"))
        self.Pd_ptext_yardim1.setPlainText(_translate("MainWindow", " YARDIM MENÜSÜ\n"
        "Denemelerinize isim verip türünü seçerek ve denemeyi çözdüğünüz tarihi seçtikten sonra istediğiniz gibi denemelerinizi veritabanına kaydedebilirsiniz.Kaydettiğiniz zaman sol üstteki listeye denemenizin ismi gelecektir.İsterseniz sol alttaki Sonuç İşlemleri kısmından denemelerinizi tek tek inceleyebilir,isterseniz hepsinin sonuçlarını grafiksel olarak açıp sonuçları görüntüleyebilirsiniz."))
        self.groupBox1.setTitle(_translate("MainWindow", "Sonuç İşlemleri"))
        self.pushButton_21.setText(_translate("MainWindow", "Seçili Denememin Sonuçlarını Göster"))
        self.pushButton_41.setText(_translate("MainWindow", "Sadece TYT Denemelerini Göster"))
        self.pushButton_51.setText(_translate("MainWindow", "Sadece AYT Denemelerini Göster"))
        self.pushButton_31.setText(_translate("MainWindow", "Tüm Denemelerin Sonuçlarını Göster"))
        self.groupBox_21.setTitle(_translate("MainWindow", "Motivasyon Sözü"))
        self.groupBox_31.setTitle(_translate("MainWindow", "Silme İşlemleri"))
        self.pushButton_61.setText(_translate("MainWindow", "Seçili Denememi Sil"))
        self.pushButton_71.setText(_translate("MainWindow", "Sadece TYT Denemelerini Sil"))
        self.pushButton_81.setText(_translate("MainWindow", "Sadece AYT Denemelerini Sil"))
        self.pushButton_91.setText(_translate("MainWindow", "Tüm Denemeleri Sil"))
        self.label_credits.setText(_translate("MainWindow", "Yapımcı:Berkay Şen"))
        self.label_version.setText(_translate("MainWindow", "v1.0.0"))
        # --- Fourth generated section: "Kitap İşlemleri" (book tracking) page ---
        self.label_title_bar_top.setText(_translate("MainWindow", "Kitap İşlemleri"))
        self.btn_minimize.setToolTip(_translate("MainWindow", "Minimize"))
        self.btn_maximize_restore.setToolTip(_translate("MainWindow", "Maximize"))
        self.btn_close.setToolTip(_translate("MainWindow", "Close"))
        self.label_top_info_1.setText(_translate("MainWindow", "Kitap Oku"))
        self.label_top_info_2.setText(_translate("MainWindow", "Kitaplarım"))
        self.group_ust.setTitle(_translate("MainWindow", "Kitap İşlemlerim"))
        # Reuses __sortingEnabled for the book list; suspend/restore sorting as above.
        __sortingEnabled = self.pd_kitaplar.isSortingEnabled()
        self.pd_kitaplar.setSortingEnabled(False)
        self.pd_kitaplar.setSortingEnabled(__sortingEnabled)
        self.pd_ktp_ekle.setTitle(_translate("MainWindow", "Kitap Ekle"))
        self.kp_lbl1.setText(_translate("MainWindow", "Kitap Adı"))
        self.kp_lne_3.setPlaceholderText(_translate("MainWindow", "Kitabın ismini giriniz"))
        self.kp_lbl2.setText(_translate("MainWindow", "Sayfa Sayısı"))
        self.kp_lne_1.setPlaceholderText(_translate("MainWindow", "Kitabın toplam sayfa sayısını giriniz"))
        self.kp_lbl3.setText(_translate("MainWindow", "Okunan Gün"))
        self.kp_lne_2.setPlaceholderText(_translate("MainWindow", "Toplam kaç günde bitirdiğinizi giriniz"))
        self.kp_btn_kydet.setText(_translate("MainWindow", "Kitabı Kaydet"))
        self.Pd_Yrdm.setPlainText(_translate("MainWindow", " YARDIM MENÜSÜ\n"
        "Bu menüde okuduğunuz kitapları isimleri ve sayfa sayılarıyla birlikte,kaç günde okuduğunuzu da uygulamaya kaydedip,okuduğunuz kitap ve sayfa sayılarına göre kendini başarınızı grafiksel olarak inceleyebilirsiniz."))
        self.kp_sonuc_1.setTitle(_translate("MainWindow", "Sonuç İşlemleri"))
        self.kp_btn_1.setText(_translate("MainWindow", "Seçili Kitabın Bilgilerini Göster"))
        self.kp_btn_2.setText(_translate("MainWindow", "Tüm Kitapların Bilgilerini Göster"))
        self.kp_btn_3.setText(_translate("MainWindow", "Grafiksel Gösterim"))
        self.kp_motiv.setTitle(_translate("MainWindow", "Motivasyon Sözü"))
        self.Pd_ptext_motiv12.setPlainText(_translate("MainWindow", " "))
        self.kp_sil.setTitle(_translate("MainWindow", "Silme İşlemleri"))
        self.kp_btn_4.setText(_translate("MainWindow", "Seçili Kitabı Sil"))
        self.kp_btn_6.setText(_translate("MainWindow", "Tüm Kitapları Sil"))
    ################################--DERSLERİM (MY COURSES)--#################################
def Soru_Coz_Proccess(self):
if self.Pd_cbox_gunler.currentText()=="Gün Seçiniz":
yardimciFonk.HataUyarisi("Lütfen soruları çözdüğünüz günü seçiniz.")
elif self.Pd_lne_soru.text()=="" or self.Pd_lne_dogru.text()=="" or self.Pd_lne_yanlis.text()=="":
yardimciFonk.HataUyarisi("Soru çözmek için lütfen gerekli alanları\ndoldurunuz.")
elif self.Pd_lne_dogru.text().isdigit()== False:
yardimciFonk.HataUyarisi("Dogru sayısına metinsel karakterler girilemez.")
elif self.Pd_lne_soru.text().isdigit()== False:
yardimciFonk.HataUyarisi("Dogru sayısına metinsel karakterler girilemez.")
elif self.Pd_lne_yanlis.text().isdigit()== False:
yardimciFonk.HataUyarisi("Dogru sayısına metinsel karakterler girilemez.")
elif self.Pd_radio_e.isChecked()==False and self.Pd_radio_g.isChecked()==False:
yardimciFonk.HataUyarisi("Lütfen ekleme mi yapacağınızı veya\ngüncelleme mi yapacağınızı seçiniz.")
##########----EKLEME YAPILACAK----###################
elif self.Pd_radio_e.isChecked()==True:
try:
veri_toplam=yardimciFonk.SQL("SELECT Toplam FROM {} where Gunler='{}'".format(self.list_dersler.currentItem().text().replace(" ","_"),self.Pd_cbox_gunler.currentText()))
veri_toplam=int(veri_toplam[0][0])
veri_dogrular=yardimciFonk.SQL("SELECT Dogrular FROM {} where Gunler='{}'".format(self.list_dersler.currentItem().text().replace(" ","_"),self.Pd_cbox_gunler.currentText()))
veri_dogrular=int(veri_dogrular[0][0])
veri_yanlislar=yardimciFonk.SQL("SELECT Yanlislar FROM {} where Gunler='{}'".format(self.list_dersler.currentItem().text().replace(" ","_"),self.Pd_cbox_gunler.currentText()))
veri_yanlislar=int(veri_yanlislar[0][0])
ekle_toplam=int(self.Pd_lne_soru.text())
ekle_yanlislar=int(self.Pd_lne_yanlis.text())
ekle_dogrular=int(self.Pd_lne_dogru.text())
new_toplam=ekle_toplam+veri_toplam
new_yanlis=ekle_yanlislar+veri_yanlislar
new_dogru=ekle_dogrular+veri_dogrular
yardimciFonk.SQL("UPDATE {} SET Toplam='{}' where Gunler='{}'".format(self.list_dersler.currentItem().text().replace(" ","_"),new_toplam,self.Pd_cbox_gunler.currentText()))
yardimciFonk.SQL("UPDATE {} SET Dogrular='{}' where Gunler='{}'".format(self.list_dersler.currentItem().text().replace(" ","_"),new_dogru,self.Pd_cbox_gunler.currentText()))
yardimciFonk.SQL("UPDATE {} SET Yanlislar='{}' where Gunler='{}'".format(self.list_dersler.currentItem().text().replace(" ","_"),new_yanlis,self.Pd_cbox_gunler.currentText()))
yardimciFonk.BasariliUyari("Ekleme işlemi başarıyla gerçekleşti.")
self.Pd_lne_soru.clear()
self.Pd_lne_yanlis.clear()
self.Pd_lne_dogru.clear()
except:
yardimciFonk.HataUyarisi("Bir sorun oluştu,lütfen tekrar deneyiniz.")
##########----GÜNCELLEME YAPILACAK----###############
elif self.Pd_radio_g.isChecked()==True:
try:
yardimciFonk.SQL("UPDATE {} set Toplam='{}' where Gunler='{}'".format(self.list_dersler.currentItem().text().replace(" ","_"),self.Pd_lne_soru.text(),self.Pd_cbox_gunler.currentText()))
yardimciFonk.SQL("UPDATE {} set Yanlislar='{}' where Gunler='{}'".format(self.list_dersler.currentItem().text().replace(" ","_"),self.Pd_lne_yanlis.text(),self.Pd_cbox_gunler.currentText()))
yardimciFonk.SQL("UPDATE {} set Dogrular='{}' where Gunler='{}'".format(self.list_dersler.currentItem().text().replace(" ","_"),self.Pd_lne_dogru.text(),self.Pd_cbox_gunler.currentText()))
self.Pd_lne_soru.clear()
self.Pd_lne_yanlis.clear()
self.Pd_lne_dogru.clear()
self.Pd_cbox_gunler.setCurrentText("Gün Seçiniz")
yardimciFonk.BasariliUyari("Veriler başarıyla kaydedilmiştir.")
except:
yardimciFonk.HataUyarisi("Bir sorun oluştu,lütfen tekrar deneyiniz.")
def Dersin_Gunluk_Sonucu(self):
if self.Pd_cbox_gunler.currentText()=="Gün Seçiniz":
yardimciFonk.HataUyarisi("İşlemi gerçekleştirmek için bir gün seçiniz.")
else:
try:
gunluk_dogru=yardimciFonk.SQL("SELECT Dogrular FROM {} where Gunler='{}'".format(self.list_dersler.currentItem().text().replace(" ","_"),self.Pd_cbox_gunler.currentText()))
gunluk_yanlis=yardimciFonk.SQL("SELECT Yanlislar FROM {} where Gunler='{}'".format(self.list_dersler.currentItem().text().replace(" ","_"),self.Pd_cbox_gunler.currentText()))
gunluk_toplam=yardimciFonk.SQL("SELECT Toplam FROM {} where Gunler='{}'".format(self.list_dersler.currentItem().text().replace(" ","_"),self.Pd_cbox_gunler.currentText()))
yardimciFonk.BasariliUyari(f"{self.Pd_cbox_gunler.currentText()} Günü {self.list_dersler.currentItem().text()} dersinden:\n\nToplam Soru Sayısı:{str(gunluk_toplam[0][0])}\nToplam Doğru Sayısı:{str(gunluk_dogru[0][0])}\nToplam Yanlış Sayısı:{str(gunluk_yanlis[0][0])}")
except:
yardimciFonk.HataUyarisi("Bir sorun oluştu,lütfen tekrar deneyiniz.")
def Dersin_Haftalik_Sonucu(self):
try:
pazartesi_verileri=yardimciFonk.SQL("SELECT Toplam,Dogrular,Yanlislar FROM {} where Gunler='Pazartesi'".format(self.list_dersler.currentItem().text().replace(" ","_")))
sali_verileri=yardimciFonk.SQL("SELECT Toplam,Dogrular,Yanlislar FROM {} where Gunler='Salı'".format(self.list_dersler.currentItem().text().replace(" ","_")))
carsamba_verileri=yardimciFonk.SQL("SELECT Toplam,Dogrular,Yanlislar FROM {} where Gunler='Çarşamba'".format(self.list_dersler.currentItem().text().replace(" ","_")))
persembe_verileri=yardimciFonk.SQL("SELECT Toplam,Dogrular,Yanlislar FROM {} where Gunler='Perşembe'".format(self.list_dersler.currentItem().text().replace(" ","_")))
cuma_verileri=yardimciFonk.SQL("SELECT Toplam,Dogrular,Yanlislar FROM {} where Gunler='Cuma'".format(self.list_dersler.currentItem().text().replace(" ","_")))
cumartesi_verileri=yardimciFonk.SQL("SELECT Toplam,Dogrular,Yanlislar FROM {} where Gunler='Cumartesi'".format(self.list_dersler.currentItem().text().replace(" ","_")))
pazar_verileri=yardimciFonk.SQL("SELECT Toplam,Dogrular,Yanlislar FROM {} where Gunler='Pazar'".format(self.list_dersler.currentItem().text().replace(" ","_")))
pazartesi_toplam=int(pazartesi_verileri[0][0])
pazartesi_dogru=int(pazartesi_verileri[0][1])
pazartesi_yanlis=int(pazartesi_verileri[0][2])
sali_toplam = int(sali_verileri[0][0])
sali_dogru = int(sali_verileri[0][1])
sali_yanlis = int(sali_verileri[0][2])
carsamba_toplam = int(carsamba_verileri[0][0])
carsamba_dogru = int(carsamba_verileri[0][1])
carsamba_yanlis = int(carsamba_verileri[0][2])
persembe_toplam = int(persembe_verileri[0][0])
persembe_dogru = int(persembe_verileri[0][1])
persembe_yanlis = int(persembe_verileri[0][2])
cuma_toplam = int(cuma_verileri[0][0])
cuma_dogru = int(cuma_verileri[0][1])
cuma_yanlis = int(cuma_verileri[0][2])
cumartesi_toplam = int(cumartesi_verileri[0][0])
cumartesi_dogru = int(cumartesi_verileri[0][1])
cumartesi_yanlis = int(cumartesi_verileri[0][2])
pazar_toplam = int(pazar_verileri[0][0])
pazar_dogru = int(pazar_verileri[0][1])
pazar_yanlis = int(pazar_verileri[0][2])
plt.figure(figsize=(10, 7))
names = ['Pazartesi\n{} Soru\n{} Doğru\n{} Yanlış'.format(pazartesi_toplam,pazartesi_dogru,pazartesi_yanlis),
'Salı\n{} Soru\n{} Doğru\n{} Yanlış'.format(sali_toplam,sali_dogru,sali_yanlis),
'Çarşamba\n{} Soru\n{} Doğru\n{} Yanlış'.format(carsamba_toplam,carsamba_dogru,carsamba_yanlis),
"Perşembe\n{} Soru\n{} Doğru\n{} Yanlış".format(persembe_toplam,persembe_dogru,persembe_yanlis),
"Cuma\n{} Soru\n{} Doğru\n{} Yanlış".format(cuma_toplam,cuma_dogru,cuma_yanlis),
"Cumartesi\n{} Soru\n{} Doğru\n{} Yanlış".format(cumartesi_toplam,cumartesi_dogru,cumartesi_yanlis),
"Pazar\n{} Soru\n{} Doğru\n{} Yanlış".format(pazar_toplam,pazar_dogru,pazar_yanlis)]
values = [pazartesi_dogru, sali_dogru, carsamba_dogru, persembe_dogru, cuma_dogru, cumartesi_dogru, pazar_dogru]
values2 = [pazartesi_yanlis, sali_yanlis, carsamba_yanlis, persembe_yanlis, cuma_yanlis, cumartesi_yanlis,pazar_yanlis]
values3 = [pazartesi_toplam,sali_toplam,carsamba_toplam,persembe_toplam,cuma_toplam,cumartesi_toplam,pazar_toplam]
plt.bar(names,values3,color="c",width=0.5)
plt.bar(names, values,color="g",width=0.5,align='edge')
plt.bar(names,values2,color="r",width=0.5)
red_patch = mpatches.Patch(color='red', label='Yanlış Sayısı')
blue_patch = mpatches.Patch(label='Soru Sayısı',color="c")
green_patch=mpatches.Patch(color='green',label='Doğru Sayısı')
plt.legend(handles=[blue_patch,green_patch,red_patch])
plt.suptitle('{} Dersinin Haftalık Soru Grafiği'.format(self.list_dersler.currentItem().text()))
plt.xlabel("Günler")
plt.ylabel("Soru Sayısı")
plt.show()
except:
yardimciFonk.HataUyarisi("Bir sorun oluştu,lütfen tekrar deneyiniz.")
def Tum_Ders_Haftalik_Sonuc(self):
#################################-MATEMATİK TYT-#####################################
try:
mat_tyt_dogrular = 0
mat_tyt_yanlislar = 0
mat_tyt_toplam = 0
mat_tyt_verileri=yardimciFonk.SQL("SELECT Toplam FROM Matematik_TYT")
for i in mat_tyt_verileri:
mat_tyt_toplam+=int(i[0])
mat_tyt_verileri = yardimciFonk.SQL("SELECT Yanlislar FROM Matematik_TYT")
for i in mat_tyt_verileri:
mat_tyt_yanlislar += int(i[0])
mat_tyt_verileri = yardimciFonk.SQL("SELECT Dogrular FROM Matematik_TYT")
for i in mat_tyt_verileri:
mat_tyt_dogrular += int(i[0])
#################################-MATEMATİK AYT-#####################################
mat_ayt_dogrular = 0
mat_ayt_yanlislar = 0
mat_ayt_toplam = 0
mat_ayt_verileri = yardimciFonk.SQL("SELECT Toplam FROM Matematik_AYT")
for i in mat_ayt_verileri:
mat_ayt_toplam += int(i[0])
mat_ayt_verileri = yardimciFonk.SQL("SELECT Yanlislar FROM Matematik_AYT")
for i in mat_ayt_verileri:
mat_ayt_yanlislar += int(i[0])
mat_ayt_verileri = yardimciFonk.SQL("SELECT Dogrular FROM Matematik_AYT")
for i in mat_ayt_verileri:
mat_ayt_dogrular += int(i[0])
#################################-Biyoloji TYT-#####################################
biyoloji_tyt_dogrular = 0
biyoloji_tyt_yanlislar = 0
biyoloji_tyt_toplam = 0
bio_tyt_verileri = yardimciFonk.SQL("SELECT Toplam FROM Biyoloji_TYT")
for i in bio_tyt_verileri:
biyoloji_tyt_toplam += int(i[0])
bio_tyt_verileri = yardimciFonk.SQL("SELECT Yanlislar FROM Biyoloji_TYT")
for i in bio_tyt_verileri:
biyoloji_tyt_yanlislar += int(i[0])
bio_tyt_verileri = yardimciFonk.SQL("SELECT Dogrular FROM Biyoloji_TYT")
for i in bio_tyt_verileri:
biyoloji_tyt_dogrular += int(i[0])
#################################-Din Kültürü-#####################################
din_dogrular = 0
din_yanlislar = 0
din_toplam = 0
din_verileri = yardimciFonk.SQL("SELECT Toplam FROM Din_Kültürü")
for i in din_verileri:
din_toplam += int(i[0])
din_verileri = yardimciFonk.SQL("SELECT Yanlislar FROM Din_Kültürü")
for i in din_verileri:
din_yanlislar += int(i[0])
din_verileri = yardimciFonk.SQL("SELECT Dogrular FROM Din_Kültürü")
for i in din_verileri:
din_dogrular += int(i[0])
#################################-Biyoloji AYT-#####################################
biyoloji_ayt_dogrular = 0
biyoloji_ayt_yanlislar = 0
biyoloji_ayt_toplam = 0
bio_ayt_verileri = yardimciFonk.SQL("SELECT Toplam FROM Biyoloji_AYT")
for i in bio_ayt_verileri:
biyoloji_ayt_toplam += int(i[0])
bio_ayt_verileri = yardimciFonk.SQL("SELECT Yanlislar FROM Biyoloji_AYT")
for i in bio_ayt_verileri:
biyoloji_ayt_yanlislar += int(i[0])
bio_ayt_verileri = yardimciFonk.SQL("SELECT Dogrular FROM Biyoloji_AYT")
for i in bio_ayt_verileri:
biyoloji_ayt_dogrular += int(i[0])
#################################-Fizik TYT-#####################################
fizik_tyt_dogrular = 0
fizik_tyt_yanlislar = 0
fizik_tyt_toplam = 0
fizik_tyt_verileri = yardimciFonk.SQL("SELECT Toplam FROM Fizik_TYT")
for i in fizik_tyt_verileri:
fizik_tyt_toplam += int(i[0])
fizik_tyt_verileri = yardimciFonk.SQL("SELECT Yanlislar FROM Fizik_TYT")
for i in fizik_tyt_verileri:
fizik_tyt_yanlislar += int(i[0])
fizik_tyt_verileri = yardimciFonk.SQL("SELECT Dogrular FROM Fizik_TYT")
for i in fizik_tyt_verileri:
fizik_tyt_dogrular += int(i[0])
#################################-Fizik AYT-#####################################
fizik_ayt_dogrular = 0
fizik_ayt_yanlislar = 0
fizik_ayt_toplam = 0
fizik_ayt_verileri = yardimciFonk.SQL("SELECT Toplam FROM Fizik_AYT")
for i in fizik_ayt_verileri:
fizik_ayt_toplam += int(i[0])
fizik_ayt_verileri = yardimciFonk.SQL("SELECT Yanlislar FROM Fizik_AYT")
for i in fizik_ayt_verileri:
fizik_ayt_yanlislar += int(i[0])
fizik_ayt_verileri = yardimciFonk.SQL("SELECT Dogrular FROM Fizik_AYT")
for i in fizik_ayt_verileri:
fizik_ayt_dogrular += int(i[0])
#################################-Kimya TYT-#####################################
kimya_tyt_dogrular = 0
kimya_tyt_yanlislar = 0
kimya_tyt_toplam = 0
kimya_tyt_verileri = yardimciFonk.SQL("SELECT Toplam FROM Kimya_TYT")
for i in kimya_tyt_verileri:
kimya_tyt_toplam += int(i[0])
kimya_tyt_verileri = yardimciFonk.SQL("SELECT Yanlislar FROM Kimya_TYT")
for i in kimya_tyt_verileri:
kimya_tyt_yanlislar += int(i[0])
kimya_tyt_verileri = yardimciFonk.SQL("SELECT Dogrular FROM Kimya_TYT")
for i in kimya_tyt_verileri:
kimya_tyt_dogrular += int(i[0])
#################################-Kimya AYT-#####################################
kimya_ayt_dogrular = 0
kimya_ayt_yanlislar = 0
kimya_ayt_toplam = 0
kimya_ayt_verileri = yardimciFonk.SQL("SELECT Toplam FROM Kimya_AYT")
for i in kimya_ayt_verileri:
kimya_ayt_toplam += int(i[0])
kimya_ayt_verileri = yardimciFonk.SQL("SELECT Yanlislar FROM Kimya_AYT")
for i in kimya_ayt_verileri:
kimya_ayt_yanlislar += int(i[0])
kimya_ayt_verileri = yardimciFonk.SQL("SELECT Dogrular FROM Kimya_AYT")
for i in kimya_ayt_verileri:
kimya_ayt_dogrular += int(i[0])
#################################-Geometri TYT-#####################################
geometri_tyt_dogrular = 0
geometri_tyt_yanlislar = 0
geometri_tyt_toplam = 0
geometri_tyt_verileri = yardimciFonk.SQL("SELECT Toplam FROM Geometri_TYT")
for i in geometri_tyt_verileri:
geometri_tyt_toplam += int(i[0])
geometri_tyt_verileri = yardimciFonk.SQL("SELECT Yanlislar FROM Geometri_TYT")
for i in geometri_tyt_verileri:
geometri_tyt_yanlislar += int(i[0])
geometri_tyt_verileri = yardimciFonk.SQL("SELECT Dogrular FROM Geometri_TYT")
for i in geometri_tyt_verileri:
geometri_tyt_dogrular += int(i[0])
#################################-Geometri AYT-#####################################
geometri_ayt_dogrular = 0
geometri_ayt_yanlislar = 0
geometri_ayt_toplam = 0
geometri_ayt_verileri = yardimciFonk.SQL("SELECT Toplam FROM Geometri_AYT")
for i in geometri_ayt_verileri:
geometri_ayt_toplam += int(i[0])
geometri_ayt_verileri = yardimciFonk.SQL("SELECT Yanlislar FROM Geometri_AYT")
for i in geometri_ayt_verileri:
geometri_ayt_yanlislar += int(i[0])
geometri_ayt_verileri = yardimciFonk.SQL("SELECT Dogrular FROM Geometri_AYT")
for i in geometri_ayt_verileri:
geometri_ayt_dogrular += int(i[0])
#################################-Türkçe-#####################################
turkce_dogrular = 0
turkce_yanlislar = 0
turkce_toplam = 0
turkce_verileri = yardimciFonk.SQL("SELECT Toplam FROM Türkçe")
for i in turkce_verileri:
turkce_toplam += int(i[0])
turkce_verileri = yardimciFonk.SQL("SELECT Yanlislar FROM Türkçe")
for i in turkce_verileri:
turkce_yanlislar += int(i[0])
turkce_verileri = yardimciFonk.SQL("SELECT Dogrular FROM Türkçe")
for i in turkce_verileri:
turkce_dogrular += int(i[0])
#################################-Tarih-#####################################
tarih_dogrular = 0
tarih_yanlislar = 0
tarih_toplam = 0
tarih_verileri = yardimciFonk.SQL("SELECT Toplam FROM Tarih")
for i in tarih_verileri:
tarih_toplam += int(i[0])
tarih_verileri = yardimciFonk.SQL("SELECT Yanlislar FROM Tarih")
for i in tarih_verileri:
tarih_yanlislar += int(i[0])
tarih_verileri = yardimciFonk.SQL("SELECT Dogrular FROM Tarih")
for i in tarih_verileri:
tarih_dogrular += int(i[0])
#################################-Felsefe-#####################################
felsefe_dogrular = 0
felsefe_yanlislar = 0
felsefe_toplam = 0
felsefe_verileri = yardimciFonk.SQL("SELECT Toplam FROM Felsefe")
for i in felsefe_verileri:
felsefe_toplam += int(i[0])
felsefe_verileri = yardimciFonk.SQL("SELECT Yanlislar FROM Felsefe")
for i in felsefe_verileri:
felsefe_yanlislar += int(i[0])
felsefe_verileri = yardimciFonk.SQL("SELECT Dogrular FROM Felsefe")
for i in felsefe_verileri:
felsefe_dogrular += int(i[0])
#################################-Coğrafya-#####################################
cografya_dogrular = 0
cografya_yanlislar = 0
cografya_toplam = 0
cografya_verileri = yardimciFonk.SQL("SELECT Toplam FROM Coğrafya")
for i in cografya_verileri:
cografya_toplam += int(i[0])
cografya_verileri = yardimciFonk.SQL("SELECT Yanlislar FROM Coğrafya")
for i in cografya_verileri:
cografya_yanlislar += int(i[0])
cografya_verileri = yardimciFonk.SQL("SELECT Dogrular FROM Coğrafya")
for i in cografya_verileri:
cografya_dogrular += int(i[0])
namess = ["Mat TYT\n{} Soru\n{} Dogru\n{} Yanlış".format(mat_tyt_toplam, mat_tyt_dogrular, mat_tyt_yanlislar),
"Mat AYT\n{} Soru\n{} Dogru\n{} Yanlış".format(mat_ayt_toplam, mat_ayt_dogrular, mat_ayt_yanlislar),
"Geo TYT\n{} Soru\n{} Dogru\n{} Yanlış".format(geometri_tyt_toplam, geometri_tyt_dogrular,geometri_tyt_yanlislar),
"Geo AYT\n{} Soru\n{} Dogru\n{} Yanlış".format(geometri_ayt_toplam, geometri_ayt_dogrular,geometri_ayt_yanlislar),
"Fizik TYT\n{} Soru\n{} Dogru\n{} Yanlış".format(fizik_tyt_toplam, fizik_tyt_dogrular,fizik_tyt_yanlislar),
"Fizik AYT\n{} Soru\n{} Dogru\n{} Yanlış".format(fizik_ayt_toplam, fizik_ayt_dogrular,fizik_ayt_yanlislar),
"Biyoloji TYT\n{} Soru\n{} Dogru\n{} Yanlış".format(biyoloji_tyt_toplam, biyoloji_tyt_dogrular,biyoloji_tyt_yanlislar),
"Biyoloji AYT\n{} Soru\n{} Dogru\n{} Yanlış".format(biyoloji_ayt_toplam, biyoloji_ayt_dogrular,biyoloji_ayt_yanlislar),
"Kimya TYT\n{} Soru\n{} Dogru\n{} Yanlış".format(kimya_tyt_toplam, kimya_tyt_dogrular,kimya_tyt_yanlislar),
"Kimya AYT\n{} Soru\n{} Dogru\n{} Yanlış".format(kimya_ayt_toplam, kimya_ayt_dogrular,kimya_ayt_yanlislar),
"Felsefe\n{} Soru\n{} Dogru\n{} Yanlış".format(felsefe_toplam, felsefe_dogrular, felsefe_yanlislar),
"Türkçe\n{} Soru\n{} Dogru\n{} Yanlış".format(turkce_toplam, turkce_dogrular, turkce_yanlislar),
"Coğrafya\n{} Soru\n{} Dogru\n{} Yanlış".format(cografya_toplam, cografya_dogrular,cografya_yanlislar),
"Tarih\n{} Soru\n{} Dogru\n{} Yanlış".format(tarih_toplam, tarih_dogrular, tarih_yanlislar),
"Din Kültürü\n{} Soru\n{} Dogru\n{} Yanlış".format(din_toplam, din_dogrular, din_yanlislar)]
valuess = [mat_tyt_toplam, mat_ayt_toplam, geometri_tyt_toplam, geometri_ayt_toplam, fizik_tyt_toplam,
fizik_ayt_toplam, biyoloji_tyt_toplam,
biyoloji_ayt_toplam, kimya_tyt_toplam, kimya_ayt_toplam, felsefe_toplam, turkce_toplam, cografya_toplam, tarih_toplam,din_toplam]
valuess22 = [mat_tyt_yanlislar, mat_ayt_yanlislar, geometri_tyt_yanlislar, geometri_ayt_yanlislar,
fizik_tyt_yanlislar,fizik_ayt_yanlislar, biyoloji_tyt_yanlislar, biyoloji_ayt_yanlislar, kimya_tyt_yanlislar, kimya_ayt_yanlislar,
felsefe_yanlislar, turkce_yanlislar, cografya_yanlislar, tarih_yanlislar,din_yanlislar]
values33 = [mat_tyt_dogrular, mat_ayt_dogrular, geometri_tyt_dogrular, geometri_ayt_dogrular,
fizik_tyt_dogrular, fizik_ayt_dogrular, biyoloji_tyt_dogrular, biyoloji_ayt_dogrular,
kimya_tyt_dogrular, kimya_ayt_dogrular, felsefe_dogrular, turkce_dogrular, cografya_dogrular,
tarih_dogrular,din_dogrular]
plt.figure(figsize=(18, 7))
plt.bar(namess, valuess,color="c",width=0.5)
plt.bar(namess,values33,color="g",width=0.5,align="edge")
plt.bar(namess, valuess22, color="r",width=0.5)
blue_patch = mpatches.Patch(color="c",label='Soru Sayısı')
red_patch = mpatches.Patch(color='red', label='Yanlış Sayısı')
green_patch= mpatches.Patch(color='green', label='Doğru Sayısı')
plt.legend(handles=[blue_patch,green_patch, red_patch])
plt.suptitle("Haftalık Grafik")
plt.xlabel("Dersler")
plt.ylabel("Soru Sayısı")
plt.show()
except:
yardimciFonk.HataUyarisi("Bir sorun oluştu,lütfen tekrar deneyiniz.")
def Dersin_Gunluk_Temizle(self):
if self.Pd_cbox_gunler.currentText()=="Gün Seçiniz":
yardimciFonk.HataUyarisi("İşlem için lütfen bir gün seçiniz.")
else:
reply=yardimciFonk.main("{} dersinin {} günündeki kayıtları silmek\nsıfırlamak istediğinizden emin misiniz?".format(self.list_dersler.currentItem().text(), self.Pd_cbox_gunler.currentText()))
if reply:
try:
yardimciFonk.SQL("UPDATE {} set Toplam='0' where Gunler='{}'".format(self.list_dersler.currentItem().text().replace(" ","_"),self.Pd_cbox_gunler.currentText()))
yardimciFonk.SQL("UPDATE {} set Dogrular='0' where Gunler='{}'".format(self.list_dersler.currentItem().text().replace(" ","_"),self.Pd_cbox_gunler.currentText()))
yardimciFonk.SQL("UPDATE {} set Yanlislar='0' where Gunler='{}'".format(self.list_dersler.currentItem().text().replace(" ","_"),self.Pd_cbox_gunler.currentText()))
yardimciFonk.BasariliUyari("Temizleme işlemi başarıyla gerçekleşti.")
except:
yardimciFonk.HataUyarisi("Bir sorun oluştu,lütfen tekrar deneyiniz.")
else:
pass
def Dersin_Haftalik_Temizle(self):
reply = yardimciFonk.main("{} dersinin tüm haftadaki sonuçlarını silmek\nsıfırlamak istediğinizden emin misiniz".format(self.list_dersler.currentItem().text()))
try:
if reply:
yardimciFonk.SQL("UPDATE {} set Toplam='0'".format(self.list_dersler.currentItem().text().replace(" ","_")))
yardimciFonk.SQL("UPDATE {} set Dogrular='0'".format(self.list_dersler.currentItem().text().replace(" ","_")))
yardimciFonk.SQL("UPDATE {} set Yanlislar='0'".format(self.list_dersler.currentItem().text().replace(" ","_")))
yardimciFonk.BasariliUyari("Temizleme işlemi başarıyla gerçekleştirilmiştir.")
except:
yardimciFonk.HataUyarisi("Bir sorun oluştu,lütfen tekrar deneyiniz.")
def Tum_Ders_Hafta_Temizle(self):
reply = yardimciFonk.main("Tüm derslerin haftadaki tüm kayıtlarını silmek\nsıfırlamak istediğinizden emin misiniz")
if reply:
try:
connect = sqlite3.connect("veritabani.db")
cursor = connect.cursor()
cursor.execute("UPDATE Matematik_TYT set Toplam='0'")
cursor.execute("UPDATE Matematik_TYT set Dogrular='0'")
cursor.execute("UPDATE Matematik_TYT set Yanlislar='0'")
cursor.execute("UPDATE Matematik_AYT set Toplam='0'")
cursor.execute("UPDATE Matematik_AYT set Dogrular='0'")
cursor.execute("UPDATE Matematik_AYT set Yanlislar='0'")
cursor.execute("UPDATE Biyoloji_AYT set Toplam='0'")
cursor.execute("UPDATE Biyoloji_AYT set Dogrular='0'")
cursor.execute("UPDATE Biyoloji_AYT set Yanlislar='0'")
cursor.execute("UPDATE Biyoloji_TYT set Toplam='0'")
cursor.execute("UPDATE Biyoloji_TYT set Dogrular='0'")
cursor.execute("UPDATE Biyoloji_TYT set Yanlislar='0'")
cursor.execute("UPDATE Coğrafya set Toplam='0'")
cursor.execute("UPDATE Coğrafya set Dogrular='0'")
cursor.execute("UPDATE Coğrafya set Yanlislar='0'")
cursor.execute("UPDATE Din_Kültürü set Toplam='0'")
cursor.execute("UPDATE Din_Kültürü set Dogrular='0'")
cursor.execute("UPDATE Din_Kültürü set Yanlislar='0'")
cursor.execute("UPDATE Matematik_TYT set Toplam='0'")
cursor.execute("UPDATE Matematik_TYT set Dogrular='0'")
cursor.execute("UPDATE Matematik_TYT set Yanlislar='0'")
cursor.execute("UPDATE Felsefe set Toplam='0'")
cursor.execute("UPDATE Felsefe set Dogrular='0'")
cursor.execute("UPDATE Felsefe set Yanlislar='0'")
cursor.execute("UPDATE Fizik_AYT set Toplam='0'")
cursor.execute("UPDATE Fizik_AYT set Dogrular='0'")
cursor.execute("UPDATE Fizik_AYT set Yanlislar='0'")
cursor.execute("UPDATE Fizik_TYT set Toplam='0'")
cursor.execute("UPDATE Fizik_TYT set Dogrular='0'")
cursor.execute("UPDATE Fizik_TYT set Yanlislar='0'")
cursor.execute("UPDATE Geometri_TYT set Toplam='0'")
cursor.execute("UPDATE Geometri_TYT set Dogrular='0'")
cursor.execute("UPDATE Geometri_TYT set Yanlislar='0'")
cursor.execute("UPDATE Geometri_AYT set Toplam='0'")
cursor.execute("UPDATE Geometri_AYT set Dogrular='0'")
cursor.execute("UPDATE Geometri_AYT set Yanlislar='0'")
cursor.execute("UPDATE Kimya_TYT set Toplam='0'")
cursor.execute("UPDATE Kimya_TYT set Dogrular='0'")
cursor.execute("UPDATE Kimya_TYT set Yanlislar='0'")
cursor.execute("UPDATE Kimya_AYT set Toplam='0'")
cursor.execute("UPDATE Kimya_AYT set Dogrular='0'")
cursor.execute("UPDATE Kimya_AYT set Yanlislar='0'")
cursor.execute("UPDATE Türkçe set Toplam='0'")
cursor.execute("UPDATE Türkçe set Dogrular='0'")
cursor.execute("UPDATE Türkçe set Yanlislar='0'")
cursor.execute("UPDATE Tarih set Toplam='0'")
cursor.execute("UPDATE Tarih set Dogrular='0'")
cursor.execute("UPDATE Tarih set Yanlislar='0'")
connect.commit()
connect.close()
yardimciFonk.BasariliUyari("Temizleme işlemi başarıyla gerçekleştirilmiştir.")
except:
yardimciFonk.HataUyarisi("Bir sorun oluştu,lütfen tekrar deneyiniz.")
################################--DERSLERİM--##################################
################################--Sınavlarım--#################################
def Deneme_Kaydet(self):
if self.lineEdit1.text()=="" or self.lineEdit1.text()=="" or self.lineEdit_31.text()=="" or self.lineEdit_41.text()=="":
yardimciFonk.HataUyarisi("İşlemin gerçekleşmesi için lütfen gerekli\nbilgileri giriniz.")
elif self.comboBox1.currentText()=="Deneme Türü Seçiniz":
yardimciFonk.HataUyarisi("Lütfen denemenizin türünü seçiniz. TYT/AYT")
elif self.comboBox_21.currentText()=="Gün" or self.comboBox_41.currentText()=="Ay" or self.comboBox_31.currentText()=="Yıl":
yardimciFonk.HataUyarisi("Lütfen denemeyi çözdüğünüz tarihi giriniz.")
elif self.lineEdit1.text().isdigit()==False:
yardimciFonk.HataUyarisi("Deneme puanına metinsel ifadeler girilemez")
elif self.lineEdit_31.text().isdigit()==False:
yardimciFonk.HataUyarisi("Doğru sayısına metinsel ifadeler girilemez")
elif self.lineEdit_41.text().isdigit()==False:
yardimciFonk.HataUyarisi("Yanlış sayısına metinsel ifadeler girilemez")
else:
if self.lineEdit_21.text().count("'"):
yardimciFonk.HataUyarisi("Deneme isminde tek tırnak işareti bulunamaz")
else:
veri=yardimciFonk.SQL("SELECT DenemeAd FROM Sinavlar where DenemeAd='{}'".format(self.lineEdit_21.text()))
if veri !=[]:
yardimciFonk.HataUyarisi("Girmiş olduğunuz deneme adında bir\nbaşka deneme bulunmaktadır.")
else:
tarih=self.comboBox_21.currentText() + " " + self.comboBox_41.currentText() + " " + self.comboBox_31.currentText()
yardimciFonk.SQL(f"INSERT INTO Sinavlar VALUES ('{self.lineEdit_21.text()}','{self.lineEdit1.text()}','{self.lineEdit_31.text()}','{self.lineEdit_41.text()}','{self.comboBox1.currentText()[:3]}','{tarih}')")
yardimciFonk.BasariliUyari("Denemeniz başarıyla kaydedilmiştir.")
self.lineEdit_21.clear()
self.lineEdit1.clear()
self.lineEdit_31.clear()
self.lineEdit_41.clear()
self.comboBox1.setCurrentText("Deneme Türü Seçiniz")
self.comboBox_21.setCurrentText("Gün")
self.comboBox_41.setCurrentText("Ay")
self.comboBox_31.setCurrentText("Yıl")
denemeisimeleri=yardimciFonk.SQL("SELECT DenemeAd From Sinavlar")
self.list_dersler1.clear()
for denemeisimekle in denemeisimeleri:
denemeisimekle = str(denemeisimekle[0])
self.list_dersler1.addItem(denemeisimekle)
del tarih,denemeisimekle,denemeisimeleri
def Secili_Deneme_Sonuc(self):
if self.list_dersler1.currentItem()==None:
yardimciFonk.HataUyarisi("Lütfen listeden bir deneme ismi seçiniz.")
else:
veri=yardimciFonk.SQL("SELECT * FROM Sinavlar where DenemeAd='{}'".format(self.list_dersler1.currentItem().text()))
plt.figure(figsize=(16, 7))
d_names=['Deneme Türü={}'.format(str(veri[0][4])),
'Deneme Tarihi={}'.format(str(veri[0][5]))]
d1_names=['Yanlış Sayısı={}'.format(str(veri[0][3]))]
d2_names = ['Doğru Sayısı={}'.format(str(veri[0][2]))]
dpuani=['Deneme Puanı= {}'.format(str(veri[0][1]))]
d_values=[0,0]
d2_values=int(veri[0][3])
d3_values=int(veri[0][2])
dp_values=int(veri[0][1])
plt.bar(dpuani,dp_values)
plt.bar(d2_names,d3_values,color='g')
plt.bar(d1_names,d2_values,color='r')
plt.bar(d_names, d_values)
plt.suptitle('{} İsimli Deneme Sonucunuz\n{} Puan\n{} Doğru {} Yanlış\n{} Denemesi Tarih: {}'.format(veri[0][0],veri[0][1],veri[0][2],veri[0][3],veri[0][4],veri[0][5]))
plt.xlabel("Bilgiler")
plt.ylabel("Puan")
plt.show()
del d_values,d2_values,d3_values,dp_values,d_names,d1_names,d2_names,dpuani,veri
def TYT_Denemeleri_Sonuc(self):
V_ad=yardimciFonk.SQL("SELECT DenemeAd FROM Sinavlar where Tur='TYT'")
v_Puan=yardimciFonk.SQL("SELECT Puan FROM Sinavlar where Tur='TYT'")
v_Dogru = yardimciFonk.SQL("SELECT Dogrular FROM Sinavlar where Tur='TYT'")
v_Yanlis = yardimciFonk.SQL("SELECT Yanlislar FROM Sinavlar where Tur='TYT'")
v_Tarih = yardimciFonk.SQL("SELECT Tarih FROM Sinavlar where Tur='TYT'")
plt.figure(figsize=(15, 8))
names = []
p_value=[]
y_value=[]
d_value=[]
uzunluk_dogru=len(v_Puan)
print(len(v_Puan))
C=0
for isimler in V_ad:
names.append(str(isimler[0])+"\nPuan:{}".format(str(v_Puan[C][0]))+"\nDoğru:{}".format(v_Dogru[C][0])+"\nYanlış:{}".format(str(v_Yanlis[C][0]))+"\nTarih:{}".format(v_Tarih[C][0]))
if C == uzunluk_dogru:
break
C+=1
for puan in v_Puan:
p_value.append(int(puan[0]))
for yanlis in v_Yanlis:
y_value.append(int(yanlis[0]))
for dogru in v_Dogru:
d_value.append(int(dogru[0]))
plt.bar(names, p_value, color="c", width=0.5)
plt.bar(names, d_value, color="g", width=0.5 ,align='edge')
plt.bar(names, y_value, color="r", width=0.5)
blue_patch = mpatches.Patch(label='Puan', color="c")
green_patch = mpatches.Patch(color='green', label='Doğru Sayısı')
red_patch = mpatches.Patch(color='red', label='Yanlış Sayısı')
plt.legend(handles=[blue_patch, green_patch, red_patch])
plt.suptitle('TYT Denemelerinin Grafiği')
plt.show()
def AYT_Denemeleri_Sonuc(self):
V_ad = yardimciFonk.SQL("SELECT DenemeAd FROM Sinavlar where Tur='AYT'")
v_Puan = yardimciFonk.SQL("SELECT Puan FROM Sinavlar where Tur='AYT'")
v_Dogru = yardimciFonk.SQL("SELECT Dogrular FROM Sinavlar where Tur='AYT'")
v_Yanlis = yardimciFonk.SQL("SELECT Yanlislar FROM Sinavlar where Tur='AYT'")
v_Tarih = yardimciFonk.SQL("SELECT Tarih FROM Sinavlar where Tur='AYT'")
plt.figure(figsize=(15, 8))
names = []
p_value = []
y_value = []
d_value = []
uzunluk_dogru = len(v_Puan)
C = 0
for isimler in V_ad:
names.append(str(isimler[0]) + "\nPuan:{}".format(str(v_Puan[C][0])) + "\nDoğru:{}".format(
v_Dogru[C][0]) + "\nYanlış:{}".format(str(v_Yanlis[C][0])) + "\nTarih:{}".format(v_Tarih[C][0]))
if C == uzunluk_dogru:
break
C += 1
for puan in v_Puan:
p_value.append(int(puan[0]))
for yanlis in v_Yanlis:
y_value.append(int(yanlis[0]))
for dogru in v_Dogru:
d_value.append(int(dogru[0]))
plt.bar(names, p_value, color="c", width=0.5)
plt.bar(names, d_value, color="g", width=0.5, align='edge')
plt.bar(names, y_value, color="r", width=0.5)
blue_patch = mpatches.Patch(label='Puan', color="c")
green_patch = mpatches.Patch(color='green', label='Doğru Sayısı')
red_patch = mpatches.Patch(color='red', label='Yanlış Sayısı')
plt.legend(handles=[blue_patch, green_patch, red_patch])
plt.suptitle('AYT Denemelerinin Grafiği')
plt.show()
def Tum_Denemelerin_Sonuclari(self):
V_ad = yardimciFonk.SQL("SELECT DenemeAd FROM Sinavlar")
v_Puan = yardimciFonk.SQL("SELECT Puan FROM Sinavlar")
v_Dogru = yardimciFonk.SQL("SELECT Dogrular FROM Sinavlar")
v_Yanlis = yardimciFonk.SQL("SELECT Yanlislar FROM Sinavlar")
v_Tarih = yardimciFonk.SQL("SELECT Tarih FROM Sinavlar")
v_Tur= yardimciFonk.SQL("SELECT Tur FROM Sinavlar")
plt.figure(figsize=(15, 8))
names = []
p_value = []
y_value = []
d_value = []
uzunluk_dogru = len(v_Puan)
C = 0
for isimler in V_ad:
names.append(str(isimler[0]) + "\nPuan:{}".format(str(v_Puan[C][0])) + "\nDoğru:{}".format(v_Dogru[C][0]) + " /\ Yanlış:{}".format(str(v_Yanlis[C][0])) + "\nTarih:{}".format(v_Tarih[C][0])+"\n{} Denemesi".format(v_Tur[C][0]))
if C == uzunluk_dogru:
break
C += 1
for puan in v_Puan:
p_value.append(int(puan[0]))
for yanlis in v_Yanlis:
y_value.append(int(yanlis[0]))
for dogru in v_Dogru:
d_value.append(int(dogru[0]))
plt.bar(names, p_value, color="c", width=0.5)
plt.bar(names, d_value, color="g", width=0.5, align='edge')
plt.bar(names, y_value, color="r", width=0.5)
blue_patch = mpatches.Patch(label='Puan', color="c")
green_patch = mpatches.Patch(color='green', label='Doğru Sayısı')
red_patch = mpatches.Patch(color='red', label='Yanlış Sayısı')
plt.legend(handles=[blue_patch, green_patch, red_patch])
plt.suptitle('Tüm Denemelerinin Grafiği')
plt.show()
def Secili_Deneme_Sil(self):
if self.list_dersler1.currentItem() == None:
yardimciFonk.HataUyarisi("Lütfen silinmesini istediğiniz bir denemeyi\nlisteden seçiniz.")
else:
silinecekdenemeismi=self.list_dersler1.currentItem().text()
reply = yardimciFonk.main(f"{silinecekdenemeismi} İsimli denemenizin kayıtlarını silmek\nistediğinizden emin misiniz?")
if reply:
try:
yardimciFonk.SQL("DELETE FROM Sinavlar where DenemeAd='{}'".format(silinecekdenemeismi))
kontrol=yardimciFonk.SQL("SELECT * FROM Sinavlar where DenemeAd='{}'".format(silinecekdenemeismi))
del silinecekdenemeismi
if kontrol==[]:
denemeisimeleri = yardimciFonk.SQL("SELECT DenemeAd From Sinavlar")
self.list_dersler1.clear()
for denemeisimekle in denemeisimeleri:
denemeisimekle = str(denemeisimekle[0])
self.list_dersler1.addItem(denemeisimekle)
yardimciFonk.BasariliUyari("Kayıt veritabanından başarıyla silinmiştir.")
else:
yardimciFonk.HataUyarisi("Kayıt silinirken bir hata oluştu.")
except:
yardimciFonk.HataUyarisi("Kayıt silinirken bir hata oluştu.")
def TYT_Denemelerini_Sil(self):
reply = yardimciFonk.main("Tüm TYT denemelerinizin kayıtlarını silmek istediğinizden\nemin misiniz?")
if reply:
try:
yardimciFonk.SQL("DELETE FROM Sinavlar where Tur='TYT'")
kontrol=yardimciFonk.SQL("SELECT * FROM Sinavlar where Tur='TYT'")
if kontrol==[]:
self.list_dersler1.clear()
denemeisimeleri = yardimciFonk.SQL("SELECT DenemeAd From Sinavlar")
for denemeisimekle in denemeisimeleri:
denemeisimekle = str(denemeisimekle[0])
self.list_dersler1.addItem(denemeisimekle)
yardimciFonk.BasariliUyari("Tüm TYT denemeleriniz silinmiştir.")
except:
yardimciFonk.HataUyarisi("İşlem sırasında bir sorun oluştu.")
def AYT_denemelerini_Sil(self):
reply = yardimciFonk.main("Tüm AYT denemelerinizin kayıtlarını silmek istediğinizden\nemin misiniz?")
if reply:
try:
yardimciFonk.SQL("DELETE FROM Sinavlar where Tur='AYT'")
kontrol=yardimciFonk.SQL("SELECT * FROM Sinavlar where Tur='AYT'")
if kontrol==[]:
self.list_dersler1.clear()
denemeisimeleri = yardimciFonk.SQL("SELECT DenemeAd From Sinavlar")
for denemeisimekle in denemeisimeleri:
denemeisimekle = str(denemeisimekle[0])
self.list_dersler1.addItem(denemeisimekle)
yardimciFonk.BasariliUyari("Tüm AYT denemeleriniz silinmiştir.")
else:
yardimciFonk.HataUyarisi("İşlem sırasında bir hata oluştu.")
except:
yardimciFonk.HataUyarisi("İşlem sırasında bir sorun oluştu.")
def Tum_Denemeleri_Sil(self):
reply = yardimciFonk.main("Tüm denemelerinizin kayıtlarını silmek istediğinizden emin\nmisiniz?")
if reply:
try:
yardimciFonk.SQL("DELETE FROM Sinavlar")
kontrol=yardimciFonk.SQL("SELECT * FROM Sinavlar")
if kontrol==[]:
self.list_dersler1.clear()
denemeisimeleri = yardimciFonk.SQL("SELECT DenemeAd From Sinavlar")
for denemeisimekle in denemeisimeleri:
denemeisimekle = str(denemeisimekle[0])
self.list_dersler1.addItem(denemeisimekle)
yardimciFonk.BasariliUyari("Tüm denemeleriniz başarıyla silinmiştir.")
except:
yardimciFonk.HataUyarisi("İşlem sırasında bir sorun oluştu.")
################################--Sınavlarım--#################################
def Kitap_Kaydet(self):
if self.kp_lne_1.text()=="" or self.kp_lne_2.text()=="" or self.kp_lne_3.text()=="":
yardimciFonk.HataUyarisi("Lütfen gerekli alanları doldurunuz.")
elif self.kp_lne_1.text().isdigit()==False:
yardimciFonk.HataUyarisi("Sayfa sayısına metinsel ifade girilemez.")
elif self.kp_lne_2.text().isdigit()==False:
yardimciFonk.HataUyarisi("Okunan gün kısmına metinsel ifade girilemez.")
else:
try:
isimkontrol=yardimciFonk.SQL("SELECT * FROM Kitaplar where isim ='{}'".format(self.kp_lne_3.text()))
if isimkontrol!=[]:
yardimciFonk.HataUyarisi("Kaydetmeye çalıştığınız kitap isminde zaten\nkayırlı bir kitap bulunmaktadır.")
else:
yardimciFonk.SQL(f"INSERT INTO Kitaplar (isim,SayfaSayisi,OkunanGün) VALUES ('{self.kp_lne_3.text()}','{self.kp_lne_1.text()}','{self.kp_lne_2.text()}')")
veri=yardimciFonk.SQL(f"SELECT * FROM Kitaplar where isim='{self.kp_lne_3.text()}'")
if veri !=[]:
yardimciFonk.BasariliUyari("Kitap başarıyla veritabanına kaydedildi.")
self.kp_lne_3.clear()
self.kp_lne_1.clear()
self.kp_lne_2.clear()
self.pd_kitaplar.clear()
isimler1 = yardimciFonk.SQL("SELECT isim FROM Kitaplar")
for kitapismi in isimler1:
self.pd_kitaplar.addItem(kitapismi[0])
elif veri ==[]:
yardimciFonk.HataUyarisi("Kitap veritabanına kaydedilemedi.")
except:
yardimciFonk.HataUyarisi("Bir sorun oluştu lütfen tekrar deneyiniz.")
def Secili_Kitabi_Getir(self):
if self.pd_kitaplar.currentItem()==None:
yardimciFonk.HataUyarisi("Lütfen listeden okumuş olduğunuz bir kitabı\nseçiniz.")
else:
kitapismi=self.pd_kitaplar.currentItem().text()
veriler=yardimciFonk.SQL(f"SELECT * FROM Kitaplar where isim='{kitapismi}'")
yardimciFonk.BasariliUyari(f"Kitap ismi: {kitapismi}\nSayfa Sayısı: {str(veriler[0][1])}\nOkunan Gün: {str(veriler[0][2])}")
def Toplam_Okunan_Sayfa_ve_Gun_Sayisi(self):
sayfalar=yardimciFonk.SQL("SELECT SayfaSayisi FROM Kitaplar")
gunler=yardimciFonk.SQL("SELECT OkunanGün FROM Kitaplar")
t_sayfa=0
t_gunler=0
t_kitap=len(sayfalar)
for i in sayfalar:
t_sayfa += int(i[0])
for a in gunler:
t_gunler += int(a[0])
yardimciFonk.BasariliUyari(f"Toplam okunan kitap sayısı: {str(t_kitap)}\nToplam okunan sayfa sayısı: {str(t_sayfa)}\nToplam kitap okunan gün sayısı: {str(t_gunler)}")
    def Grafiksel_Gosterim(self):
        """Plot all saved books as a bar chart (pages vs. reading days)."""
        isimler=yardimciFonk.SQL("SELECT isim FROM Kitaplar")
        sayfalar=yardimciFonk.SQL("SELECT SayfaSayisi FROM Kitaplar")
        gunler=yardimciFonk.SQL("SELECT OkunanGün FROM Kitaplar")
        plt.figure(figsize=(19, 7))
        names = []  # x-axis labels (book name plus details)
        sayfasayilari=[]  # page counts
        O_gun_say=[]  # reading-day counts
        C=0
        uzunluk=len(isimler)
        for isim in isimler:
            # Labels are built from the parallel query results.
            names.append(str(isim[0]) +"\nSayfa Sayısı:{}".format(str(sayfalar[C][0])) +"\nOkunan Gün :{}".format(str(gunler[C][0])))
            # NOTE(review): C never reaches len(isimler) inside the loop, so
            # this break appears to be dead code kept as a safety net.
            if C == uzunluk:
                break
            C+=1
        for s_sayisi in sayfalar:
            sayfasayilari.append(int(s_sayisi[0]))
        for okunan_gun_sayisi in gunler:
            O_gun_say.append(int(okunan_gun_sayisi[0]))
        # Day bars are edge-aligned so they do not hide the page bars.
        plt.bar(names,sayfasayilari,width=0.4)
        plt.bar(names,O_gun_say,width=0.4,align="edge")
        red_patch = mpatches.Patch(color='orange', label='Okunan Gün Sayısı')
        blue_patch = mpatches.Patch(label='Sayfa Sayısı')
        plt.legend(handles=[red_patch,blue_patch])
        plt.suptitle('Okunan Kitaplar Grafiği')
        plt.show()
def Secili_Kitap_Sil(self):
if self.pd_kitaplar.currentItem() == None:
yardimciFonk.HataUyarisi("Lütfen silinmesini istediğiniz bir kitabı\nlisteden seçiniz.")
else:
silinecekkitapismi=self.pd_kitaplar.currentItem().text()
reply = yardimciFonk.main(f"{silinecekkitapismi} İsimli denemenizin kayıtlarını silmek\nistediğinizden emin misiniz?")
if reply:
try:
yardimciFonk.SQL("DELETE FROM Kitaplar where isim='{}'".format(silinecekkitapismi))
kontrol=yardimciFonk.SQL("SELECT * FROM Kitaplar where isim='{}'".format(silinecekkitapismi))
del silinecekkitapismi
if kontrol==[]:
isimler1 = yardimciFonk.SQL("SELECT isim FROM Kitaplar")
self.pd_kitaplar.clear()
for kitapismi in isimler1:
self.pd_kitaplar.addItem(kitapismi[0])
yardimciFonk.BasariliUyari("Kayıt veritabanından başarıyla silinmiştir.")
else:
yardimciFonk.HataUyarisi("Kayıt silinirken bir hata oluştu.")
except:
yardimciFonk.HataUyarisi("Kayıt silinirken bir hata oluştu.")
def Tum_Kitap_Sil(self):
reply = yardimciFonk.main("Tüm kitaplarınızın kayıtlarını silmek istediğinizden emin\nmisiniz?")
if reply:
try:
yardimciFonk.SQL("DELETE From Kitaplar")
kontrol = yardimciFonk.SQL("SELECT * FROM Kitaplar")
if kontrol == []:
self.pd_kitaplar.clear()
yardimciFonk.BasariliUyari("Tüm kitaplarınız başarıyla silinmiştir.")
elif kontrol!=[]:
yardimciFonk.HataUyarisi("Kitaplar Silinemedi.")
except:
yardimciFonk.HataUyarisi("İşlem sırasında bir sorun oluştu.")
| 61.785079 | 5,450 | 0.667878 |
4bcf0219da9d979b989f3e7df8c7bce49877e3e3 | 35,796 | py | Python | platform/gcutil/lib/google_api_python_client/apiclient/discovery.py | IsaacHuang/google-cloud-sdk | 52afa5d1a75dff08f4f5380c5cccc015bf796ca5 | [
"Apache-2.0"
] | null | null | null | platform/gcutil/lib/google_api_python_client/apiclient/discovery.py | IsaacHuang/google-cloud-sdk | 52afa5d1a75dff08f4f5380c5cccc015bf796ca5 | [
"Apache-2.0"
] | null | null | null | platform/gcutil/lib/google_api_python_client/apiclient/discovery.py | IsaacHuang/google-cloud-sdk | 52afa5d1a75dff08f4f5380c5cccc015bf796ca5 | [
"Apache-2.0"
] | 2 | 2020-07-25T05:03:06.000Z | 2020-11-04T04:55:57.000Z | # Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Client for discovery based APIs.
A client library for Google's discovery based APIs.
"""
__all__ = [
'build',
'build_from_document',
'fix_method_name',
'key2param',
]
# Standard library imports
import copy
from email.mime.multipart import MIMEMultipart
from email.mime.nonmultipart import MIMENonMultipart
import keyword
import logging
import mimetypes
import os
import re
import urllib
import urlparse
try:
from urlparse import parse_qsl
except ImportError:
from cgi import parse_qsl
# Third-party imports
import httplib2
import mimeparse
import uritemplate
# Local imports
from apiclient.errors import HttpError
from apiclient.errors import InvalidJsonError
from apiclient.errors import MediaUploadSizeError
from apiclient.errors import UnacceptableMimeTypeError
from apiclient.errors import UnknownApiNameOrVersion
from apiclient.errors import UnknownFileType
from apiclient.http import HttpRequest
from apiclient.http import MediaFileUpload
from apiclient.http import MediaUpload
from apiclient.model import JsonModel
from apiclient.model import MediaModel
from apiclient.model import RawModel
from apiclient.schema import Schemas
from oauth2client.anyjson import simplejson
from oauth2client.util import _add_query_parameter
from oauth2client.util import positional
# The client library requires a version of httplib2 that supports RETRIES.
httplib2.RETRIES = 1
logger = logging.getLogger(__name__)
# Matches URI Template expressions such as '{userId}' inside method paths.
URITEMPLATE = re.compile('{[^}]*}')
# Matches a single template variable name inside a URITEMPLATE match.
VARNAME = re.compile('[a-zA-Z0-9_-]+')
DISCOVERY_URI = ('https://www.googleapis.com/discovery/v1/apis/'
                 '{api}/{apiVersion}/rest')
DEFAULT_METHOD_DOC = 'A description of how to use this function'
# HTTP methods that may carry a request body.
HTTP_PAYLOAD_METHODS = frozenset(['PUT', 'POST', 'PATCH'])
# Bit shifts used to convert '10MB'-style discovery sizes to a byte count.
_MEDIA_SIZE_BIT_SHIFTS = {'KB': 10, 'MB': 20, 'GB': 30, 'TB': 40}
# Synthetic parameter descriptions injected into generated method signatures.
BODY_PARAMETER_DEFAULT_VALUE = {
    'description': 'The request body.',
    'type': 'object',
    'required': True,
}
MEDIA_BODY_PARAMETER_DEFAULT_VALUE = {
    'description': ('The filename of the media request body, or an instance '
                    'of a MediaUpload object.'),
    'type': 'string',
    'required': False,
}
# Parameters accepted by the stack, but not visible via discovery.
# TODO(dhermes): Remove 'userip' in 'v2'.
STACK_QUERY_PARAMETERS = frozenset(['trace', 'pp', 'userip', 'strict'])
STACK_QUERY_PARAMETER_DEFAULT_VALUE = {'type': 'string', 'location': 'query'}
# Library-specific reserved words beyond Python keywords.
RESERVED_WORDS = frozenset(['body'])
def fix_method_name(name):
  """Fix method names to avoid reserved word conflicts.

  Args:
    name: string, method name.

  Returns:
    The name with a '_' appended when it collides with a Python keyword or
    a library reserved word; otherwise the name unchanged.
  """
  is_reserved = keyword.iskeyword(name) or name in RESERVED_WORDS
  return name + '_' if is_reserved else name
def key2param(key):
  """Converts key names into parameter names.

  For example, converting "max-results" -> "max_results"

  Args:
    key: string, the method key name.

  Returns:
    A safe method name based on the key name.
  """
  # A leading non-letter would be an invalid Python identifier start.
  prefix = '' if key[0].isalpha() else 'x'
  return prefix + ''.join(c if c.isalnum() else '_' for c in key)
@positional(2)
def build(serviceName,
          version,
          http=None,
          discoveryServiceUrl=DISCOVERY_URI,
          developerKey=None,
          model=None,
          requestBuilder=HttpRequest):
  """Construct a Resource for interacting with an API.

  Construct a Resource object for interacting with an API. The serviceName and
  version are the names from the Discovery service.

  Args:
    serviceName: string, name of the service.
    version: string, the version of the service.
    http: httplib2.Http, An instance of httplib2.Http or something that acts
      like it that HTTP requests will be made through.
    discoveryServiceUrl: string, a URI Template that points to the location of
      the discovery service. It should have two parameters {api} and
      {apiVersion} that when filled in produce an absolute URI to the discovery
      document for that service.
    developerKey: string, key obtained from
      https://code.google.com/apis/console.
    model: apiclient.Model, converts to and from the wire format.
    requestBuilder: apiclient.http.HttpRequest, encapsulator for an HTTP
      request.

  Returns:
    A Resource object with methods for interacting with the service.

  Raises:
    UnknownApiNameOrVersion: if the discovery service returns 404 for the
      requested service/version pair.
    HttpError: for any other HTTP error from the discovery service.
    InvalidJsonError: if the discovery response cannot be parsed as JSON.
  """
  params = {
      'api': serviceName,
      'apiVersion': version
      }

  if http is None:
    http = httplib2.Http()

  requested_url = uritemplate.expand(discoveryServiceUrl, params)

  # REMOTE_ADDR is defined by the CGI spec [RFC3875] as the environment
  # variable that contains the network address of the client sending the
  # request. If it exists then add that to the request for the discovery
  # document to avoid exceeding the quota on discovery requests.
  if 'REMOTE_ADDR' in os.environ:
    requested_url = _add_query_parameter(requested_url, 'userIp',
                                         os.environ['REMOTE_ADDR'])
  logger.info('URL being requested: %s' % requested_url)

  resp, content = http.request(requested_url)

  # 404 from the discovery service means the name/version pair is unknown.
  if resp.status == 404:
    raise UnknownApiNameOrVersion("name: %s version: %s" % (serviceName,
                                                            version))
  if resp.status >= 400:
    raise HttpError(resp, content, uri=requested_url)

  try:
    service = simplejson.loads(content)
  except ValueError, e:
    logger.error('Failed to parse as JSON: ' + content)
    raise InvalidJsonError()

  # Delegate to build_from_document with the raw JSON string.
  return build_from_document(content, base=discoveryServiceUrl, http=http,
      developerKey=developerKey, model=model, requestBuilder=requestBuilder)
@positional(1)
def build_from_document(
    service,
    base=None,
    future=None,
    http=None,
    developerKey=None,
    model=None,
    requestBuilder=HttpRequest):
  """Create a Resource for interacting with an API.

  Same as `build()`, but constructs the Resource object from a discovery
  document that is it given, as opposed to retrieving one over HTTP.

  Args:
    service: string or object, the JSON discovery document describing the API.
      The value passed in may either be the JSON string or the deserialized
      JSON.
    base: string, base URI for all HTTP requests, usually the discovery URI.
      This parameter is no longer used as rootUrl and servicePath are included
      within the discovery document. (deprecated)
    future: string, discovery document with future capabilities (deprecated).
    http: httplib2.Http, An instance of httplib2.Http or something that acts
      like it that HTTP requests will be made through.
    developerKey: string, Key for controlling API usage, generated
      from the API Console.
    model: Model class instance that serializes and de-serializes requests and
      responses.
    requestBuilder: Takes an http request and packages it up to be executed.

  Returns:
    A Resource object with methods for interacting with the service.
  """

  # future is no longer used.
  future = {}

  if isinstance(service, basestring):
    service = simplejson.loads(service)

  # The deprecated 'base' argument is ignored; the effective base URL always
  # comes from rootUrl + servicePath inside the discovery document itself.
  base = urlparse.urljoin(service['rootUrl'], service['servicePath'])
  schema = Schemas(service)

  if model is None:
    features = service.get('features', [])
    # 'dataWrapper' APIs wrap payloads in a {'data': ...} envelope.
    model = JsonModel('dataWrapper' in features)
  return Resource(http=http, baseUrl=base, model=model,
                  developerKey=developerKey, requestBuilder=requestBuilder,
                  resourceDesc=service, rootDesc=service, schema=schema)
def _cast(value, schema_type):
"""Convert value to a string based on JSON Schema type.
See http://tools.ietf.org/html/draft-zyp-json-schema-03 for more details on
JSON Schema.
Args:
value: any, the value to convert
schema_type: string, the type that value should be interpreted as
Returns:
A string representation of 'value' based on the schema_type.
"""
if schema_type == 'string':
if type(value) == type('') or type(value) == type(u''):
return value
else:
return str(value)
elif schema_type == 'integer':
return str(int(value))
elif schema_type == 'number':
return str(float(value))
elif schema_type == 'boolean':
return str(bool(value)).lower()
else:
if type(value) == type('') or type(value) == type(u''):
return value
else:
return str(value)
def _media_size_to_long(maxSize):
  """Convert a string media size, such as 10GB or 3TB into an integer.

  Args:
    maxSize: string, size as a string, such as 2MB or 7GB.

  Returns:
    The size as an integer value.
  """
  # Fewer than two characters cannot carry a unit suffix; treat as zero
  # (the empty string is the common case when 'maxSize' is absent).
  if len(maxSize) < 2:
    return 0L
  units = maxSize[-2:].upper()
  bit_shift = _MEDIA_SIZE_BIT_SHIFTS.get(units)
  if bit_shift is not None:
    # Known unit: shift the numeric prefix into a byte count.
    return long(maxSize[:-2]) << bit_shift
  else:
    # No recognized unit suffix; assume the whole value is already bytes.
    return long(maxSize)
def _media_path_url_from_info(root_desc, path_url):
"""Creates an absolute media path URL.
Constructed using the API root URI and service path from the discovery
document and the relative path for the API method.
Args:
root_desc: Dictionary; the entire original deserialized discovery document.
path_url: String; the relative URL for the API method. Relative to the API
root, which is specified in the discovery document.
Returns:
String; the absolute URI for media upload for the API method.
"""
return '%(root)supload/%(service_path)s%(path)s' % {
'root': root_desc['rootUrl'],
'service_path': root_desc['servicePath'],
'path': path_url,
}
def _fix_up_parameters(method_desc, root_desc, http_method):
  """Updates parameters of an API method with values specific to this library.

  Specifically, adds whatever global parameters are specified by the API to the
  parameters for the individual method. Also adds parameters which don't
  appear in the discovery document, but are available to all discovery based
  APIs (these are listed in STACK_QUERY_PARAMETERS).

  SIDE EFFECTS: This updates the parameters dictionary object in the method
  description.

  Args:
    method_desc: Dictionary with metadata describing an API method. Value comes
        from the dictionary of methods stored in the 'methods' key in the
        deserialized discovery document.
    root_desc: Dictionary; the entire original deserialized discovery document.
    http_method: String; the HTTP method used to call the API method described
        in method_desc.

  Returns:
    The updated Dictionary stored in the 'parameters' key of the method
        description dictionary.
  """
  parameters = method_desc.setdefault('parameters', {})

  # Add in the parameters common to all methods.
  for name, description in root_desc.get('parameters', {}).iteritems():
    parameters[name] = description

  # Add in undocumented query parameters.
  # .copy() so later per-method mutation cannot alter the shared default.
  for name in STACK_QUERY_PARAMETERS:
    parameters[name] = STACK_QUERY_PARAMETER_DEFAULT_VALUE.copy()

  # Add 'body' (our own reserved word) to parameters if the method supports
  # a request payload.
  if http_method in HTTP_PAYLOAD_METHODS and 'request' in method_desc:
    body = BODY_PARAMETER_DEFAULT_VALUE.copy()
    body.update(method_desc['request'])
    parameters['body'] = body

  return parameters
def _fix_up_media_upload(method_desc, root_desc, path_url, parameters):
  """Adds a 'media_body' parameter when the method supports media upload.

  SIDE EFFECTS: If the method supports media upload and has a required body,
  sets body to be optional (required=False) instead. Also, if there is a
  'mediaUpload' in the method description, adds 'media_upload' key to
  parameters.

  Args:
    method_desc: Dictionary with metadata describing an API method. Value comes
        from the dictionary of methods stored in the 'methods' key in the
        deserialized discovery document.
    root_desc: Dictionary; the entire original deserialized discovery document.
    path_url: String; the relative URL for the API method. Relative to the API
        root, which is specified in the discovery document.
    parameters: A dictionary describing method parameters for method described
        in method_desc.

  Returns:
    Triple (accept, max_size, media_path_url) where accept lists the content
    types allowed for upload (empty list when absent), max_size is the upload
    size limit in bytes as a long (0 when absent) and media_path_url is the
    absolute upload URI, or None when the method has no media upload support.
  """
  upload_info = method_desc.get('mediaUpload', {})
  accept = upload_info.get('accept', [])
  max_size = _media_size_to_long(upload_info.get('maxSize', ''))

  if not upload_info:
    # No media upload support for this method.
    return accept, max_size, None

  media_path_url = _media_path_url_from_info(root_desc, path_url)
  parameters['media_body'] = MEDIA_BODY_PARAMETER_DEFAULT_VALUE.copy()
  # Once media can be uploaded alone, a previously required body is optional.
  if 'body' in parameters:
    parameters['body']['required'] = False

  return accept, max_size, media_path_url
def _fix_up_method_description(method_desc, root_desc):
  """Updates a method description in a discovery document.

  SIDE EFFECTS: Changes the parameters dictionary in the method description with
  extra parameters which are used locally.

  Args:
    method_desc: Dictionary with metadata describing an API method. Value comes
        from the dictionary of methods stored in the 'methods' key in the
        deserialized discovery document.
    root_desc: Dictionary; the entire original deserialized discovery document.

  Returns:
    Tuple (path_url, http_method, method_id, accept, max_size, media_path_url)
    where:
      - path_url is a String; the relative URL for the API method. Relative to
        the API root, which is specified in the discovery document.
      - http_method is a String; the HTTP method used to call the API method
        described in the method description.
      - method_id is a String; the name of the RPC method associated with the
        API method, and is in the method description in the 'id' key.
      - accept is a list of strings representing what content types are
        accepted for media upload. Defaults to empty list if not in the
        discovery document.
      - max_size is a long representing the max size in bytes allowed for a
        media upload. Defaults to 0L if not in the discovery document.
      - media_path_url is a String; the absolute URI for media upload for the
        API method. Constructed using the API root URI and service path from
        the discovery document and the relative path for the API method. If
        media upload is not supported, this is None.
  """
  path_url = method_desc['path']
  http_method = method_desc['httpMethod']
  method_id = method_desc['id']

  parameters = _fix_up_parameters(method_desc, root_desc, http_method)
  # Order is important. `_fix_up_media_upload` needs `method_desc` to have a
  # 'parameters' key and needs to know if there is a 'body' parameter because it
  # also sets a 'media_body' parameter.
  accept, max_size, media_path_url = _fix_up_media_upload(
      method_desc, root_desc, path_url, parameters)

  return path_url, http_method, method_id, accept, max_size, media_path_url
# TODO(dhermes): Convert this class to ResourceMethod and make it callable
class ResourceMethodParameters(object):
  """Represents the parameters associated with a method.

  Attributes:
    argmap: Map from method parameter name (string) to query parameter name
        (string).
    required_params: List of required parameters (represented by parameter
        name as string).
    repeated_params: List of repeated parameters (represented by parameter
        name as string).
    pattern_params: Map from method parameter name (string) to regular
        expression (as a string). If the pattern is set for a parameter, the
        value for that parameter must match the regular expression.
    query_params: List of parameters (represented by parameter name as string)
        that will be used in the query string.
    path_params: Set of parameters (represented by parameter name as string)
        that will be used in the base URL path.
    param_types: Map from method parameter name (string) to parameter type. Type
        can be any valid JSON schema type; valid values are 'any', 'array',
        'boolean', 'integer', 'number', 'object', or 'string'. Reference:
        http://tools.ietf.org/html/draft-zyp-json-schema-03#section-5.1
    enum_params: Map from method parameter name (string) to list of strings,
        where each list of strings is the list of acceptable enum values.
  """

  def __init__(self, method_desc):
    """Constructor for ResourceMethodParameters.

    Sets default values and defers to set_parameters to populate.

    Args:
      method_desc: Dictionary with metadata describing an API method. Value
          comes from the dictionary of methods stored in the 'methods' key in
          the deserialized discovery document.
    """
    self.argmap = {}
    self.required_params = []
    self.repeated_params = []
    self.pattern_params = {}
    self.query_params = []
    # TODO(dhermes): Change path_params to a list if the extra URITEMPLATE
    #                parsing is gotten rid of.
    self.path_params = set()
    self.param_types = {}
    self.enum_params = {}

    self.set_parameters(method_desc)

  def set_parameters(self, method_desc):
    """Populates maps and lists based on method description.

    Iterates through each parameter for the method and parses the values from
    the parameter dictionary.

    Args:
      method_desc: Dictionary with metadata describing an API method. Value
          comes from the dictionary of methods stored in the 'methods' key in
          the deserialized discovery document.
    """
    for arg, desc in method_desc.get('parameters', {}).iteritems():
      # key2param turns e.g. 'max-results' into the safe name 'max_results'.
      param = key2param(arg)
      self.argmap[param] = arg

      if desc.get('pattern'):
        self.pattern_params[param] = desc['pattern']
      if desc.get('enum'):
        self.enum_params[param] = desc['enum']
      if desc.get('required'):
        self.required_params.append(param)
      if desc.get('repeated'):
        self.repeated_params.append(param)
      if desc.get('location') == 'query':
        self.query_params.append(param)
      if desc.get('location') == 'path':
        self.path_params.add(param)
      self.param_types[param] = desc.get('type', 'string')

    # TODO(dhermes): Determine if this is still necessary. Discovery based APIs
    #                should have all path parameters already marked with
    #                'location: path'.
    # Scan the path template itself for variables not declared as path params.
    for match in URITEMPLATE.finditer(method_desc['path']):
      for namematch in VARNAME.finditer(match.group(0)):
        name = key2param(namematch.group(0))
        self.path_params.add(name)
        if name in self.query_params:
          self.query_params.remove(name)
def createMethod(methodName, methodDesc, rootDesc, schema):
  """Creates a method for attaching to a Resource.

  Returns a (name, callable) pair; the callable validates its keyword
  arguments against the discovery description, builds the request URL
  (including media upload handling) and returns an HttpRequest.

  Args:
    methodName: string, name of the method to use.
    methodDesc: object, fragment of deserialized discovery document that
      describes the method.
    rootDesc: object, the entire deserialized discovery document.
    schema: object, mapping of schema names to schema descriptions.
  """
  methodName = fix_method_name(methodName)
  (pathUrl, httpMethod, methodId, accept,
   maxSize, mediaPathUrl) = _fix_up_method_description(methodDesc, rootDesc)

  parameters = ResourceMethodParameters(methodDesc)

  def method(self, **kwargs):
    # Don't bother with doc string, it will be over-written by createMethod.

    # Reject keyword arguments the discovery document doesn't know about.
    for name in kwargs.iterkeys():
      if name not in parameters.argmap:
        raise TypeError('Got an unexpected keyword argument "%s"' % name)

    # Remove args that have a value of None.
    keys = kwargs.keys()
    for name in keys:
      if kwargs[name] is None:
        del kwargs[name]

    for name in parameters.required_params:
      if name not in kwargs:
        raise TypeError('Missing required parameter "%s"' % name)

    # Validate values against the 'pattern' regexes from the discovery doc.
    for name, regex in parameters.pattern_params.iteritems():
      if name in kwargs:
        if isinstance(kwargs[name], basestring):
          pvalues = [kwargs[name]]
        else:
          pvalues = kwargs[name]
        for pvalue in pvalues:
          if re.match(regex, pvalue) is None:
            raise TypeError(
                'Parameter "%s" value "%s" does not match the pattern "%s"' %
                (name, pvalue, regex))

    for name, enums in parameters.enum_params.iteritems():
      if name in kwargs:
        # We need to handle the case of a repeated enum
        # name differently, since we want to handle both
        # arg='value' and arg=['value1', 'value2']
        if (name in parameters.repeated_params and
            not isinstance(kwargs[name], basestring)):
          values = kwargs[name]
        else:
          values = [kwargs[name]]
        for value in values:
          if value not in enums:
            raise TypeError(
                'Parameter "%s" value "%s" is not an allowed value in "%s"' %
                (name, value, str(enums)))

    # Split arguments into query vs. path parameters, casting each value to
    # the string form its JSON Schema type requires.
    actual_query_params = {}
    actual_path_params = {}
    for key, value in kwargs.iteritems():
      to_type = parameters.param_types.get(key, 'string')
      # For repeated parameters we cast each member of the list.
      if key in parameters.repeated_params and type(value) == type([]):
        cast_value = [_cast(x, to_type) for x in value]
      else:
        cast_value = _cast(value, to_type)
      if key in parameters.query_params:
        actual_query_params[parameters.argmap[key]] = cast_value
      if key in parameters.path_params:
        actual_path_params[parameters.argmap[key]] = cast_value
    body_value = kwargs.get('body', None)
    media_filename = kwargs.get('media_body', None)

    if self._developerKey:
      actual_query_params['key'] = self._developerKey

    # '_media' download methods and response-less methods bypass JSON
    # deserialization.
    model = self._model
    if methodName.endswith('_media'):
      model = MediaModel()
    elif 'response' not in methodDesc:
      model = RawModel()

    headers = {}
    headers, params, query, body = model.request(headers,
        actual_path_params, actual_query_params, body_value)

    expanded_url = uritemplate.expand(pathUrl, params)
    url = urlparse.urljoin(self._baseUrl, expanded_url + query)

    resumable = None
    multipart_boundary = ''

    if media_filename:
      # Ensure we end up with a valid MediaUpload object.
      if isinstance(media_filename, basestring):
        (media_mime_type, encoding) = mimetypes.guess_type(media_filename)
        if media_mime_type is None:
          raise UnknownFileType(media_filename)
        if not mimeparse.best_match([media_mime_type], ','.join(accept)):
          raise UnacceptableMimeTypeError(media_mime_type)
        media_upload = MediaFileUpload(media_filename,
                                       mimetype=media_mime_type)
      elif isinstance(media_filename, MediaUpload):
        media_upload = media_filename
      else:
        raise TypeError('media_filename must be str or MediaUpload.')

      # Check the maxSize
      if maxSize > 0 and media_upload.size() > maxSize:
        raise MediaUploadSizeError("Media larger than: %s" % maxSize)

      # Use the media path uri for media uploads
      expanded_url = uritemplate.expand(mediaPathUrl, params)
      url = urlparse.urljoin(self._baseUrl, expanded_url + query)
      if media_upload.resumable():
        url = _add_query_parameter(url, 'uploadType', 'resumable')

      if media_upload.resumable():
        # This is all we need to do for resumable, if the body exists it gets
        # sent in the first request, otherwise an empty body is sent.
        resumable = media_upload
      else:
        # A non-resumable upload
        if body is None:
          # This is a simple media upload
          headers['content-type'] = media_upload.mimetype()
          body = media_upload.getbytes(0, media_upload.size())
          url = _add_query_parameter(url, 'uploadType', 'media')
        else:
          # This is a multipart/related upload.
          msgRoot = MIMEMultipart('related')
          # msgRoot should not write out it's own headers
          setattr(msgRoot, '_write_headers', lambda self: None)

          # attach the body as one part
          msg = MIMENonMultipart(*headers['content-type'].split('/'))
          msg.set_payload(body)
          msgRoot.attach(msg)

          # attach the media as the second part
          msg = MIMENonMultipart(*media_upload.mimetype().split('/'))
          msg['Content-Transfer-Encoding'] = 'binary'
          payload = media_upload.getbytes(0, media_upload.size())
          msg.set_payload(payload)
          msgRoot.attach(msg)
          body = msgRoot.as_string()

          multipart_boundary = msgRoot.get_boundary()
          headers['content-type'] = ('multipart/related; '
                                     'boundary="%s"') % multipart_boundary
          url = _add_query_parameter(url, 'uploadType', 'multipart')

    logger.info('URL being requested: %s' % url)
    return self._requestBuilder(self._http,
                                model.response,
                                url,
                                method=httpMethod,
                                body=body,
                                headers=headers,
                                methodId=methodId,
                                resumable=resumable)

  # Generate a docstring for the method from the discovery description.
  docs = [methodDesc.get('description', DEFAULT_METHOD_DOC), '\n\n']
  if len(parameters.argmap) > 0:
    docs.append('Args:\n')

  # Skip undocumented params and params common to all methods.
  skip_parameters = rootDesc.get('parameters', {}).keys()
  skip_parameters.extend(STACK_QUERY_PARAMETERS)

  all_args = parameters.argmap.keys()
  args_ordered = [key2param(s) for s in methodDesc.get('parameterOrder', [])]

  # Move body to the front of the line.
  if 'body' in all_args:
    args_ordered.append('body')

  for name in all_args:
    if name not in args_ordered:
      args_ordered.append(name)

  for arg in args_ordered:
    if arg in skip_parameters:
      continue

    repeated = ''
    if arg in parameters.repeated_params:
      repeated = ' (repeated)'
    required = ''
    if arg in parameters.required_params:
      required = ' (required)'
    paramdesc = methodDesc['parameters'][parameters.argmap[arg]]
    paramdoc = paramdesc.get('description', 'A parameter')
    if '$ref' in paramdesc:
      docs.append(
          ('  %s: object, %s%s%s\n    The object takes the'
          ' form of:\n\n%s\n\n') % (arg, paramdoc, required, repeated,
            schema.prettyPrintByName(paramdesc['$ref'])))
    else:
      paramtype = paramdesc.get('type', 'string')
      docs.append('  %s: %s, %s%s%s\n' % (arg, paramtype, paramdoc, required,
                                          repeated))
    enum = paramdesc.get('enum', [])
    enumDesc = paramdesc.get('enumDescriptions', [])
    if enum and enumDesc:
      docs.append('    Allowed values\n')
      for (name, desc) in zip(enum, enumDesc):
        docs.append('      %s - %s\n' % (name, desc))
  if 'response' in methodDesc:
    if methodName.endswith('_media'):
      docs.append('\nReturns:\n  The media object as a string.\n\n    ')
    else:
      docs.append('\nReturns:\n  An object of the form:\n\n    ')
      docs.append(schema.prettyPrintSchema(methodDesc['response']))

  setattr(method, '__doc__', ''.join(docs))
  return (methodName, method)
def createNextMethod(methodName):
    """Creates any _next methods for attaching to a Resource.

    The _next methods allow for easy iteration through list() responses.

    Args:
      methodName: string, name of the method to use.

    Returns:
      Tuple of (fixed method name, function object) to attach to a Resource.
    """
    methodName = fix_method_name(methodName)

    def methodNext(self, previous_request, previous_response):
        """Retrieves the next page of results.

        Args:
          previous_request: The request for the previous page. (required)
          previous_response: The response from the request for the previous page. (required)

        Returns:
          A request object that you can call 'execute()' on to request the next
          page. Returns None if there are no more items in the collection.
        """
        # Retrieve nextPageToken from previous_response
        # Use as pageToken in previous_request to create new request.
        if 'nextPageToken' not in previous_response:
            return None
        # Shallow copy so the caller's original request object is not mutated.
        request = copy.copy(previous_request)
        pageToken = previous_response['nextPageToken']
        # NOTE: urlparse / urllib.urlencode are Python 2 APIs; this module
        # targets Python 2 (see dict.iteritems() usage elsewhere in the file).
        parsed = list(urlparse.urlparse(request.uri))
        q = parse_qsl(parsed[4])
        # Find and remove old 'pageToken' value from URI
        newq = [(key, value) for (key, value) in q if key != 'pageToken']
        newq.append(('pageToken', pageToken))
        parsed[4] = urllib.urlencode(newq)
        uri = urlparse.urlunparse(parsed)
        request.uri = uri
        logger.info('URL being requested: %s' % uri)
        return request

    return (methodName, methodNext)
class Resource(object):
    """A class for interacting with a resource.

    Service methods, nested resources and *_next pagination helpers are
    attached dynamically from the deserialized discovery document; those
    attributes are tracked in ``_dynamic_attrs`` so they can be dropped for
    pickling and rebuilt on unpickle.
    """

    def __init__(self, http, baseUrl, model, requestBuilder, developerKey,
                 resourceDesc, rootDesc, schema):
        """Build a Resource from the API description.

        Args:
          http: httplib2.Http, Object to make http requests with.
          baseUrl: string, base URL for the API. All requests are relative to this
              URI.
          model: apiclient.Model, converts to and from the wire format.
          requestBuilder: class or callable that instantiates an
              apiclient.HttpRequest object.
          developerKey: string, key obtained from
              https://code.google.com/apis/console
          resourceDesc: object, section of deserialized discovery document that
              describes a resource. Note that the top level discovery document
              is considered a resource.
          rootDesc: object, the entire deserialized discovery document.
          schema: object, mapping of schema names to schema descriptions.
        """
        # Names of attributes added at runtime (bound service methods etc.).
        self._dynamic_attrs = []

        self._http = http
        self._baseUrl = baseUrl
        self._model = model
        self._developerKey = developerKey
        self._requestBuilder = requestBuilder
        self._resourceDesc = resourceDesc
        self._rootDesc = rootDesc
        self._schema = schema

        self._set_service_methods()

    def _set_dynamic_attr(self, attr_name, value):
        """Sets an instance attribute and tracks it in a list of dynamic attributes.

        Args:
          attr_name: string; The name of the attribute to be set
          value: The value being set on the object and tracked in the dynamic cache.
        """
        self._dynamic_attrs.append(attr_name)
        self.__dict__[attr_name] = value

    def __getstate__(self):
        """Trim the state down to something that can be pickled.

        Uses the fact that the instance variable _dynamic_attrs holds attrs that
        will be wiped and restored on pickle serialization.
        """
        state_dict = copy.copy(self.__dict__)
        # Dynamically attached bound methods/closures are not picklable;
        # strip them and the tracking list itself.
        for dynamic_attr in self._dynamic_attrs:
            del state_dict[dynamic_attr]
        del state_dict['_dynamic_attrs']
        return state_dict

    def __setstate__(self, state):
        """Reconstitute the state of the object from being pickled.

        Uses the fact that the instance variable _dynamic_attrs holds attrs that
        will be wiped and restored on pickle serialization.
        """
        self.__dict__.update(state)
        self._dynamic_attrs = []
        # Rebuild the dynamic methods from the stored discovery description.
        self._set_service_methods()

    def _set_service_methods(self):
        # Attach plain API methods, nested resources and *_next helpers.
        self._add_basic_methods(self._resourceDesc, self._rootDesc, self._schema)
        self._add_nested_resources(self._resourceDesc, self._rootDesc, self._schema)
        self._add_next_methods(self._resourceDesc, self._schema)

    def _add_basic_methods(self, resourceDesc, rootDesc, schema):
        # Add basic methods to Resource
        if 'methods' in resourceDesc:
            # NOTE: dict.iteritems() — this module targets Python 2.
            for methodName, methodDesc in resourceDesc['methods'].iteritems():
                fixedMethodName, method = createMethod(
                    methodName, methodDesc, rootDesc, schema)
                self._set_dynamic_attr(fixedMethodName,
                                       method.__get__(self, self.__class__))
                # Add in _media methods. The functionality of the attached method will
                # change when it sees that the method name ends in _media.
                if methodDesc.get('supportsMediaDownload', False):
                    fixedMethodName, method = createMethod(
                        methodName + '_media', methodDesc, rootDesc, schema)
                    self._set_dynamic_attr(fixedMethodName,
                                           method.__get__(self, self.__class__))

    def _add_nested_resources(self, resourceDesc, rootDesc, schema):
        # Add in nested resources
        if 'resources' in resourceDesc:

            def createResourceMethod(methodName, methodDesc):
                """Create a method on the Resource to access a nested Resource.

                Args:
                  methodName: string, name of the method to use.
                  methodDesc: object, fragment of deserialized discovery document that
                      describes the method.
                """
                methodName = fix_method_name(methodName)

                def methodResource(self):
                    # Each access builds a child Resource sharing this
                    # instance's http/model/request plumbing.
                    return Resource(http=self._http, baseUrl=self._baseUrl,
                                    model=self._model, developerKey=self._developerKey,
                                    requestBuilder=self._requestBuilder,
                                    resourceDesc=methodDesc, rootDesc=rootDesc,
                                    schema=schema)

                setattr(methodResource, '__doc__', 'A collection resource.')
                setattr(methodResource, '__is_resource__', True)
                return (methodName, methodResource)

            for methodName, methodDesc in resourceDesc['resources'].iteritems():
                fixedMethodName, method = createResourceMethod(methodName, methodDesc)
                self._set_dynamic_attr(fixedMethodName,
                                       method.__get__(self, self.__class__))

    def _add_next_methods(self, resourceDesc, schema):
        # Add _next() methods
        # Look for response bodies in schema that contain nextPageToken, and methods
        # that take a pageToken parameter.
        if 'methods' in resourceDesc:
            for methodName, methodDesc in resourceDesc['methods'].iteritems():
                if 'response' in methodDesc:
                    responseSchema = methodDesc['response']
                    if '$ref' in responseSchema:
                        # Resolve schema reference to its full description.
                        responseSchema = schema.get(responseSchema['$ref'])
                    hasNextPageToken = 'nextPageToken' in responseSchema.get('properties',
                                                                             {})
                    hasPageToken = 'pageToken' in methodDesc.get('parameters', {})
                    if hasNextPageToken and hasPageToken:
                        fixedMethodName, method = createNextMethod(methodName + '_next')
                        self._set_dynamic_attr(fixedMethodName,
                                               method.__get__(self, self.__class__))
| 37.2875 | 84 | 0.685104 |
f49cb071187accc7cb3c10d3d67528cbc935c71f | 1,175 | py | Python | tfmpl/meta.py | cuijianaaa/tf-matplotlib | a197b77f71c32c56e54368d716d9603fd3903f1a | [
"MIT"
] | null | null | null | tfmpl/meta.py | cuijianaaa/tf-matplotlib | a197b77f71c32c56e54368d716d9603fd3903f1a | [
"MIT"
] | null | null | null | tfmpl/meta.py | cuijianaaa/tf-matplotlib | a197b77f71c32c56e54368d716d9603fd3903f1a | [
"MIT"
] | null | null | null | # Copyright 2018 Christoph Heindl.
#
# Licensed under MIT License
# ============================================================
from functools import wraps
import tensorflow as tf
from collections import Sequence
def vararg_decorator(f):
    '''Let decorator `f` be applied both bare (`@f`) and with arguments
    (`@f(...)`).

    When the wrapper is invoked with exactly one positional callable and no
    keywords, it is treated as the bare-decorator case and `f` is applied
    directly; otherwise the received arguments are captured and a decorator
    awaiting the real target is returned.
    '''
    @wraps(f)
    def dispatcher(*d_args, **d_kwargs):
        bare_use = len(d_args) == 1 and not d_kwargs and callable(d_args[0])
        if bare_use:
            return f(d_args[0])
        return lambda target: f(target, *d_args, **d_kwargs)
    return dispatcher
class PositionalTensorArgs:
    '''Split positional arguments into tensor and non-tensor parts.

    Records, for each tensor among `args`, its position so the tensors can be
    extracted for graph evaluation and later merged back in place.
    '''

    def __init__(self, args):
        self.args = args
        # (position, tensor) pairs, preserving original argument order.
        self.tf_args = [(pos, arg) for pos, arg in enumerate(args)
                        if tf.is_tensor(arg)]

    @property
    def tensor_args(self):
        '''The tensor arguments only, in positional order.'''
        return [arg for _, arg in self.tf_args]

    def mix_args(self, tensor_values):
        '''Return `args` with each tensor replaced by its evaluated value.'''
        merged = list(self.args)
        for value, (pos, _) in zip(tensor_values, self.tf_args):
            merged[pos] = value
        return merged
def as_list(x):
    '''Ensure `x` is of list type.

    None maps to the empty list; any Sequence is copied into a new list
    (note: a string is a Sequence and therefore becomes a list of its
    characters); any other value is wrapped as a single-element list.
    '''
    if x is None:
        return []
    if isinstance(x, Sequence):
        return list(x)
    return [x]
| 25 | 76 | 0.572766 |
001640dd32ada8012089f585d9586b350608f373 | 2,846 | py | Python | salt/beacons/network_info.py | jbq/pkg-salt | b6742e03cbbfb82f4ce7db2e21a3ff31b270cdb3 | [
"Apache-2.0"
] | 1 | 2020-09-16T21:31:02.000Z | 2020-09-16T21:31:02.000Z | salt/beacons/network_info.py | jbq/pkg-salt | b6742e03cbbfb82f4ce7db2e21a3ff31b270cdb3 | [
"Apache-2.0"
] | null | null | null | salt/beacons/network_info.py | jbq/pkg-salt | b6742e03cbbfb82f4ce7db2e21a3ff31b270cdb3 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
'''
Beacon to monitor statistics from ethernet adapters
.. versionadded:: 2015.5.0
'''
# Import Python libs
from __future__ import absolute_import
import logging
import psutil
log = logging.getLogger(__name__)
__virtualname__ = 'network_info'
__attrs = ['bytes_sent', 'bytes_recv', 'packets_sent',
'packets_recv', 'errin', 'errout',
'dropin', 'dropout']
def _to_list(obj):
    '''
    Convert an snetinfo object into a plain dict keyed by the stat names
    listed in ``__attrs`` (function name kept for compatibility).
    '''
    # Pull each tracked counter straight out of the object's __dict__.
    return {attr: obj.__dict__[attr] for attr in __attrs}
def __virtual__():
    # Salt loader hook: always load this beacon (psutil is imported
    # unconditionally at module level).
    return __virtualname__
def beacon(config):
    '''
    Emit the network statistics of this host.

    Specify thresholds for each network stat
    and only emit a beacon if any of them are
    exceeded.

    .. code-block:: yaml

        Emit beacon when any values are equal to
        configured values.

        beacons:
          network_info:
            eth0:
              - type: equal
              - bytes_sent: 100000
              - bytes_recv: 100000
              - packets_sent: 100000
              - packets_recv: 100000
              - errin: 100
              - errout: 100
              - dropin: 100
              - dropout: 100

    .. code-block:: yaml

        Emit beacon when any values are greater
        than the configured values.

        beacons:
          network_info:
            eth0:
              - type: greater
              - bytes_sent: 100000
              - bytes_recv: 100000
              - packets_sent: 100000
              - packets_recv: 100000
              - errin: 100
              - errout: 100
              - dropin: 100
              - dropout: 100
    '''
    ret = []

    # Per-interface I/O counters for every NIC on the host.
    _stats = psutil.net_io_counters(pernic=True)

    for interface in config:
        if interface in _stats:
            _if_stats = _stats[interface]
            _diff = False
            for attr in __attrs:
                if attr in config[interface]:
                    # 'type' selects the comparison: 'equal' or 'greater';
                    # when 'type' is absent, equality is used as the default.
                    if 'type' in config[interface] and config[interface]['type'] == 'equal':
                        if _if_stats.__dict__[attr] == int(config[interface][attr]):
                            _diff = True
                    elif 'type' in config[interface] and config[interface]['type'] == 'greater':
                        if _if_stats.__dict__[attr] > int(config[interface][attr]):
                            _diff = True
                    else:
                        if _if_stats.__dict__[attr] == int(config[interface][attr]):
                            _diff = True
            # Any single triggered stat emits the full counter snapshot.
            if _diff:
                ret.append({'interface': interface,
                            'network_info': _to_list(_if_stats)})
    return ret
| 27.104762 | 96 | 0.503865 |
21f2760999a4daa57234a0e1c767ea25c76322ba | 405 | py | Python | web/models/classifier/model.py | xinliy/pyWeb | 929dd3a03540a57eac6a98f05f43b9f5f925ee98 | [
"BSD-3-Clause"
] | 1 | 2021-01-10T09:14:30.000Z | 2021-01-10T09:14:30.000Z | web/models/classifier/model.py | xinliy/pyWeb | 929dd3a03540a57eac6a98f05f43b9f5f925ee98 | [
"BSD-3-Clause"
] | 12 | 2019-12-05T01:09:41.000Z | 2022-03-12T00:07:50.000Z | web/models/classifier/model.py | xinliy/pyWeb | 929dd3a03540a57eac6a98f05f43b9f5f925ee98 | [
"BSD-3-Clause"
] | 1 | 2020-11-20T13:42:39.000Z | 2020-11-20T13:42:39.000Z | from torchvision import models
import torch.nn as nn
def Multiclass_classifier(n_classes):
    """Build a VGG16-based multiclass classifier with a fresh final head.

    The pretrained backbone is frozen; only the replacement head
    (4096 -> 256 -> n_classes, log-softmax output) remains trainable.
    """
    backbone = models.vgg16(pretrained=True)
    # Freeze every pretrained weight so training only updates the new head.
    for weight in backbone.parameters():
        weight.requires_grad = False
    head = nn.Sequential(
        nn.Linear(4096, 256),
        nn.ReLU(),
        nn.Dropout(0.4),
        nn.Linear(256, n_classes),
        nn.LogSoftmax(dim=1),
    )
    backbone.classifier[6] = head
    return backbone
| 25.3125 | 41 | 0.646914 |
c072fe60caf085b734595d725dcfc600473d8c57 | 3,667 | py | Python | build_msvc/msvc-autogen.py | VaderCoinProject/vadercoin | b513c794b014d40e5aad281dd1f54845c46d216c | [
"MIT"
] | null | null | null | build_msvc/msvc-autogen.py | VaderCoinProject/vadercoin | b513c794b014d40e5aad281dd1f54845c46d216c | [
"MIT"
] | null | null | null | build_msvc/msvc-autogen.py | VaderCoinProject/vadercoin | b513c794b014d40e5aad281dd1f54845c46d216c | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2016-2019 The Vadercoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
import os
import re
import argparse
from shutil import copyfile
# Absolute path to the repository's src/ directory (this script lives in build_msvc/).
SOURCE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'src'))

# Default MSVC platform toolset used when -toolset is not given on the command line.
DEFAULT_PLATFORM_TOOLSET = R'v141'

# Targets whose *_SOURCES lists are harvested from the autotools Makefiles.
libs = [
    'libvadercoin_cli',
    'libvadercoin_common',
    'libvadercoin_crypto',
    'libvadercoin_server',
    'libvadercoin_util',
    'libvadercoin_wallet_tool',
    'libvadercoin_wallet',
    'libvadercoin_zmq',
    'bench_vadercoin',
    'libtest_util',
]

# Source files to exclude from the generated .vcxproj files.
ignore_list = [
]

# Mapping: library name -> list of (source_filename, object_filename) pairs,
# populated by parse_makefile() below.
lib_sources = {}
def parse_makefile(makefile):
    """Scan an autotools Makefile and collect .cpp sources per library.

    Walks the file line by line as a small state machine: when a
    `<lib>_SOURCES = \\` header for one of `libs` is seen, subsequent
    continuation lines are consumed as source entries until a line without a
    trailing backslash ends the list. Results accumulate into the module-level
    `lib_sources` dict as (windows_path, object_filename) pairs.
    """
    with open(makefile, 'r', encoding='utf-8') as file:
        current_lib = ''
        for line in file.read().splitlines():
            if current_lib:
                # Inside a *_SOURCES continuation block: first token is the path.
                source = line.split()[0]
                # Skip make variables ($...), non-C++ files and explicit excludes.
                if source.endswith('.cpp') and not source.startswith('$') and source not in ignore_list:
                    source_filename = source.replace('/', '\\')
                    # Flatten the path into a unique .obj name (strip '.cpp').
                    object_filename = source.replace('/', '_')[:-4] + ".obj"
                    lib_sources[current_lib].append((source_filename, object_filename))
                # No trailing backslash -> the continuation list ends here.
                if not line.endswith('\\'):
                    current_lib = ''
                continue
            for lib in libs:
                _lib = lib.replace('-', '_')
                # Match e.g. "libvadercoin_util_a_SOURCES = \" headers.
                if re.search(_lib + '.*_SOURCES \\= \\\\', line):
                    current_lib = lib
                    lib_sources[current_lib] = []
                    break
def set_common_properties(toolset):
    """Rewrite common.init.vcxproj so its <PlatformToolset> element names *toolset*."""
    project_file = os.path.join(SOURCE_DIR, '../build_msvc/common.init.vcxproj')
    with open(project_file, 'r', encoding='utf-8') as rfile:
        content = rfile.read()
    # Non-greedy match so only the element body is replaced.
    content = re.sub('<PlatformToolset>.*?</PlatformToolset>',
                     '<PlatformToolset>' + toolset + '</PlatformToolset>',
                     content)
    with open(project_file, 'w', encoding='utf-8', newline='\n') as wfile:
        wfile.write(content)
def main():
    """Generate MSVC .vcxproj files from the autotools Makefile source lists."""
    parser = argparse.ArgumentParser(description='Vadercoin-core msbuild configuration initialiser.')
    parser.add_argument('-toolset', nargs='?',help='Optionally sets the msbuild platform toolset, e.g. v142 for Visual Studio 2019.'
                        ' default is %s.'%DEFAULT_PLATFORM_TOOLSET)
    args = parser.parse_args()
    if args.toolset:
        set_common_properties(args.toolset)

    # Harvest per-library source lists from every Makefile in src/.
    for makefile_name in os.listdir(SOURCE_DIR):
        if 'Makefile' in makefile_name:
            parse_makefile(os.path.join(SOURCE_DIR, makefile_name))
    # Render each library's .vcxproj from its .vcxproj.in template by
    # substituting the @SOURCE_FILES@ placeholder with <ClCompile> items.
    for key, value in lib_sources.items():
        vcxproj_filename = os.path.abspath(os.path.join(os.path.dirname(__file__), key, key + '.vcxproj'))
        content = ''
        for source_filename, object_filename in value:
            content += ' <ClCompile Include="..\\..\\src\\' + source_filename + '">\n'
            content += ' <ObjectFileName>$(IntDir)' + object_filename + '</ObjectFileName>\n'
            content += ' </ClCompile>\n'
        with open(vcxproj_filename + '.in', 'r', encoding='utf-8') as vcxproj_in_file:
            with open(vcxproj_filename, 'w', encoding='utf-8') as vcxproj_file:
                vcxproj_file.write(vcxproj_in_file.read().replace(
                    '@SOURCE_FILES@\n', content))
    # Install the static config headers expected by the MSVC build.
    copyfile(os.path.join(SOURCE_DIR,'../build_msvc/vadercoin_config.h'), os.path.join(SOURCE_DIR, 'config/vadercoin-config.h'))
    copyfile(os.path.join(SOURCE_DIR,'../build_msvc/libsecp256k1_config.h'), os.path.join(SOURCE_DIR, 'secp256k1/src/libsecp256k1-config.h'))

if __name__ == '__main__':
    main()
516b5c226c3721d88d70ad9f7f287e2ee1ac6c81 | 5,215 | py | Python | pili/api.py | nervending/pili-sdk-python3 | 9f42cd4ea2214768b2daaf2fa89dbb82261ef768 | [
"MIT"
] | null | null | null | pili/api.py | nervending/pili-sdk-python3 | 9f42cd4ea2214768b2daaf2fa89dbb82261ef768 | [
"MIT"
] | null | null | null | pili/api.py | nervending/pili-sdk-python3 | 9f42cd4ea2214768b2daaf2fa89dbb82261ef768 | [
"MIT"
] | null | null | null | from .auth import auth_interface
import pili.conf as conf
from urllib.request import Request
import json
import base64
def normalize(args, keyword):
    """Validate keyword arguments and strip entries whose value is None.

    Args:
        args: dict of keyword arguments to clean (mutated in place).
        keyword: iterable of allowed key names.

    Returns:
        The same ``args`` dict with all None-valued entries removed.

    Raises:
        ValueError: if ``args`` contains a key not listed in ``keyword``.
    """
    if set(args) - set(keyword):
        raise ValueError('invalid key')
    # Collect keys first: deleting from a dict while iterating its items()
    # raises "dictionary changed size during iteration" on Python 3.
    for k in [k for k, v in args.items() if v is None]:
        del args[k]
    return args
@auth_interface
def delete_room(version, roomName):
    """Build an authenticated DELETE request removing an RTC room."""
    url = "http://%s/%s/rooms/%s" % (conf.RTC_API_HOST, version, roomName)
    req = Request(url=url)
    # urllib Request defaults to GET; override the verb.
    req.get_method = lambda: 'DELETE'
    return req

@auth_interface
def get_room(version, roomName):
    """Build a GET request fetching an RTC room's details."""
    url = "http://%s/%s/rooms/%s" % (conf.RTC_API_HOST, version, roomName)
    return Request(url=url)

@auth_interface
def get_user(version, roomName):
    """Build a GET request listing the users in an RTC room."""
    url = "http://%s/%s/rooms/%s/users" % (conf.RTC_API_HOST, version, roomName)
    return Request(url=url)

@auth_interface
def kick_user(version, roomName, userId):
    """Build a DELETE request removing a user from an RTC room."""
    url = "http://%s/%s/rooms/%s/users/%s" % (conf.RTC_API_HOST, version, roomName, userId)
    req = Request(url=url)
    req.get_method = lambda: 'DELETE'
    return req

@auth_interface
def create_room(ownerId, version, roomName=None):
    """Build a POST request creating an RTC room owned by ownerId.

    roomName is optional; when omitted the server assigns one.
    """
    params = {'owner_id': ownerId}
    url = "http://%s/%s/rooms" % (conf.RTC_API_HOST, version)
    if bool(roomName):
        params['room_name'] = roomName
    encoded = json.dumps(params)
    # NOTE(review): json.dumps returns str, but urllib.request.Request
    # requires bytes for `data` on Python 3 — confirm whether auth_interface
    # encodes the body before sending.
    req = Request(url=url, data=encoded)
    req.get_method = lambda: 'POST'
    return req
@auth_interface
def create_stream(hub, **kwargs):
    """Build a POST request creating a stream in a hub (optional: key)."""
    keyword = ['key']
    encoded = json.dumps(normalize(kwargs, keyword))
    url = "http://%s/%s/hubs/%s/streams" % (conf.API_HOST, conf.API_VERSION, hub)
    return Request(url=url, data=encoded)

@auth_interface
def get_stream(hub, key):
    """Build a GET request fetching one stream by its key."""
    # Stream keys are URL-safe base64 encoded in the path.
    key = base64.urlsafe_b64encode(key.encode()).decode(encoding='utf-8')
    url = "http://%s/%s/hubs/%s/streams/%s" % (conf.API_HOST, conf.API_VERSION, hub, key)
    return Request(url=url)

@auth_interface
def get_stream_list(hub, **kwargs):
    """Build a GET request listing streams (liveonly/prefix/limit/marker)."""
    keyword = ['liveonly', 'prefix', 'limit', 'marker']
    args = normalize(kwargs, keyword)
    url = "http://%s/%s/hubs/%s/streams?" % (conf.API_HOST, conf.API_VERSION, hub)
    for k, v in args.items():
        url += "&%s=%s" % (k, v)
    return Request(url=url)

@auth_interface
def batch_live_status(hub, streams):
    """Build a POST request querying live status for a batch of streams."""
    encoded = json.dumps({"items": streams})
    url = "http://%s/%s/hubs/%s/livestreams?" % (conf.API_HOST, conf.API_VERSION, hub)
    return Request(url=url, data=encoded)

@auth_interface
def disable_stream(hub, key, till):
    """Build a POST request disabling a stream until `till` (unix time)."""
    key = base64.urlsafe_b64encode(key.encode()).decode(encoding='utf-8')
    url = "http://%s/%s/hubs/%s/streams/%s/disabled" % (conf.API_HOST, conf.API_VERSION, hub, key)
    encoded = json.dumps({"disabledTill": till})
    return Request(url=url, data=encoded)

@auth_interface
def get_status(hub, key):
    """Build a GET request for a stream's live status."""
    key = base64.urlsafe_b64encode(key.encode()).decode(encoding='utf-8')
    url = "http://%s/%s/hubs/%s/streams/%s/live" % (conf.API_HOST, conf.API_VERSION, hub, key)
    return Request(url=url)

@auth_interface
def stream_saveas(hub, key, **kwargs):
    """Build a POST request saving a stream segment as a file."""
    keyword = ['start', 'end', 'fname', 'format', 'pipeline', 'notify', 'expireDays']
    encoded = json.dumps(normalize(kwargs, keyword))
    key = base64.urlsafe_b64encode(key.encode()).decode(encoding='utf-8')
    url = "http://%s/%s/hubs/%s/streams/%s/saveas" % (conf.API_HOST, conf.API_VERSION, hub, key)
    return Request(url=url, data=encoded)

@auth_interface
def stream_snapshot(hub, key, **kwargs):
    """Build a POST request capturing a snapshot image from a stream."""
    keyword = ['time', 'fname', 'format']
    encoded = json.dumps(normalize(kwargs, keyword))
    key = base64.urlsafe_b64encode(key.encode()).decode(encoding='utf-8')
    url = "http://%s/%s/hubs/%s/streams/%s/snapshot" % (conf.API_HOST, conf.API_VERSION, hub, key)
    return Request(url=url, data=encoded)
@auth_interface
def get_history(hub, key, **kwargs):
    """Build a GET request for a stream's history activity (start/end)."""
    keyword = ['start', 'end']
    args = normalize(kwargs, keyword)
    key = base64.urlsafe_b64encode(key.encode()).decode(encoding='utf-8')
    url = "http://%s/%s/hubs/%s/streams/%s/historyactivity?" % (conf.API_HOST, conf.API_VERSION, hub, key)
    for k, v in args.items():
        url += "&%s=%s" % (k, v)
    return Request(url=url)

@auth_interface
def update_stream_converts(hub, key, profiles):
    """Build a POST request replacing a stream's transcoding profiles."""
    key = base64.urlsafe_b64encode(key.encode()).decode(encoding='utf-8')
    url = "http://%s/%s/hubs/%s/streams/%s/converts" % (conf.API_HOST, conf.API_VERSION, hub, key)
    encoded = json.dumps({"converts": profiles})
    return Request(url=url, data=encoded)

@auth_interface
def bandwidth_count_now(hub):
    """Build a GET request for the hub's current play statistics."""
    url = "http://%s/%s/hubs/%s/stat/play" % (conf.API_HOST, conf.API_VERSION, hub)
    return Request(url=url)

@auth_interface
def bandwidth_count_history(hub, **kwargs):
    """Build a GET request for historical play statistics.

    Accepts start/end/limit/marker filters.
    """
    keyword = ['start', 'end', 'limit', 'marker']
    args = normalize(kwargs, keyword)
    # Fix: the '?' separator was missing, so filters were appended as
    # ".../history&start=..." — an invalid query string. Now matches the
    # "...?" + "&key=value" pattern used by get_history/get_stream_list.
    url = "http://%s/%s/hubs/%s/stat/play/history?" % (conf.API_HOST, conf.API_VERSION, hub)
    for k, v in args.items():
        url += "&%s=%s" % (k, v)
    return Request(url=url)

@auth_interface
def bandwidth_count_detail(hub, time):
    """Build a GET request for play-stat detail at a specific time."""
    url = "http://%s/%s/hubs/%s/stat/play/history/detail?time=%s" % (conf.API_HOST, conf.API_VERSION, hub, time)
    return Request(url=url)
| 32.391304 | 112 | 0.663663 |
e6741f78076b78bb4f494f5f769c464235796e84 | 3,771 | py | Python | gbk2utf8.py | riiy/learn_python | f087ff3d504bf7d73d1d45f56eafd6de5ec9b661 | [
"Apache-2.0"
] | 1 | 2017-05-02T10:34:01.000Z | 2017-05-02T10:34:01.000Z | gbk2utf8.py | congminghaoxue/learn_python | f087ff3d504bf7d73d1d45f56eafd6de5ec9b661 | [
"Apache-2.0"
] | null | null | null | gbk2utf8.py | congminghaoxue/learn_python | f087ff3d504bf7d73d1d45f56eafd6de5ec9b661 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Date : 2017-11-18 18:18:56
# @Author : Zhou Bo (congminghaoxue@gmail.com)
# @Link : https://congminghaoxue.github.io/
# @Version : $Id$
try:
from chardet.universaldetector import UniversalDetector
IsAuto = True
except ImportError:
IsAuto = False
import os
import os.path
import glob
def Convert_Auto(filename, out_enc="utf-8"):
    ''' Re-encode text file with auto detected current encode. Needs chardet lib.

    Input Parameter:
        filename: full path and file name, e.g. c:/dir1/file.txt
        out_enc: new encode. Default as 'utf-8'
    Output Parameter:
        None
    '''
    # Fix: in_enc was referenced in the except handler but is unassigned when
    # open() itself fails, which raised a NameError that masked the real
    # IOError message. Pre-initialise it so the error path is always safe.
    in_enc = "unknown"
    try:
        with open(filename, 'rb') as f:
            # Sample the first 1KB (prefixed with a space byte, as before)
            # for encoding detection.
            b = b' '
            b += f.read(1024)
            u = UniversalDetector()
            u.reset()
            u.feed(b)
            u.close()
            f.seek(0)
            b = f.read()
            # Fall back to the target encoding if detection is inconclusive
            # (result['encoding'] can be None).
            in_enc = u.result['encoding'] or out_enc
            new_content = b.decode(in_enc, 'ignore')
        with open(filename, 'w', encoding=out_enc) as f:
            f.write(new_content)
        print ("Success: "+filename+" converted from "+ in_enc+" to "+out_enc +" !")
    except IOError:
        print ("Error: "+filename+" FAIL to converted from "+ in_enc+" to "+out_enc+" !" )
def Convert_Manu(filename, in_enc='gbk', out_enc="utf-8"):
    ''' Re-encode a text file, with the current encoding given manually.

    Input Parameter:
        filename: full path and file name, e.g. c:/dir1/file.txt
        in_enc: current encode. Default as 'gbk'
        out_enc: new encode. Default as 'utf-8'
    Output Parameter:
        None
    '''
    try:
        print ("convert " + filename)
        # Read raw bytes, decode with the declared source encoding
        # (silently dropping undecodable bytes), then rewrite in place.
        with open(filename, 'rb') as src:
            raw = src.read()
        text = raw.decode(in_enc, 'ignore')
        with open(filename, 'w', encoding=out_enc) as dst:
            dst.write(text)
        print ("Success: "+filename+" converted from "+ in_enc+" to "+out_enc +" !")
    except IOError:
        print ("Error: "+filename+" FAIL to converted from "+ in_enc+" to "+out_enc+" !" )
def explore(dir, suffix, IsLoopSubDIR=True):
    '''Convert the encoding of every matching file under a folder.

    Input:
        dir         : Folder to process
        suffix      : File suffix filter (e.g. 'cue'); '' matches all files
        IsLoopSubDIR: True -- Include files in sub folders
                      False-- Only include files in the current folder
    Output:
        NONE
    '''
    # NOTE(review): main() below calls explore(file_path, True, suffix),
    # i.e. with suffix and IsLoopSubDIR swapped — verify call sites pass
    # arguments in the (dir, suffix, IsLoopSubDIR) order declared here.
    if IsLoopSubDIR:
        flist=getSubFileList(dir, suffix)
    else:
        flist=getCurrFileList(dir, suffix)
    for fname in flist:
        # Prefer chardet-based auto detection when available (IsAuto is set
        # at import time); otherwise assume GBK input.
        if IsAuto:
            Convert_Auto(fname, 'utf-8')
        else:
            Convert_Manu(fname, 'gbk', 'utf-8')
def getSubFileList(dir, suffix=''):
    ''' Get all file list with specified suffix under current folder (including sub folders).

    Input:
        dir    : Folder to walk
        suffix : default to blank, which matches every file
    Output:
        File list (full paths)
    '''
    matches = []
    for root, _dirs, names in os.walk(dir):
        # str.endswith('') is True for every name, so an empty suffix
        # selects all files.
        matches.extend(os.path.join(root, name)
                       for name in names if name.endswith(suffix))
    return matches
def getCurrFileList(dir, suffix=''):
    ''' Get all file list with specified suffix under the given folder (this level only).

    Input:
        dir    : Folder to list
        suffix : default to blank, which matches every file
    Output:
        File list (paths joined with `dir`)
    '''
    # Fix: the original globbed '*'+suffix relative to the *current working
    # directory* and then joined those names onto `dir`, so it listed the
    # wrong folder. Glob inside `dir` itself instead.
    pattern = '*' + suffix if suffix else '*'
    return glob.glob(os.path.join(dir, pattern))
def main():
    """Example driver: convert every *.cue file under the music folder."""
    file_path='/Users/zhoubo/Music/'
    suffix='cue'
    # Fix: explore() is declared as explore(dir, suffix, IsLoopSubDIR); the
    # original call passed (file_path, True, suffix), which made suffix=True
    # (breaking str.endswith) and suffix the recursion flag. Pass arguments
    # in the declared order.
    explore(file_path, suffix, True)

if __name__ == "__main__":
    main()
2652ff8d86cfa18e3abdfcfe835c8533d837b1ef | 5,570 | py | Python | litex_boards/targets/decklink_quad_hdmi_recorder.py | AEW2015/litex-boards | e98ddd58d3dbe2229d04281d43cb0d13a06527ea | [
"BSD-2-Clause"
] | 177 | 2019-06-13T09:54:49.000Z | 2022-03-29T02:25:13.000Z | litex_boards/targets/decklink_quad_hdmi_recorder.py | zeldin/litex-boards | d52859d9ef5d8d210118c01ce89e29404ac8d7c6 | [
"BSD-2-Clause"
] | 347 | 2019-06-12T17:47:45.000Z | 2022-03-30T21:59:01.000Z | litex_boards/targets/decklink_quad_hdmi_recorder.py | zeldin/litex-boards | d52859d9ef5d8d210118c01ce89e29404ac8d7c6 | [
"BSD-2-Clause"
] | 202 | 2019-06-11T15:01:26.000Z | 2022-03-31T16:25:19.000Z | #!/usr/bin/env python3
#
# This file is part of LiteX-Boards.
#
# Copyright (c) 2021 Florent Kermarrec <florent@enjoy-digital.fr>
# SPDX-License-Identifier: BSD-2-Clause
# Build/Load bitstream:
# ./decklink_quad_hdmi_recorder.py --csr-csv=csr.csv --build --load
#
# Use:
# litex_server --jtag --jtag-config=openocd_xc7_ft232.cfg
# litex_term bridge
import os
import argparse
from migen import *
from litex_boards.platforms import quad_hdmi_recorder
from litex.soc.cores.clock import *
from litex.soc.integration.soc_core import *
from litex.soc.integration.builder import *
from litedram.common import PHYPadsReducer
from litedram.modules import MT41J256M16
from litedram.phy import usddrphy
from litepcie.phy.uspciephy import USPCIEPHY
from litepcie.software import generate_litepcie_software
# CRG ----------------------------------------------------------------------------------------------
class _CRG(Module):
    # Clock/Reset Generator: derives sys, sys4x and idelay clock domains
    # from the board's 200 MHz input clock.
    def __init__(self, platform, sys_clk_freq):
        self.clock_domains.cd_sys = ClockDomain()
        self.clock_domains.cd_sys4x = ClockDomain(reset_less=True)
        self.clock_domains.cd_pll4x = ClockDomain(reset_less=True)
        self.clock_domains.cd_idelay = ClockDomain()

        # # #

        # MMCM: clk200 -> 4x system clock (buffered below) and a 200 MHz
        # reference for the IDELAYCTRL.
        self.submodules.pll = pll = USMMCM(speedgrade=-2)
        pll.register_clkin(platform.request("clk200"), 200e6)
        pll.create_clkout(self.cd_pll4x, sys_clk_freq*4, buf=None, with_reset=False)
        pll.create_clkout(self.cd_idelay, 200e6)
        platform.add_false_path_constraints(self.cd_sys.clk, pll.clkin) # Ignore sys_clk to pll.clkin path created by SoC's rst.

        self.specials += [
            # sys clock = pll4x divided by 4; sys4x is pll4x buffered directly.
            Instance("BUFGCE_DIV", name="main_bufgce_div",
                p_BUFGCE_DIVIDE=4,
                i_CE=1, i_I=self.cd_pll4x.clk, o_O=self.cd_sys.clk),
            Instance("BUFGCE", name="main_bufgce",
                i_CE=1, i_I=self.cd_pll4x.clk, o_O=self.cd_sys4x.clk),
        ]

        self.submodules.idelayctrl = USIDELAYCTRL(cd_ref=self.cd_idelay, cd_sys=self.cd_sys)
# BaseSoC ------------------------------------------------------------------------------------------
class BaseSoC(SoCCore):
    # LiteX SoC for the Decklink Quad HDMI Recorder: CPU + DDR3 and optional
    # Gen3 x4 PCIe with one DMA channel.
    def __init__(self, sys_clk_freq=int(200e6), with_pcie=False, **kwargs):
        platform = quad_hdmi_recorder.Platform()

        # SoCCore ----------------------------------------------------------------------------------
        # No dedicated UART pins on this board: tunnel the console over the
        # crossover UART (accessible through JTAGBone, see module docstring).
        kwargs["uart_name"] = "crossover"
        SoCCore.__init__(self, platform, sys_clk_freq,
            ident          = "LiteX SoC on Blackmagic Decklink Quad HDMI Recorder",
            ident_version  = True,
            **kwargs)

        # CRG --------------------------------------------------------------------------------------
        self.submodules.crg = _CRG(platform, sys_clk_freq)

        # JTAGBone --------------------------------------------------------------------------------
        self.add_jtagbone()

        # DDR3 SDRAM -------------------------------------------------------------------------------
        if not self.integrated_main_ram_size:
            # PHYPadsReducer: only byte groups 0-3 of the DDRAM pads are used.
            self.submodules.ddrphy = usddrphy.USDDRPHY(
                pads             = PHYPadsReducer(platform.request("ddram"), [0, 1, 2, 3]),
                memtype          = "DDR3",
                sys_clk_freq     = sys_clk_freq,
                iodelay_clk_freq = 200e6)
            self.add_sdram("sdram",
                phy           = self.ddrphy,
                module        = MT41J256M16(sys_clk_freq, "1:4"),
                l2_cache_size = kwargs.get("l2_size", 8192)
            )

        # PCIe -------------------------------------------------------------------------------------
        if with_pcie:
            self.submodules.pcie_phy = USPCIEPHY(platform, platform.request("pcie_x4"),
                speed      = "gen3",
                data_width = 128,
                bar0_size  = 0x20000)
            self.add_pcie(phy=self.pcie_phy, ndmas=1)
            # False Paths (FIXME: Improve integration).
            platform.toolchain.pre_placement_commands.append("set_false_path -from [get_clocks sys_clk] -to [get_clocks pcie_clk_1]")
            platform.toolchain.pre_placement_commands.append("set_false_path -from [get_clocks pcie_clk_1] -to [get_clocks sys_clk]")
# Build --------------------------------------------------------------------------------------------
def main():
    """Parse CLI options, then build and/or load the SoC bitstream."""
    parser = argparse.ArgumentParser(description="LiteX SoC on Blackmagic Decklink Quad HDMI Recorder")
    parser.add_argument("--build", action="store_true", help="Build bitstream")
    parser.add_argument("--load", action="store_true", help="Load bitstream")
    parser.add_argument("--sys-clk-freq", default=200e6, help="System clock frequency (default: 200MHz)")
    parser.add_argument("--with-pcie", action="store_true", help="Enable PCIe support")
    parser.add_argument("--driver", action="store_true", help="Generate PCIe driver")
    builder_args(parser)
    soc_core_args(parser)
    args = parser.parse_args()

    soc = BaseSoC(
        sys_clk_freq = int(float(args.sys_clk_freq)),
        with_pcie    = args.with_pcie,
        **soc_core_argdict(args)
    )
    builder = Builder(soc, **builder_argdict(args))
    # Elaborate always; only run synthesis/place&route when --build is given.
    builder.build(run=args.build)

    # Generate the LitePCIe Linux driver sources alongside the gateware.
    if args.driver:
        generate_litepcie_software(soc, os.path.join(builder.output_dir, "driver"))

    # Load the bitstream into the FPGA (volatile) via the board programmer.
    if args.load:
        prog = soc.platform.create_programmer()
        prog.load_bitstream(os.path.join(builder.gateware_dir, soc.build_name + ".bit"))

if __name__ == "__main__":
    main()
| 42.19697 | 133 | 0.570557 |
d288f9566c7fe7ed334f9d8b5d33348c40b44d9b | 1,282 | py | Python | app/router_ssj_typeforms.py | WildflowerSchools/wf-airtable-api | 963021e5108462d33efa222fedb00890e1788ad6 | [
"MIT"
] | null | null | null | app/router_ssj_typeforms.py | WildflowerSchools/wf-airtable-api | 963021e5108462d33efa222fedb00890e1788ad6 | [
"MIT"
] | null | null | null | app/router_ssj_typeforms.py | WildflowerSchools/wf-airtable-api | 963021e5108462d33efa222fedb00890e1788ad6 | [
"MIT"
] | null | null | null | from fastapi import APIRouter, Depends, Request
from . import auth
from .models import ssj_typeform_start_a_school as ssj_typeform_start_a_school_models
from .utils.utils import get_airtable_client
# OpenAPI tag shown for this router's endpoints in the generated docs.
OPENAPI_TAG_METADATA = {
    "name": "SSJ Typeforms",
    "description": "SSJ Typeform data in Airtable (mostly for storing responses)",
}

# Every route on this router requires a JWT carrying at least one of the
# listed read scopes (enforced by auth.JWTBearer).
router = APIRouter(
    prefix="/ssj_typeforms",
    tags=[ssj_typeform_start_a_school_models.MODEL_TYPE],
    dependencies=[Depends(auth.JWTBearer(any_scope=["read:all", "read:educators", "read:schools"]))],
    responses={404: {"description": "Not found"}},
)
@router.post("/start_a_school_response")
async def create_start_a_school_response(
    payload: ssj_typeform_start_a_school_models.CreateApiSSJTypeformStartASchoolFields, request: Request
):
    """Persist a 'Start a School' Typeform response to Airtable and return it.

    NOTE(review): the router-level dependency only checks read:* scopes even
    though this endpoint writes a record — confirm that is intentional.
    """
    airtable_client = get_airtable_client(request)
    # API payload -> Airtable fields, create the record, then map the created
    # Airtable record back into the API response shape.
    airtable_payload = payload.to_airtable()
    airtable_response = airtable_client.create_start_a_school_response(payload=airtable_payload)
    data = ssj_typeform_start_a_school_models.ApiSSJTypeformStartASchoolData.from_airtable(
        airtable_start_a_school=airtable_response, url_path_for=request.app.url_path_for
    )
    return ssj_typeform_start_a_school_models.ApiSSJTypeformStartASchoolResponse(data=data, links=None)
| 37.705882 | 104 | 0.798752 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.